repo_name (stringlengths 6–101) | path (stringlengths 4–300) | text (stringlengths 7–1.31M)
|---|---|---|
freman/genesysapi
|
client/external_contacts/put_externalcontacts_contact_note_responses.go
|
// Code generated by go-swagger; DO NOT EDIT.
package external_contacts
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime"
"github.com/go-openapi/strfmt"
"github.com/freman/genesysapi/models"
)
// PutExternalcontactsContactNoteReader is a Reader for the PutExternalcontactsContactNote structure.
type PutExternalcontactsContactNoteReader struct {
formats strfmt.Registry
}
// ReadResponse reads a server response into the received o.
func (o *PutExternalcontactsContactNoteReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
switch response.Code() {
case 200:
result := NewPutExternalcontactsContactNoteOK()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return result, nil
case 400:
result := NewPutExternalcontactsContactNoteBadRequest()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 401:
result := NewPutExternalcontactsContactNoteUnauthorized()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 403:
result := NewPutExternalcontactsContactNoteForbidden()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 404:
result := NewPutExternalcontactsContactNoteNotFound()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 408:
result := NewPutExternalcontactsContactNoteRequestTimeout()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 413:
result := NewPutExternalcontactsContactNoteRequestEntityTooLarge()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 415:
result := NewPutExternalcontactsContactNoteUnsupportedMediaType()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 422:
result := NewPutExternalcontactsContactNoteUnprocessableEntity()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 429:
result := NewPutExternalcontactsContactNoteTooManyRequests()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 500:
result := NewPutExternalcontactsContactNoteInternalServerError()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 503:
result := NewPutExternalcontactsContactNoteServiceUnavailable()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 504:
result := NewPutExternalcontactsContactNoteGatewayTimeout()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
default:
return nil, runtime.NewAPIError("response status code does not match any response statuses defined for this endpoint in the swagger spec", response, response.Code())
}
}
// NewPutExternalcontactsContactNoteOK creates a PutExternalcontactsContactNoteOK with default headers values
func NewPutExternalcontactsContactNoteOK() *PutExternalcontactsContactNoteOK {
return &PutExternalcontactsContactNoteOK{}
}
/*PutExternalcontactsContactNoteOK handles this case with default header values.
successful operation
*/
type PutExternalcontactsContactNoteOK struct {
Payload *models.Note
}
func (o *PutExternalcontactsContactNoteOK) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteOK %+v", 200, o.Payload)
}
func (o *PutExternalcontactsContactNoteOK) GetPayload() *models.Note {
return o.Payload
}
func (o *PutExternalcontactsContactNoteOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.Note)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteBadRequest creates a PutExternalcontactsContactNoteBadRequest with default headers values
func NewPutExternalcontactsContactNoteBadRequest() *PutExternalcontactsContactNoteBadRequest {
return &PutExternalcontactsContactNoteBadRequest{}
}
/*PutExternalcontactsContactNoteBadRequest handles this case with default header values.
The request could not be understood by the server due to malformed syntax.
*/
type PutExternalcontactsContactNoteBadRequest struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteBadRequest) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteBadRequest %+v", 400, o.Payload)
}
func (o *PutExternalcontactsContactNoteBadRequest) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteUnauthorized creates a PutExternalcontactsContactNoteUnauthorized with default headers values
func NewPutExternalcontactsContactNoteUnauthorized() *PutExternalcontactsContactNoteUnauthorized {
return &PutExternalcontactsContactNoteUnauthorized{}
}
/*PutExternalcontactsContactNoteUnauthorized handles this case with default header values.
No authentication bearer token specified in authorization header.
*/
type PutExternalcontactsContactNoteUnauthorized struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteUnauthorized) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteUnauthorized %+v", 401, o.Payload)
}
func (o *PutExternalcontactsContactNoteUnauthorized) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteUnauthorized) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteForbidden creates a PutExternalcontactsContactNoteForbidden with default headers values
func NewPutExternalcontactsContactNoteForbidden() *PutExternalcontactsContactNoteForbidden {
return &PutExternalcontactsContactNoteForbidden{}
}
/*PutExternalcontactsContactNoteForbidden handles this case with default header values.
You are not authorized to perform the requested action.
*/
type PutExternalcontactsContactNoteForbidden struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteForbidden) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteForbidden %+v", 403, o.Payload)
}
func (o *PutExternalcontactsContactNoteForbidden) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteForbidden) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteNotFound creates a PutExternalcontactsContactNoteNotFound with default headers values
func NewPutExternalcontactsContactNoteNotFound() *PutExternalcontactsContactNoteNotFound {
return &PutExternalcontactsContactNoteNotFound{}
}
/*PutExternalcontactsContactNoteNotFound handles this case with default header values.
The requested resource was not found.
*/
type PutExternalcontactsContactNoteNotFound struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteNotFound) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteNotFound %+v", 404, o.Payload)
}
func (o *PutExternalcontactsContactNoteNotFound) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteRequestTimeout creates a PutExternalcontactsContactNoteRequestTimeout with default headers values
func NewPutExternalcontactsContactNoteRequestTimeout() *PutExternalcontactsContactNoteRequestTimeout {
return &PutExternalcontactsContactNoteRequestTimeout{}
}
/*PutExternalcontactsContactNoteRequestTimeout handles this case with default header values.
The client did not produce a request within the server timeout limit. This can be caused by a slow network connection and/or large payloads.
*/
type PutExternalcontactsContactNoteRequestTimeout struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteRequestTimeout) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteRequestTimeout %+v", 408, o.Payload)
}
func (o *PutExternalcontactsContactNoteRequestTimeout) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteRequestTimeout) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteRequestEntityTooLarge creates a PutExternalcontactsContactNoteRequestEntityTooLarge with default headers values
func NewPutExternalcontactsContactNoteRequestEntityTooLarge() *PutExternalcontactsContactNoteRequestEntityTooLarge {
return &PutExternalcontactsContactNoteRequestEntityTooLarge{}
}
/*PutExternalcontactsContactNoteRequestEntityTooLarge handles this case with default header values.
The request is over the size limit. Content-Length: %s
*/
type PutExternalcontactsContactNoteRequestEntityTooLarge struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteRequestEntityTooLarge) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteRequestEntityTooLarge %+v", 413, o.Payload)
}
func (o *PutExternalcontactsContactNoteRequestEntityTooLarge) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteRequestEntityTooLarge) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteUnsupportedMediaType creates a PutExternalcontactsContactNoteUnsupportedMediaType with default headers values
func NewPutExternalcontactsContactNoteUnsupportedMediaType() *PutExternalcontactsContactNoteUnsupportedMediaType {
return &PutExternalcontactsContactNoteUnsupportedMediaType{}
}
/*PutExternalcontactsContactNoteUnsupportedMediaType handles this case with default header values.
Unsupported Media Type - Unsupported or incorrect media type, such as an incorrect Content-Type value in the header.
*/
type PutExternalcontactsContactNoteUnsupportedMediaType struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteUnsupportedMediaType) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteUnsupportedMediaType %+v", 415, o.Payload)
}
func (o *PutExternalcontactsContactNoteUnsupportedMediaType) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteUnsupportedMediaType) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteUnprocessableEntity creates a PutExternalcontactsContactNoteUnprocessableEntity with default headers values
func NewPutExternalcontactsContactNoteUnprocessableEntity() *PutExternalcontactsContactNoteUnprocessableEntity {
return &PutExternalcontactsContactNoteUnprocessableEntity{}
}
/*PutExternalcontactsContactNoteUnprocessableEntity handles this case with default header values.
PutExternalcontactsContactNoteUnprocessableEntity put externalcontacts contact note unprocessable entity
*/
type PutExternalcontactsContactNoteUnprocessableEntity struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteUnprocessableEntity) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteUnprocessableEntity %+v", 422, o.Payload)
}
func (o *PutExternalcontactsContactNoteUnprocessableEntity) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteUnprocessableEntity) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteTooManyRequests creates a PutExternalcontactsContactNoteTooManyRequests with default headers values
func NewPutExternalcontactsContactNoteTooManyRequests() *PutExternalcontactsContactNoteTooManyRequests {
return &PutExternalcontactsContactNoteTooManyRequests{}
}
/*PutExternalcontactsContactNoteTooManyRequests handles this case with default header values.
Rate limit exceeded the maximum. Retry the request in [%s] seconds
*/
type PutExternalcontactsContactNoteTooManyRequests struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteTooManyRequests) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteTooManyRequests %+v", 429, o.Payload)
}
func (o *PutExternalcontactsContactNoteTooManyRequests) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteTooManyRequests) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteInternalServerError creates a PutExternalcontactsContactNoteInternalServerError with default headers values
func NewPutExternalcontactsContactNoteInternalServerError() *PutExternalcontactsContactNoteInternalServerError {
return &PutExternalcontactsContactNoteInternalServerError{}
}
/*PutExternalcontactsContactNoteInternalServerError handles this case with default header values.
The server encountered an unexpected condition which prevented it from fulfilling the request.
*/
type PutExternalcontactsContactNoteInternalServerError struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteInternalServerError) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteInternalServerError %+v", 500, o.Payload)
}
func (o *PutExternalcontactsContactNoteInternalServerError) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteInternalServerError) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteServiceUnavailable creates a PutExternalcontactsContactNoteServiceUnavailable with default headers values
func NewPutExternalcontactsContactNoteServiceUnavailable() *PutExternalcontactsContactNoteServiceUnavailable {
return &PutExternalcontactsContactNoteServiceUnavailable{}
}
/*PutExternalcontactsContactNoteServiceUnavailable handles this case with default header values.
Service Unavailable - The server is currently unavailable (because it is overloaded or down for maintenance).
*/
type PutExternalcontactsContactNoteServiceUnavailable struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteServiceUnavailable) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteServiceUnavailable %+v", 503, o.Payload)
}
func (o *PutExternalcontactsContactNoteServiceUnavailable) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteServiceUnavailable) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPutExternalcontactsContactNoteGatewayTimeout creates a PutExternalcontactsContactNoteGatewayTimeout with default headers values
func NewPutExternalcontactsContactNoteGatewayTimeout() *PutExternalcontactsContactNoteGatewayTimeout {
return &PutExternalcontactsContactNoteGatewayTimeout{}
}
/*PutExternalcontactsContactNoteGatewayTimeout handles this case with default header values.
The request timed out.
*/
type PutExternalcontactsContactNoteGatewayTimeout struct {
Payload *models.ErrorBody
}
func (o *PutExternalcontactsContactNoteGatewayTimeout) Error() string {
return fmt.Sprintf("[PUT /api/v2/externalcontacts/contacts/{contactId}/notes/{noteId}][%d] putExternalcontactsContactNoteGatewayTimeout %+v", 504, o.Payload)
}
func (o *PutExternalcontactsContactNoteGatewayTimeout) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PutExternalcontactsContactNoteGatewayTimeout) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
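A caller never invokes this reader directly; the generated client dispatches to it and returns the typed values defined above. A minimal handling sketch, where the client accessor and params construction are assumptions from the wider generated package, not shown in this file:

// Usage sketch: ok is *PutExternalcontactsContactNoteOK; every non-200
// status arrives as a typed error value defined in this file.
ok, err := apiClient.ExternalContacts.PutExternalcontactsContactNote(params) // hypothetical accessor
if err != nil {
	switch e := err.(type) {
	case *external_contacts.PutExternalcontactsContactNoteNotFound:
		log.Printf("note not found: %+v", e.GetPayload())
	case *external_contacts.PutExternalcontactsContactNoteTooManyRequests:
		log.Printf("rate limited: %+v", e.GetPayload())
	default:
		log.Printf("unexpected error: %v", err)
	}
	return
}
log.Printf("note updated: %+v", ok.GetPayload())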
|
bitigchi/MuditaOS
|
products/BellHybrid/apps/application-bell-main/windows/BellBatteryShutdownWindow.hpp
|
// Copyright (c) 2017-2021, Mudita Sp. z.o.o. All rights reserved.
// For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md
#pragma once
#include <apps-common/ApplicationCommon.hpp>
#include <apps-common/windows/AppWindow.hpp>
namespace gui
{
class Icon;
class BellBatteryShutdownWindow : public gui::AppWindow
{
public:
static constexpr auto defaultName = "BellBatteryShutdown";
BellBatteryShutdownWindow(app::ApplicationCommon *app, const std::string &name = defaultName);
private:
void buildInterface() override;
bool onInput(const InputEvent &inputEvent) override;
};
} // namespace gui
|
nazarepiedady/next.js
|
test/e2e/middleware-rewrites/app/pages/ab-test/a.js
|
export default function Home() {
return <p className="title">Welcome Page A</p>
}
export const getServerSideProps = () => ({
props: {
abtest: true,
},
})
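This page is one arm of the middleware-rewrites e2e fixture; the middleware that buckets visitors between /ab-test/a and /ab-test/b lives elsewhere in the test app. A rough sketch of such a middleware follows; the cookie name and bucketing logic are assumptions, and the API shapes follow recent Next.js versions:

// middleware.js - sketch only, not the fixture's actual middleware
import { NextResponse } from 'next/server'

export function middleware(request) {
  const url = request.nextUrl.clone()
  if (url.pathname === '/ab-test') {
    // Sticky 50/50 bucket per visitor; 'ab-bucket' is an assumed cookie name.
    const bucket = request.cookies.get('ab-bucket')?.value || (Math.random() < 0.5 ? 'a' : 'b')
    url.pathname = `/ab-test/${bucket}`
    const response = NextResponse.rewrite(url)
    response.cookies.set('ab-bucket', bucket)
    return response
  }
  return NextResponse.next()
}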
|
kooiot/siridb-server
|
test/test_vec/test_vec.c
|
#include "../test.h"
#include <vec/vec.h>
const unsigned int num_entries = 14;
char * entries[] = {
"Zero",
"First entry",
"Second entry",
"Third entry",
"Fourth entry",
"Fifth entry",
"Sixth entry",
"Seventh entry",
"8",
"9",
"entry 10",
"entry 11",
"entry 12",
"entry-last"
};
int main()
{
test_start("vec");
/* vec_append_safe */
{
vec_t * vec = vec_new(0);
_assert (vec->len == 0);
_assert (vec->size == 0);
unsigned int i;
for (i = 0; i < num_entries; i++)
{
_assert (vec_append_safe(&vec, entries[i]) == 0);
}
/* vec_copy */
{
vec_t * veccp = vec_copy(vec);
unsigned int i;
for (i = 0; i < num_entries; i++)
{
_assert (veccp->data[i] == entries[i]);
}
vec_free(veccp);
}
_assert (vec->len == num_entries);
vec_free(vec);
}
/* vec_append */
{
vec_t * vec = vec_new(num_entries);
_assert (vec->len == 0);
_assert (vec->size == num_entries);
unsigned int i;
for (i = 0; i < num_entries; i++)
{
vec_append(vec, entries[i]);
}
_assert (vec->len == num_entries);
/* vec_pop */
for (i = num_entries; i-- > 0;)
{
_assert (vec_pop(vec) == entries[i]);
}
vec_free(vec);
}
return test_end();
}
|
RTHMaK/RPGOne
|
deep_qa-master/deep_qa/training/trainer.py
|
import logging
import os
from typing import Any, Dict, List
import numpy
import keras.backend as K
from keras.models import model_from_json
from keras.callbacks import LambdaCallback, TensorBoard, EarlyStopping, CallbackList, ModelCheckpoint
from . import concrete_pretrainers
from ..common.checks import ConfigurationError
from ..common.params import get_choice
from ..data.dataset import Dataset
from ..data.instances.instance import Instance
from ..layers.wrappers.output_mask import OutputMask
from .models import DeepQaModel
from .optimizers import optimizer_from_params
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class Trainer:
"""
A Trainer object specifies data, a model, and a way to train the model with the data. Here we
group all of the common code related to these things, making only minimal assumptions about
what kind of data you're using or what the structure of your model is.
The main benefits of this class are having a common place for setting parameters related to
training, actually running the training with those parameters, and code for saving and loading
models.
"""
def __init__(self, params: Dict[str, Any]):
self.name = "Trainer"
# Should we save the models that we train? If this is True, you are required to also set
# the model_serialization_prefix parameter, or the code will crash.
self.save_models = params.pop('save_models', True)
# Prefix for saving and loading model files
self.model_prefix = params.pop('model_serialization_prefix', None)
if self.model_prefix:
parent_directory = os.path.dirname(self.model_prefix)
os.makedirs(parent_directory, exist_ok=True)
# Preferred backend to use for training. If a different backend is detected, we still
# train but we also warn the user.
self.preferred_backend = params.pop('preferred_backend', None)
if self.preferred_backend and self.preferred_backend.lower() != K.backend():
warning_message = self._make_backend_warning(self.preferred_backend.lower(),
K.backend())
logger.warning(warning_message)
self.batch_size = params.pop('batch_size', 32)
# Upper limit on the number of training instances. If this is set, and we get more than
# this, we will truncate the data.
self.max_training_instances = params.pop('max_training_instances', None)
# Amount of training data to use for Keras' validation (not our QA validation, set by
# the validation_file param, which is separate). This value is passed as
# 'validation_split' to Keras' model.fit().
self.keras_validation_split = params.pop('keras_validation_split', 0.1)
# Number of train epochs.
self.num_epochs = params.pop('num_epochs', 20)
# Number of epochs to be patient before early stopping.
self.patience = params.pop('patience', 1)
# Log directory for tensorboard.
self.tensorboard_log = params.pop('tensorboard_log', None)
# Tensorboard histogram frequency: note that activating the tensorboard histogram (frequency > 0) can
# drastically increase model training time. Please set frequency with consideration to desired runtime.
self.tensorboard_histogram_freq = params.pop('tensorboard_histogram_freq', 0)
# The files containing the data that should be used for training. See
# _load_dataset_from_files().
self.train_files = params.pop('train_files', None)
# The files containing the data that should be used for validation, if you do not want to
# use a split of the training data for validation. The default of None means to just use
# the `keras_validation_split` parameter to split the training data for validation.
self.validation_files = params.pop('validation_files', None)
# The files containing the data that should be used for evaluation.
# The default of None means to just not perform test set evaluation.
self.test_files = params.pop('test_files', None)
optimizer_params = params.pop('optimizer', 'adam')
self.optimizer = optimizer_from_params(optimizer_params)
self.loss = params.pop('loss', 'categorical_crossentropy')
self.metrics = params.pop('metrics', ['accuracy'])
self.validation_metric = params.pop('validation_metric', 'val_acc')
# A dict of additional arguments to the fit method. These get added to
# the options already captured by other arguments.
self.fit_kwargs = params.pop('fit_kwargs', {})
# This is a debugging setting, mostly - we have written a custom model.summary() method
# that supports showing masking info, to help understand what's going on with the masks.
self.show_summary_with_masking = params.pop('show_summary_with_masking_info', False)
# This should be a dict, containing the following keys:
# - "layer_names", which has as a value a list of names that must match layer names in the
# model build by this Trainer.
# - "data", which has as a value either "training", "validation", or a list of file names.
# If you give "training" or "validation", we'll use those datasets, otherwise we'll
# load data from the provided files. Note that currently "validation" only works if
# you provide validation files, not if you're just using Keras to split the training
# data.
# - "masks", an optional key that functions identically to "layer_names", except we output
# the mask at each layer given here.
self.debug_params = params.pop('debug', {})
pretrainer_params = params.pop('pretrainers', [])
self.pretrainers = []
for pretrainer_param in pretrainer_params:
pretrainer_type = get_choice(pretrainer_param, "type", concrete_pretrainers.keys())
pretrainer = concrete_pretrainers[pretrainer_type](self, pretrainer_param)
self.pretrainers.append(pretrainer)
# We've now processed all of the parameters, and we're the base class, so there should not
# be anything left.
if len(params.keys()) != 0:
raise ConfigurationError("You passed unrecognized parameters: " + str(params))
# Model-specific member variables that will get set and used later.
self.model = None
self.debug_model = None
# Training-specific member variables that will get set and used later.
self.best_epoch = -1
# We store the datasets used for training and validation, both before processing and after
# processing, in case a subclass wants to modify it between epochs for whatever reason.
self.training_dataset = None
self.train_input = None
self.train_labels = None
self.validation_dataset = None
self.validation_input = None
self.validation_labels = None
self.test_dataset = None
self.test_input = None
self.test_labels = None
self.debug_dataset = None
self.debug_input = None
def can_train(self):
return self.train_files is not None
def _load_dataset_from_files(self, files: List[str]) -> Dataset:
"""
Given a list of file inputs, load a raw dataset from the files. This is a list because
some datasets are specified in more than one file (e.g., a file containing the instances,
and a file containing background information about those instances).
"""
raise NotImplementedError
def _prepare_data(self, dataset: Dataset, for_train: bool, update_data_indexer=True):
"""
Takes a raw dataset and converts it into training inputs and labels that can be used to
either train a model or make predictions.
Input: a Dataset of the same format as read by the read_dataset_from_files() method, and an
indicator for whether this is being done for the training data (so that, e.g., you can set
model parameters based on characteristics of the training data).
Output: a tuple of (inputs, labels), which can be fed directly to Keras' model.fit()
and model.predict() methods. `labels` is allowed to be None in the second case.
"""
raise NotImplementedError
def _prepare_instance(self, instance: Instance, make_batch: bool=True):
"""
Like self._prepare_data(), but for a single Instance. Used most often for making
predictions one at a time on test data (though you should avoid that if possible, as larger
batches would be more efficient).
The make_batch argument determines whether we make the return value into a batch or not by
calling numpy.expand_dims. Keras' model.predict() method requires a batch, so we need an
extra dimension. If you're going to do the batch conversion yourself, or don't need it,
you can pass False for that parameter.
"""
raise NotImplementedError
def _pretrain(self):
"""
Runs whatever pre-training has been specified in the constructor.
"""
logger.info("Running pre-training")
for pretrainer in self.pretrainers:
pretrainer.train()
self._pretraining_finished_hook()
def _process_pretraining_data(self):
"""
Processes the pre-training data in whatever way you want, typically for setting model
parameters like vocabulary. This happens _before_ the training data itself is processed.
We don't know what processing this might entail, or whether you are even doing any
pre-training, so we just pass here by default.
"""
pass
def _pretraining_finished_hook(self):
"""
This is called when pre-training finishes (if there were any pre-trainers specified). You
can do whatever you want in here, like changing model parameters based on what happened
during pre-training, or saving a pre-trained model, or whatever.
The default implementation is to call pretrainer.on_finished() for each pre-trainer, which by
default is a `pass`.
"""
for pretrainer in self.pretrainers:
pretrainer.on_finished()
def _compile_kwargs(self):
"""
Because we call model.compile() in a few different places in the code, and we have a few
member variables that we use to set arguments for model.compile(), we group those arguments
together here, to only specify them once.
"""
return {
'loss': self.loss,
'optimizer': self.optimizer,
'metrics': self.metrics,
}
def _build_model(self) -> DeepQaModel:
"""Constructs and returns a DeepQaModel (which is a wrapper around a Keras Model) that will
take the output of self._get_training_data as input, and produce as output a true/false
decision for each input.
The returned model will be used to call model.fit(train_input, train_labels).
"""
raise NotImplementedError
def prepare_data(self, train_files, max_training_instances,
validation_files=None, test_files=None, update_data_indexer=True):
logger.info("Getting training data")
training_dataset = self._load_dataset_from_files(train_files)
if max_training_instances is not None:
logger.info("Truncating the training dataset to %d instances", max_training_instances)
training_dataset = training_dataset.truncate(max_training_instances)
train_input, train_labels = self._prepare_data(training_dataset,
for_train=True,
update_data_indexer=update_data_indexer)
validation_dataset = validation_input = validation_labels = None
if validation_files:
logger.info("Getting validation data")
validation_dataset = self._load_dataset_from_files(validation_files)
validation_input, validation_labels = self._prepare_data(validation_dataset,
for_train=False,
update_data_indexer=update_data_indexer)
test_dataset = test_input = test_labels = None
if test_files:
logger.info("Getting test data")
test_dataset = self._load_dataset_from_files(test_files)
test_input, test_labels = self._prepare_data(test_dataset,
for_train=False,
update_data_indexer=update_data_indexer)
return ((training_dataset, train_input, train_labels),
(validation_dataset, validation_input, validation_labels),
(test_dataset, test_input, test_labels))
def train(self):
'''
Trains the model.
All training parameters have already been passed to the constructor, so we need no
arguments to this method.
'''
logger.info("Running training (%s)", self.name)
# Before actually doing any training, we'll run whatever pre-training has been specified.
# Note that this can have funny interactions with model parameters that get fit to the
# training data. We don't really know here what you want to do with the data you have for
# pre-training, if any, so we provide a hook that you can override to do whatever you want.
if self.pretrainers:
self._process_pretraining_data()
# First we need to prepare the data that we'll use for training.
train_data, val_data, test_data = self.prepare_data(self.train_files, self.max_training_instances,
self.validation_files,
self.test_files)
self.training_dataset, self.train_input, self.train_labels = train_data
self.validation_dataset, self.validation_input, self.validation_labels = val_data
self.test_dataset, self.test_input, self.test_labels = test_data
# We need to actually do pretraining _after_ we've loaded the training data, though, as we
# need to build the models to be consistent between training and pretraining. The training
# data tells us a max sentence length, which we need for the pretrainer.
if self.pretrainers:
self._pretrain()
# Then we build the model and compile it.
logger.info("Building the model")
self.model = self._build_model()
self.model.summary(show_masks=self.show_summary_with_masking)
self.model.compile(**self._compile_kwargs())
if self.debug_params:
# Get the list of layers whose outputs will be visualized as per the
# solver definition and build a debug model.
debug_layer_names = self.debug_params['layer_names']
debug_masks = self.debug_params.get('masks', [])
debug_data = self.debug_params['data']
if debug_data == "training":
self.debug_dataset = self.training_dataset
self.debug_input = self.train_input
elif debug_data == "validation":
# NOTE: This currently only works if you've specified specific validation data, not
# if you are just splitting the training data for validation.
self.debug_dataset = self.validation_dataset
self.debug_input = self.validation_input
else:
# If the `data` param is not "training" or "validation", we assume it's a list of
# file names.
self.debug_dataset = self._load_dataset_from_files(debug_data)
self.debug_input, _ = self._prepare_data(self.debug_dataset, for_train=False)
self.debug_model = self._build_debug_model(debug_layer_names, debug_masks)
# Now we actually train the model using various Keras callbacks to control training.
callbacks = self._get_callbacks()
kwargs = {'epochs': self.num_epochs, 'callbacks': [callbacks], 'batch_size': self.batch_size}
# We'll check for explicit validation data first; if you provided this, you definitely
# wanted to use it for validation. self.keras_validation_split is non-zero by default,
# so you may have left it above zero by accident.
if self.validation_input is not None:
kwargs['validation_data'] = (self.validation_input, self.validation_labels)
elif self.keras_validation_split > 0.0:
kwargs['validation_split'] = self.keras_validation_split
# add the user-specified arguments to fit
kwargs.update(self.fit_kwargs)
# We now pass all the arguments to the model's fit function, which does all of the training.
history = self.model.fit(self.train_input, self.train_labels, **kwargs)
# After finishing training, we save the best weights and
# any auxiliary files, such as the model config.
self.best_epoch = int(numpy.argmax(history.history[self.validation_metric]))
if self.save_models:
self._save_best_model()
self._save_auxiliary_files()
# If there are test files, we evaluate on the test data.
if self.test_files:
logger.info("Evaluting model on the test set.")
scores = self.model.evaluate(self.test_input, self.test_labels)
for idx, metric in enumerate(self.model.metrics_names):
print("{}: {}".format(metric, scores[idx]))
def _get_callbacks(self):
"""
Returns a set of Callbacks which are used to perform various functions within Keras' .fit method.
Here, we use an early stopping callback to add patience with respect to the validation metric and
a Lambda callback which performs the model specific callbacks which you might want to build into
a model, such as re-encoding some background knowledge.
Additionally, there is also functionality to create Tensorboard log files. These can be visualised
using 'tensorboard --logdir /path/to/log/files' after training.
"""
early_stop = EarlyStopping(monitor=self.validation_metric, patience=self.patience)
model_callbacks = LambdaCallback(on_epoch_begin=lambda epoch, logs: self._pre_epoch_hook(epoch),
on_epoch_end=lambda epoch, logs: self._post_epoch_hook(epoch))
callbacks = [early_stop, model_callbacks]
if self.tensorboard_log is not None:
if K.backend() == 'theano':
raise ConfigurationError("Tensorboard logging is only compatibile with Tensorflow. "
"Change the backend using the KERAS_BACKEND environment variable.")
tensorboard_visualisation = TensorBoard(log_dir=self.tensorboard_log,
histogram_freq=self.tensorboard_histogram_freq)
callbacks.append(tensorboard_visualisation)
if self.debug_params:
debug_callback = LambdaCallback(on_epoch_end=lambda epoch, logs:
self._debug(self.debug_params["layer_names"],
self.debug_params.get("masks", []), epoch))
callbacks.append(debug_callback)
# Some witchcraft is happening here - we don't specify the epoch replacement variable
# checkpointing string, because Keras does that within the callback if we specify it here.
if self.save_models:
checkpointing = ModelCheckpoint(self.model_prefix + "_weights_epoch={epoch:d}.h5",
save_best_only=True, save_weights_only=True,
monitor=self.validation_metric)
callbacks.append(checkpointing)
return CallbackList(callbacks)
def _debug(self, debug_layer_names: List[str], debug_masks: List[str], epoch: int):
"""
Runs the debug model and saves the results to a file.
"""
logger.info("Running debug model")
# Shows intermediate outputs of the model on validation data
outputs = self.debug_model.predict(self.debug_input)
output_dict = {}
if len(debug_layer_names) == 1:
output_dict[debug_layer_names[0]] = outputs
else:
for layer_name, output in zip(debug_layer_names, outputs[:len(debug_layer_names)]):
output_dict[layer_name] = output
for layer_name, output in zip(debug_masks, outputs[len(debug_layer_names):]):
if 'masks' not in output_dict:
output_dict['masks'] = {}
output_dict['masks'][layer_name] = output
self._output_debug_info(output_dict, epoch)
def _output_debug_info(self, output_dict: Dict[str, numpy.array], epoch: int):
logger.info("Outputting debug results")
debug_output_file = open("%s_debug_%d.txt" % (self.model_prefix, epoch), "w")
overall_debug_info = self._overall_debug_output(output_dict)
debug_output_file.write(overall_debug_info)
for instance_index, instance in enumerate(self.debug_dataset.instances):
instance_output_dict = {}
for layer_name, output in output_dict.items():
if layer_name == 'masks':
instance_output_dict['masks'] = {}
for mask_name, mask_output in output.items():
instance_output_dict['masks'][mask_name] = mask_output[instance_index]
else:
instance_output_dict[layer_name] = output[instance_index]
instance_info = self._instance_debug_output(instance, instance_output_dict)
debug_output_file.write(instance_info + '\n')
debug_output_file.close()
def _pre_epoch_hook(self, epoch: int):
"""
This method gets called before each epoch of training. If you want to do any kind of
processing in between epochs (e.g., updating the training data for whatever reason), here
is your chance to do so.
"""
pass
def _post_epoch_hook(self, epoch: int):
"""
This method gets called directly after model.fit(), before making any early stopping
decisions. If you want to modify anything after each iteration (e.g., computing a
different kind of validation loss to use for early stopping, or just computing and printing
accuracy on some other held out data), you can do that here. If you require extra parameters,
use calls to local methods rather than passing new parameters, as this hook is run via a
Keras Callback, which is fairly strict in its interface.
"""
pass
def _build_debug_model(self, debug_layer_names: List[str], debug_masks: List[str]):
"""
Here we build a very simple kind of debug model: one that takes the same inputs as
self.model, and runs the model up to some particular layers, and outputs the values at
those layers.
In addition, you can optionally specify some number of layers for which you want to output
the mask computed by that layer.
If you want something more complicated, override this method.
"""
debug_inputs = self.model.get_input_at(0) # list of all input_layers
debug_output_dict = {}
layer_names = set(debug_layer_names)
mask_names = set(debug_masks)
for layer in self.model.layers:
if layer.name in layer_names:
debug_output_dict[layer.name] = layer.get_output_at(0)
layer_names.remove(layer.name)
if layer.name in mask_names:
mask = OutputMask()(layer.get_output_at(0))
debug_output_dict['mask_for_' + layer.name] = mask
mask_names.remove(layer.name)
if len(layer_names) != 0 or len(mask_names) != 0:
raise ConfigurationError("Unmatched debug layer names: " + str(layer_names | mask_names))
# The outputs need to be in the same order as `debug_layer_names`, or downstream code will
# have issues.
debug_outputs = [debug_output_dict[name] for name in debug_layer_names]
debug_outputs.extend([debug_output_dict['mask_for_' + name] for name in debug_masks])
return DeepQaModel(input=debug_inputs, output=debug_outputs)
def _overall_debug_output(self, output_dict: Dict[str, numpy.array]) -> str: # pylint: disable=unused-argument
return "Number of instances: %d\n" % len(self.debug_dataset.instances)
def _instance_debug_output(self, instance: Instance, outputs: Dict[str, numpy.array]) -> str:
"""
This method takes an Instance and all of the debug outputs for that Instance, puts them
into some human-readable format, and returns that as a string. `outputs` will have one key
corresponding to each item in the `debug.layer_names` parameter given to the constructor of
this object.
The default here is `pass` instead of `raise NotImplementedError`, because you're not
required to implement debugging for your model.
"""
pass
def score_dataset(self, dataset: Dataset):
inputs, _ = self._prepare_data(dataset, False)
return self.model.predict(inputs)
def score_instance(self, instance: Instance):
inputs, _ = self._prepare_instance(instance)
try:
return self.model.predict(inputs)
except:
print('Inputs were: ' + str(inputs))
raise
def load_model(self, epoch: int=None):
"""
Loads a serialized model. If epoch is not None, we try to load the model from that epoch.
If epoch is not given, we load the best saved model.
Paths in here must match those in self._save_model(epoch) and self._save_best_model(), or
things will break.
"""
logger.info("Loading serialized model")
# Loading serialized model
model_config_file = open("%s_config.json" % self.model_prefix)
model_config_json = model_config_file.read()
model_config_file.close()
self.model = model_from_json(model_config_json,
custom_objects=self._get_custom_objects())
if epoch is not None:
model_file = "%s_weights_epoch=%d.h5" % (self.model_prefix, epoch)
else:
model_file = "%s_weights.h5" % self.model_prefix
logger.info("Loading weights from file %s", model_file)
self.model.load_weights(model_file)
self.model.summary(show_masks=self.show_summary_with_masking)
self._load_layers()
self._load_auxiliary_files()
self._set_params_from_model()
self.model.compile(**self._compile_kwargs())
def _load_layers(self):
"""
If you want to use member variables that contain Layers after the model is loaded, you need
to set them from the model. For instance, say you have an embedding layer for word
sequences, and you want to take a loaded model, build a sub-model out of it that contains
the embedding layer, and use that model somehow. In that case, the member variable for the
embedding layer needs to be set from the loaded model. You can do that here.
"""
pass
def _load_auxiliary_files(self):
"""
Called during model loading. If you have some auxiliary pickled object, such as an object
storing the vocabulary of your model, you can load it here.
"""
pass
def _set_params_from_model(self):
"""
Called after a model is loaded, this lets you update member variables that contain model
parameters, like max sentence length, that are not stored as weights in the model object.
This is necessary if you want to process a new data instance to be compatible with the
model for prediction, for instance.
"""
pass
def _save_best_model(self):
"""
Copies the weights from the best epoch to a final weight file.
The point of this is so that the input/output spec of the NNSolver is simpler. Someone
calling this as a subroutine doesn't have to worry about which epoch ended up being the
best, they can just use the final weight file. You can still use models from other epochs
if you really want to.
"""
from shutil import copyfile
epoch_weight_file = "%s_weights_epoch=%d.h5" % (self.model_prefix, self.best_epoch)
final_weight_file = "%s_weights.h5" % self.model_prefix
copyfile(epoch_weight_file, final_weight_file)
logger.info("Saved the best model to %s", final_weight_file)
def _save_auxiliary_files(self):
"""
Called after training. If you have some auxiliary object, such as an object storing
the vocabulary of your model, you can save it here. The model config is saved by default.
"""
model_config = self.model.to_json()
model_config_file = open("%s_config.json" % (self.model_prefix), "w")
print(model_config, file=model_config_file)
model_config_file.close()
@staticmethod
def _make_backend_warning(preferred_backend, actual_backend):
warning_info = ("@ Preferred backend is %s, but "
"current backend is %s. @" % (preferred_backend,
actual_backend))
end_row = "@" * len(warning_info)
warning_row_spaces = len(warning_info) - len("@ WARNING: @")
left_warning_row_spaces = right_warning_row_spaces = warning_row_spaces // 2
if warning_row_spaces % 2 == 1:
# left and right have uneven spacing
right_warning_row_spaces += 1
left_warning_row = "\n@" + " " * left_warning_row_spaces
right_warning_row = " " * right_warning_row_spaces + "@\n"
warning_message = ("\n" + end_row +
left_warning_row + " WARNING: " + right_warning_row +
warning_info +
"\n" + end_row)
return warning_message
@classmethod
def _get_custom_objects(cls):
"""
If you've used any Layers that Keras doesn't know about, you need to specify them in this
dictionary, so we can load them correctly.
"""
return {
"DeepQaModel": DeepQaModel
}
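Trainer is abstract: concrete solvers plug in dataset loading, data preparation, and model construction. A minimal sketch of the subclass contract follows; the loader and array-conversion helpers here are hypothetical stand-ins, not real deep_qa APIs:

class MinimalTrainer(Trainer):
    """Illustrative subclass; read_instances() and instances_to_arrays() are
    hypothetical helpers standing in for dataset-specific logic."""

    def _load_dataset_from_files(self, files):
        # Hypothetical loader returning a Dataset built from the first file.
        return read_instances(files[0])

    def _prepare_data(self, dataset, for_train, update_data_indexer=True):
        # Must return (inputs, labels) ready for Keras' fit()/predict().
        return instances_to_arrays(dataset, fit_lengths=for_train)  # hypothetical

    def _build_model(self):
        from keras.layers import Dense, Input
        # A trivial classifier, just to show the expected return type.
        model_input = Input(shape=(10,))  # arbitrary input shape for the sketch
        output = Dense(2, activation='softmax')(model_input)
        return DeepQaModel(input=model_input, output=output)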
|
lpassamano/scavenger_hunt
|
db/migrate/20171124200453_create_found_items.rb
|
class CreateFoundItems < ActiveRecord::Migration[5.1]
def change
create_table :found_items do |t|
t.boolean :found
t.integer :team_id
t.integer :item_id
end
end
end
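The matching model is not part of this snippet; it would plausibly look like the sketch below. The associations are inferred from the team_id and item_id columns, so treat them as assumptions:

class FoundItem < ApplicationRecord
  # Inferred from the team_id/item_id foreign-key columns above.
  belongs_to :team
  belongs_to :item
end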
|
KL-HIS/stream-reactor
|
kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/model/TopicPartitionOffset.scala
|
/*
* Copyright 2020 Lenses.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.lenses.streamreactor.connect.aws.s3.model
import org.apache.kafka.common.{TopicPartition => KafkaTopicPartition}
case class OffsetReaderResult(path: String, line: String)
case class PollResults(
resultList: Vector[_ <: SourceData],
bucketAndPath: RemotePathLocation,
prefix: String,
targetTopic: String
)
case class Topic(value: String) {
require(value != null && value.trim.nonEmpty)
def withPartition(partition: Int): TopicPartition = {
TopicPartition(this, partition)
}
}
object Offset {
implicit def orderingByOffsetValue[A <: Offset]: Ordering[A] =
Ordering.by(_.value)
}
case class Offset(value: Long) {
require(value >= 0)
}
object TopicPartition {
def apply(kafkaTopicPartition: KafkaTopicPartition): TopicPartition = {
TopicPartition(Topic(kafkaTopicPartition.topic()), kafkaTopicPartition.partition())
}
}
case class TopicPartition(topic: Topic, partition: Int) {
def withOffset(offset: Offset): TopicPartitionOffset = TopicPartitionOffset(topic, partition, offset)
def withOffset(offset: Long): TopicPartitionOffset = withOffset(Offset(offset))
def toKafka = new KafkaTopicPartition(topic.value, partition)
}
case class TopicPartitionOffset(topic: Topic, partition: Int, offset: Offset) {
def toTopicPartition: TopicPartition = TopicPartition(topic, partition)
}
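These case classes compose fluently. A quick usage sketch, grounded only in the methods defined above:

// Build an offset for topic "telemetry", partition 3, then round-trip
// through the Kafka client type and back into the wrapper.
val tpo = Topic("telemetry").withPartition(3).withOffset(1042L)
val kafkaTp = tpo.toTopicPartition.toKafka
val backAgain = TopicPartition(kafkaTp)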
|
shrewdlin/BPE
|
src/business/SapTLVBody.cpp
|
#include "SapTLVBody.h"
#include <boost/asio.hpp>
#include "SapLogHelper.h"
void CSapTLVBodyEncoder::SetValue(unsigned short wKey,const void* pValue,unsigned int nValueLen)
{
unsigned int nFactLen=((nValueLen&0x03)!=0?((nValueLen>>2)+1)<<2:nValueLen);
if(m_buffer.capacity()<nFactLen+4)
{
m_buffer.add_capacity(SAP_ALIGN(nFactLen+4-m_buffer.capacity()));
}
SSapMsgAttribute *pAtti=(SSapMsgAttribute *)m_buffer.top();
pAtti->wType=htons(wKey);
pAtti->wLength=htons(nValueLen+sizeof(SSapMsgAttribute));
memcpy(pAtti->acValue,pValue,nValueLen);
memset(pAtti->acValue+nValueLen,0,nFactLen-nValueLen);
m_buffer.inc_loc(sizeof(SSapMsgAttribute)+nFactLen);
}
void CSapTLVBodyEncoder::SetValue(unsigned short wKey, const string &strValue)
{
SetValue(wKey,strValue.c_str(),strValue.length());
}
void CSapTLVBodyEncoder::SetValue(unsigned short wKey, unsigned int wValue)
{
int nNetValue=htonl(wValue);
SetValue(wKey,&nNetValue,4);
}
void CSapTLVBodyEncoder::BeginValue(unsigned short wType)
{
if(m_buffer.capacity()<sizeof(SSapMsgAttribute))
{
m_buffer.add_capacity(SAP_ALIGN(sizeof(SSapMsgAttribute)-m_buffer.capacity()));
}
SSapMsgAttribute * pAttri=(SSapMsgAttribute *)m_buffer.top();
pAttri->wType=htons(wType);
m_buffer.inc_loc(sizeof(SSapMsgAttribute));
pAttriBlock=(unsigned char *)pAttri;
}
void CSapTLVBodyEncoder::AddValueBloc(const void *pData,unsigned int nLen)
{
unsigned int nFactLen=((nLen&0x03)!=0?((nLen>>2)+1)<<2:nLen);
if(m_buffer.capacity()<nFactLen)
{
m_buffer.add_capacity(SAP_ALIGN(nFactLen-m_buffer.capacity()));
}
memcpy(m_buffer.top(),pData,nLen);
memset(m_buffer.top()+nLen,0,nFactLen-nLen);
m_buffer.inc_loc(nFactLen);
}
void CSapTLVBodyEncoder::EndValue()
{
((SSapMsgAttribute *)pAttriBlock)->wLength=htons(m_buffer.top()-pAttriBlock);
}
CSapTLVBodyDecoder::CSapTLVBodyDecoder(const void * pBuffer, unsigned int nLen):
m_pBuffer((unsigned char *)pBuffer),m_nLen(nLen)
{
const unsigned char *ptrLoc=m_pBuffer;
while(ptrLoc<m_pBuffer+m_nLen)
{
SSapMsgAttribute *pAtti=(SSapMsgAttribute *)ptrLoc;
unsigned short nLen=ntohs(pAtti->wLength);
if(nLen==0)
{
break;
}
m_mapMultiAttri.insert(AttriMultiMap::value_type(ntohs(pAtti->wType),ptrLoc));
int nFactLen=((nLen&0x03)!=0?((nLen>>2)+1)<<2:nLen);
ptrLoc+=nFactLen;
}
SS_XLOG(XLOG_DEBUG,"CSapDecodeMsg::%s,map size[%u]!\n",__FUNCTION__, m_mapMultiAttri.size());
}
void CSapTLVBodyDecoder::SetBuffer(const void * pBuffer, unsigned int nLen)
{
m_pBuffer = (unsigned char *)pBuffer;
m_nLen = nLen;
const unsigned char *ptrLoc=m_pBuffer;
while(ptrLoc<m_pBuffer+m_nLen)
{
SSapMsgAttribute *pAtti=(SSapMsgAttribute *)ptrLoc;
unsigned short nLen=ntohs(pAtti->wLength);
if(nLen==0)
{
break;
}
m_mapMultiAttri.insert(AttriMultiMap::value_type(ntohs(pAtti->wType),ptrLoc));
int nFactLen=((nLen&0x03)!=0?((nLen>>2)+1)<<2:nLen);
ptrLoc+=nFactLen;
}
}
/*Get attribute*/
int CSapTLVBodyDecoder::GetValue(unsigned short wKey,void** pValue, unsigned int * pValueLen)
{
AttriMultiMap::const_iterator itr=m_mapMultiAttri.find(wKey);
if(itr==m_mapMultiAttri.end())
{
SS_XLOG(XLOG_DEBUG,"CSapDecodeMsg::%s,type[%d] not found\n",__FUNCTION__,wKey);
return -1;
}
const unsigned char *ptrLoc=itr->second;
SSapMsgAttribute *pAtti=(SSapMsgAttribute *)ptrLoc;
*pValueLen=ntohs(pAtti->wLength)-sizeof(SSapMsgAttribute);
*pValue=pAtti->acValue;
return 0;
}
int CSapTLVBodyDecoder::GetValue(unsigned short wKey,string & strValue)
{
void *pData=NULL;
unsigned int nLen=0;
if(GetValue(wKey,&pData,&nLen)==-1||nLen<0)
{
SS_XLOG(XLOG_DEBUG,"CSapDecodeMsg::%s,type[%d],len[%d] fail!\n",__FUNCTION__,wKey,nLen);
return -1;
}
strValue=string((const char *)pData,nLen);
return 0;
}
int CSapTLVBodyDecoder::GetValue(unsigned short wKey, unsigned int * pValue)
{
void *pData=NULL;
unsigned int nLen=0;
if(GetValue(wKey,&pData,&nLen)==-1||nLen!=4)
{
SS_XLOG(XLOG_DEBUG,"CSapDecodeMsg::%s,type[%d],len[%d] fail!\n",__FUNCTION__,wKey,nLen);
return -1;
}
*pValue=ntohl(*(int *)pData);
return 0;
}
void CSapTLVBodyDecoder::GetValues(unsigned short wKey,vector<SSapValueNode> &vecValues)
{
std::pair<AttriMultiMap::const_iterator, AttriMultiMap::const_iterator> itr_pair = m_mapMultiAttri.equal_range(wKey);
AttriMultiMap::const_iterator itr;
for(itr=itr_pair.first; itr!=itr_pair.second;itr++)
{
const unsigned char *ptrLoc=itr->second;
SSapMsgAttribute *pAtti=(SSapMsgAttribute *)ptrLoc;
SSapValueNode tmp;
tmp.nLen=ntohs(pAtti->wLength)-sizeof(SSapMsgAttribute);
tmp.pLoc=pAtti->acValue;
vecValues.push_back(tmp);
}
}
void CSapTLVBodyDecoder::GetValues(unsigned short wKey,vector<string> &vecValues)
{
std::pair<AttriMultiMap::const_iterator, AttriMultiMap::const_iterator> itr_pair = m_mapMultiAttri.equal_range(wKey);
AttriMultiMap::const_iterator itr;
for(itr=itr_pair.first; itr!=itr_pair.second;itr++)
{
const unsigned char *ptrLoc=itr->second;
SSapMsgAttribute *pAtti=(SSapMsgAttribute *)ptrLoc;
int nLen=ntohs(pAtti->wLength)-sizeof(SSapMsgAttribute);
if(nLen>0)
{
string strValue=string((const char *)pAtti->acValue,nLen);
vecValues.push_back(strValue);
}
}
}
void CSapTLVBodyDecoder::GetValues(unsigned short wKey,vector<unsigned int> &vecValues)
{
std::pair<AttriMultiMap::const_iterator, AttriMultiMap::const_iterator> itr_pair = m_mapMultiAttri.equal_range(wKey);
AttriMultiMap::const_iterator itr;
for(itr=itr_pair.first; itr!=itr_pair.second;itr++)
{
const unsigned char *ptrLoc=itr->second;
SSapMsgAttribute *pAtti=(SSapMsgAttribute *)ptrLoc;
if(ntohs(pAtti->wLength)-sizeof(SSapMsgAttribute)==4)
{
vecValues.push_back(ntohl(*(unsigned int *)pAtti->acValue));
}
}
}
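On the receiving side, a decoder is constructed directly over the wire buffer and attributes are pulled out by key. A usage sketch; KEY_USER_NAME and the surrounding handler are hypothetical:

#include "SapTLVBody.h"

// Hypothetical protocol key; real key constants live elsewhere in the project.
static const unsigned short KEY_USER_NAME = 0x0001;

void HandleBody(const void *pBody, unsigned int nBodyLen)
{
    CSapTLVBodyDecoder decoder(pBody, nBodyLen);
    string strName;
    if (decoder.GetValue(KEY_USER_NAME, strName) == 0)
    {
        // Attribute present; strName holds the raw value bytes.
    }
}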
|
xdvalue/mcoding
|
base_dependencies/dst_module/src/main/java/com/mcoding/base/dst/service/income/impl/DstIncomeProductServiceImpl.java
|
package com.mcoding.base.dst.service.income.impl;
import com.mcoding.base.core.PageView;
import com.mcoding.base.dst.bean.income.DstIncomeProduct;
import com.mcoding.base.dst.bean.income.DstIncomeProductExample;
import com.mcoding.base.dst.persistence.income.DstIncomeProductMapper;
import com.mcoding.base.dst.service.income.DstIncomeProductService;
import java.util.List;
import javax.annotation.Resource;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;
@Service("defaultDstIncomeProductService")
public class DstIncomeProductServiceImpl implements DstIncomeProductService {
@Resource
protected DstIncomeProductMapper dstIncomeProductMapper;
@CacheEvict(value={"dstIncomeProduct"}, allEntries=true)
@Override
public void addObj(DstIncomeProduct t) {
this.dstIncomeProductMapper.insertSelective(t);
}
@CacheEvict(value={"dstIncomeProduct"}, allEntries=true)
@Override
public void deleteObjById(int id) {
this.dstIncomeProductMapper.deleteByPrimaryKey(id);
}
@CacheEvict(value={"dstIncomeProduct"}, allEntries=true)
@Override
public void modifyObj(DstIncomeProduct t) {
if (t.getId() == null || t.getId() == 0) {
throw new NullPointerException("id is null, cannot update");
}
this.dstIncomeProductMapper.updateByPrimaryKeySelective(t);
}
@Cacheable(value="dstIncomeProduct", key="'DstIncomeProductService_' + #root.methodName + '_' +#id")
@Override
public DstIncomeProduct queryObjById(int id) {
return this.dstIncomeProductMapper.selectByPrimaryKey(id);
}
@Cacheable(value="dstIncomeProduct", key="'DstIncomeProductService_' + #root.methodName + '_'+ #example.toJson()")
@Override
public List<DstIncomeProduct> queryAllObjByExample(DstIncomeProductExample example) {
return this.dstIncomeProductMapper.selectByExample(example);
}
@Cacheable(value="dstIncomeProduct", key="'DstIncomeProductService_' + #root.methodName + '_'+ #example.toJson()")
@Override
public PageView<DstIncomeProduct> queryObjByPage(DstIncomeProductExample example) {
PageView<DstIncomeProduct> pageView = example.getPageView();
if (pageView == null) {
pageView = new PageView<>(1, 10);
example.setPageView(pageView);
}
pageView.setQueryResult(this.dstIncomeProductMapper.selectByExampleByPage(example));
return pageView;
}
@Cacheable(value="dstIncomeProduct", key="'DstIncomeProductService_' + #root.methodName + '_'+ #levelId + '_' + #productId")
@Override
public DstIncomeProduct queryByLevelId(int levelId, int productId) {
DstIncomeProductExample example = new DstIncomeProductExample();
example.createCriteria().andLevelIdEqualTo(levelId).andProductIdEqualTo(productId);
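        // Note: calling queryAllObjByExample through "this" bypasses the Spring
        // cache proxy, so only this method's @Cacheable entry caches the result.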
List<DstIncomeProduct> list = this.queryAllObjByExample(example);
if (CollectionUtils.isEmpty(list)) {
return null;
}
return list.get(0);
}
}
|
jkulton/topical
|
internal/api/api_test.go
|
package api
import (
"errors"
"github.com/gorilla/mux"
"github.com/jkulton/topical/internal/models"
"github.com/jkulton/topical/internal/session"
"github.com/jkulton/topical/internal/templates"
"html/template"
"net/http"
"net/http/httptest"
"strings"
"testing"
)
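// MockStorage is a test double for the API's storage dependency; each method
// delegates to a swappable function field so individual tests can stub
// exactly the behavior they need.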
type MockStorage struct {
GetTopicFunc func(id int) (*models.Topic, error)
GetRecentTopicsFunc func() ([]models.Topic, error)
CreateMessageFunc func(m *models.Message) (*models.Message, error)
CreateTopicFunc func(title string) (*models.Topic, error)
}
func (s *MockStorage) GetTopic(id int) (*models.Topic, error) {
return s.GetTopicFunc(id)
}
func (s *MockStorage) GetRecentTopics() ([]models.Topic, error) {
return s.GetRecentTopicsFunc()
}
func (s *MockStorage) CreateMessage(m *models.Message) (*models.Message, error) {
return s.CreateMessageFunc(m)
}
func (s *MockStorage) CreateTopic(title string) (*models.Topic, error) {
return s.CreateTopicFunc(title)
}
var (
testSession *session.Session
testTemplates *template.Template
testStorage MockStorage
api TopicalAPI
)
func setupTests() {
testSession = session.NewSession("test")
testTemplates, _ = templates.GenerateTemplates("../../web/views/*.gohtml")
testStorage = MockStorage{
GetTopicFunc: func(id int) (*models.Topic, error) {
return &models.Topic{ID: &id, Title: "First Title"}, nil
},
GetRecentTopicsFunc: func() ([]models.Topic, error) {
return []models.Topic{}, nil
},
CreateMessageFunc: func(m *models.Message) (*models.Message, error) {
return nil, nil
},
CreateTopicFunc: func(title string) (*models.Topic, error) {
return nil, nil
},
}
api = TopicalAPI{testTemplates, &testStorage, testSession}
}
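// assertRedirect fails the test unless the response is a 302 redirect to the
// given location.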
func assertRedirect(location string, t *testing.T, res *httptest.ResponseRecorder) {
redirect := res.Header()["Location"][0]
if res.Code != http.StatusFound {
t.Errorf("got status %d but wanted %d", res.Code, http.StatusFound)
}
if redirect != location {
t.Errorf("got redirect %s but wanted %s", redirect, location)
}
}
func TestTopicShow(t *testing.T) {
t.Run("redirects home if topic not found", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodGet, "/topics/12", nil)
res := httptest.NewRecorder()
vars := map[string]string{"id": "12"}
req = mux.SetURLVars(req, vars)
testStorage.GetTopicFunc = func(id int) (*models.Topic, error) {
return &models.Topic{}, nil
}
api.TopicShow(res, req)
assertRedirect("/topics", t, res)
})
t.Run("renders error page if parsing route id error", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodGet, "/topics/12", nil)
res := httptest.NewRecorder()
vars := map[string]string{"id": "abc"}
req = mux.SetURLVars(req, vars)
api.TopicShow(res, req)
if strings.Contains(res.Body.String(), "Uh oh. Something went wrong.") == false {
t.Error("response body should include error page")
}
})
t.Run("renders error page on get topic error", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodGet, "/topics/12", nil)
res := httptest.NewRecorder()
vars := map[string]string{"id": "12"}
req = mux.SetURLVars(req, vars)
testStorage.GetTopicFunc = func(id int) (*models.Topic, error) {
return nil, errors.New("get topic error")
}
api.TopicShow(res, req)
if strings.Contains(res.Body.String(), "Uh oh. Something went wrong.") == false {
t.Error("response body should include error page")
}
})
t.Run("renders topic successfully", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodGet, "/topics/12", nil)
res := httptest.NewRecorder()
vars := map[string]string{"id": "12"}
req = mux.SetURLVars(req, vars)
api.TopicShow(res, req)
if strings.Contains(res.Body.String(), "<h2>First Title</h2>") == false {
t.Error("response body should include Topic title")
}
if res.Code != http.StatusOK {
t.Errorf("got status %d but wanted %d", res.Code, http.StatusOK)
}
})
}
func TestTopicList(t *testing.T) {
t.Run("renders flash messages from session, if present", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodGet, "/topics", nil)
res := httptest.NewRecorder()
api.session.SaveFlash("Important flash", req, res)
api.TopicList(res, req)
if strings.Contains(res.Body.String(), "<section class=\"flash flash-error\">") == false {
t.Error("response body should include redirect found link")
}
})
t.Run("renders error page if getting recent topics reutrns error", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodGet, "/topics/12", nil)
res := httptest.NewRecorder()
testStorage.GetRecentTopicsFunc = func() ([]models.Topic, error) {
return nil, errors.New("get recent topics error")
}
api.TopicList(res, req)
if strings.Contains(res.Body.String(), "Uh oh. Something went wrong.") == false {
t.Error("response body should include error page")
}
})
t.Run("renders list of topics successfully", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodGet, "/topics/12", nil)
res := httptest.NewRecorder()
testStorage.GetRecentTopicsFunc = func() ([]models.Topic, error) {
return []models.Topic{{Title: "First list title"}, {Title: "Second list title"}}, nil
}
api.TopicList(res, req)
if strings.Contains(res.Body.String(), "First list title") == false {
t.Error("response body should include first list topic")
}
if strings.Contains(res.Body.String(), "Second list title") == false {
t.Error("response body should include second list topic")
}
})
}
func TestMessageCreate(t *testing.T) {
t.Run("responds with 302 to dashboard if user not logged in", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodPost, "/topics/3/messages", nil)
res := httptest.NewRecorder()
api.MessageCreate(res, req)
assertRedirect("/topics", t, res)
})
t.Run("responds with 302 to error page if parsing route id fails", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodPost, "/topics/abc/messages", nil)
res := httptest.NewRecorder()
api.session.SaveUser(&models.User{Initials: "AK", Theme: 3}, req, res)
vars := map[string]string{"id": "abc"}
req = mux.SetURLVars(req, vars)
api.MessageCreate(res, req)
if strings.Contains(res.Body.String(), "Uh oh. Something went wrong.") == false {
t.Error("response body should include error page")
}
})
t.Run("responds with 302 to dashboard if saving message fails", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodPost, "/topics/3/messages?content=My+New+Message", nil)
res := httptest.NewRecorder()
api.session.SaveUser(&models.User{Initials: "AK", Theme: 3}, req, res)
vars := map[string]string{"id": "3"}
req = mux.SetURLVars(req, vars)
testStorage.CreateMessageFunc = func(m *models.Message) (*models.Message, error) {
return nil, errors.New("something went wrong")
}
api.MessageCreate(res, req)
assertRedirect("/topics", t, res)
})
t.Run("success", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodPost, "/topics/3/messages?content=My+New+Message", nil)
res := httptest.NewRecorder()
api.session.SaveUser(&models.User{Initials: "AK", Theme: 3}, req, res)
vars := map[string]string{"id": "3"}
req = mux.SetURLVars(req, vars)
api.MessageCreate(res, req)
assertRedirect("/topics/3", t, res)
})
}
func TestTopicNew(t *testing.T) {
t.Run("responds with 302 to dashboard if user not logged in", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodGet, "/topics/new", nil)
res := httptest.NewRecorder()
api.TopicNew(res, req)
assertRedirect("/topics", t, res)
})
t.Run("renders new topic form for logged in users", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodGet, "/topics/new", nil)
res := httptest.NewRecorder()
api.session.SaveUser(&models.User{Initials: "AK", Theme: 3}, req, res)
api.TopicNew(res, req)
if strings.Contains(res.Body.String(), "<form class=\"new-message-form") == false {
t.Error("response body should include new topic form")
}
})
}
func TestTopicCreate(t *testing.T) {
t.Run("responds with 302 to dashboard if user not logged in", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodPost, "/topics/new", nil)
res := httptest.NewRecorder()
api.TopicCreate(res, req)
assertRedirect("/topics", t, res)
})
t.Run("responds with error page if creating topic returns error", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodPost, "/topics/new?title=Birdwatchig+tips&content=check+it+out", nil)
res := httptest.NewRecorder()
api.session.SaveUser(&models.User{Initials: "AK", Theme: 3}, req, res)
testStorage.CreateTopicFunc = func(title string) (*models.Topic, error) {
return nil, errors.New("something went wrong creating topic")
}
api.TopicCreate(res, req)
if strings.Contains(res.Body.String(), "Uh oh. Something went wrong.") == false {
t.Error("response body should include error page")
}
})
t.Run("responds with error page if creating message returns error", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodPost, "/topics/new?title=Birdwatchig+tips&content=check+it+out", nil)
res := httptest.NewRecorder()
api.session.SaveUser(&models.User{Initials: "AK", Theme: 3}, req, res)
topicID := 321
testStorage.CreateTopicFunc = func(title string) (*models.Topic, error) {
return &models.Topic{ID: &topicID, Title: title, Messages: &[]models.Message{}}, nil
}
testStorage.CreateMessageFunc = func(m *models.Message) (*models.Message, error) {
return nil, errors.New("something went wrong creating message")
}
api.TopicCreate(res, req)
if strings.Contains(res.Body.String(), "Uh oh. Something went wrong.") == false {
t.Error("response body should include error page")
}
})
t.Run("responds with 302 to newly created topic on success", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodPost, "/topics/new?title=Birdwatchig+tips&content=check+it+out", nil)
res := httptest.NewRecorder()
api.session.SaveUser(&models.User{Initials: "AK", Theme: 3}, req, res)
topicID := 321
testStorage.CreateTopicFunc = func(title string) (*models.Topic, error) {
return &models.Topic{ID: &topicID, Title: title, Messages: &[]models.Message{}}, nil
}
api.TopicCreate(res, req)
assertRedirect("/topics/321", t, res)
})
}
func TestJoinShow(t *testing.T) {
t.Run("responds with 302 to dashboard if user logged in", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodGet, "/join", nil)
res := httptest.NewRecorder()
api.session.SaveUser(&models.User{Initials: "AK", Theme: 3}, req, res)
api.JoinShow(res, req)
assertRedirect("/topics", t, res)
})
t.Run("renders join page", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodGet, "/join", nil)
res := httptest.NewRecorder()
api.JoinShow(res, req)
if strings.Contains(res.Body.String(), "Join the conversation.") == false {
t.Error("response body should include join page")
}
})
}
func TestJoinCreate(t *testing.T) {
t.Run("does not save user if user initials invalid", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodPost, "/join?initials=ABC&theme=1", nil)
res := httptest.NewRecorder()
api.JoinCreate(res, req)
user, _ := api.session.GetUser(req)
if user != nil {
t.Error("user should not have been set")
}
assertRedirect("/join", t, res)
})
t.Run("redirects back to join page if theme invalid", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodPost, "/join?initials=AK&theme=abc", nil)
res := httptest.NewRecorder()
api.JoinCreate(res, req)
user, _ := api.session.GetUser(req)
if user != nil {
t.Error("user should not have been set")
}
assertRedirect("/join", t, res)
})
t.Run("saves user and redirects home", func(t *testing.T) {
setupTests()
req := httptest.NewRequest(http.MethodPost, "/join?initials=AK&theme=3", nil)
res := httptest.NewRecorder()
api.JoinCreate(res, req)
user, _ := api.session.GetUser(req)
if user.Initials != "AK" {
t.Error("user should have been set to correct value")
}
assertRedirect("/topics", t, res)
})
}
|
ranchlin/Leetcode
|
Python3.x/300-Longest Increasing Subsequence.py
|
# dp
class Solution:
def lengthOfLIS(self, nums: 'List[int]') -> 'int':
if len(nums) < 2: return len(nums)
        dp = [1] * len(nums)
for i in range(1, len(nums)):
for j in range(0, i):
if nums[i] > nums[j]: dp[i] = max(dp[i], dp[j] + 1)
return max(dp)
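# alternative sketch (not part of the original submission): patience sorting
# with bisect runs in O(n log n); tails[i] holds the smallest possible tail of
# an increasing subsequence of length i + 1
import bisect
class Solution2:
    def lengthOfLIS(self, nums: 'List[int]') -> 'int':
        tails = []
        for n in nums:
            i = bisect.bisect_left(tails, n)
            if i == len(tails):
                tails.append(n)  # n extends the longest subsequence found so far
            else:
                tails[i] = n  # n becomes a smaller tail for length i + 1
        return len(tails)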
|
ung-org/lib-c
|
src/wchar/getwc.c
|
#include <wchar.h>
#include <stdio.h>
wint_t getwc(FILE * stream)
{
return fgetwc(stream);
}
/*
STDC(199409)
*/
|
kenmutuma001/galleria
|
virtual/lib/python3.6/site-packages/object_tools/tests/tools.py
|
from __future__ import unicode_literals
from django import forms
from django.contrib.admin.widgets import AdminSplitDateTime
import object_tools
class TestForm(forms.Form):
pass
class TestMediaForm(forms.Form):
media_field = forms.fields.DateTimeField(
widget=AdminSplitDateTime,
)
class TestTool(object_tools.ObjectTool):
name = 'test_tool'
label = 'Test Tool'
form_class = TestForm
def view(self, request, extra_context=None):
pass
class TestMediaTool(object_tools.ObjectTool):
name = 'test_media_tool'
label = 'Test Media Tool'
form_class = TestMediaForm
def view(self, request, extra_context=None):
pass
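# Intentionally defines no name/label/form_class; presumably used to exercise
# invalid-tool validation.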
class TestInvalidTool(object_tools.ObjectTool):
pass
object_tools.tools.register(TestTool)
object_tools.tools.register(TestMediaTool)
|
MaiReo/crass
|
src/cui-1.0.4/666-SYSTEM/666_SYSTEM.cpp
|
#include <windows.h>
#include <tchar.h>
#include <crass_types.h>
#include <acui.h>
#include <cui.h>
#include <package.h>
#include <resource.h>
#include <cui_error.h>
#include <stdio.h>
/* Interface data structure: general information about this cui plugin */
struct acui_information _666_SYSTEM_cui_information = {
_T("HEXA"), /* copyright */
_T("666-SYSTEM"), /* system */
_T(".dat .ml .gl .sl"), /* package */
_T("1.0.2"), /* revision */
_T("痴漢公賊"), /* author */
_T("2008-7-12 16:15"), /* date */
NULL, /* notion */
ACUI_ATTRIBUTE_LEVEL_STABLE
};
/* All package-specific data structures must live inside this #pragma pack section */
#pragma pack (1)
typedef struct {
s8 magic[4];
u16 version; // 100
u32 data_offset;
u32 entries;
u32 data_length;
} msg_header_t;
typedef struct {
s8 magic[4]; // "HSL "
u16 version; // 100
u32 name_offset;// name_offset
	u32 offset_table_offset; // 4 bytes per entry
u32 data_offset;
u16 start_name_length;
s8 *start;
u32 entries;
u16 name_length;
s8 *name;
} snr_header_t;
typedef struct {
s8 magic[4]; // "GLNK"
u16 version; // 101, 110
u32 index_entries;
u32 index_offset;
u32 index_length;
} glnk_header_t;
typedef struct {
s8 magic[4]; // "SKMD"
u16 version; // 100
	u32 entries; // one extra entry, zero-terminated
u32 data_length;
} skmd_header_t;
#pragma pack ()
typedef struct {
s8 name[MAX_PATH];
u32 offset;
u32 length;
} my_msg_entry_t;
typedef struct {
s8 name[MAX_PATH];
u32 offset;
u32 length;
} my_glnk_entry_t;
/********************* msg.dat *********************/
/* Package match callback */
static int SYSTEM666_msg_match(struct package *pkg)
{
s8 magic[4];
if (pkg->pio->open(pkg, IO_READONLY))
return -CUI_EOPEN;
if (pkg->pio->read(pkg, magic, sizeof(magic))) {
pkg->pio->close(pkg);
return -CUI_EREAD;
}
if (memcmp(magic, "HMO ", 4)) {
pkg->pio->close(pkg);
return -CUI_EMATCH;
}
return 0;
}
/* Package resource extraction */
static int SYSTEM666_msg_extract_resource(struct package *pkg,
struct package_resource *pkg_res)
{
msg_header_t *msg_header;
DWORD *offset_table;
DWORD i;
BYTE *dat;
u32 fsize;
if (pkg->pio->length_of(pkg, &fsize))
return -CUI_ELEN;
dat = (BYTE *)malloc(fsize);
if (!dat)
return -CUI_EMEM;
	if (pkg->pio->readvec(pkg, dat, fsize, 0, IO_SEEK_SET)) {
		free(dat);
		return -CUI_EREADVEC;
	}
msg_header = (msg_header_t *)dat;
offset_table = (DWORD *)(msg_header + 1);
BYTE *raw_data = dat + msg_header->data_offset;
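	/* each entry is a 4-byte length followed by a bitwise-inverted payload;
	   invert the bytes again to decode in place */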
for (i = 0; i < msg_header->entries; i++) {
DWORD len;
BYTE *entry_data;
entry_data = raw_data + offset_table[i];
len = *(u32 *)entry_data;
entry_data += 4;
for (DWORD k = 0; k < len; k++)
entry_data[k] = ~entry_data[k];
}
pkg_res->raw_data = dat;
pkg_res->raw_data_length = fsize;
return 0;
}
/* Resource save */
static int SYSTEM666_msg_save_resource(struct resource *res,
struct package_resource *pkg_res)
{
if (res->rio->create(res))
return -CUI_ECREATE;
if (pkg_res->raw_data && pkg_res->raw_data_length) {
if (res->rio->write(res, pkg_res->raw_data, pkg_res->raw_data_length)) {
res->rio->close(res);
return -CUI_EWRITE;
}
}
res->rio->close(res);
return 0;
}
/* Package resource release */
static void SYSTEM666_msg_release_resource(struct package *pkg,
struct package_resource *pkg_res)
{
if (pkg_res->raw_data) {
free(pkg_res->raw_data);
pkg_res->raw_data = NULL;
}
}
/* Package unload */
static void SYSTEM666_msg_release(struct package *pkg,
struct package_directory *pkg_dir)
{
if (pkg_dir->directory) {
free(pkg_dir->directory);
pkg_dir->directory = NULL;
}
pkg->pio->close(pkg);
}
/* Package handler callback set */
static cui_ext_operation SYSTEM666_msg_operation = {
SYSTEM666_msg_match, /* match */
NULL, /* extract_directory */
NULL, /* parse_resource_info */
SYSTEM666_msg_extract_resource, /* extract_resource */
SYSTEM666_msg_save_resource, /* save_resource */
SYSTEM666_msg_release_resource, /* release_resource */
SYSTEM666_msg_release /* release */
};
/********************* skmd.dat *********************/
/* Package match callback */
static int SYSTEM666_skmd_match(struct package *pkg)
{
s8 magic[4];
if (pkg->pio->open(pkg, IO_READONLY))
return -CUI_EOPEN;
if (pkg->pio->read(pkg, magic, sizeof(magic))) {
pkg->pio->close(pkg);
return -CUI_EREAD;
}
if (memcmp(magic, "SKMD", 4)) {
pkg->pio->close(pkg);
return -CUI_EMATCH;
}
return 0;
}
/* Package resource extraction */
static int SYSTEM666_skmd_extract_resource(struct package *pkg,
struct package_resource *pkg_res)
{
u32 skmd_size;
if (pkg->pio->length_of(pkg, &skmd_size))
return -CUI_ELEN;
BYTE *skmd = (BYTE *)malloc(skmd_size);
if (!skmd)
return -CUI_EMEM;
if (pkg->pio->readvec(pkg, skmd, skmd_size, 0, IO_SEEK_SET)) {
free(skmd);
return -CUI_EREADVEC;
}
skmd_header_t *skmd_head = (skmd_header_t *)skmd;
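	/* the payload follows the header and an (entries + 1)-slot offset table,
	   and is stored bitwise-inverted */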
BYTE *data = skmd + sizeof(skmd_header_t) + (skmd_head->entries + 1) * 4;
for (DWORD i = 0; i < skmd_head->data_length; ++i)
data[i] = ~data[i];
pkg_res->raw_data = skmd;
pkg_res->raw_data_length = skmd_size;
return 0;
}
/* Package handler callback set */
static cui_ext_operation SYSTEM666_skmd_operation = {
SYSTEM666_skmd_match, /* match */
NULL, /* extract_directory */
NULL, /* parse_resource_info */
SYSTEM666_skmd_extract_resource,/* extract_resource */
SYSTEM666_msg_save_resource, /* save_resource */
SYSTEM666_msg_release_resource, /* release_resource */
SYSTEM666_msg_release /* release */
};
/********************* glnk .dat *********************/
/* Package match callback */
static int SYSTEM666_glnk_match(struct package *pkg)
{
s8 magic[4];
if (pkg->pio->open(pkg, IO_READONLY))
return -CUI_EOPEN;
if (pkg->pio->read(pkg, magic, sizeof(magic))) {
pkg->pio->close(pkg);
return -CUI_EREAD;
}
if (memcmp(magic, "GLNK", 4)) {
pkg->pio->close(pkg);
return -CUI_EMATCH;
}
return 0;
}
/* Package index/directory extraction */
static int SYSTEM666_glnk_extract_directory(struct package *pkg,
struct package_directory *pkg_dir)
{
glnk_header_t glnk_header;
DWORD index_buffer_length;
DWORD i;
my_glnk_entry_t *index_buffer;
BYTE *index;
if (pkg->pio->readvec(pkg, &glnk_header, sizeof(glnk_header), 0, IO_SEEK_SET))
return -CUI_EREADVEC;
index = (BYTE *)malloc(glnk_header.index_length);
if (!index)
return -CUI_EMEM;
if (pkg->pio->read(pkg, index, glnk_header.index_length)) {
free(index);
return -CUI_EREAD;
}
index_buffer_length = glnk_header.index_entries * sizeof(my_glnk_entry_t);
index_buffer = (my_glnk_entry_t *)malloc(index_buffer_length);
if (!index_buffer) {
free(index);
return -CUI_EMEM;
}
BYTE *p = index;
for (i = 0; i < glnk_header.index_entries; i++) {
BYTE name_len;
name_len = *p++;
strncpy(index_buffer[i].name, (char *)p, name_len);
index_buffer[i].name[name_len] = 0;
p += name_len;
index_buffer[i].offset = *(DWORD *)p;
p += 4;
index_buffer[i].length = *(DWORD *)p;
p += 4;
if (glnk_header.version == 110)
			p += 4; /* skip length_hi */
}
free(index);
pkg_dir->index_entries = glnk_header.index_entries;
pkg_dir->directory = index_buffer;
pkg_dir->directory_length = index_buffer_length;
pkg_dir->index_entry_length = sizeof(my_glnk_entry_t);
return 0;
}
/* Package index entry parsing */
static int SYSTEM666_glnk_parse_resource_info(struct package *pkg,
struct package_resource *pkg_res)
{
my_glnk_entry_t *my_glnk_entry;
my_glnk_entry = (my_glnk_entry_t *)pkg_res->actual_index_entry;
strcpy(pkg_res->name, my_glnk_entry->name);
	pkg_res->name_length = -1; /* -1 means the name is NUL-terminated */
pkg_res->raw_data_length = my_glnk_entry->length;
	pkg_res->actual_data_length = 0; /* data is stored as plaintext */
pkg_res->offset = my_glnk_entry->offset;
return 0;
}
/* Package resource extraction */
static int SYSTEM666_glnk_extract_resource(struct package *pkg,
struct package_resource *pkg_res)
{
pkg_res->raw_data = malloc(pkg_res->raw_data_length);
if (!pkg_res->raw_data)
return -CUI_EMEM;
if (pkg->pio->readvec(pkg, pkg_res->raw_data, pkg_res->raw_data_length,
pkg_res->offset, IO_SEEK_SET)) {
free(pkg_res->raw_data);
pkg_res->raw_data = NULL;
return -CUI_EREADVEC;
}
return 0;
}
/* Resource save */
static int SYSTEM666_glnk_save_resource(struct resource *res,
struct package_resource *pkg_res)
{
if (res->rio->create(res))
return -CUI_ECREATE;
if (pkg_res->raw_data && pkg_res->raw_data_length) {
if (res->rio->write(res, pkg_res->raw_data, pkg_res->raw_data_length)) {
res->rio->close(res);
return -CUI_EWRITE;
}
}
res->rio->close(res);
return 0;
}
/* Package resource release */
static void SYSTEM666_glnk_release_resource(struct package *pkg,
struct package_resource *pkg_res)
{
if (pkg_res->raw_data) {
free(pkg_res->raw_data);
pkg_res->raw_data = NULL;
}
}
/* Package unload */
static void SYSTEM666_glnk_release(struct package *pkg,
struct package_directory *pkg_dir)
{
if (pkg_dir->directory) {
free(pkg_dir->directory);
pkg_dir->directory = NULL;
}
pkg->pio->close(pkg);
}
/* Package handler callback set */
static cui_ext_operation SYSTEM666_glnk_operation = {
SYSTEM666_glnk_match, /* match */
SYSTEM666_glnk_extract_directory, /* extract_directory */
SYSTEM666_glnk_parse_resource_info, /* parse_resource_info */
SYSTEM666_glnk_extract_resource, /* extract_resource */
SYSTEM666_glnk_save_resource, /* save_resource */
SYSTEM666_glnk_release_resource, /* release_resource */
SYSTEM666_glnk_release /* release */
};
/* Interface function: registers the supported package types with cui_core */
int CALLBACK _666_SYSTEM_register_cui(struct cui_register_callback *callback)
{
if (callback->add_extension(callback->cui, _T(".dat"), NULL,
NULL, &SYSTEM666_glnk_operation, CUI_EXT_FLAG_PKG | CUI_EXT_FLAG_DIR))
return -1;
if (callback->add_extension(callback->cui, _T(".ml"), NULL,
NULL, &SYSTEM666_glnk_operation, CUI_EXT_FLAG_PKG | CUI_EXT_FLAG_DIR))
return -1;
if (callback->add_extension(callback->cui, _T(".gl"), NULL,
NULL, &SYSTEM666_glnk_operation, CUI_EXT_FLAG_PKG | CUI_EXT_FLAG_DIR))
return -1;
if (callback->add_extension(callback->cui, _T(".sl"), NULL,
NULL, &SYSTEM666_glnk_operation, CUI_EXT_FLAG_PKG | CUI_EXT_FLAG_DIR))
return -1;
if (callback->add_extension(callback->cui, _T(".dat"), _T(".dat_"),
NULL, &SYSTEM666_msg_operation, CUI_EXT_FLAG_PKG))
return -1;
if (callback->add_extension(callback->cui, _T(".dat"), _T(".dat_"),
NULL, &SYSTEM666_skmd_operation, CUI_EXT_FLAG_PKG))
return -1;
return 0;
}
|
coconut2015/agg-tutorial
|
docs/classagg_1_1scanline__p8.js
|
var classagg_1_1scanline__p8 =
[
[ "span", "structagg_1_1scanline__p8_1_1span.html", "structagg_1_1scanline__p8_1_1span" ],
[ "const_iterator", "classagg_1_1scanline__p8.html#a51f8bfca101215e0a19b51926166a160", null ],
[ "coord_type", "classagg_1_1scanline__p8.html#a904acf43583706c4c887da5c03a02dac", null ],
[ "cover_type", "classagg_1_1scanline__p8.html#afa458213f658a779c701346972423096", null ],
[ "iterator", "classagg_1_1scanline__p8.html#a50cd4d55209dc177b68f9d12f5545688", null ],
[ "self_type", "classagg_1_1scanline__p8.html#aa76c740ab7e3f9bef028d637d0cc15c1", null ],
[ "scanline_p8", "classagg_1_1scanline__p8.html#ad3531a002ea6434d41286fbdd551df65", null ],
[ "add_cell", "classagg_1_1scanline__p8.html#a843e7c9bf4dad137bb4c42c418b60a3d", null ],
[ "add_cells", "classagg_1_1scanline__p8.html#aaf44f1a5c146e57c733c73d402cb2324", null ],
[ "add_span", "classagg_1_1scanline__p8.html#ac7cde0da406aaeb3ca8747b1c2c97437", null ],
[ "begin", "classagg_1_1scanline__p8.html#a4ff048c5240b5763284a575005d027ef", null ],
[ "finalize", "classagg_1_1scanline__p8.html#aa0facc30b0e39abb692d19506e3d09e5", null ],
[ "num_spans", "classagg_1_1scanline__p8.html#a282510c20a7ae96a3354507555c12cee", null ],
[ "reset", "classagg_1_1scanline__p8.html#a89f147cb6793a1290872b209dacc9e13", null ],
[ "reset_spans", "classagg_1_1scanline__p8.html#a885aecdc282f5ab95b228a566071713d", null ],
[ "y", "classagg_1_1scanline__p8.html#a6753f7a62d5058bcb453198a4fe9848b", null ]
];
|
0003088/libelektra-qt-gui-test
|
src/libgetenv/examples/getenv.c
|
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
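/*
 * With no arguments: walk environ and echo every variable back through
 * getenv(). With arguments: look up each name, printing its value and
 * exiting non-zero if any is unset.
 */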
int main(int argc, char** argv, char** environ)
{
if (argc == 1)
{
char** env;
for (env = environ; *env != 0; env++)
{
const size_t len = strcspn(*env, "=");
char name[len+1];
strncpy(name, *env, len);
name[len] = 0;
const char *c = getenv(name);
printf ("getenv(\"%s\") -> ", name);
printf ("%s\n", c);
}
}
else
{
for (int i=1; i<argc; ++i)
{
const char *name = argv[i];
const char *c = getenv(name);
if (!c) return 1;
printf ("%s\n", c);
}
}
return 0;
}
|
YoApp/yo-api
|
tests/__init__.py
|
# -*- coding: utf-8 -*-
from gevent import monkey
monkey.patch_all()
import mock
import unittest
from flask import json
from flask_principal import identity_changed
from pygeocoder import GeocoderError
from giphypop import Giphy
from imgurpython import ImgurClient
from requests import Session
from parse_rest.query import QueryManager as ParseUserQuery
from parse_rest.user import User as ParseUser
from twilio.rest import Messages
from werkzeug.datastructures import Headers
from yoapi.accounts import facebook
from yoapi.core import cache, redis, principals, limiter, sns, s3
from yoapi.factory import create_api_app, create_worker_app
from yoapi.helpers import random_string
from yoapi.jwt import generate_token
from yoapi.models import (User, Contact, Yo, Header,
NotificationEndpoint, ABExperiment, ABTest,
ResponseCategory)
from yoapi.parse import Parse
from yoapi.security import YoIdentity
from yoapi.services import low_rq, medium_rq, high_rq
from yoapi.urltools import UrlHelper
from yoapi.extensions.flask_sendgrid import SendGridClient
class BaseTestCase(unittest.TestCase):
"""Base for common functions across all tests.
TODO: We need a dedicated test account.
"""
app = None
worker_app = None
_ephemeral_account = {'username': 'TESTUSERTEMP',
'password': '<PASSWORD>'}
_ephemeral_api = {'username': 'TESTUSERPYTHONAPI',
'parent': 'TESTUSERPYTHON',
'name': 'API testing account',
'needs_location': True,
'dont_send_api_email': True}
_ephemeral_invalid_api = {'username': '18493848473',
'parent': 'TESTUSERPYTHON',
'name': 'API testing account',
'needs_location': True,
'dont_send_api_email': True}
_user1_push_token = ('<KEY>'
'lSlg1lK9ertowyhyia-71Fh1KpE311hdijVPZFlXwryDXDdx_X'
'QIBlwRcrL5Nvlo39yzkb7SXU5x3IPqnulcx5dryq5-oGoc6fc9'
'pBRhVHvkMdRQ')
_user2_push_token = ('c5165dad5f8ab1b90dbda9be37263829b1477c011a5e6e942a'
'<PASSWORD>')
_photo = """
<KEY>
QUFBQUFBREREREREREREREREREREREREREREREREREREREQBFRkZIBwgJhgY<KEY>DYrKzZE
RERCNUJERERERERERERERERERERERERERERERERERERERERERERERERERP/AABEIABQAFAMBIgAC
EQEDEQH/xABhAAEAAwAAAAAAAAAAAAAAAAAAAgUGAQEAAAAAAAAAAAAAAAAAAAAAEAAABQIFAwUB
AAAAAAAAAAAAARECEgMEITEiEwVRgRXwQZGhMxQRAQAAAAAAAAAAAAAAAAAAAAD/2gAMAwEAAhED
EQA/ANG3mLY21XG427P6E4kMuwhV521pMY9xu1lJrYnJOqCt5jjirX9BxMM2VDStEjTA8FE7lr7D
kTuzpufScyBbbVieHt2+wFj5uz2P6J6FjlivRMwGY8fcz8hsuhuz2k1RVcvXwADcAAAAAAD/2Q==
"""
_test_parse_token = '<KEY>' # Never expires?
android_111064067_ua = 'Yo/111064067 (Android; Nexus 5; 21; 5.0.1)'
android_111064076_ua = 'Yo/111064076 (Android; Nexus 5; 21; 5.0.1)'
androidbeta_111064077_ua = 'YoBeta/111064077 (Android; Nexus 5; 21; 5.0.1)'
ios_141_ua = 'Yo/1.4.1 (iPhone; iOS 8.1.2; Scale/2.00)'
ios_146_ua = 'Yo/1.4.6 (iPhone; iOS 8.1.2; Scale/2.00)'
ios_154_ua = 'Yo/1.5.4 (iPhone; iOS 8.1.2; Scale/2.00)'
ios_155_ua = 'Yo/1.5.5 (iPhone; iOS 8.1.2; Scale/2.00)'
ios_big_155_ua = 'Yo/1.5.5 (iPhone; iOS 8.1.2; Scale/3.00)'
iosbeta_156_ua = 'YoBeta/1.5.6 (iPhone; iOS 8.1.2; Scale/2.00)'
installation_id = 'automatic-testing'
_sns_delivery_failure_json = {
'MessageId': 'adb968a8-2a7a-5d30-8bfc-25e8843f2596',
'Timestamp': '2015-01-26T21:05:09.766Z',
'TopicArn': 'arn:aws:sns:us-east-1:131325091098:sys_delivery_failure',
'UnsubscribeURL': 'https://sns.us-east-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:us-east-1:131325091098:sys_delivery_failure:07f18142-7a8a-41d4-a72f-5a13c5d52f6f',
'Subject': 'DeliveryFailure event for app iOS (APNS)',
'Signature': 'JF2JK9GlPEnB9Yu8yeZ47ZnHEcVRP4+Oycxs7sx5zyuNK5lL/Ul0glt8i24LiUD9CUGBcQUQn9AsyjVTCpnK93+x69rObhn5MxcjEewp+vXCbmysBNKWmHF4yfyR/F2qeHy+sZ65JDuf0rVIZSKQf0ajleGVMuVfb3/DHQ6l822t7lXsCdKvAZGvgwgt8buv8BthdVpIY4Navvpi3j1feMiMy2ZtRM5LHNz1q2KMm+VYCSWGE/0MJ+ySMokDQtSKQBmrbYQ2csJ67BkWQGCaZqDW9O+yWrDiiIj8LqlcJZbvboJcZnwfWDjnw2eLBqe/N0RuxA3gPHWv3zL4wDRGmA==',
'SigningCertURL': 'https://sns.us-east-1.amazonaws.com/SimpleNotificationService-d6d679a1d18e95c2f9ffcf11f4f9e198.pem',
'SignatureVersion': '1',
'Type': 'Notification',
'Message': '{\"DeliveryAttempts\":1,\"EndpointArn\":\"arn:aws:sns:us-east-1:131325091098:endpoint/APNS/iOS/cc9a9af5-59a6-3593-9305-834ccb9ca9f7\",\"EventType\":\"DeliveryFailure\",\"FailureMessage\":\"Endpoint is disabled\",\"FailureType\":\"EndpointDisabled\",\"MessageId\":\"7094feb9-e6ca-5faa-bc20-b3f9488faf48\",\"Resource\":\"arn:aws:sns:us-east-1:131325091098:app/APNS/iOS\",\"Service\":\"SNS\",\"Time\":\"2015-01-26T21:05:09.730Z\"}'
}
@classmethod
def setup_class(cls):
# Prepare the app and push the app context.
cls.app = cls._create_app()
cls.app_context = cls.app.app_context()
cls.app_context.push()
cls.client = cls.app.test_client(use_cookies=False)
# Create a worker app as well for background tasks.
cls.worker_app = cls._create_worker_app()
cls.worker = low_rq.create_worker(app=cls.worker_app, pool_size=20)
# Make sure we are not working against the production database.
assert 'localhost' in cls.app.config['MONGODB_HOST']
assert 'localhost' in cls.app.config['CACHE_REDIS_URL']
assert 'localhost' in cls.app.config['REDIS_URL']
# Install patchers for libraries that make external requests.
cls.live_counter_patcher = mock.patch('yoapi.yos.send.ping_live_counter')
cls.geocoder_patcher = mock.patch('yoapi.yos.send.geocoder.reverse_geocode')
cls.parse_push_patcher = mock.patch.object(Parse, 'push')
cls.parse_signup_patcher = mock.patch.object(ParseUser, 'signup')
cls.parse_subscribe_patcher = mock.patch.object(Parse, 'subscribe')
cls.parse_unsubscribe_patcher = mock.patch.object(Parse, 'unsubscribe')
cls.parse_delete_patcher = mock.patch.object(ParseUser, 'DELETE')
cls.parse_query_get_patcher = mock.patch.object(ParseUserQuery, 'get')
cls.send_grid_send_patcher = mock.patch.object(SendGridClient, 'send')
cls.facebook_get_profile_patcher = mock.patch.object(facebook, 'get_profile')
cls.facebook_get_picture_patcher = mock.patch.object(facebook, 'get_profile_picture')
cls.s3_upload_image_patcher = mock.patch.object(s3, 'upload_image')
cls.sns_subscribe_patcher = mock.patch.object(sns, 'subscribe')
cls.sns_unsubscribe_patcher = mock.patch.object(sns, 'unsubscribe')
cls.sns_publish_patcher = mock.patch.object(sns, 'publish')
cls.sns_create_endpoint_patcher = mock.patch.object(sns, 'create_endpoint')
cls.sns_delete_endpoint_patcher = mock.patch.object(sns, 'delete_endpoint')
cls.sns_create_topic_patcher = mock.patch.object(sns, 'create_topic')
cls.sns_set_endpoint_patcher = mock.patch.object(sns, 'set_endpoint')
cls.twilio_send_patcher = mock.patch.object(Messages, 'create')
cls.get_request_patcher = mock.patch.object(Session, 'request')
cls.get_link_content_type_patcher = mock.patch('yoapi.yos.send.get_link_content_type')
cls.short_url_patcher = mock.patch.object(UrlHelper, 'get_short_url')
cls.giphy_search_patcher = mock.patch.object(Giphy, 'search')
cls.imgur_search_patcher = mock.patch.object(ImgurClient, 'gallery_search')
cls.experiment_logger_patcher = mock.patch.object(ABExperiment, 'log')
def setUp(self):
"""Runs before each job"""
# Drop data stored in previous test runs.
Contact.drop_collection()
NotificationEndpoint.drop_collection()
User.drop_collection()
Yo.drop_collection()
Header.drop_collection()
ABTest.drop_collection()
ResponseCategory.drop_collection()
# Clear the flask-cache redis cache.
cache.clear()
# Clear redis cache.
redis.flushdb()
# Clear RQ databases.
low_rq.connection.flushdb()
medium_rq.connection.flushdb()
high_rq.connection.flushdb()
# Clear limiter redis cache.
limiter.storage.storage.flushdb()
# Ensure indexes
Contact.ensure_indexes()
NotificationEndpoint.ensure_indexes()
User.ensure_indexes()
Yo.ensure_indexes()
Header.ensure_indexes()
ABTest.ensure_indexes()
ResponseCategory.ensure_indexes()
self._phone1 = '+14153351320'
# Create user 1
token = random_string(length=5)
_user1 = User(
username='TESTUSERPYTHON',
facebook_id='testuser1',
email='<EMAIL>',
phone=self._phone1,
first_name='<NAME>',
last_name='User',
api_token=token)
_user1.set_password('<PASSWORD>')
_user1.save()
self._user1_jwt = generate_token(_user1)
self._user1 = _user1
# Create user 2
token = random_string(length=5)
_user2 = User(username='TESTUSERPYTHON2',
facebook_id='testuser2',
first_name='<NAME>',
last_name='User',
topic_arn='test:arn',
api_token=token)
_user2.set_password('<PASSWORD>')
_user2.save()
self._user2_jwt = generate_token(_user2)
self._user2 = _user2
# Create user 3
_user3 = User(username='TESTUSERPYTHON3')
_user3.set_password('<PASSWORD>')
_user3.save()
self._user3_jwt = generate_token(_user3)
self._user3 = _user3
# Create user 4
_user4 = User(username='TESTUSERPYTHON4',
first_name='Very Long First Name With Lots and Lots of Words',
last_name='Also very long but probably doesn\'t matter')
_user4.set_password('<PASSWORD>')
_user4.save()
self._user4_jwt = generate_token(_user4)
self._user4 = _user4
# Create user 5 for YOALL rate limit testing **ONLY
_yoalluser = User(username='TESTYOALLUSER')
_yoalluser.set_password('<PASSWORD>')
_yoalluser.save()
self._yoalluser_jwt = generate_token(_yoalluser)
self._yoalluser = _yoalluser
# Create headers for first yos.
Header(sms=u'👉 Swipe/Tap to open\n📎 Yo From %(sender_display_name)s',
push=u'👉 Swipe/Tap to open\n📎 Yo From %(sender_display_name)s',
ending='\n\nTap to Yo back: %(webclient_url)s',
yo_type='link_yo', is_default=False, group_yo=False,
id='54dd685ca17351c1d859689e').save()
Header(sms=u'👉 Swipe/Tap to open\n📍 Yo From %(sender_display_name)s',
push=u'👉 Swipe/Tap to open\n📍 Yo From %(sender_display_name)s',
ending='\n\nTap to Yo back: %(webclient_url)s',
yo_type='location_yo', is_default=False, group_yo=False,
id='54dd6880a17351c1d85968b3').save()
        Header(sms=(u'👉 You won\'t believe what Yo can do.\n📎 Open this '
                    u'Yo From %(sender_display_name)s'),
              push=(u'👉 You won\'t believe what Yo can do.\n📎 Open this '
                    u'Yo From %(sender_display_name)s'),
ending='\n\nTap to Yo back: %(webclient_url)s',
yo_type='link_yo', is_default=False, group_yo=False,
id='54dd6939a17351c1d859692e').save()
Header(sms=(u'📍This is a Location Yo\n👐 Open it. \n☝️Double tap your '
u'friends to send one From %(sender_display_name)s'),
push=(u'📍This is a Location Yo\n👐 Open it. \n☝️Double tap your '
u'friends to send one From %(sender_display_name)s'),
ending='\n\nTap to Yo back: %(webclient_url)s',
yo_type='link_yo', is_default=False, group_yo=False,
id='54de9ecba17351c1d85a55aa').save()
# Create headers for SMS copy.
Header(sms='Yo from %(sender_display_name)s.',
push='%(emoji)s Yo %(from)s %(sender_display_name)s',
ending='\n\nTap to Yo back: %(webclient_url)s',
id='54dd67efa17351c1d8596887',
yo_type='default_yo', group_yo=False, is_default=True).save()
Header(sms=('Yo Photo from %(sender_display_name)s '
'via %(forwarded_from)s.'),
push=('%(emoji)s Yo Photo %(from)s %(forwarded_from)s '
'via %(sender_display_name)s'),
ending='\n\nTap to view: %(webclient_url)s',
yo_type='forwarded_photo_yo', group_yo=False,
is_default=True).save()
Header(sms=('Yo Link from %(sender_display_name)s '
'via %(forwarded_from)s.'),
push=('%(emoji)s Yo Link %(from)s %(forwarded_from)s '
'via %(sender_display_name)s'),
ending='\n\nTap to view: %(webclient_url)s',
yo_type='forwarded_yo', group_yo=False, is_default=True).save()
Header(sms='Yo Link from %(sender_display_name)s.',
push='%(emoji)s Yo Link %(from)s %(sender_display_name)s',
ending='\n\nTap to view: %(webclient_url)s',
yo_type='link_yo', group_yo=False, is_default=True).save()
Header(sms='Yo Location from %(sender_display_name)s @ %(city)s.',
push='%(emoji)s Yo Location %(from)s %(sender_display_name)s @ %(city)s',
ending='\n\nTap to see where they are: %(webclient_url)s',
yo_type='location_city_yo', group_yo=False,
is_default=True).save()
Header(sms='Yo Location from %(sender_display_name)s.',
push='%(emoji)s Yo Location %(from)s %(sender_display_name)s',
ending='\n\nTap to see where they are: %(webclient_url)s',
yo_type='location_yo', group_yo=False, is_default=True).save()
Header(sms='Yo Photo from %(sender_display_name)s.',
push='%(emoji)s Yo Photo %(from)s %(sender_display_name)s',
ending='\n\nTap to view: %(webclient_url)s',
yo_type='photo_yo', group_yo=False, is_default=True).save()
Header(sms=('Yo from %(sender_display_name)s to '
'\'%(group_name)s\' %(social_text)s.'),
push=('%(emoji)s Yo %(from)s %(sender_display_name)s to '
'%(group_name)s %(social_text)s'),
ending='\n\nTap to Yo back: %(webclient_url)s',
yo_type='default_yo', group_yo=True, is_default=True).save()
Header(sms=('Yo Photo from %(sender_display_name)s '
'via %(forwarded_from)s to \'%(group_name)s\' '
'%(social_text)s.'),
push=('%(emoji)s Yo Photo %(from)s %(forwarded_from)s '
'via %(sender_display_name)s to %(group_name)s'),
ending='\n\nTap to view: %(webclient_url)s',
yo_type='forwarded_photo_yo', group_yo=True,
is_default=True).save()
Header(sms=('Yo Link from %(sender_display_name)s '
'via %(forwarded_from)s to \'%(group_name)s\' '
'%(social_text)s.'),
push=('%(emoji)s Yo Link %(from)s %(forwarded_from)s '
'via %(sender_display_name)s to %(group_name)s'),
ending='\n\nTap to view: %(webclient_url)s',
yo_type='forwarded_yo', group_yo=True,
is_default=True).save()
Header(sms=('Yo Link from %(sender_display_name)s to '
'\'%(group_name)s\' %(social_text)s.'),
push=('%(emoji)s Yo Link %(from)s %(sender_display_name)s to '
'%(group_name)s'),
ending='\n\nTap to view: %(webclient_url)s',
yo_type='link_yo', group_yo=True, is_default=True).save()
Header(sms=('Yo Location from %(sender_display_name)s @ '
'%(city)s to \'%(group_name)s\' %(social_text)s.'),
push=('%(emoji)s Yo Location %(from)s %(sender_display_name)s @ '
'%(city)s to %(group_name)s'),
ending='\n\nTap to see where they are: %(webclient_url)s',
yo_type='location_city_yo', group_yo=True,
is_default=True).save()
Header(sms=('Yo Location from %(sender_display_name)s to '
'\'%(group_name)s\' %(social_text)s.'),
push=('%(emoji)s Yo Location %(from)s %(sender_display_name)s to '
'%(group_name)s'),
ending='\n\nTap to see where they are: %(webclient_url)s',
yo_type='location_yo', group_yo=True,
is_default=True).save()
Header(sms=('Yo Photo from %(sender_display_name)s to '
'\'%(group_name)s\' %(social_text)s.'),
push=('%(emoji)s Yo Photo %(from)s %(sender_display_name)s to '
'%(group_name)s'),
ending='\n\nTap to view: %(webclient_url)s',
yo_type='photo_yo', group_yo=True, is_default=True).save()
# Start mocking functions, objects and libraries.
self.geocoder_mock = self.geocoder_patcher.start()
self.parse_push_mock = self.parse_push_patcher.start()
self.parse_signup_mock = self.parse_signup_patcher.start()
self.parse_delete_mock = self.parse_delete_patcher.start()
self.parse_query_get_mock = self.parse_query_get_patcher.start()
self.parse_subscribe_mock = self.parse_subscribe_patcher.start()
self.parse_unsubscribe_mock = self.parse_unsubscribe_patcher.start()
self.send_grid_send_mock = self.send_grid_send_patcher.start()
self.s3_upload_image_mock = self.s3_upload_image_patcher.start()
self.facebook_get_profile_mock = self.facebook_get_profile_patcher.start()
self.facebook_get_picture_mock = self.facebook_get_picture_patcher.start()
# When patching a function on an instance of an object then only said
# instance will have the mocked function. One of the functions mocked
# on SNS is defined in the extension instance object. So we switch
# to the worker app context before starting the patcher.
with self.worker_app.app_context():
self.sns_subscribe_mock = self.sns_subscribe_patcher.start()
self.sns_unsubscribe_mock = self.sns_unsubscribe_patcher.start()
self.sns_publish_mock = self.sns_publish_patcher.start()
self.sns_create_endpoint_mock = self.sns_create_endpoint_patcher.start()
self.sns_create_topic_mock = self.sns_create_topic_patcher.start()
self.sns_delete_endpoint_mock = self.sns_delete_endpoint_patcher.start()
self.sns_set_endpoint_mock = self.sns_set_endpoint_patcher.start()
self.twilio_send_mock = self.twilio_send_patcher.start()
self.get_request_mock = self.get_request_patcher.start()
self.get_link_content_type_mock = self.get_link_content_type_patcher.start()
self.short_url_mock = self.short_url_patcher.start()
self.giphy_search_mock = self.giphy_search_patcher.start()
self.imgur_search_mock = self.imgur_search_patcher.start()
# Start the patcher to get the mock but stop it until it
# needs to be used.
self.experiment_logger_mock = self.experiment_logger_patcher.start()
self.experiment_logger_patcher.stop()
# Setup return values for mock objects.
self.get_request_mock.return_value.json.return_value = None
self.geocoder_mock.side_effect = GeocoderError('Error')
self.get_link_content_type_mock.return_value = 'application/unknown'
self.short_url_mock.return_value = None
self.s3_upload_image_mock.return_value = 'image.jpg'
self.sns_create_endpoint_mock.return_value = 'aws:is:great'
self.sns_create_topic_mock.return_value = 'aws:is:great'
self.sns_subscribe_mock.return_value = 'aws:is:great'
self.facebook_get_profile_mock.return_value = {}
# by setting is_silhouette the picture will be skipped.
self.facebook_get_picture_mock.return_value = {'is_silhouette': True}
self.giphy_search_mock.return_value = []
self.imgur_search_mock.return_value = []
self.addCleanup(self.tearDown)
def tearDown(self):
"""Runs after each job"""
# Stop mocking functions, objects and libraries.
patchers = [self.experiment_logger_patcher,
self.geocoder_patcher,
self.giphy_search_patcher,
self.facebook_get_profile_patcher,
self.facebook_get_picture_patcher,
self.imgur_search_patcher,
self.live_counter_patcher,
self.parse_push_patcher,
self.parse_signup_patcher,
self.parse_subscribe_patcher,
self.parse_unsubscribe_patcher,
self.parse_query_get_patcher,
self.parse_delete_patcher,
self.send_grid_send_patcher,
self.s3_upload_image_patcher,
self.sns_subscribe_patcher,
self.sns_unsubscribe_patcher,
self.sns_publish_patcher,
self.sns_create_endpoint_patcher,
self.sns_create_topic_patcher,
self.sns_set_endpoint_patcher,
self.sns_delete_endpoint_patcher,
self.twilio_send_patcher,
self.get_request_patcher,
self.get_link_content_type_patcher,
self.short_url_patcher]
for patcher in patchers:
try:
patcher.stop()
            except Exception:
# Patcher already stopped.
pass
# Check for failed items.
self.assertEquals(low_rq.failed_queue.count, 0)
self.assertEquals(high_rq.failed_queue.count, 0)
self.assertEquals(medium_rq.failed_queue.count, 0)
@classmethod
def _create_app(cls):
"""Creates a Flask application instance
We need to create the app in this base class even though the super class
        determines what app we are creating. Not following this pattern would mean
we need duplication of the code to push the context.
For testing non-endpoint related functionality we create an app straight
from the factory.
"""
return create_api_app('automated_tests', config='tests.config.Testing')
@classmethod
def _create_worker_app(cls):
"""Creates a Flask application instance
We need to create the app in this base class even though the super class
        determines what app we are creating. Not following this pattern would mean
we need duplication of the code to push the context.
For testing non-endpoint related functionality we create an app straight
from the factory.
"""
return create_worker_app('automated_tests_worker',
config='tests.config.Testing')
def become(self, user):
"""Impersates a user"""
identity = YoIdentity(str(user.id))
principals.set_identity(identity)
# Tell listeners that the identity has changed.
identity_changed.send(self.app, identity=identity)
def shortDescription(self):
"""Turns off doctstrings in verbose output"""
return None
def jsonpost(self, *args, **kwargs):
"""Convenience method for making JSON POST requests."""
kwargs.setdefault('content_type', 'application/json')
if 'data' in kwargs:
kwargs['data'] = json.dumps(kwargs['data'])
headers = Headers()
override_headers = kwargs.pop('headers', {})
if override_headers:
for k, v in override_headers.items():
headers.add(k, v)
if 'useragent' in kwargs:
useragent = kwargs.pop('useragent')
headers.add('User-Agent', useragent)
if 'jwt_token' in kwargs:
token = kwargs.pop('jwt_token')
if kwargs.pop('auth', False):
raise Exception('Can\'t use multiple identities')
headers.add('Authorization', 'Bearer ' + token)
elif kwargs.pop('auth', True):
token = self._user1_jwt
headers.add('Authorization', 'Bearer ' + token)
        if 'X-Yo-Installation-Id' not in headers:
headers.add('X-Yo-Installation-Id', self.installation_id)
# Set a quick JSON lookup attribute.
response = self.client.post(headers=headers, *args, **kwargs)
try:
response.json = json.loads(response.data)
        except Exception:
response.json = None
return response
def jsonput(self, *args, **kwargs):
"""Convenience method for making JSON PUT requests."""
kwargs.setdefault('content_type', 'application/json')
if 'data' in kwargs:
kwargs['data'] = json.dumps(kwargs['data'])
headers = Headers()
override_headers = kwargs.pop('headers', {})
if override_headers:
for k, v in override_headers.items():
headers.add(k, v)
if 'useragent' in kwargs:
useragent = kwargs.pop('useragent')
headers.add('User-Agent', useragent)
if 'jwt_token' in kwargs:
token = kwargs.pop('jwt_token')
if kwargs.pop('auth', False):
raise Exception('Can\'t use multiple identities')
headers.add('Authorization', 'Bearer ' + token)
elif kwargs.pop('auth', True):
token = self._user1_jwt
headers.add('Authorization', 'Bearer ' + token)
        if 'X-Yo-Installation-Id' not in headers:
headers.add('X-Yo-Installation-Id', self.installation_id)
# Set a quick JSON lookup attribute.
response = self.client.put(headers=headers, *args, **kwargs)
try:
response.json = json.loads(response.data)
        except Exception:
response.json = None
return response
|
iiag/iiag-legacy
|
src/io/sdl/display.c
|
//
// io/sdl/display.c
//
#include <stdio.h>
#include "../../log.h"
#ifndef WITH_SDL
void sdl_init(FILE *f) {
error("Cannot use SDL backend (not compiled in)");
}
#else
#include <SDL2/SDL.h>
#include <SDL2/SDL_ttf.h>
#include <stdarg.h>
#include <unistd.h>
#include "input.h"
#include "display.h"
#include "../input.h"
#include "../display.h"
#define TILE_RECT(x, y) {(x)*twidth, (y)*theight, twidth, theight}
SDL_Window *win;
unsigned int swidth, sheight, twidth, theight, trows, tcols;
SDL_Surface *disp;
static SDL_Surface **tiles;
static size_t sztiles;
TTF_Font *font;
const SDL_Color COL_WHITE={255, 255, 255, 255};
void sdl_put(int x, int y, int tile)
{
SDL_Rect r = TILE_RECT(x+1, y);
SDL_Rect ss = {0, theight, disp->w, disp->h-theight*3};
SDL_Rect tmp;
SDL_Surface *text;
char buf[16];
if(!SDL_IntersectRect(&ss, &r, &tmp)) {
return;
}
if(tile >= sztiles) {
snprintf(buf, 16, "%d", tile);
text = TTF_RenderUTF8_Solid(font, buf, COL_WHITE);
SDL_BlitSurface(text, NULL, disp, &r);
SDL_FreeSurface(text);
} else {
SDL_BlitScaled(tiles[tile], NULL, disp, &r);
}
}
void sdl_dim_update(int * x, int * y)
{
*x = tcols;
*y = trows - 2;
}
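// Zoom in powers of two: positive steps double the tile dimensions and
// negative steps halve them; the visible grid size is then recomputed.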
void sdl_zoom(int sx, int sy)
{
int w, h;
SDL_GetWindowSize(win, &w, &h);
if(sx > 0) {
twidth <<= sx;
} else {
twidth >>= -sx;
}
if(sy > 0) {
theight <<= sy;
} else {
theight >>= -sy;
}
tcols = w / twidth;
trows = h / theight;
}
void sdl_memo(const char * fmt, ...)
{
va_list vl;
char s[512];
SDL_Rect r = TILE_RECT(0, 0);
SDL_Surface *text;
r.w = twidth * tcols;
va_start(vl, fmt);
vsnprintf(s, 512, fmt, vl);
text = TTF_RenderUTF8_Solid(font, s, COL_WHITE);
SDL_FillRect(disp, &r, 0);
SDL_BlitSurface(text, NULL, disp, &r);
SDL_FreeSurface(text);
SDL_UpdateWindowSurface(win);
va_end(vl);
}
void sdl_statline(int line, const char * fmt, ...)
{
va_list vl;
char s[512];
SDL_Rect r = TILE_RECT(0, trows - 3 + line);
SDL_Surface *text;
r.w = twidth * tcols;
va_start(vl, fmt);
vsnprintf(s, 512, fmt, vl);
text = TTF_RenderUTF8_Solid(font, s, COL_WHITE);
SDL_FillRect(disp, &r, 0);
SDL_BlitSurface(text, NULL, disp, &r);
SDL_FreeSurface(text);
SDL_UpdateWindowSurface(win);
va_end(vl);
}
void sdl_clear(void) {
SDL_FillRect(disp, NULL, 0);
SDL_UpdateWindowSurface(win);
}
void sdl_refresh(void) {
SDL_UpdateWindowSurface(win);
}
void sdl_end(void) {
int i;
for(i = 0; i < sztiles; i++) {
SDL_FreeSurface(tiles[i]);
}
SDL_FreeSurface(disp);
SDL_DestroyWindow(win);
SDL_Quit();
}
void sdl_init(FILE *conf)
{
char dirname[256], fname[256], oldwd[256], curwd[256];
unsigned long fcol, bcol;
size_t i;
SDL_Surface *fmul, *img;
info("Initializing SDL graphics backend...");
graphics_put = sdl_put;
graphics_end = sdl_end;
graphics_dim_update = sdl_dim_update;
graphics_clear = sdl_clear;
graphics_zoom = sdl_zoom;
disp_refresh = sdl_refresh;
memo = sdl_memo;
statline = sdl_statline;
name_from_key = sdl_name_from_key;
key_from_name = sdl_key_from_name;
input_get_ctrl = sdl_get_ctrl;
input_get_key = sdl_get_key;
input_prompt_dir = sdl_prompt_dir;
input_prompt_inv = sdl_prompt_inv;
input_prompt_equipped = sdl_prompt_equipped;
input_prompt_string = sdl_prompt_string;
if(SDL_Init(SDL_INIT_EVERYTHING) || TTF_Init()) {
fatal("Could not initialize SDL: %s / ", SDL_GetError(), TTF_GetError());
}
if(!(win = SDL_CreateWindow("iiag", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 640, 480, SDL_WINDOW_RESIZABLE|SDL_WINDOW_SHOWN))) {
fatal("Could not create a window: %s", SDL_GetError());
}
// TODO not compiled in font name
if(!(font = TTF_OpenFont("GohuFont-Medium.ttf", 8))) {
fatal("Could not open font: %s", TTF_GetError());
}
if(!(disp = SDL_GetWindowSurface(win))) {
fatal("Could not create drawable: %s", SDL_GetError());
}
fscanf(conf, "%s", dirname);
fclose(conf);
getcwd(oldwd, 256);
debug("Current working directory: %s", oldwd);
debug("Changing to %s...", dirname);
if(chdir(dirname)) {
fatal("Couldn't chdir to %s; aborting.", dirname);
}
getcwd(curwd, 256);
debug("Current working directory: %s", curwd);
if(!(conf = fopen("tileset", "r"))) {
fatal("Could not open tileset; aborting.");
}
fscanf(conf, "%zd %d %d %d %d", &sztiles, &swidth, &sheight, &twidth, &theight);
tcols = 640 / twidth;
trows = 480 / theight;
tiles = malloc(sztiles*sizeof(SDL_Surface *));
if(!tiles) {
fatal("Could not allocate graphics memory");
}
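	/* fmul is a solid foreground-color surface blitted with multiply blending
	   (SDL_BLENDMODE_MOD) to tint each tile bitmap; bcol then fills the
	   background behind the color-keyed blit */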
fmul = SDL_CreateRGBSurface(0, twidth, theight, 24, 0xff, 0xff00, 0xff0000, 0);
SDL_SetSurfaceBlendMode(fmul, SDL_BLENDMODE_MOD);
for(i=0; i<sztiles; i++) {
fscanf(conf, "%s %lx %lx", fname, &fcol, &bcol);
SDL_FillRect(fmul, NULL, fcol);
img = SDL_LoadBMP(fname);
SDL_BlitSurface(fmul, NULL, img, NULL);
SDL_SetColorKey(img, SDL_TRUE, 0);
tiles[i] = SDL_CreateRGBSurface(0, swidth, sheight, 24, 0xff, 0xff00, 0xff0000, 0);
SDL_FillRect(tiles[i], NULL, bcol);
SDL_BlitSurface(img, NULL, tiles[i], NULL);
SDL_FreeSurface(img);
}
SDL_FreeSurface(fmul);
chdir(oldwd);
info("SDL initialized.");
debug("SDL Parameters:\nTile dimensions: %dx%d\nScreen dimensions: %dx%d", twidth, theight, tcols, trows);
}
#endif
|
mazhar-ansari-ardeh/gpucarp
|
src/tl/knowledge/sst/package-info.java
|
/**
* This package contains the implementation of the Search-Space Transfer idea.
*/
package tl.knowledge.sst;
|
ZmeiDev/stormtroopers
|
public/dist/services/authentication/auth.service.js
|
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var core_1 = require('@angular/core');
require('rxjs/add/observable/throw');
var requester_service_1 = require('../shared/requester.service');
var local_storage_service_1 = require('../shared/local-storage.service');
var navbar_service_1 = require('../shared/navbar.service');
var LOGIN_URL = '/api/sing-in';
var AuthService = (function () {
function AuthService(_requester, _localeStarageService, _navbarService) {
this._requester = _requester;
this._localeStarageService = _localeStarageService;
this._navbarService = _navbarService;
}
AuthService.prototype.login = function (userCreds) {
var _this = this;
var body = "username=" + userCreds.username + "&password=" + userCreds.password;
return this._requester
.postEncoded(LOGIN_URL, body)
.do(function (data) {
_this._navbarService.updateUserInfo(data.body);
_this._localeStarageService.updateToken(data.body);
});
};
AuthService.prototype.logout = function () {
this._navbarService.updateUserInfo({ side: 'neutral' });
this._localeStarageService.deleteUser();
};
AuthService.prototype.isLoggedIn = function () {
var isUserLoggedIn = this._localeStarageService.getUser();
if (isUserLoggedIn) {
return true;
}
return false;
};
AuthService = __decorate([
core_1.Injectable(),
__metadata('design:paramtypes', [requester_service_1.RequesterService, local_storage_service_1.LocalStorageService, navbar_service_1.NavbarService])
], AuthService);
return AuthService;
}());
exports.AuthService = AuthService;
//# sourceMappingURL=auth.service.js.map
|
ramrod-project/database-brain
|
schema/test_put_and_get_binary.py
|
"""
test CRUD ops
put, list_dir, get, delete
"""
from os import environ
from dict_to_protobuf import protobuf_to_dict
from pytest import fixture, raises
import docker
from time import time
from .brain import connect, r
from .brain.binary.data import put, get, list_dir, delete, put_buffer
from .brain.queries import RBF
from .brain.brain_pb2 import Binary
CLIENT = docker.from_env()
TEST_FILE_NAME = "TEST_FILE.txt"
BIG_TEST_FILE_NAME = "BIG_TEST_FILE.txt"
TEST_FILE_CONTENT = "content data is binary 灯火 标 and string stuff ".encode('utf-8')
TEST_TEXT_NAME = "TEST_TEXT.txt"
TEST_TEXT_CONTENT = "standard text stuff"
@fixture(scope='module')
def rethink():
tag = environ.get("TRAVIS_BRANCH", "dev").replace("master", "latest")
container_name = "brainmoduletestCRUD"
CLIENT.containers.run(
"ramrodpcp/database-brain:{}".format(tag),
name=container_name,
detach=True,
ports={"28015/tcp": 28015},
remove=True
)
yield True
# Teardown for module tests
containers = CLIENT.containers.list()
for container in containers:
if container.name == container_name:
container.stop()
break
def test_ensure_files_table_exists(rethink):
try:
r.db("Brain").table_create("Files").run(connect())
except r.ReqlOpFailedError:
pass # table may already exist and that's ok
RBF.run(connect()) # test it can pull a cursor
def test_put_binary(rethink):
bin_obj = Binary()
bin_obj.Name = TEST_FILE_NAME
bin_obj.Content = TEST_FILE_CONTENT
obj_dict = protobuf_to_dict(bin_obj)
put(obj_dict)
assert TEST_FILE_NAME in list_dir()
def test_put_binary_again(rethink):
assert TEST_FILE_NAME in list_dir()
response = put_buffer(TEST_FILE_NAME, TEST_FILE_CONTENT)
assert response['errors'] == 1
def test_obj_in_listing(rethink):
assert TEST_FILE_NAME in list_dir()
def test_get_file(rethink):
assert get(TEST_FILE_NAME)['Content'] == TEST_FILE_CONTENT
def test_remove_file(rethink):
assert TEST_FILE_NAME in list_dir()
assert delete(TEST_FILE_NAME)
assert TEST_FILE_NAME not in list_dir()
def test_remove_non_existant_file(rethink):
assert TEST_FILE_NAME not in list_dir()
assert delete(TEST_FILE_NAME)
def test_verify_put_command(rethink):
bin_obj = Binary()
bin_obj.Name = TEST_FILE_NAME
bin_obj.Content = TEST_FILE_CONTENT
obj_dict = protobuf_to_dict(bin_obj)
put(obj_dict, verify=True)
def test_huge_insert_split(rethink):
"""
134217727 is the biggest query size
make an object bigger than that
add the overhead of the other query params, should be over
:param rethink:
:return:
"""
big_content = ("a"*134217727).encode("utf-8")
bin_obj = Binary()
bin_obj.Name = BIG_TEST_FILE_NAME
bin_obj.Content = TEST_FILE_CONTENT
bin_obj.Timestamp = time()
obj_dict = protobuf_to_dict(bin_obj)
obj_dict["Content"] = big_content
resp = put(obj_dict)
assert resp["inserted"] == 3
def test_huge_insert_again(rethink):
assert BIG_TEST_FILE_NAME in list_dir()
post_count = RBF.count().run(connect())
big_content = ("a" * 134217727).encode("utf-8")
response = put_buffer(BIG_TEST_FILE_NAME, big_content)
post_count_after = RBF.count().run(connect())
assert post_count_after == post_count
assert response['errors'] == 1
def test_list_dir_large_files(rethink):
the_dir = list_dir()
assert BIG_TEST_FILE_NAME in the_dir
assert BIG_TEST_FILE_NAME + "001" not in the_dir
def test_huge_split_read(rethink):
assert get(BIG_TEST_FILE_NAME)["Content"] == ("a"*134217727).encode("utf-8")
def test_delete_split(rethink):
pre_count = RBF.count().run(connect())
assert delete(BIG_TEST_FILE_NAME)
assert BIG_TEST_FILE_NAME not in list_dir()
post_count = RBF.count().run(connect())
assert pre_count - post_count == 3
def test_put_text_file(rethink):
basic_put_object = {"Name": TEST_TEXT_NAME,
"Content": TEST_TEXT_CONTENT}
put(basic_put_object)
assert get(TEST_TEXT_NAME)["Content"] == TEST_TEXT_CONTENT
|
johnoliver/bnd
|
biz.aQute.repository/src/aQute/p2/provider/ArtifactRepository.java
|
package aQute.p2.provider;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.osgi.framework.Version;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import aQute.lib.converter.Converter;
import aQute.lib.converter.TypeReference;
import aQute.lib.filter.Filter;
import aQute.libg.sed.Domain;
import aQute.libg.sed.ReplacerAdapter;
import aQute.p2.api.Artifact;
import aQute.p2.api.Classifier;
/**
* @formatter:off
* <pre>
* <?xml version='1.0' encoding='UTF-8'?>
* <?artifactRepository version='1.1.0'?>
* <repository name='Bndtools' type='org.eclipse.equinox.p2.artifact.repository.simpleRepository' version='1'>
* <properties size='2'>
* <property name='p2.timestamp' value='1463781466748'/>
* <property name='p2.compressed' value='true'/>
* </properties>
* <mappings size='3'>
* <rule filter='(& (classifier=osgi.bundle))' output='${repoUrl}/plugins/${id}_${version}.jar'/>
* <rule filter='(& (classifier=binary))' output='${repoUrl}/binary/${id}_${version}'/>
* <rule filter='(& (classifier=org.eclipse.update.feature))' output='${repoUrl}/features/${id}_${version}.jar'/>
* </mappings>
* <artifacts size='22'>
* <artifact classifier='osgi.bundle' id='org.bndtools.versioncontrol.ignores.plugin.git' version='3.3.0.201605202157'>
* <properties size='3'>
* <property name='artifact.size' value='9356'/>
* <property name='download.size' value='9356'/>
* <property name='download.md5' value='745f389a49189112a785848ad466097b'/>
* </properties>
* </artifact>
* </artifacts>
* </pre>
* @formatter:on
*/
class ArtifactRepository extends XML {
static class Rule {
final Filter filter;
String output;
Rule(String filter, String output) {
this.filter = new Filter(filter);
this.output = output;
}
public boolean matches(Map<String, String> map) throws Exception {
return filter.matchMap(map);
}
}
public static class XMLArtifact {
public String classifier;
public String id;
public String version;
public String format;
}
List<Rule> rules;
List<Artifact> artifacts = new ArrayList<>();
private URI base;
ArtifactRepository(InputStream in, URI base) throws Exception {
super(getDocument(in));
this.base = base;
parse();
}
private Rule createRule(Node ruleNode) {
String filter = getAttribute(ruleNode, "filter");
String output = getAttribute(ruleNode, "output");
return new Rule(filter, output);
}
void parse() throws Exception {
final Map<String, String> properties = getProperties("repository/properties/property");
properties.put("repoUrl", base.resolve("")
.toString());
final Domain parent = new Domain() {
@Override
public Map<String, String> getMap() {
return properties;
}
@Override
public Domain getParent() {
return null;
}
};
rules = getRules();
NodeList artifactNodes = getNodes("repository/artifacts/artifact");
for (int i = 0; i < artifactNodes.getLength(); i++) {
final Node artifactNode = artifactNodes.item(i)
.cloneNode(true);
final XMLArtifact xmlArtifact = getFromType(artifactNode, XMLArtifact.class);
final Map<String, String> map = Converter.cnv(new TypeReference<Map<String, String>>() {}, xmlArtifact);
Classifier classifier = null;
if (Classifier.BUNDLE.name.equals(xmlArtifact.classifier)) {
classifier = Classifier.BUNDLE;
} else if (Classifier.FEATURE.name.equals(xmlArtifact.classifier)) {
classifier = Classifier.FEATURE;
}
if (classifier != null) {
Domain domain = new Domain() {
@Override
public Map<String, String> getMap() {
return map;
}
@Override
public Domain getParent() {
return parent;
}
};
ReplacerAdapter ra = new ReplacerAdapter(domain);
for (Rule r : rules) {
if (r.matches(map)) {
String s = ra.process(r.output);
URI uri = new URI(s).normalize();
Artifact artifact = new Artifact();
artifact.classifier = classifier;
artifact.uri = uri;
artifact.id = xmlArtifact.id;
artifact.version = new Version(xmlArtifact.version);
artifact.md5 = getProperties(artifactNode, "properties/property").get("download.md5");
artifacts.add(artifact);
break;
}
}
}
}
}
	/**
	 * Parse the {@code repository/mappings/rule} elements into {@link Rule}
	 * instances that map artifact attributes (such as classifier, id and
	 * version) to a download location template.
	 *
	 * @return the list of mapping rules
	 * @throws Exception
	 */
List<Rule> getRules() throws Exception {
List<Rule> rules = new ArrayList<>();
NodeList ruleNodes = getNodes("repository/mappings/rule");
for (int i = 0; i < ruleNodes.getLength(); i++) {
Node ruleNode = ruleNodes.item(i);
Rule rule = createRule(ruleNode);
rules.add(rule);
}
return rules;
}
public List<Artifact> getArtifacts() {
return artifacts;
}
}
|
ByronLian/algorithm-javascript
|
Leetcode/count-binary-substrings.js
|
// https://leetcode.com/problems/count-binary-substrings/
// Runtime: 84 ms, faster than 92.06% of JavaScript online submissions for Count Binary Substrings.
// Memory Usage: 47 MB, less than 6.35% of JavaScript online submissions for Count Binary Substrings.
/*
* @param {string} s
* @return {number}
*/
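// Example, taken from the problem statement: countBinarySubstrings("00110011")
// returns 6, the valid substrings being "0011", "01", "1100", "10", "0011", "01".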
var countBinarySubstrings = function (s) {
const zero = "0";
const one = "1";
let stackZero = [];
let stackOne = [];
let count = 0;
let prev = s[0];
for (let i = 0; i < s.length; i++) {
if (s[i] === zero) {
if (prev !== zero) {
stackZero = [];
prev = zero;
}
stackZero.push(zero);
if (stackOne.pop() === one) count++;
} else {
if (prev !== one) {
stackOne = [];
prev = one;
}
stackOne.push(one);
if (stackZero.pop() === zero) count++;
}
}
return count;
};
|
miseri/rtp_plus_plus
|
src/Libs/CodecUtils/BitStreamReader.cpp
|
/** @file
MODULE : BitStreamReader
TAG : BSR
FILE NAME : BitStreamReader.cpp
DESCRIPTION : A bit stream reader implementation of the BitStreamBase
base class. Add the functionality to do the reading.
REVISION HISTORY :
:
COPYRIGHT : (c)VICS 2000-2006 all rights reserved - <EMAIL>
RESTRICTIONS : The information/data/code contained within this file is
the property of VICS limited and has been classified as
CONFIDENTIAL.
===========================================================================
*/
#ifdef _WINDOWS
#define WIN32_LEAN_AND_MEAN // Exclude rarely-used stuff from Windows headers
#include <windows.h>
#else
#include <stdio.h>
#endif
#include "BitStreamReader.h"
BitStreamReader::BitStreamReader()
{
}//end constructor.
BitStreamReader::~BitStreamReader()
{
}//end destructor.
/** Read a single bit.
Read from the current bit position in the stream and advance the position.
@return : The bit [0,1].
*/
int BitStreamReader::Read(void)
{
// Strip out the bit.
int codeBit = (int)(_bitStream[_bytePos] >> _bitPos) & 1;
// Point to next available bit.
if(_bitPos < 7)
_bitPos++;
else
{
_bitPos = 0;
_bytePos++;
}//end else...
return(codeBit);
}//end Read.
/** Read bits from the stream.
Read multiple bits from the least significant bit upwards
from the current stream position.
@param numBits : No. of bits to read.
@return : The code.
*/
int BitStreamReader::Read(int numBits)
{
int pos = _bitPos;
int b = 0;
for(int i = 0; i < numBits; i++)
{
// Strip out the next bit and update in the bit position i.
if((_bitStream[_bytePos] >> pos) & 1)
b = (int)(b | (1 << i)); // bit=1
else
b = (int)(b & ~(1 << i)); // bit=0
// Point to next available bit.
if(pos < 7)
pos++;
else
{
pos = 0;
_bytePos++;
}//end else...
}//end for i...
// Update the global next bit position.
_bitPos = pos;
// Output the result.
return(b);
}//end Read.
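/** Worked example (illustrative only, not part of the original module).
With the current byte equal to 0xB2 (binary 10110010) and _bitPos = 1,
Read(3) strips bit positions 1, 2 and 3 of that byte (values 1, 0, 0),
packs them LSB-first into the result, returns 1 and leaves _bitPos = 4.
*/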
/** Peek bits in the stream.
Read multiple bits from the least significant bit upwards
from the specified stream position without disturbing the
current stream position.
@param bitLoc : Bit pos in stream.
@param numBits : No. of bits to read.
@return : The code.
*/
int BitStreamReader::Peek(int bitLoc, int numBits)
{
int bytePos = bitLoc / 8;
int bitPos = bitLoc % 8;
int b = 0;
for(int i = 0; i < numBits; i++)
{
// Strip out the next bit and update in the bit position i.
if((_bitStream[bytePos] >> bitPos) & 1)
b = (int)(b | (1 << i)); // bit=1
else
b = (int)(b & ~(1 << i)); // bit=0
// Point to next available bit.
if(bitPos < 7)
bitPos++;
else
{
bitPos = 0;
bytePos++;
}//end else...
}//end for i...
// Output the result.
return(b);
}//end Peek.
|
WCry/demo
|
spring-cloud/springcloud-sso/spring-security-oauth2-master/client/src/main/java/com/crhms/security/client/config/UiSecurityConfig.java
|
package com.crhms.security.client.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.security.oauth2.client.EnableOAuth2Sso;
import org.springframework.boot.autoconfigure.security.oauth2.resource.ResourceServerProperties;
import org.springframework.boot.autoconfigure.security.oauth2.resource.UserInfoTokenServices;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.Authentication;
import org.springframework.security.oauth2.client.OAuth2ClientContext;
import org.springframework.security.oauth2.client.OAuth2RestTemplate;
import org.springframework.security.oauth2.client.filter.OAuth2ClientAuthenticationProcessingFilter;
import org.springframework.security.oauth2.client.filter.OAuth2ClientContextFilter;
import org.springframework.security.oauth2.client.token.grant.code.AuthorizationCodeResourceDetails;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableOAuth2Client;
import org.springframework.security.web.authentication.SimpleUrlAuthenticationSuccessHandler;
import org.springframework.security.web.authentication.logout.LogoutFilter;
import org.springframework.security.web.authentication.www.BasicAuthenticationFilter;
import javax.servlet.Filter;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
@Configuration
@EnableOAuth2Sso
public class UiSecurityConfig extends WebSecurityConfigurerAdapter {
// @Autowired
// @Qualifier("ssoLogoutfilterRegistrationBean")
// private FilterRegistrationBean ssoLogoutfilterRegistrationBean;
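    // The override below permits "/" and "/login**" anonymously, requires
    // authentication for every other request, and disables CSRF for this
    // OAuth2 SSO demo client.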
@Override
public void configure(HttpSecurity http) throws Exception {
http.antMatcher("/**")
.authorizeRequests()
.antMatchers("/", "/login**")
.permitAll().anyRequest().authenticated()
.and()
//.addFilterBefore(oauth2ClientAuthenticationProcessingFilter,BasicAuthenticationFilter.class)
.csrf().disable()
//.addFilterBefore(ssoLogoutfilterRegistrationBean.getFilter(), LogoutFilter.class)
;
}
}
|
blackdeve/interp
|
erp/assets/js/pages/ui/notifications.js
|
(function ($) {
'use strict';
$(function () {
$('.js-positions .btn').on('click', function () {
var type = $(this).data('type');
var position = $(this).data('position');
$('#toast-container').remove();
toastr.options = {
"closeButton": true,
"debug": false,
"newestOnTop": false,
"progressBar": true,
"positionClass": "toast-" + position,
"preventDuplicates": false,
"onclick": null,
"showDuration": "300",
"hideDuration": "1000",
"timeOut": "3000",
"extendedTimeOut": "1000",
"showEasing": "swing",
"hideEasing": "linear",
"showMethod": "fadeIn",
"hideMethod": "fadeOut"
}
toastr[type]("Responsive Admin Template", "AdminBSB Sensitive");
});
$('.js-types .btn').on('click', function () {
var type = $(this).data('type');
toastr.options = {
"closeButton": true,
"debug": false,
"newestOnTop": false,
"progressBar": true,
"positionClass": "toast-top-right",
"preventDuplicates": false,
"onclick": null,
"showDuration": "300",
"hideDuration": "1000",
"timeOut": "4000",
"extendedTimeOut": "1000",
"showEasing": "swing",
"hideEasing": "linear",
"showMethod": "fadeIn",
"hideMethod": "fadeOut"
}
toastr[type]("Responsive Admin Template", "AdminBSB Sensitive");
});
//Init switch buttons
var $switchButtons = Array.prototype.slice.call(document.querySelectorAll('.js-switch'));
$switchButtons.forEach(function (e) {
var size = $(e).data('size');
var options = {};
options['color'] = '#009688';
if (size !== undefined) options['size'] = size;
var switchery = new Switchery(e, options);
});
});
}(jQuery))
|
theclashingfritz/Cog-Invasion-Online-Dump
|
aifc.py
|
# uncompyle6 version 3.2.4
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.15 (v2.7.15:ca079a3ea3, Apr 30 2018, 16:30:26) [MSC v.1500 64 bit (AMD64)]
# Embedded file name: aifc
import struct, __builtin__
__all__ = [
'Error', 'open', 'openfp']
class Error(Exception):
pass
_AIFC_version = 2726318400L
def _read_long(file):
try:
return struct.unpack('>l', file.read(4))[0]
except struct.error:
raise EOFError
def _read_ulong(file):
try:
return struct.unpack('>L', file.read(4))[0]
except struct.error:
raise EOFError
def _read_short(file):
try:
return struct.unpack('>h', file.read(2))[0]
except struct.error:
raise EOFError
def _read_ushort(file):
try:
return struct.unpack('>H', file.read(2))[0]
except struct.error:
raise EOFError
def _read_string(file):
length = ord(file.read(1))
if length == 0:
data = ''
else:
data = file.read(length)
if length & 1 == 0:
dummy = file.read(1)
return data
_HUGE_VAL = 1.79769313486231e+308
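# _read_float/_write_float handle the 80-bit IEEE 754 extended-precision value
# that AIFF uses for the sample rate in the COMM chunk: a sign/exponent short
# followed by two unsigned longs holding the 64-bit mantissa.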
def _read_float(f):
expon = _read_short(f)
sign = 1
if expon < 0:
sign = -1
expon = expon + 32768
himant = _read_ulong(f)
lomant = _read_ulong(f)
if expon == himant == lomant == 0:
f = 0.0
else:
if expon == 32767:
f = _HUGE_VAL
else:
expon = expon - 16383
f = (himant * 4294967296L + lomant) * pow(2.0, expon - 63)
return sign * f
def _write_short(f, x):
f.write(struct.pack('>h', x))
def _write_ushort(f, x):
f.write(struct.pack('>H', x))
def _write_long(f, x):
f.write(struct.pack('>l', x))
def _write_ulong(f, x):
f.write(struct.pack('>L', x))
def _write_string(f, s):
if len(s) > 255:
raise ValueError('string exceeds maximum pstring length')
f.write(struct.pack('B', len(s)))
f.write(s)
if len(s) & 1 == 0:
f.write(chr(0))
def _write_float(f, x):
import math
if x < 0:
sign = 32768
x = x * -1
else:
sign = 0
if x == 0:
expon = 0
himant = 0
lomant = 0
else:
fmant, expon = math.frexp(x)
if expon > 16384 or fmant >= 1 or fmant != fmant:
expon = sign | 32767
himant = 0
lomant = 0
else:
expon = expon + 16382
if expon < 0:
fmant = math.ldexp(fmant, expon)
expon = 0
expon = expon | sign
fmant = math.ldexp(fmant, 32)
fsmant = math.floor(fmant)
himant = long(fsmant)
fmant = math.ldexp(fmant - fsmant, 32)
fsmant = math.floor(fmant)
lomant = long(fsmant)
_write_ushort(f, expon)
_write_ulong(f, himant)
_write_ulong(f, lomant)
from chunk import Chunk
class Aifc_read():
def initfp(self, file):
self._version = 0
self._decomp = None
self._convert = None
self._markers = []
self._soundpos = 0
self._file = file
chunk = Chunk(file)
if chunk.getname() != 'FORM':
raise Error, 'file does not start with FORM id'
formdata = chunk.read(4)
if formdata == 'AIFF':
self._aifc = 0
else:
if formdata == 'AIFC':
self._aifc = 1
else:
raise Error, 'not an AIFF or AIFF-C file'
self._comm_chunk_read = 0
while 1:
self._ssnd_seek_needed = 1
try:
chunk = Chunk(self._file)
except EOFError:
break
chunkname = chunk.getname()
if chunkname == 'COMM':
self._read_comm_chunk(chunk)
self._comm_chunk_read = 1
else:
if chunkname == 'SSND':
self._ssnd_chunk = chunk
dummy = chunk.read(8)
self._ssnd_seek_needed = 0
else:
if chunkname == 'FVER':
self._version = _read_ulong(chunk)
else:
if chunkname == 'MARK':
self._readmark(chunk)
chunk.skip()
if not self._comm_chunk_read or not self._ssnd_chunk:
raise Error, 'COMM chunk and/or SSND chunk missing'
if self._aifc and self._decomp:
import cl
params = [
cl.ORIGINAL_FORMAT, 0,
cl.BITS_PER_COMPONENT, self._sampwidth * 8,
cl.FRAME_RATE, self._framerate]
if self._nchannels == 1:
params[1] = cl.MONO
else:
if self._nchannels == 2:
params[1] = cl.STEREO_INTERLEAVED
else:
raise Error, 'cannot compress more than 2 channels'
self._decomp.SetParams(params)
return
def __init__(self, f):
if type(f) == type(''):
f = __builtin__.open(f, 'rb')
self.initfp(f)
def getfp(self):
return self._file
def rewind(self):
self._ssnd_seek_needed = 1
self._soundpos = 0
def close(self):
if self._decomp:
self._decomp.CloseDecompressor()
self._decomp = None
self._file.close()
return
def tell(self):
return self._soundpos
def getnchannels(self):
return self._nchannels
def getnframes(self):
return self._nframes
def getsampwidth(self):
return self._sampwidth
def getframerate(self):
return self._framerate
def getcomptype(self):
return self._comptype
def getcompname(self):
return self._compname
def getparams(self):
return (
self.getnchannels(), self.getsampwidth(),
self.getframerate(), self.getnframes(),
self.getcomptype(), self.getcompname())
def getmarkers(self):
if len(self._markers) == 0:
return None
return self._markers
def getmark(self, id):
for marker in self._markers:
if id == marker[0]:
return marker
raise Error, 'marker %r does not exist' % (id,)
def setpos(self, pos):
if pos < 0 or pos > self._nframes:
raise Error, 'position not in range'
self._soundpos = pos
self._ssnd_seek_needed = 1
def readframes(self, nframes):
if self._ssnd_seek_needed:
self._ssnd_chunk.seek(0)
dummy = self._ssnd_chunk.read(8)
pos = self._soundpos * self._framesize
if pos:
self._ssnd_chunk.seek(pos + 8)
self._ssnd_seek_needed = 0
if nframes == 0:
return ''
data = self._ssnd_chunk.read(nframes * self._framesize)
if self._convert and data:
data = self._convert(data)
self._soundpos = self._soundpos + len(data) // (self._nchannels * self._sampwidth)
return data
def _decomp_data(self, data):
import cl
dummy = self._decomp.SetParam(cl.FRAME_BUFFER_SIZE, len(data) * 2)
return self._decomp.Decompress(len(data) // self._nchannels, data)
def _ulaw2lin(self, data):
import audioop
return audioop.ulaw2lin(data, 2)
def _adpcm2lin(self, data):
import audioop
if not hasattr(self, '_adpcmstate'):
self._adpcmstate = None
data, self._adpcmstate = audioop.adpcm2lin(data, 2, self._adpcmstate)
return data
def _read_comm_chunk(self, chunk):
self._nchannels = _read_short(chunk)
self._nframes = _read_long(chunk)
self._sampwidth = (_read_short(chunk) + 7) // 8
self._framerate = int(_read_float(chunk))
self._framesize = self._nchannels * self._sampwidth
if self._aifc:
kludge = 0
if chunk.chunksize == 18:
kludge = 1
print 'Warning: bad COMM chunk size'
chunk.chunksize = 23
self._comptype = chunk.read(4)
if kludge:
length = ord(chunk.file.read(1))
if length & 1 == 0:
length = length + 1
chunk.chunksize = chunk.chunksize + length
chunk.file.seek(-1, 1)
self._compname = _read_string(chunk)
if self._comptype != 'NONE':
if self._comptype == 'G722':
try:
import audioop
except ImportError:
pass
else:
self._convert = self._adpcm2lin
self._framesize = self._framesize // 4
return
try:
import cl
except ImportError:
if self._comptype == 'ULAW':
try:
import audioop
self._convert = self._ulaw2lin
self._framesize = self._framesize // 2
return
except ImportError:
pass
raise Error, 'cannot read compressed AIFF-C files'
if self._comptype == 'ULAW':
scheme = cl.G711_ULAW
self._framesize = self._framesize // 2
else:
if self._comptype == 'ALAW':
scheme = cl.G711_ALAW
self._framesize = self._framesize // 2
else:
raise Error, 'unsupported compression type'
self._decomp = cl.OpenDecompressor(scheme)
self._convert = self._decomp_data
else:
self._comptype = 'NONE'
self._compname = 'not compressed'
def _readmark(self, chunk):
nmarkers = _read_short(chunk)
try:
for i in range(nmarkers):
id = _read_short(chunk)
pos = _read_long(chunk)
name = _read_string(chunk)
if pos or name:
self._markers.append((id, pos, name))
except EOFError:
print 'Warning: MARK chunk contains only',
print len(self._markers),
if len(self._markers) == 1:
print 'marker',
else:
print 'markers',
print 'instead of', nmarkers
class Aifc_write():
def __init__(self, f):
if type(f) == type(''):
filename = f
f = __builtin__.open(f, 'wb')
else:
filename = '???'
self.initfp(f)
if filename[-5:] == '.aiff':
self._aifc = 0
else:
self._aifc = 1
def initfp(self, file):
self._file = file
self._version = _AIFC_version
self._comptype = 'NONE'
self._compname = 'not compressed'
self._comp = None
self._convert = None
self._nchannels = 0
self._sampwidth = 0
self._framerate = 0
self._nframes = 0
self._nframeswritten = 0
self._datawritten = 0
self._datalength = 0
self._markers = []
self._marklength = 0
self._aifc = 1
return
def __del__(self):
if self._file:
self.close()
def aiff(self):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
self._aifc = 0
def aifc(self):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
self._aifc = 1
def setnchannels(self, nchannels):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
if nchannels < 1:
raise Error, 'bad # of channels'
self._nchannels = nchannels
def getnchannels(self):
if not self._nchannels:
raise Error, 'number of channels not set'
return self._nchannels
def setsampwidth(self, sampwidth):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
if sampwidth < 1 or sampwidth > 4:
raise Error, 'bad sample width'
self._sampwidth = sampwidth
def getsampwidth(self):
if not self._sampwidth:
raise Error, 'sample width not set'
return self._sampwidth
def setframerate(self, framerate):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
if framerate <= 0:
raise Error, 'bad frame rate'
self._framerate = framerate
def getframerate(self):
if not self._framerate:
raise Error, 'frame rate not set'
return self._framerate
def setnframes(self, nframes):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
self._nframes = nframes
def getnframes(self):
return self._nframeswritten
def setcomptype(self, comptype, compname):
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
if comptype not in ('NONE', 'ULAW', 'ALAW', 'G722'):
raise Error, 'unsupported compression type'
self._comptype = comptype
self._compname = compname
def getcomptype(self):
return self._comptype
def getcompname(self):
return self._compname
def setparams(self, info):
nchannels, sampwidth, framerate, nframes, comptype, compname = info
if self._nframeswritten:
raise Error, 'cannot change parameters after starting to write'
if comptype not in ('NONE', 'ULAW', 'ALAW', 'G722'):
raise Error, 'unsupported compression type'
self.setnchannels(nchannels)
self.setsampwidth(sampwidth)
self.setframerate(framerate)
self.setnframes(nframes)
self.setcomptype(comptype, compname)
def getparams(self):
if not self._nchannels or not self._sampwidth or not self._framerate:
raise Error, 'not all parameters set'
return (self._nchannels, self._sampwidth, self._framerate,
self._nframes, self._comptype, self._compname)
def setmark(self, id, pos, name):
if id <= 0:
raise Error, 'marker ID must be > 0'
if pos < 0:
raise Error, 'marker position must be >= 0'
if type(name) != type(''):
raise Error, 'marker name must be a string'
for i in range(len(self._markers)):
if id == self._markers[i][0]:
self._markers[i] = (
id, pos, name)
return
self._markers.append((id, pos, name))
def getmark(self, id):
for marker in self._markers:
if id == marker[0]:
return marker
raise Error, 'marker %r does not exist' % (id,)
def getmarkers(self):
if len(self._markers) == 0:
return None
return self._markers
def tell(self):
return self._nframeswritten
def writeframesraw(self, data):
self._ensure_header_written(len(data))
nframes = len(data) // (self._sampwidth * self._nchannels)
if self._convert:
data = self._convert(data)
self._file.write(data)
self._nframeswritten = self._nframeswritten + nframes
self._datawritten = self._datawritten + len(data)
def writeframes(self, data):
self.writeframesraw(data)
if self._nframeswritten != self._nframes or self._datalength != self._datawritten:
self._patchheader()
def close(self):
if self._file is None:
return
try:
self._ensure_header_written(0)
if self._datawritten & 1:
self._file.write(chr(0))
self._datawritten = self._datawritten + 1
self._writemarkers()
if self._nframeswritten != self._nframes or self._datalength != self._datawritten or self._marklength:
self._patchheader()
if self._comp:
self._comp.CloseCompressor()
self._comp = None
finally:
self._convert = None
f = self._file
self._file = None
f.close()
return
def _comp_data(self, data):
import cl
dummy = self._comp.SetParam(cl.FRAME_BUFFER_SIZE, len(data))
dummy = self._comp.SetParam(cl.COMPRESSED_BUFFER_SIZE, len(data))
return self._comp.Compress(self._nframes, data)
def _lin2ulaw(self, data):
import audioop
return audioop.lin2ulaw(data, 2)
def _lin2adpcm(self, data):
import audioop
if not hasattr(self, '_adpcmstate'):
self._adpcmstate = None
data, self._adpcmstate = audioop.lin2adpcm(data, 2, self._adpcmstate)
return data
def _ensure_header_written(self, datasize):
if not self._nframeswritten:
if self._comptype in ('ULAW', 'ALAW'):
if not self._sampwidth:
self._sampwidth = 2
if self._sampwidth != 2:
raise Error, 'sample width must be 2 when compressing with ULAW or ALAW'
if self._comptype == 'G722':
if not self._sampwidth:
self._sampwidth = 2
if self._sampwidth != 2:
raise Error, 'sample width must be 2 when compressing with G7.22 (ADPCM)'
if not self._nchannels:
raise Error, '# channels not specified'
if not self._sampwidth:
raise Error, 'sample width not specified'
if not self._framerate:
raise Error, 'sampling rate not specified'
self._write_header(datasize)
def _init_compression(self):
if self._comptype == 'G722':
self._convert = self._lin2adpcm
return
try:
import cl
except ImportError:
if self._comptype == 'ULAW':
try:
import audioop
self._convert = self._lin2ulaw
return
except ImportError:
pass
raise Error, 'cannot write compressed AIFF-C files'
if self._comptype == 'ULAW':
scheme = cl.G711_ULAW
else:
if self._comptype == 'ALAW':
scheme = cl.G711_ALAW
else:
raise Error, 'unsupported compression type'
self._comp = cl.OpenCompressor(scheme)
params = [cl.ORIGINAL_FORMAT, 0,
cl.BITS_PER_COMPONENT, self._sampwidth * 8,
cl.FRAME_RATE, self._framerate,
cl.FRAME_BUFFER_SIZE, 100,
cl.COMPRESSED_BUFFER_SIZE, 100]
if self._nchannels == 1:
params[1] = cl.MONO
else:
if self._nchannels == 2:
params[1] = cl.STEREO_INTERLEAVED
else:
raise Error, 'cannot compress more than 2 channels'
self._comp.SetParams(params)
dummy = self._comp.Compress(0, '')
self._convert = self._comp_data
def _write_header(self, initlength):
if self._aifc and self._comptype != 'NONE':
self._init_compression()
self._file.write('FORM')
if not self._nframes:
self._nframes = initlength // (self._nchannels * self._sampwidth)
self._datalength = self._nframes * self._nchannels * self._sampwidth
if self._datalength & 1:
self._datalength = self._datalength + 1
if self._aifc:
if self._comptype in ('ULAW', 'ALAW'):
self._datalength = self._datalength // 2
if self._datalength & 1:
self._datalength = self._datalength + 1
elif self._comptype == 'G722':
self._datalength = (self._datalength + 3) // 4
if self._datalength & 1:
self._datalength = self._datalength + 1
self._form_length_pos = self._file.tell()
commlength = self._write_form_length(self._datalength)
if self._aifc:
self._file.write('AIFC')
self._file.write('FVER')
_write_ulong(self._file, 4)
_write_ulong(self._file, self._version)
else:
self._file.write('AIFF')
self._file.write('COMM')
_write_ulong(self._file, commlength)
_write_short(self._file, self._nchannels)
self._nframes_pos = self._file.tell()
_write_ulong(self._file, self._nframes)
_write_short(self._file, self._sampwidth * 8)
_write_float(self._file, self._framerate)
if self._aifc:
self._file.write(self._comptype)
_write_string(self._file, self._compname)
self._file.write('SSND')
self._ssnd_length_pos = self._file.tell()
_write_ulong(self._file, self._datalength + 8)
_write_ulong(self._file, 0)
_write_ulong(self._file, 0)
def _write_form_length(self, datalength):
if self._aifc:
commlength = 23 + len(self._compname)
if commlength & 1:
commlength = commlength + 1
verslength = 12
else:
commlength = 18
verslength = 0
_write_ulong(self._file, 4 + verslength + self._marklength + 8 + commlength + 16 + datalength)
return commlength
def _patchheader(self):
curpos = self._file.tell()
if self._datawritten & 1:
datalength = self._datawritten + 1
self._file.write(chr(0))
else:
datalength = self._datawritten
if datalength == self._datalength and self._nframes == self._nframeswritten and self._marklength == 0:
self._file.seek(curpos, 0)
return
self._file.seek(self._form_length_pos, 0)
dummy = self._write_form_length(datalength)
self._file.seek(self._nframes_pos, 0)
_write_ulong(self._file, self._nframeswritten)
self._file.seek(self._ssnd_length_pos, 0)
_write_ulong(self._file, datalength + 8)
self._file.seek(curpos, 0)
self._nframes = self._nframeswritten
self._datalength = datalength
def _writemarkers(self):
if len(self._markers) == 0:
return
self._file.write('MARK')
length = 2
for marker in self._markers:
id, pos, name = marker
length = length + len(name) + 1 + 6
if len(name) & 1 == 0:
length = length + 1
_write_ulong(self._file, length)
self._marklength = length + 8
_write_short(self._file, len(self._markers))
for marker in self._markers:
id, pos, name = marker
_write_short(self._file, id)
_write_ulong(self._file, pos)
_write_string(self._file, name)
def open(f, mode=None):
if mode is None:
if hasattr(f, 'mode'):
mode = f.mode
else:
mode = 'rb'
if mode in ('r', 'rb'):
return Aifc_read(f)
if mode in ('w', 'wb'):
return Aifc_write(f)
raise Error, "mode must be 'r', 'rb', 'w', or 'wb'"
return
openfp = open
if __name__ == '__main__':
import sys
if not sys.argv[1:]:
sys.argv.append('/usr/demos/data/audio/bach.aiff')
fn = sys.argv[1]
f = open(fn, 'r')
print 'Reading', fn
print 'nchannels =', f.getnchannels()
print 'nframes =', f.getnframes()
print 'sampwidth =', f.getsampwidth()
print 'framerate =', f.getframerate()
print 'comptype =', f.getcomptype()
print 'compname =', f.getcompname()
if sys.argv[2:]:
gn = sys.argv[2]
print 'Writing', gn
g = open(gn, 'w')
g.setparams(f.getparams())
while 1:
data = f.readframes(1024)
if not data:
break
g.writeframes(data)
g.close()
f.close()
print 'Done.'
|
odss/py-odss
|
tests/cdi/test_decorators.py
|
import pytest
from odss.cdi import consts
from odss.cdi.contexts import get_factory_context
from odss.cdi.decorators import (
Component,
Instantiate,
Invalidate,
Provides,
Requires,
Validate,
)
def test_component():
with pytest.raises(TypeError):
Component()
@Component
class Dummy1:
pass
config = get_factory_context(Dummy1)
assert config.name == "tests.cdi.test_decorators.Dummy1"
assert config.completed
@Component("dummy2")
class Dummy2:
pass
config = get_factory_context(Dummy2)
assert config.name == "dummy2"
assert config.completed
@Component("dummy3")
class Dummy3:
def __init__(self, d1: Dummy1, d2: Dummy2):
pass
config = get_factory_context(Dummy3)
assert config.name == "dummy3"
assert config.completed
requires = config.get_handler(consts.HANDLER_REQUIRES)
assert len(requires) == 2
def test_instantiate():
with pytest.raises(TypeError):
Instantiate()
for invalid_name in (None, True, False, 1, [1, 2], (1, 2)):
with pytest.raises(TypeError):
Instantiate(invalid_name)()
for invalid_props in (None, True, False, 1, [1, 2], (1, 2)):
with pytest.raises(TypeError):
Instantiate("name", invalid_props)()
with pytest.raises(NameError):
Instantiate(Dummy1)
@Instantiate
class Dummy1:
pass
instances = get_factory_context(Dummy1).get_instances()
assert len(instances) == 1
assert instances[Dummy1.__name__] == {}
@Instantiate("dummy")
@Instantiate("new-dummy", {"id": 1})
class Dummy2:
pass
instances = get_factory_context(Dummy2).get_instances()
assert len(instances) == 2
assert instances["dummy"] == {}
assert instances["new-dummy"] == {"id": 1}
def test_provide():
@Provides("test1")
class Dummy1:
pass
specs = get_factory_context(Dummy1).get_handler(consts.HANDLER_PROVIDES)
assert len(specs) == 1
assert specs[0] == "test1"
@Provides(["test1", "test2"])
class Dummy2:
pass
specs = get_factory_context(Dummy2).get_handler(consts.HANDLER_PROVIDES)
assert len(specs) == 2
assert specs[0] == "test1"
assert specs[1] == "test2"
def test_requires():
@Requires("test1")
class Dummy1:
pass
specs = get_factory_context(Dummy1).get_handler(consts.HANDLER_REQUIRES)
assert len(specs) == 1
assert specs[0] == "test1"
@Requires("test1", "test2")
class Dummy2:
pass
specs = get_factory_context(Dummy2).get_handler(consts.HANDLER_REQUIRES)
assert len(specs) == 2
assert specs[0] == "test1"
assert specs[1] == "test2"
def test_default_requires():
class IService1:
pass
class IService2:
pass
@Component("test1")
class Dummy1:
def __init__(self, s1: IService1, s2: IService2):
pass
specs = get_factory_context(Dummy1).get_handler(consts.HANDLER_REQUIRES)
assert len(specs) == 2
assert isinstance(specs[0], str)
assert isinstance(specs[1], str)
assert IService1.__name__ in specs[0]
assert IService2.__name__ in specs[1]
def test_validate():
@Component
class Dummy:
@Validate
def validate(self, ctx):
pass
@Invalidate
def invalidate(self, ctx):
pass
context = get_factory_context(Dummy)
assert Dummy.validate == context.get_callback(consts.CALLBACK_VALIDATE)[0]
assert Dummy.invalidate == context.get_callback(consts.CALLBACK_INVALIDATE)[0]
|
yunjieyao/calcentral
|
src/redux/actions/routeActions.js
|
export const SET_CURRENT_ROUTE_PROPERTIES = 'SET_CURRENT_ROUTE_PROPERTIES';
export const setCurrentRouteProperties = props => ({
type: SET_CURRENT_ROUTE_PROPERTIES,
value: props
});
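// Usage sketch (the store and the props object below are illustrative, not part
// of this module):
//   store.dispatch(setCurrentRouteProperties({ name: 'dashboard' }))
// dispatches { type: SET_CURRENT_ROUTE_PROPERTIES, value: { name: 'dashboard' } }.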
|
djstaros/qmcpack
|
src/AFQMC/Matrix/tests/test_csr_matrix.cpp
|
//////////////////////////////////////////////////////////////////////////////////////
// This file is distributed under the University of Illinois/NCSA Open Source License.
// See LICENSE file in top directory for details.
//
// Copyright (c) 2017 <NAME> and QMCPACK developers.
//
// File developed by:
//
// File created by:
//////////////////////////////////////////////////////////////////////////////////////
#undef NDEBUG
#include "catch.hpp"
#include <algorithm> // std::sort
#include <cassert>
#include <iostream>
#include <random>
#include "mpi3/shared_window.hpp"
#include "mpi3/shared_communicator.hpp"
#include "AFQMC/Memory/SharedMemory/shm_ptr_with_raw_ptr_dispatch.hpp"
#include "AFQMC/Matrix/csr_matrix.hpp"
#if defined(ENABLE_CUDA) || defined(ENABLE_HIP)
#include "AFQMC/Memory/custom_pointers.hpp"
#endif
using std::cerr;
using std::cout;
using std::endl;
using std::get;
using tp_ul_ul = std::tuple<std::size_t, std::size_t>;
namespace mpi3 = boost::mpi3;
namespace qmcplusplus
{
template<typename Type, typename IndxType, typename IntType, class Alloc, class is_root>
void test_csr_matrix_shm_allocator(Alloc A, bool serial)
{
auto world = boost::mpi3::environment::get_world_instance();
mpi3::shared_communicator node(world.split_shared());
using ucsr_matrix = ma::sparse::ucsr_matrix<Type, IndxType, IntType, Alloc, is_root>;
using csr_matrix = ma::sparse::csr_matrix<Type, IndxType, IntType, Alloc, is_root>;
std::vector<Type> v_ = {9, 10, 3, 1};
auto itv = v_.begin();
std::vector<IndxType> c_ = {2, 1, 1, 3};
auto itc = c_.begin();
std::vector<int> non_zero_per_row = {2, 0, 1, 1};
std::vector<int> max_non_zero_per_row = {5, 3, 4, 2};
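  // The inserts below build a 4x4 matrix with entries (0,2)=9, (0,1)=10, (2,1)=3
  // and (3,3)=1; v_/c_ hold the expected values/columns in unsorted (ucsr)
  // insertion order per row, and non_zero_per_row counts the entries per row.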
{
ucsr_matrix small(tp_ul_ul{4, 4}, tp_ul_ul{0, 0}, 2, A);
if (serial || node.rank() == 0)
small[3][3] = 1;
if (serial || node.rank() == 0)
small[0][2] = 9;
node.barrier();
if (serial || node.rank() == node.size() - 1)
small[2][1] = 3;
if (serial || node.rank() == node.size() - 1)
small[0][1] = 10;
node.barrier();
REQUIRE(small.num_non_zero_elements() == 4);
auto val = small.non_zero_values_data();
auto col = small.non_zero_indices2_data();
for (std::size_t i = 0; i < small.size(0); i++)
{
for (auto it = small.pointers_begin()[i]; it != small.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
node.barrier();
}
{
ucsr_matrix small(tp_ul_ul{4, 4}, tp_ul_ul{0, 0}, non_zero_per_row, A);
if (serial || node.rank() == 0)
small[3][3] = 1;
if (serial || node.rank() == 0)
small[0][2] = 9;
node.barrier();
if (serial || node.rank() == node.size() - 1)
small[2][1] = 3;
if (serial || node.rank() == node.size() - 1)
small[0][1] = 10;
node.barrier();
REQUIRE(small.num_non_zero_elements() == 4);
auto val = small.non_zero_values_data();
auto col = small.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small.size(0); i++)
{
for (auto it = small.pointers_begin()[i]; it != small.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
node.barrier();
}
{
ucsr_matrix small(tp_ul_ul{4, 4}, tp_ul_ul{0, 0}, max_non_zero_per_row, A);
if (serial || node.rank() == 0)
small[3][3] = 1;
if (serial || node.rank() == 0)
small[0][2] = 9;
node.barrier();
if (serial || node.rank() == node.size() - 1)
small[2][1] = 3;
if (serial || node.rank() == node.size() - 1)
small[0][1] = 10;
node.barrier();
REQUIRE(small.num_non_zero_elements() == 4);
auto val = small.non_zero_values_data();
auto col = small.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small.size(0); i++)
{
for (auto it = small.pointers_begin()[i]; it != small.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
node.barrier();
}
{
ucsr_matrix small(tp_ul_ul{4, 4}, tp_ul_ul{0, 0}, 2, A);
if (serial || node.rank() == 0)
small[3][3] = 1;
if (serial || node.rank() == 0)
small[0][2] = 9;
node.barrier();
if (serial || node.rank() == node.size() - 1)
small[2][1] = 3;
if (serial || node.rank() == node.size() - 1)
small[0][1] = 10;
node.barrier();
REQUIRE(small.num_non_zero_elements() == 4);
auto val = small.non_zero_values_data();
auto col = small.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small.size(0); i++)
{
for (auto it = small.pointers_begin()[i]; it != small.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
node.barrier();
ucsr_matrix small0(tp_ul_ul{4, 4}, tp_ul_ul{0, 0}, 2, A);
if (serial || node.rank() == 0)
small0[3][3] = 1;
if (serial || node.rank() == 0)
small0[0][2] = 9;
node.barrier();
if (serial || node.rank() == node.size() - 1)
small0[2][1] = 3;
if (serial || node.rank() == node.size() - 1)
small0[0][1] = 10;
node.barrier();
REQUIRE(small0.num_non_zero_elements() == 4);
val = small0.non_zero_values_data();
col = small0.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small0.size(0); i++)
{
for (auto it = small0.pointers_begin()[i]; it != small0.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
ucsr_matrix small2(std::move(small));
REQUIRE(small2.num_non_zero_elements() == 4);
val = small2.non_zero_values_data();
col = small2.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small2.size(0); i++)
{
for (auto it = small2.pointers_begin()[i]; it != small2.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
node.barrier();
ucsr_matrix small3(tp_ul_ul{0, 0}, tp_ul_ul{0, 0}, 0, A);
small3 = std::move(small2);
REQUIRE(small3.num_non_zero_elements() == 4);
val = small3.non_zero_values_data();
col = small3.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small3.size(0); i++)
{
for (auto it = small3.pointers_begin()[i]; it != small3.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
// copy assignment
small2 = small3;
REQUIRE(small2.num_non_zero_elements() == 4);
val = small2.non_zero_values_data();
col = small2.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small2.size(0); i++)
{
for (auto it = small2.pointers_begin()[i]; it != small2.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
// copy constructor
auto small9(small2);
REQUIRE(small9.num_non_zero_elements() == 4);
val = small9.non_zero_values_data();
col = small9.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small9.size(0); i++)
{
for (auto it = small9.pointers_begin()[i]; it != small9.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
node.barrier();
// ordered
v_ = {10, 9, 3, 1};
c_ = {1, 2, 1, 3};
csr_matrix small10(small2);
REQUIRE(small10.num_non_zero_elements() == 4);
val = small10.non_zero_values_data();
col = small10.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small10.size(0); i++)
{
for (auto it = small10.pointers_begin()[i]; it != small10.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
csr_matrix small4(std::move(small3));
REQUIRE(small4.num_non_zero_elements() == 4);
val = small4.non_zero_values_data();
col = small4.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small4.size(0); i++)
{
for (auto it = small4.pointers_begin()[i]; it != small4.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
#if defined(ENABLE_CUDA) || defined(ENABLE_HIP)
{
using dev_csr_matrix = ma::sparse::csr_matrix<Type, IndxType, IntType, device::device_allocator<Type>>;
dev_csr_matrix small11(small4);
REQUIRE(small11.num_non_zero_elements() == 4);
auto val_ = small11.non_zero_values_data();
auto col_ = small11.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small11.size(0); i++)
{
for (IntType it = small11.pointers_begin()[i]; it != small11.pointers_end()[i]; it++)
{
REQUIRE(Type(val_[it]) == *(itv++));
REQUIRE(col_[it] == *(itc++));
}
}
// this is not a move! just making sure it makes a copy, otherwise
// it will fail below
dev_csr_matrix small12(std::move(small4));
REQUIRE(small12.num_non_zero_elements() == 4);
val_ = small12.non_zero_values_data();
col_ = small12.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small12.size(0); i++)
{
for (IntType it = small12.pointers_begin()[i]; it != small12.pointers_end()[i]; it++)
{
REQUIRE(val_[it] == *(itv++));
REQUIRE(col_[it] == *(itc++));
}
}
}
#endif
ucsr_matrix small5(tp_ul_ul{4, 4}, tp_ul_ul{0, 0}, non_zero_per_row, A);
if (serial || node.rank() == 0)
small5[3][3] = 1;
if (serial || node.rank() == 0)
small5[0][2] = 9;
node.barrier();
if (serial || node.rank() == node.size() - 1)
small5[2][1] = 3;
if (serial || node.rank() == node.size() - 1)
small5[0][1] = 10;
node.barrier();
small4.reserve(max_non_zero_per_row);
small4 = std::move(small5);
REQUIRE(small4.num_non_zero_elements() == 4);
val = small4.non_zero_values_data();
col = small4.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small4.size(0); i++)
{
for (auto it = small4.pointers_begin()[i]; it != small4.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
ucsr_matrix small6(tp_ul_ul{4, 4}, tp_ul_ul{0, 0}, max_non_zero_per_row, A);
if (serial || node.rank() == 0)
small6[3][3] = 1;
if (serial || node.rank() == 0)
small6[0][2] = 9;
node.barrier();
if (serial || node.rank() == node.size() - 1)
small6[2][1] = 3;
if (serial || node.rank() == node.size() - 1)
small6[0][1] = 10;
node.barrier();
small4.reserve(100);
small4 = std::move(small6);
REQUIRE(small4.num_non_zero_elements() == 4);
val = small4.non_zero_values_data();
col = small4.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small4.size(0); i++)
{
for (auto it = small4.pointers_begin()[i]; it != small4.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
csr_matrix small7(tp_ul_ul{4, 4}, tp_ul_ul{0, 0}, max_non_zero_per_row, A);
if (serial || node.rank() == 0)
small7[3][3] = 1;
if (serial || node.rank() == 0)
small7[0][2] = 9;
node.barrier();
if (serial || node.rank() == node.size() - 1)
small7[2][1] = 3;
if (serial || node.rank() == node.size() - 1)
small7[0][1] = 10;
node.barrier();
small7.remove_empty_spaces();
REQUIRE(small7.num_non_zero_elements() == 4);
val = small7.non_zero_values_data();
col = small7.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small7.size(0); i++)
{
if (i < small7.size(0) - 1)
REQUIRE(small7.pointers_end()[i] - small7.pointers_begin()[i] == small7.capacity(i));
for (auto it = small7.pointers_begin()[i]; it != small7.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
// copy assignment
small7 = small4;
REQUIRE(small7.num_non_zero_elements() == 4);
val = small7.non_zero_values_data();
col = small7.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small7.size(0); i++)
{
for (auto it = small7.pointers_begin()[i]; it != small7.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
// copy constructor
auto small8(small4);
REQUIRE(small8.num_non_zero_elements() == 4);
val = small8.non_zero_values_data();
col = small8.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small8.size(0); i++)
{
for (auto it = small8.pointers_begin()[i]; it != small8.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
}
// ordered
v_ = {10, 9, 3, 1};
c_ = {1, 2, 1, 3};
{
csr_matrix small(tp_ul_ul{4, 4}, tp_ul_ul{0, 0}, 2, A);
if (serial || node.rank() == 0)
small[3][3] = 1;
if (serial || node.rank() == 0)
small[0][2] = 9;
node.barrier();
if (serial || node.rank() == node.size() - 1)
small[2][1] = 3;
if (serial || node.rank() == node.size() - 1)
small[0][1] = 10;
node.barrier();
REQUIRE(small.num_non_zero_elements() == 4);
auto val = small.non_zero_values_data();
auto col = small.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small.size(0); i++)
{
for (auto it = small.pointers_begin()[i]; it != small.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
node.barrier();
std::array<IndxType, 2> range = {0, 2};
auto small2 = small[range];
REQUIRE(small2.num_non_zero_elements() == 2);
val = small2.non_zero_values_data();
col = small2.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
auto i0 = small2.pointers_begin()[0];
for (std::size_t i = 0; i < small2.size(0); i++)
{
for (auto it = small2.pointers_begin()[i]; it != small2.pointers_end()[i]; it++)
{
REQUIRE(val[it - i0] == *(itv++));
REQUIRE(col[it - i0] == *(itc++));
}
}
range = {2, 4};
auto small3 = small[range];
REQUIRE(small3.num_non_zero_elements() == 2);
val = small3.non_zero_values_data();
col = small3.non_zero_indices2_data();
itv = v_.begin() + 2;
itc = c_.begin() + 2;
i0 = small3.pointers_begin()[0];
for (std::size_t i = 0; i < small3.size(0); i++)
{
for (auto it = small3.pointers_begin()[i]; it != small3.pointers_end()[i]; it++)
{
REQUIRE(val[it - i0] == *(itv++));
REQUIRE(col[it - i0] == *(itc++));
}
}
node.barrier();
std::vector<Type> v__ = {10, 3};
std::vector<IndxType> c__ = {1, 1};
std::array<IndxType, 4> range2 = {0, 3, 0, 2};
auto small4 = small[range2];
REQUIRE(small4.num_non_zero_elements() == 2);
REQUIRE(small4.size(0) == 3);
REQUIRE(small4.size(1) == 2);
val = small4.non_zero_values_data();
col = small4.non_zero_indices2_data();
itv = v__.begin();
itc = c__.begin();
auto i1 = small4.pointers_begin()[0];
for (std::size_t i = 0; i < small4.size(0); i++)
{
for (auto it = small4.pointers_begin()[i]; it != small4.pointers_end()[i]; it++)
{
REQUIRE(val[it - i1] == *(itv++));
REQUIRE(col[it - i1] == *(itc++));
}
}
node.barrier();
v__ = {9, 1};
c__ = {2, 3};
range2 = {0, 4, 2, 4};
auto small5 = small[range2];
REQUIRE(small5.num_non_zero_elements() == 2);
REQUIRE(small5.size(0) == 4);
REQUIRE(small5.size(1) == 4);
val = small5.non_zero_values_data();
col = small5.non_zero_indices2_data();
itv = v__.begin();
itc = c__.begin();
i1 = small5.pointers_begin()[0];
for (std::size_t i = 0; i < small5.size(0); i++)
{
for (auto it = small5.pointers_begin()[i]; it != small5.pointers_end()[i]; it++)
{
REQUIRE(val[it - i1] == *(itv++));
REQUIRE(col[it - i1] == *(itc++));
}
}
}
{
csr_matrix small(tp_ul_ul{4, 4}, tp_ul_ul{0, 0}, non_zero_per_row, A);
if (serial || node.rank() == 0)
small[3][3] = 1;
if (serial || node.rank() == 0)
small[0][2] = 9;
node.barrier();
if (serial || node.rank() == node.size() - 1)
small[2][1] = 3;
if (serial || node.rank() == node.size() - 1)
small[0][1] = 10;
node.barrier();
REQUIRE(small.num_non_zero_elements() == 4);
auto val = small.non_zero_values_data();
auto col = small.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small.size(0); i++)
{
for (auto it = small.pointers_begin()[i]; it != small.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
node.barrier();
}
{
csr_matrix small(tp_ul_ul{4, 4}, tp_ul_ul{0, 0}, max_non_zero_per_row, A);
if (serial || node.rank() == 0)
small[3][3] = 1;
if (serial || node.rank() == 0)
small[0][2] = 9;
node.barrier();
if (serial || node.rank() == node.size() - 1)
small[2][1] = 3;
if (serial || node.rank() == node.size() - 1)
small[0][1] = 10;
node.barrier();
REQUIRE(small.num_non_zero_elements() == 4);
auto val = small.non_zero_values_data();
auto col = small.non_zero_indices2_data();
itv = v_.begin();
itc = c_.begin();
for (std::size_t i = 0; i < small.size(0); i++)
{
for (auto it = small.pointers_begin()[i]; it != small.pointers_end()[i]; it++)
{
REQUIRE(val[it] == *(itv++));
REQUIRE(col[it] == *(itc++));
}
}
node.barrier();
}
};
TEST_CASE("csr_matrix_serial", "[csr]")
{
// serial
{
using Type = double;
using Alloc = std::allocator<Type>;
using is_root = ma::sparse::null_is_root<Alloc>;
test_csr_matrix_shm_allocator<Type, int, std::size_t, Alloc, is_root>(Alloc(), true);
test_csr_matrix_shm_allocator<Type, int, int, Alloc, is_root>(Alloc(), true);
}
{
using Type = std::complex<double>;
using Alloc = std::allocator<Type>;
using is_root = ma::sparse::null_is_root<Alloc>;
test_csr_matrix_shm_allocator<Type, int, std::size_t, Alloc, is_root>(Alloc(), true);
test_csr_matrix_shm_allocator<Type, int, int, Alloc, is_root>(Alloc(), true);
}
}
TEST_CASE("csr_matrix_shm", "[csr]")
{
auto world = boost::mpi3::environment::get_world_instance();
mpi3::shared_communicator node(world.split_shared());
{
using Type = double;
using Alloc = shm::allocator_shm_ptr_with_raw_ptr_dispatch<Type>;
using is_root = ma::sparse::is_root;
test_csr_matrix_shm_allocator<Type, int, std::size_t, Alloc, is_root>(Alloc(node), false);
test_csr_matrix_shm_allocator<Type, int, int, Alloc, is_root>(Alloc(node), false);
}
{
using Type = std::complex<double>;
using Alloc = shm::allocator_shm_ptr_with_raw_ptr_dispatch<Type>;
using is_root = ma::sparse::is_root;
test_csr_matrix_shm_allocator<Type, int, std::size_t, Alloc, is_root>(Alloc(node), false);
test_csr_matrix_shm_allocator<Type, int, int, Alloc, is_root>(Alloc(node), false);
}
}
//#define TEST_CSR_LARGE_MEMORY
#ifdef TEST_CSR_LARGE_MEMORY
TEST_CASE("csr_matrix_shm_large_memory", "[csr]")
{
auto world = boost::mpi3::environment::get_world_instance();
mpi3::shared_communicator node(world.split_shared());
using Type = std::complex<double>;
using Alloc = shm::allocator_shm_ptr_with_raw_ptr_dispatch<Type>;
using is_root = ma::sparse::is_root;
using ucsr_matrix = ma::sparse::ucsr_matrix<Type, int, size_t, Alloc, is_root>;
using csr_matrix = ma::sparse::csr_matrix<Type, int, size_t, Alloc, is_root>;
Alloc A(node);
ucsr_matrix umat({400000, 400000}, tp_ul_ul{0, 0}, 7000, A);
world.barrier();
if (node.root())
{
std::cout << " capacity: " << umat.capacity() << std::endl;
umat.emplace({399999, 0}, Type(1));
std::cout << " pbegin[399999]: " << umat.pointers_begin()[399999] << std::endl;
}
world.barrier();
Alloc B(node);
ucsr_matrix umat2({400000, 400000}, tp_ul_ul{0, 0}, 7000, B);
world.barrier();
if (node.root())
{
std::cout << " capacity: " << umat2.capacity() << std::endl;
umat2.emplace({399999, 0}, Type(1));
std::cout << " pbegin[399999]: " << umat2.pointers_begin()[399999] << std::endl;
}
world.barrier();
}
#endif
} // namespace qmcplusplus
|
benshrimpton/node
|
app/models/customerGroup.server.model.js
|
/**
* Created by tebesfinwo on 7/28/14.
*/
'use strict';
var Mongoose = require('mongoose'),
Schema = Mongoose.Schema;
/**
* Customer Group Schema
* */
var customerGroupSchema = new Schema({
customer_group_id : {
type : Number
},
customer_group_code : {
type : String
}
});
Mongoose.model('CustomerGroup', customerGroupSchema);
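// Usage sketch (assumes a Mongoose connection is established elsewhere; the
// field values are illustrative):
//   var CustomerGroup = Mongoose.model('CustomerGroup');
//   CustomerGroup.create({ customer_group_id: 1, customer_group_code: 'GENERAL' },
//       function (err, group) { /* handle err / group */ });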
|
kai-ako/kai-ako
|
spec/models/user_spec.rb
|
require 'rails_helper'
RSpec.describe User, type: :model do
describe "Mulitple user creation" do
it "can create multiple users in the db" do
expect{create_list(:user, 5)}.to change{User.count}.by(5)
end
end
describe "#self.find_or_create_from_omniauth" do
it "can find a user by omni auth" do
auth = mock_auth_hash
other_auth = mock_auth_hash
other_auth.uid = SecureRandom.hex
expect(auth).to_not eq(other_auth)
expect {
User.find_or_create_from_omniauth(auth)
User.find_or_create_from_omniauth(other_auth)
}.to change { User.count }.by(2)
end
it "creates a new user when the current user doesn't exist" do
auth = mock_auth_hash
expect{User.find_or_create_from_omniauth(auth)}.to change{User.count}.by(1)
end
end
end
|
Ligtus/JavaPracticeHacktoberfest
|
src/marcoscastro2.java
|
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
public class Ejemplo1urlCon {
public static void main(String[] args) {
URL url = null;
URLConnection urlCon = null;
try {
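            // Fetch the page and echo its body to stdout line by line.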
url = new URL("http://www.elaltozano.es");
urlCon = url.openConnection();
BufferedReader in;
InputStream inputStream = urlCon.getInputStream();
in = new BufferedReader(new InputStreamReader(inputStream));
String inputLine;
while ((inputLine = in.readLine()) != null)
System.out.println(inputLine);
in.close();
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
|
wapache/opengauss
|
src/gausskernel/storage/mot/core/src/system/transaction/txn.cpp
|
/*
* Copyright (c) 2020 Huawei Technologies Co.,Ltd.
*
* openGauss is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
* See the Mulan PSL v2 for more details.
* -------------------------------------------------------------------------
*
* txn.cpp
* Transaction manager used to manage the life cycle of a single transaction.
*
* IDENTIFICATION
* src/gausskernel/storage/mot/core/src/system/transaction/txn.cpp
*
* -------------------------------------------------------------------------
*/
#include <stdlib.h>
#include <algorithm>
#include <unordered_map>
#include "../storage/table.h" // explicit path in order to solve collision with B header file with the same name
#include "mot_engine.h"
#include "redo_log_writer.h"
#include "sentinel.h"
#include "txn.h"
#include "txn_access.h"
#include "txn_insert_action.h"
#include "db_session_statistics.h"
#include "utilities.h"
#include "mm_api.h"
namespace MOT {
DECLARE_LOGGER(TxnManager, System);
void TxnManager::RemoveTableFromStat(Table* t)
{
m_accessMgr->RemoveTableFromStat(t);
}
void TxnManager::UpdateRow(Row* row, const int attr_id, double attr_value)
{
row->SetValue(attr_id, attr_value);
UpdateLastRowState(AccessType::WR);
}
void TxnManager::UpdateRow(Row* row, const int attr_id, uint64_t attr_value)
{
row->SetValue(attr_id, attr_value);
UpdateLastRowState(AccessType::WR);
}
InsItem* TxnManager::GetNextInsertItem(Index* index)
{
return m_accessMgr->GetInsertMgr()->GetInsertItem(index);
}
Key* TxnManager::GetTxnKey(Index* index)
{
int size = index->GetAlignedKeyLength() + sizeof(Key);
void* buf = MemSessionAlloc(size);
if (buf == nullptr) {
return nullptr;
}
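// construct the key in place in session-local memory (placement new); the
// caller is expected to release it later, e.g. via DestroyTxnKey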
return new (buf) Key(index->GetAlignedKeyLength());
}
RC TxnManager::InsertRow(Row* row)
{
GcSessionStart();
RC result = m_accessMgr->GetInsertMgr()->ExecuteOptimisticInsert(row);
if (result == RC_OK) {
MOT::DbSessionStatisticsProvider::GetInstance().AddInsertRow();
}
return result;
}
Row* TxnManager::RowLookup(const AccessType type, Sentinel* const& originalSentinel, RC& rc)
{
rc = RC_OK;
// Look for the Sentinel in the cache
Row* local_row = nullptr;
// error handling
if (unlikely(originalSentinel == nullptr)) {
return nullptr;
}
// if txn not started, tag as started and take global epoch
GcSessionStart();
RC res = AccessLookup(type, originalSentinel, local_row);
switch (res) {
case RC::RC_LOCAL_ROW_DELETED:
return nullptr;
case RC::RC_LOCAL_ROW_FOUND:
return local_row;
case RC::RC_LOCAL_ROW_NOT_FOUND:
if (likely(originalSentinel->IsCommited() == true)) {
// For a read-only txn return the committed row
if (GetTxnIsoLevel() == READ_COMMITED and type == AccessType::RD) {
return m_accessMgr->GetReadCommitedRow(originalSentinel);
} else {
// Row is not in the cache, map it and return the local row
return m_accessMgr->MapRowtoLocalTable(AccessType::RD, originalSentinel, rc);
}
} else
return nullptr;
case RC::RC_MEMORY_ALLOCATION_ERROR:
rc = RC_MEMORY_ALLOCATION_ERROR;
return nullptr;
default:
return nullptr;
}
}
RC TxnManager::AccessLookup(const AccessType type, Sentinel* const& originalSentinel, Row*& localRow)
{
return m_accessMgr->AccessLookup(type, originalSentinel, localRow);
}
RC TxnManager::DeleteLastRow()
{
RC rc;
Access* access = m_accessMgr->GetLastAccess();
if (access == nullptr)
return RC_ERROR;
rc = m_accessMgr->UpdateRowState(AccessType::DEL, access);
if (rc != RC_OK)
return rc;
return rc;
}
RC TxnManager::UpdateLastRowState(AccessType state)
{
return m_accessMgr->UpdateRowState(state, m_accessMgr->GetLastAccess());
}
RC TxnManager::StartTransaction(uint64_t transactionId, int isolationLevel)
{
m_transactionId = transactionId;
m_isolationLevel = isolationLevel;
m_state = TxnState::TXN_START;
GcSessionStart();
return RC_OK;
}
RC TxnManager::LiteRollback(TransactionId transactionId)
{
if (m_txnDdlAccess->Size() > 0) {
if (transactionId != INVALID_TRANSACTIOIN_ID)
m_transactionId = transactionId;
RollbackDDLs();
Cleanup();
}
MOT::DbSessionStatisticsProvider::GetInstance().AddRollbackTxn();
return RC::RC_ABORT;
}
RC TxnManager::LiteRollbackPrepared(TransactionId transactionId)
{
if (m_txnDdlAccess->Size() > 0) {
if (transactionId != INVALID_TRANSACTIOIN_ID)
m_transactionId = transactionId;
m_redoLog.RollbackPrepared();
RollbackDDLs();
Cleanup();
}
MOT::DbSessionStatisticsProvider::GetInstance().AddRollbackPreparedTxn();
return RC::RC_ABORT;
}
RC TxnManager::RollbackInternal(bool isPrepared)
{
if (isPrepared) {
m_occManager.ReleaseHeaders(this);
m_redoLog.RollbackPrepared();
} else {
m_occManager.ReleaseLocks(this);
}
// We have to undo changes to secondary indexes and DDLs
m_occManager.RollbackInserts(this);
RollbackDDLs();
Cleanup();
if (isPrepared)
MOT::DbSessionStatisticsProvider::GetInstance().AddRollbackPreparedTxn();
else
MOT::DbSessionStatisticsProvider::GetInstance().AddRollbackTxn();
return RC::RC_ABORT;
}
void TxnManager::CleanTxn()
{
Cleanup();
}
RC TxnManager::Prepare(TransactionId transactionId)
{
if (transactionId != INVALID_TRANSACTIOIN_ID)
m_transactionId = transactionId;
// Run only first validation phase
RC rc = m_occManager.ValidateOcc(this);
if (rc == RC_OK)
m_redoLog.Prepare();
return rc;
}
RC TxnManager::LitePrepare(TransactionId transactionId)
{
if (m_txnDdlAccess->Size() == 0)
return RC_OK;
if (transactionId != INVALID_TRANSACTIOIN_ID)
m_transactionId = transactionId;
m_redoLog.Prepare();
return RC_OK;
}
RC TxnManager::CommitInternal()
{
SetCommitSequenceNumber(GetCSNManager().GetNextCSN());
// Record the start write phase for this transaction
if (GetGlobalConfiguration().m_enableCheckpoint) {
GetCheckpointManager()->BeginTransaction(this);
}
// first write to redo log, then write changes
m_redoLog.Commit();
WriteDDLChanges();
if (!m_occManager.WriteChanges(this))
return RC_PANIC;
if (GetGlobalConfiguration().m_enableCheckpoint) {
GetCheckpointManager()->TransactionCompleted(this);
}
if (!GetGlobalConfiguration().m_enableRedoLog ||
GetGlobalConfiguration().m_redoLogHandlerType == RedoLogHandlerType::ASYNC_REDO_LOG_HANDLER) {
m_occManager.ReleaseLocks(this);
m_occManager.CleanRowsFromIndexes(this);
}
return RC_OK;
}
RC TxnManager::Commit()
{
return Commit(INVALID_TRANSACTIOIN_ID);
}
RC TxnManager::Commit(uint64_t transactionId)
{
// Validate concurrency control
if (transactionId != INVALID_TRANSACTIOIN_ID)
m_transactionId = transactionId;
RC rc = m_occManager.ValidateOcc(this);
if (rc == RC_OK) {
rc = CommitInternal();
MOT::DbSessionStatisticsProvider::GetInstance().AddCommitTxn();
}
return rc;
}
RC TxnManager::LiteCommit(uint64_t transactionId)
{
if (m_txnDdlAccess->Size() > 0) {
if (transactionId != INVALID_TRANSACTIOIN_ID)
m_transactionId = transactionId;
SetCommitSequenceNumber(GetCSNManager().GetNextCSN());
// first write to redo log, then write changes
m_redoLog.Commit();
WriteDDLChanges();
Cleanup();
}
MOT::DbSessionStatisticsProvider::GetInstance().AddCommitTxn();
return RC_OK;
}
RC TxnManager::CommitPrepared()
{
return CommitPrepared(INVALID_TRANSACTIOIN_ID);
}
RC TxnManager::CommitPrepared(uint64_t transactionId)
{
if (transactionId != INVALID_TRANSACTIOIN_ID)
m_transactionId = transactionId;
SetCommitSequenceNumber(GetCSNManager().GetNextCSN());
// Record the start write phase for this transaction
if (GetGlobalConfiguration().m_enableCheckpoint) {
GetCheckpointManager()->BeginTransaction(this);
}
// first write to redo log, then write changes
m_redoLog.CommitPrepared();
// Run second validation phase
WriteDDLChanges();
if (!m_occManager.WriteChanges(this))
return RC_PANIC;
GetCheckpointManager()->TransactionCompleted(this);
if (!GetGlobalConfiguration().m_enableRedoLog ||
GetGlobalConfiguration().m_redoLogHandlerType == RedoLogHandlerType::ASYNC_REDO_LOG_HANDLER) {
m_occManager.ReleaseLocks(this);
m_occManager.CleanRowsFromIndexes(this);
}
MOT::DbSessionStatisticsProvider::GetInstance().AddCommitPreparedTxn();
return RC_OK;
}
RC TxnManager::LiteCommitPrepared(uint64_t transactionId)
{
if (m_txnDdlAccess->Size() > 0) {
if (transactionId != INVALID_TRANSACTIOIN_ID)
m_transactionId = transactionId;
// first write to redo log, then write changes
m_redoLog.CommitPrepared();
WriteDDLChanges();
Cleanup();
}
MOT::DbSessionStatisticsProvider::GetInstance().AddCommitPreparedTxn();
return RC_OK;
}
RC TxnManager::EndTransaction()
{
if (GetGlobalConfiguration().m_enableRedoLog &&
GetGlobalConfiguration().m_redoLogHandlerType != RedoLogHandlerType::ASYNC_REDO_LOG_HANDLER &&
IsFailedCommitPrepared() == false) {
m_occManager.ReleaseLocks(this);
m_occManager.CleanRowsFromIndexes(this);
}
Cleanup();
return RC::RC_OK;
}
void TxnManager::RedoWriteAction(bool isCommit)
{
m_redoLog.SetForceWrite();
if (isCommit)
m_redoLog.Commit();
else
m_redoLog.Rollback();
}
RC TxnManager::FailedCommitPrepared(uint64_t transactionId)
{
if (m_isLightSession && m_txnDdlAccess->Size() == 0)
return RC_OK;
uint64_t used_tid = m_transactionId;
if (transactionId != INVALID_TRANSACTIOIN_ID)
used_tid = transactionId;
SetCommitSequenceNumber(GetCSNManager().GetNextCSN());
if (GetGlobalConfiguration().m_enableCheckpoint)
GetCheckpointManager()->BeginTransaction(this);
if (m_isLightSession == false) {
// Row already Locked!
if (!m_occManager.WriteChanges(this))
return RC_PANIC;
}
if (GetGlobalConfiguration().m_enableCheckpoint)
GetCheckpointManager()->TransactionCompleted(this);
if (SavePreparedData() != RC_OK)
return RC_ERROR;
Cleanup();
return RC_OK;
}
void TxnManager::Cleanup()
{
if (m_isLightSession == false) {
m_accessMgr->ClearSet();
}
m_txnDdlAccess->Reset();
m_checkpointPhase = CheckpointPhase::NONE;
m_csn = 0;
m_occManager.CleanUp();
m_err = RC_OK;
m_errIx = nullptr;
m_flushDone = false;
m_internalTransactionId++;
m_internalStmtCount = 0;
m_redoLog.Reset();
SetFailedCommitPrepared(false);
GcSessionEnd();
ClearErrorStack();
m_accessMgr->ClearTableCache();
m_queryState.clear();
}
void TxnManager::UndoInserts()
{
uint32_t rollbackCounter = 0;
TxnOrderedSet_t& OrderedSet = m_accessMgr->GetOrderedRowSet();
for (const auto& ra_pair : OrderedSet) {
Access* ac = ra_pair.second;
if (ac->m_type != AccessType::INS) {
continue;
} else {
rollbackCounter++;
RollbackInsert(ac);
m_accessMgr->IncreaseTableStat(ac->GetTxnRow()->GetTable());
}
}
uint32_t counter = rollbackCounter;
// Release local rows!
for (const auto& ra_pair : OrderedSet) {
Access* ac = ra_pair.second;
if (ac->m_type == AccessType::INS) {
Index* index_ = ac->GetSentinel()->GetIndex();
// Row is local and was not inserted in the commit
if (index_->GetIndexOrder() == IndexOrder::INDEX_ORDER_PRIMARY) {
// Release local row to the GC!!!!!
ac->GetTxnRow()->GetTable()->DestroyRow(ac->GetTxnRow());
}
rollbackCounter--;
}
if (!rollbackCounter) {
break;
}
}
}
RC TxnManager::RollbackInsert(Access* ac)
{
Sentinel* outputSen = nullptr;
RC rc;
Sentinel* sentinel = ac->GetSentinel();
Index* index_ = sentinel->GetIndex();
MOT_ASSERT(sentinel != nullptr);
rc = sentinel->RefCountUpdate(DEC, GetThdId());
MOT_ASSERT(rc != RC::RC_INDEX_RETRY_INSERT);
if (rc == RC::RC_INDEX_DELETE) {
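// note: despite the m_ prefix, this MaxKey is a stack local used only to
// rebuild the key for the index removal below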
MaxKey m_key;
// Memory reclamation needs to release the key from the primary sentinel back to the pool
m_key.InitKey(index_->GetKeyLength());
index_->BuildKey(ac->GetTxnRow()->GetTable(), ac->GetTxnRow(), &m_key);
MOT_ASSERT(sentinel->GetCounter() == 0);
#ifdef MOT_DEBUG
Sentinel* curr_sentinel = index_->IndexReadHeader(&m_key, GetThdId());
MOT_ASSERT(curr_sentinel == sentinel);
#endif
outputSen = index_->IndexRemove(&m_key, GetThdId());
MOT_ASSERT(outputSen != nullptr);
GcSessionRecordRcu(index_->GetIndexId(), outputSen, nullptr, index_->SentinelDtor, SENTINEL_SIZE);
}
return rc;
}
void TxnManager::RollbackSecondaryIndexInsert(Index* index)
{
if (m_isLightSession)
return;
TxnOrderedSet_t& access_row_set = m_accessMgr->GetOrderedRowSet();
TxnOrderedSet_t::iterator it = access_row_set.begin();
while (it != access_row_set.end()) {
Access* ac = it->second;
if (ac->m_type == INS && ac->GetSentinel()->GetIndex() == index) {
RollbackInsert(ac);
it = access_row_set.erase(it);
// need to perform index clean-up!
m_accessMgr->PubReleaseAccess(ac);
} else {
it++;
}
}
}
void TxnManager::RollbackDDLs()
{
// early exit
if (m_txnDdlAccess->Size() == 0)
return;
// rollback DDLs in reverse order (avoid rolling back parent object before rolling back child)
for (int i = m_txnDdlAccess->Size() - 1; i >= 0; i--) {
Index* index = nullptr;
Index** indexes = nullptr;
Table* table = nullptr;
TxnDDLAccess::DDLAccess* ddl_access = m_txnDdlAccess->Get(i);
switch (ddl_access->GetDDLAccessType()) {
case DDL_ACCESS_CREATE_TABLE:
table = (Table*)ddl_access->GetEntry();
MOT_LOG_INFO("Rollback of create table %s", table->GetLongTableName().c_str());
table->DropImpl();
RemoveTableFromStat(table);
if (table != nullptr)
delete table;
break;
case DDL_ACCESS_DROP_TABLE:
table = (Table*)ddl_access->GetEntry();
MOT_LOG_INFO("Rollback of drop table %s", table->GetLongTableName().c_str());
break;
case DDL_ACCESS_TRUNCATE_TABLE:
indexes = (Index**)ddl_access->GetEntry();
table = indexes[0]->GetTable();
MOT_LOG_INFO("Rollback of truncate table %s", table->GetLongTableName().c_str());
for (int idx = 0; idx < table->GetNumIndexes(); idx++) {
index = table->m_indexes[idx];
table->m_indexes[idx] = indexes[idx];
if (idx == 0)
table->m_primaryIndex = indexes[idx];
else
table->m_secondaryIndexes[indexes[idx]->GetName()] = indexes[idx];
GcManager::ClearIndexElements(index->GetIndexId());
index->Truncate(true);
delete index;
}
delete[] indexes;
break;
case DDL_ACCESS_CREATE_INDEX:
index = (Index*)ddl_access->GetEntry();
table = index->GetTable();
MOT_LOG_INFO("Rollback of create index %s for table %s",
index->GetName().c_str(),
table->GetLongTableName().c_str());
table->RemoveSecondaryIndex((char*)index->GetName().c_str(), this);
break;
case DDL_ACCESS_DROP_INDEX:
index = (Index*)ddl_access->GetEntry();
table = index->GetTable();
MOT_LOG_INFO("Rollback of drop index %s for table %s",
index->GetName().c_str(),
table->GetLongTableName().c_str());
break;
default:
break;
}
}
}
void TxnManager::WriteDDLChanges()
{
// early exit
if (m_txnDdlAccess->Size() == 0)
return;
Index* index = nullptr;
Index** indexes = nullptr;
Table* table = nullptr;
for (uint16_t i = 0; i < m_txnDdlAccess->Size(); i++) {
TxnDDLAccess::DDLAccess* ddl_access = m_txnDdlAccess->Get(i);
switch (ddl_access->GetDDLAccessType()) {
case DDL_ACCESS_CREATE_TABLE:
GetTableManager()->AddTable((Table*)ddl_access->GetEntry());
break;
case DDL_ACCESS_DROP_TABLE:
GetTableManager()->DropTable((Table*)ddl_access->GetEntry(), m_sessionContext);
break;
case DDL_ACCESS_TRUNCATE_TABLE:
indexes = (Index**)ddl_access->GetEntry();
table = indexes[0]->GetTable();
table->Lock();
table->m_rowCount = 0;
for (int i = 0; i < table->GetNumIndexes(); i++) {
index = indexes[i];
GcManager::ClearIndexElements(index->GetIndexId());
index->Truncate(true);
delete index;
}
table->Unlock();
delete[] indexes;
break;
case DDL_ACCESS_CREATE_INDEX:
((Index*)ddl_access->GetEntry())->SetIsCommited(true);
break;
case DDL_ACCESS_DROP_INDEX:
index = (Index*)ddl_access->GetEntry();
if (index->IsPrimaryKey())
break;
table = index->GetTable();
table->Lock();
table->RemoveSecondaryIndex((char*)index->GetName().c_str(), this);
table->Unlock();
break;
default:
break;
}
}
}
Row* TxnManager::RemoveRow(Row* row)
{
Table* table = row->GetTable();
Row* outputRow = nullptr;
if (row->GetStable() == nullptr) {
outputRow = table->RemoveRow(row, m_threadId, GetGcSession());
m_accessMgr->IncreaseTableStat(table);
} else {
outputRow = row;
}
return outputRow;
}
Row* TxnManager::RemoveKeyFromIndex(Row* row, Sentinel* sentinel)
{
Table* table = row->GetTable();
Row* outputRow = nullptr;
if (row->GetStable() == nullptr) {
outputRow = table->RemoveKeyFromIndex(row, sentinel, m_threadId, GetGcSession());
} else {
outputRow = row;
}
return outputRow;
}
RC TxnManager::RowDel()
{
return UpdateLastRowState(AccessType::DEL);
}
// Use this function when we have only the key!
// Not Used with FDW!
Row* TxnManager::RowLookupByKey(Table* const& table, const AccessType type, Key* const currentKey)
{
RC rc = RC_OK;
Row* originalRow = nullptr;
Sentinel* pSentinel = nullptr;
table->FindRow(currentKey, pSentinel, GetThdId());
if (pSentinel == nullptr) {
MOT_LOG_DEBUG("Cannot find key:%" PRIu64 " from table:%s", m_key, table->GetLongTableName().c_str());
return nullptr;
} else {
return RowLookup(type, pSentinel, rc);
}
}
TxnManager::TxnManager(SessionContext* session_context)
: m_latestEpoch(~uint64_t(0)),
m_threadId((uint64_t)-1),
m_connectionId((uint64_t)-1),
m_sessionContext(session_context),
m_redoLog(this),
m_occManager(),
m_gcSession(nullptr),
m_checkpointPhase(CheckpointPhase::NONE),
m_checkpointNABit(false),
m_csn(0),
m_transactionId(INVALID_TRANSACTIOIN_ID),
m_surrogateGen(0),
m_flushDone(false),
m_internalTransactionId(((uint64_t)m_sessionContext->GetSessionId()) << SESSION_ID_BITS),
m_internalStmtCount(0),
m_isolationLevel(READ_COMMITED),
m_failedCommitPrepared(false),
m_isLightSession(false),
m_errIx(nullptr),
m_err(RC_OK)
{
m_key = nullptr;
m_state = TxnState::TXN_START;
}
TxnManager::~TxnManager()
{
if (GetGlobalConfiguration().m_enableCheckpoint) {
GetCheckpointManager()->AbortTransaction(this);
}
if (m_state == MOT::TxnState::TXN_PREPARE) {
if (SavePreparedData() != RC_OK) {
MOT_LOG_ERROR("savePreparedData failed");
}
Cleanup();
}
MOT_LOG_DEBUG("txn_man::~txn_man - memory pools released for thread_id=%lu", m_threadId);
if (m_gcSession != nullptr) {
m_gcSession->GcCleanAll();
m_gcSession->RemoveFromGcList(m_gcSession);
m_gcSession->~GcManager();
MemSessionFree(m_gcSession);
}
delete m_key;
delete m_txnDdlAccess;
}
bool TxnManager::Init(uint64_t _thread_id, uint64_t connection_id, bool isLightTxn)
{
this->m_threadId = _thread_id;
this->m_connectionId = connection_id;
m_isLightSession = isLightTxn;
if (!m_occManager.Init())
return false;
m_txnDdlAccess = new (std::nothrow) TxnDDLAccess(this);
if (!m_txnDdlAccess) {
MOT_REPORT_ERROR(MOT_ERROR_OOM, "Initialize Transaction", "Failed to allocate memory for DDL access data");
return false;
}
m_txnDdlAccess->Init();
// make node-local allocations
if (isLightTxn == false) {
m_accessMgr = MemSessionAllocAlignedObjectPtr<TxnAccess>(L1_CACHE_LINE);
if (!m_accessMgr->Init(this))
return false;
m_surrogateGen = GetSurrogateKeyManager()->GetSurrogateSlot(connection_id);
}
m_gcSession = GcManager::Make(GcManager::GC_MAIN, _thread_id);
if (!m_gcSession)
return false;
if (!m_redoLog.Init())
return false;
static const TxnValidation validation_lock = GetGlobalConfiguration().m_validationLock;
if (m_key == nullptr) {
MOT_LOG_DEBUG("Init Key");
m_key = new (std::nothrow) MaxKey(MAX_KEY_SIZE);
if (m_key == nullptr)
MOTAbort();
}
m_occManager.SetPreAbort(GetGlobalConfiguration().m_preAbort);
if (validation_lock == TxnValidation::TXN_VALIDATION_NO_WAIT)
m_occManager.SetValidationNoWait(true);
else if (validation_lock == TxnValidation::TXN_VALIDATION_WAITING) {
m_occManager.SetValidationNoWait(false);
} else {
MOT_ASSERT(false);
}
return true;
}
RC TxnManager::OverwriteRow(Row* updatedRow, BitmapSet& modifiedColumns)
{
if (updatedRow == nullptr)
return RC_ERROR;
Access* access = m_accessMgr->GetLastAccess();
if (access->m_type == AccessType::WR)
access->m_modifiedColumns |= modifiedColumns;
return RC_OK;
}
void TxnManager::SetTxnState(TxnState envelopeState)
{
m_state = envelopeState;
}
void TxnManager::SetTxnIsoLevel(int envelopeIsoLevel)
{
m_isolationLevel = envelopeIsoLevel;
}
void TxnManager::GcSessionRecordRcu(
uint32_t index_id, void* object_ptr, void* object_pool, DestroyValueCbFunc cb, uint32_t obj_size)
{
return m_gcSession->GcRecordObject(index_id, object_ptr, object_pool, cb, obj_size);
}
TxnInsertAction::~TxnInsertAction()
{
if (m_manager != nullptr && m_insertSet != nullptr) {
MemSessionFree(m_insertSet);
}
}
bool TxnInsertAction::Init(TxnManager* _manager)
{
bool rc = true;
if (this->m_manager)
return false;
this->m_manager = _manager;
m_insertSetSize = 0;
m_insertArraySize = INSERT_ARRAY_DEFAULT_SIZE;
void* ptr = MemSessionAlloc(sizeof(InsItem) * m_insertArraySize);
if (ptr == nullptr)
return false;
m_insertSet = reinterpret_cast<InsItem*>(ptr);
for (uint64_t item = 0; item < m_insertArraySize; item++) {
new (&m_insertSet[item]) InsItem();
}
return rc;
}
void TxnInsertAction::ReportError(RC rc, InsItem* currentItem)
{
switch (rc) {
case RC_OK:
break;
case RC_UNIQUE_VIOLATION:
// set error
m_manager->m_err = RC_UNIQUE_VIOLATION;
m_manager->m_errIx = currentItem->m_index;
m_manager->m_errIx->BuildErrorMsg(currentItem->m_row->GetTable(),
currentItem->m_row,
m_manager->m_errMsgBuf,
sizeof(m_manager->m_errMsgBuf));
break;
case RC_MEMORY_ALLOCATION_ERROR:
SetLastError(MOT_ERROR_OOM, MOT_SEVERITY_ERROR);
break;
case RC_ILLEGAL_ROW_STATE:
SetLastError(MOT_ERROR_INTERNAL, MOT_SEVERITY_ERROR);
break;
default:
SetLastError(MOT_ERROR_INTERNAL, MOT_SEVERITY_ERROR);
break;
}
}
RC TxnInsertAction::ExecuteOptimisticInsert(Row* row)
{
Sentinel* pIndexInsertResult = nullptr;
Row* accessRow = nullptr;
RC rc = RC_OK;
bool isInserted = true;
bool isMappedToCache = false;
auto currentItem = BeginCursor();
/*
* 1. Add all sentinels to the access set, with type P_SENTINEL or S_SENTINEL
* 2. If a sentinel is committed - abort!
* 3. We perform the lookup directly on the sentinels
* 4. We do not attach the row to the index; we map it to the access
* 5. We can release the row in the case of an early abort, but only for the primary
* 6. No need to copy the row to the local_access
*/
while (currentItem != EndCursor()) {
isInserted = true;
isMappedToCache = false;
bool res = reinterpret_cast<Index*>(currentItem->m_index)
->IndexInsert(pIndexInsertResult, currentItem->m_key, m_manager->GetThdId(), rc);
if (unlikely(rc == RC_MEMORY_ALLOCATION_ERROR)) {
ReportError(rc);
// Failed on Memory
isInserted = false;
goto end;
} else {
if (currentItem->getIndexOrder() == IndexOrder::INDEX_ORDER_PRIMARY) {
row->SetAbsentRow();
row->SetPrimarySentinel(pIndexInsertResult);
MOT_ASSERT(row->IsAbsentRow());
}
}
if (pIndexInsertResult->IsCommited() == true) {
// Lets check and see if we deleted the row
Row* local_row = nullptr;
rc = m_manager->AccessLookup(RD, pIndexInsertResult, local_row);
switch (rc) {
case RC_LOCAL_ROW_DELETED:
// promote delete to Insert!
// In this case the sentinel is committed and the previous scenario was delete
// Insert succeeded Sentinel was not committed before!
// At this point the row is not in the cache and can be mapped!
MOT_ASSERT(currentItem->m_index->GetUnique() == true);
accessRow = m_manager->m_accessMgr->AddInsertToLocalAccess(pIndexInsertResult, row, rc, true);
if (accessRow != nullptr) {
isMappedToCache = true;
}
ReportError(rc, currentItem);
break;
case RC_LOCAL_ROW_NOT_FOUND:
// Header is committed
case RC_LOCAL_ROW_FOUND:
// Found But not deleted = self duplicated!
ReportError(RC_UNIQUE_VIOLATION, currentItem);
rc = RC_UNIQUE_VIOLATION;
goto end;
default:
break;
}
} else if (res == true or pIndexInsertResult->IsCommited() == false) {
// tag all the sentinels with the insert metadata
MOT_ASSERT(pIndexInsertResult->GetCounter() != 0);
// Insert succeeded Sentinel was not committed before!
accessRow = m_manager->m_accessMgr->AddInsertToLocalAccess(pIndexInsertResult, row, rc);
if (accessRow == nullptr) {
ReportError(rc, currentItem);
goto end;
}
isMappedToCache = true;
}
++currentItem;
}
end:
if (rc != RC_OK) {
// Clean the current aborted row and the secondary indexes that were not inserted
// Clean the first object - either primary or secondary!
// Return the local row to the pool for PI
Table* table = currentItem->m_row->GetTable();
if (currentItem->getIndexOrder() == IndexOrder::INDEX_ORDER_PRIMARY) {
table->DestroyRow(currentItem->m_row);
}
if (isInserted == true) {
if (isMappedToCache == false) {
RC rc = pIndexInsertResult->RefCountUpdate(DEC, m_manager->GetThdId());
Index* index_ = pIndexInsertResult->GetIndex();
if (rc == RC::RC_INDEX_DELETE) {
// Memory reclamation needs to release the key from the primary sentinel back to the pool
MOT_ASSERT(pIndexInsertResult->GetCounter() == 0);
Sentinel* outputSen = index_->IndexRemove(currentItem->m_key, m_manager->GetThdId());
MOT_ASSERT(outputSen != nullptr);
m_manager->GcSessionRecordRcu(
index_->GetIndexId(), outputSen, nullptr, index_->SentinelDtor, SENTINEL_SIZE);
m_manager->m_accessMgr->IncreaseTableStat(table);
}
}
}
}
// Clean keys
currentItem = BeginCursor();
while (currentItem < EndCursor()) {
m_manager->DestroyTxnKey(currentItem->m_key);
currentItem++;
}
// Clear the current set size;
m_insertSetSize = 0;
return rc;
}
bool TxnInsertAction::ReallocInsertSet()
{
bool rc = true;
uint64_t new_array_size = (uint64_t)m_insertArraySize * INSERT_ARRAY_EXTEND_FACTOR;
void* ptr = MemSessionAlloc(sizeof(InsItem) * new_array_size);
if (__builtin_expect(ptr == nullptr, 0)) {
MOT_LOG_ERROR("%s: failed", __func__);
MOT_ASSERT(ptr != nullptr);
return false;
}
errno_t erc = memset_s(ptr, sizeof(InsItem) * new_array_size, 0, sizeof(InsItem) * new_array_size);
securec_check(erc, "\0", "\0");
erc = memcpy_s(ptr, sizeof(InsItem) * new_array_size, m_insertSet, sizeof(InsItem) * m_insertArraySize);
securec_check(erc, "\0", "\0");
MemSessionFree(m_insertSet);
m_insertSet = reinterpret_cast<InsItem*>(ptr);
m_insertArraySize = new_array_size;
return rc;
}
void TxnInsertAction::ShrinkInsertSet()
{
uint64_t new_array_size = INSERT_ARRAY_DEFAULT_SIZE;
void* ptr = MemSessionAlloc(sizeof(InsItem) * new_array_size);
if (__builtin_expect(ptr == nullptr, 0)) {
MOT_LOG_ERROR("%s: failed", __func__);
return;
}
errno_t erc = memcpy_s(ptr, sizeof(InsItem) * new_array_size, m_insertSet, sizeof(InsItem) * new_array_size);
securec_check(erc, "\0", "\0");
MemSessionFree(m_insertSet);
m_insertSet = reinterpret_cast<InsItem*>(ptr);
m_insertArraySize = new_array_size;
}
/******************** DDL SUPPORT ********************/
Table* TxnManager::GetTableByExternalId(uint64_t id)
{
TxnDDLAccess::DDLAccess* ddl_access = m_txnDdlAccess->GetByOid(id);
if (ddl_access != nullptr) {
switch (ddl_access->GetDDLAccessType()) {
case DDL_ACCESS_CREATE_TABLE:
return (Table*)ddl_access->GetEntry();
case DDL_ACCESS_DROP_TABLE:
return nullptr;
default:
break;
}
}
return GetTableManager()->GetTableByExternal(id);
}
Index* TxnManager::GetIndexByExternalId(uint64_t table_id, uint64_t index_id)
{
TxnDDLAccess::DDLAccess* ddl_access = m_txnDdlAccess->GetByOid(index_id);
if (ddl_access != nullptr) {
switch (ddl_access->GetDDLAccessType()) {
case DDL_ACCESS_CREATE_INDEX:
return (Index*)ddl_access->GetEntry();
case DDL_ACCESS_DROP_INDEX:
return nullptr;
default:
break;
}
}
Table* table = GetTableManager()->GetTableByExternal(table_id);
if (table == nullptr) {
return nullptr;
} else {
return table->GetIndexByExtId(index_id);
}
}
Index* TxnManager::GetIndex(uint64_t table_id, uint16_t position)
{
TxnDDLAccess::DDLAccess* ddl_access = m_txnDdlAccess->GetByOid(table_id);
Table* table = GetTableByExternalId(table_id);
return GetIndex(table, position);
}
Index* TxnManager::GetIndex(Table* table, uint16_t position)
{
MOT_ASSERT(table != nullptr);
Index* index = table->GetIndex(position);
MOT_ASSERT(index != nullptr);
TxnDDLAccess::DDLAccess* ddl_access = m_txnDdlAccess->GetByOid(index->GetExtId());
if (ddl_access != nullptr) {
switch (ddl_access->GetDDLAccessType()) {
case DDL_ACCESS_CREATE_INDEX:
return index;
case DDL_ACCESS_DROP_INDEX:
return nullptr;
default:
// should print error, the only index operation which are supported
// are create and drop
return nullptr;
}
} else {
if (index->GetIsCommited())
return index;
else
return nullptr;
}
}
RC TxnManager::CreateTable(Table* table)
{
TxnDDLAccess::DDLAccess* ddl_access =
new (std::nothrow) TxnDDLAccess::DDLAccess(table->GetTableExId(), DDL_ACCESS_CREATE_TABLE, (void*)table);
if (ddl_access == nullptr) {
MOT_REPORT_ERROR(MOT_ERROR_OOM, "Create Table", "Failed to allocate memory for DDL Access object");
return RC_MEMORY_ALLOCATION_ERROR;
}
m_txnDdlAccess->Add(ddl_access);
return RC_OK;
}
RC TxnManager::DropTable(Table* table)
{
RC res = RC_OK;
// we allocate all memory before action takes place, so that if memory allocation fails, we can report error safely
TxnDDLAccess::DDLAccess* new_ddl_access = nullptr;
TxnDDLAccess::DDLAccess* ddl_access = m_txnDdlAccess->GetByOid(table->GetTableExId());
if ((ddl_access == nullptr) || (ddl_access->GetDDLAccessType() == DDL_ACCESS_TRUNCATE_TABLE)) {
new_ddl_access =
new (std::nothrow) TxnDDLAccess::DDLAccess(table->GetTableExId(), DDL_ACCESS_DROP_TABLE, (void*)table);
if (new_ddl_access == nullptr) {
MOT_REPORT_ERROR(MOT_ERROR_OOM, "Drop Table", "Failed to allocate DDL Access object");
return RC_MEMORY_ALLOCATION_ERROR;
}
}
if (!m_isLightSession) {
TxnOrderedSet_t& access_row_set = m_accessMgr->GetOrderedRowSet();
TxnOrderedSet_t::iterator it = access_row_set.begin();
while (it != access_row_set.end()) {
Access* ac = it->second;
if (ac->GetTxnRow()->GetTable() == table) {
if (ac->m_type == INS)
RollbackInsert(ac);
it = access_row_set.erase(it);
// need to perform index clean-up!
m_accessMgr->PubReleaseAccess(ac);
} else {
it++;
}
}
}
if (ddl_access != nullptr) {
if (ddl_access->GetDDLAccessType() == DDL_ACCESS_CREATE_TABLE) {
// this table was created in this transaction, can delete it from the ddl_access
m_txnDdlAccess->EraseByOid(table->GetTableExId());
table->DropImpl();
RemoveTableFromStat(table);
delete table;
} else if (ddl_access->GetDDLAccessType() == DDL_ACCESS_TRUNCATE_TABLE) {
Index** indexes = (Index**)ddl_access->GetEntry();
for (int i = 0; i < table->GetNumIndexes(); i++) {
Index* newIndex = table->m_indexes[i];
Index* oldIndex = indexes[i];
table->m_indexes[i] = oldIndex;
if (i != 0)
table->m_secondaryIndexes[oldIndex->GetName()] = oldIndex;
else
table->m_primaryIndex = oldIndex;
// need to check whether memory should be released in a different way? GC?
// assumption is that we deleted all rows
delete newIndex;
}
delete[] indexes;
m_txnDdlAccess->EraseByOid(table->GetTableExId());
m_txnDdlAccess->Add(new_ddl_access);
}
} else {
m_txnDdlAccess->Add(new_ddl_access);
}
return RC_OK;
}
RC TxnManager::TruncateTable(Table* table)
{
RC res = RC_OK;
if (m_isLightSession) // really?
return res;
TxnDDLAccess::DDLAccess* ddl_access = m_txnDdlAccess->GetByOid(table->GetTableExId());
if (ddl_access != nullptr) {
// must be create table or truncate table
MOT_ASSERT(ddl_access->GetDDLAccessType() == DDL_ACCESS_CREATE_TABLE ||
ddl_access->GetDDLAccessType() == DDL_ACCESS_TRUNCATE_TABLE);
// this is a table that we created or truncated before, should remove all the rows
// belonging to this table from the access and continue
TxnOrderedSet_t& access_row_set = m_accessMgr->GetOrderedRowSet();
TxnOrderedSet_t::iterator it = access_row_set.begin();
while (it != access_row_set.end()) {
Access* ac = it->second;
if (ac->GetTxnRow()->GetTable() == table) {
if (ac->m_type == INS)
RollbackInsert(ac);
it = access_row_set.erase(it);
// need to perform index clean-up!
m_accessMgr->PubReleaseAccess(ac);
} else {
it++;
}
}
} else {
Index** indexes = nullptr;
indexes = new (std::nothrow) Index*[MAX_NUM_INDEXES];
if (indexes == nullptr) {
// print error, could not allocate memory
MOT_REPORT_ERROR(MOT_ERROR_OOM,
"Truncate Table",
"Failed to allocate memory for %u index objects",
(unsigned)MAX_NUM_INDEXES);
return RC_MEMORY_ALLOCATION_ERROR;
}
// allocate DDL before work and fail immediately if required
ddl_access = new (std::nothrow)
TxnDDLAccess::DDLAccess(table->GetTableExId(), DDL_ACCESS_TRUNCATE_TABLE, (void*)indexes);
if (ddl_access == nullptr) {
MOT_REPORT_ERROR(MOT_ERROR_OOM, "Truncate Table", "Failed to allocate memory for DDL Access object");
delete[] indexes;
return RC_MEMORY_ALLOCATION_ERROR;
}
for (int i = 0; i < table->GetNumIndexes(); i++) {
Index* index_copy = table->GetIndex(i)->CloneEmpty();
if (index_copy == nullptr) {
// print error, could not allocate memory for index
MOT_REPORT_ERROR(MOT_ERROR_OOM,
"Truncate Table",
"Failed to clone empty index %s",
table->GetIndex(i)->GetName().c_str());
for (int j = 0; j < i; j++) {
// cleanup of previous created indexes copy
Index* newIndex = table->m_indexes[j];
Index* oldIndex = indexes[j];
table->m_indexes[j] = oldIndex;
if (j != 0) // is secondary
table->m_secondaryIndexes[oldIndex->GetName()] = oldIndex;
else // is primary
table->m_primaryIndex = oldIndex;
delete newIndex;
}
delete ddl_access;
return RC_MEMORY_ALLOCATION_ERROR;
}
indexes[i] = table->GetIndex(i);
table->m_indexes[i] = index_copy;
if (i != 0) // is secondary
table->m_secondaryIndexes[index_copy->GetName()] = index_copy;
else // is primary
table->m_primaryIndex = index_copy;
}
m_txnDdlAccess->Add(ddl_access);
}
return res;
}
RC TxnManager::CreateIndex(Table* table, Index* index, bool is_primary)
{
// allocate DDL before work and fail immediately if required
TxnDDLAccess::DDLAccess* ddl_access =
new (std::nothrow) TxnDDLAccess::DDLAccess(index->GetExtId(), DDL_ACCESS_CREATE_INDEX, (void*)index);
if (ddl_access == nullptr) {
MOT_REPORT_ERROR(MOT_ERROR_OOM, "Create Index", "Failed to allocate DDL Access object");
return RC_MEMORY_ALLOCATION_ERROR;
}
if (is_primary) {
table->UpdatePrimaryIndex((MOT::Index*)index);
} else {
// currently we are still adding the index to the table although
// it should only be added on successful commit. Assuming that if
// a client did a create index, all other clients are waiting on a lock
// until the changes are either committed or aborted
table->Lock(); // for concurrent access
if (table->GetNumIndexes() == MAX_NUM_INDEXES) {
table->Unlock();
MOT_REPORT_ERROR(MOT_ERROR_RESOURCE_LIMIT,
"Create Index",
"Cannot create index in table %s: reached limit of %u indices per table",
table->GetLongTableName().c_str(),
(unsigned)MAX_NUM_INDEXES);
delete ddl_access;
return RC_TABLE_EXCEEDS_MAX_INDEXES;
}
if (!table->AddSecondaryIndex(index->GetName(), index, this, m_threadId)) {
table->Unlock();
if (MOT_IS_OOM()) { // do not report error in "unique violation" scenario
MOT_REPORT_ERROR(
MOT_ERROR_INTERNAL, "Create Index", "Failed to add secondary index %s", index->GetName().c_str());
}
delete ddl_access;
return m_err;
}
table->Unlock();
}
m_txnDdlAccess->Add(ddl_access);
return RC_OK;
}
RC TxnManager::DropIndex(Index* index)
{
// allocate DDL before work and fail immediately if required
TxnDDLAccess::DDLAccess* new_ddl_access = nullptr;
TxnDDLAccess::DDLAccess* ddl_access = m_txnDdlAccess->GetByOid(index->GetExtId());
if (ddl_access == nullptr) {
new_ddl_access =
new (std::nothrow) TxnDDLAccess::DDLAccess(index->GetExtId(), DDL_ACCESS_DROP_INDEX, (void*)index);
if (new_ddl_access == nullptr) {
MOT_REPORT_ERROR(MOT_ERROR_OOM, "Drop Index", "Failed to allocate DDL Access object");
return RC_MEMORY_ALLOCATION_ERROR;
}
}
RC res = RC_OK;
Table* table = index->GetTable();
RollbackSecondaryIndexInsert(index);
if (ddl_access != nullptr) {
// this index was created in this transaction, so it can be deleted from the ddl_access
// table->RemoveSecondaryIndex also releases the index object
m_txnDdlAccess->EraseByOid(index->GetExtId());
res = table->RemoveSecondaryIndex((char*)index->GetName().c_str(), this);
if (res != RC_OK) {
// print Error
MOT_REPORT_ERROR(MOT_ERROR_INTERNAL, "Drop Index", "Failed to remove secondary index");
return res;
}
} else {
m_txnDdlAccess->Add(new_ddl_access);
}
return res;
}
RC TxnManager::SavePreparedData()
{
m_redoLog.Reset();
SetFailedCommitPrepared(true);
if (m_redoLog.PrepareToInProcessTxns() != RC_OK) {
MOT_LOG_ERROR("PrepareToInProcessTxns failed [%lu]", m_transactionId);
return RC_ERROR;
}
if (MOT::GetRecoveryManager()->ApplyInProcessTransaction(m_internalTransactionId) != RC_OK) {
MOT_LOG_ERROR("ApplyInProcessTransaction failed [%lu]", m_transactionId);
return RC_ERROR;
}
if (m_transactionId != INVALID_TRANSACTIOIN_ID) {
MOT_LOG_DEBUG("mapping ext txid %lu to %lu", m_transactionId, m_internalTransactionId);
MOT::GetRecoveryManager()->UpdateTxIdMap(m_internalTransactionId, m_transactionId);
}
return RC_OK;
}
} // namespace MOT
|
phatblat/macOSPrivateFrameworks
|
PrivateFrameworks/Intents/INRunWorkflowWorkflowResolutionResult.h
|
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import <Intents/INSpeakableStringResolutionResult.h>
@interface INRunWorkflowWorkflowResolutionResult : INSpeakableStringResolutionResult
{
}
+ (id)unsupportedForReason:(long long)arg1;
- (id)_buildIntentSlotResolutionResultWithIntentSlotDescription:(id)arg1;
- (id)_initWithIntentSlotResolutionResult:(id)arg1 slotDescription:(id)arg2;
- (id)initWithSpeakableStringResolutionResult:(id)arg1;
@end
|
wq907547122/design23-demo
|
src/main/java/com/wu/qiang/factory/method/animalExample/CattleFarm.java
|
package com.wu.qiang.factory.method.animalExample;
/**
* @author wq on 2019/12/6 16:06
**/
// Concrete factory: a cattle farm
public class CattleFarm implements AnimalFarm
{
public Animal newAnimal()
{
System.out.println("新牛出生!");
return new Cattle();
}
}
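// Usage sketch (relies on the Animal/AnimalFarm types from this example package):
//   AnimalFarm farm = new CattleFarm();
//   Animal calf = farm.newAnimal();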
|
gublan24/umpleSPLFull
|
testbed/src-gen-umple/cruise/associations/specializations/Spam.java
|
/*PLEASE DO NOT EDIT THIS CODE*/
/*This code was generated using the UMPLE 1.31.1.5860.78bb27cc6 modeling language!*/
package cruise.associations.specializations;
import java.util.*;
/**
* Many down to N (and 1) [11]
*/
// line 138 "../../../../src/TestHarnessAssociationSpecializations.ump"
public class Spam
{
//------------------------
// MEMBER VARIABLES
//------------------------
//Spam Associations
private List<Drama> drama;
//------------------------
// CONSTRUCTOR
//------------------------
public Spam()
{
drama = new ArrayList<Drama>();
}
//------------------------
// INTERFACE
//------------------------
/* Code from template association_GetMany */
public Drama getDrama(int index)
{
Drama aDrama = drama.get(index);
return aDrama;
}
public List<Drama> getDrama()
{
List<Drama> newDrama = Collections.unmodifiableList(drama);
return newDrama;
}
public int numberOfDrama()
{
int number = drama.size();
return number;
}
public boolean hasDrama()
{
boolean has = drama.size() > 0;
return has;
}
public int indexOfDrama(Drama aDrama)
{
int index = drama.indexOf(aDrama);
return index;
}
/* Code from template association_GetMany_clear */
protected void clear_drama()
{
drama.clear();
}
/* Code from template association_MinimumNumberOfMethod */
public static int minimumNumberOfDrama()
{
return 0;
}
/* Code from template association_AddManyToManyMethod */
public boolean addDrama(Drama aDrama)
{
boolean wasAdded = false;
if (drama.contains(aDrama)) { return false; }
drama.add(aDrama);
if (aDrama.indexOfSpam(this) != -1)
{
wasAdded = true;
}
else
{
wasAdded = aDrama.addSpam(this);
if (!wasAdded)
{
drama.remove(aDrama);
}
}
return wasAdded;
}
/* Code from template association_RemoveMany */
public boolean removeDrama(Drama aDrama)
{
boolean wasRemoved = false;
if (!drama.contains(aDrama))
{
return wasRemoved;
}
int oldIndex = drama.indexOf(aDrama);
drama.remove(oldIndex);
if (aDrama.indexOfSpam(this) == -1)
{
wasRemoved = true;
}
else
{
wasRemoved = aDrama.removeSpam(this);
if (!wasRemoved)
{
drama.add(oldIndex,aDrama);
}
}
return wasRemoved;
}
/* Code from template association_AddIndexControlFunctions */
public boolean addDramaAt(Drama aDrama, int index)
{
boolean wasAdded = false;
if(addDrama(aDrama))
{
if(index < 0 ) { index = 0; }
if(index > numberOfDrama()) { index = numberOfDrama() - 1; }
drama.remove(aDrama);
drama.add(index, aDrama);
wasAdded = true;
}
return wasAdded;
}
public boolean addOrMoveDramaAt(Drama aDrama, int index)
{
boolean wasAdded = false;
if(drama.contains(aDrama))
{
if(index < 0 ) { index = 0; }
if(index > numberOfDrama()) { index = numberOfDrama() - 1; }
drama.remove(aDrama);
drama.add(index, aDrama);
wasAdded = true;
}
else
{
wasAdded = addDramaAt(aDrama, index);
}
return wasAdded;
}
public void delete()
{
ArrayList<Drama> copyOfDrama = new ArrayList<Drama>(drama);
drama.clear();
for(Drama aDrama : copyOfDrama)
{
aDrama.removeSpam(this);
}
}
}
|
veltri/DLV2
|
tests/parser/query.08.test.py
|
input = """
c :- b.
c?
"""
output = """
c :- b.
c?
"""
|
simplay/Bachelor-Thesis
|
scene/src/Util/Observer.java
|
package Util;
import java.util.Iterator;
import java.util.LinkedList;
import Util.Subscriber;
public abstract class Observer {
protected LinkedList<Subscriber> subscriber;
public Observer(){
this.subscriber = new LinkedList<Subscriber>();
}
public void subscribe(Subscriber subscriber){
this.subscriber.add(subscriber);
}
public boolean unsubscribe(Subscriber subscriber){
boolean success = false;
// use an explicit iterator so that removing during traversal does not
// throw a ConcurrentModificationException
Iterator<Subscriber> it = this.subscriber.iterator();
while(it.hasNext()){
if(it.next().equals(subscriber)){
it.remove();
success = true;
}
}
return success;
}
public void notifyObservers() {
for(Subscriber s : this.subscriber){
s.handleEvent();
}
}
}
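// Usage sketch (hypothetical; Subscriber is assumed to declare handleEvent()):
//   Observer feed = new Observer() {};
//   feed.subscribe(mySubscriber);
//   feed.notifyObservers(); // invokes handleEvent() on every subscriber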
|
edawson/parliament2
|
resources/home/dnanexus/root/include/TDecompQRH.h
|
// @(#)root/matrix:$Id$
// Authors: <NAME>, <NAME> Dec 2003
/*************************************************************************
* Copyright (C) 1995-2000, <NAME> and <NAME>. *
* All rights reserved. *
* *
* For the licensing terms see $ROOTSYS/LICENSE. *
* For the list of contributors see $ROOTSYS/README/CREDITS. *
*************************************************************************/
#ifndef ROOT_TDecompQRH
#define ROOT_TDecompQRH
///////////////////////////////////////////////////////////////////////////
// //
// QR Decomposition class //
// //
///////////////////////////////////////////////////////////////////////////
#include "TDecompBase.h"
#include "Rtypes.h"
class TDecompQRH : public TDecompBase
{
protected :
// A = fQ fR H (m x n) matrix
TMatrixD fQ; // (m x n) - orthogonal matrix
TMatrixD fR; // (n x n) - upper triangular matrix
TVectorD fUp; // (n) - vector with Householder up's
TVectorD fW; // (n) - vector with Householder beta's
static Bool_t QRH(TMatrixD &q,TVectorD &diagR,TVectorD &up,TVectorD &w,Double_t tol);
virtual const TMatrixDBase &GetDecompMatrix() const { return fR; }
public :
enum {kWorkMax = 100}; // size of work array
TDecompQRH() {}
TDecompQRH(Int_t nrows,Int_t ncols);
TDecompQRH(Int_t row_lwb,Int_t row_upb,Int_t col_lwb,Int_t col_upb);
TDecompQRH(const TMatrixD &m,Double_t tol = 0.0); // be careful for slicing in operator=
TDecompQRH(const TDecompQRH &another);
virtual ~TDecompQRH() {}
virtual Int_t GetNrows () const { return fQ.GetNrows(); }
virtual Int_t GetNcols () const { return fQ.GetNcols(); }
virtual const TMatrixD &GetQ () { if ( !TestBit(kDecomposed) ) Decompose();
return fQ; }
virtual const TMatrixD &GetR () { if ( !TestBit(kDecomposed) ) Decompose();
return fR; }
virtual const TVectorD &GetUp () { if ( !TestBit(kDecomposed) ) Decompose();
return fUp; }
virtual const TVectorD &GetW () { if ( !TestBit(kDecomposed) ) Decompose();
return fW; }
virtual void SetMatrix(const TMatrixD &a);
virtual Bool_t Decompose ();
virtual Bool_t Solve ( TVectorD &b);
virtual TVectorD Solve (const TVectorD& b,Bool_t &ok) { TVectorD x = b; ok = Solve(x); return x; }
virtual Bool_t Solve ( TMatrixDColumn &b);
virtual Bool_t TransSolve ( TVectorD &b);
virtual TVectorD TransSolve (const TVectorD& b,Bool_t &ok) { TVectorD x = b; ok = TransSolve(x); return x; }
virtual Bool_t TransSolve ( TMatrixDColumn &b);
virtual void Det (Double_t &d1,Double_t &d2);
Bool_t Invert (TMatrixD &inv);
TMatrixD Invert (Bool_t &status);
TMatrixD Invert () { Bool_t status; return Invert(status); }
void Print(Option_t *opt ="") const; // *MENU*
TDecompQRH &operator= (const TDecompQRH &source);
ClassDef(TDecompQRH,1) // Matrix Decomposition QRH
};
#endif
|
mgd-hin/systemds
|
src/main/java/org/apache/sysds/runtime/instructions/fed/AggregateUnaryFEDInstruction.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.runtime.instructions.fed;
import java.util.concurrent.Future;
import org.apache.sysds.common.Types.ExecType;
import org.apache.sysds.runtime.DMLRuntimeException;
import org.apache.sysds.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysds.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysds.runtime.controlprogram.federated.FederatedRange;
import org.apache.sysds.runtime.controlprogram.federated.FederatedRequest;
import org.apache.sysds.runtime.controlprogram.federated.FederatedRequest.RequestType;
import org.apache.sysds.runtime.controlprogram.federated.FederatedResponse;
import org.apache.sysds.runtime.controlprogram.federated.FederationMap;
import org.apache.sysds.runtime.controlprogram.federated.FederationUtils;
import org.apache.sysds.runtime.instructions.InstructionUtils;
import org.apache.sysds.runtime.instructions.cp.CPOperand;
import org.apache.sysds.runtime.instructions.cp.ScalarObject;
import org.apache.sysds.runtime.matrix.operators.AggregateUnaryOperator;
import org.apache.sysds.runtime.matrix.operators.Operator;
import org.apache.sysds.runtime.meta.DataCharacteristics;
import org.apache.sysds.runtime.meta.MatrixCharacteristics;
public class AggregateUnaryFEDInstruction extends UnaryFEDInstruction {
private AggregateUnaryFEDInstruction(AggregateUnaryOperator auop,
CPOperand in, CPOperand out, String opcode, String istr, FederatedOutput fedOut)
{
super(FEDType.AggregateUnary, auop, in, out, opcode, istr, fedOut);
}
protected AggregateUnaryFEDInstruction(Operator op,
CPOperand in1, CPOperand in2, CPOperand out, String opcode, String istr, FederatedOutput fedOut)
{
super(FEDType.AggregateUnary, op, in1, in2, out, opcode, istr, fedOut);
}
protected AggregateUnaryFEDInstruction(Operator op,
CPOperand in1, CPOperand in2, CPOperand out, String opcode, String istr)
{
super(FEDType.AggregateUnary, op, in1, in2, out, opcode, istr);
}
protected AggregateUnaryFEDInstruction(Operator op, CPOperand in1,
CPOperand in2, CPOperand in3, CPOperand out, String opcode, String istr)
{
super(FEDType.AggregateUnary, op, in1, in2, in3, out, opcode, istr);
}
public static AggregateUnaryFEDInstruction parseInstruction(String str) {
String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
String opcode = parts[0];
CPOperand in1 = new CPOperand(parts[1]);
CPOperand out = new CPOperand(parts[2]);
AggregateUnaryOperator aggun = null;
if(opcode.equalsIgnoreCase("uarimax") || opcode.equalsIgnoreCase("uarimin"))
if(InstructionUtils.getExecType(str) == ExecType.SPARK)
aggun = InstructionUtils.parseAggregateUnaryRowIndexOperator(opcode, 1, 1);
else
aggun = InstructionUtils.parseAggregateUnaryRowIndexOperator(opcode, Integer.parseInt(parts[4]), 1);
else
aggun = InstructionUtils.parseBasicAggregateUnaryOperator(opcode);
FederatedOutput fedOut = null;
if ( parts.length == 5 && !parts[4].equals("uarimin") && !parts[4].equals("uarimax") )
fedOut = FederatedOutput.valueOf(parts[4]);
else
fedOut = FederatedOutput.valueOf(parts[5]);
return new AggregateUnaryFEDInstruction(aggun, in1, out, opcode, str, fedOut);
}
@Override
public void processInstruction(ExecutionContext ec) {
if (getOpcode().contains("var")) {
processVar(ec);
} else {
processDefault(ec);
}
}
private void processDefault(ExecutionContext ec){
AggregateUnaryOperator aop = (AggregateUnaryOperator) _optr;
MatrixObject in = ec.getMatrixObject(input1);
FederationMap map = in.getFedMapping();
if((instOpcode.equalsIgnoreCase("uarimax") || instOpcode.equalsIgnoreCase("uarimin")) && in.isFederated(FederationMap.FType.COL))
instString = InstructionUtils.replaceOperand(instString, 5, "2");
// create federated commands for aggregation
// (by default obtain the output, even though it is unnecessary for row aggregates)
if ( _fedOut.isForcedFederated() )
if(instString.startsWith("SPARK"))
processFederatedSPOutput(map, in, ec, aop);
else
processFederatedOutput(map, in, ec);
else {
if(instString.startsWith("SPARK"))
processGetSPOutput(map, in, ec, aop);
else
processGetOutput(map, aop, ec, in);
}
}
/**
* Sends federated request with instruction without retrieving the result from the workers.
* @param map federation map of the input
* @param in input matrix object
* @param ec execution context
*/
private void processFederatedOutput(FederationMap map, MatrixObject in, ExecutionContext ec){
if ( output.isScalar() )
throw new DMLRuntimeException("Output of FED instruction, " + output.toString()
+ ", is a scalar and the output is set to be federated. Scalars cannot be federated. ");
FederatedRequest fr1 = FederationUtils.callInstruction(instString, output,
new CPOperand[]{input1}, new long[]{in.getFedMapping().getID()}, true);
map.execute(getTID(), fr1);
MatrixObject out = ec.getMatrixObject(output);
deriveNewOutputFedMapping(in, out, fr1);
}
/**
* Set output fed mapping based on federated partitioning and aggregation type.
* @param in matrix object from which fed partitioning originates from
* @param out matrix object holding the dimensions of the instruction output
* @param fr1 federated request holding the instruction execution call
*/
private void deriveNewOutputFedMapping(MatrixObject in, MatrixObject out, FederatedRequest fr1){
//Get agg type
if ( !(instOpcode.equals("uack+") || instOpcode.equals("uark+")) )
throw new DMLRuntimeException("Operation " + instOpcode + " is unknown to FOUT processing");
boolean isColAgg = instOpcode.equals("uack+");
//Get partition type
FederationMap.FType inFtype = in.getFedMapping().getType();
//Get fedmap from in
FederationMap inputFedMapCopy = in.getFedMapping().copyWithNewID(fr1.getID());
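// Worked example (illustrative only): for a 100x10 input row-partitioned across
// two workers as rows [0,50) and [50,100), a row aggregation (uark+) yields a
// 100x1 output, so each range keeps its row split and the column end is set to 1
// (FType.ROW); a column aggregation (uack+) instead leaves a 1x10 partial result
// on every worker, i.e. fully overlapping ranges (FType.PART).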
//if partition type is row and aggregation type is row
// then get row dim split from input and use as row dimension and get col dimension from output col dimension
// and set FType to ROW
if ( inFtype.isRowPartitioned() && !isColAgg ){
for ( FederatedRange range : inputFedMapCopy.getFederatedRanges() )
range.setEndDim(1,out.getNumColumns());
inputFedMapCopy.setType(FederationMap.FType.ROW);
}
//if partition type is row and aggregation type is col
// then get row and col dimension from out and use those dimensions for both federated workers
// and set FType to PART
//if partition type is col and aggregation type is row
// then set row and col dimension from out and use those dimensions for both federated workers
// and set FType to PART
if ( (inFtype.isRowPartitioned() && isColAgg) || (inFtype.isColPartitioned() && !isColAgg) ){
for ( FederatedRange range : inputFedMapCopy.getFederatedRanges() ){
range.setBeginDim(0,0);
range.setBeginDim(1,0);
range.setEndDim(0,out.getNumRows());
range.setEndDim(1,out.getNumColumns());
}
inputFedMapCopy.setType(FederationMap.FType.PART);
}
//if partition type is col and aggregation type is col
// then set row dimension to output and col dimension to in col split
// and set FType to COL
if ( inFtype.isColPartitioned() && isColAgg ){
for ( FederatedRange range : inputFedMapCopy.getFederatedRanges() )
range.setEndDim(0,out.getNumRows());
inputFedMapCopy.setType(FederationMap.FType.COL);
}
//set out fedmap in the end
out.setFedMapping(inputFedMapCopy);
}
/**
* Sends federated request with instruction and retrieves the result from the workers.
* @param map federation map of input
* @param aggUOptr aggregate unary operator of the instruction
* @param ec execution context
* @param in input matrix object
*/
private void processGetOutput(FederationMap map, AggregateUnaryOperator aggUOptr, ExecutionContext ec, MatrixObject in){
FederatedRequest fr1 = FederationUtils.callInstruction(instString, output,
new CPOperand[]{input1}, new long[]{in.getFedMapping().getID()}, true);
FederatedRequest fr2 = new FederatedRequest(RequestType.GET_VAR, fr1.getID());
FederatedRequest fr3 = map.cleanup(getTID(), fr1.getID());
//execute federated commands and cleanups
Future<FederatedResponse>[] tmp = map.execute(getTID(), fr1, fr2, fr3);
if( output.isScalar() )
ec.setVariable(output.getName(), FederationUtils.aggScalar(aggUOptr, tmp, map));
else
ec.setMatrixOutput(output.getName(), FederationUtils.aggMatrix(aggUOptr, tmp, map));
}
private void processVar(ExecutionContext ec){
if ( _fedOut.isForcedFederated() ){
throw new DMLRuntimeException("Output of " + toString() + " should not be federated "
+ "since the instruction requires consolidation of partial results to be computed.");
}
boolean isSpark = instString.startsWith("SPARK");
AggregateUnaryOperator aop = (AggregateUnaryOperator) _optr;
MatrixObject in = ec.getMatrixObject(input1);
FederationMap map = in.getFedMapping();
long id = FederationUtils.getNextFedDataID();
FederatedRequest tmpRequest = null;
if(isSpark) {
if ( output.isScalar() ) {
ScalarObject scalarOut = ec.getScalarInput(output);
tmpRequest = map.broadcast(scalarOut);
id = tmpRequest.getID();
}
else {
if((map.getType() == FederationMap.FType.COL && aop.isColAggregate()) || (map.getType() == FederationMap.FType.ROW && aop.isRowAggregate()))
tmpRequest = new FederatedRequest(RequestType.PUT_VAR, id, new MatrixCharacteristics(-1, -1), in.getDataType());
else {
DataCharacteristics dc = ec.getDataCharacteristics(output.getName());
tmpRequest = new FederatedRequest(RequestType.PUT_VAR, id, dc, in.getDataType());
}
}
}
// compute federated partial means, required for variance aggregation
// (variance cannot be combined from partial variances alone)
Future<FederatedResponse>[] meanTmp = null;
if (getOpcode().contains("var")) {
String meanInstr = instString.replace(getOpcode(), getOpcode().replace("var", "mean"));
//create federated commands for aggregation
FederatedRequest meanFr1 = FederationUtils.callInstruction(meanInstr, output, id,
new CPOperand[]{input1}, new long[]{in.getFedMapping().getID()}, isSpark ? ExecType.SPARK : ExecType.CP, isSpark);
FederatedRequest meanFr2 = new FederatedRequest(RequestType.GET_VAR, meanFr1.getID());
FederatedRequest meanFr3 = map.cleanup(getTID(), meanFr1.getID());
meanTmp = map.execute(getTID(), isSpark ? new FederatedRequest[] {tmpRequest, meanFr1, meanFr2, meanFr3} : new FederatedRequest[] {meanFr1, meanFr2, meanFr3});
}
//create federated commands for aggregation
FederatedRequest fr1 = FederationUtils.callInstruction(instString, output, id,
new CPOperand[]{input1}, new long[]{in.getFedMapping().getID()}, isSpark ? ExecType.SPARK : ExecType.CP, isSpark);
FederatedRequest fr2 = new FederatedRequest(RequestType.GET_VAR, fr1.getID());
FederatedRequest fr3 = map.cleanup(getTID(), fr1.getID());
//execute federated commands and cleanups
Future<FederatedResponse>[] tmp = map.execute(getTID(), isSpark ? new FederatedRequest[] {tmpRequest, fr1, fr2, fr3} : new FederatedRequest[] { fr1, fr2, fr3});
if( output.isScalar() )
ec.setVariable(output.getName(), FederationUtils.aggScalar(aop, tmp, meanTmp, map));
else
ec.setMatrixOutput(output.getName(), FederationUtils.aggMatrix(aop, tmp, meanTmp, map));
}
private void processFederatedSPOutput(FederationMap map, MatrixObject in, ExecutionContext ec, AggregateUnaryOperator aop) {
DataCharacteristics dc = ec.getDataCharacteristics(output.getName());
FederatedRequest fr1;
long id = FederationUtils.getNextFedDataID();
if((map.getType() == FederationMap.FType.COL && aop.isColAggregate()) ||
(map.getType() == FederationMap.FType.ROW && aop.isRowAggregate()))
fr1 = new FederatedRequest(RequestType.PUT_VAR, id, new MatrixCharacteristics(-1, -1), in.getDataType());
else
fr1 = new FederatedRequest(RequestType.PUT_VAR, id, dc, in.getDataType());
FederatedRequest fr2 = FederationUtils.callInstruction(instString, output, id,
new CPOperand[]{input1}, new long[]{in.getFedMapping().getID()}, ExecType.SPARK, true);
map.execute(getTID(), fr1, fr2);
// derive new fed mapping for output
MatrixObject out = ec.getMatrixObject(output);
out.setFedMapping(in.getFedMapping().copyWithNewID(fr2.getID()));
}
private void processGetSPOutput(FederationMap map, MatrixObject in, ExecutionContext ec, AggregateUnaryOperator aop) {
DataCharacteristics dc = ec.getDataCharacteristics(output.getName());
FederatedRequest fr1;
long id = FederationUtils.getNextFedDataID();
if ( output.isScalar() ) {
ScalarObject scalarOut = ec.getScalarInput(output);
fr1 = map.broadcast(scalarOut);
id = fr1.getID();
}
else {
if((map.getType() == FederationMap.FType.COL && aop.isColAggregate()) || (map.getType() == FederationMap.FType.ROW && aop.isRowAggregate()))
fr1 = new FederatedRequest(RequestType.PUT_VAR, id, new MatrixCharacteristics(-1, -1), in.getDataType());
else
fr1 = new FederatedRequest(RequestType.PUT_VAR, id, dc, in.getDataType());
}
FederatedRequest fr2 = FederationUtils.callInstruction(instString, output, id,
new CPOperand[]{input1}, new long[]{in.getFedMapping().getID()}, ExecType.SPARK, true);
FederatedRequest fr3 = new FederatedRequest(RequestType.GET_VAR, fr1.getID());
FederatedRequest fr4 = map.cleanup(getTID(), fr2.getID());
//execute federated commands and cleanups
Future<FederatedResponse>[] tmp = map.execute(getTID(), fr1, fr2, fr3, fr4);
if( output.isScalar() )
ec.setVariable(output.getName(), FederationUtils.aggScalar(aop, tmp, map));
else
ec.setMatrixOutput(output.getName(), FederationUtils.aggMatrix(aop, tmp, map));
}
}
|
antoniobertilpaiva/cert
|
ejb-in-ear/ejb/src/main/java/com/criticalsoftware/certitools/business/sm/ActivityService.java
|
package com.criticalsoftware.certitools.business.sm;
import com.criticalsoftware.certitools.business.exception.BusinessException;
import com.criticalsoftware.certitools.business.exception.CertitoolsAuthorizationException;
import com.criticalsoftware.certitools.business.exception.ObjectNotFoundException;
import com.criticalsoftware.certitools.entities.sm.Activity;
import com.criticalsoftware.certitools.entities.sm.ActivityType;
import com.criticalsoftware.certitools.entities.sm.Chat;
import com.criticalsoftware.certitools.entities.sm.CorrectiveAction;
import com.criticalsoftware.certitools.entities.User;
import com.criticalsoftware.certitools.persistence.sm.dto.DocumentDTO;
import com.criticalsoftware.certitools.util.PaginatedListWrapper;
import java.util.Date;
import java.util.List;
/**
* Activity Service
*
* @author miseabra
* @version $Revision$
*/
@SuppressWarnings("unused")
public interface ActivityService {
/**
* Returns the list of activity types for the given contract.
*
* @param contractId The contract id.
* @return The list of activity types.
*/
List<ActivityType> findActivityTypes(long contractId);
/**
* Returns the activity type with the given id.
*
* @param typeId The activity type id.
* @return The activity type found.
* @throws ObjectNotFoundException If the activity type doesn't exist.
*/
ActivityType findActivityType(Long typeId) throws ObjectNotFoundException;
/**
* Creates a new activity type that will be associated with the given contract.
*
* @param contractId The contract id.
* @param name The name of the activity type.
* @param loggedUser The logged user.
* @return The activity type created.
* @throws CertitoolsAuthorizationException If the logged user is not expert.
*/
ActivityType createActivityType(Long contractId, String name, User loggedUser) throws CertitoolsAuthorizationException;
/**
* Creates an activity.
*
* @param contractId The contract id.
* @param activity The activity to be created.
* @param recurrenceTypeId The recurrence type to be applied to this activity, or null if the activity isn't recurrent.
* @param warningDays The warning days for the recurrence, or null if the recurrence type is also null.
* @param notificationUsers The users to be notified.
* @param loggedUser The logged user.
* @return The activity id.
* @throws CertitoolsAuthorizationException If the logged user is not expert.
* @throws ObjectNotFoundException If the contract doesn't exist.
* @throws BusinessException If an error occurs while generating the code sequence.
*/
Long createActivity(Long contractId, Activity activity, Long recurrenceTypeId,
Integer warningDays, List<User> notificationUsers, User loggedUser)
throws CertitoolsAuthorizationException, ObjectNotFoundException, BusinessException;
/**
* Updates the main fields of an activity (the same fields of the activity creation).
*
* @param contractId The contract id.
* @param activity The activity to be updated.
* @param recurrenceTypeId The recurrence type to be applied to this activity, or null if the activity isn't recurrent.
* @param warningDays The warning days for the recurrence, or null if the recurrence type is also null.
* @param notificationUsers The users to be notified.
* @param loggedUser The logged user.
* @throws CertitoolsAuthorizationException If the logged user is not expert.
* @throws ObjectNotFoundException If the contract or the activity don't exist.
* @throws BusinessException When updating an activity that is already closed.
*/
void updateActivityMainFields(Long contractId, Activity activity, Long recurrenceTypeId,
Integer warningDays, List<User> notificationUsers, User loggedUser)
throws CertitoolsAuthorizationException, ObjectNotFoundException, BusinessException;
/**
* Edits an activity.
*
* @param contractId The contract id.
* @param activityId The activity id.
* @param newDocuments The list of new documents that will be associated with this activity.
* @param closedDate The closed date. If this field is not null, it will close the activity and only the expert will be able to open it again.
* @param loggedUser The logged user.
* @throws CertitoolsAuthorizationException If the logged user is not expert or intermediate.
* @throws ObjectNotFoundException If the contract or the activity don't exist.
* @throws BusinessException When closing an activity with open corrective actions or updating an activity that is already closed.
*/
void editActivity(Long contractId, Long activityId, List<DocumentDTO> newDocuments, Date closedDate,
User loggedUser) throws CertitoolsAuthorizationException, ObjectNotFoundException, BusinessException;
/**
* Deletes an activity. The activity is only marked as deleted. The documents are not deleted.
* The recurrence, if any, is marked as inactive if this is the latest activity of this recurrence.
*
* @param contractId The contract id.
* @param activityId The activity id.
* @param loggedUser The logged user.
* @throws CertitoolsAuthorizationException If the logged user is not expert.
* @throws ObjectNotFoundException If the activity doesn't exist.
*/
void deleteActivity(Long contractId, Long activityId, User loggedUser)
throws CertitoolsAuthorizationException, ObjectNotFoundException;
/**
* Reopens an activity. The activity closed date is set to null.
*
* @param contractId The contract id.
* @param activityId The activity id.
* @param loggedUser The logged user.
* @throws CertitoolsAuthorizationException If the logged user is not expert.
* @throws ObjectNotFoundException If the activity doesn't exist.
*/
void reopenActivity(Long contractId, Long activityId, User loggedUser)
throws CertitoolsAuthorizationException, ObjectNotFoundException;
/**
* Deletes the given document.
*
* @param contractId The contract id.
* @param activityId The activity id.
* @param documentId The document id.
* @param loggedUser The logged user.
* @throws CertitoolsAuthorizationException If the logged user is not expert or intermediate.
* @throws ObjectNotFoundException If the contract or the activity don't exist.
* @throws BusinessException When updating an activity that is already closed.
*/
void deleteDocument(Long contractId, Long activityId, Long documentId, User loggedUser)
throws CertitoolsAuthorizationException, ObjectNotFoundException, BusinessException;
/**
* Adds a chat message to the given activity.
*
* @param contractId The contract id.
* @param activityId The activity id.
* @param message The chat message.
* @param loggedUser The logged user.
* @throws CertitoolsAuthorizationException If the logged user is not expert or intermediate.
* @throws ObjectNotFoundException If the contract or the activity don't exist.
* @throws BusinessException When updating an activity that is already closed.
*/
void addChatMessage(Long contractId, Long activityId, String message, User loggedUser)
throws CertitoolsAuthorizationException, ObjectNotFoundException, BusinessException;
/**
* Returns the list of chat messages of the given activity.
*
* @param activityId The activity id.
* @return The list of chat messages found.
*/
List<Chat> findChatMessages(Long activityId);
/**
* Returns the list of open corrective actions for the given activity.
*
* @param activityId The activity id.
* @return The list of open corrective actions found.
*/
List<CorrectiveAction> findOpenCorrectiveActions(Long activityId);
/**
* Finds the Activity with the given id. Includes the recurrence and the documents.
*
* @param activityId The activity id.
* @return The Activity complete info.
* @throws ObjectNotFoundException If the activity doesn't exist.
*/
Activity findActivity(Long activityId) throws ObjectNotFoundException;
/**
* Finds the Activity with the given id, for the chat pdf. Includes the contract and the chat messages.
*
* @param activityId The activity id.
* @return The Activity info.
* @throws ObjectNotFoundException If the activity doesn't exist.
*/
Activity findActivityForChatPdf(Long activityId) throws ObjectNotFoundException;
/**
* Returns all activities according to the parameters in the paginatedListWrapper.
*
* @param contractId The contract that the activities belong to.
* @param paginatedListWrapper The wrapper with the parameters for the search.
* @param activityTypeId The activity type id, or null if all types should be considered.
* @param filterYear The year of the activities. If null, all years will be considered.
* @param isOpen Indicates the status of the activities. If null, all statuses will be considered.
* @return The activities matching the given parameters.
* @throws BusinessException If the paginatedListWrapper is null.
*/
PaginatedListWrapper<Activity> findActivitiesByContract(long contractId,
PaginatedListWrapper<Activity> paginatedListWrapper,
Long activityTypeId, String filterYear,
Boolean isOpen)
throws BusinessException;
/**
* Returns all activities according to the given parameters.
*
* @param contractId The contract id.
* @param filterYear The year of the activities. If null, all years will be considered.
* @param filterSemester The semester of the activities. If null, both semesters will be considered. Must be null when year is also null.
* @param isOpen Indicates the status of the activities. If null, all statuses will be considered.
* @return The list of activities found.
*/
List<Activity> findActivitiesByContract(long contractId, String filterYear, String filterSemester, Boolean isOpen);
/**
* Returns all activity years.
*
* @param contractId The contract that the activities belong to.
* @return The list of years.
*/
List<String> findActivityYears(long contractId);
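// Illustrative usage sketch (caller-side, names hypothetical and not part of
// this interface): an activity is typically created and later closed, e.g.
//   Long id = activityService.createActivity(contractId, activity,
//       recurrenceTypeId, warningDays, notificationUsers, loggedUser);
//   activityService.editActivity(contractId, id, newDocuments, new Date(), loggedUser);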
}
|
UNFPAInnovation/GetInRebuild
|
collect_app/src/main/java/org/odk/getin/android/adapters/UpcomingAppointmentsAdapter.java
|
package org.odk.getin.android.adapters;
import static org.odk.getin.android.utilities.ApplicationConstants.APPOINTMENT_FORM_ID;
import static org.odk.getin.android.utilities.ApplicationConstants.APPOINTMENT_FORM_MIDWIFE_ID;
import static org.odk.getin.android.utilities.ApplicationConstants.CHEW_ROLE;
import static org.odk.getin.android.utilities.ApplicationConstants.GIRL_ID;
import static org.odk.getin.android.utilities.ApplicationConstants.GIRL_NAME;
import static org.odk.getin.android.utilities.ApplicationConstants.GIRL_REDEEMED_SERVICES;
import static org.odk.getin.android.utilities.ApplicationConstants.GIRL_VOUCHER_NUMBER;
import static org.odk.getin.android.utilities.ApplicationConstants.USER_ROLE;
import android.Manifest;
import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.ContentUris;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.net.Uri;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import androidx.recyclerview.widget.RecyclerView;
import com.pixplicity.easyprefs.library.Prefs;
import org.odk.getin.android.R;
import org.odk.getin.android.provider.FormsProviderAPI;
import org.odk.getin.android.provider.appointmentstable.AppointmentstableCursor;
import org.odk.getin.android.provider.appointmentstable.AppointmentstableSelection;
import org.odk.getin.android.provider.mappedgirltable.MappedgirltableCursor;
import org.odk.getin.android.provider.mappedgirltable.MappedgirltableSelection;
import org.odk.getin.android.retrofitmodels.Value;
import org.odk.getin.android.tasks.ServerPollingJob;
import org.odk.getin.android.utilities.ApplicationConstants;
import org.odk.getin.android.utilities.ToastUtils;
import java.text.SimpleDateFormat;
import java.util.Locale;
import timber.log.Timber;
public class UpcomingAppointmentsAdapter extends RecyclerView.Adapter<UpcomingAppointmentsAdapter.ViewHolder> implements ActivityCompat.OnRequestPermissionsResultCallback {
private static final int REQUEST_PHONE_CALL = 34;
private AppointmentstableCursor cursor;
private UpcomingAppointmentsAdapter.ItemClickListener mClickListener;
private Activity activity;
private SimpleDateFormat simpleformat = new SimpleDateFormat("dd MMM yyyy", Locale.US);
public static class ViewHolder extends RecyclerView.ViewHolder {
public TextView name;
public TextView phoneNumber;
public TextView age;
public TextView maritalStatus;
public TextView village;
public TextView appointmentStatus;
public TextView appointmentDate;
public TextView voucherExpiryDate;
public TextView voucherNumber;
public TextView servicesReceived;
public Button followUpButton;
public Button appointmentButton;
public Button postNatalButton;
public ImageView mappedGirlIcon;
public ImageButton callGirlButton;
public ViewHolder(View v) {
super(v);
name = (TextView) v.findViewById(R.id.name);
phoneNumber = (TextView) v.findViewById(R.id.phone_number);
maritalStatus = (TextView) v.findViewById(R.id.marital_status);
age = (TextView) v.findViewById(R.id.age);
village = (TextView) v.findViewById(R.id.village);
appointmentDate = (TextView) v.findViewById(R.id.appointment_date);
voucherExpiryDate = (TextView) v.findViewById(R.id.voucher_expiry_date);
appointmentStatus = (TextView) v.findViewById(R.id.appointment_status);
voucherNumber = (TextView) v.findViewById(R.id.voucher_number);
servicesReceived = (TextView) v.findViewById(R.id.services_received);
followUpButton = (Button) v.findViewById(R.id.create_follow_up_button);
appointmentButton = (Button) v.findViewById(R.id.create_upcoming_appointment_button);
postNatalButton = (Button) v.findViewById(R.id.create_post_natal_button);
mappedGirlIcon = (ImageView) v.findViewById(R.id.mapped_girl_icon);
callGirlButton = (ImageButton) v.findViewById(R.id.call_girl_button);
}
}
public UpcomingAppointmentsAdapter(Activity activity, AppointmentstableCursor cursor) {
this.cursor = cursor;
this.activity = activity;
}
@NonNull
@Override
public UpcomingAppointmentsAdapter.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
View cardview = LayoutInflater.from(parent.getContext())
.inflate(R.layout.upcoming_appointments_row, parent, false);
return new UpcomingAppointmentsAdapter.ViewHolder(cardview);
}
@Override
public void onBindViewHolder(@NonNull final UpcomingAppointmentsAdapter.ViewHolder holder, int position) {
try {
cursor.moveToPosition(position);
holder.name.setText(String.format("%s %s", cursor.getFirstname(), cursor.getLastname()));
try {
if (!TextUtils.isEmpty(cursor.getVoucherNumber()))
holder.voucherNumber.setText(String.format(activity.getString(R.string.voucher_number_string),
cursor.getVoucherNumber()));
else
holder.voucherNumber.setVisibility(View.GONE);
} catch (Exception e) {
e.printStackTrace();
}
try {
if (!TextUtils.isEmpty(cursor.getServicesReceived()))
holder.servicesReceived.setText(String.format(activity.getString(R.string.services_received_string),
cursor.getServicesReceived()));
else
holder.servicesReceived.setVisibility(View.GONE);
} catch (Exception e) {
e.printStackTrace();
}
holder.maritalStatus.setText(org.odk.getin.android.utilities
.TextUtils.toCapitalize(cursor.getMaritalstatus()));
try {
holder.village.setText(cursor.getVillage());
} catch (Exception e) {
e.printStackTrace();
}
final String phoneNumber = getActivePhoneNumber(cursor);
holder.phoneNumber.setText(phoneNumber);
try {
holder.age.setText(String.format(Locale.ENGLISH, "%d Years", cursor.getAge()));
} catch (Exception e) {
e.printStackTrace();
}
holder.appointmentStatus.setText(cursor.getStatus());
String date = simpleformat.format(cursor.getAppointmentDate());
holder.appointmentDate.setText(activity.getString(R.string.appointment_date, date));
MappedgirltableSelection mappedgirltableSelection = new MappedgirltableSelection();
mappedgirltableSelection.phonenumber(phoneNumber);
MappedgirltableCursor mappedgirltableCursor = mappedgirltableSelection.query(activity.getContentResolver());
if (mappedgirltableCursor.moveToFirst()) {
if (mappedgirltableCursor.getVoucherExpiryDate() != null)
holder.voucherExpiryDate.setText(activity.getString(R.string.voucher_expiry_string, simpleformat.format(mappedgirltableCursor.getVoucherExpiryDate())));
else
holder.voucherExpiryDate.setVisibility(View.GONE);
} else {
holder.voucherExpiryDate.setVisibility(View.GONE);
}
Timber.d(cursor.getStatus());
if (cursor.getStatus().equals("Missed")) {
holder.mappedGirlIcon.setBackground(this.activity.getResources().getDrawable(R.drawable.circular_view_red));
holder.appointmentStatus.setTextColor(this.activity.getResources().getColor(R.color.light_red));
} else if (cursor.getStatus().equals("Attended")) {
holder.mappedGirlIcon.setBackground(this.activity.getResources().getDrawable(R.drawable.circular_view_green));
holder.appointmentStatus.setTextColor(this.activity.getResources().getColor(R.color.light_green));
} else {
holder.mappedGirlIcon.setBackground(this.activity.getResources().getDrawable(R.drawable.circular_view_orange));
holder.appointmentStatus.setTextColor(this.activity.getResources().getColor(R.color.light_orange));
}
if (Prefs.getString(USER_ROLE, CHEW_ROLE).equals(CHEW_ROLE)) {
holder.appointmentButton.setVisibility(View.GONE);
} else {
holder.appointmentButton.setOnClickListener(v -> {
saveCredentialsInSharedPrefs(holder);
if (Prefs.getString(USER_ROLE, CHEW_ROLE).equals(CHEW_ROLE))
startFormActivity(APPOINTMENT_FORM_ID);
else
startFormActivity(APPOINTMENT_FORM_MIDWIFE_ID);
});
}
holder.callGirlButton.setOnClickListener(v -> {
try {
if (ActivityCompat.checkSelfPermission(activity, Manifest.permission.CALL_PHONE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(activity, new String[]{Manifest.permission.CALL_PHONE}, REQUEST_PHONE_CALL);
} else {
activity.startActivity(new Intent(Intent.ACTION_CALL, Uri.parse("tel:" + phoneNumber)));
}
} catch (ActivityNotFoundException e) {
Timber.e(e);
activity.startActivity(new Intent(Intent.ACTION_DIAL, Uri.parse("tel:" + phoneNumber)));
}
});
} catch (Exception e) {
Timber.e(e);
}
}
private void saveCredentialsInSharedPrefs(@NonNull UpcomingAppointmentsAdapter.ViewHolder holder) {
String girlName = holder.name.getText().toString();
MappedgirltableCursor girlCursor = queryMappedGirlsTable(girlName.split(" ")[0]);
girlCursor.moveToFirst();
Prefs.putString(GIRL_NAME, girlName);
Prefs.putString(GIRL_ID, girlCursor.getServerid());
if (girlCursor.getVoucherNumber() != null) {
Prefs.putString(GIRL_VOUCHER_NUMBER, girlCursor.getVoucherNumber());
Prefs.putString(GIRL_REDEEMED_SERVICES, TextUtils.isEmpty(
girlCursor.getServicesReceived()) ? "None" : girlCursor.getServicesReceived());
}
}
private AppointmentstableCursor queryAppointmentTable(String girlName) {
return new AppointmentstableSelection().firstnameContains(girlName).or()
.lastnameContains(girlName).orderByCreatedAt(true)
.query(this.activity.getContentResolver());
}
private MappedgirltableCursor queryMappedGirlsTable(String text) {
MappedgirltableSelection selection = new MappedgirltableSelection();
selection.firstnameContains(text).or().lastnameContains(text);
return selection.query(activity.getContentResolver());
}
private String getActivePhoneNumber(AppointmentstableCursor cursor) {
// use girl or next of kin phone number
String phoneNumber = cursor.getPhonenumber();
if (TextUtils.isEmpty(phoneNumber))
phoneNumber = cursor.getNextofkinphonenumber();
return phoneNumber;
}
// allows click events to be caught
public void setClickListener(UpcomingAppointmentsAdapter.ItemClickListener itemClickListener) {
this.mClickListener = itemClickListener;
}
// parent activity will implement this method to respond to click events
public interface ItemClickListener {
void onItemClick(View view, int position, Value value);
}
@Override
public int getItemCount() {
return (cursor == null) ? 0 : cursor.getCount();
}
private void startFormActivity(String formId) {
String selectionClause = null;
try {
selectionClause = FormsProviderAPI.FormsColumns.JR_FORM_ID + " LIKE ?";
String[] selectionArgs = {formId + "%"};
Cursor c = activity.getContentResolver().query(
FormsProviderAPI.FormsColumns.CONTENT_URI, // The content URI of the forms table
null, // The columns to return for each row (null returns all)
selectionClause, // The selection clause matching the form id
selectionArgs, // The selection arguments (the form id prefix)
null); // The default sort order
c.moveToFirst();
Uri formUri = ContentUris.withAppendedId(FormsProviderAPI.FormsColumns.CONTENT_URI,
c.getLong(c.getColumnIndex(FormsProviderAPI.FormsColumns._ID)));
Intent intent = new Intent(Intent.ACTION_EDIT, formUri);
intent.putExtra(ApplicationConstants.BundleKeys.FORM_MODE, ApplicationConstants.FormModes.EDIT_SAVED);
activity.startActivity(intent);
} catch (Exception e) {
e.printStackTrace();
ToastUtils.showLongToast("Please connect to the Internet and try again");
// download all empty forms from the server. this is required before user can fill in the form
ServerPollingJob.startJobImmediately();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case REQUEST_PHONE_CALL: {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
String phoneNumber = getActivePhoneNumber(cursor);
activity.startActivity(new Intent(Intent.ACTION_CALL, Uri.parse("tel:" + phoneNumber)));
}
return;
}
}
}
}
|
Pustur/edabit-js-challenges
|
src/Date Format/index.test.js
|
import formatDate from './index';
test('formatDate', () => {
expect(formatDate('11/12/2019')).toBe('20191211');
expect(formatDate('12/31/2019')).toBe('20193112');
expect(formatDate('01/15/2019')).toBe('20191501');
});
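// A minimal sketch of an implementation that satisfies the tests above,
// assuming the input is MM/DD/YYYY and the output rearranges it to YYYYDDMM
// (the actual './index' module may differ):
//
// const formatDate = date => {
//   const [month, day, year] = date.split('/');
//   return `${year}${day}${month}`;
// };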
|
NLeSC/escxnat
|
nl.esciencecenter.xnattool/src/nl/esciencecenter/xnattool/XnatTool.java
|
/*
* Copyright 2012-2014 Netherlands eScience Center.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For the full license, see: LICENSE.txt (located in the root folder of this distribution).
* ---
*/
// source:
package nl.esciencecenter.xnattool;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.NoSuchAlgorithmException;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import nl.esciencecenter.medim.ImageDirEvent;
import nl.esciencecenter.medim.ImageDirScanner;
import nl.esciencecenter.medim.ImageDirScanner.FileFilterOptions;
import nl.esciencecenter.medim.ImageDirScannerListener;
import nl.esciencecenter.medim.ImageTypes;
import nl.esciencecenter.medim.ScanSetInfo;
import nl.esciencecenter.medim.ScanSetInfo.FileDescriptor;
import nl.esciencecenter.medim.ScanSetInfo.ScanTypeParameters;
import nl.esciencecenter.medim.SeriesInfo;
import nl.esciencecenter.medim.StudyInfo;
import nl.esciencecenter.medim.SubjectInfo;
import nl.esciencecenter.medim.dicom.DicomDirScanner;
import nl.esciencecenter.medim.dicom.DicomProcessingProfile;
import nl.esciencecenter.medim.dicom.DicomProcessor;
import nl.esciencecenter.medim.dicom.DicomTagFilters;
import nl.esciencecenter.medim.dicom.DicomTagFilters.DicomTagFilter;
import nl.esciencecenter.medim.dicom.DicomUtil;
import nl.esciencecenter.medim.dicom.DicomWrapper;
import nl.esciencecenter.medim.dicom.types.DicomTags;
import nl.esciencecenter.medim.nifti.NiftiDirScanner;
import nl.esciencecenter.ptk.GlobalProperties;
import nl.esciencecenter.ptk.crypt.Secret;
import nl.esciencecenter.ptk.crypt.StringCrypter;
import nl.esciencecenter.ptk.crypt.StringCrypter.DecryptionFailedException;
import nl.esciencecenter.ptk.crypt.StringCrypter.EncryptionException;
import nl.esciencecenter.ptk.csv.CSVData;
import nl.esciencecenter.ptk.data.StringHolder;
import nl.esciencecenter.ptk.io.FSPath;
import nl.esciencecenter.ptk.io.FSUtil;
import nl.esciencecenter.ptk.io.exceptions.FileURISyntaxException;
import nl.esciencecenter.ptk.net.URIUtil;
import nl.esciencecenter.ptk.presentation.Presentation;
import nl.esciencecenter.ptk.ssl.CertificateStore;
import nl.esciencecenter.ptk.ssl.CertificateStoreException;
import nl.esciencecenter.ptk.util.StringUtil;
import nl.esciencecenter.ptk.util.logging.ClassLogger;
import nl.esciencecenter.ptk.web.PutMonitor;
import nl.esciencecenter.ptk.web.WebException;
import nl.esciencecenter.ptk.web.WebException.Reason;
import nl.esciencecenter.ptk.xml.XmlUtil;
import nl.esciencecenter.xnatclient.XnatClient;
import nl.esciencecenter.xnatclient.XnatClient.FilesCollection;
import nl.esciencecenter.xnatclient.data.ImageFileInfo;
import nl.esciencecenter.xnatclient.data.NewScanInfo;
import nl.esciencecenter.xnatclient.data.XnatFile;
import nl.esciencecenter.xnatclient.data.XnatProject;
import nl.esciencecenter.xnatclient.data.XnatScan;
import nl.esciencecenter.xnatclient.data.XnatSession;
import nl.esciencecenter.xnatclient.data.XnatSubject;
import nl.esciencecenter.xnatclient.data.XnatTypes.ImageContentType;
import nl.esciencecenter.xnatclient.data.XnatTypes.ImageFormatType;
import nl.esciencecenter.xnatclient.exceptions.XnatAuthenticationException;
import nl.esciencecenter.xnatclient.exceptions.ParameterException;
import nl.esciencecenter.xnatclient.exceptions.XnatClientException;
import org.dcm4che2.data.DicomObject;
import org.dcm4che2.data.Tag;
/**
* Stateful XnatTool for Dicom Uploading and Identity Mapping to XNAT. Manages a set of DataSet Configurations.
*
* @author <NAME>
*/
public class XnatTool implements ImageDirScannerListener
{
private static ClassLogger logger = ClassLogger.getLogger(XnatTool.class);
static
{
// logger.setLevelToDebug();
}
protected static ClassLogger getLogger()
{
return logger;
}
public static void assertEqual(String message, String expected, String actual) throws XnatToolException
{
if (expected == null)
{
throw new XnatToolException(message + "\nExpected value is null.");
}
else if (expected.equals(actual) == false)
{
throw new XnatToolException(message + "\nExpected='" + expected + "', actual='" + actual + "'");
}
}
public static void assertNotNull(String message, Object value) throws XnatToolException
{
if (value == null)
throw new XnatToolException(message);
}
// ========================================================================
//
// ========================================================================
private XnatToolConfig config = null;
private XnatClient xnatClient = null;
// Project
private String currentProjectID;
private List<XnatProject> cachedXnatProjects;
// filtering
protected DicomDirScanner.FileFilterOptions filterOptions = new DicomDirScanner.FileFilterOptions();
private String currentDataSetConfigName = null;
private DataSetConfigList dataSetConfigs = null;
// encryption and hash settings
private CryptHashSettings cryptSettings = null;
// Dicom processing
private DicomProcessor dicomProcessor = null;
// Data Mapping
private DBMapping dbMapping = null;
private ImageDirScanner imageDirScanner;
private Set<String> scanSetUids;
private CertificateStore certStore;
private boolean hasPersistentConfig = true;
// -------------------------------
// backwards compatibility option.
// -------------------------------
protected boolean option_putAtlasUnderReconstructions = false;
public XnatTool(XnatToolConfig newConfig, boolean hasPersistantConfig) throws Exception
{
this.hasPersistentConfig = hasPersistantConfig;
init(newConfig);
}
public void updateConfig(XnatToolConfig newConfig) throws Exception
{
init(newConfig);
}
private void init(XnatToolConfig newConfig) throws Exception
{
this.config = newConfig;
persistantUpdateXnatLocation(config.getXnatURI());
// Default Filter Options:
this.filterOptions.setExtensions(FileFilterOptions.default_dicom_extensions);
this.filterOptions.checkFileSize = true;
this.filterOptions.maxSliceFileSize = 10 * 1024 * 1024; //
this.filterOptions.checkFileMagic = true;
this.cryptSettings = new CryptHashSettings();
reloadDataSetConfigs();
if (dataSetConfigs == null)
{
// reload failed:
dataSetConfigs = new DataSetConfigList();
}
DicomProcessingProfile dicomOpts = null;
DataSetConfig dataSetConfig = this.getCurrentDataSetConfig();
if (dataSetConfig != null)
{
dicomOpts = dataSetConfig.dicomProcessingProfile;
}
else
{
dicomOpts = DicomProcessingProfile.createDefault();
}
initDicomProcessor(dicomOpts);
}
public void setJSessionID(String jsession)
{
xnatClient.setJSessionID(jsession);
}
public void setXnatCredentials(String user, Secret password)
{
this.xnatClient.setCredentials(user, password);
}
public boolean authenticateXnat() throws WebException
{
xnatClient.connect();
return xnatClient.isAuthenticated();
}
public boolean isXnatAuthenticated()
{
return xnatClient.isAuthenticated();
}
public java.net.URI getXnatURI()
{
return this.xnatClient.getServiceURI();
}
public XnatToolConfig getToolConfig()
{
return config;
}
public XnatClient getXnatClient()
{
return xnatClient;
}
/**
* Set alternative CertificateStore
*/
public void setCertificateStore(CertificateStore certStore) throws CertificateStoreException
{
this.xnatClient.setCertificateStore(certStore);
this.certStore = certStore; // keep
}
public void createCachedir() throws IOException
{
FSPath cacheDirNode = getCacheDir();
if (cacheDirNode.exists() == false)
{
cacheDirNode.mkdirs();
logger.debugPrintf("Creating new CacheDir:%s\n", cacheDirNode);
}
else
{
logger.debugPrintf("Using existing CacheDir:%s\n", cacheDirNode);
}
}
public FSPath getCacheDir() throws IOException
{
if (config.getImageCacheDir() == null)
throw new NullPointerException("CacheDir not specified. Please specify Cache Dir.");
FSPath cacheDirNode = this.getFSUtil().newFSPath(config.getImageCacheDir());
return cacheDirNode;
}
/**
* Change DataSetsConfig Directory and reload DataSetConfiguration
*/
public boolean reloadDataSetsConfigDir(URI loc, boolean autoCreate) throws IOException
{
this.config.setDataSetsConfigDir(loc);
// update now:
this.saveToolConfig();
deleteEncryptionKeys();
// block/reset:
this.currentDataSetConfigName = null;
this.dataSetConfigs = null;
this.dbMapping = null;
boolean exists = FSUtil.getDefault().existsDir(loc.getPath());
if (exists == false)
{
if (autoCreate)
{
exists = createDataSetsConfigDir();
return exists;
}
else
{
return false; // UI checks this and sets 'Create' button.
}
}
else
{
// Try to load default data set config:
try
{
boolean reloaded = this.reloadDataSetConfigs();
if (reloaded)
{
logger.infoPrintf("Switched to existing DataSets configuration dir:%s\n", loc);
}
}
catch (Exception e)
{
logger.warnPrintf("No default DataSet configuration(s) found at location:%s", loc);
}
return true; // directory exists (a configuration may or may not have been loaded).
}
}
// === Persistent Settings === //
public void persistantUpdateXnatLocation(URI uri) throws XnatToolException, URISyntaxException, WebException, CertificateStoreException
{
this.config.updateURI(uri);
if (this.xnatClient != null)
{
try
{
xnatClient.disconnect();
}
catch (Exception e)
{
logger.warnPrintf("Exeption when disconnecting from xnatClient:%s\n", xnatClient);
}
}
// get User From Config:
String user = this.config.getXnatUser();
if (StringUtil.isEmpty(user))
{
// update user
user = config.getXnatURI().getUserInfo();
if (StringUtil.isEmpty(user))
user = GlobalProperties.getGlobalUserName();
}
// auto update user
config.updateUser(user);
this.xnatClient = new XnatClient(config.getXnatURI(), user, null);
if (this.certStore != null)
{
this.xnatClient.setCertificateStore(certStore);
}
this.saveToolConfig();
}
public void persistantSetCacheDir(URI loc, boolean autoCreate) throws IOException
{
this.config.setImageCacheDir(loc);
if (autoCreate)
{
this.createCachedir();
}
saveToolConfig();
}
public void persistantSetDefaultSourceID(String name)
{
config.defaultSourceId = name;
saveToolConfig();
}
// === Load/Save Tool Config.
protected boolean saveToolConfig()
{
URI confPath;
if (this.hasPersistentConfig == false)
{
return false;
}
try
{
confPath = XnatToolMain.getSettingsFileURI();
}
catch (URISyntaxException e1)
{
handleError(e1, "Invalid configuration location URI:%s\n", e1.getInput());
return false;
}
try
{
XnatToolConfig.saveConfig(config, confPath.getPath());
return true;
}
catch (Exception e2)
{
handleError(e2, "Couldn't save tool configuration to:%s\n", confPath);
}
return false;
}
private void handleError(Throwable ex, String message, Object... args)
{
// Call back to UI !
logger.logException(ClassLogger.ERROR, ex, message, args);
}
// ========================================================================
// XNat Interface:
// ========================================================================
/**
* Returns authorized project names.
*/
public String[] getProjectIDs(boolean update) throws Exception
{
List<XnatProject> projs = getProjects(update);
if ((projs == null) || (projs.size() <= 0))
return null;
int n = projs.size();
String names[] = new String[n];
for (int i = 0; i < n; i++)
names[i] = projs.get(i).getID(); // Use Ids!;
return names;
}
/**
* Returns authorized project names. Project name might not be defined.
*/
public String[] getProjectNames(boolean update) throws Exception
{
List<XnatProject> projs = getProjects(update);
if ((projs == null) || (projs.size() <= 0))
return null;
int n = projs.size();
String names[] = new String[n];
for (int i = 0; i < n; i++)
names[i] = projs.get(i).getName();
return names;
}
public List<XnatProject> getProjects(boolean update) throws Exception
{
if ((update) || (this.cachedXnatProjects == null))
this.cachedXnatProjects = xnatClient.listProjects();
return this.cachedXnatProjects;
}
/**
* Returns current cached project ID.
*/
public String getCurrentProjectID()
{
return this.currentProjectID;
}
public String getXnatUsername()
{
return this.xnatClient.getUsername();
}
// ========================================================================
// DBMapping and credentials
// ========================================================================
public String getDefaultSourceId()
{
String id = this.config.getDefaultSourceId();
if (StringUtil.isEmpty(id))
{
// auto copy xnat user name:
id = getXnatUsername();
if (id != null)
{
id = id.toUpperCase();
}
config.setDefaultSourceId(id);
}
return id;
}
protected String getDataSetSourceID()
{
DataSetConfig setConfig = this.getCurrentDataSetConfig();
if (setConfig == null)
{
return config.defaultSourceId;
}
return setConfig.sourceId;
}
public boolean hasCurrentDataSetconfig()
{
return (getCurrentDataSetConfig() != null);
}
public boolean isDataSetEncryptionKeyInitialized()
{
DataSetConfig setConfig = this.getCurrentDataSetConfig();
if (setConfig == null)
{
return false;
}
return setConfig.hasValidEncryptionKey();
}
/**
* Authenticate SourceID and Password with (stored) encrypted Source ID and encrypted Encryption Key. The SourceID
* and Encryption Key are encrypted with an Password when written to the DataSet configuration file. This method
* will decrypt the stored (Encrypted) Source ID and (encrypted) Encryption Key and match the decrypted Source ID
* with the sourceId given as argument.
* <p>
* This will initialize the DBMapping and these credentials are also used by the DicomProcessor.
* <p>
*
* @param sourceId
* the current data set sourceId. Must match with the encrypted Source ID.
* @param passPhrase
* - the passphrase used to encrypt the Source ID and the Encryption Key
* @param autoCreateEncryptionKey
* - if no encryption key has been created, generate a key from the passPhrase.
*
*/
public void authenticateEncryptionKeys(String sourceId, Secret passPhrase, boolean autoCreateEncryptionKey) throws Exception
{
DataSetConfig setConfig = this.getCurrentDataSetConfig();
if (setConfig == null)
{
throw new XnatToolException("No Configuration loaded or created yet. Can't authenticate keys");
}
if (StringUtil.isEmpty(sourceId))
{
throw new NullPointerException("Argument sourceId is empty or null!");
}
StringCrypter crypter = createEncrypter(passPhrase);
byte[] encryptedSourceID = setConfig.getEncryptedSourceID();
if (encryptedSourceID == null)
{
if (autoCreateEncryptionKey)
{
initializeDataSetEncryptionKeys(setConfig, crypter, sourceId, passPhrase);
encryptedSourceID = setConfig.getEncryptedSourceID();
if (encryptedSourceID == null)
{
throw new XnatAuthenticationException("Failed to initailize a new Encryption Key. Returned Encrypted SourceID is null!");
}
}
else
{
throw new XnatAuthenticationException("Encryption Keys not initialized. Auto creation of Encryption Key disabled.");
}
}
byte[] encryptedKey = setConfig.getEncryptedKey();
if (encryptedKey == null)
{
throw new XnatAuthenticationException("Encryption Keys not initialized. Created or decrypted Encryption Key is null!");
}
try
{
// To check whether the supplied key matches the DataSet the
// sourceID is encrypted with that same key.
// If the encrypted SourceID can be decrypted the keys match this
// data set.
String decryptedSourceId = new String(crypter.decrypt(encryptedSourceID), "UTF-8");
if (!sourceId.equals(decryptedSourceId))
{
throw new XnatAuthenticationException("Authentication Failed: Passphrase and Owner ID combination do not match.");
}
}
catch (DecryptionFailedException e)
{
throw new XnatAuthenticationException("Authentication Failed: Passphrase is incorrect.", e);
}
byte decryptedKey[] = crypter.decrypt(encryptedKey);
DicomProcessingProfile procOpts = dicomProcessor.getProcessingOptions();
procOpts.setEncryptionKey(decryptedKey);
setConfig.setEncryptionKey(decryptedKey);
// re-initialized DicomProcessor !
dicomProcessor.updateProcessingOptions(procOpts);
// Update ID and Keys for XNAT CryptHashing ...
cryptSettings.setCredentials(sourceId, decryptedKey);
dbMapping = new DBMapping(cryptSettings);
logger.infoPrintf(" - sourceId =%s\n", sourceId);
logger.infoPrintf(" - encryptedKey =%s\n", StringUtil.toHexString(encryptedKey));
logger.infoPrintf(" - encryptedSourceId =%s\n", StringUtil.toHexString(encryptedSourceID));
logger.infoPrintf(" - decryptedKey =%s\n", StringUtil.toHexString(decryptedKey));
}
/**
* Create Stateful Encrypter using current encryption settings.
*/
private StringCrypter createEncrypter(Secret passPhrase) throws NoSuchAlgorithmException, UnsupportedEncodingException,
EncryptionException
{
return new StringCrypter(
passPhrase,
cryptSettings.cryptScheme,
cryptSettings.hashAlgorithm,
StringCrypter.CHARSET_UTF8);
}
public void initializeEncryptionKeyFromSourceText(String sourceId, Secret keySourceText, Secret keyPassword) throws Exception
{
StringCrypter crypter = this.createEncrypter(keyPassword);
DataSetConfig setConfig = this.getCurrentDataSetConfig();
initializeDataSetEncryptionKeys(setConfig, crypter, sourceId, keySourceText);
}
protected DataSetConfig initializeDataSetEncryptionKeys(
DataSetConfig config,
StringCrypter crypter,
String sourceId,
Secret keyDigestSourceText) throws Exception
{
// generate (sha-256 hash) digest from key source text.
byte digest[] = crypter.createKeyDigest(keyDigestSourceText);
byte encryptedKey[] = crypter.encrypt(digest);
byte encryptedSourceId[] = crypter.encrypt(sourceId);
config.setCredentials(sourceId, digest);
config.setEncryptedCredentials(encryptedSourceId, encryptedKey);
this.saveDataSetConfigs(); // persistent update!
return config;
}
public void deleteEncryptionKeys()
{
DataSetConfig setConfig = this.getCurrentDataSetConfig();
if (setConfig == null)
return; // no config loaded yet!
setConfig.setCredentials(null, null);
setConfig.setEncryptedCredentials(null, null);
DicomProcessingProfile opts = this.dicomProcessor.getProcessingOptions();
if (opts != null)
{
// remove encryptionKey;
opts.setEncryptionKey(null);
}
if (this.dbMapping != null)
{
this.dbMapping.cryptHashSettings.setCredentials(null, null);
}
// clear DB mapping here ?
this.dbMapping = null;
}
protected void saveMappings(String name, DBMapping mapping)
{
URI loc = getToolConfig().getDataSetsConfigDir();
try
{
loc = URIUtil.appendPath(loc, name + "_mappings.csv");
StringBuilder sb = new StringBuilder();
mapping.toCSV(name, sb);
String csvText = sb.toString();
logger.debugPrintf("Saving new ID Mapping File: %s\n>>>-----------\n%s>>>-----------\n",
loc, csvText);
FSUtil.getDefault().writeText(loc.getPath(), csvText);
}
catch (Exception e)
{
logger.logException(ClassLogger.ERROR, e, "Failed to write ID Mappings to CSV File:%sn", loc);
}
}
protected void saveMetaData(String name, ImageDirScanner dicomSource, DBMapping mappings)
{
URI loc = getToolConfig().getDataSetsConfigDir();
try
{
loc = URIUtil.appendPath(loc, name + "_metadata.csv");
StringBuilder sb = new StringBuilder();
new MetaDataWriter(dicomSource, mappings).toCSV(sb);
String csvText = sb.toString();
logger.debugPrintf("Saving new ID Mapping File: %s\n>>>-----------\n%s>>>-----------\n",
loc, csvText);
FSUtil.getDefault().writeText(loc.getPath(), csvText);
}
catch (Exception e)
{
logger.logException(ClassLogger.ERROR, e, "Failed to write ID Mappings to CSV File:%sn", loc);
}
}
public CSVData getMetaData() throws Exception
{
if (dbMapping == null)
{
throw new Exception("No (Meta) Data, please scan first.");
}
return new MetaDataWriter(imageDirScanner, dbMapping).toCSV();
}
protected void printMappings(DBMapping mapping, ImageDirScannerListener optListener)
{
if (optListener != null)
{
optListener.notifyImageDirScannerEvent(ImageDirEvent.newMessageEvent("=== Current XNAT DB Mappings ==="));
}
String subjs[] = mapping.getSubjectKeys();
if (subjs != null)
{
for (int i = 0; i < subjs.length; i++)
{
String subjkey = subjs[i]; // is patientID;
String xnatSubjectLabel = mapping.getXnatSubjectLabel(subjkey);
logger.infoPrintf(">PatientID %s=>%s\n", subjkey, xnatSubjectLabel);
if (optListener != null)
{
optListener.notifyImageDirScannerEvent(ImageDirEvent.newMessageEvent("- Subject Mapping:" + subjkey + " -> "
+ xnatSubjectLabel));
}
String sessionKeys[] = mapping.getSessionKeys(subjkey);
if (sessionKeys == null)
continue;
for (int j = 0; j < sessionKeys.length; j++)
{
String sessKey = sessionKeys[j]; // is studyUID
String xnatSessionLabel = mapping.getXnatSessionLabel(subjkey, sessKey);
logger.infoPrintf(" - StudyUID: %s=>%s\n", sessKey, xnatSessionLabel);
if (optListener != null)
{
optListener.notifyImageDirScannerEvent(ImageDirEvent.newMessageEvent("- - Session Mapping:" + sessKey + " -> "
+ xnatSessionLabel));
}
String scanKeys[] = mapping.getScanKeys(sessKey);
if (scanKeys == null)
continue;
for (int k = 0; k < scanKeys.length; k++)
{
String scanKey = scanKeys[k];
String xnatScanLabel = mapping.getXnatScanLabel(sessKey, scanKey);
logger.infoPrintf(" - - SeriesUID: %s=>%s\n", scanKey, xnatScanLabel);
if (optListener != null)
{
optListener.notifyImageDirScannerEvent(ImageDirEvent.newMessageEvent("- - -ScanSet Mapping:" + scanKey + " -> "
+ xnatScanLabel));
}
}
}
}
}
}
// ========================================================================
// Dicom Processing
// ========================================================================
protected void initDicomProcessor(DicomProcessingProfile options) throws Exception
{
// Load optional dicom tags configuration here:
String dicomTagConfigurationFile = "dicom/dicom_tags.xcsv";
DicomTags tagOpts;
try
{
tagOpts = DicomTags.createFromFile(dicomTagConfigurationFile);
}
catch (Exception e)
{
throw new XnatToolException("Failed to read Dicom Tag Processing configuration file from:" + dicomTagConfigurationFile, e);
}
dicomProcessor = new DicomProcessor(tagOpts, options);
}
protected DicomProcessingProfile getProcessingOptions()
{
return dicomProcessor.getProcessingOptions();
}
// ========================================================================
// Xnat Mappings
// ========================================================================
public XnatSubject getCreateSubject(String projectId, String subjectLabel, boolean autoCreate) throws Exception
{
if (StringUtil.isEmpty(projectId))
throw new XnatToolException("ProjecId can't be empty)");
if (StringUtil.isEmpty(subjectLabel))
throw new XnatToolException("SubjectLabel can't be empty)");
XnatSubject subject = null;
try
{
subject = xnatClient.getSubjectByLabel(projectId, subjectLabel);
}
catch (WebException e)
{
if ((autoCreate == false) && (e.getReason() != Reason.RESOURCE_NOT_FOUND))
{
throw e;
}
}
if (subject == null)
{
if (autoCreate == false)
return null;
// Create at XNAT DB
subject = xnatClient.createSubject(projectId, subjectLabel);
//
logger.infoPrintf("Created new subject: %s:%s\n", subject.getID(), subject.getLabel());
subject = xnatClient.getSubjectByLabel(projectId, subjectLabel);
}
// use subject create by current user!
logger.infoPrintf("Got Subject:%s:%s\n", subject.getID(), subject.getLabel());
return subject;
}
protected XnatSession getCreateSession(XnatSubject subject, String sessionLabel, boolean autoCreate) throws Exception
{
String projectId = subject.getProjectID();
String subjectLabel = subject.getLabel();
XnatSession session = xnatClient.getSession(projectId, subjectLabel, sessionLabel);
if (session == null)
{
if (autoCreate == false)
return null;
long current = System.currentTimeMillis();
GregorianCalendar now = new GregorianCalendar();
now.setTimeInMillis(current);
Date date = now.getTime();
// New Object!
session = XnatSession.createXnatSession(projectId, subjectLabel, sessionLabel);
session.setSessionDate(date);
//
session = xnatClient.createSession(session);
//
logger.infoPrintf("Created new Session: %s:%s\n", session.getID(), session.getLabel());
session = xnatClient.getSessionOfSubject(subject, sessionLabel);
}
logger.infoPrintf("Got Session:%s\n", session);
return session;
}
protected XnatScan getCreateMrScan(XnatSubject subject, XnatSession session, String newScanLabel, ScanSetInfo scanSet,
boolean autoCreate) throws WebException, XnatClientException
{
XnatScan scan = xnatClient.getScanByLabel(session, newScanLabel);
if (scan == null)
{
if (autoCreate == false)
return null;
NewScanInfo info = new NewScanInfo(newScanLabel);
// info.scanID=newScanID;
info.note = "No Notes.";
info.series_description = scanSet.getSeriesDescription();
if (StringUtil.isEmpty(info.series_description))
{
info.series_description = "No Description.";
}
// info.scanID=newScanID;
// info.mrScanDataType=mrScanDataType;
info.quality = "usable";
String scanid = xnatClient.createMrScan(session, info);
logger.infoPrintf("Created new Scan:%s\n", scanid);
scan = xnatClient.getScanByLabel(session, newScanLabel);
}
logger.infoPrintf(">>> Got Scan:%s\n", scan);
return scan;
}
protected XnatSubject createNewSubject(String projectId, String subjectLabel) throws Exception
{
return this.getCreateSubject(projectId, subjectLabel, true);
}
protected XnatSession createNewSession(String projectId, String subjectLabel, String newSession) throws Exception
{
return this.getCreateSession(this.getCreateSubject(projectId, subjectLabel, false), newSession, true);
}
/**
*
* Get either the Patient Name or the Patient ID from the subject, depending on the DataSet Configuration.
*
* @param subject
* - Subject.
* @return
*/
protected String getSubjectKey(SubjectInfo subject) throws XnatClientException
{
switch (getCurrentDataSetConfig().subjectKeyType)
{
case CRYPTHASH_PATIENT_NAME:
case PLAIN_PATIENT_NAME:
{
return subject.getPatientName();
}
case CRYPTHASH_PATIENT_ID:
case PLAIN_PATIENT_ID:
{
return subject.getPatientID();
}
default:
{
throw new ParameterException("Cannot determine Subject Key type:" + getCurrentDataSetConfig().subjectKeyType);
}
}
}
/**
* Whether to crypt/hash the Session Key.
*
* @return
*/
protected boolean getDoCryptHashSessionKey()
{
return getCurrentDataSetConfig().getSessionKeyType().getDoCryptHash();
}
/**
* Check whether to crypt+hash the Scan Key (Scan Id)
*
* @return true if the ScanIDs need to be crypto-hashed.
*/
protected boolean getDoCryptHashScanKey()
{
DataSetConfig conf = getCurrentDataSetConfig();
boolean doCryptScanKey = conf.getScanKeyType().getDoCryptHash();
// Atlas files have no ScanID; the ScanId is used as the Atlas (Reconstruction)
// label.
switch (getCurrentDataSetConfig().dataSetType)
{
case NIFTI_ATLASSET:
{
return false;
}
default:
{
return doCryptScanKey;
}
}
}
protected String getScanSetLabelPrefix()
{
return "scn_";
}
/**
* Whether to crypt/hash the Subject Key.
*
* @return
*/
protected boolean getDoCryptHashSubjectKey()
{
return getCurrentDataSetConfig().subjectKeyType.getDoCryptHash();
}
/**
 * Get the Session Key from the study, depending on the DataSet Configuration.
*/
protected String getSessionKey(StudyInfo session) throws XnatClientException
{
switch (getCurrentDataSetConfig().sessionKeyType)
{
            case CRYPTHASH_STUDY_UID:
            case PLAIN_STUDY_UID:
            {
                return session.getStudyInstanceUID();
            }
case STUDY_DATE_YEAR:
{
GregorianCalendar cal = new GregorianCalendar();
cal.setTime(session.getStudyDate());
return "" + cal.get(Calendar.YEAR);
}
case STUDY_DATE_YEAR_MONTH:
{
GregorianCalendar cal = new GregorianCalendar();
cal.setTime(session.getStudyDate());
// january is month '0'!
return "" + cal.get(Calendar.YEAR) + "_" + Presentation.to2decimals(1 + cal.get(Calendar.MONTH));
}
case PATIENT_AGE_YEAR:
case PATIENT_AGE_YEAR_MONTHS:
default:
{
throw new ParameterException("Unsupported Session Key type:" + getCurrentDataSetConfig().sessionKeyType);
}
}
}
// ========================================================================
// Filters
// ========================================================================
/**
     * Add Repetition/Echo time (TR/TE) filter. Times must match (TRMin <= TR <= TRMax) AND (TEMin <= TE <= TEMax).
*/
public void addTRTEFilter(int minTR, int maxTR, int minTE, int maxTE)
{
DicomTagFilter fil = new DicomTagFilters.MinMaxFilter(Tag.RepetitionTime, minTR, maxTR);
filterOptions.dicomTagFilters.add(fil);
fil = new DicomTagFilters.MinMaxFilter(Tag.EchoTime, minTE, maxTE);
filterOptions.dicomTagFilters.add(fil);
}
public ImageDirScanner.FileFilterOptions getFilterOptions()
{
return this.filterOptions;
}
// ========================================================================
// Scan Image Directory
// ========================================================================
public void enableDicom() throws IOException
{
this.getCurrentDataSetConfig().setDataSetType(ImageTypes.DataSetType.DICOM_SCANSET);
saveDataSetConfigs();
}
public void enableNifti() throws IOException
{
this.getCurrentDataSetConfig().setDataSetType(ImageTypes.DataSetType.NIFTI_SCANSET);
saveDataSetConfigs();
}
public void enableAtlas() throws IOException
{
this.getCurrentDataSetConfig().setDataSetType(ImageTypes.DataSetType.NIFTI_ATLASSET);
saveDataSetConfigs();
}
private void initImageDirScanner(ImageDirScannerListener optListener) throws XnatToolException
{
DataSetConfig conf = this.getCurrentDataSetConfig();
if (conf == null)
{
throw new XnatToolException("No DataSet Configured. Please Initialize!");
}
ImageTypes.DataSetType dataType = conf.getDataSetType();
if (dataType == ImageTypes.DataSetType.NIFTI_SCANSET)
{
NiftiDirScanner niftiScanner = new NiftiDirScanner(ImageTypes.DataSetType.NIFTI_SCANSET);
niftiScanner.setScanSubType(conf.getScanSubType());
imageDirScanner = niftiScanner;
}
else if (dataType == ImageTypes.DataSetType.NIFTI_ATLASSET)
{
NiftiDirScanner niftiScanner = new NiftiDirScanner(ImageTypes.DataSetType.NIFTI_ATLASSET);
niftiScanner.setScanSubType(conf.getScanSubType());
imageDirScanner = niftiScanner;
}
else if (dataType == ImageTypes.DataSetType.DICOM_SCANSET)
{
imageDirScanner = new DicomDirScanner();
}
else
{
throw new XnatToolException("Unrecognized DataType:" + dataType);
}
imageDirScanner.addDicomDirListener(this);
if (optListener != null)
{
imageDirScanner.addDicomDirListener(optListener);
}
}
/**
* Scan on the image source directory.
*
     * @param imageSourceDir
     *            The directory to be scanned.
     * @param optListener
     *            - Optional ImageDirScannerListener which receives updates during the scan.
* @return - true if images were found.
* @throws Exception
*/
public boolean doScanImageSourceDir(URI imageSourceDir, ImageDirScannerListener optListener, DicomDirScanner.ScanMonitor optScanMonitor)
throws Exception
{
// Update settings and save before performing actual scan.
this.saveDataSetConfigs();
if (this.dbMapping == null)
{
throw new XnatToolException("InitializationError: DBMapping NOT initialized!");
}
// this re-initialized DBMapping!
// ProcessingOptions options = this.getProcessingOptions();
// this.updateOwnerIDAndPassword(options.getSourceId(),options.getPassword());
initImageDirScanner(optListener);
imageDirScanner.clear();
// update filters:
imageDirScanner.setFilterOption(filterOptions);
// perform scan:
imageDirScanner.scanDirectory(imageSourceDir, true, optScanMonitor);
// update:
logger.infoPrintf("Got %d files and %d ScanSets\n", imageDirScanner.getNumFiles(), imageDirScanner.getNumScanSets());
// process mappings:
addDBMappings(optListener);
return true;
}
@Override
public void notifyImageDirScannerEvent(ImageDirEvent e)
{
logger.debugPrintf("<DicomDirEvent>:%s\n", e);
}
public int getLocalNumScanSets()
{
return imageDirScanner.getNumScanSets();
}
public ImageDirScanner getImageDirScanner()
{
return this.imageDirScanner;
}
// ========================================================================
// DBMappings
// ========================================================================
public int getLocalNumSubjects()
{
String[] keys = dbMapping.getSubjectKeys();
if (keys == null)
return 0;
return keys.length;
}
public int getLocalNumSessions()
{
String[] keys = dbMapping.getSessionKeys();
if (keys == null)
return 0;
return keys.length;
}
protected void addDBMappings(ImageDirScannerListener optListener) throws Exception
{
if (this.dbMapping == null)
throw new XnatToolException("DBMapping NOT initialized!");
dbMapping.clear();
scanSetUids = imageDirScanner.getScanSetUIDs();
Iterator<String> it = scanSetUids.iterator();
int index = 0;
while (it.hasNext())
{
String uid = it.next();
ScanSetInfo scanSet = imageDirScanner.getScanSet(uid);
try
{
addScanMapping(scanSet);
}
catch (Exception e)
{
// init failed!
logger.logException(ClassLogger.ERROR, e, "Failed to add/create DBMapping for scanSet:%s\n", scanSet);
// keep scanset for now.
}
logger.infoPrintf("> SetId[%d]='%s'\n", index++, uid);
SubjectInfo subj = scanSet.getSubjectInfo();
ScanTypeParameters scanType = scanSet.getScanTypeParameters();
StudyInfo studyInf = scanSet.getStudyInfo();
SeriesInfo seriesInf = scanSet.getSeriesInfo();
// do not let null pointer here cause havoc.
try
{
logger.infoPrintf(" - StudyInstanceUID = %s\n", scanSet.getStudyInstanceUID());
logger.infoPrintf(" - StudyID = %s\n", studyInf.getStudyId());
logger.infoPrintf(" - xnatSessionLabel = %s\n",
dbMapping.getXnatSessionLabel(getSubjectKey(subj), getSessionKey(studyInf)));
logger.infoPrintf(" - SeriesInstanceUID = %s\n", scanSet.getSeriesInstanceUID());
logger.infoPrintf(" - SeriesNr = %d\n", seriesInf.getSeriesNr());
logger.infoPrintf(" - SeriesDescription = %s\n", seriesInf.getSeriesDescription());
logger.infoPrintf(" - xnatScanLabel = %s\n",
dbMapping.getXnatScanLabel(scanSet.getStudyInstanceUID(), scanSet.getSeriesInstanceUID()));
logger.infoPrintf(" - SeriesDate = %s\n", seriesInf.getSeriesDate());
logger.infoPrintf(" - StudyDate = %s\n", studyInf.getStudyDate());
logger.infoPrintf(" [ScanType] \n");
logger.infoPrintf(" - Modality = %s\n", scanType.modality);
logger.infoPrintf(" - ScanningSequence = %s\n", scanType.scanningSequence);
logger.infoPrintf(" - TR/TE (TI) = %f/%f (%f)\n", scanType.repeatTime, scanType.echoTime, scanType.inverseTime);
logger.infoPrintf(" - Flip Angle = %f\n", scanType.flipAngle);
logger.infoPrintf(" [Subject]\n");
logger.infoPrintf(" - PatientName = %s\n", subj.getPatientName());
logger.infoPrintf(" - PatientId = %s\n", subj.getPatientID());
logger.infoPrintf(" - xnatSubjectLabel = %s\n",
dbMapping.getCreateXnatSubjectLabel(getSubjectKey(subj), getDoCryptHashSubjectKey(), false));
logger.infoPrintf(" - PatientAge = %s\n", subj.getPatientAgeString());
logger.infoPrintf(" - PatientGender = %s\n", subj.getPatientGender());
logger.infoPrintf(" - PatientBirthDate = %s\n", subj.getPatientBirthDate());
logger.infoPrintf(" [FileSet] \n");
// logger.infoPrintf(" - fileSetID = %s\n",scanSet.fileSetId);
logger.infoPrintf(" - Number of files = %d\n", scanSet.getNumFSNodes());
logger.infoPrintf(" - first file = %s\n", scanSet.getFirstFile());
logger.infoPrintf(" ---------------------\n");
}
catch (Throwable e)
{
logger.logException(ClassLogger.WARN, e, "Logging exception:%s\n", e);
}
}
printMappings(dbMapping, optListener);
if (this.getToolConfig().getAutoCreateMappingsFile())
{
saveMappings(this.currentDataSetConfigName, dbMapping);
}
if (this.getToolConfig().getAutoExtractMetaData())
{
saveMetaData(this.currentDataSetConfigName, this.imageDirScanner, dbMapping);
}
}
protected void addScanMapping(ScanSetInfo scanSet) throws XnatClientException, EncryptionException
{
if (dbMapping == null)
{
throw new Error("DBMapping not yet initialized:dbMapping==null");
}
// Map patientID,patientName => SubjectID
SubjectInfo subjectInfo = scanSet.getSubjectInfo();
String subjectKey = getSubjectKey(subjectInfo);
if (StringUtil.isEmpty(subjectKey))
{
throw new NullPointerException("SubjectKey may not be Null or Empty!");
}
String subjectLabel = dbMapping.getCreateXnatSubjectLabel(subjectKey, getDoCryptHashSubjectKey(), true);
// Map subjectLabel (patientID?)+StudyInstanceUID => sessionLabel
StudyInfo studyInfo = scanSet.getStudyInfo();
// String studyUID=scanSet.getStudyInstanceUID();
String sessionKey = getSessionKey(studyInfo);
if (StringUtil.isEmpty(sessionKey))
{
throw new NullPointerException("sessionKey may not be Null or Empty!");
}
String sessionLabel = dbMapping.getCreateXnatSessionLabel(subjectKey, getDoCryptHashSubjectKey(), sessionKey,
getDoCryptHashSessionKey(), true);
// Map studyUID+SeriesInstanceUID => scanLabel
String seriesUID = scanSet.getSeriesInstanceUID();
String scanLabel = dbMapping.getCreateXnatScanLabel(sessionKey, seriesUID, getDoCryptHashScanKey(), getScanSetLabelPrefix(), true);
logger.infoPrintf("Added mapping PatientID/PatientName:%s/%s\n", subjectInfo.getPatientID(), subjectInfo.getPatientName());
logger.infoPrintf(" - patient -> subject: '%s' (keyType=%s) -> '%s'\n", subjectKey, subjectLabel,
this.getCurrentDataSetConfig().subjectKeyType);
logger.infoPrintf(" - study -> session: '%s' (keyType=%s) -> '%s'\n", sessionKey, sessionLabel,
this.getCurrentDataSetConfig().sessionKeyType);
logger.infoPrintf(" - series -> scanSet: '%s' -> '%s'\n", seriesUID, scanLabel);
}
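    // Mapping chain sketch (illustrative values only): PatientName/ID -> subjectKey
    // -> xnatSubjectLabel; subjectKey + sessionKey (StudyInstanceUID- or
    // StudyDate-derived, see getSessionKey) -> xnatSessionLabel; sessionKey +
    // SeriesInstanceUID -> xnatScanLabel, prefixed with getScanSetLabelPrefix() ("scn_").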
// ========================================================================
// Uploading
// ========================================================================
/**
* Upload Scan files
*
* @throws Exception
*
*/
public void doUploadTo(String projectId) throws Exception
{
createCachedir();
this.doUploadTo(projectId, true, null);
}
public void doUploadTo(String projectId, boolean autoCreate, UploadMonitorListener monitor) throws Exception
{
        if ((monitor != null) && monitor.isCancelled())
        {
            throw new XnatToolException("Upload cancelled.");
        }
// Project must exist!
List<String> scanIds = imageDirScanner.getScanSetUIDList();
int numScans = scanIds.size();
if ((this.xnatClient.isAuthenticated() == false) && (XnatToolMain.demoUpload == false))
{
throw new XnatToolException("User not authenticated. Please login first for user:" + this.getXnatUsername());
}
String sourceId = this.dbMapping.cryptHashSettings.getSourceID();
// ScanSet file stats for monitor:
int scanSetNumFiles[] = new int[numScans];
for (int i = 0; i < numScans; i++)
{
ScanSetInfo scanSet = imageDirScanner.getScanSet(scanIds.get(i));
scanSetNumFiles[i] = scanSet.getNumFSNodes();
}
        String taskName = "Uploading ScanSets";
        if (monitor != null)
        {
            monitor.notifyStartUpload(taskName, scanSetNumFiles);
        }
DataSetConfig dataSetConf = this.getCurrentDataSetConfig();
ImageTypes.DataSetType dataSetType = dataSetConf.getDataSetType();
// nifti sub type:
ImageTypes.ScanSubType scanSubType = dataSetConf.getScanSubType();
for (int scanNr = 0; scanNr < numScans; scanNr++)
{
String scanId = scanIds.get(scanNr);
ScanSetInfo scanSet = imageDirScanner.getScanSet(scanIds.get(scanNr));
// one scan per subject/session
logger.infoPrintf(">>> Uploading ScanSet[#%d]:%s\n", scanNr, scanId);
// Map patientID,patientName => SubjectID
String subjectKey = getSubjectKey(scanSet.getSubjectInfo());
String subjectLabel;
subjectLabel = dbMapping.getCreateXnatSubjectLabel(subjectKey, getDoCryptHashSubjectKey(), false);
if (subjectLabel == null)
{
// initialization error: DBMapping not correct.
throw new XnatToolException("DBMapping Error: Subject Label does not exists for SubjectKey:" + subjectKey);
}
// Map subjectLabel (patientID?)+StudyInstanceUID => sessionLabel
String sessionKey = getSessionKey(scanSet.getStudyInfo());
String sessionLabel = dbMapping.getCreateXnatSessionLabel(subjectKey, getDoCryptHashSubjectKey(), sessionKey,
getDoCryptHashSessionKey(), false);
if (sessionLabel == null)
{
// initialization error: DBMapping not correct.
throw new XnatToolException("DBMapping Error: Session Label does not exists for SessionKey:" + sessionKey);
}
// Map studyUID+SeriesInstanceUID => scanLabel
String scanUID = scanSet.getScanUID();
String scanLabel;
switch (dataSetType)
{
                case DICOM_SCANSET:
                case NIFTI_SCANSET:
                {
                    scanLabel = dbMapping.getCreateXnatScanLabel(sessionKey, scanUID, getDoCryptHashScanKey(), getScanSetLabelPrefix(),
                            autoCreate);
                    break;
                }
case NIFTI_ATLASSET:
{
scanLabel = ImageTypes.getScanSubTypeFileLabel(scanSubType);
break;
}
                default:
                    throw new Error("doUploadTo(): Invalid DataSetType:" + getCurrentDataSetConfig().dataSetType);
}
if (scanLabel == null)
{
// initialization error: DBMapping not correct.
throw new XnatToolException("DBMapping Error: ScanSet Label does not exists for ScanUID(SeriesUID):" + scanUID);
}
XnatSubject subject = null;
XnatSession session = null;
XnatScan scan = null;
scanSet.setSeriesDescription("Scan Set Type:" + dataSetType + ", scanSubType=" + scanSubType + ",ownerId=" + sourceId);
if (XnatToolMain.demoUpload == false)
{
subject = getCreateSubject(projectId, subjectLabel, autoCreate);
assertNotNull("No such subject or creation failed for project/subject:" + projectId + "/" + subjectLabel, subject);
assertEqual("Subject Label in Subject Object must match actual subjectLabel", subjectLabel, subject.getLabel());
session = getCreateSession(subject, sessionLabel, autoCreate);
assertNotNull("No such session or creation failed for subject/session:" + subjectLabel + "/" + sessionLabel, session);
assertEqual("Session Label in Session Object must match actual sessionLabel", sessionLabel, session.getLabel());
// ================
// Create Scan:
// ================
scan = getCreateMrScan(subject, session, scanLabel, scanSet, true);
logger.infoPrintf(">>> Got Scan:%s\n", scan);
}
if (monitor != null)
{
monitor.notifyCollectionStart(scanNr, "ScanSet:#" + scanNr + ".");
}
uploadScanSetFiles(dataSetType, subject, session, scan, scanSet, scanNr, monitor);
scanSet.setUploadFinishedDate(Presentation.now());
if (monitor != null)
{
monitor.notifyCollectionDone(scanNr);
}
}
}
protected void uploadScanSetFiles(ImageTypes.DataSetType dataSetType, XnatSubject subject, XnatSession session, XnatScan xnatScan,
ScanSetInfo scanSet, int scanNr, UploadMonitorListener monitor) throws Exception
{
String subjectLabel = session.getSubjectLabel();
String sessionLabel = session.getLabel();
String scanLabel = xnatScan.getID();
// bridge between PutMonitor and UploadMonitorListener:
PutMonitorAdaptor putMonitor = new PutMonitorAdaptor(monitor);
// query existing files.
FilesCollection fileCollections = this.xnatClient.listScanFiles(session, scanLabel);
List<FileDescriptor> files = scanSet.getFileDescriptors();
if ((files == null) || (files.size() <= 0))
{
throw new XnatToolException("No Files to upload!");
}
if ((dataSetType == ImageTypes.DataSetType.NIFTI_SCANSET) && (files.size() != 1))
{
throw new XnatToolException("When uploading Nifti ScanSets, each ScanSet may only contain one file! Number of files="
+ files.size());
}
// if ( (dataSetType==DataSetType.NIFTI_ATLAS) && (files.size()!=2) )
// {
// throw new
// XnatToolException("When uploading Nifti Atlases, each ScanSet must contain two files! Number of files="+files.size());
// }
boolean isAtlas = (dataSetType == ImageTypes.DataSetType.NIFTI_ATLASSET);
for (int fileNr = 0; fileNr < files.size(); fileNr++)
{
            if ((monitor != null) && monitor.isCancelled())
            {
                throw new XnatToolException("Upload cancelled!");
            }
FileDescriptor fileDescr = files.get(fileNr);
if ((fileDescr == null) || (fileDescr.fsNode == null))
{
// possible since files are ordered using scan nr. and some may
// be missing.
continue;
}
FSPath uploadFile = fileDescr.fsNode;
String fileLabel = fileDescr.fileLabel;
FSPath orgFile = uploadFile;
FSPath sourceFile = orgFile;
FSPath processedFile = null;
if (dataSetType == ImageTypes.DataSetType.DICOM_SCANSET)
{
String destFilename = createXnatDicomTargetFileName(subjectLabel, sessionLabel, scanLabel, fileNr);
boolean doProcessDicom = this.getProcessingOptions().getDoProcessDicom();
if (doProcessDicom)
{
if (monitor != null)
{
monitor.logPrintf("Processing DICOM File:\n - %s\n", sourceFile);
}
processedFile = processFile(this.getCacheDir(), sourceFile, destFilename);
uploadFile = processedFile; // upload new file.
}
long fileSize = uploadFile.getFileSize();
if (monitor != null)
{
monitor.notifyFileStart(scanNr, fileNr, fileSize, "File:" + uploadFile.getPathname());
}
// =======================================
// Check and Verify existing Remote File !
// =========================================
boolean exists = checkAndVerifyExisting(fileCollections, ImageFormatType.DICOM, orgFile, uploadFile, destFilename,
getToolConfig().getAutoResumeAndVerifyUpload());
if (exists)
{
logger.infoPrintf("Remote DICOM file already exists: skip existing:%s\n", orgFile);
if (monitor != null)
{
monitor.logPrintf("Verified: Remote DICOM file already exists and file sizes match for:\n - %s\n", orgFile);
}
}
else
{
if (XnatToolMain.demoUpload)
{
logger.infoPrintf("DEMO:putDicomFile:%s -> %s\n\n", uploadFile, destFilename);
}
else
{
logger.infoPrintf("putDicomFile:%s -> %s\n\n", uploadFile, destFilename);
putDicomFile(session, xnatScan, uploadFile, fileNr, destFilename, putMonitor);
}
}
}
else if (dataSetType == ImageTypes.DataSetType.NIFTI_SCANSET)
{
StringHolder basenameH = new StringHolder();
StringHolder extensionH = new StringHolder();
imageDirScanner.splitBasenameAndExtension(uploadFile, basenameH, extensionH);
String ext = extensionH.value.toLowerCase();
String destFilename = createXnatNiftiTargetFileName(subjectLabel, sessionLabel, scanLabel, ext);
boolean exists = checkAndVerifyExisting(fileCollections, ImageFormatType.NIFTI, orgFile, uploadFile, destFilename,
getToolConfig().getAutoResumeAndVerifyUpload());
if (exists)
{
logger.infoPrintf("Remote NIFTI file already exists: skip existing:%s\n", orgFile);
if (monitor != null)
{
monitor.logPrintf("Verified: Remote NIFTI file already exists and file sizes match for:\n - %s\n", orgFile);
}
}
else
{
long fileSize = uploadFile.getFileSize();
if (monitor != null)
{
monitor.notifyFileStart(scanNr, fileNr, fileSize, uploadFile.toString());
}
if (XnatToolMain.demoUpload)
{
logger.infoPrintf("DEMO: putNiftiFile (atlas=%s): %s -> %s\n\n", isAtlas, uploadFile, destFilename);
}
else
{
logger.infoPrintf("putNiftiFile (atlas=%s): %s -> %s\n\n", isAtlas, uploadFile, destFilename);
putNiftiScanSetFile(session, xnatScan, uploadFile, destFilename, putMonitor);
}
}
}
else if (dataSetType == ImageTypes.DataSetType.NIFTI_ATLASSET)
{
StringHolder basenameH = new StringHolder();
StringHolder extensionH = new StringHolder();
imageDirScanner.splitBasenameAndExtension(uploadFile, basenameH, extensionH);
String ext = extensionH.value.toLowerCase();
String destFilename;
boolean isAnnotation;
String reconId = "";
String atlasScanLabel = null;
if (fileNr == 0)
{
destFilename = createXnatNiftiAtlasFileName(subjectLabel, sessionLabel, scanLabel, ext);
isAnnotation = false;
}
else
{
reconId = session.getLabel() + "_" + fileLabel;
String atlasLabel = "atlas." + fileLabel; // Actual Atlas,
// put in
// reconstructions.
atlasScanLabel = "atlas_" + fileLabel;
destFilename = createXnatNiftiAtlasFileName(subjectLabel, sessionLabel, atlasLabel, ext);
isAnnotation = true;
}
long fileSize = uploadFile.getFileSize();
if (monitor != null)
{
monitor.notifyFileStart(scanNr, fileNr, fileSize, uploadFile.toString());
}
if (isAnnotation)
{
// put annotations under reconstructions of this session:
if (XnatToolMain.demoUpload == false)
{
if (option_putAtlasUnderReconstructions)
{
// create reconstruction;
this.putNiftiReconstructionFile(session, reconId, uploadFile, destFilename, putMonitor);
}
else
{
// create scan with atlas label.
XnatScan atlasScan = getCreateMrScan(subject, session, atlasScanLabel, scanSet, true);
putNiftiScanSetFile(session, atlasScan, uploadFile, destFilename, putMonitor);
}
}
else
{
logger.infoPrintf("putNiftiReconstructionFile reconId= %s (isAnnotation=%s): %s -> %s\n\n", reconId, ""
+ isAnnotation, uploadFile, destFilename);
}
}
else
{
if (XnatToolMain.demoUpload == false)
{
putNiftiScanSetFile(session, xnatScan, uploadFile, destFilename, putMonitor);
}
else
{
logger.infoPrintf("putNiftiFile (isAnnotation=%s): %s -> %s\n\n", "" + isAnnotation, uploadFile, destFilename);
}
}
}
if (monitor != null)
{
monitor.notifyFileDone(scanNr, fileNr);
}
if ((processedFile != null) && (config.getKeepProcessedDicomFile() == false))
{
try
{
processedFile.delete();
}
catch (Exception e)
{
logger.logException(ClassLogger.ERROR, e, "Failed to delete image file:" + processedFile);
}
}
} // for files
}
private boolean checkAndVerifyExisting(FilesCollection filesCollection, ImageFormatType formatType, FSPath orgFile, FSPath uploadFile,
String destFilename, boolean autoResumeAndVerifyUpload) throws XnatToolException, IOException
{
String resourceLabel = "";
if (formatType != null)
{
resourceLabel = formatType.toString();
}
XnatFile remoteFile = filesCollection.getFile(resourceLabel, destFilename);
if (remoteFile == null)
{
return false;
}
        if (autoResumeAndVerifyUpload == false)
{
throw new XnatToolException("Remote file already exists:" + remoteFile);
}
// check size
if (remoteFile.getFileSize() != uploadFile.getFileSize())
{
throw new XnatToolException("Verify Failed: Remote file exists already but has different file size!\n"
+ "Size=" + orgFile.getFileSize() + "; for Source File:" + orgFile + "\n"
+ "Size=" + uploadFile.getFileSize() + "; for Processed File:" + uploadFile + "\n"
+ "Size=" + remoteFile.getFileSize() + "; for Remote File=" + remoteFile.getFileName() + "\n");
}
return true;
}
protected void putDicomFile(XnatSession session, XnatScan scan, FSPath file, int fileNum, String targetFilename, PutMonitor putMonitor)
throws Exception
{
        // currently supports DICOM/T1 only!
ImageFileInfo info = new ImageFileInfo(ImageFormatType.DICOM, ImageContentType.T1_RAW);
info.setDestinationFilename(targetFilename);
if (XnatToolMain.demoUpload)
{
logger.infoPrintf(">>> Dummy Run for slice/file: #%d/%s\n", fileNum, file);
logger.infoPrintf(" - > target filename=%s\n", info.getDestinationFilename());
try
{
Thread.sleep(10);
}
catch (Throwable t)
{
;
}
}
else
{
logger.infoPrintf(">>> Uploading set['%s']#%d=%s\n", scan.getID(), fileNum, file);
logger.infoPrintf(" - > target filename=%s\n", info.getDestinationFilename());
String resultId = this.xnatClient.putDicomFile(session, scan, file.getPathname(), info, putMonitor);
logger.debugPrintf(">>> result=%s\n", resultId);
}
}
protected void putNiftiScanSetFile(XnatSession session, XnatScan scan, FSPath file, String targetFilename, PutMonitor putMonitor)
throws Exception
{
ImageFileInfo info = new ImageFileInfo(ImageFormatType.NIFTI, ImageContentType.T1_RECON);
info.setDestinationFilename(targetFilename);
String scanId = scan.getID();
if (XnatToolMain.demoUpload)
{
logger.infoPrintf(">>> Dummy Run for nifti: %s\n", file);
logger.infoPrintf(" - > target filename=%s\n", info.getDestinationFilename());
try
{
Thread.sleep(10);
}
catch (Throwable t)
{
;
}
}
else
{
logger.infoPrintf(">>> Uploading Nifti ScanSet: %s\n", scan.getID(), file);
logger.infoPrintf(" - > target filename=%s\n", info.getDestinationFilename());
String resultId = this.xnatClient.putNiftiScanFile(session, scanId, file.getPathname(), info, putMonitor);
logger.debugPrintf(">>> result=%s\n", resultId);
}
}
protected void putNiftiReconstructionFile(XnatSession session, String reconId, FSPath file, String targetFilename, PutMonitor putMonitor)
throws Exception
{
ImageFileInfo info = new ImageFileInfo(ImageFormatType.NIFTI, ImageContentType.T1_RECON);
info.setDestinationFilename(targetFilename);
if (XnatToolMain.demoUpload)
{
logger.infoPrintf(">>> Dummy Run for nifti: %s\n", file);
logger.infoPrintf(" - > target filename=%s\n", info.getDestinationFilename());
try
{
Thread.sleep(10);
}
catch (Throwable t)
{
;
}
}
else
{
logger.infoPrintf(">>> putReconstructionNiftiFile session/reconId:([%s,%s]: %s\n", session.getLabel(), reconId, file);
logger.infoPrintf(" - > target filename=%s\n", info.getDestinationFilename());
String resultId;
// create reconstruction first:
String status = xnatClient.createReconstruction(session, reconId, info.getContentType());
// put reconstruction:
resultId = this.xnatClient.putReconstructionFile(session, reconId, file.getPathname(), info, putMonitor);
logger.debugPrintf(">>> result=%s\n", resultId);
}
}
protected static String createXnatDicomTargetFileName(String subjectLabel, String sessionLabel, String scanLabel, int fileNum)
{
        // pad with zeros:
String fileNumStr = Presentation.to3decimals(fileNum);
String fileName = subjectLabel + "." + sessionLabel + "." + scanLabel + ".file_" + fileNumStr + ".dcm";
return fileName;
}
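    // Example (hypothetical labels): subjectLabel "s001", sessionLabel "sess1",
    // scanLabel "scn_2" and fileNum 7 yield "s001.sess1.scn_2.file_007.dcm".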
protected static String createXnatNiftiTargetFileName(String subjectLabel, String sessionLabel, String scanLabel, String ext)
{
String fileName = subjectLabel + "." + sessionLabel + "." + scanLabel + "." + ext;
return fileName;
}
protected static String createXnatNiftiAtlasFileName(String subjectLabel, String sessionLabel, String atlasLabel, String ext)
{
String fileName = subjectLabel + "." + sessionLabel;
if (atlasLabel != null)
{
fileName += "." + atlasLabel;
}
fileName += "." + ext;
return fileName;
}
protected FSPath processFile(FSPath tmpDir, FSPath sourceFile, String destFilename) throws Exception
{
logger.debugPrintf("Processing dicom file:%s to:%s\n", sourceFile, destFilename);
DicomWrapper wrap = DicomWrapper.readFrom(sourceFile.getURI());
wrap.setIsModifyable(true);
wrap = dicomProcessor.process(wrap);
wrap.performChecks(true);
DicomObject dic = wrap.getDicomObject();
FSPath destFile = tmpDir.resolvePath(destFilename);
DicomUtil.writeDicom(dic, destFile.getPathname());
return destFile;
}
// ========================================================================
// Utils
// ========================================================================
public FSUtil getFSUtil()
{
return FSUtil.getDefault();
}
// ========================================================================
// DataSetConfig Management
// ========================================================================
protected URI createDataSetsConfigLocation() throws URISyntaxException
{
URI loc = getToolConfig().getDataSetsConfigDir();
if (loc == null)
return null;
loc = URIUtil.appendPath(loc, "datasets_config.xcfg");
return loc;
}
protected void saveDataSetConfigs() throws IOException
{
try
{
            URI loc = createDataSetsConfigLocation();
            if (loc == null)
            {
                throw new IOException("No DataSets config directory configured!");
            }
            DataSetConfigList.saveTo(this.dataSetConfigs, loc.getPath());
}
catch (URISyntaxException e)
{
throw new IOException("Couldn't create target file URI:" + e.getReason(), e);
}
}
public String getDataSetConfigName()
{
return currentDataSetConfigName;
}
protected boolean reloadDataSetConfigs() throws IOException, URISyntaxException
{
URI loc = createDataSetsConfigLocation();
if (loc == null)
return false;
try
{
this.dataSetConfigs = DataSetConfigList.loadFrom(loc.getPath());
}
catch (Exception e)
{
this.dataSetConfigs = new DataSetConfigList();
logger.warnPrintf("Could NOT (re)load DataSetConfigurations from :%s\n", loc);
return false;
}
// update with first name from list:
if ((dataSetConfigs != null) && (dataSetConfigs.size() > 0))
{
logger.infoPrintf("reloadDataSetConfigs() Loaded: %d DataSet configurations from:%s\n", dataSetConfigs.size(), loc);
this.currentDataSetConfigName = dataSetConfigs.getDataSetConfig(0).getDataSetName();
logger.infoPrintf("reloadDataSetConfigs() Setting current dataset name=%s\n", currentDataSetConfigName);
}
else
{
logger.infoPrintf("reloadDataSetConfigs() No DataSet configurations from:%s\n", loc);
this.currentDataSetConfigName = null; // should block processing!
}
return true;
}
public DataSetConfig switchToDataSetConfig(String name) throws Exception
{
if (name == null)
throw new NullPointerException("Name is NULL!");
if (dataSetConfigs == null)
return null;
DataSetConfig config = getDataSetConfigs(true).getDataSetConfig(name);
if (config != null)
{
this.currentDataSetConfigName = config.getDataSetName();
updateCurrentDataSetConfig(config);
return config;
}
return null;
}
protected void updateCurrentDataSetConfig(DataSetConfig dataSetConfig) throws Exception
{
currentDataSetConfigName = dataSetConfig.dataSetName;
// invalid config:
if (dataSetConfig.getDicomProcessingProfile() == null)
{
throw new XnatToolException("DataSetConfiguration doesn't contains dicomProcessingProfile!\n"
+ XmlUtil.prettyFormat(config.toXML(), 3));
}
initDicomProcessor(dataSetConfig.dicomProcessingProfile);
// clear DB Mapping!
this.dbMapping = null;
}
public List<String> getDataSetNames()
{
return this.getDataSetConfigs(true).getDataSetConfigNames();
}
public boolean createDataSetsConfigDir() throws IOException
{
URI uri = this.getToolConfig().getDataSetsConfigDir();
FSUtil fs = getFSUtil();
FSPath dir = fs.newLocalDir(uri);
if (dir.exists())
{
return true;
}
else
{
// auto create parent as well.
if (dir.getParent().exists() == false)
{
dir.getParent().mkdir();
}
}
dir.mkdir();
logger.infoPrintf("Created new DataSet config directory:%s\n", uri);
return true;
}
protected DataSetConfigList getDataSetConfigs(boolean autoInit)
{
if ((dataSetConfigs == null) && (autoInit))
dataSetConfigs = new DataSetConfigList();
return dataSetConfigs;
}
public DataSetConfig getCurrentDataSetConfig()
{
return getDataSetConfigs(true).getDataSetConfig(this.currentDataSetConfigName);
}
public DataSetConfig createNewDataSetConfig(String sourceId, String newName) throws Exception
{
// defaults:
DicomProcessingProfile dicomOpts = DicomProcessingProfile.createDefault();
DataSetConfig newConfig = new DataSetConfig(dicomOpts);
newConfig.setSourceId(sourceId);
newConfig.setDataSetName(newName);
newConfig.setImageSourceDir(URIUtil.appendPath(FSUtil.getDefault().getUserHome(), "dicom"));
Exception e1 = null;
try
{
getDataSetConfigs(true).add(newConfig);
this.saveDataSetConfigs();
}
catch (Exception e)
{
e1 = e;
}
// update
switchToDataSetConfig(newName);
if (e1 != null)
throw e1;
return newConfig;
}
public void setDataSetImageSourceDir(String loc) throws XnatToolException, IOException
{
DataSetConfig setConf = getCurrentDataSetConfig();
if (setConf == null)
{
throw new XnatToolException("No Current DataSet created!");
}
setConf.setImageSourceDir(resolveURI(loc));
saveDataSetConfigs();
}
public void setDataSetScanSubType(ImageTypes.ScanSubType subType) throws XnatToolException, IOException
{
DataSetConfig setConf = getCurrentDataSetConfig();
if (setConf == null)
{
throw new XnatToolException("No Current DataSet created!");
}
setConf.setScanSubType(subType);
saveDataSetConfigs();
}
private URI resolveURI(String loc) throws FileURISyntaxException
{
return getFSUtil().resolvePathURI(loc);
}
public DataSetConfig getDataSetConfig(String name)
{
return this.getDataSetConfigs(true).getDataSetConfig(name);
}
public void clearImageCacheDir() throws IOException
{
URI cacheDir = this.getToolConfig().getImageCacheDir();
FSPath dir = this.getFSUtil().newLocalDir(cacheDir);
FSPath nodes[] = dir.listNodes();
for (FSPath node : nodes)
{
logger.debugPrintf("Deleting cache file:%s\n", node);
node.delete();
}
}
/**
* Perform graceful exit and cleanup/dispose held resources.
*/
public void dispose()
{
if (getToolConfig().getClearImageCacheDirAfterExit())
{
try
{
this.clearImageCacheDir();
}
catch (IOException e)
{
e.printStackTrace();
}
}
if (imageDirScanner != null)
{
imageDirScanner.dispose();
imageDirScanner = null;
}
if (this.dbMapping != null)
{
this.dbMapping.dispose();
this.dbMapping = null;
}
if (this.dicomProcessor != null)
{
this.dicomProcessor.dispose();
this.dicomProcessor = null;
}
}
public void doUploadCSV(String projectId, CSVData csvData, boolean autoCreateSubjects, boolean autoCreateSessions,
UploadMonitor uploadMonitor) throws WebException, XnatClientException
{
xnatClient.putMetaData(projectId, csvData, autoCreateSubjects, autoCreateSessions, uploadMonitor);
}
// ==================
// XnatCMD Interface
// ==================
}
|
Legacy-LuaSTG-Engine/LuaSTG-Sub
|
fancylib/fcyRefObj.h
|
////////////////////////////////////////////////////////////////////////////////
/// @file fcyRefObj.h
/// @brief Describes and implements the reference-counting interface
////////////////////////////////////////////////////////////////////////////////
#pragma once
#include "fcyType.h"
#define FCYREFOBJ
////////////////////////////////////////////////////////////////////////////////
/// @brief Platform interface
////////////////////////////////////////////////////////////////////////////////
long _api_InterlockedIncrement(long volatile *add_);
long _api_InterlockedDecrement(long volatile *add_);
////////////////////////////////////////////////////////////////////////////////
/// @brief Reference-counting interface definition
/// @note AddRef and Release should be synchronized (locked)
////////////////////////////////////////////////////////////////////////////////
struct fcyRefObj
{
    virtual void AddRef()=0;  ///< @brief Increase the reference count of this interface
    virtual void Release()=0; ///< @brief Decrease the reference count of this interface
                              ///< @note The object is destroyed when the counter reaches 0
virtual ~fcyRefObj() {}
};
////////////////////////////////////////////////////////////////////////////////
/// @brief Reference-counting interface implementation
/// @note Uses a template for code reuse and to avoid diamond inheritance
////////////////////////////////////////////////////////////////////////////////
template<class T>
class fcyRefObjImpl : public T
{
private:
long m_cRef;
public:
virtual void AddRef()
{
_api_InterlockedIncrement(&m_cRef);
}
virtual void Release()
{
const long tRet = _api_InterlockedDecrement(&m_cRef);
if(tRet <= 0)
delete this;
}
private:
fcyRefObjImpl(const fcyRefObjImpl& Org);
public:
fcyRefObjImpl() : m_cRef(1) {}
virtual ~fcyRefObjImpl() {}
};
////////////////////////////////////////////////////////////////////////////////
/// @brief Smart pointer
////////////////////////////////////////////////////////////////////////////////
template<typename T>
class fcyRefPointer
{
protected:
T* m_pPointer;
public:
fcyRefPointer& DirectSet(T* Ptr)
{
FCYSAFEKILL(m_pPointer);
m_pPointer = Ptr;
return *this;
}
public:
bool operator==(const fcyRefPointer& Right)const
{
return (m_pPointer == Right.m_pPointer);
}
fcyRefPointer& operator=(const fcyRefPointer& Right)
{
if(m_pPointer != Right.m_pPointer)
{
FCYSAFEKILL(m_pPointer);
m_pPointer = Right.m_pPointer;
if(m_pPointer)
m_pPointer->AddRef();
}
return *this;
}
T* operator->()const
{
return m_pPointer;
}
T* operator*()const
{
return m_pPointer;
}
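    // Note: operator~ below releases the held object and returns the address of
    // the raw slot, so the pointer can be passed to factory-style out-parameters
    // (illustrative pattern: Create(~ptr)).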
T** operator~()
{
FCYSAFEKILL(m_pPointer);
return &m_pPointer;
}
operator T*()const
{
return m_pPointer;
}
template<typename P>
operator fcyRefPointer<P>()
{
fcyRefPointer<P> tRet = fcyRefPointer<P>((P*)m_pPointer);
return tRet;
}
template<typename P>
operator fcyRefPointer<P>()const
{
fcyRefPointer<P> tRet = fcyRefPointer<P>((P*)m_pPointer);
return tRet;
}
public:
fcyRefPointer()
: m_pPointer(NULL) {}
fcyRefPointer(T* pObj)
: m_pPointer(pObj)
{
if(pObj)
pObj->AddRef();
}
fcyRefPointer(const fcyRefPointer& Right)
: m_pPointer(Right.m_pPointer)
{
if(m_pPointer)
m_pPointer->AddRef();
}
~fcyRefPointer()
{
FCYSAFEKILL(m_pPointer);
}
};
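// Usage sketch (an assumption, not part of the original header): objects derived
// from fcyRefObjImpl start with a reference count of 1, so wrapping a freshly
// created object via the T* constructor bumps the count to 2; use DirectSet to
// adopt the initial reference instead:
//   fcyRefPointer<MyObj> p;       // MyObj : public fcyRefObjImpl<...> (hypothetical)
//   p.DirectSet(new MyObj());     // count stays 1; released by ~fcyRefPointer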
|
hackerlank/SourceCode
|
Game/OGRE/PlatformManagers/Win32/src/OgreWin32PlatformDll.cpp
|
/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org/
Copyright (c) 2000-2005 The OGRE Team
Also see acknowledgements in Readme.html
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA, or go to
http://www.gnu.org/copyleft/lesser.txt.
-----------------------------------------------------------------------------
*/
#include "OgreWin32ConfigDialog.h"
#include "OgreWin32ErrorDialog.h"
#ifndef OGRE_NO_DX_INPUT
#ifdef DX7INPUTONLY
#include "OgreWin32Input.h"
#else
#include "OgreWin32Input8.h"
#endif
#endif
#include "OgreWin32Timer.h"
#include "OgreRoot.h"
#include "OgreLogManager.h"
#include "OgreRenderWindow.h"
#if OGRE_COMPILER == OGRE_COMPILER_MSVC
#if defined(_M_IA64)
#pragma section(".base", long, read)
extern "C"
__declspec(allocate(".base"))
const IMAGE_DOS_HEADER __ImageBase;
#else
extern "C"
const IMAGE_DOS_HEADER __ImageBase;
#endif
#endif // OGRE_COMPILER == OGRE_COMPILER_MSVC
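// Note: the MSVC linker places __ImageBase at the module's base address, so
// taking its address yields this DLL's own HINSTANCE without GetModuleHandle.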
namespace Ogre {
#ifdef DEBUG
int g_iCreatedConfigDiag = 0;
int g_iCreatedErrorDiag = 0;
int g_iCreatedRenderWindow = 0;
int g_iCreatedInputReader = 0;
#endif
/// Retrieves an instance of a config dialog for this platform
void createPlatformConfigDialog(ConfigDialog** ppDlg)
{
// Must get HINSTANCE
#if OGRE_COMPILER == OGRE_COMPILER_MSVC
HINSTANCE hInst = (HINSTANCE) &__ImageBase;
#else
HINSTANCE hInst = GetModuleHandle(OGRE_PLATFORM_LIB);
#endif
*ppDlg = new Win32ConfigDialog(hInst);
#ifdef DEBUG
g_iCreatedConfigDiag++;
#endif
}
/// Retrieves an instance of an error dialog for this platform
void createPlatformErrorDialog(ErrorDialog** ppDlg)
{
#if OGRE_COMPILER == OGRE_COMPILER_MSVC
HINSTANCE hInst = (HINSTANCE) &__ImageBase;
#else
HINSTANCE hInst = GetModuleHandle(OGRE_PLATFORM_LIB);
#endif
*ppDlg = new Win32ErrorDialog(hInst);
#ifdef DEBUG
g_iCreatedErrorDiag++;
#endif
}
/// Creates a Timer using default implementation
void createMyTimer(Timer** ppTimer)
{
*ppTimer = new Win32Timer();
(*ppTimer)->reset();
}
void destroyMyTimer(Timer* ppTimer)
{
delete ppTimer;
}
/// Retrieves an instance of an input reader for this platform
void createPlatformInputReader(InputReader** ppReader)
{
#ifndef OGRE_NO_DX_INPUT
#ifdef DX7INPUTONLY
*ppReader = new Win32Input();
#else
*ppReader = new Win32Input8();
#endif
#endif
#ifdef DEBUG
g_iCreatedInputReader++;
#endif
}
/// Destroys a config dialog created by createPlatformConfigDialog
void destroyPlatformConfigDialog(ConfigDialog* dlg)
{
delete dlg;
#ifdef DEBUG
g_iCreatedConfigDiag--;
#endif
}
/// Destroys an error dialog created by createPlatformErrorDialog
void destroyPlatformErrorDialog(ErrorDialog* dlg)
{
delete dlg;
#ifdef DEBUG
g_iCreatedErrorDiag--;
#endif
}
/// Destroys a platform render window
void destroyPlatformRenderWindow(RenderWindow* wnd)
{
delete wnd;
#ifdef DEBUG
g_iCreatedRenderWindow--;
#endif
}
/// Destroys an input reader created by createPlatformInputReader
void destroyPlatformInputReader(InputReader* reader)
{
delete reader;
#ifdef DEBUG
g_iCreatedInputReader--;
#endif
}
void MymessagePump(RenderWindow* rw)
{
//A simple Win32 event pump
MSG msg;
while( PeekMessage( &msg, NULL, 0U, 0U, PM_REMOVE ) )
{
TranslateMessage( &msg );
DispatchMessage( &msg );
}
}
#ifdef DEBUG
BOOL WINAPI DllMain( HINSTANCE hinstDLL, // handle to DLL module
DWORD fdwReason, // reason for calling function
LPVOID lpvReserved // reserved
)
{
if( fdwReason == DLL_THREAD_DETACH ) {
if( g_iCreatedConfigDiag )
LogManager::logMessage( "Memory Leak: Not all platform configuration dialogs were destroyed!!!", LML_CRITICAL );
if( g_iCreatedErrorDiag )
LogManager::logMessage( "Memory Leak: Not all platform error dialogs were destroyed!!!", LML_CRITICAL );
if( g_iCreatedRenderWindow )
LogManager::logMessage( "Memory Leak: Not all platform render windows were destroyed!!!", LML_CRITICAL );
if( g_iCreatedInputReader )
LogManager::logMessage( "Memory Leak: Not all platform input readers were destroyed!!!", LML_CRITICAL );
}
    return TRUE;
}
#endif
}
|
glatteis/tacas21-artifact
|
artifact/storm/src/storm/utility/math.h
|
#ifndef STORM_UTILITY_MATH_H_
#define STORM_UTILITY_MATH_H_
#include <cmath>
#include "storm/utility/macros.h"
#include "storm/utility/OsDetection.h"
namespace storm {
namespace utility {
namespace math {
// We provide this method explicitly, because MSVC does not offer it (non-C99 compliant).
template<typename ValueType>
static inline double log2(ValueType number) {
# ifndef WINDOWS
return std::log2(number);
# else
return std::log(number) / std::log(2);
# endif
}
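            // uint64_log2 below computes floor(log2(n)) with a 6-step binary
            // search over the bit width: each S(k) shifts out k bits whenever
            // n still has a set bit at position >= k, accumulating the result in i.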
inline uint64_t uint64_log2(uint64_t n)
{
STORM_LOG_ASSERT(n != 0, "N is 0.");
#define S(k) if (n >= (UINT64_C(1) << k)) { i += k; n >>= k; }
uint64_t i = 0; S(32); S(16); S(8); S(4); S(2); S(1); return i;
#undef S
}
}
}
}
#endif /* STORM_UTILITY_MATH_H_ */
|
shijingsh/shijingsh-ai2
|
shijingsh-ai-jsat/src/main/java/com/shijingsh/ai/jsat/clustering/evaluation/AdjustedRandIndex.java
|
package com.shijingsh.ai.jsat.clustering.evaluation;
import static java.lang.Math.exp;
import static java.lang.Math.log;
import java.util.List;
import com.shijingsh.ai.jsat.DataSet;
import com.shijingsh.ai.jsat.classifiers.ClassificationDataSet;
import com.shijingsh.ai.jsat.classifiers.DataPoint;
/**
* Adjusted Rand Index (ARI) is a measure to evaluate a cluster based on the
* true class labels for the data set. The ARI normally returns a value in [-1,
 * 1], where 0 indicates the clustering appears random, 1 indicates the
 * clusters perfectly match the class labels, and negative values indicate a
 * clustering that is worse than random. To match the {@link ClusterEvaluation}
 * interface, the value returned by evaluate will be 1.0-Adjusted Rand Index, so
 * the best value becomes 0.0 and the worst value becomes 2.0. <br>
* <b>NOTE:</b> Because the ARI needs to know the true class labels, only
* {@link #evaluate(int[], DataSet) } will work, since it
* provides the data set as an argument. The dataset given must be an instance
* of {@link ClassificationDataSet}
*
* @author <NAME>
*/
public class AdjustedRandIndex implements ClusterEvaluation {
@Override
public double evaluate(int[] designations, DataSet dataSet) {
if (!(dataSet instanceof ClassificationDataSet))
throw new RuntimeException("NMI can only be calcuate for classification data sets");
ClassificationDataSet cds = (ClassificationDataSet) dataSet;
int clusters = 0;// how many clusters are there?
for (int clusterID : designations)
clusters = Math.max(clusterID + 1, clusters);
double[] truthSums = new double[cds.getClassSize()];
double[] clusterSums = new double[clusters];
double[][] table = new double[clusterSums.length][truthSums.length];
double n = 0.0;
for (int i = 0; i < designations.length; i++) {
int cluster = designations[i];
if (cluster < 0)
continue;// noisy point
int label = cds.getDataPointCategory(i);
double weight = cds.getWeight(i);
table[cluster][label] += weight;
truthSums[label] += weight;
clusterSums[cluster] += weight;
n += weight;
}
        /*
         * The Adjusted Rand Index involves many (n choose 2) = 1/2 n (n-1) terms
         */
double sumAllTable = 0.0;
double addCTerm = 0.0, addLTerm = 0.0;// clustering and label
for (int i = 0; i < table.length; i++) {
double a_i = clusterSums[i];
addCTerm += a_i * (a_i - 1) / 2;
for (int j = 0; j < table[i].length; j++) {
if (i == 0) {
double b_j = truthSums[j];
addLTerm += b_j * (b_j - 1) / 2;
}
double n_ij = table[i][j];
double n_ij_c2 = n_ij * (n_ij - 1) / 2;
sumAllTable += n_ij_c2;
}
}
        double longMultTerm = exp(log(addCTerm) + log(addLTerm) - (log(n) + log(n - 1) - log(2)));// numerically more stable version
return 1.0 - (sumAllTable - longMultTerm) / (addCTerm / 2 + addLTerm / 2 - longMultTerm);
}
@Override
public double naturalScore(double evaluate_score) {
        // returns values in the range of [1, -1], with 1=best, and -1=worst
return -evaluate_score + 1;
}
@Override
public double evaluate(List<List<DataPoint>> dataSets) {
throw new UnsupportedOperationException("Adjusted Rand Index requires the true data set" + " labels, call evaluate(int[] designations, DataSet dataSet)" + " instead");
}
@Override
public ClusterEvaluation clone() {
return new AdjustedRandIndex();
}
}
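// Usage sketch (hypothetical variable names; assumes the data set is a
// ClassificationDataSet, as the javadoc above requires):
//   ClusterEvaluation eval = new AdjustedRandIndex();
//   double score = eval.evaluate(designations, classificationDataSet);
//   // score is 1.0 - ARI: 0.0 = perfect agreement, ~1.0 = random, 2.0 = worst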
|
nathanfaucett/js-frontend-template
|
config/tasks/webpack.js
|
var vfs = require("vinyl-fs"),
webpack = require("webpack-stream"),
filePath = require("@nathanfaucett/file_path");
var webpackConfig = function(config) {
return {
devtool: "source-map",
output: {
filename: "index.js"
},
module: {
loaders: [{
test: /\.js?$/,
exclude: /node_modules/,
loader: "babel",
query: {
presets: ["react", "es2015"]
}
}]
}
};
};
module.exports = function(config) {
return function() {
return vfs.src(config.paths.js_src)
.pipe(webpack(webpackConfig(config)))
.on("error", function handleError(e) {
this.emit("end");
})
.pipe(vfs.dest(config.paths.out));
};
};
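// Usage sketch (assumed gulp-style wiring; the config shape shown here is an
// assumption, not part of this file):
//   var gulp = require("gulp");
//   gulp.task("webpack", require("./config/tasks/webpack")({
//       paths: { js_src: "src/index.js", out: "build" }
//   }));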
|
macedo22/spectre
|
tests/Unit/Helpers/PointwiseFunctions/Hydro/TestHelpers.hpp
|
// Distributed under the MIT License.
// See LICENSE.txt for details.
#pragma once
#include <random>
#include "DataStructures/Tensor/TypeAliases.hpp"
/// \cond
namespace gsl {
template <typename T>
class not_null;
} // namespace gsl
/// \endcond
namespace TestHelpers {
/// \ingroup TestingFrameworkGroup
/// \brief Make random hydro variables which have correct physical behavior,
/// e.g. the Lorentz factor will be greater than or equal to one.
namespace hydro {
template <typename DataType>
Scalar<DataType> random_density(gsl::not_null<std::mt19937*> generator,
const DataType& used_for_size) noexcept;
template <typename DataType>
Scalar<DataType> random_lorentz_factor(gsl::not_null<std::mt19937*> generator,
const DataType& used_for_size) noexcept;
template <typename DataType, size_t Dim>
tnsr::I<DataType, Dim> random_velocity(
gsl::not_null<std::mt19937*> generator,
const Scalar<DataType>& lorentz_factor,
const tnsr::ii<DataType, Dim>& spatial_metric) noexcept;
template <typename DataType>
Scalar<DataType> random_temperature(gsl::not_null<std::mt19937*> generator,
const DataType& used_for_size) noexcept;
template <typename DataType>
Scalar<DataType> random_specific_internal_energy(
gsl::not_null<std::mt19937*> generator,
const DataType& used_for_size) noexcept;
template <typename DataType>
tnsr::I<DataType, 3> random_magnetic_field(
gsl::not_null<std::mt19937*> generator, const Scalar<DataType>& pressure,
const tnsr::ii<DataType, 3>& spatial_metric) noexcept;
template <typename DataType>
Scalar<DataType> random_divergence_cleaning_field(
gsl::not_null<std::mt19937*> generator,
const DataType& used_for_size) noexcept;
} // namespace hydro
} // namespace TestHelpers
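// Usage sketch (an assumption mirroring typical SpECTRE unit tests, not an API
// guarantee): with a std::mt19937 `gen` and a DataVector sized like the grid,
//   const auto lorentz = TestHelpers::hydro::random_lorentz_factor(
//       make_not_null(&gen), used_for_size);
// the returned Scalar satisfies get(lorentz) >= 1 at every point.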
|
Jorropo/js-libp2p
|
examples/pnet-ipfs/index.js
|
/* eslint no-console: ["off"] */
'use strict'
const IPFS = require('ipfs')
const assert = require('assert').strict
const { generate: writeKey } = require('libp2p/src/pnet')
const path = require('path')
const fs = require('fs')
const privateLibp2pBundle = require('./libp2p-bundle')
const { mkdirp } = require('./utils')
// Create two separate repo paths so we can run two nodes and check their output
const repo1 = path.resolve('./tmp', 'repo1', '.ipfs')
const repo2 = path.resolve('./tmp', 'repo2', '.ipfs')
mkdirp(repo1)
mkdirp(repo2)
// Create a buffer and write the swarm key to it
const swarmKey = Buffer.alloc(95)
writeKey(swarmKey)
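// (95 bytes fits the libp2p PSK file format: the "/key/swarm/psk/1.0.0/" and
// "/base16/" header lines plus a 64-character hex-encoded 256-bit key.)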
// This key is for the `TASK` mentioned in the writeFileSync calls below
const otherSwarmKey = Buffer.alloc(95)
writeKey(otherSwarmKey)
// Add the swarm key to both repos
const swarmKey1Path = path.resolve(repo1, 'swarm.key')
const swarmKey2Path = path.resolve(repo2, 'swarm.key')
fs.writeFileSync(swarmKey1Path, swarmKey)
// TASK: switch the commented out line below so we're using a different key, to see the nodes fail to connect
fs.writeFileSync(swarmKey2Path, swarmKey)
// fs.writeFileSync(swarmKey2Path, otherSwarmKey)
// Create the first ipfs node
const node1 = new IPFS({
repo: repo1,
libp2p: privateLibp2pBundle(swarmKey1Path),
config: {
Addresses: {
      // Set the swarm address so we don't get port collisions between the nodes
Swarm: ['/ip4/0.0.0.0/tcp/9101']
}
}
})
// Create the second ipfs node
const node2 = new IPFS({
repo: repo2,
libp2p: privateLibp2pBundle(swarmKey2Path),
config: {
Addresses: {
      // Set the swarm address so we don't get port collisions between the nodes
Swarm: ['/ip4/0.0.0.0/tcp/9102']
}
}
})
console.log('auto starting the nodes...')
// `nodesStarted` keeps track of how many of our nodes have started
let nodesStarted = 0
/**
* Calls `connectAndTalk` when both nodes have started
* @returns {void}
*/
const didStartHandler = () => {
if (++nodesStarted === 2) {
// If both nodes are up, start talking
connectAndTalk()
}
}
/**
* Exits the process when all started nodes have stopped
* @returns {void}
*/
const didStopHandler = () => {
if (--nodesStarted < 1) {
console.log('all nodes stopped, exiting.')
process.exit(0)
}
}
/**
* Stops the running nodes
* @param {Error} err An optional error to log to the console
* @returns {void}
*/
const doStop = (err) => {
if (err) {
console.error(err)
}
console.log('Shutting down...')
node1.stop()
node2.stop()
}
/**
* Connects the IPFS nodes and transfers data between them
* @returns {void}
*/
const connectAndTalk = async () => {
console.log('connecting the nodes...')
const node2Id = await node2.id()
const dataToAdd = Buffer.from('Hello, private friend!')
// Connect the nodes
// This will error when different private keys are used
try {
await node1.swarm.connect(node2Id.addresses[0])
} catch (err) {
return doStop(err)
}
console.log('the nodes are connected, let\'s add some data')
// Add some data to node 1
let addedCID
try {
addedCID = await node1.add(dataToAdd)
} catch (err) {
return doStop(err)
}
console.log(`added ${addedCID[0].path} to the node1`)
// Retrieve the data from node 2
let cattedData
try {
cattedData = await node2.cat(addedCID[0].path)
} catch (err) {
return doStop(err)
}
assert.deepEqual(cattedData.toString(), dataToAdd.toString(), 'Should have equal data')
console.log(`successfully retrieved "${dataToAdd.toString()}" from node2`)
doStop()
}
// Wait for the nodes to boot
node1.once('start', didStartHandler)
node2.once('start', didStartHandler)
// Listen for the nodes stopping so we can cleanup
node1.once('stop', didStopHandler)
node2.once('stop', didStopHandler)
|
miguel-isasmendi/store
|
src/main/java/com/store/domain/model/order/OrderStatus.java
|
package com.store.domain.model.order;
public enum OrderStatus {
NEW, IN_PROGRESS, COMPLETE, CANCELLED
}
|
JamesCao2048/BlizzardData
|
Corpus/aspectj/4078.java
|
package p;
aspect B extends Y {
declare parents: A* implements IFace;
}
abstract aspect Y {
public void IFace.foo() {}
}
interface IFace {}
|
feueraustreter/YAPION
|
src/main/java/yapion/exceptions/parser/YAPIONParserException.java
|
// SPDX-License-Identifier: Apache-2.0
// YAPION
// Copyright (C) 2019,2020 yoyosource
package yapion.exceptions.parser;
import yapion.exceptions.YAPIONException;
public class YAPIONParserException extends YAPIONException {
public YAPIONParserException() {
super();
}
public YAPIONParserException(String message) {
super(message);
}
public YAPIONParserException(String message, Throwable cause) {
super(message, cause);
}
public YAPIONParserException(Throwable cause) {
super(cause);
}
protected YAPIONParserException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
|
othonreyes/code_problems
|
python/fundamentals/tree/treev2.py
|
class Node:
def __init__(self, value):
self.value = value
self.left = None
self.right = None
def insert(root, value) -> Node:
if not root:
root = Node(value)
return root
n = root
while True:
if n.value > value: #go left
if n.left is None:
n.left = Node(value)
break
else:
n = n.left
else:
if n.right is None:
n.right = Node(value)
break
else:
                n = n.right
    return root
def search(root, value):
if not root:
return None
n = root
while n:
if n.value == value:
return n
if n.value > value: # search left
n = n.left
else:
n = n.right # search right
return None
def isCompleteTree(root):
if not root:
return False
nodes = [root]
n = None
while nodes:
n = nodes.pop(0)
if not checkNodeCompleteTree(n):
return False
if n:
nodes.append(n.left)
nodes.append(n.right)
return True
def checkNodeCompleteTree(root):
if not root:
return True
if root.left and root.right:
return True
if root.left and not root.right:
return True
if not root.left and not root.right:
return True
return False
def isFullTree(root):
if not root:
return False
nodes = [root]
n = None
while nodes:
n = nodes.pop(0)
if not checkNodeForFullTree(n):
return False
if n:
nodes.append(n.left)
nodes.append(n.right)
return True
def checkNodeForFullTree(n):
if not n:
return True
if n.left and n.right:
return True
if not n.left and not n.right:
return True
return False
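# Note: checkNodeForFullTree encodes the "full" property (every node has 0 or 2
# children), while checkNodeCompleteTree only rejects a right child without a
# left one, which is a necessary but not sufficient condition for completeness.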
def inOrderTraversal(root, level= 0):
if root:
inOrderTraversal(root.left, level + 1)
visit(root, level)
inOrderTraversal(root.right, level + 1)
def preOrderTraversal(root, level= 0):
if root:
visit(root, level)
preOrderTraversal(root.left, level + 1)
preOrderTraversal(root.right, level + 1)
def postOrderTraversal(root, level= 0):
    if root:
        postOrderTraversal(root.left, level + 1)
        postOrderTraversal(root.right, level + 1)
        visit(root, level)
def visit(node, level = 0):
print(" " * level, node.value)
def make_complete_tree():
root = Node(1)
root.left = Node(12)
root.left.left = Node(5)
root.left.right = Node(6)
root.right = Node(9)
root.right.left = Node(5)
return root
def make_complete_tree_and_full_tree():
root = Node(1)
root.left = Node(12)
root.left.left = Node(5)
root.left.right = Node(6)
root.right = Node(9)
return root
def make_incomplete_tree():
root = Node(1)
root.left = Node(12)
root.left.left = Node(5)
root.left.right = Node(6)
root.right = Node(9)
root.right.right = Node(8)
return root
def deepest_level(node, level = 0):
if not node:
return level
return max(deepest_level(node.left, level+1), deepest_level(node.right, level+1))
def print_from_bottom(node):
"""
Print a tree from the bottom to the top
"""
max_level = deepest_level(node)
for i in reversed(range(1, max_level + 1)):
print_level(node, i, 1)
print()
def print_level(node, level, current):
if current == level and node:
print(node.value, end=",")
return
if node.left:
print_level(node.left, level, current + 1)
if node.right:
print_level(node.right, level, current + 1)
def h_in_order(node):
"""
TODO:
- don't add padding to the bottom left node
- less padding on bottom node
"""
max_level = deepest_level(node)
n = None
nodes = [node]
for i in range(max_level):
children = []
while nodes:
n = nodes.pop(0)
print(" " * (max_level - i), n.value, end="")
children.append(n.left)
children.append(n.right)
nodes = children
print()
def check_tree_is_bst(node: Node)->bool:
"""
    Naive approach that doesn't work (see the note after this function)
https://www.geeksforgeeks.org/a-program-to-check-if-a-binary-tree-is-bst-or-not/
"""
if not node:
return False
if not node.left and not node.right:
return True
check_left = True
if node.left and node.left.value < node.value:
check_left = check_tree_is_bst(node.left)
else:
return False
check_right = True
if node.right and node.right.value > node.value:
check_right = check_tree_is_bst(node.right)
else:
return False
return check_left and check_right
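# Why the naive check fails: it only compares each node with its direct children,
# so a descendant can violate an ancestor's bound unnoticed. In the driver below,
# 4 sits in the left subtree of root 3 (as the right child of 2), which breaks
# the BST property, yet check_tree_is_bst still returns True.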
# def check_tree_is_bst2(node: Node, biggest = 0, smallest = 0)->bool:
# """
# Naive approch that doesn't work
# https://www.geeksforgeeks.org/a-program-to-check-if-a-binary-tree-is-bst-or-not/
# """
# if not node:
# return True
# if node.left and biggest > node.value:
# return False
# if node.right and node.right.value < node.value:
# return False
# if not check_tree_is_bst(node.left, max(biggest, node.value)) or not check_tree_is_bst(node.right)
# return False
# return True
if __name__ == "__main__":
root = insert(None, 27)
insert(root, 14)
insert(root, 35)
insert(root, 10)
insert(root, 19)
insert(root, 31)
insert(root, 42)
inOrderTraversal(root)
print("Search ", search(root, 27).value)
print("Search ", search(root, 42).value)
print("Search ", search(root, 90))
print("Root should be a complete tree ", isCompleteTree(root))
print("Root should be a complete tree ", isCompleteTree(make_complete_tree()))
print("Root should be a complete tree ", isCompleteTree(make_complete_tree_and_full_tree()))
print("Root should not be a complete tree ", isCompleteTree(make_incomplete_tree()))
print("Root should be a full tree ", isFullTree(root))
print("Root should NOT be a full tree ", isFullTree(make_complete_tree()))
print("Root should be a full tree ", isFullTree(make_complete_tree_and_full_tree()))
print("Root should NOT be a full tree ", isFullTree(make_incomplete_tree()))
print("deepest_level ", deepest_level(root))
print_from_bottom(root)
h_in_order(root)
print("check_tree_is_bst ", check_tree_is_bst(root))
root = Node(3)
root.left = Node(2)
root.left.right = Node(4)
root.left.left = Node(1)
root.right = Node(8)
root.right.left = Node(6)
root.right.right = Node(9)
print("check_tree_is_bst ", check_tree_is_bst(root)) # True -> This is wrong
# global variable prev - to keep track
# of previous node during Inorder
# traversal
prev = None
# function to check if given binary
# tree is BST
def isbst(root):
# prev is a global variable
global prev
prev = None
return isbst_rec(root)
# Helper function to test if a binary
# tree is a BST.
# Traverse the tree in inorder fashion
# and keep track of the previous node;
# return True if the tree is a binary
# search tree, otherwise False
def isbst_rec(root):
# prev is a global variable
global prev
# if tree is empty return true
if root is None:
return True
if isbst_rec(root.left) is False:
return False
    # if the previous node's value is
    # greater than the current node's
    # value, return False
if prev is not None and prev.value > root.value:
return False
# store the current node in prev
prev = root
return isbst_rec(root.right)
# driver code to test above function
root = Node(3)
root.left = Node(2)
root.right = Node(5)
root.left.left = Node(1)
root.left.right = Node(4)
if isbst(root):
print("is BST")
else:
print("not a BST")
INT_MAX = 4294967296
INT_MIN = -4294967296
# Returns true if the given tree is a binary search tree
# (efficient version)
def isBST(node):
return (isBSTUtil(node, INT_MIN, INT_MAX))
# Returns True if the given tree is a BST and its values
# are >= mini and <= maxi
def isBSTUtil(node, mini, maxi):
# An empty tree is BST
if node is None:
return True
# False if this node violates min/max constraint
if node.value < mini or node.value > maxi:
return False
# Otherwise check the subtrees recursively
# tightening the min or max constraint
    return (isBSTUtil(node.left, mini, node.value) and
            isBSTUtil(node.right, node.value, maxi))
if isBST(root):
print("is BST")
else:
print("not a BST")
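# A minimal alternative sketch (not from the original source): the same
# min/max idea as isBSTUtil, but with None bounds instead of the INT_MIN /
# INT_MAX sentinels, so it also works for values outside that range. Unlike
# isBSTUtil it rejects duplicate values.
def is_bst_bounded(node, lo=None, hi=None):
    if node is None:
        return True
    if lo is not None and node.value <= lo:
        return False
    if hi is not None and node.value >= hi:
        return False
    return (is_bst_bounded(node.left, lo, node.value) and
            is_bst_bounded(node.right, node.value, hi))
print("is_bst_bounded ", is_bst_bounded(root))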
|
davidvlaminck/OTLClassPython
|
src/OTLMOW/OTLModel/Datatypes/KlSeinbrugRijrichting.py
|
# coding=utf-8
from OTLMOW.OTLModel.Datatypes.KeuzelijstField import KeuzelijstField
from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde
# Generated with OTLEnumerationCreator. To modify: extend, do not edit
class KlSeinbrugRijrichting(KeuzelijstField):
"""Mogelijke rijrichtingen bij een seinbrug (enkele of dubbele)."""
naam = 'KlSeinbrugRijrichting'
label = 'Seinbrug rijrichting'
objectUri = 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#KlSeinbrugRijrichting'
definition = 'Mogelijke rijrichtingen bij een seinbrug (enkele of dubbele).'
codelist = 'https://wegenenverkeer.data.vlaanderen.be/id/conceptscheme/KlSeinbrugRijrichting'
options = {
'dubbele-rijrichting': KeuzelijstWaarde(invulwaarde='dubbele-rijrichting',
label='dubbele rijrichting',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlSeinbrugRijrichting/dubbele-rijrichting'),
'enkele-rijrichting': KeuzelijstWaarde(invulwaarde='enkele-rijrichting',
label='enkele rijrichting',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlSeinbrugRijrichting/enkele-rijrichting')
}
|
Shan1024/carbon-auth
|
components/auth/org.wso2.carbon.auth.core/src/main/java/org/wso2/carbon/auth/core/encryption/SymmetricEncryption.java
|
/*
*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.auth.core.encryption;
import org.wso2.carbon.auth.core.AuthConstants;
import org.wso2.carbon.auth.core.exception.CryptoException;
import org.wso2.carbon.secvault.SecureVault;
import org.wso2.carbon.secvault.SecureVaultFactory;
import org.wso2.carbon.secvault.exception.SecureVaultException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Properties;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
/**
* This class is used to handle symmetric encryption related tasks
*/
public class SymmetricEncryption {
private static SymmetricEncryption instance = null;
private SecretKey symmetricKey = null;
private static String symmetricKeyEncryptAlgoDefault = "AES";
private String propertyKey = "symmetric.key";
public static synchronized SymmetricEncryption getInstance() {
if (instance == null) {
instance = new SymmetricEncryption();
}
return instance;
}
public void generateSymmetricKey() throws CryptoException {
String secretAlias;
String encryptionAlgo;
Properties properties;
boolean isSymmetricKeyFromFile = false;
try {
String symmetricPropertiesFilePath =
System.getProperty(AuthConstants.WSO2_RUNTIME_PATH) + File.separator + "resources" + File.separator
+ "auth" + File.separator + AuthConstants.SYMMETRIC_KEY_PROPERTIES_FILE_NAME;
File symmetricPropertiesFile = new File(symmetricPropertiesFilePath);
if (symmetricPropertiesFile.exists()) {
try (FileInputStream fileInputStream = new FileInputStream(symmetricPropertiesFile)) {
properties = new Properties();
properties.load(fileInputStream);
}
String secVaultYamlFilePath =
System.getProperty(AuthConstants.WSO2_RUNTIME_PATH) + File.separator + "resources"
+ File.separator + "auth" + File.separator
+ AuthConstants.SECURE_VAULT_CONFIG_YAML_FILE_NAME;
File secVaultYamlFile = new File(secVaultYamlFilePath);
Path configPath = Paths.get(secVaultYamlFile.toURI());
SecureVault secureVault = new SecureVaultFactory().getSecureVault(configPath)
.orElseThrow(() -> new SecureVaultException("Error in getting secure vault instance"));
secretAlias = properties.getProperty(propertyKey);
encryptionAlgo = symmetricKeyEncryptAlgoDefault;
symmetricKey = new SecretKeySpec(new String(secureVault.resolve(secretAlias)).getBytes(
Charset.defaultCharset()), 0,
secureVault.resolve(secretAlias).length, encryptionAlgo);
isSymmetricKeyFromFile = true;
}
if (!isSymmetricKeyFromFile) {
throw new CryptoException("Error in generating symmetric key. Symmetric key is not available.");
}
} catch (IOException e) {
throw new CryptoException("Error in generating symmetric key", e);
} catch (SecureVaultException e) {
throw new CryptoException("Error in getting secure vault instance", e);
}
}
public byte[] encryptWithSymmetricKey(byte[] plainText) throws CryptoException {
Cipher c = null;
byte[] encryptedData = null;
String encryptionAlgo;
try {
encryptionAlgo = symmetricKeyEncryptAlgoDefault;
c = Cipher.getInstance(encryptionAlgo);
c.init(Cipher.ENCRYPT_MODE, symmetricKey);
encryptedData = c.doFinal(plainText);
} catch (Exception e) {
throw new CryptoException("Error when encrypting data.", e);
}
return encryptedData;
}
public byte[] decryptWithSymmetricKey(byte[] encryptionBytes) throws CryptoException {
Cipher c = null;
byte[] decryptedData = null;
String encryptionAlgo;
try {
encryptionAlgo = symmetricKeyEncryptAlgoDefault;
c = Cipher.getInstance(encryptionAlgo);
c.init(Cipher.DECRYPT_MODE, symmetricKey);
decryptedData = c.doFinal(encryptionBytes);
} catch (InvalidKeyException | BadPaddingException | IllegalBlockSizeException |
NoSuchAlgorithmException | NoSuchPaddingException e) {
throw new CryptoException("Error when decrypting data.", e);
}
return decryptedData;
}
}
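// Hypothetical usage sketch (not part of the original file): round-trips a
// payload through the AES key that generateSymmetricKey() loads from the
// secure vault. "plainBytes" is an illustrative placeholder.
//
//   SymmetricEncryption crypto = SymmetricEncryption.getInstance();
//   crypto.generateSymmetricKey();                      // throws CryptoException if no key is found
//   byte[] cipherText = crypto.encryptWithSymmetricKey(plainBytes);
//   byte[] roundTrip = crypto.decryptWithSymmetricKey(cipherText);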
|
51breeze/EaseScript
|
javascript/system/ElementEvent.js
|
/*
* EaseScript
* Copyright © 2017 EaseScript All rights reserved.
* Released under the MIT license
* https://github.com/51breeze/EaseScript
* @author <NAME> <<EMAIL>>
* @require System,Event,Object
*/
function ElementEvent( type, bubbles,cancelable )
{
if( !System.instanceOf(this,ElementEvent) )return new ElementEvent(type, bubbles,cancelable);
Event.call(this, type, bubbles,cancelable );
return this;
};
module.exports = ElementEvent;
var Object = require("./Object.js");
var Event = require("./Event.js");
var System = require("./System.js");
ElementEvent.prototype=Object.create( Event.prototype,{
"constructor":{value:ElementEvent}
});
ElementEvent.prototype.parent=null;
ElementEvent.prototype.child=null;
ElementEvent.ADD='elementAdd';
ElementEvent.ADD_TO_DOCUMENT='elementAddToDocument';
ElementEvent.REMOVE='elementRemove';
ElementEvent.CHANGE='elementChildrenChange';
// Mouse events
Event.registerEvent(function ( type , target, originalEvent )
{
if( originalEvent instanceof ElementEvent )return originalEvent;
});
|
menty44/tutorials
|
jee-7/src/test/java/com/baeldung/batch/understanding/JobSequenceUnitTest.java
|
package com.baeldung.batch.understanding;
import static org.junit.jupiter.api.Assertions.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import javax.batch.operations.JobOperator;
import javax.batch.runtime.BatchRuntime;
import javax.batch.runtime.BatchStatus;
import javax.batch.runtime.JobExecution;
import javax.batch.runtime.StepExecution;
import org.junit.jupiter.api.Test;
class JobSequenceUnitTest {
@Test
public void givenTwoSteps_thenBatch_CompleteWithSuccess() throws Exception {
JobOperator jobOperator = BatchRuntime.getJobOperator();
Long executionId = jobOperator.start("simpleJobSequence", new Properties());
JobExecution jobExecution = jobOperator.getJobExecution(executionId);
jobExecution = BatchTestHelper.keepTestAlive(jobExecution);
assertEquals(2, jobOperator.getStepExecutions(executionId).size());
        assertEquals(BatchStatus.COMPLETED, jobExecution.getBatchStatus());
}
@Test
public void givenFlow_thenBatch_CompleteWithSuccess() throws Exception {
JobOperator jobOperator = BatchRuntime.getJobOperator();
Long executionId = jobOperator.start("flowJobSequence", new Properties());
JobExecution jobExecution = jobOperator.getJobExecution(executionId);
jobExecution = BatchTestHelper.keepTestAlive(jobExecution);
assertEquals(3, jobOperator.getStepExecutions(executionId).size());
        assertEquals(BatchStatus.COMPLETED, jobExecution.getBatchStatus());
}
@Test
public void givenDecider_thenBatch_CompleteWithSuccess() throws Exception {
JobOperator jobOperator = BatchRuntime.getJobOperator();
Long executionId = jobOperator.start("decideJobSequence", new Properties());
JobExecution jobExecution = jobOperator.getJobExecution(executionId);
jobExecution = BatchTestHelper.keepTestAlive(jobExecution);
List<StepExecution> stepExecutions = jobOperator.getStepExecutions(executionId);
List<String> executedSteps = new ArrayList<>();
for (StepExecution stepExecution : stepExecutions) {
executedSteps.add(stepExecution.getStepName());
}
assertEquals(2, jobOperator.getStepExecutions(executionId).size());
assertArrayEquals(new String[]{"firstBatchStepStep1", "firstBatchStepStep3"}, executedSteps.toArray());
        assertEquals(BatchStatus.COMPLETED, jobExecution.getBatchStatus());
}
@Test
public void givenSplit_thenBatch_CompletesWithSuccess() throws Exception {
JobOperator jobOperator = BatchRuntime.getJobOperator();
Long executionId = jobOperator.start("splitJobSequence", new Properties());
JobExecution jobExecution = jobOperator.getJobExecution(executionId);
jobExecution = BatchTestHelper.keepTestAlive(jobExecution);
List<StepExecution> stepExecutions = jobOperator.getStepExecutions(executionId);
List<String> executedSteps = new ArrayList<>();
for (StepExecution stepExecution : stepExecutions) {
executedSteps.add(stepExecution.getStepName());
}
assertEquals(3, stepExecutions.size());
assertTrue(executedSteps.contains("splitJobSequenceStep1"));
assertTrue(executedSteps.contains("splitJobSequenceStep2"));
assertTrue(executedSteps.contains("splitJobSequenceStep3"));
assertTrue(executedSteps.get(0).equals("splitJobSequenceStep1") || executedSteps.get(0).equals("splitJobSequenceStep2"));
assertTrue(executedSteps.get(1).equals("splitJobSequenceStep1") || executedSteps.get(1).equals("splitJobSequenceStep2"));
assertTrue(executedSteps.get(2).equals("splitJobSequenceStep3"));
        assertEquals(BatchStatus.COMPLETED, jobExecution.getBatchStatus());
}
}
|
aqiu202/aqiu-spring-boot-starter-projects
|
core/id-generator-core/src/main/java/com/github/aqiu202/id/generator/SnowFlakeIdGenerator.java
|
package com.github.aqiu202.id.generator;
import com.github.aqiu202.id.IdGenerator;
import com.github.aqiu202.id.prop.SnowFlakeIdProperties;
import org.springframework.lang.NonNull;
/**
* <pre>SnowFlakeIdGenerator</pre>
*
* @author aqiu 2020/12/2 15:09
**/
public class SnowFlakeIdGenerator implements IdGenerator<Long> {
private final long epoch = 1577836800000L;
    /**
     * Number of bits reserved for the worker id
     */
    private final long workerIdBits = 5L;
    /**
     * Number of bits reserved for the data center id
     */
    private final long dataCenterIdBits = 5L;
    /**
     * Maximum supported worker id: 31 (this shift trick quickly yields the
     * largest value representable with the given number of bits)
     */
    private final long maxWorkerId = ~(-1L << workerIdBits);
    /**
     * Maximum supported data center id: 31
     */
    private final long maxDataCenterId = ~(-1L << dataCenterIdBits);
    /**
     * Number of bits the sequence occupies in the id
     */
    private final long sequenceBits = 12L;
    /**
     * Worker id is shifted left by 12 bits
     */
    private final long workerIdShift = sequenceBits;
    /**
     * Data center id is shifted left by 17 bits (12 + 5)
     */
    private final long datacenterIdShift = sequenceBits + workerIdBits;
    /**
     * Timestamp is shifted left by 22 bits (5 + 5 + 12)
     */
    private final long timestampLeftShift = sequenceBits + workerIdBits + dataCenterIdBits;
    /**
     * Mask for the sequence: 4095 (0b111111111111 = 0xfff = 4095)
     */
    private final long sequenceMask = ~(-1L << sequenceBits);
    /**
     * Worker id (0~31)
     */
    private long workerId;
    /**
     * Data center id (0~31)
     */
    private long dataCenterId;
    /**
     * Sequence within the current millisecond (0~4095)
     */
    private long sequence = 0L;
    /**
     * Timestamp of the last generated id
     */
    private long lastTimestamp = -1L;
//==============================Constructors=====================================
    /**
     * Default constructor
     */
public SnowFlakeIdGenerator() {
this.workerId = this.getWorkerId();
this.dataCenterId = this.getDataCenterId();
}
public SnowFlakeIdGenerator(SnowFlakeIdProperties properties) {
this.configure(properties);
}
    /**
     * Constructor
     *
     * @param workerId worker id (0~31)
     * @param dataCenterId data center id (0~31)
     */
public SnowFlakeIdGenerator(long workerId, long dataCenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(
String.format("worker Id can't be greater than %d or less than 0",
maxWorkerId));
}
if (dataCenterId > maxDataCenterId || dataCenterId < 0) {
throw new IllegalArgumentException(
String.format("datacenter Id can't be greater than %d or less than 0",
maxDataCenterId));
}
this.workerId = workerId;
this.dataCenterId = dataCenterId;
}
public void configure(SnowFlakeIdProperties properties) {
this.workerId = properties.getWorkerId();
this.dataCenterId = properties.getDataCenterId();
}
// ==============================Methods==========================================
    /**
     * Get the next id (this method is thread-safe)
     *
     * @return SnowflakeId
     */
@Override
@NonNull
public synchronized Long nextId() {
long timestamp = timeGen();
        // If the current time is earlier than the last generation timestamp, the system clock has moved backwards and we must throw
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format(
"Clock moved backwards. Refusing to generate id for %d milliseconds",
lastTimestamp - timestamp));
}
        // Generated within the same millisecond: advance the in-millisecond sequence
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
            // The sequence overflowed within this millisecond
if (sequence == 0) {
                // Block until the next millisecond to obtain a fresh timestamp
timestamp = tilNextMillis(lastTimestamp);
}
}
        // The timestamp changed, so reset the in-millisecond sequence
else {
sequence = 0L;
}
        // Record the timestamp of this generation
lastTimestamp = timestamp;
        // Shift each part and OR them together into a 64-bit id
return ((timestamp - epoch) << timestampLeftShift)
| (dataCenterId << datacenterIdShift)
| (workerId << workerIdShift)
| sequence;
}
    /**
     * Block until the next millisecond, i.e. until a fresh timestamp is obtained
     *
     * @param lastTimestamp timestamp of the last generated id
     * @return the current timestamp
     */
private long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
    /**
     * Return the current time in milliseconds
     *
     * @return the current time (in milliseconds)
     */
private long timeGen() {
return System.currentTimeMillis();
}
public long getWorkerId() {
return workerId;
}
public void setWorkerId(long workerId) {
this.workerId = workerId;
}
public long getDataCenterId() {
return dataCenterId;
}
public void setDataCenterId(long dataCenterId) {
this.dataCenterId = dataCenterId;
}
}
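// Hypothetical usage sketch (not part of the original file): ids produced this
// way are 64-bit, time-ordered, and unique per (dataCenterId, workerId) pair
// for up to 4096 ids per millisecond.
//
//   IdGenerator<Long> generator = new SnowFlakeIdGenerator(1L, 1L); // workerId, dataCenterId
//   Long id = generator.nextId();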
|
forgot2015/ForgotJavaLearning
|
src/book/headfirstjava/two/GameLauncher.java
|
package book.headfirstjava.two;
/**
* Created by forgot on 2017/6/25.
*/
public class GameLauncher {
public static void main(String[] args) {
GuessGame guessGame = new GuessGame();
guessGame.startGame();
}
}
|
mindspore-ai/models
|
research/cv/SE_ResNeXt50/eval.py
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Process the test set with the .ckpt model in turn.
"""
import argparse
import os
import mindspore.nn as nn
from mindspore import context
from mindspore.train.model import Model
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from mindspore.common import set_seed
from mindspore import Tensor
from mindspore.common import dtype as mstype
from mindspore.nn.loss.loss import LossBase
from mindspore.ops import functional as F
from mindspore.ops import operations as P
from src.config import imagenet_cfg
from src.dataset import create_dataset_imagenet
import src.senet_ms as senets
set_seed(1)
parser = argparse.ArgumentParser(description='senet_ms')
parser.add_argument('--dataset_name', type=str, default='imagenet', choices=['imagenet', 'cifar10'],
help='dataset name.')
parser.add_argument('--checkpoint_path', type=str, default='./ckpt_0', help='Checkpoint file path')
args_opt = parser.parse_args()
class CrossEntropySmooth(LossBase):
"""CrossEntropy"""
def __init__(self, sparse=True, reduction='mean', smooth_factor=0., num_classes=1000):
super(CrossEntropySmooth, self).__init__()
self.onehot = P.OneHot()
self.sparse = sparse
self.on_value = Tensor(1.0 - smooth_factor, mstype.float32)
self.off_value = Tensor(1.0 * smooth_factor / (num_classes - 1), mstype.float32)
self.ce = nn.SoftmaxCrossEntropyWithLogits(reduction=reduction)
def construct(self, logit, label):
if self.sparse:
label = self.onehot(label, F.shape(logit)[1], self.on_value, self.off_value)
loss_ = self.ce(logit, label)
return loss_
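# For reference: with smooth_factor s and num_classes K, CrossEntropySmooth's
# targets become (1 - s) for the true class and s / (K - 1) for every other
# class; e.g. s=0.1 and K=1000 give on_value=0.9 and off_value~=0.0001.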
if __name__ == '__main__':
if args_opt.dataset_name == "imagenet":
cfg = imagenet_cfg
if not cfg.use_label_smooth:
cfg.label_smooth_factor = 0.0
else:
        raise ValueError("dataset is not supported.")
device_target = cfg.device_target
context.set_context(mode=context.GRAPH_MODE, device_target=cfg.device_target)
if device_target == "Ascend":
context.set_context(device_id=cfg.device_id)
dataset = create_dataset_imagenet(cfg.val_data_path, 1, False)
loss = CrossEntropySmooth(sparse=True, reduction="mean",
smooth_factor=cfg.label_smooth_factor, num_classes=cfg.num_classes)
net = senets.se_resnext50_32x4d(cfg.num_classes)
model = Model(net, loss_fn=loss, metrics={'top_1_accuracy', 'top_5_accuracy'})
file_list = os.listdir(args_opt.checkpoint_path)
for filename in file_list:
de_path = os.path.join(args_opt.checkpoint_path, filename)
if de_path.endswith('.ckpt'):
param_dict = load_checkpoint(de_path)
load_param_into_net(net, param_dict)
net.set_train(False)
acc = model.eval(dataset)
print(f"model {de_path}'s accuracy is {acc}")
|
Bind-Forward/vue-postgrest
|
src/GenericModel.js
|
import Vue from 'vue'
import { PrimaryKeyError } from '@/errors'
import { $diff, $freeze, createDiffProxy, createReactivePrototype, mapAliasesFromSelect } from '@/utils'
class GenericModel {
#options
#proxy
constructor (options, data) {
this.#options = options
Object.assign(this, data)
this.#proxy = createReactivePrototype(createDiffProxy(this), this)
return this.#proxy
}
async #request ({ method, keepChanges = false, needsQuery = true }, signal, opts, ...data) {
await this.#options.route.$ready
const { columns, ...options } = opts
const query = { select: this.#options.select }
if (needsQuery) {
const q = this.#options.query
if (!q) throw new PrimaryKeyError()
if (q instanceof PrimaryKeyError) throw q
Object.assign(query, q)
}
if (columns) {
if (this.#options.route.columns) {
query.columns = columns.filter(c => this.#options.route.columns.includes(c))
} else {
query.columns = columns
}
}
// rename aliased columns and drop columns that don't exist on the route (e.g. joined columns)
    data = data.map(item => {
      return Object.fromEntries(
        Object.entries(mapAliasesFromSelect(this.#options.select, item))
          .filter(([col]) => !this.#options.route.columns || this.#options.route.columns.includes(col))
      )
    })
const resp = await this.#options.route[method](query, { ...options, accept: 'single', signal }, ...data)
let body
try {
body = await resp.json()
} catch {
if (!resp.headers.get('Location')) return
// for POST/PUT minimal
const loc = new URLSearchParams(resp.headers.get('Location').replace(/^\/[^?]+\?/, ''))
return Object.fromEntries(Array.from(loc.entries()).map(([key, value]) => [key, value.replace(/^eq\./, '')]))
}
// update instance with returned data
// TODO: do we need to delete missing keys?
if (keepChanges) {
const diff = this.#proxy[$diff]
Object.entries(body).forEach(([key, value]) => Vue.set(this.#proxy, key, value))
this.#proxy[$freeze]()
Object.entries(diff).forEach(([key, value]) => Vue.set(this.#proxy, key, value))
} else {
Object.entries(body).forEach(([key, value]) => Vue.set(this.#proxy, key, value))
this.#proxy[$freeze]()
}
return body
}
async $get (signal, opts = {}) {
const { keepChanges, ...options } = opts
return this.#request({ method: 'get', keepChanges }, signal, options)
}
async $post (signal, opts = {}) {
const options = { return: 'representation', ...opts }
return this.#request({ method: 'post', needsQuery: false }, signal, options, this.#proxy)
}
async $put (signal, opts) {
const options = { return: 'representation', ...opts }
return this.#request({ method: 'put' }, signal, options, this.#proxy)
}
async $patch (signal, opts, data = {}) {
const options = { return: 'representation', ...opts }
if (!data || typeof data !== 'object') {
throw new Error('Patch data must be an object.')
}
const patchData = Object.assign(
{},
this.#proxy[$diff],
data
)
if (Object.keys(patchData).length === 0) {
// avoid sending an empty patch request
return this.#proxy
}
return this.#request({ method: 'patch' }, signal, options, patchData)
}
async $delete (signal, options = {}) {
return this.#request({ method: 'delete' }, signal, options)
}
}
export default GenericModel
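// Hypothetical usage sketch (not part of the original file): $patch sends only
// the tracked diff merged with any explicitly passed fields, and skips the
// request entirely when there is nothing to send. "options" is assumed to be a
// pre-built route/select/query options object.
//
//   const item = new GenericModel(options, { id: 1, name: 'old' })
//   item.name = 'new'
//   await item.$patch(signal, {}) // PATCH body: { name: 'new' }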
|
SubscribeIT/ngDesk
|
ngDesk-Module-Service/src/main/java/com/ngdesk/repositories/ModuleValidationRepository.java
|
package com.ngdesk.repositories;
import com.ngdesk.module.validations.dao.ModuleValidation;
public interface ModuleValidationRepository
extends CustomNgdeskRepository<ModuleValidation, String>, CustomModuleValidationRepository {
}
|
donsheng/acrn-hypervisor
|
misc/services/life_mngr/uart_channel.h
|
/*
* Copyright (C)2021 Intel Corporation
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef _UART_CHANNEL_H_
#define _UART_CHANNEL_H_
#include <sys/queue.h>
#include <pthread.h>
#include <semaphore.h>
#include <sys/un.h>
#include "uart.h"
#define WAIT_USER_VM_POWEROFF (10*SECOND_TO_US)
#define CHANNEL_DEV_NAME_MAX 128U
#define CHANNEL_DEV_BUF_LEN 256U
#define MIN_RESEND_TIME 3U
#define LISTEN_INTERVAL (5 * SECOND_TO_US)
typedef void data_handler_f(const char *cmd_name, int fd);
struct channel_dev {
struct uart_dev *uart_device;
char name[CHANNEL_DEV_NAME_MAX]; /**< channel device name */
bool listening; /**< listen thread loop flag */
bool polling; /**< message polling thread loop flag */
pthread_t listen_thread;
pthread_t pool_thread;
char buf[CHANNEL_DEV_BUF_LEN]; /**< store received message */
LIST_ENTRY(channel_dev) list; /**< list node used in UART connection list */
	LIST_ENTRY(channel_dev) open_list; /**< list node used in the UART opening list */
struct uart_channel *channel; /**< point to UART server */
sem_t dev_sem; /**< semaphore used to start polling message */
char resend_buf[CHANNEL_DEV_BUF_LEN]; /**< store the message that will be sent */
	unsigned int resend_time; /**< number of times the message will be resent */
};
struct channel_config {
char identifier[CHANNEL_DEV_NAME_MAX]; /**< the user VM name which is configured by user */
};
struct uart_channel {
data_handler_f *data_handler;
LIST_HEAD(tty_head, channel_dev) tty_conn_head; /* UART connection list */
LIST_HEAD(tty_open_head, channel_dev) tty_open_head; /* UART opening list */
pthread_mutex_t tty_conn_list_lock;
struct channel_config conf;
};
/**
 * @brief Initialize each field of the uart channel instance, such as
 * its lock and the uart channel configuration
 */
struct uart_channel *init_uart_channel(char *id);
/**
 * @brief Create one uart channel device according to the device name
 *
 * Create one channel device instance to store information about
 * one uart channel device which will be opened.
 * For the master channel, create two threads: one thread
 * listens for and waits on sync messages from the slave channel, the other
 * polls messages from the slave channel.
 * For the slave channel, create one thread which sends a sync message
 * to the master channel every 5 seconds until an acked sync
 * message is received from the master channel, then polls messages from the master channel.
 *
 * @param c point to the uart channel instance
 * @param path start address of the name of the device which will
 * be opened
 * @param fn the handler for processing messages
 */
struct channel_dev *create_uart_channel_dev(struct uart_channel *c, char *path, data_handler_f *fn);
/**
 * @brief Wait for the uart channel device threads to exit
 */
void wait_uart_channel_devs_threads(struct uart_channel *c);
/**
* @brief Destroy uart channel and release channel device instance
*/
void deinit_uart_channel(struct uart_channel *c);
/**
 * @brief Wait to connect a device in the uart channel
 *
 * Wait for a sync message from a slave channel device, parse the slave channel
 * device identifier from the sync message, then add the channel device into the
 * uart channel device connection list.
 */
void *listen_uart_channel_dev(void *arg);
/**
 * @brief Wait to connect a device in the uart channel
 *
 * Send a sync message every 5 seconds and wait for the acked sync message from
 * the master channel device, then add the uart channel device instance into the
 * uart connection list.
 */
void *connect_uart_channel_dev(void *arg);
/**
 * @brief Poll and dispatch messages received from the uart channel
 *
 * If the resend count is set, this interface will resend the message until the
 * ACK message is received.
 */
void *poll_and_dispatch_uart_channel_events(void *arg);
/**
* @brief Find uart channel device instance according to fd
*/
struct channel_dev *find_uart_channel_dev(struct uart_channel *c, int fd);
/**
* @brief Find uart channel device instance according to device name
*/
struct channel_dev *find_uart_channel_dev_by_name(struct uart_channel *c, char *name);
/**
* @brief Disconnect uart channel device instance
*/
void disconnect_uart_channel_dev(struct channel_dev *c_dev, struct uart_channel *c);
/**
 * @brief Stop listening on the uart channel devices
 */
void stop_listen_uart_channel_dev(struct uart_channel *c);
/**
 * @brief Set the uart channel device resend buffer and resend count
 *
 * If the ACK message is not received within the specified time, the
 * message is resent.
 */
void enable_uart_channel_dev_resend(struct channel_dev *c_dev, char *resend_buf, unsigned int resend_time);
/**
* @brief Enable resend for all connected uart channel devices
*/
void enable_all_uart_channel_dev_resend(struct uart_channel *c, char *msg, unsigned int resend_time);
/**
* @brief Clear the uart channel device resending buffer and resending time
*/
void disable_uart_channel_dev_resend(struct channel_dev *c_dev);
/**
* @brief Broadcast message to each connected uart channel device
*/
void notify_all_connected_uart_channel_dev(struct uart_channel *c, char *msg);
/**
* @brief Check whether uart channel connection list is empty or not
*/
bool is_uart_channel_connection_list_empty(struct uart_channel *c);
#endif
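/*
 * Hypothetical usage sketch (not part of the original header): wiring up one
 * channel device; "handle_cmd" and "/dev/ttyS1" are illustrative placeholders.
 *
 *   struct uart_channel *ch = init_uart_channel("SOS_VM");
 *   struct channel_dev *dev = create_uart_channel_dev(ch, "/dev/ttyS1", handle_cmd);
 *   wait_uart_channel_devs_threads(ch);
 *   deinit_uart_channel(ch);
 */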
|
wallet-io/ledger-app-walletio
|
deps/lib-coins-c/src/eth_m/eth_m_transaction.c
|
#include "eth_m_transaction.h"
#include "../common/rlp.h"
#include "../common/tx_helper.h"
#include "../common/utils.h"
typedef struct {
uint8_t start_index;
uint8_t len;
} field_info_t;
static field_info_t field_info[] = {
{0, 5}, //PREFIX
{5, 20}, //ADDRESS
{25, 32}, //VALUE
    {57, 1},   //DATA TODO, the length of this field is not fixed!
{58, 32}, //EXPIRETIME
{90, 32}, //SEQUENCEID
};
static field_info_t token_field_info[] = {
{0, 5}, //PREFIX
{5, 20}, //ADDRESS
{25, 32}, //VALUE
{57, 20}, //TOKEN ADDRESS
{77, 32}, //EXPIRETIME
{109, 32}, //SEQUENCEID
};
int eth_m_tx_init(eth_m_tx_t* tx, uint8_t* buffer, int buffer_len) {
if (!tx || !buffer) {
return 1;
}
tx->buffer = buffer;
tx->buffer_len = buffer_len;
return eth_m_tx_update(tx);
}
int eth_m_tx_update(eth_m_tx_t* tx) {
return 0;
}
static uint8_t eth_m_tx_get_item(eth_m_tx_t* tx, uint8_t type_index, bool is_token, uint8_t** out) {
/**
* check tx,out
* check out capacity
* get index and len
* copy
* return data len
*/
if (tx == NULL || out == NULL)
return 0;
field_info_t tmp = is_token ? token_field_info[type_index] : field_info[type_index];
*out = tx->buffer + tmp.start_index;
return tmp.len;
}
uint8_t eth_m_tx_get_prefix(eth_m_tx_t* tx, uint8_t** out) {
return eth_m_tx_get_item(tx, 0, false, out);
}
uint8_t eth_m_tx_get_address(eth_m_tx_t* tx, uint8_t** out) {
return eth_m_tx_get_item(tx, 1, false, out);
}
uint8_t eth_m_tx_get_value(eth_m_tx_t* tx, uint8_t** out) {
return eth_m_tx_get_item(tx, 2, false, out);
}
// uint8_t eth_m_tx_get_data(eth_m_tx_t* tx, uint8_t** out) {
// return eth_m_tx_get_item(tx, 3, false, out);
// }
uint8_t eth_m_token_tx_get_prefix(eth_m_tx_t* tx, uint8_t** out) {
return eth_m_tx_get_item(tx, 0, true, out);
}
uint8_t eth_m_token_tx_get_address(eth_m_tx_t* tx, uint8_t** out) {
return eth_m_tx_get_item(tx, 1, true, out);
}
uint8_t eth_m_token_tx_get_token_addr(eth_m_tx_t* tx, uint8_t** out) {
return eth_m_tx_get_item(tx, 3, true, out);
}
uint8_t eth_m_token_tx_get_value(eth_m_tx_t* tx, uint8_t** out ) {
return eth_m_tx_get_item(tx, 2, true, out);
}
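/*
 * Hypothetical usage sketch (not part of the original file): each getter
 * returns the field length and points *out at a slice of the caller-owned
 * transaction buffer, so nothing is copied and nothing needs to be freed.
 *
 *   eth_m_tx_t tx;
 *   uint8_t *addr = NULL;
 *   if (eth_m_tx_init(&tx, raw, raw_len) == 0) {
 *       uint8_t addr_len = eth_m_tx_get_address(&tx, &addr); // 20 bytes
 *   }
 */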
|
poanchen/iotc-go
|
src/models/symmetric_key.go
|
// Code generated by go-swagger; DO NOT EDIT.
package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"github.com/go-openapi/errors"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
"github.com/go-openapi/validate"
)
// SymmetricKey symmetric key
//
// swagger:model SymmetricKey
type SymmetricKey struct {
// The primary key for this credential.
// Required: true
PrimaryKey *string `json:"primaryKey"`
// The secondary key for this credential.
// Required: true
SecondaryKey *string `json:"secondaryKey"`
}
// Validate validates this symmetric key
func (m *SymmetricKey) Validate(formats strfmt.Registry) error {
var res []error
if err := m.validatePrimaryKey(formats); err != nil {
res = append(res, err)
}
if err := m.validateSecondaryKey(formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
func (m *SymmetricKey) validatePrimaryKey(formats strfmt.Registry) error {
if err := validate.Required("primaryKey", "body", m.PrimaryKey); err != nil {
return err
}
return nil
}
func (m *SymmetricKey) validateSecondaryKey(formats strfmt.Registry) error {
if err := validate.Required("secondaryKey", "body", m.SecondaryKey); err != nil {
return err
}
return nil
}
// MarshalBinary interface implementation
func (m *SymmetricKey) MarshalBinary() ([]byte, error) {
if m == nil {
return nil, nil
}
return swag.WriteJSON(m)
}
// UnmarshalBinary interface implementation
func (m *SymmetricKey) UnmarshalBinary(b []byte) error {
var res SymmetricKey
if err := swag.ReadJSON(b, &res); err != nil {
return err
}
*m = res
return nil
}
|
rafiyasirin/jackrabbit-oak
|
oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3DataStore.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.blob.cloud.s3;
import java.net.URI;
import java.util.Properties;
import org.apache.jackrabbit.core.data.DataIdentifier;
import org.apache.jackrabbit.core.data.DataRecord;
import org.apache.jackrabbit.core.data.DataStoreException;
import org.apache.jackrabbit.oak.plugins.blob.AbstractSharedCachingDataStore;
import org.apache.jackrabbit.oak.plugins.blob.datastore.directaccess.ConfigurableDataRecordAccessProvider;
import org.apache.jackrabbit.oak.plugins.blob.datastore.directaccess.DataRecordUploadException;
import org.apache.jackrabbit.oak.plugins.blob.datastore.directaccess.DataRecordDownloadOptions;
import org.apache.jackrabbit.oak.plugins.blob.datastore.directaccess.DataRecordUpload;
import org.apache.jackrabbit.oak.plugins.blob.datastore.directaccess.DataRecordUploadOptions;
import org.apache.jackrabbit.oak.spi.blob.AbstractSharedBackend;
import org.apache.jackrabbit.oak.spi.blob.SharedBackend;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Amazon S3 data store extending from {@link AbstractSharedCachingDataStore}.
*/
public class S3DataStore extends AbstractSharedCachingDataStore implements ConfigurableDataRecordAccessProvider {
protected Properties properties;
private S3Backend s3Backend;
/**
* The minimum size of an object that should be stored in this data store.
*/
private int minRecordLength = 16 * 1024;
@Override
protected AbstractSharedBackend createBackend() {
s3Backend = new S3Backend();
if(properties != null){
s3Backend.setProperties(properties);
}
return s3Backend;
}
/**------------------------------------------- Getters & Setters-----------------------------**/
/**
* Properties required to configure the S3Backend
*/
public void setProperties(Properties properties) {
this.properties = properties;
}
public SharedBackend getBackend() {
return backend;
}
@Override
public int getMinRecordLength() {
return minRecordLength;
}
public void setMinRecordLength(int minRecordLength) {
this.minRecordLength = minRecordLength;
}
//
// ConfigurableDataRecordAccessProvider implementation
//
@Override
public void setDirectUploadURIExpirySeconds(int seconds) {
if (s3Backend != null) {
s3Backend.setHttpUploadURIExpirySeconds(seconds);
}
}
@Override
public void setBinaryTransferAccelerationEnabled(boolean enabled) {
if (s3Backend != null) {
s3Backend.setBinaryTransferAccelerationEnabled(enabled);
}
}
@Nullable
@Override
public DataRecordUpload initiateDataRecordUpload(long maxUploadSizeInBytes, int maxNumberOfURIs)
throws IllegalArgumentException, DataRecordUploadException {
return initiateDataRecordUpload(maxUploadSizeInBytes, maxNumberOfURIs, DataRecordUploadOptions.DEFAULT);
}
@Nullable
@Override
public DataRecordUpload initiateDataRecordUpload(long maxUploadSizeInBytes, int maxNumberOfURIs, @NotNull final DataRecordUploadOptions options)
throws IllegalArgumentException, DataRecordUploadException {
if (null == s3Backend) {
throw new DataRecordUploadException("Backend not initialized");
}
return s3Backend.initiateHttpUpload(maxUploadSizeInBytes, maxNumberOfURIs);
}
@NotNull
@Override
public DataRecord completeDataRecordUpload(@NotNull String uploadToken)
throws IllegalArgumentException, DataRecordUploadException, DataStoreException {
if (null == s3Backend) {
throw new DataRecordUploadException("Backend not initialized");
}
return s3Backend.completeHttpUpload(uploadToken);
}
@Override
public void setDirectDownloadURIExpirySeconds(int seconds) {
if (s3Backend != null) {
s3Backend.setHttpDownloadURIExpirySeconds(seconds);
}
}
@Override
public void setDirectDownloadURICacheSize(int maxSize) {
if (s3Backend != null) {
s3Backend.setHttpDownloadURICacheSize(maxSize);
}
}
@Nullable
@Override
public URI getDownloadURI(@NotNull DataIdentifier identifier,
@NotNull DataRecordDownloadOptions downloadOptions) {
if (s3Backend == null) {
return null;
}
return s3Backend.createHttpDownloadURI(identifier, downloadOptions);
}
}
|
mgoyal2-atl/atlassian-slack-integration-server
|
bitbucket-slack-server-integration-plugin/src/main/java/com/atlassian/bitbucket/plugins/slack/notification/renderer/SlackLinkRenderer.java
|
package com.atlassian.bitbucket.plugins.slack.notification.renderer;
import com.atlassian.bitbucket.avatar.AvatarRequest;
import com.atlassian.bitbucket.avatar.AvatarService;
import com.atlassian.bitbucket.commit.Commit;
import com.atlassian.bitbucket.nav.NavBuilder;
import com.atlassian.bitbucket.project.Project;
import com.atlassian.bitbucket.pull.PullRequest;
import com.atlassian.bitbucket.repository.Ref;
import com.atlassian.bitbucket.repository.Repository;
import com.atlassian.bitbucket.user.ApplicationUser;
import com.atlassian.bitbucket.user.Person;
import com.atlassian.sal.api.message.I18nResolver;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.stream.Collectors;
import static com.atlassian.plugins.slack.util.LinkHelper.decorateWithOrigin;
import static com.atlassian.plugins.slack.util.SlackHelper.escapeSignsForSlackLink;
import static org.apache.commons.lang3.StringUtils.substring;
import static org.apache.commons.lang3.StringUtils.substringBefore;
/**
* Generates notifications in Slack format.
*/
@SuppressWarnings("WeakerAccess")
@Component
public class SlackLinkRenderer {
public static final String SLACK_PATH = "slack";
public static final String OAUTH_SESSIONS_PATH = "view-oauth-sessions";
public static final String CONFIGURE_PATH = "configure";
private final NavBuilder navBuilder;
private final I18nResolver i18nResolver;
private final AvatarService avatarService;
@Autowired
public SlackLinkRenderer(final NavBuilder navBuilder,
final I18nResolver i18nResolver,
final AvatarService avatarService) {
this.navBuilder = navBuilder;
this.i18nResolver = i18nResolver;
this.avatarService = avatarService;
}
public String userLink(final ApplicationUser user) {
if (user != null) {
final String userLink = navBuilder.user(user).buildAbsolute();
return slackLink(userLink, user.getDisplayName(), "user");
} else {
return i18nResolver.getText("plugins.slack.common.anonymous");
}
}
public String userUrl(final Person user) {
if (user instanceof ApplicationUser) {
return decorateWithOrigin(navBuilder.user((ApplicationUser) user).buildAbsolute(), "user");
} else {
return "";
}
}
public String userName(final Person user) {
if (user instanceof ApplicationUser) {
return ((ApplicationUser) user).getDisplayName();
} else if (user != null) {
return user.getName();
} else {
return "";
}
}
public String userAvatarUrl(final Person user) {
return avatarService.getUrlForPerson(user, new AvatarRequest(true, 24, true));
}
public String repoLink(final Repository repository) {
String repoFullName = repository.getProject().getName() + "/" + repository.getName();
return slackLink(repoUrl(repository), repoFullName, "repository");
}
public String repoUrl(final Repository repository) {
return decorateWithOrigin(navBuilder
.repo(repository)
.buildAbsolute(), "repository");
}
public String projectAvatarUrl(final Project project) {
return navBuilder.project(project).avatar(24).buildAbsolute();
}
public String commitLink(final Commit commit) {
String commitShortId = commit.getDisplayId();
return slackCode(slackLink(commitUrl(commit), commitShortId, "commit"));
}
public String commitUrl(final Commit commit) {
return decorateWithOrigin(navBuilder
.repo(commit.getRepository())
.commit(commit.getId())
.buildAbsolute(), "commit");
}
public String commitCommentUrl(final Commit commit, final long commentId) {
return decorateWithOrigin(navBuilder.repo(commit.getRepository())
.commit(commit.getId())
.comment(commentId)
.buildAbsolute(), "comment");
}
public String refLink(final Repository repository, final Ref ref) {
String commitShortId = ref.getDisplayId();
String type = ref.getType() instanceof Enum ? ((Enum) ref.getType()).name().toLowerCase() : "";
return slackCode(slackLink(refUrl(repository, ref), commitShortId, type));
}
public String refUrl(final Repository repository, final Ref ref) {
String type = ref.getType() instanceof Enum ? ((Enum) ref.getType()).name().toLowerCase() : "";
return decorateWithOrigin(navBuilder
.repo(repository)
.browse()
.atRevision(ref)
.buildAbsolute(), type);
}
public String pullRequestLink(final PullRequest pullRequest) {
String pullRequestUrl = navBuilder.repo(pullRequest.getToRef().getRepository())
.pullRequest(pullRequest.getId())
.buildAbsolute();
return slackLink(pullRequestUrl, pullRequest.getTitle(), "pullRequest");
}
public String replyPullRequestCommentUrl(final PullRequest pullRequest, final long commentId) {
return decorateWithOrigin(navBuilder.repo(pullRequest.getToRef().getRepository())
.pullRequest(pullRequest.getId())
.comment(commentId)
.reply()
.buildAbsolute(), "pullRequestComment");
}
public String pullRequestCommentUrl(final PullRequest pullRequest, final long commentId) {
return decorateWithOrigin(navBuilder.repo(pullRequest.getToRef().getRepository())
.pullRequest(pullRequest.getId())
.comment(commentId)
.buildAbsolute(), "pullRequestComment");
}
public String pullRequestCommitUrl(final PullRequest pullRequest, final Commit commit) {
return decorateWithOrigin(navBuilder.repo(pullRequest.getToRef().getRepository())
.pullRequest(pullRequest.getId())
.commit(commit.getId())
.buildAbsolute(), "pullRequestCommit");
}
public String fileLink(final String filePath, final String fileUrl) {
return slackLink(fileUrl, filePath, "file");
}
public String oAuthSessionsPageUrl() {
return navBuilder
.pluginServlets()
.path(SLACK_PATH, OAUTH_SESSIONS_PATH)
.buildAbsolute();
}
public String adminConfigurationPage(final String teamId) {
return navBuilder
.pluginServlets()
.path(SLACK_PATH, CONFIGURE_PATH)
.withParam("teamId", teamId)
.buildAbsolute();
}
public String homePage() {
return decorateWithOrigin(navBuilder.dashboard().buildAbsolute(), "site");
}
public String formatCommitList(final List<Commit> commits) {
return commits.stream()
.map(commit -> String.format("%s %s", commitLink(commit), trimCommitMessage(commit.getMessage())))
.collect(Collectors.joining("\n"));
}
private String trimCommitMessage(final String commitMessage) {
final String commitMessageWithoutLineBreaks = substring(substringBefore(commitMessage, "\n"), 0, 200);
boolean isAbbreviated = commitMessage.length() != commitMessageWithoutLineBreaks.length();
return commitMessageWithoutLineBreaks + (isAbbreviated ? "..." : "");
}
private String slackCode(final String code) {
return "`" + code + "`";
}
private String slackLink(final String link, final String text, final String type) {
return "<" + decorateWithOrigin(link, type) + "|" + escapeSignsForSlackLink(text) + ">";
}
public String slackMultilineQuote(final String text) {
return ">>>" + text;
}
public String slackMultilineCode(final String text) {
return "```" + text + "```";
}
}
|
superspeeder/thegame
|
Engine/src/Buffer.cpp
|
#include "kat/renderer/Buffer.hpp"
#include <spdlog/spdlog.h>
kat::VertexBuffer::VertexBuffer(std::vector<float> data, BufferMode mode) : m_Data(data), m_Mode(mode), m_EffectiveSize(data.size()) {
glGenBuffers(1, &m_Buffer);
push();
spdlog::debug("Created VertexBuffer({0}, {2}) : {1} bytes", m_Buffer, data.size() * sizeof(float), mode);
}
kat::VertexBuffer::VertexBuffer(uint32_t size, BufferMode mode) : m_Data(size), m_Mode(mode) {
glGenBuffers(1, &m_Buffer);
push();
spdlog::debug("Created VertexBuffer({0}, {2}) : {1} bytes", m_Buffer, size * sizeof(float), mode);
}
kat::VertexBuffer::~VertexBuffer() {
glDeleteBuffers(1, &m_Buffer);
spdlog::debug("Deleted VertexBuffer({0})", m_Buffer);
}
void kat::VertexBuffer::set(std::vector<float> data) {
	if (data.size() > m_Data.size()) {
		m_SizeDirty = true;
	}
	m_Data = data;
	m_EffectiveSize = data.size(); // keep the effective size in sync with the new data
	update();
}
void kat::VertexBuffer::push() {
bind();
glBufferData(GL_ARRAY_BUFFER, m_Data.size() * sizeof(float), m_Data.data(), static_cast<GLenum>(m_Mode));
unbind();
m_SizeDirty = false;
}
void kat::VertexBuffer::update() {
if (m_SizeDirty) {
push();
}
else {
bind();
glBufferSubData(GL_ARRAY_BUFFER, 0, m_EffectiveSize * sizeof(float), m_Data.data());
unbind();
}
}
void kat::VertexBuffer::update(std::vector<float> data, uint32_t offset) {
uint32_t endp = data.size() + offset;
if (endp > m_Data.size()) {
spdlog::error("Error updating VertexBuffer({0}) with given data. Data is too large. Updating with largest possible dataset", m_Buffer);
endp = m_Data.size();
}
bind();
glBufferSubData(GL_ARRAY_BUFFER, offset * sizeof(float), (endp - offset) * sizeof(float), data.data());
unbind();
}
void kat::VertexBuffer::forceResize(uint32_t newSize) {
if (newSize == m_Data.size()) return;
m_Data.resize(newSize);
m_SizeDirty = true;
}
void kat::VertexBuffer::setVal(uint32_t index, float v) {
	if (index >= m_Data.size()) {
		spdlog::error("VertexBuffer({0}) doesn't contain index {1}", m_Buffer, index);
		return; // don't write out of bounds
	}
	if (index >= m_EffectiveSize) {
		m_EffectiveSize = index + 1;
	}
	m_Data[index] = v;
}
void kat::VertexBuffer::pushVal(float v) {
if (m_EffectiveSize < m_Data.size()) {
m_Data[m_EffectiveSize++] = v;
}
else {
m_Data.push_back(v);
m_EffectiveSize++;
}
}
void kat::VertexBuffer::reset() {
m_EffectiveSize = 0;
}
void kat::VertexBuffer::shrink(uint32_t size) {
m_EffectiveSize = size;
}
void kat::VertexBuffer::bind() {
glBindBuffer(GL_ARRAY_BUFFER, m_Buffer);
}
void kat::VertexBuffer::unbind() {
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
uint32_t kat::VertexBuffer::getName() {
return m_Buffer;
}
uint32_t kat::VertexBuffer::getEffectiveSize() {
return m_EffectiveSize;
}
kat::ElementBuffer::ElementBuffer(std::vector<uint32_t> data, BufferMode mode) : m_Data(data), m_Mode(mode), m_EffectiveSize(data.size()) {
glGenBuffers(1, &m_Buffer);
push();
spdlog::debug("Created ElementBuffer({0}, {2}) : {1} bytes", m_Buffer, data.size() * sizeof(uint32_t), mode);
}
kat::ElementBuffer::ElementBuffer(uint32_t size, BufferMode mode) : m_Data(size), m_Mode(mode) {
glGenBuffers(1, &m_Buffer);
push();
spdlog::debug("Created ElementBuffer({0}, {2}) : {1} bytes", m_Buffer, size * sizeof(uint32_t), mode);
}
kat::ElementBuffer::~ElementBuffer() {
glDeleteBuffers(1, &m_Buffer);
spdlog::debug("Deleted ElementBuffer({0})", m_Buffer);
}
void kat::ElementBuffer::set(std::vector<uint32_t> data) {
	if (data.size() > m_Data.size()) {
		m_SizeDirty = true;
	}
	m_Data = data;
	m_EffectiveSize = data.size(); // keep the effective size in sync with the new data
	update();
}
void kat::ElementBuffer::push() {
bind();
glBufferData(GL_ELEMENT_ARRAY_BUFFER, m_Data.size() * sizeof(uint32_t), m_Data.data(), static_cast<GLenum>(m_Mode));
unbind();
m_SizeDirty = false;
}
void kat::ElementBuffer::update() {
if (m_SizeDirty) {
push();
}
else {
bind();
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, 0, m_EffectiveSize * sizeof(uint32_t), m_Data.data());
unbind();
}
}
void kat::ElementBuffer::update(std::vector<uint32_t> data, uint32_t offset) {
uint32_t endp = data.size() + offset;
if (endp > m_Data.size()) {
spdlog::error("Error updating ElementBuffer({0}) with given data. Data is too large. Updating with largest possible dataset", m_Buffer);
endp = m_Data.size();
}
bind();
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, offset * sizeof(uint32_t), (endp - offset) * sizeof(uint32_t), data.data());
unbind();
}
void kat::ElementBuffer::forceResize(uint32_t newSize) {
if (newSize == m_Data.size()) return;
m_Data.resize(newSize);
m_SizeDirty = true;
}
void kat::ElementBuffer::setVal(uint32_t index, uint32_t v) {
	if (index >= m_Data.size()) {
		spdlog::error("ElementBuffer({0}) doesn't contain index {1}", m_Buffer, index);
		return; // don't write out of bounds
	}
	if (index >= m_EffectiveSize) {
		m_EffectiveSize = index + 1;
	}
	m_Data[index] = v;
}
void kat::ElementBuffer::pushVal(uint32_t v) {
if (m_EffectiveSize < m_Data.size()) {
m_Data[m_EffectiveSize++] = v;
}
else {
m_Data.push_back(v);
m_EffectiveSize++;
}
}
void kat::ElementBuffer::reset() {
m_EffectiveSize = 0;
}
void kat::ElementBuffer::shrink(uint32_t size) {
m_EffectiveSize = size;
}
void kat::ElementBuffer::bind() {
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_Buffer);
}
void kat::ElementBuffer::unbind() {
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
uint32_t kat::ElementBuffer::getName() {
return m_Buffer;
}
uint32_t kat::ElementBuffer::getEffectiveSize() {
return m_EffectiveSize;
}
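// Hypothetical usage sketch (not part of the original file): the push()/update()
// split means a full glBufferData reallocation only happens when the data grows,
// while in-place edits go through the cheaper glBufferSubData path. The
// BufferMode enumerator name is assumed.
//
//   kat::VertexBuffer vbo({0.f, 1.f, 2.f}, kat::BufferMode::DynamicDraw);
//   vbo.setVal(1, 5.f); // edit the CPU-side copy
//   vbo.update();       // glBufferSubData, no reallocation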
|
yashvantys/shecabs
|
assets/surveyapp/custom/custom/management/statistics.js
|
var Statistics = function () {
return{
loadPage:function(){
var clientList = $.cookie("man-statistics-client-list");
if (clientList != null) {
$("#clientlist").select2("val",clientList.split(','));
}
$(".chkuserfilter").click(function(){
var chk= $('.chkuserfilter').is(':checked');
if(chk){
$(".userfilter").prop("disabled",false);
$(".userlist").prop("disabled",true);
$(".userfilter").focus();
}else{
$(".userfilter").prop("disabled",true);
$(".userlist").prop("disabled",false);
}
});
$("#clientlist").on("change",function(){
if($(this).val().indexOf("allclients")>=0){
                var allclients = [];
$("#clientlist option").each(function()
{
                    // collect the value of each customer option
if($(this).val()!=""){
if($(this).parent().attr("label")=="Customers"){
allclients.push($(this).val());
}
}
});
$(this).select2('val', allclients);
}
});
$(".cedit_stat_filter_submit").click(function(e){
e.preventDefault();
var data = $('.rule-filter-form').serializeArray();
var aoData ={};
for (var i = 0; i < data.length; i++) {
                aoData[data[i].name] = data[i].value;
}
$('#example').DataTable( {
"dom": 'T<"clear">lfrtip',
"processing": true,
"serverSide": true,
"bSort" : false,
"ajax": {
"url": SocialView.base_url+'management/statistics/bwmc/userdata',
"type": "POST",
data:aoData,
complete: function(){
Custom.hideLoader();
},
beforeSend :function(){
Custom.showLoader();
}
},
/*"ajax": SocialView.base_url+'management/statistics/bwmc/userdata',*/
bFilter: false,
bPaginate: false,
bDestroy: true,
'oLanguage': {
'sEmptyTable': 'No data found in the given date range'
},
"bInfo" : false,
"oTableTools": {
"sSwfPath": SocialView.asset_url+"global/plugins/datatables/extensions/TableTools/swf/copy_csv_xls_pdf.swf",
"aButtons":[
"copy",
"csv",
{
"sExtends": "pdf",
"sTitle": "Content Management Statistics"},
"print"
]},
"fnServerParams": function (aoData) {
}
} );
});
$(".cedit_pending_submit ").click(function(){
});
$("#cmb_report_type").on("change",function(){
if($("#cmb_report_type").val() == "automated_task_count"){
$("#chkcompetitor").prop("disabled",true)
}else{
$("#chkcompetitor").prop("disabled",false)
}
}).trigger("change");
$( "#rule-filter-task-form" ).submit(function( event ) {
if($("#clientlist").select2("val").length==0){
                bootbox.alert("Please select at least one client.");
event.preventDefault();
}else{
var clientList = $("#clientlist").select2("val");
$.cookie("man-statistics-client-list", clientList);
}
});
}
}
}();
|
dhinf/otf2xx
|
include/otf2xx/definition/detail/comm_impl.hpp
|
/*
* This file is part of otf2xx (https://github.com/tud-zih-energy/otf2xx)
* otf2xx - A wrapper for the Open Trace Format 2 library
*
* Copyright (c) 2013-2016, Technische Universität Dresden, Germany
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef INCLUDE_OTF2XX_DEFINITIONS_DETAIL_COMM_HPP
#define INCLUDE_OTF2XX_DEFINITIONS_DETAIL_COMM_HPP
#include <otf2xx/exception.hpp>
#include <otf2xx/fwd.hpp>
#include <otf2xx/reference.hpp>
#include <otf2xx/definition/detail/ref_counted.hpp>
#include <otf2xx/intrusive_ptr.hpp>
#include <otf2xx/definition/group.hpp>
#include <otf2xx/definition/string.hpp>
#include <sstream>
#include <variant>
namespace otf2
{
namespace definition
{
namespace detail
{
class comm_impl : public ref_counted
{
public:
using tag_type = comm_base;
private:
using reference_type = otf2::reference_impl<comm, tag_type>;
public:
using comm_flag_type = otf2::common::comm_flag_type;
using group_type =
std::variant<otf2::definition::comm_group, otf2::definition::comm_self_group>;
comm_impl(const otf2::definition::string& name, const group_type& group,
comm_impl* parent, reference_type pref, comm_flag_type flags,
std::int64_t retain_count = 0)
: ref_counted(retain_count), name_(name), group_(group), parent_(parent), pref_(pref),
flags_(flags)
{
}
comm_impl(const otf2::definition::string& name, const group_type& group,
comm_flag_type flags, std::int64_t retain_count = 0)
: ref_counted(retain_count), name_(name), group_(group), parent_(),
pref_(reference_type::undefined()), flags_(flags)
{
}
otf2::definition::string& name()
{
return name_;
}
const group_type& group() const
{
return group_;
}
auto parent() const
{
return std::make_pair(parent_.get(), pref_);
}
comm_flag_type flags() const
{
return flags_;
}
private:
otf2::definition::string name_;
group_type group_;
otf2::intrusive_ptr<comm_impl> parent_;
reference_type pref_;
comm_flag_type flags_;
};
} // namespace detail
} // namespace definition
} // namespace otf2
#endif // INCLUDE_OTF2XX_DEFINITIONS_DETAIL_COMM_HPP
|
dmcouncil/copyable
|
spec/config_spec.rb
|
<gh_stars>0
require_relative 'helper/copyable_spec_helper'
describe 'Copyable.config' do
it 'should be defined' do
expect(Copyable).to respond_to(:config)
end
describe 'suppress_schema_errors' do
it 'should default to false' do
expect(Copyable.config.suppress_schema_errors).to be_falsey
end
it 'should be changeable' do
Copyable.config.suppress_schema_errors = true
expect(Copyable.config.suppress_schema_errors).to be_truthy
Copyable.config.suppress_schema_errors = false
end
context 'when set to true' do
before(:each) do
Copyable.config.suppress_schema_errors = true
end
after(:each) do
Copyable.config.suppress_schema_errors = false
end
context 'when missing columns' do
before(:each) do
@model_definition = lambda do
undefine_copyable_in CopyableCoin
class CopyableCoin < ActiveRecord::Base
copyable do
disable_all_callbacks_and_observers_except_validate
columns({
kind: :copy,
})
associations({
})
end
end
end
end
it 'should not throw an error' do
expect(@model_definition).to_not raise_error
end
end
context 'with unknown columns' do
before(:each) do
@model_definition = lambda do
undefine_copyable_in CopyableCoin
class CopyableCoin < ActiveRecord::Base
copyable do
disable_all_callbacks_and_observers_except_validate
columns({
what_is_this_column_doing_here: :copy,
kind: :copy,
year: :copy,
})
associations({
})
end
end
end
end
it 'should not throw an error' do
expect(@model_definition).to_not raise_error
end
end
context 'when missing associations' do
before(:each) do
@model_definition = lambda do
class CopyablePet < ActiveRecord::Base
copyable do
disable_all_callbacks_and_observers_except_validate
columns({
name: :copy,
kind: :copy,
birth_year: :copy,
})
associations({
# MISSING copyable_toys: :copy,
copyable_pet_tag: :copy,
copyable_pet_profile: :copy,
copyable_pet_foods: :copy,
copyable_pet_sitting_patronages: :copy,
})
end
end
end
end
it 'should not throw an error' do
expect(@model_definition).to_not raise_error
end
end
context 'with unknown associations' do
before(:each) do
@model_definition = lambda do
class CopyablePet < ActiveRecord::Base
copyable do
disable_all_callbacks_and_observers_except_validate
columns({
name: :copy,
kind: :copy,
birth_year: :copy,
})
associations({
this_assoc_should_not_be_here: :copy,
copyable_toys: :copy,
copyable_pet_tag: :copy,
copyable_pet_profile: :copy,
copyable_pet_foods: :copy,
copyable_pet_sitting_patronages: :copy,
})
end
end
end
end
it 'should not throw an error' do
expect(@model_definition).to_not raise_error
end
end
end
end
end
|
ajblane/iota_fpga
|
pow_accel_soc/software/u-boot-socfpga/board/xes/xpedite517x/xpedite517x.c
|
/*
* Copyright 2009 Extreme Engineering Solutions, Inc.
*
* See file CREDITS for list of people who contributed to this
* project.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston,
* MA 02111-1307 USA
*/
#include <common.h>
#include <asm/processor.h>
#include <asm/fsl_ddr_sdram.h>
#include <asm/mmu.h>
#include <asm/io.h>
#include <fdt_support.h>
#include <pca953x.h>
#include "../common/fsl_8xxx_misc.h"
#if defined(CONFIG_OF_BOARD_SETUP) && defined(CONFIG_PCI)
extern void ft_board_pci_setup(void *blob, bd_t *bd);
#endif
/*
* Print out which flash was booted from and if booting from the 2nd flash,
* swap flash chip selects to maintain consistent flash numbering/addresses.
*/
static void flash_cs_fixup(void)
{
int flash_sel;
/*
* Print boot dev and swap flash chip selects if booted from 2nd
* flash. Swapping chip selects presents the user with a common memory
* map regardless of which flash was booted from.
*/
flash_sel = !((pca953x_get_val(CONFIG_SYS_I2C_PCA953X_ADDR0) &
CONFIG_SYS_PCA953X_C0_FLASH_PASS_CS));
printf("Flash: Executed from flash%d\n", flash_sel ? 2 : 1);
if (flash_sel) {
set_lbc_br(0, CONFIG_SYS_BR1_PRELIM);
set_lbc_or(0, CONFIG_SYS_OR1_PRELIM);
set_lbc_br(1, CONFIG_SYS_BR0_PRELIM);
set_lbc_or(1, CONFIG_SYS_OR0_PRELIM);
}
}
int board_early_init_r(void)
{
/* Initialize PCA9557 devices */
pca953x_set_pol(CONFIG_SYS_I2C_PCA953X_ADDR0, 0xff, 0);
pca953x_set_pol(CONFIG_SYS_I2C_PCA953X_ADDR1, 0xff, 0);
pca953x_set_pol(CONFIG_SYS_I2C_PCA953X_ADDR2, 0xff, 0);
pca953x_set_pol(CONFIG_SYS_I2C_PCA953X_ADDR3, 0xff, 0);
flash_cs_fixup();
return 0;
}
phys_size_t initdram(int board_type)
{
phys_size_t dram_size = fsl_ddr_sdram();
#if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRCONTROLLER)
/* Initialize and enable DDR ECC */
ddr_enable_ecc(dram_size);
#endif
return dram_size;
}
#if defined(CONFIG_OF_BOARD_SETUP)
void ft_board_setup(void *blob, bd_t *bd)
{
#ifdef CONFIG_PCI
ft_board_pci_setup(blob, bd);
#endif
ft_cpu_setup(blob, bd);
}
#endif
|
M1kemclain247/ParkingDemo
|
app/src/main/java/com/example/m1kes/parkingdemo/adapters/recyclerviews/LoggedInAdapter.java
|
<filename>app/src/main/java/com/example/m1kes/parkingdemo/adapters/recyclerviews/LoggedInAdapter.java
package com.example.m1kes.parkingdemo.adapters.recyclerviews;
import android.content.Context;
import android.os.CountDownTimer;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.example.m1kes.parkingdemo.R;
import com.example.m1kes.parkingdemo.models.Transaction;
import com.example.m1kes.parkingdemo.sqlite.adapters.TransactionAdapter;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static com.example.m1kes.parkingdemo.util.DBUtils.getDateParts;
public class LoggedInAdapter extends RecyclerView.Adapter<LoggedInAdapter.RecyclerViewHolder> {
private List<Transaction> transactions;
private Context context;
private LoggedInAdapter.RecyclerViewHolder viewHolder;
public LoggedInAdapter(Context context, List<Transaction> transactions) {
this.transactions = transactions;
this.context = context;
}
@Override
public RecyclerViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.logged_in_vehicle_row,parent,false);
viewHolder = new LoggedInAdapter.RecyclerViewHolder(view,context);
return viewHolder;
}
@Override
public void onBindViewHolder(RecyclerViewHolder holder, int position) {
Transaction transaction = transactions.get(position);
if(transaction!=null){
holder.txtLoggedIn_RegNum.setText(transaction.getVehicleregNumber());
String expiryDateTime = ""+transaction.getExpiry_datetime();
Calendar cal = Calendar.getInstance();
String[] parts= getDateParts(expiryDateTime);
cal.set(Integer.parseInt(parts[0]), Integer.parseInt(parts[1])-1, Integer.parseInt(parts[2]), Integer.parseInt(parts[3]), Integer.parseInt(parts[4]), Integer.parseInt(parts[5]));
Date lastLoggedDate = cal.getTime();
System.out.println("*******************************************************************************************");
System.out.println("Expiry Date Time : "+lastLoggedDate + " Current Date Time: "+new Date());
System.out.println("Expiry Date Time : "+lastLoggedDate.getTime() + " Current Date Time: "+new Date().getTime());
System.out.println("*******************************************************************************************");
if(new Date().before(lastLoggedDate)){
long diff = lastLoggedDate.getTime() - new Date().getTime();//as given
long diffSeconds = diff / 1000 % 60;
long diffMinutes = diff / (60 * 1000) % 60;
long diffHours = diff / (60 * 60 * 1000);
String duration = parseDuration(diffHours,diffMinutes,diffSeconds);
holder.txtLoggedIn_Time.setText(duration);
}
System.out.println(transaction.getExpiry_datetime());
}
}
private String parseDuration(long diffHours,long diffMinutes, long diffSeconds){
String duration ="";
if(diffHours<0){
duration += Math.abs(diffHours);
}else{
duration += diffHours;
}
duration += " hrs ";
if(diffMinutes<0){
duration += Math.abs(diffMinutes);
}else {
duration += diffMinutes;
}
duration += " m ";
if(diffSeconds<0){
duration += Math.abs(diffSeconds);
}else {
duration += diffSeconds;
}
duration += " s ";
return duration;
}
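// Illustrative check (values chosen here, not taken from the app): parseDuration(1, 5, 30)
// builds the string "1 hrs 5 m 30 s " (note the trailing space), and negative inputs are
// rendered via Math.abs, e.g. parseDuration(0, -2, -15) gives "0 hrs 2 m 15 s ".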
@Override
public int getItemCount() {
return transactions.size();
}
class RecyclerViewHolder extends RecyclerView.ViewHolder{
TextView txtLoggedIn_RegNum,txtLoggedIn_Time;
RecyclerViewHolder(View view,Context context){
super(view);
txtLoggedIn_RegNum = (TextView)view.findViewById(R.id.txtLoggedIn_RegNum);
txtLoggedIn_Time = (TextView)view.findViewById(R.id.txtLoggedIn_Time);
}
}
}
|
GravisZro/kos
|
common/net/net_ipv6.c
|
/* KallistiOS ##version##
kernel/net/net_ipv6.c
Copyright (C) 2010, 2012, 2013 <NAME>
*/
#include <string.h>
#include <netinet/in.h>
#include <kos/net.h>
#include <kos/fs_socket.h>
#include <errno.h>
#include "net_ipv6.h"
#include "net_icmp6.h"
#include "net_ipv4.h"
static net_ipv6_stats_t ipv6_stats = { 0 };
const struct in6_addr in6addr_any = IN6ADDR_ANY_INIT;
const struct in6_addr in6addr_loopback = IN6ADDR_LOOPBACK_INIT;
/* These few aren't in the IEEE 1003.1-2008 spec, but do appear on (at least)
Mac OS X (in not strictly compliant mode) and are useful for us... */
const struct in6_addr in6addr_linklocal_allnodes = {
{ {
0xff, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01
}
}
};
const struct in6_addr in6addr_linklocal_allrouters = {
{ {
0xff, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02
}
}
};
static int is_in_network(netif_t *net, const struct in6_addr *ip) {
int i;
/* Make sure it's not trivially link-local */
if(IN6_IS_ADDR_LINKLOCAL(ip)) {
return 1;
}
/* Go through each prefix, and see if any match */
for(i = 0; i < net->ip6_addr_count; ++i) {
if(!memcmp(ip, &net->ip6_addrs[i], 8)) {
return 1;
}
}
return 0;
}
/* Send a packet on the specified network adapter */
int net_ipv6_send_packet(netif_t *net, ipv6_hdr_t *hdr, const uint8 *data,
size_t data_size) {
uint8 pkt[data_size + sizeof(ipv6_hdr_t) + sizeof(eth_hdr_t)];
uint8 dst_mac[6];
int err;
struct in6_addr dst = hdr->dst_addr;
eth_hdr_t *ehdr;
if(!net) {
net = net_default_dev;
if(!net) {
errno = ENETDOWN;
return -1;
}
}
/* Are we sending a packet to loopback? */
if(IN6_IS_ADDR_LOOPBACK(&hdr->dst_addr)) {
memcpy(pkt, hdr, sizeof(ipv6_hdr_t));
memcpy(pkt + sizeof(ipv6_hdr_t), data, data_size);
++ipv6_stats.pkt_sent;
/* Send the packet "away" */
net_ipv6_input(NULL, pkt, sizeof(ipv6_hdr_t) + data_size, NULL);
return 0;
}
else if(net->flags & NETIF_NOETH) {
memcpy(pkt, hdr, sizeof(ipv6_hdr_t));
memcpy(pkt + sizeof(ipv6_hdr_t), data, data_size);
++ipv6_stats.pkt_sent;
/* Send the packet away */
return net->if_tx(net, pkt, sizeof(ipv6_hdr_t) + data_size,
NETIF_BLOCK);
}
else if(IN6_IS_ADDR_MULTICAST(&hdr->dst_addr)) {
dst_mac[0] = dst_mac[1] = 0x33;
dst_mac[2] = hdr->dst_addr.__s6_addr.__s6_addr8[12];
dst_mac[3] = hdr->dst_addr.__s6_addr.__s6_addr8[13];
dst_mac[4] = hdr->dst_addr.__s6_addr.__s6_addr8[14];
dst_mac[5] = hdr->dst_addr.__s6_addr.__s6_addr8[15];
}
else {
if(!is_in_network(net, &dst)) {
dst = net->ip6_gateway;
}
err = net_ndp_lookup(net, &dst, dst_mac, hdr, data, data_size);
if(err == -1) {
errno = ENETUNREACH;
++ipv6_stats.pkt_send_failed;
return err;
}
else if(err == -2) {
return 0;
}
}
/* Fill in the ethernet header */
ehdr = (eth_hdr_t *)pkt;
memcpy(ehdr->dest, dst_mac, 6);
memcpy(ehdr->src, net->mac_addr, 6);
ehdr->type[0] = 0x86;
ehdr->type[1] = 0xDD;
/* Put the IP header / data into our ethernet packet */
memcpy(pkt + sizeof(eth_hdr_t), hdr, sizeof(ipv6_hdr_t));
memcpy(pkt + sizeof(eth_hdr_t) + sizeof(ipv6_hdr_t), data, data_size);
++ipv6_stats.pkt_sent;
/* Send it away */
net->if_tx(net, pkt, sizeof(ipv6_hdr_t) + data_size + sizeof(eth_hdr_t),
NETIF_BLOCK);
return 0;
}
int net_ipv6_send(netif_t *net, const uint8 *data, size_t data_size,
int hop_limit, int proto, const struct in6_addr *src,
const struct in6_addr *dst) {
ipv6_hdr_t hdr;
if(!net) {
net = net_default_dev;
if(!net) {
errno = ENETDOWN;
return -1;
}
}
/* Set up the hop limit. We need to do this here, in case we end up passing
this off to the IPv4 code, otherwise we could end up with a 0 down there
for the ttl, which would be bad. */
if(!hop_limit) {
if(net->hop_limit)
hop_limit = net->hop_limit;
else
hop_limit = 255;
}
/* If this is actually going both to and from an IPv4 address, use the IPv4
send function to do the rest. Note that only V4-mapped addresses are
supported here (::ffff:x.y.z.w) */
if(IN6_IS_ADDR_V4MAPPED(src) && IN6_IS_ADDR_V4MAPPED(dst)) {
return net_ipv4_send(net, data, data_size, -1, hop_limit, proto,
src->__s6_addr.__s6_addr32[3],
dst->__s6_addr.__s6_addr32[3]);
}
else if(IN6_IS_ADDR_V4MAPPED(src) || IN6_IS_ADDR_V4MAPPED(dst) ||
IN6_IS_ADDR_V4COMPAT(src) || IN6_IS_ADDR_V4COMPAT(dst)) {
return -1;
}
hdr.version_lclass = 0x60;
hdr.hclass_lflow = 0;
hdr.lclass = 0;
hdr.length = ntohs(data_size);
hdr.next_header = proto;
hdr.hop_limit = hop_limit;
hdr.src_addr = *src;
hdr.dst_addr = *dst;
/* XXXX: Handle fragmentation... */
return net_ipv6_send_packet(net, &hdr, data, data_size);
}
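/* Usage sketch (hypothetical buffer and address names, not from this file): send a UDP
 * payload on the default interface, letting the stack pick the hop limit:
 *
 *     net_ipv6_send(NULL, payload, payload_len, 0, IPPROTO_UDP, &my_addr, &peer_addr);
 *
 * A NULL netif falls back to net_default_dev, and a hop limit of 0 is replaced by the
 * interface's configured hop limit (or 255) as handled above.
 */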
int net_ipv6_input(netif_t *src, const uint8 *pkt, size_t pktsize,
const eth_hdr_t *eth) {
ipv6_hdr_t *ip;
uint8 next_hdr;
//int pos;
size_t len;
int rv;
if(pktsize < sizeof(ipv6_hdr_t)) {
/* This is obviously a bad packet, drop it */
++ipv6_stats.pkt_recv_bad_size;
return -1;
}
ip = (ipv6_hdr_t *)pkt;
len = ntohs(ip->length);
if(pktsize < len + sizeof(ipv6_hdr_t)) {
/* The packet is of size less than the payload length + the size of a
minimal IPv6 header; it must be bad, drop it */
++ipv6_stats.pkt_recv_bad_size;
return -1;
}
/* Parse the header to find the payload */
//pos = sizeof(ipv6_hdr_t); // Currently unused, but will be needed later.
next_hdr = ip->next_header;
if(eth)
net_ndp_insert(src, eth->src, &ip->src_addr, 1);
/* XXXX: Parse options and deal with fragmentation */
switch(next_hdr) {
case IPV6_HDR_ICMP:
return net_icmp6_input(src, ip, pkt + sizeof(ipv6_hdr_t), len);
default:
rv = fs_socket_input(src, AF_INET6, next_hdr, pkt,
pkt + sizeof(ipv6_hdr_t), len);
if(rv == -2) {
/* We don't know what to do with this packet, so send an ICMPv6
message indicating that. */
++ipv6_stats.pkt_recv_bad_proto;
return net_icmp6_send_param_prob(src,
ICMP6_PARAM_PROB_UNK_HEADER, 6,
pkt, pktsize);
}
++ipv6_stats.pkt_recv;
return rv;
}
return 0;
}
net_ipv6_stats_t net_ipv6_get_stats(void) {
return ipv6_stats;
}
uint16 net_ipv6_checksum_pseudo(const struct in6_addr *src,
const struct in6_addr *dst,
uint32 upper_len, uint8 next_hdr) {
ipv6_pseudo_hdr_t ps;
/* Since the src and dst addresses aren't necessarily aligned when we send
them in from header processing, do this the hard way. */
memcpy(&ps.src_addr, src, sizeof(struct in6_addr));
memcpy(&ps.dst_addr, dst, sizeof(struct in6_addr));
/* If this is actually an IPv4 packet, do the calculation there instead. */
if(IN6_IS_ADDR_V4MAPPED(&ps.src_addr) &&
IN6_IS_ADDR_V4MAPPED(&ps.dst_addr)) {
return net_ipv4_checksum_pseudo(ps.src_addr.__s6_addr.__s6_addr32[3],
ps.dst_addr.__s6_addr.__s6_addr32[3],
next_hdr, (uint16)upper_len);
}
ps.upper_layer_len = htonl(upper_len);
ps.next_header = next_hdr;
ps.zero[0] = ps.zero[1] = ps.zero[2] = 0;
return ~net_ipv4_checksum((uint8 *)&ps, sizeof(ipv6_pseudo_hdr_t), 0);
}
int net_ipv6_init(void) {
/* Make sure we're registered to get "All nodes" multicasts from the
ethernet layer. */
uint8 mac[6] = { 0x33, 0x33, 0x00, 0x00, 0x00, 0x01 };
net_multicast_add(mac);
/* Also register for the one for our link-local address' solicited nodes
group (which will do the same for all our other addresses too). */
mac[2] = 0xFF;
mac[3] = net_default_dev->ip6_lladdr.s6_addr[13];
mac[4] = net_default_dev->ip6_lladdr.s6_addr[14];
mac[5] = net_default_dev->ip6_lladdr.s6_addr[15];
net_multicast_add(mac);
return 0;
}
void net_ipv6_shutdown(void) {
uint8 mac[6] = { 0x33, 0x33, 0x00, 0x00, 0x00, 0x01 };
/* Remove from the all nodes multicast group */
net_multicast_del(mac);
/* ... and our solicited nodes multicast group */
mac[2] = 0xFF;
mac[3] = net_default_dev->ip6_lladdr.s6_addr[13];
mac[4] = net_default_dev->ip6_lladdr.s6_addr[14];
mac[5] = net_default_dev->ip6_lladdr.s6_addr[15];
net_multicast_del(mac);
}
|
jlanga/smsk_selection
|
src/guidance.v2.02/programs/semphy/semphySearchBestTree.h
|
<reponame>jlanga/smsk_selection<gh_stars>1-10
// $Id: semphySearchBestTree.h 6002 2009-03-20 19:39:03Z privmane $
#ifndef ___SEMPHY_SEARCH_BEST_TREE
#define ___SEMPHY_SEARCH_BEST_TREE
#include "alphabet.h"
#include "sequenceContainer.h"
#include "tree.h"
#include "stochasticProcess.h"
#include <iostream>
using namespace std;
class semphySearchBestTree {
public:
explicit semphySearchBestTree(sequenceContainer& sc,
tree& startTree,
const tree* ctPtr,
stochasticProcess& sp,
ostream& out,
const int numOfRandomStart = 1,
const bool optimizeAlpha = false,
const double epsilonLikelihoodImprovement4alphaOptimiz = 0.001,
const double epsilonLikelihoodImprovement4BBL = 0.001,
const int maxIterationsBBL = 10,
const Vdouble * weights = NULL);
virtual ~semphySearchBestTree(){}
private:
MDOUBLE semphyBasicSearchBestTree(
sequenceContainer& sc,
tree& et,
const tree* ctPtr,
stochasticProcess& sp,
const bool optimizeAlpha = false,
const double epsilonLikelihoodImprovement4alphaOptimiz = 0.001,
const double epsilonLikelihoodImprovement4BBL = 0.001,
const int maxIterationsBBL = 10,
const Vdouble * weights = NULL);
MDOUBLE semphyBasicSearchBestTree(
sequenceContainer& sc,
tree& startTree,
const tree* ctPtr,
stochasticProcess& sp,
ostream& out,
const bool optimizeAlpha = false,
const double epsilonLikelihoodImprovement4alphaOptimiz = 0.001,
const double epsilonLikelihoodImprovement4BBL = 0.001,
const int maxIterationsBBL = 10,
const Vdouble * weights = NULL);
MDOUBLE semphyBasicSearchBestTreeManyRandomStarts(
sequenceContainer& sc,
tree& et,
const tree* ctPtr,
stochasticProcess& sp,
ostream& out,
const int nRanStart,
const bool optimizeAlpha = false,
const double epsilonLikelihoodImprovement4alphaOptimiz = 0.001,
const double epsilonLikelihoodImprovement4BBL = 0.001,
const int maxIterationsBBL = 10,
const Vdouble * weights = NULL);
};
#endif
|
mailfly/das
|
das-console-manager/src/main/java/com/ppdai/platform/das/console/dto/entry/das/DataSearchLog.java
|
<filename>das-console-manager/src/main/java/com/ppdai/platform/das/console/dto/entry/das/DataSearchLog.java<gh_stars>1-10
package com.ppdai.platform.das.console.dto.entry.das;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.ppdai.das.client.ColumnDefinition;
import com.ppdai.das.client.TableDefinition;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.persistence.*;
import java.sql.JDBCType;
import java.util.Date;
/**
* Created by das-console
* Do not modify this file
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Table
public class DataSearchLog {
public static final DataSearchLogDefinition DATASEARCHLOG = new DataSearchLogDefinition();
public static class DataSearchLogDefinition extends TableDefinition {
public final ColumnDefinition id;
public final ColumnDefinition ip;
public final ColumnDefinition requestType;
public final ColumnDefinition request;
public final ColumnDefinition success;
public final ColumnDefinition result;
public final ColumnDefinition userNo;
public final ColumnDefinition inserttime;
public final ColumnDefinition updatetime;
public final ColumnDefinition isactive;
public DataSearchLogDefinition as(String alias) {
return _as(alias);
}
public DataSearchLogDefinition inShard(String shardId) {
return _inShard(shardId);
}
public DataSearchLogDefinition shardBy(String shardValue) {
return _shardBy(shardValue);
}
public DataSearchLogDefinition() {
super("data_search_log");
id = column("id", JDBCType.BIGINT);
ip = column("ip", JDBCType.VARCHAR);
requestType = column("request_type", JDBCType.TINYINT);
request = column("request", JDBCType.LONGVARCHAR);
success = column("success", JDBCType.BIT);
result = column("result", JDBCType.LONGVARCHAR);
userNo = column("user_no", JDBCType.VARCHAR);
inserttime = column("inserttime", JDBCType.TIMESTAMP);
updatetime = column("updatetime", JDBCType.TIMESTAMP);
isactive = column("isactive", JDBCType.BIT);
setColumnDefinitions(
id, ip, requestType, request, success, result, userNo, inserttime, updatetime,
isactive
);
}
}
/** Auto-increment primary key **/
@Id
@Column(name = "id")
@GeneratedValue(strategy = GenerationType.AUTO)
private Long id;
@Column(name = "ip")
private String ip;
/** Type: 1 = query, 0 = download **/
@Column(name = "request_type")
private Integer request_type;
/** Request parameters **/
@Column(name = "request")
private String request;
/** Request status: 1 = success, 0 = failure **/
@Column(name = "success")
private Boolean success;
/** Record of exception messages and other details **/
@Column(name = "result")
private String result;
/** Operator's employee number **/
@Column(name = "user_no")
private String user_no;
/** Insert time **/
@Column(name = "inserttime")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date insert_time;
/** Update time **/
@Column(name = "updatetime")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date update_time;
/** Logical (soft) delete flag **/
@Column(name = "isactive")
private Boolean isactive;
private String user_real_name;
}
|
aaujayasena/identy-apps
|
node_modules/rc-tree/lib/DropIndicator.js
|
<gh_stars>0
"use strict";
var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = DropIndicator;
var React = _interopRequireWildcard(require("react"));
function DropIndicator(_ref) {
var dropPosition = _ref.dropPosition,
dropLevelOffset = _ref.dropLevelOffset,
indent = _ref.indent;
var style = {
pointerEvents: 'none',
position: 'absolute',
right: 0,
backgroundColor: 'red',
height: 2
};
switch (dropPosition) {
case -1:
style.top = 0;
style.left = -dropLevelOffset * indent;
break;
case 1:
style.bottom = 0;
style.left = -dropLevelOffset * indent;
break;
case 0:
style.bottom = 0;
style.left = indent;
break;
}
return React.createElement("div", {
style: style
});
}
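// Reading of the cases above (an interpretation of rc-tree's drag-and-drop convention, not
// documented in this file): dropPosition -1 pins the 2px line to the top of the target node,
// 1 pins it to the bottom, and 0 ("drop onto the node") also sits at the bottom but is shifted
// right by one indent level to suggest the dragged node will become a child.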
|
nesl/UnderwaterSensorTag
|
Aquamote/Firmware/Firmware3.1/GPS_workspace/ble_examples-ble_examples-2.2/src/components/display_eng/ti/mw/display/DisplaySharp.c
|
<filename>Aquamote/Firmware/Firmware3.1/GPS_workspace/ble_examples-ble_examples-2.2/src/components/display_eng/ti/mw/display/DisplaySharp.c
/*
* Copyright (c) 2016, Texas Instruments Incorporated
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of Texas Instruments Incorporated nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/* -----------------------------------------------------------------------------
* Includes
* ------------------------------------------------------------------------------
*/
// TI RTOS drivers
#include <ti/sysbios/BIOS.h>
#include <ti/sysbios/knl/Task.h>
#include <ti/sysbios/knl/Semaphore.h>
#include <ti/drivers/PIN.h>
#include <ti/drivers/SPI.h>
#include <xdc/runtime/Log.h>
#include <xdc/runtime/System.h>
#include <ti/mw/grlib/grlib.h>
#include <ti/mw/display/Display.h>
#include <ti/mw/display/DisplaySharp.h>
#include <ti/mw/lcd/SharpGrLib.h>
/* -----------------------------------------------------------------------------
* Constants and macros
* ------------------------------------------------------------------------------
*/
// Timeout of the semaphore that controls exclusive access to the LCD (infinite)
#define ACCESS_TIMEOUT BIOS_WAIT_FOREVER
/* -----------------------------------------------------------------------------
* Type definitions
* ------------------------------------------------------------------------------
*/
/* -----------------------------------------------------------------------------
* Local variables
* ------------------------------------------------------------------------------
*/
/* Display function table for sharp implementation */
const Display_FxnTable DisplaySharp_fxnTable = {
DisplaySharp_open,
DisplaySharp_clear,
DisplaySharp_clearLines,
DisplaySharp_put5,
DisplaySharp_close,
DisplaySharp_control,
DisplaySharp_getType,
};
/* -----------------------------------------------------------------------------
* Functions
* ------------------------------------------------------------------------------
*/
/*!
* @fn DisplaySharp_open
*
* @brief Initialize the LCD
*
* @descr Initializes the pins used by the LCD, creates the resource access
* protection semaphore, turns on the LCD device, initializes the
* frame buffer, sets the default colors (white foreground on a black
* background, as configured below), and finally clears the display.
*
* @param hDisplay - pointer to Display_Config struct
* @param params - display parameters
*
* @return Pointer to Display_Config struct
*/
Display_Handle DisplaySharp_open(Display_Handle hDisplay,
Display_Params *params)
{
DisplaySharp_HWAttrs *hwAttrs = (DisplaySharp_HWAttrs *)hDisplay->hwAttrs;
DisplaySharp_Object *object = (DisplaySharp_Object *)hDisplay->object;
PIN_Config pinTable[4 + 1];
object->lineClearMode = params->lineClearMode;
uint32_t i = 0;
if (hwAttrs->csPin != PIN_TERMINATE)
{
pinTable[i++] = hwAttrs->csPin | PIN_GPIO_OUTPUT_EN | PIN_GPIO_LOW | PIN_PUSHPULL | PIN_DRVSTR_MAX;
}
if (hwAttrs->extcominPin != PIN_TERMINATE)
{
pinTable[i++] = hwAttrs->extcominPin | PIN_GPIO_OUTPUT_EN | PIN_GPIO_LOW | PIN_PUSHPULL | PIN_DRVSTR_MAX;
}
if (hwAttrs->powerPin != PIN_TERMINATE)
{
pinTable[i++] = hwAttrs->powerPin | PIN_GPIO_OUTPUT_EN | PIN_GPIO_HIGH | PIN_PUSHPULL | PIN_DRVSTR_MAX;
}
if (hwAttrs->enablePin != PIN_TERMINATE)
{
pinTable[i++] = hwAttrs->enablePin | PIN_GPIO_OUTPUT_EN | PIN_GPIO_HIGH | PIN_PUSHPULL | PIN_DRVSTR_MAX;
}
pinTable[i++] = PIN_TERMINATE;
object->hPins = PIN_open(&object->pinState, pinTable);
if (object->hPins == NULL)
{
Log_error0("Couldn't open pins for Sharp96x96");
return NULL;
}
SPI_Params spiParams;
SPI_Params_init(&spiParams);
spiParams.bitRate = 4000000;
object->hSpi = SPI_open(hwAttrs->spiIndex, &spiParams);
if (object->hSpi == NULL)
{
Log_error0("Couldn't open SPI driver for Sharp96x96");
PIN_close(object->hPins);
object->hPins = NULL;
return NULL;
}
// Init colors
object->displayColor.bg = ClrBlack;
object->displayColor.fg = ClrWhite;
// Exclusive access
Semaphore_Params semParams;
Semaphore_Params_init(&semParams);
semParams.mode = Semaphore_Mode_BINARY;
Semaphore_construct(&object->semLCD, 1, &semParams);
// Grab LCD
Semaphore_pend((Semaphore_Handle) & object->semLCD, ACCESS_TIMEOUT);
// Initialize the GrLib back-end transport
SharpGrLib_init(object->hSpi, object->hPins, hwAttrs->csPin);
object->g_sDisplay.lSize = sizeof(tDisplay);
object->g_sDisplay.pFxns = &g_sharpFxns;
object->g_sDisplay.pvDisplayData = object->displayBuffer;
object->g_sDisplay.usHeight = hwAttrs->pixelHeight;
object->g_sDisplay.usWidth = hwAttrs->pixelWidth;
object->g_sDisplay.pvDisplayData = hwAttrs->displayBuf;
// Graphics library init
GrContextInit(&object->g_sContext, &object->g_sDisplay, &g_sharpFxns);
// Graphics properties
GrContextForegroundSet(&object->g_sContext, object->displayColor.fg);
GrContextBackgroundSet(&object->g_sContext, object->displayColor.bg);
GrContextFontSet(&object->g_sContext, &g_sFontFixed6x8);
// Clear display
GrClearDisplay(&object->g_sContext);
GrFlush(&object->g_sContext);
// Release LCD
Semaphore_post((Semaphore_Handle) & object->semLCD);
return hDisplay;
}
/*!
* @fn DisplaySharp_clear
*
* @brief Clears the display
*
* @param hDisplay - pointer to Display_Config struct
*
* @return void
*/
void DisplaySharp_clear(Display_Handle hDisplay)
{
DisplaySharp_Object *object = (DisplaySharp_Object *)hDisplay->object;
if (object->hPins == NULL)
{
return;
}
// Grab LCD
if (Semaphore_pend((Semaphore_Handle) & object->semLCD, ACCESS_TIMEOUT))
{
GrClearDisplay(&object->g_sContext);
GrFlush(&object->g_sContext);
// Release LCD
Semaphore_post((Semaphore_Handle) & object->semLCD);
}
}
/*!
* @fn DisplaySharp_clearLines
*
* @brief Clears lines lineFrom-lineTo of the display, inclusive
*
* @param hDisplay - pointer to Display_Config struct
* @param lineFrom - line index (0 .. )
* @param lineTo - line index (0 .. )
*
* @return void
*/
void DisplaySharp_clearLines(Display_Handle hDisplay,
uint8_t lineFrom, uint8_t lineTo)
{
DisplaySharp_Object *object = (DisplaySharp_Object *)hDisplay->object;
if (lineTo <= lineFrom)
{
lineTo = lineFrom;
}
tRectangle rect = {
.sXMin = 0,
.sXMax = object->g_sContext.sClipRegion.sXMax,
.sYMin = lineFrom * object->g_sContext.pFont->ucHeight,
.sYMax = (lineTo + 1) * object->g_sContext.pFont->ucHeight - 1,
};
GrContextForegroundSet(&object->g_sContext, object->displayColor.bg);
GrRectFill(&object->g_sContext, &rect);
GrContextForegroundSet(&object->g_sContext, object->displayColor.fg);
GrFlush(&object->g_sContext);
}
/*!
* @fn DisplaySharp_put5
*
* @brief Write a text string to a specific line/column of the display
*
* @param hDisplay - pointer to Display_Config struct
* @param line - line index (0..)
* @param column - column index (0..)
* @param fmt - format string
* @param aN - optional format arguments
*
* @return void
*/
void DisplaySharp_put5(Display_Handle hDisplay, uint8_t line,
uint8_t column, uintptr_t fmt, uintptr_t a0,
uintptr_t a1, uintptr_t a2, uintptr_t a3, uintptr_t a4)
{
DisplaySharp_Object *object = (DisplaySharp_Object *)hDisplay->object;
uint8_t xp, yp, clearStartX, clearEndX;
char dispStr[23];
if (object->hPins == NULL)
{
return;
}
// Grab LCD
if (Semaphore_pend((Semaphore_Handle) & object->semLCD, ACCESS_TIMEOUT))
{
xp = column * object->g_sContext.pFont->ucMaxWidth + 1;
yp = line * object->g_sContext.pFont->ucHeight + 0;
clearStartX = clearEndX = xp;
switch (object->lineClearMode)
{
case DISPLAY_CLEAR_LEFT:
clearStartX = 0;
break;
case DISPLAY_CLEAR_RIGHT:
clearEndX = object->g_sContext.sClipRegion.sXMax;
break;
case DISPLAY_CLEAR_BOTH:
clearStartX = 0;
clearEndX = object->g_sContext.sClipRegion.sXMax;
break;
case DISPLAY_CLEAR_NONE:
default:
break;
}
if (clearStartX != clearEndX)
{
tRectangle rect = {
.sXMin = clearStartX,
.sXMax = clearEndX,
.sYMin = yp,
.sYMax = yp + object->g_sContext.pFont->ucHeight - 1,
};
GrContextForegroundSet(&object->g_sContext, object->displayColor.bg);
GrRectFill(&object->g_sContext, &rect);
GrContextForegroundSet(&object->g_sContext, object->displayColor.fg);
}
System_snprintf(dispStr, sizeof(dispStr), (xdc_CString)fmt, a0, a1, a2, a3, a4);
// Draw a text on the display
GrStringDraw(&object->g_sContext,
dispStr,
AUTO_STRING_LENGTH,
xp,
yp,
OPAQUE_TEXT);
GrFlush(&object->g_sContext);
// Release LCD
Semaphore_post((Semaphore_Handle) & object->semLCD);
}
}
/*!
* @fn DisplaySharp_close
*
* @brief Turns off the display and releases the LCD control pins
*
* @param hDisplay - pointer to Display_Config struct
*
* @return void
*/
void DisplaySharp_close(Display_Handle hDisplay)
{
DisplaySharp_HWAttrs *hwAttrs = (DisplaySharp_HWAttrs *)hDisplay->hwAttrs;
DisplaySharp_Object *object = (DisplaySharp_Object *)hDisplay->object;
if (object->hPins == NULL)
{
return;
}
// Grab LCD
if (Semaphore_pend((Semaphore_Handle) & object->semLCD, ACCESS_TIMEOUT))
{
// Turn off the display
PIN_setOutputValue(object->hPins, hwAttrs->enablePin, 0);
// Release resources
PIN_close(object->hPins);
object->hPins = NULL;
SPI_close(object->hSpi);
object->hSpi = NULL;
// Deconfigure GrLib back-end
SharpGrLib_init(NULL, NULL, PIN_UNASSIGNED);
// Release LCD
Semaphore_post((Semaphore_Handle) & object->semLCD);
}
}
/*!
* @fn DisplaySharp_control
*
* @brief Function for setting control parameters of the Display driver
* after it has been opened.
*
* @param hDisplay - pointer to Display_Config struct
* @param cmd - command to execute, supported commands are:
* | Command | Description |
* |------------------------------- |-------------------------|
* | ::DISPLAY_CMD_TRANSPORT_CLOSE | Close SPI but leave control pins |
* | ::DISPLAY_CMD_TRANSPORT_OPEN | Re-open SPI driver |
* @param arg - argument to the command
*
* @return ::DISPLAY_STATUS_SUCCESS if success, or error code if error.
*/
int DisplaySharp_control(Display_Handle hDisplay, unsigned int cmd, void *arg)
{
DisplaySharp_HWAttrs *hwAttrs = (DisplaySharp_HWAttrs *)hDisplay->hwAttrs;
DisplaySharp_Object *object = (DisplaySharp_Object *)hDisplay->object;
/* Initialize return value */
int ret = DISPLAY_STATUS_ERROR;
/* Perform command */
switch(cmd)
{
case DISPLAY_CMD_TRANSPORT_CLOSE:
// Grab LCD
if (Semaphore_pend((Semaphore_Handle) & object->semLCD, ACCESS_TIMEOUT))
{
if (object->hSpi)
{
// Close SPI and tell back-end there is no SPI
SPI_close(object->hSpi);
SharpGrLib_init(NULL, object->hPins, hwAttrs->csPin);
object->hSpi = NULL;
ret = DISPLAY_STATUS_SUCCESS;
}
// Release LCD
Semaphore_post((Semaphore_Handle) & object->semLCD);
}
break;
case DISPLAY_CMD_TRANSPORT_OPEN:
// Grab LCD
if (Semaphore_pend((Semaphore_Handle) & object->semLCD, ACCESS_TIMEOUT))
{
if (NULL == object->hSpi)
{
// Re-open SPI and re-init back-end
SPI_Params spiParams;
SPI_Params_init(&spiParams);
spiParams.bitRate = 4000000;
object->hSpi = SPI_open(hwAttrs->spiIndex, &spiParams);
SharpGrLib_init(object->hSpi, object->hPins, hwAttrs->csPin);
ret = DISPLAY_STATUS_SUCCESS;
}
// Release LCD
Semaphore_post((Semaphore_Handle) & object->semLCD);
}
break;
case DISPLAYSHARP_CMD_SET_COLORS:
// Grab LCD
if (Semaphore_pend((Semaphore_Handle) & object->semLCD, ACCESS_TIMEOUT))
{
object->displayColor = *(DisplaySharpColor_t *)arg;
GrContextForegroundSet(&object->g_sContext, object->displayColor.fg);
GrContextBackgroundSet(&object->g_sContext, object->displayColor.bg);
// Release LCD
Semaphore_post((Semaphore_Handle) & object->semLCD);
// Return success
ret = DISPLAY_STATUS_SUCCESS;
}
break;
default:
/* The command is not defined */
ret = SPI_STATUS_UNDEFINEDCMD;
break;
}
return ret;
}
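/* Usage sketch (hypothetical handle name; the colors shown match the defaults set in
 * DisplaySharp_open above):
 *
 *     DisplaySharpColor_t colors = { .fg = ClrWhite, .bg = ClrBlack };
 *     DisplaySharp_control(hDisplay, DISPLAYSHARP_CMD_SET_COLORS, &colors);
 *
 * DISPLAY_CMD_TRANSPORT_CLOSE / DISPLAY_CMD_TRANSPORT_OPEN ignore the arg parameter and can
 * be used to release and later re-open the SPI bus, e.g. around access to another device on
 * a shared bus.
 */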
/*!
* @fn DisplaySharp_getType
*
* @brief Returns type of transport
*
* @return Display type define LCD
*/
unsigned int DisplaySharp_getType(void)
{
return Display_Type_LCD | Display_Type_GRLIB;
}
|
zeyuanxy/LeetCode
|
vol3/word-ladder-ii/word-ladder-ii.cpp
|
class Solution {
public:
void bfs(string start, string end, unordered_set<string> &dict, unordered_map<string, int> &depth) {
if(start == end)
return;
queue<string> q;
q.push(start);
depth[start] = 0;
while(!q.empty()) {
string s = q.front();
q.pop();
for(int i = 0; i < s.length(); ++ i)
for(int j = 0; j < 26; ++ j) {
string t = s;
t[i] = 'a' + j;
if((t == end || dict.find(t) != dict.end()) && depth.find(t) == depth.end()) {
q.push(t);
depth[t] = depth[s] + 1;
}
}
}
}
void dfs(string start, string end, unordered_set<string> &dict, unordered_map<string, int> &depth, vector<string> &path, vector<vector<string> > &ret) {
if(start == end) {
ret.push_back(path);
return;
}
int goal = depth[start] - 1;
for(int i = 0; i < start.length(); ++ i)
for(int j = 0; j < 26; ++ j) {
string t = start;
t[i] = 'a' + j;
if(depth.find(t) != depth.end() && depth[t] == goal) {
path.push_back(t);
dfs(t, end, dict, depth, path, ret);
path.pop_back();
}
}
}
vector<vector<string>> findLadders(string start, string end, unordered_set<string> &dict) {
vector<vector<string> > ret;
if(start.length() == 0 || end.length() == 0)
return ret;
unordered_map<string, int> depth;
bfs(start, end, dict, depth);
vector<string> path;
path.push_back(end);
dfs(end, start, dict, depth, path, ret);
for(auto & i : ret)
reverse(i.begin(), i.end());
return ret;
}
};
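// Usage sketch (standard word-ladder example; this driver snippet is not part of the
// LeetCode submission itself):
//   unordered_set<string> dict = {"hot", "dot", "dog", "lot", "log"};
//   auto ladders = Solution().findLadders("hit", "cog", dict);
//   // Two shortest ladders come back: hit->hot->dot->dog->cog and hit->hot->lot->log->cog.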
|
phantomDai/CMTJmcr
|
mcr-test/src/main/java/edu/tamu/aser/tests/ABPushPop/ProgLoader.java
|
package edu.tamu.aser.tests.ABPushPop; /*************************************************************/
/* (C) IBM Corporation (2007), ALL RIGHTS RESERVED */
/* */
/* <NAME> 30/1/2007 Class created */
/*************************************************************/
// Note: ConTest only version - not part of CMGTT
import java.util.*;
public class ProgLoader {
class TestedClass {
String name;
Class cls;
int howMany;
}
protected Vector<TestedClass> testProgConfig;
protected Vector<Thread> testProg;
protected Vector<Object> testObjs;
protected boolean shouldStop = false; // successors may force stop.
// prevent the situation where shouldStop is set but join() keeps us from stopping promptly
protected int joinTimeout = 5000;
protected ProgLoader() {
/* empty constructor */
}
// Does all at once: collects classes, loads objects and runs them
protected ProgLoader(String args[]) throws AnyException {
collectTestProgThreads(args);
createRunnable();
startTestProg();
}
protected static String[] splitCommandLine(String command) {
return command.split("\\s+", 0);
}
protected static void printArgs(String args[]) {
for(int i = 0; i<args.length; ++i)
System.out.println("args["+i+"] = "+args[i]);
}
protected void collectTestProgThreads(String args[]) throws AnyException {
// Successors may replace this function to generate the test program their own way.
TestedClass tstCls;
Class cls;
String fileName;
int num;
Names.setBaseDir(System.getProperty("user.dir"));
testProgConfig = new Vector<TestedClass>(args.length);
for (int i=0; i<args.length; ++i) {
try {
num = Integer.parseInt(args[i]);
i++;
} catch (NumberFormatException ex) {
num = 1;
}
try {
cls = Class.forName(args[i]);
} catch (Throwable ex) {
String msg = "ProgLoader.collectTestProgThreads: unable to load tested thread class \""+
args[i]+"\": "+ex.getMessage();
System.err.println(msg);
throw new AnyException(msg, ex);
}
tstCls = new TestedClass();
tstCls.name = args[i];
tstCls.cls = cls;
tstCls.howMany = num;
testProgConfig.add(tstCls);
} /* for */
if (testProgConfig.size() == 0) {
String msg = "ProgLoader: No test threads where specified !";
System.err.println(msg);
throw new AnyException(msg);
}
}
protected void createRunnable() throws AnyException {
int i,j;
TestedClass tstCls;
Object obj;
Thread trd;
String threadName;
int num = testProgConfig.size();
if (testObjs == null)
testObjs = new Vector<Object>(num);
else
testObjs.clear();
if (testProg == null)
testProg = new Vector<Thread>(num);
else
testProg.clear();
for (i=0; i<num; ++i) {
tstCls = testProgConfig.get(i);
for(j=0; j<tstCls.howMany; ++j) {
try {
obj = tstCls.cls.newInstance();
} catch(Exception ex) {
String msg = "ProgLoader.createRunnable: unable to create instance of: "+
tstCls.name;
System.out.println(msg);
throw new AnyException(msg, ex);
}
testObjs.add(obj);
// may create objects of the tested program which are not threads:
if (obj instanceof Thread)
trd = (Thread) obj;
else
continue;
setThreadName(trd, j, tstCls.howMany);
testProg.add(trd);
}
}
}
/**Give a name to the newly created thread.
* Successors may replace to give other names to threads.
* @param it the thread we want to name
* @param index the index of the thread among the threads in its class
* @param numOfSameClass the total number of threads of the same class as this thread
* @return the new name
*/
protected String setThreadName(Thread it, int index, int numOfSameClass) {
String threadName;
if (numOfSameClass > 1)
threadName = Names.threadName(it, index);
else
threadName = Names.threadName(it);
it.setName(threadName);
return threadName;
}
protected void startTestProg() {
int num = testProg.size();
for (int i=0; i<num; ++i)
testProg.get(i).start();
}
protected void waitTestProgThreads() {
// wait to all treads in testProg to finish:
int i, num = testProg.size();
int numRunning = num; // at the beginning
while (numRunning > 0) {
numRunning = 0;
for (i=0; i<num; ++i) {
if (shouldStop)
return;
if (testProg.get(i).isAlive()) {
numRunning++;
try {
testProg.get(i).join(joinTimeout);
}
catch (InterruptedException ex) { /* continue */ }
}
}
}
}
}
|
python20180319howmework/homework
|
zhangqi/20180328/h4.py
|
# 4. Define a function that does the following:
#    1) Takes two integers as input, e.g. 3 and 5
#    2) Computes 3 + 33 + 333 + 3333 + 33333 (stopping after 5 terms)
def sum1(m, n):
    sumnum = 0
    for i in range(1, n + 1):
        sumnum = sumnum + int(str(m) * i)
    print("The result is {}".format(sumnum))
    return sumnum

a, b = eval(input("Please enter two integers, e.g. 3, 5: "))
print(sum1(a, b))
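# Worked example (values chosen for illustration): sum1(3, 5) adds
# 3 + 33 + 333 + 3333 + 33333 and returns 37035.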
|
yutingzou/tp
|
src/test/java/seedu/address/model/schedule/ScheduleTrackerTest.java
|
package seedu.address.model.schedule;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static seedu.address.testutil.Assert.assertThrows;
import static seedu.address.testutil.TypicalSchedules.MATHS_HOMEWORK_SCHEDULE;
import static seedu.address.testutil.TypicalSchedules.getTypicalScheduleTracker;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.junit.jupiter.api.Test;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import seedu.address.model.schedule.exceptions.DuplicateScheduleException;
public class ScheduleTrackerTest {
private final ScheduleTracker scheduleTracker = new ScheduleTracker();
@Test
public void constructor() {
assertEquals(Collections.emptyList(), scheduleTracker.getScheduleList());
}
@Test
public void resetData_null_throwsNullPointerException() {
assertThrows(NullPointerException.class, () -> scheduleTracker.resetData(null));
}
@Test
public void resetData_withValidReadOnlyScheduleTracker_replacesData() {
ScheduleTracker newData = getTypicalScheduleTracker();
scheduleTracker.resetData(newData);
assertEquals(newData, scheduleTracker);
}
@Test
public void resetData_withDuplicateSchedules_throwsDuplicateScheduleException() {
List<Schedule> newSchedules = Arrays.asList(MATHS_HOMEWORK_SCHEDULE, MATHS_HOMEWORK_SCHEDULE);
ScheduleTrackerStub newData = new ScheduleTrackerStub(newSchedules);
assertThrows(DuplicateScheduleException.class, () -> scheduleTracker.resetData(newData));
}
@Test
public void hasSchedule_nullSchedule_throwsNullPointerException() {
assertThrows(NullPointerException.class, () -> scheduleTracker.hasSchedule(null));
}
@Test
public void hasSchedule_scheduleNotInScheduleTracker_returnsFalse() {
assertFalse(scheduleTracker.hasSchedule(MATHS_HOMEWORK_SCHEDULE));
}
@Test
public void hasSchedule_scheduleInScheduleTracker_returnsTrue() {
scheduleTracker.addSchedule(MATHS_HOMEWORK_SCHEDULE);
assertTrue(scheduleTracker.hasSchedule(MATHS_HOMEWORK_SCHEDULE));
}
@Test
public void getScheduleList_modifyList_throwsUnsupportedOperationException() {
assertThrows(UnsupportedOperationException.class, () -> scheduleTracker.getScheduleList().remove(0));
}
/**
* A stub ReadOnlyScheduleTracker whose schedule list can violate interface constraints.
*/
private static class ScheduleTrackerStub implements ReadOnlyScheduleTracker {
private final ObservableList<Schedule> schedules = FXCollections.observableArrayList();
ScheduleTrackerStub(Collection<Schedule> schedules) {
this.schedules.setAll(schedules);
}
@Override
public ObservableList<Schedule> getScheduleList() {
return schedules;
}
}
}
|
dpercy/evergreen
|
vendor/github.com/mongodb/jasper/vendor/github.com/tychoish/mongorpc/mongowire/wire_get_more.go
|
<reponame>dpercy/evergreen
package mongowire
import "github.com/pkg/errors"
func NewGetMore(ns string, number int32, cursorID int64) Message {
return &getMoreMessage{
header: MessageHeader{
RequestID: 19,
OpCode: OP_GET_MORE,
},
Namespace: ns,
NReturn: number,
CursorId: cursorID,
}
}
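// Usage sketch (hypothetical namespace and cursor id): build and serialize an OP_GET_MORE
// message that asks for up to 100 more documents from an open cursor:
//
//	msg := NewGetMore("mydb.mycollection", 100, cursorID)
//	buf := msg.Serialize()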
func (m *getMoreMessage) HasResponse() bool { return true }
func (m *getMoreMessage) Header() MessageHeader { return m.header }
func (m *getMoreMessage) Scope() *OpScope {
return &OpScope{
Type: m.header.OpCode,
Context: m.Namespace,
}
}
func (m *getMoreMessage) Serialize() []byte {
size := 16 /* header */ + 16 /* query header */
size += len(m.Namespace) + 1
m.header.Size = int32(size)
buf := make([]byte, size)
m.header.WriteInto(buf)
writeInt32(0, buf, 16)
loc := 20
writeCString(m.Namespace, buf, &loc)
writeInt32(m.NReturn, buf, loc)
loc += 4
writeInt64(m.CursorId, buf, loc)
loc += 8
return buf
}
func (h *MessageHeader) parseGetMoreMessage(buf []byte) (Message, error) {
var (
err error
loc int
)
if len(buf) < 4 {
return nil, errors.New("invalid get more message -- message must have length of at least 4 bytes")
}
qm := &getMoreMessage{
header: *h,
}
qm.Reserved = readInt32(buf)
loc += 4
qm.Namespace, err = readCString(buf[loc:])
if err != nil {
return nil, errors.WithStack(err)
}
loc += len(qm.Namespace) + 1
if len(buf) < loc+12 {
return nil, errors.New("invalid get more message -- message length is too short")
}
qm.NReturn = readInt32(buf[loc:])
loc += 4
qm.CursorId = readInt64(buf[loc:])
loc += 8 // nolint
return qm, nil
}
|
zdivozzo/react-bootstrap
|
test/InputGroupSpec.js
|
import React from 'react';
import { mount } from 'enzyme';
import InputGroup from '../src/InputGroup';
describe('<InputGroup>', () => {
it('Should have div as default component', () => {
const wrapper = mount(<InputGroup />);
expect(wrapper.find('div').length).to.equal(1);
});
});
|
reo-ar/airline
|
airline-web/app/controllers/HistoryUtil.scala
|
<gh_stars>10-100
package controllers
import java.util
import java.util.concurrent.TimeUnit
import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}
import com.patson.data.{ConsumptionHistorySource, CycleSource}
import com.patson.model.{PassengerType, _}
import models.{LinkHistory, RelatedLink}
import scala.collection.mutable.ListBuffer
object HistoryUtil {
var loadedCycle = 0
//val simpleCache: LoadingCache[Int, Option[Airport]] = CacheBuilder.newBuilder.maximumSize(2000).expireAfterAccess(10, TimeUnit.MINUTES).build(new SimpleLoader())
var consumptionCache : java.util.Map[Int, LoadingCache[Int, Map[Route, (PassengerType.Value, Int)]]] = new java.util.HashMap[Int, LoadingCache[Int, Map[Route, (PassengerType.Value, Int)]]]() //key is cycle
/**
* Group the related links base on traverse ordering
*
* For example the fromAirport is X, toAirport is Y, related links are:
* [ U1 -> V, U2 -> V, V -> W, W -> X, X -> Y, Y -> Z1, Y -> Z2]
*
* Then it will return
*
* [[U1 -> V, U2 -> V], [V -> W] , [W -> X] , [X -> Y], [Y -> Z1, Y -> Z2]]
*
*
* @param fromAirport
* @param toAirport
* @param relatedLinksOriginal
* @return
*/
def groupLinksByStep(fromAirport : Airport, toAirport : Airport, selectedAirline : Airline, relatedLinksOriginal: List[RelatedLink]) : List[List[RelatedLink]] = {
val groupedLinks = ListBuffer[List[RelatedLink]]()
var relatedLinks = ListBuffer[RelatedLink]()
relatedLinks.appendAll(relatedLinksOriginal)
//first find the links matching from/to
relatedLinks.partition(link => link.fromAirport.id == fromAirport.id && link.toAirport.id == toAirport.id && link.airline.id == selectedAirline.id) match {
case(originalLinks, remainingLinks) =>
groupedLinks.append(originalLinks.toList)
relatedLinks = remainingLinks
}
//traverse backwards
var shouldContinue = true
var backwardsAirportIds = List(fromAirport.id)
while (shouldContinue) {
relatedLinks.partition(link => backwardsAirportIds.contains(link.toAirport.id)) match {
case (matchingLinks, remainingLinks) =>
relatedLinks = remainingLinks
if (matchingLinks.isEmpty) {
shouldContinue = false
} else {
groupedLinks.prepend(matchingLinks.toList)
backwardsAirportIds = matchingLinks.map(_.fromAirport.id).toList
}
}
}
//traverse forward
shouldContinue = true
var forwardsAirportIds = List(toAirport.id)
while (shouldContinue) {
relatedLinks.partition(link => forwardsAirportIds.contains(link.fromAirport.id)) match {
case (matchingLinks, remainingLinks) =>
relatedLinks = remainingLinks
if (matchingLinks.isEmpty) {
shouldContinue = false
} else {
groupedLinks.append(matchingLinks.toList)
forwardsAirportIds = matchingLinks.map(_.toAirport.id).toList
}
}
}
groupedLinks.toList
}
def loadConsumptionByLink(link : Link, cycleDelta : Int = 0, selfOnly : Boolean = false) : LinkHistory = {
val relatedConsumptions = loadRelatedRoutesFromCache(link.id, cycleDelta)
val airlineId = link.airline.id
println("Finished loading related consumption for " + link)
val relatedForwardLinks : List[RelatedLink] = computeRelatedLinks(relatedConsumptions.filter {
case(route, _) => route.links.find { linkConsideration => !linkConsideration.inverted && linkConsideration.link.id == link.id}.isDefined
}.toList, airlineId, selfOnly
)
val groupedForwardLinks = groupLinksByStep(link.from, link.to, link.airline, relatedForwardLinks)
val relatedReverseLinks : List[RelatedLink] = computeRelatedLinks(relatedConsumptions.filter {
case(route, _) => route.links.find { linkConsideration => linkConsideration.inverted && linkConsideration.link.id == link.id}.isDefined
}.toList, airlineId, selfOnly
)
val groupedReverseLinks = groupLinksByStep(link.to, link.from, link.airline, relatedReverseLinks)
LinkHistory(0, groupedForwardLinks, groupedReverseLinks)
}
def loadConsumptionByAirport(airportId : Int) : Map[Airport, Int] = {
val linksWithPassengers = ConsumptionHistorySource.loadConsumptionsByAirport(airportId)
//find all the "other" airport and sum up passenger count
val passengersByOtherAirport = scala.collection.mutable.Map[Airport, Int]()
linksWithPassengers.foreach {
case (link, passengers) => {
val otherAirport = if (link.from.id == airportId) {
link.to
} else {
link.from
}
val sum = passengersByOtherAirport.getOrElse(otherAirport, 0)
passengersByOtherAirport.put(otherAirport, sum + passengers)
}
}
passengersByOtherAirport.toMap
}
private def computeRelatedLinks(relatedConsumption : List[(Route, (PassengerType.Value, Int))], airlineId : Int, selfOnly : Boolean) : List[RelatedLink] = {
val relatedLinkConsumptions : List[(PassengerType.Value, Int, LinkConsideration)] = relatedConsumption.flatMap {
case(route, (passengerType, passengerCount)) => route.links.map { (passengerType, passengerCount, _) }.filter {
case(_, _, link) => !selfOnly || link.link.airline.id == airlineId
}
} //flat map by expanding the route to the links of the route
//now group the link by the passenger type and the link itself
val groupedLinkConsumptions = relatedLinkConsumptions.groupBy { case(passengerType, _, linkConsideration) => (linkConsideration.link, linkConsideration.inverted, passengerType) }
//fold the value of the grouped map, we only care about passenger count now
val computedConsumedLinks = groupedLinkConsumptions.view.mapValues{
_.foldLeft(0)( (totalPassengerCount, entry) => totalPassengerCount + entry._2)
}.toMap
//now it should have a nice map of
//key: Link, inverted, passengerType
//value: number of passengers
computedConsumedLinks.map {
case (key, value) => {
val link = key._1
val inverted = key._2
val passengerType = key._3
val passengerCount = value
if (!inverted) {
new RelatedLink(link.id, link.from, link.to, link.airline, passengerCount)
} else {
new RelatedLink(link.id, link.to, link.from, link.airline, passengerCount)
}
}
}.toList
}
private def loadRelatedRoutesFromCache(linkId : Int, cycleDelta : Int) : Map[Route, (PassengerType.Value, Int)] = {
val currentCycle = CycleSource.loadCycle()
val targetCycle = currentCycle + cycleDelta
if (targetCycle > currentCycle || targetCycle < currentCycle - ConsumptionHistorySource.MAX_CONSUMPTION_HISTORY_WEEK) {
return Map.empty
}
var consumptionCacheOfCycle: LoadingCache[Int, Map[Route, (PassengerType.Value, Int)]] = null
synchronized {
if (currentCycle != loadedCycle) {
purgeExpiredCache(currentCycle - ConsumptionHistorySource.MAX_CONSUMPTION_HISTORY_WEEK)
loadedCycle = currentCycle
}
consumptionCacheOfCycle = consumptionCache.get(targetCycle)
if (consumptionCacheOfCycle == null) {
val cache: LoadingCache[Int, Map[Route, (PassengerType.Value, Int)]] = CacheBuilder.newBuilder.maximumSize(500).expireAfterAccess(10, TimeUnit.MINUTES).build(new SimpleLoader(targetCycle))
consumptionCache.put(targetCycle, cache)
consumptionCacheOfCycle = cache
}
}
consumptionCacheOfCycle.get(linkId)
}
class SimpleLoader(cycle : Int) extends CacheLoader[Int, Map[Route, (PassengerType.Value, Int)]] {
override def load(linkId: Int) = {
println(s"Updating link history cache on cycle $cycle for link " + linkId )
ConsumptionHistorySource.loadRelatedConsumptionByLinkId(linkId, cycle)
}
}
private[this] val purgeExpiredCache = (cutoff : Int) => {
val cycleIterator = consumptionCache.keySet().iterator()
val removingCycles = new util.HashSet[Int]()
while (cycleIterator.hasNext()) {
val cycle = cycleIterator.next()
if (cycle < cutoff) {
removingCycles.add(cycle)
consumptionCache.get(cycle).invalidateAll()
}
}
consumptionCache.keySet().removeAll(removingCycles)
}
}
|
wisehackermonkey/magic
|
gcr/gcrRoute.c
|
/* gcrRoute.c -
*
* The greedy router: Top level procedures.
*
* *********************************************************************
* * Copyright (C) 1985, 1990 Regents of the University of California. *
* * Permission to use, copy, modify, and distribute this *
* * software and its documentation for any purpose and without *
* * fee is hereby granted, provided that the above copyright *
* * notice appear in all copies. The University of California *
* * makes no representations about the suitability of this *
* * software for any purpose. It is provided "as is" without *
* * express or implied warranty. Export of this software outside *
* * of the United States of America may require an export license. *
* *********************************************************************
*/
#ifndef lint
static char rcsid[] __attribute__ ((unused)) = "$Header: /usr/cvsroot/magic-8.0/gcr/gcrRoute.c,v 1.1.1.1 2008/02/03 20:43:50 tim Exp $";
#endif /* not lint */
#include <stdio.h>
#include <sys/types.h>
#include <sys/times.h>
#include <string.h>
#include "utils/magic.h"
#include "utils/geometry.h"
#include "gcr/gcr.h"
#include "utils/signals.h"
#include "utils/malloc.h"
#include "utils/styles.h"
int gcrRouterErrors;
extern int gcrStandalone;
/* Forward declarations */
void gcrRouteCol();
void gcrExtend();
/*
* ----------------------------------------------------------------------------
*
* GCRroute --
*
* Top level for the greedy channel router.
* Routes are already set up channel routing problem.
*
* Results:
* The return value is the number of errors found while routing
* this channel.
*
* Side effects:
* Modifies flag bits in the channel to show the presence of routing.
* Calls RtrChannelError when there are errors.
*
* ----------------------------------------------------------------------------
*/
int
GCRroute(ch)
GCRChannel *ch;
{
int i, density, netId;
char mesg[256];
GCRColEl *col;
GCRPin *pin;
GCRNet *net;
/* Try river-routing across the channel if possible */
gcrRouterErrors = 0;
if (gcrRiverRoute(ch))
return (gcrRouterErrors);
gcrBuildNets(ch);
if (ch->gcr_nets == (GCRNet *) NULL)
return (gcrRouterErrors);
gcrSetEndDist(ch);
density = gcrDensity(ch);
/* gcrPrDensity(ch, density); /* Debugging */
if (density > ch->gcr_width)
{
(void) sprintf(mesg, "Density (%d) > channel size (%d)",
density, ch->gcr_width);
RtrChannelError(ch, ch->gcr_width, ch->gcr_length, mesg, NULL);
}
gcrInitCollapse(ch->gcr_width + 2);
gcrSetFlags(ch);
/* Process the first column */
gcrInitCol(ch, ch->gcr_lPins);
gcrExtend(ch, 0);
gcrPrintCol(ch, 0, GcrShowResult);
/* Process subsequent columns */
for (i = 1; i <= ch->gcr_length; i++)
{
if (SigInterruptPending)
goto bottom;
gcrRouteCol(ch, i);
}
/* Process errors at the end */
col = ch->gcr_lCol;
pin = ch->gcr_rPins;
for (i = 1; i <= ch->gcr_width; i++, col++, pin++)
if (col->gcr_h != pin->gcr_pId)
{
netId = col->gcr_h ? col->gcr_h->gcr_Id : pin->gcr_pId->gcr_Id;
RtrChannelError(ch, ch->gcr_length, i,
"Can't make end connection", netId);
gcrRouterErrors++;
}
bottom:
/* For debugging: print channel on screen */
gcrDumpResult(ch, GcrShowEnd);
/*
* We have to free up the nets here, since callers may re-arrange
* the channel and cause the net structure to become invalid
* anyway.
*/
for (net = ch->gcr_nets; net; net = net->gcr_next)
freeMagic((char *) net);
ch->gcr_nets = NULL;
return (gcrRouterErrors);
}
/*
* ----------------------------------------------------------------------------
*
* gcrRouteCol --
*
* Route the given column in the channel.
*
* Results:
* None.
*
* Side effects:
* Sets flags in the channel structure to show where routing
* is to be placed.
*
* ----------------------------------------------------------------------------
*/
void
gcrRouteCol(ch, indx)
GCRChannel *ch;
int indx; /* Index of column being routed. */
{
GCRNet **gcrClassify(), **list;
GCRColEl *col;
int count;
/* Make feasible top and bottom connections */
gcrCheckCol(ch, indx, "Start of gcrRouteCol");
gcrFeasible(ch, indx);
gcrCheckCol(ch, indx, "After feasible connections");
/* Here I should vacate terminating tracks */
if (GCRNearEnd(ch, indx) &&
(GCREndDist < ch->gcr_length || !GCRNearEnd(ch, indx - 1)))
gcrMarkWanted(ch);
/* Collapse split nets in the pattern that frees the most tracks */
gcrCollapse(&ch->gcr_lCol, ch->gcr_width, 1, ch->gcr_width, 0);
gcrPickBest(ch);
gcrCheckCol(ch, indx, "After collapse");
col = ch->gcr_lCol;
/* Reduce the range of split nets */
gcrReduceRange(col, ch->gcr_width);
gcrCheckCol(ch, indx, "After reducing range of split nets");
/* Vacate obstructed tracks. Split to make multiple end connections */
gcrVacate(ch, indx);
/* Raise rising and lower falling nets */
list = gcrClassify(ch, &count);
gcrCheckCol(ch, indx, "After classifying nets");
gcrMakeRuns(ch, indx, list, count, TRUE);
gcrCheckCol(ch, indx, "After making rising/falling runs");
gcrCheckCol(ch, indx, "After vacating");
if (GCRNearEnd(ch, indx))
{
gcrUncollapse(ch, &ch->gcr_lCol, ch->gcr_width, 1, ch->gcr_width, 0);
gcrPickBest(ch);
}
gcrCheckCol(ch, indx, "After uncollapse");
/* Extend active tracks to the next column. Place contacts */
gcrExtend(ch, indx);
gcrCheckCol(ch, indx, "After widen and extend");
gcrPrintCol(ch, indx, GcrShowResult);
}
/*
* ----------------------------------------------------------------------------
*
* gcrExtend --
*
* Extend dangling wires to the next column.
* Don't extend off the end of the channel if the wrong connection
* would be made.
*
* Results:
* None.
*
* Side effects:
* Sets bits in the result array for the channel. Where there
* are blockages in the next column, adds contacts to blocked
* tracks for a layer switch. Clears the vertical wiring for
* the new column.
*
* ----------------------------------------------------------------------------
*/
void
gcrExtend(ch, currentCol)
GCRChannel *ch; /* Channel being routed */
int currentCol; /* Column that has just been completed */
{
short *res = ch->gcr_result[currentCol];
GCRColEl *col = ch->gcr_lCol;
short *prev = (short *) NULL, *next = (short *) NULL;
bool hasNext, hasPrev;
int i;
ASSERT(ch->gcr_result, "gcrExtend: ");
if (currentCol > 0) prev = ch->gcr_result[currentCol - 1];
if (currentCol <= ch->gcr_length) next = ch->gcr_result[currentCol + 1];
/*
* Consider each track, including the pseudo-track at the
* bottom (0) of the channel, but not the one at the top
* (ch->gcr_width).
*/
for (i = 0; i <= ch->gcr_width; i++)
{
if (col[1].gcr_v == col->gcr_v && col->gcr_v)
{
/* Track extends upwards */
res[0] |= GCRU;
if (i == ch->gcr_width) res[1] |= GCRU;
if (col->gcr_flags & GCRCC) res[0] |= GCRX;
if (col[1].gcr_flags & GCRCC) res[1] |= GCRX;
}
/* Don't process track if not occupied by a real net */
hasPrev = prev && (*prev & GCRR);
if (col->gcr_h == (GCRNet *) NULL)
{
if (currentCol == 0) res[0] &= ~GCRR;
if (hasPrev) res[0] |= GCRX;
col->gcr_v = 0;
}
else
{
/* Extend net if split or another pin exists in this channel */
hasNext = col->gcr_hi != EMPTY
|| col->gcr_lo != EMPTY
|| GCRPin1st(col->gcr_h);
if (col->gcr_v == col->gcr_h && (hasPrev || hasNext))
res[0] |= GCRX;
/* Clear vertical wiring */
col->gcr_v = 0;
/* Terminate unsplit nets with no pins after the current column */
if (!hasNext) col->gcr_h = (GCRNet *) NULL;
else if (col->gcr_flags & GCRTE)
{
/*
* If the track should be extended but can't due to a
* hard obstacle, then print a message and terminate it.
*/
RtrChannelError(ch, currentCol, i,
"Can't extend track through obstacle", col->gcr_h->gcr_Id);
gcrRouterErrors++;
col->gcr_h = (GCRNet *) NULL;
}
else if (currentCol == ch->gcr_length && i
&& ch->gcr_rPins[i].gcr_pId == (GCRNet *) NULL)
{
/* If track about to make a bad connection, don't extend */
RtrChannelError(ch, currentCol, i,
"Can't extend track to bad connection", col->gcr_h->gcr_Id);
col->gcr_h = (GCRNet *) NULL;
gcrRouterErrors++;
}
else
{
/* Extend the net into the next column */
res[0] |= GCRR;
if (currentCol == ch->gcr_length) *next |= GCRR;
}
/* Contact in next col if GCRTC */
if (*next & GCRTC) col->gcr_v = col->gcr_h;
}
if (prev) prev++;
if (next) col->gcr_flags = *next++;
	else col->gcr_flags = 0;
res++;
col++;
}
col->gcr_v = 0;
col->gcr_flags = 0;
}
|
makhatadze/admin_panel
|
public/js/node_modules_ant-design_icons_es_icons_TrademarkCircleFilled_js.js
|
(self["webpackChunk"] = self["webpackChunk"] || []).push([["node_modules_ant-design_icons_es_icons_TrademarkCircleFilled_js"],{
/***/ "./node_modules/@ant-design/icons-svg/es/asn/TrademarkCircleFilled.js":
/*!****************************************************************************!*\
!*** ./node_modules/@ant-design/icons-svg/es/asn/TrademarkCircleFilled.js ***!
\****************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
// This icon file is generated automatically.
var TrademarkCircleFilled = { "icon": { "tag": "svg", "attrs": { "viewBox": "64 64 896 896", "focusable": "false" }, "children": [{ "tag": "path", "attrs": { "d": "M512 64C264.6 64 64 264.6 64 512s200.6 448 448 448 448-200.6 448-448S759.4 64 512 64zm164.7 660.2c-1.1.5-2.3.8-3.5.8h-62c-3.1 0-5.9-1.8-7.2-4.6l-74.6-159.2h-88.7V717c0 4.4-3.6 8-8 8H378c-4.4 0-8-3.6-8-8V307c0-4.4 3.6-8 8-8h155.6c98.8 0 144.2 59.9 144.2 131.1 0 70.2-43.6 106.4-78.4 119.2l80.8 164.2c2.1 3.9.4 8.7-3.5 10.7zM523.9 357h-83.4v148H522c53 0 82.8-25.6 82.8-72.4 0-50.3-32.9-75.6-80.9-75.6z" } }] }, "name": "trademark-circle", "theme": "filled" };
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (TrademarkCircleFilled);
/***/ }),
/***/ "./node_modules/@ant-design/icons/es/icons/TrademarkCircleFilled.js":
/*!**************************************************************************!*\
!*** ./node_modules/@ant-design/icons/es/icons/TrademarkCircleFilled.js ***!
\**************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! react */ "./node_modules/react/index.js");
/* harmony import */ var _ant_design_icons_svg_es_asn_TrademarkCircleFilled__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! @ant-design/icons-svg/es/asn/TrademarkCircleFilled */ "./node_modules/@ant-design/icons-svg/es/asn/TrademarkCircleFilled.js");
/* harmony import */ var _components_AntdIcon__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../components/AntdIcon */ "./node_modules/@ant-design/icons/es/components/AntdIcon.js");
// GENERATE BY ./scripts/generate.ts
// DO NOT EDIT IT MANUALLY
var TrademarkCircleFilled = function TrademarkCircleFilled(props, ref) {
return /*#__PURE__*/react__WEBPACK_IMPORTED_MODULE_0__.createElement(_components_AntdIcon__WEBPACK_IMPORTED_MODULE_1__.default, Object.assign({}, props, {
ref: ref,
icon: _ant_design_icons_svg_es_asn_TrademarkCircleFilled__WEBPACK_IMPORTED_MODULE_2__.default
}));
};
TrademarkCircleFilled.displayName = 'TrademarkCircleFilled';
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (/*#__PURE__*/react__WEBPACK_IMPORTED_MODULE_0__.forwardRef(TrademarkCircleFilled));
/***/ })
}]);
|
4lexBaum/openui5
|
src/sap.ui.support/test/sap/ui/support/integration/ui/SupportAssistantOpaConfig.js
|
sap.ui.require([
"sap/ui/test/Opa5",
"sap/ui/support/integration/ui/arrangements/Arrangement",
"sap/ui/support/integration/ui/data/CommunicationMock",
"sap/ui/support/mock/StorageSynchronizer",
"sap/ui/test/opaQunit",
"sap/ui/support/integration/ui/pages/Main",
"sap/ui/support/integration/ui/pages/Issues",
"sap/ui/support/integration/ui/pages/Rules",
"sap/ui/support/integration/ui/pages/Presets",
"sap/ui/support/integration/ui/pages/TemporaryRule"
], function (Opa5, Arrangement, CommunicationMock, StorageSynchronizer) {
"use strict";
StorageSynchronizer.initialize();
Opa5.extendConfig({
arrangements: new Arrangement(),
autoWait: true,
assertions: new Opa5({
iTeardownSupportAssistantFrame: function () {
return this.waitFor({
check: function () {
StorageSynchronizer.preserve(Opa5.getWindow());
return this.iTeardownMyAppFrame().done(function () {
CommunicationMock.destroy();
});
}
});
}
})
});
});
|
zx1993312/ry
|
ruoyi-system/src/main/java/com/ruoyi/system/domain/MeterAndCase.java
|
package com.ruoyi.system.domain;
import java.math.BigDecimal;
import com.ruoyi.common.annotation.Excel;
public class MeterAndCase {
    /** Primary key */
    private Long id;
    /** House number */
    @Excel(name = "房屋编号")
    private String houseNum;
    /** Meter type */
    @Excel(name = "表计类型")
    private Integer meterType;
    /** Meter serial number */
    @Excel(name = "表计序号")
    private Integer meterSerialNum;
    /** Meter name */
    @Excel(name = "表计名称")
    private String meterName;
    /** Initial reading */
    @Excel(name = "初始读数")
    private BigDecimal initialRead;
    /** Transformation ratio */
    @Excel(name = "变比")
    private BigDecimal transfRatio;
    /** Reverse flag */
    @Excel(name = "是否反向")
    private Integer reverseNot;
    /** Measuring meter type */
    @Excel(name = "计量表类型")
    private Integer strappingType;
    /** Meter box name */
    @Excel(name = "表箱名称")
    private String meterCaseName;
    /** Meter box location */
    @Excel(name = "表箱位置")
    private String meterCasePosition;
    /** Meter box serial number */
    @Excel(name = "表箱序号")
    private String meterSerial;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getHouseNum() {
return houseNum;
}
public BigDecimal getTransfRatio() {
return transfRatio;
}
public void setTransfRatio(BigDecimal transfRatio) {
this.transfRatio = transfRatio;
}
public void setHouseNum(String houseNum) {
this.houseNum = houseNum;
}
public Integer getMeterType() {
return meterType;
}
public void setMeterType(Integer meterType) {
this.meterType = meterType;
}
public Integer getMeterSerialNum() {
return meterSerialNum;
}
public void setMeterSerialNum(Integer meterSerialNum) {
this.meterSerialNum = meterSerialNum;
}
public String getMeterName() {
return meterName;
}
public void setMeterName(String meterName) {
this.meterName = meterName;
}
public BigDecimal getInitialRead() {
return initialRead;
}
public void setInitialRead(BigDecimal initialRead) {
this.initialRead = initialRead;
}
public Integer getReverseNot() {
return reverseNot;
}
public void setReverseNot(Integer reverseNot) {
this.reverseNot = reverseNot;
}
public Integer getStrappingType() {
return strappingType;
}
public void setStrappingType(Integer strappingType) {
this.strappingType = strappingType;
}
public String getMeterCaseName() {
return meterCaseName;
}
public void setMeterCaseName(String meterCaseName) {
this.meterCaseName = meterCaseName;
}
public String getMeterCasePosition() {
return meterCasePosition;
}
public void setMeterCasePosition(String meterCasePosition) {
this.meterCasePosition = meterCasePosition;
}
public String getMeterSerial() {
return meterSerial;
}
public void setMeterSerial(String meterSerial) {
this.meterSerial = meterSerial;
}
@Override
public String toString() {
return "MeterAndCase [id=" + id + ", houseNum=" + houseNum + ", meterType=" + meterType + ", meterSerialNum="
+ meterSerialNum + ", meterName=" + meterName + ", initialRead=" + initialRead + ", transfRatio="
+ transfRatio + ", reverseNot=" + reverseNot + ", strappingType=" + strappingType + ", meterCaseName="
+ meterCaseName + ", meterCasePosition=" + meterCasePosition + ", meterSerial=" + meterSerial
+ ", getId()=" + getId() + ", getHouseNum()=" + getHouseNum() + ", getMeterType()=" + getMeterType()
+ ", getMeterSerialNum()=" + getMeterSerialNum() + ", getMeterName()=" + getMeterName()
+ ", getTransfRatio()=" + getTransfRatio() + ", getReverseNot()=" + getReverseNot()
+ ", getStrappingType()=" + getStrappingType() + ", getMeterCaseName()=" + getMeterCaseName()
+ ", getMeterCasePosition()=" + getMeterCasePosition() + ", getMeterSerial()=" + getMeterSerial()
+ ", getClass()=" + getClass() + ", hashCode()=" + hashCode() + ", toString()=" + super.toString()
+ "]";
}
}
|
larrytheliquid/dataflow
|
spec/forker_spec.rb
|
require "#{File.dirname(__FILE__)}/spec_helper"
describe 'Setting a custom forker' do
before(:all) do
@original_forker = Dataflow.forker
Dataflow.forker = Class.new do
def self.synchronous_forker(&block)
block.call
end
end.method(:synchronous_forker)
end
after(:all) do
Dataflow.forker = @original_forker
end
it 'uses the custom forker in #flow' do
local do |my_var|
flow(my_var) { 1337 }
my_var.should == 1337
end
end
it 'uses the custom forker in #need_later' do
my_var = need_later { 1337 }
my_var.should == 1337
end
end
|
rickypai/chromotype
|
app/models/season_tag.rb
|
class SeasonTag < Tag
def self.root_name
"seasons"
end
def self.seasons_root
named_root(DateTag.named_root)
end
def self.for_date(date)
I18n.t("tags.#{self.root_name}.name")
season_name = I18n.t("tags.seasons.#{date.season.to_s}")
seasons_root.find_or_create_by_path season_name
end
def self.visit_asset(asset)
date = asset.captured_at
asset.add_tag(for_date(date), self)
end
end
|
NitinSatpal/Event-Scheduler
|
public/lib/vendor/ng-video/PlaybackRate.js
|
(function PlaybackRate($angular) {
"use strict";
/**
* @method createPlaybackRateDirective
* @param name {String}
* @param clickFn {Function}
* @return {Object}
*/
var createPlaybackRateDirective = function createPlaybackRateDirective(name, clickFn) {
/**
* @property directiveName
* @type {String}
*/
var directiveName = 'viPlaybackRate' + name.charAt(0).toUpperCase() + name.slice(1);
/**
* @directive viPlaybackRateItem
* @type {Function}
*/
$angular.module('ngVideo').directive(directiveName, ['$rootScope', 'ngVideoOptions',
function viPlaybackRateItem($rootScope, ngVideoOptions) {
return {
/**
* @property restrict
* @type {String}
*/
restrict: ngVideoOptions.RESTRICT,
/**
* @method link
* @param scope {Object}
* @param element {Object}
* @param attributes {Object}
* @return {void}
*/
link: function link(scope, element, attributes) {
/**
* @method setPlaybackRate
* @param rate {Number}
* @return {void}
*/
scope.setPlaybackRate = function setPlaybackRate(rate) {
// Update the current play rate and the default play rate for when another
// video is played.
scope.player.playbackRate = rate;
scope.player.defaultPlaybackRate = rate;
// Force the refreshing of the statistics.
$rootScope.$broadcast('ng-video/feedback/refresh');
};
element.bind('click', function onClick() {
// Invoke the `clickFn` callback when the element has been clicked.
clickFn.call(this, scope, +attributes[directiveName], +scope.player.playbackRate);
// Force the timeline directive to update.
$rootScope.$broadcast('ng-video/feedback/refresh');
scope.$apply();
});
}
}
}]);
};
/**
* @directive viPlaybackRate
* @type {Function}
* @param scope {Object}
* @param directiveValue {Number}
*/
createPlaybackRateDirective('', function onPlaybackRateClick(scope, directiveValue) {
scope.setPlaybackRate(directiveValue);
});
/**
* @directive viPlaybackRateNormalise
* @type {Function}
* @param scope {Object}
*/
createPlaybackRateDirective('normalise', function onPlaybackRateNormaliseClick(scope) {
scope.setPlaybackRate(1);
});
/**
* @directive viPlaybackRateIncrement
* @type {Function}
* @param scope {Object}
* @param directiveValue {Number}
* @param currentRate {Number}
*/
createPlaybackRateDirective('increment', function onPlaybackRateIncrementClick(scope, directiveValue, currentRate) {
scope.setPlaybackRate(currentRate + directiveValue);
});
/**
* @directive viPlaybackRateDecrement
* @type {Function}
* @param scope {Object}
* @param directiveValue {Number}
* @param currentRate {Number}
*/
createPlaybackRateDirective('decrement', function onPlaybackRateDecrementClick(scope, directiveValue, currentRate) {
scope.setPlaybackRate(currentRate - directiveValue);
});
})(window.angular);
|
MercuriusXeno/Goo
|
src/main/java/com/xeno/goo/datagen/BaseLootTableProvider.java
|
package com.xeno.goo.datagen;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.xeno.goo.GooMod;
import com.xeno.goo.setup.Registry;
import net.minecraft.block.Block;
import net.minecraft.data.DataGenerator;
import net.minecraft.data.DirectoryCache;
import net.minecraft.data.IDataProvider;
import net.minecraft.data.LootTableProvider;
import net.minecraft.loot.*;
import net.minecraft.loot.functions.CopyName;
import net.minecraft.loot.functions.CopyNbt;
import net.minecraft.util.ResourceLocation;
import java.io.IOException;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
public abstract class BaseLootTableProvider extends LootTableProvider {
private static final Gson GSON = new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create();
// Filled by subclasses
protected final Map<Block, LootTable.Builder> blockLootTables = new HashMap<>();
protected final Map<ResourceLocation, LootTable.Builder> advancementLootTables = new HashMap<>();
private final DataGenerator generator;
public BaseLootTableProvider(DataGenerator dataGeneratorIn) {
super(dataGeneratorIn);
this.generator = dataGeneratorIn;
}
// Subclasses implement this to fill the 'blockLootTables' and 'advancementLootTables' maps.
protected abstract void addTables();
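    // Illustrative sketch (hypothetical block reference): an addTables() implementation would
    // typically register entries such as
    //   blockLootTables.put(someBlock, createMundaneTable("some_block", someBlock));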
// absolutely bog standard loot table for things with no nbt requirements
// used for drain, lobber, pump
protected LootTable.Builder createMundaneTable(String name, Block block) {
LootPool.Builder builder = LootPool.builder()
.name(name)
.rolls(ConstantRange.of(1))
.addEntry(ItemLootEntry.builder(block)
.acceptFunction(CopyName.builder(CopyName.Source.BLOCK_ENTITY))
);
return LootTable.builder().addLootPool(builder);
}
// any tile that holds goo needs some nbt (goo) and its id
protected LootTable.Builder createGooContainerLootTable(String name, Block block) {
LootPool.Builder builder = LootPool.builder()
.name(name)
.rolls(ConstantRange.of(1))
.addEntry(ItemLootEntry.builder(block)
.acceptFunction(CopyName.builder(CopyName.Source.BLOCK_ENTITY))
.acceptFunction(CopyNbt.builder(CopyNbt.Source.BLOCK_ENTITY)
.replaceOperation("goo", "BlockEntityTag.goo")
.replaceOperation("id", "BlockEntityTag.id"))
);
return LootTable.builder().addLootPool(builder);
}
protected LootTable.Builder createGooContainerWithContainmentLootTable(String name, Block block) {
LootPool.Builder builder = LootPool.builder()
.name(name)
.rolls(ConstantRange.of(1))
.addEntry(ItemLootEntry.builder(block)
.acceptFunction(CopyName.builder(CopyName.Source.BLOCK_ENTITY))
.acceptFunction(CopyNbt.builder(CopyNbt.Source.BLOCK_ENTITY)
.replaceOperation("goo", "BlockEntityTag.goo")
.replaceOperation(Registry.CONTAINMENT.getId().toString(), "BlockEntityTag." + Registry.CONTAINMENT.getId().toString())
.replaceOperation("id", "BlockEntityTag.id")
)
);
return LootTable.builder().addLootPool(builder);
}
@Override
// Entry point
public void act(DirectoryCache cache) {
addTables();
Map<ResourceLocation, LootTable> tables = new HashMap<>();
for (Map.Entry<Block, LootTable.Builder> entry : blockLootTables.entrySet()) {
tables.put(entry.getKey().getLootTable(), entry.getValue().setParameterSet(LootParameterSets.BLOCK).build());
}
for (Map.Entry<ResourceLocation, LootTable.Builder> entry : advancementLootTables.entrySet()) {
tables.put(entry.getKey(), entry.getValue().setParameterSet(LootParameterSets.ADVANCEMENT).build());
}
writeTables(cache, tables);
}
// Actually write out the tables in the output folder
private void writeTables(DirectoryCache cache, Map<ResourceLocation, LootTable> tables) {
Path outputFolder = this.generator.getOutputFolder();
tables.forEach((key, lootTable) -> {
Path path = outputFolder.resolve("data/" + key.getNamespace() + "/loot_tables/" + key.getPath() + ".json");
try {
IDataProvider.save(GSON, cache, LootTableManager.toJson(lootTable), path);
} catch (IOException e) {
GooMod.error("Couldn't write loot table " + path + e);
}
});
}
@Override
public String getName() {
return "Goo LootTables";
}
}
|
bradchesney79/illacceptanything
|
linux/drivers/staging/comedi/drivers/ni_tio_internal.h
|
/*
drivers/ni_tio_internal.h
Header file for NI general purpose counter support code (ni_tio.c and
ni_tiocmd.c)
COMEDI - Linux Control and Measurement Device Interface
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
*/
#ifndef _COMEDI_NI_TIO_INTERNAL_H
#define _COMEDI_NI_TIO_INTERNAL_H
#include "ni_tio.h"
#define NITIO_AUTO_INC_REG(x) (NITIO_G0_AUTO_INC + (x))
#define GI_AUTO_INC_MASK 0xff
#define NITIO_CMD_REG(x) (NITIO_G0_CMD + (x))
#define GI_ARM (1 << 0)
#define GI_SAVE_TRACE (1 << 1)
#define GI_LOAD (1 << 2)
#define GI_DISARM (1 << 4)
#define GI_CNT_DIR(x) (((x) & 0x3) << 5)
#define GI_CNT_DIR_MASK (3 << 5)
#define GI_WRITE_SWITCH (1 << 7)
#define GI_SYNC_GATE (1 << 8)
#define GI_LITTLE_BIG_ENDIAN (1 << 9)
#define GI_BANK_SWITCH_START (1 << 10)
#define GI_BANK_SWITCH_MODE (1 << 11)
#define GI_BANK_SWITCH_ENABLE (1 << 12)
#define GI_ARM_COPY (1 << 13)
#define GI_SAVE_TRACE_COPY (1 << 14)
#define GI_DISARM_COPY (1 << 15)
#define NITIO_HW_SAVE_REG(x) (NITIO_G0_HW_SAVE + (x))
#define NITIO_SW_SAVE_REG(x) (NITIO_G0_SW_SAVE + (x))
#define NITIO_MODE_REG(x) (NITIO_G0_MODE + (x))
#define GI_GATING_DISABLED (0 << 0)
#define GI_LEVEL_GATING (1 << 0)
#define GI_RISING_EDGE_GATING (2 << 0)
#define GI_FALLING_EDGE_GATING (3 << 0)
#define GI_GATING_MODE_MASK (3 << 0)
#define GI_GATE_ON_BOTH_EDGES (1 << 2)
#define GI_EDGE_GATE_STARTS_STOPS (0 << 3)
#define GI_EDGE_GATE_STOPS_STARTS (1 << 3)
#define GI_EDGE_GATE_STARTS (2 << 3)
#define GI_EDGE_GATE_NO_STARTS_OR_STOPS (3 << 3)
#define GI_EDGE_GATE_MODE_MASK (3 << 3)
#define GI_STOP_ON_GATE (0 << 5)
#define GI_STOP_ON_GATE_OR_TC (1 << 5)
#define GI_STOP_ON_GATE_OR_SECOND_TC (2 << 5)
#define GI_STOP_MODE_MASK (3 << 5)
#define GI_LOAD_SRC_SEL (1 << 7)
#define GI_OUTPUT_TC_PULSE (1 << 8)
#define GI_OUTPUT_TC_TOGGLE (2 << 8)
#define GI_OUTPUT_TC_OR_GATE_TOGGLE (3 << 8)
#define GI_OUTPUT_MODE_MASK (3 << 8)
#define GI_NO_HARDWARE_DISARM (0 << 10)
#define GI_DISARM_AT_TC (1 << 10)
#define GI_DISARM_AT_GATE (2 << 10)
#define GI_DISARM_AT_TC_OR_GATE (3 << 10)
#define GI_COUNTING_ONCE_MASK (3 << 10)
#define GI_LOADING_ON_TC (1 << 12)
#define GI_GATE_POL_INVERT (1 << 13)
#define GI_LOADING_ON_GATE (1 << 14)
#define GI_RELOAD_SRC_SWITCHING (1 << 15)
#define NITIO_LOADA_REG(x) (NITIO_G0_LOADA + (x))
#define NITIO_LOADB_REG(x) (NITIO_G0_LOADB + (x))
#define NITIO_INPUT_SEL_REG(x) (NITIO_G0_INPUT_SEL + (x))
#define GI_READ_ACKS_IRQ (1 << 0)
#define GI_WRITE_ACKS_IRQ (1 << 1)
#define GI_BITS_TO_SRC(x) (((x) >> 2) & 0x1f)
#define GI_SRC_SEL(x) (((x) & 0x1f) << 2)
#define GI_SRC_SEL_MASK (0x1f << 2)
#define GI_BITS_TO_GATE(x) (((x) >> 7) & 0x1f)
#define GI_GATE_SEL(x) (((x) & 0x1f) << 7)
#define GI_GATE_SEL_MASK (0x1f << 7)
#define GI_GATE_SEL_LOAD_SRC (1 << 12)
#define GI_OR_GATE (1 << 13)
#define GI_OUTPUT_POL_INVERT (1 << 14)
#define GI_SRC_POL_INVERT (1 << 15)
#define NITIO_CNT_MODE_REG(x) (NITIO_G0_CNT_MODE + (x))
#define GI_CNT_MODE(x) (((x) & 0x7) << 0)
#define GI_CNT_MODE_NORMAL GI_CNT_MODE(0)
#define GI_CNT_MODE_QUADX1 GI_CNT_MODE(1)
#define GI_CNT_MODE_QUADX2 GI_CNT_MODE(2)
#define GI_CNT_MODE_QUADX4 GI_CNT_MODE(3)
#define GI_CNT_MODE_TWO_PULSE GI_CNT_MODE(4)
#define GI_CNT_MODE_SYNC_SRC GI_CNT_MODE(6)
#define GI_CNT_MODE_MASK (7 << 0)
#define GI_INDEX_MODE (1 << 4)
#define GI_INDEX_PHASE(x) (((x) & 0x3) << 5)
#define GI_INDEX_PHASE_MASK (3 << 5)
#define GI_HW_ARM_ENA (1 << 7)
#define GI_HW_ARM_SEL(x) ((x) << 8)
#define GI_660X_HW_ARM_SEL_MASK (0x7 << 8)
#define GI_M_HW_ARM_SEL_MASK (0x1f << 8)
#define GI_660X_PRESCALE_X8 (1 << 12)
#define GI_M_PRESCALE_X8 (1 << 13)
#define GI_660X_ALT_SYNC (1 << 13)
#define GI_M_ALT_SYNC (1 << 14)
#define GI_660X_PRESCALE_X2 (1 << 14)
#define GI_M_PRESCALE_X2 (1 << 15)
#define NITIO_GATE2_REG(x) (NITIO_G0_GATE2 + (x))
#define GI_GATE2_MODE (1 << 0)
#define GI_BITS_TO_GATE2(x) (((x) >> 7) & 0x1f)
#define GI_GATE2_SEL(x) (((x) & 0x1f) << 7)
#define GI_GATE2_SEL_MASK (0x1f << 7)
#define GI_GATE2_POL_INVERT (1 << 13)
#define GI_GATE2_SUBSEL (1 << 14)
#define GI_SRC_SUBSEL (1 << 15)
#define NITIO_SHARED_STATUS_REG(x) (NITIO_G01_STATUS + ((x) / 2))
#define GI_SAVE(x) (((x) % 2) ? (1 << 1) : (1 << 0))
#define GI_COUNTING(x) (((x) % 2) ? (1 << 3) : (1 << 2))
#define GI_NEXT_LOAD_SRC(x) (((x) % 2) ? (1 << 5) : (1 << 4))
#define GI_STALE_DATA(x) (((x) % 2) ? (1 << 7) : (1 << 6))
#define GI_ARMED(x) (((x) % 2) ? (1 << 9) : (1 << 8))
#define GI_NO_LOAD_BETWEEN_GATES(x) (((x) % 2) ? (1 << 11) : (1 << 10))
#define GI_TC_ERROR(x) (((x) % 2) ? (1 << 13) : (1 << 12))
#define GI_GATE_ERROR(x) (((x) % 2) ? (1 << 15) : (1 << 14))
#define NITIO_RESET_REG(x) (NITIO_G01_RESET + ((x) / 2))
#define GI_RESET(x) (1 << (2 + ((x) % 2)))
#define NITIO_STATUS1_REG(x) (NITIO_G01_STATUS1 + ((x) / 2))
#define NITIO_STATUS2_REG(x) (NITIO_G01_STATUS2 + ((x) / 2))
#define GI_OUTPUT(x) (((x) % 2) ? (1 << 1) : (1 << 0))
#define GI_HW_SAVE(x) (((x) % 2) ? (1 << 13) : (1 << 12))
#define GI_PERMANENT_STALE(x) (((x) % 2) ? (1 << 15) : (1 << 14))
#define NITIO_DMA_CFG_REG(x) (NITIO_G0_DMA_CFG + (x))
#define GI_DMA_ENABLE (1 << 0)
#define GI_DMA_WRITE (1 << 1)
#define GI_DMA_INT_ENA (1 << 2)
#define GI_DMA_RESET (1 << 3)
#define GI_DMA_BANKSW_ERROR (1 << 4)
#define NITIO_DMA_STATUS_REG(x) (NITIO_G0_DMA_STATUS + (x))
#define GI_DMA_READBANK (1 << 13)
#define GI_DRQ_ERROR (1 << 14)
#define GI_DRQ_STATUS (1 << 15)
#define NITIO_ABZ_REG(x) (NITIO_G0_ABZ + (x))
#define NITIO_INT_ACK_REG(x) (NITIO_G0_INT_ACK + (x))
#define GI_GATE_ERROR_CONFIRM(x) (((x) % 2) ? (1 << 1) : (1 << 5))
#define GI_TC_ERROR_CONFIRM(x) (((x) % 2) ? (1 << 2) : (1 << 6))
#define GI_TC_INTERRUPT_ACK (1 << 14)
#define GI_GATE_INTERRUPT_ACK (1 << 15)
#define NITIO_STATUS_REG(x) (NITIO_G0_STATUS + (x))
#define GI_GATE_INTERRUPT (1 << 2)
#define GI_TC (1 << 3)
#define GI_INTERRUPT (1 << 15)
#define NITIO_INT_ENA_REG(x) (NITIO_G0_INT_ENA + (x))
#define GI_TC_INTERRUPT_ENABLE(x) (((x) % 2) ? (1 << 9) : (1 << 6))
#define GI_GATE_INTERRUPT_ENABLE(x) (((x) % 2) ? (1 << 10) : (1 << 8))
static inline void write_register(struct ni_gpct *counter, unsigned bits,
enum ni_gpct_register reg)
{
BUG_ON(reg >= NITIO_NUM_REGS);
counter->counter_dev->write_register(counter, bits, reg);
}
static inline unsigned read_register(struct ni_gpct *counter,
enum ni_gpct_register reg)
{
BUG_ON(reg >= NITIO_NUM_REGS);
return counter->counter_dev->read_register(counter, reg);
}
static inline int ni_tio_counting_mode_registers_present(const struct
ni_gpct_device
*counter_dev)
{
switch (counter_dev->variant) {
case ni_gpct_variant_e_series:
return 0;
case ni_gpct_variant_m_series:
case ni_gpct_variant_660x:
return 1;
default:
BUG();
break;
}
return 0;
}
static inline void ni_tio_set_bits_transient(struct ni_gpct *counter,
enum ni_gpct_register
register_index, unsigned bit_mask,
unsigned bit_values,
unsigned transient_bit_values)
{
struct ni_gpct_device *counter_dev = counter->counter_dev;
unsigned long flags;
BUG_ON(register_index >= NITIO_NUM_REGS);
spin_lock_irqsave(&counter_dev->regs_lock, flags);
counter_dev->regs[register_index] &= ~bit_mask;
counter_dev->regs[register_index] |= (bit_values & bit_mask);
write_register(counter,
counter_dev->regs[register_index] | transient_bit_values,
register_index);
mmiowb();
spin_unlock_irqrestore(&counter_dev->regs_lock, flags);
}
/* ni_tio_set_bits( ) is for safely writing to registers whose bits may be
* twiddled in interrupt context, or whose software copy may be read in
* interrupt context.
*/
static inline void ni_tio_set_bits(struct ni_gpct *counter,
enum ni_gpct_register register_index,
unsigned bit_mask, unsigned bit_values)
{
ni_tio_set_bits_transient(counter, register_index, bit_mask, bit_values,
0x0);
}
/* ni_tio_get_soft_copy( ) is for safely reading the software copy of a register
whose bits might be modified in interrupt context, or whose software copy
might need to be read in interrupt context.
*/
static inline unsigned ni_tio_get_soft_copy(const struct ni_gpct *counter,
enum ni_gpct_register
register_index)
{
struct ni_gpct_device *counter_dev = counter->counter_dev;
unsigned long flags;
unsigned value;
BUG_ON(register_index >= NITIO_NUM_REGS);
spin_lock_irqsave(&counter_dev->regs_lock, flags);
value = counter_dev->regs[register_index];
spin_unlock_irqrestore(&counter_dev->regs_lock, flags);
return value;
}
int ni_tio_arm(struct ni_gpct *counter, int arm, unsigned start_trigger);
int ni_tio_set_gate_src(struct ni_gpct *counter, unsigned gate_index,
unsigned int gate_source);
#endif /* _COMEDI_NI_TIO_INTERNAL_H */
|
MeetYouDevs/hbase-manager
|
src/main/java/com/meiyou/shiro/core/ShiroConfig.java
|
package com.meiyou.shiro.core;
import java.util.List;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.realm.Realm;
import org.apache.shiro.spring.web.config.DefaultShiroFilterChainDefinition;
import org.apache.shiro.spring.web.config.ShiroFilterChainDefinition;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.aop.framework.autoproxy.DefaultAdvisorAutoProxyCreator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.meiyou.shiro.entity.SysPermission;
import com.meiyou.shiro.service.SysPermissionService;
import com.meiyou.shiro.utils.EncryptUitls;
@Configuration
public class ShiroConfig {
@Autowired
private SysPermissionService sysPermissionService;
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher() {
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
        // Hashing algorithm
        hashedCredentialsMatcher.setHashAlgorithmName(EncryptUitls.HASH_ALGORITHM_NAME);
        // Number of hash iterations
hashedCredentialsMatcher.setHashIterations(EncryptUitls.HASH_ITERATIONS);
return hashedCredentialsMatcher;
}
@Bean
public Realm realm() {
CustomRealm customRealm = new CustomRealm();
customRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return customRealm;
}
@Bean
public static DefaultAdvisorAutoProxyCreator getDefaultAdvisorAutoProxyCreator() {
DefaultAdvisorAutoProxyCreator creator = new DefaultAdvisorAutoProxyCreator();
        // Set to true to work around an odd bug: with Spring AOP enabled, the @RequiresRoles annotation keeps the method from being mapped to its request, so the request returns 404
creator.setUsePrefix(true);
return creator;
}
@Bean
public DefaultWebSecurityManager securityManager() {
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(realm());
return securityManager;
}
/**
* 过滤器: anon org.apache.shiro.web.filter.authc.AnonymousFilter authc
* org.apache.shiro.web.filter.authc.FormAuthenticationFilter authcBasic
* org.apache.shiro.web.filter.authc.BasicHttpAuthenticationFilter logout
* org.apache.shiro.web.filter.authc.LogoutFilter noSessionCreation
* org.apache.shiro.web.filter.session.NoSessionCreationFilter perms
* org.apache.shiro.web.filter.authz.PermissionsAuthorizationFilter port
* org.apache.shiro.web.filter.authz.PortFilter rest
* org.apache.shiro.web.filter.authz.HttpMethodPermissionFilter roles
* org.apache.shiro.web.filter.authz.RolesAuthorizationFilter ssl
* org.apache.shiro.web.filter.authz.SslFilter user
* org.apache.shiro.web.filter.authc.UserFilter
*/
@Bean
public ShiroFilterChainDefinition shiroFilterChainDefinition() {
DefaultShiroFilterChainDefinition chain = new DefaultShiroFilterChainDefinition();
        // Note: a LinkedHashMap is used here, so order matters; Shiro matches definitions from top to bottom and stops at the first match
chain.addPathDefinition("/fonts/**", "anon");// fonts
chain.addPathDefinition("/image/**", "anon");// image
chain.addPathDefinition("/css/**", "anon");// css
chain.addPathDefinition("/js/**", "anon");// js
        // URLs that can be accessed anonymously
chain.addPathDefinition("/login", "anon");
chain.addPathDefinition("/unauthorized", "anon");
chain.addPathDefinition("/logout", "anon");
List<SysPermission> sysPermissionList = sysPermissionService.listSysPermission();
for (SysPermission sysPermission : sysPermissionList) {
            // Setting "authc,roles[roleName]" here does not work
// chain.addPathDefinition(sysPermission.getUrl(),
// "authc,"+sysPermission.getShrioValue());
chain.addPathDefinition(sysPermission.getUrl(), "perms[" + sysPermission.getPermissionName() + "]");
}
        // All other paths require login
chain.addPathDefinition("/**", "authc");
return chain;
}
}
|
ace2014/Dreamer
|
DreamerSupport/src/main/java/com/pzl/dreamer/utils/GraphicUtil.java
|
package com.pzl.dreamer.utils;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.text.Layout;
import android.text.TextPaint;
import android.text.TextUtils;
import java.util.ArrayList;
import java.util.List;
/**
* @author zl.peng
* @version [1.0, 2016-10-28]
*/
public class GraphicUtil {
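    /**
     * Returns the index of the last character of str that still fits within maxPix pixels
     * when measured with paint, or str.length() - 1 when the entire string fits.
     */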
public static int subStringLength(String str, int maxPix, TextPaint paint) {
if (TextUtils.isEmpty(str)) {
return 0;
}
int currentIndex = 0;
for (int i = 0; i < str.length(); i++) {
String temp = str.substring(0, i + 1);
float valueLength = paint.measureText(temp);
if (valueLength > maxPix) {
currentIndex = i - 1;
break;
}
if (valueLength == maxPix) {
currentIndex = i;
break;
}
}
if (currentIndex == 0) {
currentIndex = str.length() - 1;
}
return currentIndex;
}
public static float getStringWidth(String str, TextPaint paint) {
float strWidth = paint.measureText(str);
return strWidth;
}
public static float getDesiredWidth(String str, TextPaint paint) {
float strWidth = Layout.getDesiredWidth(str, paint);
return strWidth;
}
public static float getDesiredHeight(TextPaint paint) {
Paint.FontMetrics fm = paint.getFontMetrics();
return (float) Math.ceil(fm.descent - fm.ascent);
}
public static List<String> getDrawRowStr(String text, int maxWPix, TextPaint paint) {
String[] texts = null;
if (text.indexOf("\n") != -1) {
texts = text.split("\n");
} else {
texts = new String[1];
texts[0] = text;
}
        List<String> mStrList = new ArrayList<>();
for (int i = 0; i < texts.length; i++) {
String textLine = texts[i];
while (true) {
int endIndex = subStringLength(textLine, maxWPix, paint);
if (endIndex <= 0) {
mStrList.add(textLine);
} else if (endIndex == textLine.length() - 1)
mStrList.add(textLine);
else {
mStrList.add(textLine.substring(0, endIndex + 1));
}
if (textLine.length() <= endIndex + 1)
break;
textLine = textLine.substring(endIndex + 1);
}
}
return mStrList;
}
public static int getDrawRowCount(String text, int maxWPix, TextPaint paint) {
String[] texts = null;
if (text.indexOf("\n") != -1) {
texts = text.split("\n");
} else {
texts = new String[1];
texts[0] = text;
}
        List<String> mStrList = new ArrayList<>();
for (int i = 0; i < texts.length; i++) {
String textLine = texts[i];
while (true) {
int endIndex = subStringLength(textLine, maxWPix, paint);
if (endIndex <= 0) {
mStrList.add(textLine);
} else if (endIndex == textLine.length() - 1)
mStrList.add(textLine);
else {
mStrList.add(textLine.substring(0, endIndex + 1));
}
if (textLine.length() <= endIndex + 1)
break;
textLine = textLine.substring(endIndex + 1);
}
}
return mStrList.size();
}
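    /**
     * Draws text starting at (left, top), wrapping it to at most maxWPix pixels per row,
     * and returns the number of rows drawn (1 for empty text).
     */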
public static int drawText(Canvas canvas, String text, int maxWPix, TextPaint paint, int left, int top) {
if (TextUtils.isEmpty(text)) {
return 1;
}
        List<String> mStrList = getDrawRowStr(text, maxWPix, paint);
int hSize = (int) getDesiredHeight(paint);
for (int i = 0; i < mStrList.size(); i++) {
int x = left;
int y = top + hSize / 2 + hSize * i;
            String textLine = mStrList.get(i);
canvas.drawText(textLine, x, y, paint);
}
return mStrList.size();
}
}
|