code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
/**
* Copyright (C) 2015 Agro-Know, Deutsches Forschungszentrum für Künstliche Intelligenz, iMinds,
* Institut für Angewandte Informatik e. V. an der Universität Leipzig,
* Istituto Superiore Mario Boella, Tilde, Vistatec, WRIPL (http://freme-project.eu)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.freme.broker.eservices;
import com.google.gson.JsonSyntaxException;
import com.mashape.unirest.http.exceptions.UnirestException;
import eu.freme.broker.exception.*;
import eu.freme.common.conversion.rdf.RDFConstants;
import eu.freme.common.exception.OwnedResourceNotFoundException;
import eu.freme.common.persistence.dao.PipelineDAO;
import eu.freme.common.persistence.dao.UserDAO;
import eu.freme.common.persistence.model.OwnedResource;
import eu.freme.common.persistence.model.Pipeline;
import eu.freme.common.persistence.model.User;
import eu.freme.eservices.pipelines.core.PipelineResponse;
import eu.freme.eservices.pipelines.core.PipelineService;
import eu.freme.eservices.pipelines.core.ServiceException;
import eu.freme.eservices.pipelines.core.WrappedPipelineResponse;
import eu.freme.eservices.pipelines.requests.RequestBuilder;
import eu.freme.eservices.pipelines.requests.RequestFactory;
import eu.freme.eservices.pipelines.requests.SerializedRequest;
import eu.freme.eservices.pipelines.serialization.Serializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Profile;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.annotation.Secured;
import org.springframework.security.authentication.InsufficientAuthenticationException;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * REST controller exposing the FREME pipelining service: executing chains of
 * serialized e-service requests, and CRUD operations on stored pipeline
 * templates.
 *
 * @author Gerald Haesendonck
 */
@RestController
@SuppressWarnings("unused")
@Profile("broker")
public class Pipelines extends BaseRestController {

    // Executes chains of serialized service requests.
    @Autowired
    PipelineService pipelineAPI;

    // Persistence access for stored pipeline templates.
    @Autowired
    PipelineDAO pipelineDAO;

    // Persistence access for users; used when transferring template ownership.
    @Autowired
    UserDAO userDAO;
/**
 * <p>Calls the pipelining service.</p>
 * <p>Some predefined requests can be formed using the class {@link RequestFactory}. It also converts request objects
 * from and to JSON.</p>
 * <p>To create custom requests, use the {@link RequestBuilder}.</p>
 * <p>Examples can be found in the unit tests in {@code eu/freme/broker/integration_tests/pipelines}.</p>
 * @param requests The requests (a JSON array of serialized requests) to send to the service.
 * @param stats If "true": wrap the response of the last request and add timing statistics.
 * @return The response of the last request.
 * @throws BadRequestException The contents of the request is not valid.
 * @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
 */
@RequestMapping(value = "pipelining/chain",
        method = RequestMethod.POST,
        consumes = "application/json",
        produces = {"text/turtle", "application/json", "application/ld+json", "application/n-triples", "application/rdf+xml", "text/n3", "text/html"}
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> pipeline(@RequestBody String requests, @RequestParam (value = "stats", defaultValue = "false", required = false) String stats) {
    try {
        boolean wrapResult = Boolean.parseBoolean(stats);
        List<SerializedRequest> serializedRequests = Serializer.fromJson(requests);
        WrappedPipelineResponse pipelineResult = pipelineAPI.chain(serializedRequests);
        MultiValueMap<String, String> headers = new HttpHeaders();
        if (wrapResult) {
            // Wrapped mode: return the whole result (content + timing statistics) as JSON.
            headers.add(HttpHeaders.CONTENT_TYPE, RDFConstants.RDFSerialization.JSON.contentType());
            return new ResponseEntity<>(Serializer.toJson(pipelineResult), headers, HttpStatus.OK);
        } else {
            // Plain mode: return only the body of the last response, with its own content type.
            headers.add(HttpHeaders.CONTENT_TYPE, pipelineResult.getContent().getContentType());
            PipelineResponse lastResponse = pipelineResult.getContent();
            return new ResponseEntity<>(lastResponse.getBody(), headers, HttpStatus.OK);
        }
    } catch (ServiceException serviceError) {
        // TODO: see if this can be replaced by exception(s) defined in the broker.
        // One service in the chain failed: propagate its status code and content type.
        logger.error(serviceError.getMessage(), serviceError);
        MultiValueMap<String, String> headers = new HttpHeaders();
        headers.add(HttpHeaders.CONTENT_TYPE, serviceError.getResponse().getContentType());
        return new ResponseEntity<>(serviceError.getMessage(), headers, serviceError.getStatus());
    } catch (JsonSyntaxException jsonException) {
        // The request body is not a valid JSON list of serialized requests.
        logger.error(jsonException.getMessage(), jsonException);
        String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
        throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
    } catch (UnirestException unirestException) {
        // A downstream HTTP call could not be performed (e.g. bad endpoint in a request).
        logger.error(unirestException.getMessage(), unirestException);
        throw new BadRequestException(unirestException.getMessage());
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        // throw an Internal Server exception if anything goes really wrong...
        throw new InternalServerErrorException(t.getMessage());
    }
}
/**
 * Calls the pipelining service using an existing template.
 * @param body The contents to send to the pipeline. This can be a NIF or plain text document.
 * @param id The id of the pipeline template to use.
 * @param stats If "true": wrap the response of the last request and add timing statistics.
 * @return The response of the latest request defined in the template.
 * @throws AccessDeniedException The pipeline template is not visible by the current user.
 * @throws BadRequestException The contents of the request is not valid, or the template contains no requests.
 * @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
 * @throws TemplateNotFoundException The pipeline template does not exist.
 */
// NOTE(review): unlike "pipelining/chain", this endpoint carries no @Secured
// annotation — confirm whether anonymous execution of templates is intended.
@RequestMapping(value = "pipelining/chain/{id}",
        method = RequestMethod.POST,
        consumes = {"text/turtle", "application/json", "application/ld+json", "application/n-triples", "application/rdf+xml", "text/n3", "text/plain"},
        produces = {"text/turtle", "application/json", "application/ld+json", "application/n-triples", "application/rdf+xml", "text/n3"}
)
public ResponseEntity<String> pipeline(@RequestBody String body, @PathVariable long id, @RequestParam (value = "stats", defaultValue = "false", required = false) String stats) {
    try {
        Pipeline pipeline = pipelineDAO.findOneById(id);
        List<SerializedRequest> serializedRequests = Serializer.fromJson(pipeline.getSerializedRequests());
        // BUGFIX: guard against an empty template; previously this raised an
        // unhandled IndexOutOfBoundsException (HTTP 500) instead of a 400.
        if (serializedRequests.isEmpty()) {
            throw new BadRequestException("The pipeline template with id " + id + " contains no requests.");
        }
        // The request body becomes the input of the first request in the chain.
        serializedRequests.get(0).setBody(body);
        return pipeline(Serializer.toJson(serializedRequests), stats);
    } catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
        logger.error(ex.getMessage(), ex);
        throw new AccessDeniedException(ex.getMessage());
    } catch (JsonSyntaxException jsonException) {
        logger.error(jsonException.getMessage(), jsonException);
        String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
        throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
    } catch (OwnedResourceNotFoundException ex) {
        logger.error(ex.getMessage(), ex);
        throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
    }
}
/**
 * Creates and stores a pipeline template.
 * @param pipelineInfo A JSON string containing the fields "label", "description", "serializedRequests", which
 *                     define the pipeline template.
 * @param visibility The visibility of the template. Can be {@literal PUBLIC} or {@literal PRIVATE}. PUBLIC means visible to anyone,
 *                   PRIVATE means only visible to the currently authenticated user.
 * @param persist {@literal true}: store the template until deleted by someone, {@literal false} to guarantee
 *                it to be stored for one week.
 * @return A JSON string containing the full pipeline info, i.e. the fields "id", "label", "description",
 *         "persist", "visibility", "owner", "serializedRequests".
 * @throws AccessDeniedException The pipeline template is not visible by the current user.
 * @throws BadRequestException The contents of the request is not valid.
 * @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
 */
@RequestMapping(value = "pipelining/templates",
        method = RequestMethod.POST,
        consumes = "application/json",
        produces = "application/json"
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> create(
        @RequestBody String pipelineInfo,
        @RequestParam(value = "visibility", required = false) String visibility,
        @RequestParam (value = "persist", defaultValue = "false", required = false) String persist
) {
    try {
        // Deserializing also performs a first validation of the pipeline definition.
        eu.freme.eservices.pipelines.serialization.Pipeline pipelineInfoObj = Serializer.templateFromJson(pipelineInfo);
        boolean toPersist = Boolean.parseBoolean(persist);
        Pipeline pipeline = new Pipeline(
                OwnedResource.Visibility.getByString(visibility),
                pipelineInfoObj.getLabel(),
                pipelineInfoObj.getDescription(),
                Serializer.toJson(pipelineInfoObj.getSerializedRequests()),
                toPersist);
        pipeline = pipelineDAO.save(pipeline);
        String response = Serializer.toJson(pipeline);
        return createOKJSONResponse(response);
    } catch (JsonSyntaxException jsonException) {
        logger.error(jsonException.getMessage(), jsonException);
        String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
        throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
    } catch (eu.freme.common.exception.BadRequestException e) {
        // e.g. an invalid visibility value; translate to the broker's exception type.
        logger.error(e.getMessage(), e);
        throw new BadRequestException(e.getMessage());
    } catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
        logger.error(ex.getMessage(), ex);
        throw new AccessDeniedException(ex.getMessage());
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        // throw an Internal Server exception if anything goes really wrong...
        throw new InternalServerErrorException(t.getMessage());
    }
}
/**
 * Updates an existing pipeline template. All parameters except {@code id} are optional;
 * only fields that are present and differ from the stored values are modified.
 * @param id The id of the pipeline template to update.
 * @param ownerName The name of the new owner.
 * @param visibility The visibility of the template. Can be {@literal PUBLIC} or {@literal PRIVATE}. PUBLIC means visible to anyone,
 *                   PRIVATE means only visible to the currently authenticated user.
 * @param persist {@literal true}: store the template until deleted by someone, {@literal false} to guarantee
 *                it to be stored for one week.
 * @param pipelineInfo A JSON string containing updated pipeline template info. The fields "label", "description", "serializedRequests"
 *                     define the pipeline template.
 * @return A JSON string containing the updated full pipeline info, i.e. the fields "id", "label", "description",
 *         "persist", "visibility", "owner", "serializedRequests".
 * @throws ForbiddenException The pipeline template is not visible by the current user.
 * @throws BadRequestException The contents of the request is not valid.
 * @throws TemplateNotFoundException The pipeline template does not exist.
 * @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
 */
@RequestMapping(
        value = "pipelining/templates/{id}",
        method = RequestMethod.PUT,
        consumes = "application/json",
        produces = "application/json"
)
public ResponseEntity<String> update(
        @PathVariable(value = "id") long id,
        @RequestParam(value = "owner", required=false) String ownerName,
        @RequestParam(value = "visibility", required = false) String visibility,
        @RequestParam(value = "persist", required = false) String persist,
        @RequestBody(required = false) String pipelineInfo
) {
    try {
        Pipeline pipeline = pipelineDAO.findOneById(id);
        // Merge the fields from the request body, if one was given.
        if (pipelineInfo != null && !pipelineInfo.isEmpty()) {
            eu.freme.eservices.pipelines.serialization.Pipeline pipelineInfoObj = Serializer.templateFromJson(pipelineInfo);
            String newLabel = pipelineInfoObj.getLabel();
            if (newLabel != null && !newLabel.equals(pipeline.getLabel())) {
                pipeline.setLabel(newLabel);
            }
            String newDescription = pipelineInfoObj.getDescription();
            if (newDescription != null && !newDescription.equals(pipeline.getDescription())) {
                pipeline.setDescription(newDescription);
            }
            List<SerializedRequest> oldRequests = Serializer.fromJson(pipeline.getSerializedRequests());
            List<SerializedRequest> newRequests = pipelineInfoObj.getSerializedRequests();
            if (newRequests != null && !newRequests.equals(oldRequests)) {
                pipeline.setSerializedRequests(Serializer.toJson(newRequests));
            }
        }
        // Merge the query-parameter fields.
        if (visibility != null && !visibility.equals(pipeline.getVisibility().name())) {
            pipeline.setVisibility(OwnedResource.Visibility.getByString(visibility));
        }
        if (persist != null) {
            boolean toPersist = Boolean.parseBoolean(persist);
            if (toPersist != pipeline.isPersistent()) {
                pipeline.setPersist(toPersist);
            }
        }
        // Ownership transfer: the new owner must be an existing user.
        if (ownerName != null && !ownerName.equals(pipeline.getOwner().getName())) {
            User newOwner = userDAO.getRepository().findOneByName(ownerName);
            if (newOwner == null) {
                throw new BadRequestException("Can not change owner of the dataset. User \"" + ownerName + "\" does not exist.");
            }
            pipeline.setOwner(newOwner);
        }
        pipeline = pipelineDAO.save(pipeline);
        String response = Serializer.toJson(pipeline);
        return createOKJSONResponse(response);
    } catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
        logger.error(ex.getMessage(), ex);
        throw new ForbiddenException(ex.getMessage());
    } catch (OwnedResourceNotFoundException ex) {
        logger.error(ex.getMessage(), ex);
        throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
    } catch (JsonSyntaxException jsonException) {
        logger.error(jsonException.getMessage(), jsonException);
        String errormsg = jsonException.getCause() != null ? jsonException.getCause().getMessage() : jsonException.getMessage();
        throw new BadRequestException("Error detected in the JSON body contents: " + errormsg);
    } catch (eu.freme.common.exception.BadRequestException e) {
        logger.error(e.getMessage(), e);
        throw new BadRequestException(e.getMessage());
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        // throw an Internal Server exception if anything goes really wrong...
        throw new InternalServerErrorException(t.getMessage());
    }
}
/**
 * Reads (gets) the pipeline template with the given id.
 * @param id The id of the pipeline template to get.
 * @return The pipeline template with the given id, serialized as a JSON string.
 * @throws AccessDeniedException The pipeline template is not visible by the current user.
 * @throws TemplateNotFoundException The pipeline template does not exist.
 * @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
 */
@RequestMapping(
        value = "pipelining/templates/{id}",
        method = RequestMethod.GET,
        produces = "application/json"
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> read(@PathVariable(value = "id") long id) {
    try {
        Pipeline template = pipelineDAO.findOneById(id);
        return createOKJSONResponse(Serializer.toJson(template));
    } catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
        logger.error(ex.getMessage(), ex);
        throw new AccessDeniedException(ex.getMessage());
    } catch (OwnedResourceNotFoundException ex) {
        logger.error(ex.getMessage(), ex);
        throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
    } catch (Throwable t) {
        // Anything else is unexpected and reported as an internal server error.
        logger.error(t.getMessage(), t);
        throw new InternalServerErrorException(t.getMessage());
    }
}
/**
 * Reads (gets) all pipeline templates visible to the current user.
 * @return All visible pipeline templates as a JSON array string.
 * @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
 */
@RequestMapping(
        value = "pipelining/templates",
        method = RequestMethod.GET,
        produces = "application/json"
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> read() {
    try {
        List<Pipeline> visibleTemplates = pipelineDAO.findAllReadAccessible();
        return createOKJSONResponse(Serializer.templatesToJson(visibleTemplates));
    } catch (Throwable t) {
        // Anything else is unexpected and reported as an internal server error.
        logger.error(t.getMessage(), t);
        throw new InternalServerErrorException(t.getMessage());
    }
}
/**
 * Deletes the pipeline template with the given id.
 * @param id The id of the template to delete.
 * @return The message "The pipeline was successfully removed."
 * @throws ForbiddenException The pipeline template cannot be deleted by the current user.
 * @throws TemplateNotFoundException The pipeline template does not exist.
 * @throws InternalServerErrorException Something goes wrong that shouldn't go wrong.
 */
@RequestMapping(
        value = "pipelining/templates/{id}",
        method = RequestMethod.DELETE
)
@Secured({"ROLE_USER", "ROLE_ADMIN"})
public ResponseEntity<String> delete(@PathVariable("id") long id) {
    try {
        pipelineDAO.delete(pipelineDAO.findOneById(id));
        // BUGFIX: corrected the "sucessfully" typo in the user-facing message.
        return new ResponseEntity<>("The pipeline was successfully removed.", HttpStatus.OK);
    } catch (org.springframework.security.access.AccessDeniedException | InsufficientAuthenticationException ex) {
        logger.error(ex.getMessage(), ex);
        throw new ForbiddenException(ex.getMessage());
    } catch (OwnedResourceNotFoundException ex) {
        logger.error(ex.getMessage(), ex);
        throw new TemplateNotFoundException("Could not find the pipeline template with id " + id);
    } catch (Throwable t) {
        logger.error(t.getMessage(), t);
        // throw an Internal Server exception if anything goes really wrong...
        throw new InternalServerErrorException(t.getMessage());
    }
}
/**
 * Builds a 200 OK response carrying the given JSON payload with the JSON
 * content type taken from {@link RDFConstants}.
 *
 * @param contents the JSON body to return
 * @return a 200 OK {@code ResponseEntity} with a JSON Content-Type header
 */
private ResponseEntity<String> createOKJSONResponse(final String contents) {
    HttpHeaders responseHeaders = new HttpHeaders();
    responseHeaders.add(HttpHeaders.CONTENT_TYPE, RDFConstants.RDFSerialization.JSON.contentType());
    return new ResponseEntity<>(contents, responseHeaders, HttpStatus.OK);
}
} | freme-project/Broker | src/main/java/eu/freme/broker/eservices/Pipelines.java | Java | apache-2.0 | 19,529 |
#!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
# Metadata record for the acupuncture point "laogong" (PC8) on the
# pericardium channel; consumed as plain module-level constants.
SPELL = u'láogōng'    # pinyin spelling with tone marks
CN = u'劳宫'          # Chinese name of the point
NAME = u'laogong21'   # unique identifier of this point in the package
CHANNEL = 'pericardium'                              # owning channel (meridian)
CHANNEL_FULLNAME = 'PericardiumChannelofHand-Jueyin' # full channel name
SEQ = 'PC8'           # standard acupoint code

if __name__ == '__main__':
    pass
| sinotradition/meridian | meridian/acupoints/laogong21.py | Python | apache-2.0 | 241 |
<?php if ( ! defined('BASEPATH')) exit('No direct script access allowed');
/**
* @author Dennis A. Simpson
* @copyright 2015
* @version 2.2
* @abstract This is the controller for handling the Extended Contact Map(ECM) Notes.
*/
/**
 * Controller handling the Extended Contact Map (ECM) Notes: list, show,
 * create, edit and delete operations for the currently selected project.
 */
class Ecmnote extends CI_Controller
{
    /**
     * Enforces login and project selection, sets admin/guest template flags,
     * captures flash messages, and loads the language file and ECM note model.
     */
    function __construct()
    {
        parent::__construct();
        // Make sure our users are logged in.
        if (!$this->ion_auth->logged_in())
        {
            redirect('/auth/');
        }
        // Determine if a Project has been selected. If not force user to select one.
        if (isset($_SESSION['project_id']))
        {
            $this->information['project_id'] = $_SESSION['project_id'];
        }
        else
        {
            redirect('/');
        }
        // Determine if we are logged in with admin status.
        if($this->ion_auth->is_admin() || $this->ion_auth->is_group_admin())
        {
            $this->template->assign('admin', TRUE);
        }
        else
        {
            $this->template->assign('admin', FALSE);
        }
        // Determine if we are logged in as a guest.
        if($this->ion_auth->is_guest())
        {
            $this->template->assign('guest', TRUE);
        }
        else
        {
            $this->template->assign('guest', FALSE);
        }
        // Capture any flash messages or errors left by a previous request.
        if(isset($_SESSION['messages']))
        {
            $this->information['messages'] = $_SESSION['messages'];
            unset($_SESSION['messages']);
        }
        elseif(isset($_SESSION['errors']))
        {
            $this->information['errors'] = $_SESSION['errors'];
            unset($_SESSION['errors']);
        }
        $this->lang->load('auth_lang');
        $this->load->model( 'model_ecmnote' );
    }

    /**
     * Lists all ECM Notes of the current project, paginated.
     *
     * @param int $page Offset of the first record to show.
     */
    function index( $page = 0 )
    {
        $this->model_utilities->pagination( TRUE );
        $data_info = $this->model_ecmnote->lister( $page );
        // BUGFIX: this assignment previously referenced $id, which is never
        // defined in this method; use an empty string like the other actions.
        $this->information['who'] = '';
        $this->information['title'] = 'List of ECM Notes for Project';
        $this->template->assign( 'pager', $this->model_ecmnote->pager );
        $this->template->assign( 'ecmnote_fields', $this->model_ecmnote->fields( TRUE ) );
        $this->template->assign( 'ecmnote_data', $data_info );
        $this->template->assign( 'information', $this->information);
        $this->_render_page('list_ecmnote.tpl');
    }

    /**
     * Shows the comments of a single ECM Note. Input comes from ECM diagrams.
     *
     * @param int $id Primary key of the ECM note to show.
     */
    function show( $id )
    {
        $this->session->set_userdata('tag','ECM');
        $data = $this->model_ecmnote->get( $id );
        $fields = $this->model_ecmnote->fields( TRUE );
        $this->template->assign( 'id', $id );
        $this->template->assign( 'ecmnote_fields', $fields );
        $this->template->assign( 'data', $data );
        $this->template->assign( 'table_name', 'Ecmnote' );
        $this->template->assign( 'template', 'show_ecmnote' );
        $this->template->display( 'frame_admin.tpl' );
    }

    /**
     * Creates a new ECM Note for the current project.
     *
     * @param mixed $id Unused; kept for routing compatibility.
     */
    function create( $id = false )
    {
        if($this->ion_auth->is_guest()) //A guest user should never get this far but if they do send them packing.
        {
            redirect('ecmnote');
        }
        // BUGFIX: default values so the form re-render below does not read an
        // undefined $data_post on a plain GET request.
        $data_post = array('ecmnote' => '', 'comment' => '');
        if (isset($_POST) && !empty($_POST))
        {
            $this->form_validation->set_rules( 'ecmnote', lang('ecmnote'), 'required|max_length[45]' );
            $this->form_validation->set_rules( 'comment', lang('comment'), 'required' );
            if ( $this->form_validation->run() )
            {
                $data_post['ecmnote'] = $this->input->post( 'ecmnote' );
                $data_post['comment'] = $this->input->post( 'comment' );
                $data_post['project_id'] = $_SESSION['project_id'];
                // Reject duplicate note names within the project.
                if( $this->model_ecmnote->dupcheck($data_post['ecmnote']) )
                {
                    $id = $this->model_utilities->insert( 'ecmnote', $data_post );
                    $_SESSION['messages'] = "Update Successful";
                    redirect( 'ecmnote/edit/' . $id );
                }
                else
                {
                    // BUGFIX: corrected the "exsists" typo in the error message.
                    $this->information['errors'] = "ECM Note ".$data_post['ecmnote']. " already exists in project.";
                }
            }
            else
            {
                $this->information['errors'] = validation_errors();
            }
        }
        $this->information['who'] = '';
        $this->information['title'] = 'Create New ECM Note';
        $this->data['ecmnote'] = $this->form_validation->set_value('ecmnote', $data_post['ecmnote']);
        $this->data['comment'] = $this->form_validation->set_value('comment', $data_post['comment']);
        $this->template->assign( 'ecmnote_fields', $this->model_ecmnote->fields() );
        $this->template->assign( 'information', $this->information);
        $this->template->assign( 'data', $this->data );
        $this->_render_page('form_ecmnote.tpl');
    }

    /**
     * Edits an existing ECM Note.
     *
     * @param int|false $id Primary key of the note to edit.
     */
    function edit( $id = false )
    {
        $_SESSION['tag'] = 'idecm';
        $data = $this->model_ecmnote->get( $id );
        if (isset($_POST) && !empty($_POST))
        {
            $this->form_validation->set_rules( 'ecmnote', lang('ecmnote'), 'required|max_length[45]' );
            $this->form_validation->set_rules( 'comment', lang('comment'), 'required' );
            if ( $this->form_validation->run() )
            {
                $data_post['ecmnote'] = $this->input->post( 'ecmnote' );
                $data_post['comment'] = $this->input->post( 'comment' );
                $this->model_utilities->update( 'ecmnote', 'idecm', $id, $data_post );
                $_SESSION['messages'] = "Update Successful";
                redirect( 'ecmnote/edit/' . $id );
            }
            else
            {
                $this->information['errors'] = validation_errors();
            }
        }
        $this->information['who'] = $data['ecmnote'];
        $this->information['title'] = 'Edit ECM Note ';
        $this->data['ecmnote'] = $this->form_validation->set_value('ecmnote', $data['ecmnote']);
        $this->data['comment'] = $this->form_validation->set_value('comment', $data['comment']);
        $this->template->assign( 'ecmnote_fields', $this->model_ecmnote->fields() );
        $this->template->assign( 'information', $this->information);
        $this->template->assign( 'data', $this->data );
        $this->template->assign( 'action_mode', 'edit' );
        $this->_render_page('form_ecmnote.tpl');
    }

    /**
     * Deletes an ECM Note and all linked data, after confirmation.
     *
     * @param int|FALSE $id Primary key of the note to delete.
     */
    function delete( $id = FALSE )
    {
        if(!$this->ion_auth->is_admin()) //Only allowing system administrators to do this for now.
        {
            redirect('ecmnote');
        }
        $id = $this->uri->segment(3);
        $data = $this->model_ecmnote->get($id);
        if (isset($_POST) && !empty($_POST))
        {
            // Removed unused $post assignment that read input 'idme' without using it.
            if($this->ion_auth->delete_ecmnote($id)) //This will delete the ecmnote, pubmed, pubauth, and rules records.
            {
                $this->model_ecmnote->unlinked_molecules(); //This deletes any molecules that become orphans.
                $_SESSION['messages'] = 'ECM Note Deletion Successful';
                redirect('ecmnote');
            }
            $this->information['errors'] = 'ECM Note Deletion Unsuccessful';
        }
        $this->information['who'] = '';
        $this->information['title'] = 'Deletion of ECM Note '.$data['ecmnote'].' and ALL Linked Data. ';
        $this->template->assign('information', $this->information);
        $this->_render_page('biohazard.tpl');
    }

    /**
     * Renders a template inside the admin frame, passing common page objects.
     *
     * @param string $view   Template name to render inside the frame.
     * @param mixed  $data   Unused; kept for signature compatibility.
     * @param bool   $render When FALSE, returns the rendered HTML.
     */
    function _render_page($view, $data=null, $render=false)
    {
        $this->template->assign( 'logged_in', $this->ion_auth->logged_in( TRUE ) );
        $this->template->assign( 'user_id', $this->ion_auth->get_user_id());
        $this->template->assign( 'project', $_SESSION['project_name']);
        $this->template->assign( 'template', $view );
        $view_html = $this->template->display( 'frame_admin.tpl' );
        if (!$render) return $view_html;
    }
}//end of file brace.
| ModelAnnotation/model | application/controllers/Ecmnote.php | PHP | apache-2.0 | 10,296 |
/**
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.bpmn.core.internal;
import org.activiti.engine.ProcessEngines;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.service.component.ComponentContext;
import org.wso2.carbon.bpmn.core.ActivitiEngineBuilder;
import org.wso2.carbon.bpmn.core.BPMNServerHolder;
import org.wso2.carbon.bpmn.core.db.DataSourceHandler;
import org.wso2.carbon.bpmn.core.deployment.TenantManager;
import org.wso2.carbon.bpmn.core.exception.BPMNMetaDataTableCreationException;
import org.wso2.carbon.bpmn.core.exception.DatabaseConfigurationException;
import org.wso2.carbon.registry.core.service.RegistryService;
/**
 * OSGi declarative-services component that boots and tears down the BPMN
 * (Activiti) engine and wires the registry service into the server holder.
 *
 * @scr.component name="org.wso2.carbon.bpmn.core.internal.BPMNServiceComponent" immediate="true"
 * @scr.reference name="registry.service" interface="org.wso2.carbon.registry.core.service.RegistryService"
 * cardinality="1..1" policy="dynamic" bind="setRegistryService" unbind="unsetRegistryService"
 */
public class BPMNServiceComponent {

    private static Log log = LogFactory.getLog(BPMNServiceComponent.class);

    /**
     * Activates the component: builds the Activiti process engine, registers
     * it (plus a tenant manager) with the server holder, and initializes the
     * BPMN data source. Failures are logged; activation errors are not rethrown.
     */
    protected void activate(ComponentContext ctxt) {
        log.info("Initializing the BPMN core component...");
        try {
            BPMNServerHolder holder = BPMNServerHolder.getInstance();
            ActivitiEngineBuilder activitiEngineBuilder = new ActivitiEngineBuilder();
            holder.setEngine(activitiEngineBuilder.buildEngine());
            holder.setTenantManager(new TenantManager());

            DataSourceHandler dataSourceHandler = new DataSourceHandler();
            dataSourceHandler.initDataSource(activitiEngineBuilder.getDataSourceJndiName());
            dataSourceHandler.closeDataSource();
        } catch (BPMNMetaDataTableCreationException e) {
            log.error("Could not create BPMN checksum table", e);
        } catch (DatabaseConfigurationException e) {
            // BUGFIX: this branch previously logged the same message as the
            // checksum-table branch (copy/paste); report the actual failure.
            log.error("Could not read BPMN database configuration", e);
        } catch (Throwable e) {
            log.error("Failed to initialize the BPMN core component.", e);
        }
    }

    /** Deactivates the component and destroys all Activiti process engines. */
    protected void deactivate(ComponentContext ctxt) {
        log.info("Stopping the BPMN core component...");
        ProcessEngines.destroy();
    }

    /** Called by OSGi when the registry service becomes available. */
    protected void setRegistryService(RegistryService registrySvc) {
        if (log.isDebugEnabled()) {
            log.debug("RegistryService bound to the BPMN component");
        }
        BPMNServerHolder.getInstance().setRegistryService(registrySvc);
    }

    /** Called by OSGi when the registry service goes away. */
    public void unsetRegistryService(RegistryService registryService) {
        if (log.isDebugEnabled()) {
            log.debug("RegistryService unbound from the BPMN component");
        }
        BPMNServerHolder.getInstance().unsetRegistryService(registryService);
    }
}
| maheshika/carbon-business-process | components/bpmn/org.wso2.carbon.bpmn/src/main/java/org/wso2/carbon/bpmn/core/internal/BPMNServiceComponent.java | Java | apache-2.0 | 3,411 |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Microsoft.Build.Framework;
using Microsoft.Build.Utilities;
namespace Clarius.VisualStudio.Tasks
{
    /// <summary>
    /// MSBuild task that assigns Link/TargetPath metadata to included template
    /// files so they appear under their template directory in the project.
    /// </summary>
    public class AssignIncludeTargetPaths : Task
    {
        /// <summary>The items to assign target paths to. Each item is expected to
        /// carry TemplateDir, IncludedDir and FullPath metadata.</summary>
        [Required]
        public ITaskItem[] Files { get; set; }

        /// <summary>The input items with Link and TargetPath metadata assigned.</summary>
        // BUGFIX: without [Output], MSBuild cannot read this parameter back via
        // <Output TaskParameter="AssignedFiles" .../> in a target.
        [Output]
        public ITaskItem[] AssignedFiles { get; set; }

        public override bool Execute ()
        {
            this.AssignedFiles = this.Files.Select (i => new TaskItem (i)).ToArray ();
            foreach (var item in this.AssignedFiles) {
                // Link = TemplateDir + (FullPath relative to IncludedDir).
                item.SetMetadata ("Link",
                    Path.Combine (item.GetMetadata ("TemplateDir"),
                        new FileInfo (item.GetMetadata ("FullPath"))
                        .FullName.Replace (item.GetMetadata ("IncludedDir"), "")
                    ));

                // For compatibility with the built-in AssignTargetPath task that runs for content files.
                item.SetMetadata ("TargetPath", item.GetMetadata ("Link"));
            }

            return true;
        }
    }
}
| modulexcite/VisualStudio-1 | Tasks/AssignIncludeTargetPaths.cs | C# | apache-2.0 | 925 |
package com.xsing.demo;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.FragmentActivity;
/**
* Baseclass of all Activities of the Demo Application.
*
* @author Philipp Jahoda
*/
public abstract class DemoBase extends FragmentActivity {
// Month labels used as axis values in the demo charts.
// NOTE(review): "Okt" is a German abbreviation while the others are English
// ("Oct" expected) — looks like an oversight, but these strings are rendered
// as-is, so confirm before changing.
protected String[] mMonths = new String[] {
        "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dec"
};

// Placeholder party names used as labels in the chart demos.
protected String[] mParties = new String[] {
        "Party A", "Party B", "Party C", "Party D", "Party E", "Party F", "Party G", "Party H",
        "Party I", "Party J", "Party K", "Party L", "Party M", "Party N", "Party O", "Party P",
        "Party Q", "Party R", "Party S", "Party T", "Party U", "Party V", "Party W", "Party X",
        "Party Y", "Party Z"
};
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
protected float getRandom(float range, float startsfrom) {
return (float) (Math.random() * range) + startsfrom;
}
@Override
public void onBackPressed() {
super.onBackPressed();
// overridePendingTransition(R.anim.move_left_in_activity, R.anim.move_right_out_activity);
}
} | xsingHu/xs-android-architecture | study-view/xs-MPAndroidChartDemo/demo/src/main/java/com/xsing/demo/DemoBase.java | Java | apache-2.0 | 1,322 |
package gui
import (
"bytes"
"crypto/tls"
"fmt"
"io/ioutil"
"net"
"net/http"
"path/filepath"
"strconv"
"strings"
"github.com/spolabs/spo/src/cipher"
"github.com/spolabs/spo/src/daemon"
"github.com/spolabs/spo/src/wallet"
"github.com/spolabs/spo/src/util/file"
wh "github.com/spolabs/spo/src/util/http" //http,json helpers
"github.com/spolabs/spo/src/util/logging"
)
var (
logger = logging.MustGetLogger("gui")
)
const (
resourceDir = "dist/"
devDir = "dev/"
indexPage = "index.html"
)
// Server exposes an HTTP API over a plain-TCP or TLS listener.
type Server struct {
	mux      *http.ServeMux // registered route handlers
	listener net.Listener   // bound by Create / CreateHTTPS
	done     chan struct{}  // closed when Serve returns, so Shutdown can block on it
}
// create builds a Server with all routes registered. The host argument is
// accepted for signature parity with Create/CreateHTTPS but is not used here;
// the listener is bound by those callers.
func create(host, staticDir string, daemon *daemon.Daemon) (*Server, error) {
	// Locate the static web resources (dist/ for release builds, dev/ otherwise).
	appLoc, err := file.DetermineResourcePath(staticDir, resourceDir, devDir)
	if err != nil {
		return nil, err
	}
	logger.Info("Web resources directory: %s", appLoc)
	return &Server{
		mux:  NewServerMux(appLoc, daemon),
		done: make(chan struct{}),
	}, nil
}
// Create creates a new Server instance that listens on HTTP.
// Serve must be called afterwards to start accepting connections.
func Create(host, staticDir string, daemon *daemon.Daemon) (*Server, error) {
	s, err := create(host, staticDir, daemon)
	if err != nil {
		return nil, err
	}
	logger.Warning("HTTPS not in use!")
	// Bind the plain-TCP listener immediately so bind errors surface here.
	s.listener, err = net.Listen("tcp", host)
	if err != nil {
		return nil, err
	}
	return s, nil
}
// CreateHTTPS creates a new Server instance that listens on HTTPS using the
// given PEM-encoded certificate and key files.
// Serve must be called afterwards to start accepting connections.
func CreateHTTPS(host, staticDir string, daemon *daemon.Daemon, certFile, keyFile string) (*Server, error) {
	s, err := create(host, staticDir, daemon)
	if err != nil {
		return nil, err
	}
	logger.Info("Using %s for the certificate", certFile)
	logger.Info("Using %s for the key", keyFile)
	cert, err := tls.LoadX509KeyPair(certFile, keyFile)
	if err != nil {
		return nil, err
	}
	// Bind a TLS listener carrying the single loaded certificate.
	s.listener, err = tls.Listen("tcp", host, &tls.Config{
		Certificates: []tls.Certificate{cert},
	})
	if err != nil {
		return nil, err
	}
	return s, nil
}
// Serve serves the web interface on the configured host. It blocks until the
// listener is closed (via Shutdown) or serving fails.
func (s *Server) Serve() error {
	logger.Info("Starting web interface on %s", s.listener.Addr())
	defer logger.Info("Web interface closed")
	// Closing done signals Shutdown that Serve has fully returned.
	defer close(s.done)
	if err := http.Serve(s.listener, s.mux); err != nil {
		// NOTE(review): http.Serve returns a "use of closed network connection"
		// error (not http.ErrServerClosed, which http.Server.Shutdown produces)
		// when Shutdown closes the listener — confirm whether a normal shutdown
		// surfaces here as a non-nil error.
		if err != http.ErrServerClosed {
			return err
		}
	}
	return nil
}
// Shutdown closes the HTTP service. This can only be called after Serve or ServeHTTPS has been called.
func (s *Server) Shutdown() {
	logger.Info("Shutting down web interface")
	defer logger.Info("Web interface shut down")
	// Closing the listener forces http.Serve to return; then wait for it.
	s.listener.Close()
	<-s.done
}
// NewServerMux creates an http.ServeMux with all API and static-file handlers
// registered. appLoc is the directory holding the web resources.
func NewServerMux(appLoc string, daemon *daemon.Daemon) *http.ServeMux {
	mux := http.NewServeMux()
	mux.HandleFunc("/", newIndexHandler(appLoc))

	// Register a file-server route for every entry in the resources directory.
	fileInfos, err := ioutil.ReadDir(appLoc)
	if err != nil {
		// Previously this error was silently discarded; log it so a missing or
		// unreadable resources directory is diagnosable. The API routes below
		// are still registered.
		logger.Error("Unable to read static resources directory %s: %v", appLoc, err)
	}
	for _, fileInfo := range fileInfos {
		route := fmt.Sprintf("/%s", fileInfo.Name())
		if fileInfo.IsDir() {
			route = route + "/"
		}
		mux.Handle(route, http.FileServer(http.Dir(appLoc)))
	}

	mux.HandleFunc("/logs", getLogsHandler(&daemon.LogBuff))
	mux.HandleFunc("/version", versionHandler(daemon.Gateway))
	// get set of unspent outputs
	mux.HandleFunc("/outputs", getOutputsHandler(daemon.Gateway))
	// get balance of addresses
	mux.HandleFunc("/balance", getBalanceHandler(daemon.Gateway))
	// Wallet interface
	RegisterWalletHandlers(mux, daemon.Gateway)
	// Blockchain interface
	RegisterBlockchainHandlers(mux, daemon.Gateway)
	// Network stats interface
	RegisterNetworkHandlers(mux, daemon.Gateway)
	// Transaction handler
	RegisterTxHandlers(mux, daemon.Gateway)
	// UxOut api handler
	RegisterUxOutHandlers(mux, daemon.Gateway)
	// explorer handler
	RegisterExplorerHandlers(mux, daemon.Gateway)
	return mux
}
// newIndexHandler returns an http.HandlerFunc serving index.html from appLoc.
// Any path other than "/" is answered with 404.
func newIndexHandler(appLoc string) http.HandlerFunc {
	// The index page location never changes, so compute it once.
	page := filepath.Join(appLoc, indexPage)
	return func(w http.ResponseWriter, r *http.Request) {
		logger.Debug("Serving index page: %s", page)
		if r.URL.Path != "/" {
			wh.Error404(w)
			return
		}
		http.ServeFile(w, r, page)
	}
}
// getOutputsHandler gets utxos based on the filters in the url params.
// mode: GET
// url: /outputs?addrs=[:addrs]&hashes=[:hashes]
// If addrs and hashes are not specified, all unspent outputs are returned.
// If both addrs and hashes are specified, both filters must be matched.
// If only one filter is specified, outputs matching that filter are returned.
func getOutputsHandler(gateway *daemon.Gateway) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if r.Method != http.MethodGet {
			wh.Error405(w)
			return
		}
		var addrs []string
		var hashes []string
		// Strip surrounding whitespace from each comma-separated value.
		trimSpace := func(vs []string) []string {
			for i := range vs {
				vs[i] = strings.TrimSpace(vs[i])
			}
			return vs
		}
		addrStr := r.FormValue("addrs")
		if addrStr != "" {
			addrs = trimSpace(strings.Split(addrStr, ","))
		}
		hashStr := r.FormValue("hashes")
		if hashStr != "" {
			hashes = trimSpace(strings.Split(hashStr, ","))
		}
		// An empty filter set means "return all unspent outputs".
		filters := []daemon.OutputsFilter{}
		if len(addrs) > 0 {
			filters = append(filters, daemon.FbyAddresses(addrs))
		}
		if len(hashes) > 0 {
			filters = append(filters, daemon.FbyHashes(hashes))
		}
		outs, err := gateway.GetUnspentOutputs(filters...)
		if err != nil {
			logger.Error("get unspent outputs failed: %v", err)
			wh.Error500(w)
			return
		}
		wh.SendOr404(w, outs)
	}
}
// getBalanceHandler returns the combined balance of a set of addresses.
// mode: GET
// url: /balance?addrs=addr1,addr2
// Responds 400 if any address fails base58 decoding.
func getBalanceHandler(gateway *daemon.Gateway) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if r.Method != http.MethodGet {
			wh.Error405(w)
			return
		}
		addrsParam := r.FormValue("addrs")
		addrsStr := strings.Split(addrsParam, ",")
		addrs := make([]cipher.Address, 0, len(addrsStr))
		for _, addr := range addrsStr {
			// TrimSpace rather than Trim(addr, " ") so tabs/newlines are
			// stripped too, matching getOutputsHandler's trimming.
			addr = strings.TrimSpace(addr)
			a, err := cipher.DecodeBase58Address(addr)
			if err != nil {
				wh.Error400(w, fmt.Sprintf("address %s is invalid: %v", addr, err))
				return
			}
			addrs = append(addrs, a)
		}
		bals, err := gateway.GetBalanceOfAddrs(addrs)
		if err != nil {
			logger.Error("Get balance failed: %v", err)
			wh.Error500(w)
			return
		}
		// Sum confirmed and predicted balances across all addresses.
		var balance wallet.BalancePair
		for _, bal := range bals {
			balance.Confirmed = balance.Confirmed.Add(bal.Confirmed)
			balance.Predicted = balance.Predicted.Add(bal.Predicted)
		}
		wh.SendOr404(w, balance)
	}
}
// versionHandler serves the node's build information on GET requests.
func versionHandler(gateway *daemon.Gateway) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if r.Method == http.MethodGet {
			wh.SendOr404(w, gateway.GetBuildInfo())
			return
		}
		wh.Error405(w)
	}
}
/*
attrActualLog strips the ANSI color escape codes the logger wraps around lines:
	"\u001b[36m[spo.daemon:DEBUG] Trying to connect to 47.88.33.156:6000\u001b[0m"
becomes
	"[spo.daemon:DEBUG] Trying to connect to 47.88.33.156:6000"
Lines carrying no escape character are returned unchanged. Lines too short to
contain both a color prefix and suffix are also returned unchanged; the
previous implementation sliced logInfo[5:len-4] on any line longer than 5
bytes, which panicked with "slice bounds out of range" on truncated colored
lines of length 6-8.
*/
func attrActualLog(logInfo string) string {
	const (
		colorPrefixLen = 5 // e.g. "\u001b[36m"
		colorSuffixLen = 4 // "\u001b[0m"
	)
	if !strings.Contains(logInfo, "\u001b") {
		return logInfo
	}
	if strings.HasPrefix(logInfo, "[spolabs") {
		// Only the trailing reset code is present on these lines.
		return logInfo[:len(logInfo)-colorSuffixLen]
	}
	if len(logInfo) >= colorPrefixLen+colorSuffixLen {
		return logInfo[colorPrefixLen : len(logInfo)-colorSuffixLen]
	}
	return logInfo
}
// getLogsHandler returns recent log lines from the in-memory log buffer.
// mode: GET
// url: /logs?lines=[:n]&include=[:keyword]&exclude=[:keyword]
func getLogsHandler(logbuf *bytes.Buffer) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if r.Method != http.MethodGet {
			wh.Error405(w)
			return
		}
		var err error
		defaultLineNum := 1000 // default line numbers
		linenum := defaultLineNum
		if lines := r.FormValue("lines"); lines != "" {
			linenum, err = strconv.Atoi(lines)
			if err != nil {
				// Fall back to the default on a non-numeric "lines" value.
				linenum = defaultLineNum
			}
		}
		keyword := r.FormValue("include")
		excludeKeyword := r.FormValue("exclude")
		logs := []string{}
		logList := strings.Split(logbuf.String(), "\n")
		for _, logInfo := range logList {
			// The exclude filter wins over the include filter.
			if excludeKeyword != "" && strings.Contains(logInfo, excludeKeyword) {
				continue
			}
			if keyword != "" && !strings.Contains(logInfo, keyword) {
				continue
			}
			// Stop once the requested number of lines has been collected.
			if len(logs) >= linenum {
				logger.Debug("logs size %d,total size:%d", len(logs), len(logList))
				break
			}
			// Strip ANSI color codes; drop lines that end up empty.
			log := attrActualLog(logInfo)
			if "" != log {
				logs = append(logs, log)
			}
		}
		wh.SendOr404(w, logs)
	}
}
| spaco/spo | src/gui/http.go | GO | apache-2.0 | 8,205 |
// Code generated by go-swagger; DO NOT EDIT.
package operations
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the generate command
import (
"errors"
"net/url"
golangswaggerpaths "path"
"strings"
)
// CommissionSiteURL generates an URL for the commission site operation.
// This type is generated by go-swagger; prefer regenerating over hand edits.
type CommissionSiteURL struct {
	ID string

	_basePath string
	// avoid unkeyed usage
	_ struct{}
}

// WithBasePath sets the base path for this url builder, only required when it's different from the
// base path specified in the swagger spec.
// When the value of the base path is an empty string
func (o *CommissionSiteURL) WithBasePath(bp string) *CommissionSiteURL {
	o.SetBasePath(bp)
	return o
}

// SetBasePath sets the base path for this url builder, only required when it's different from the
// base path specified in the swagger spec.
// When the value of the base path is an empty string
func (o *CommissionSiteURL) SetBasePath(bp string) {
	o._basePath = bp
}

// Build a url path and query string
func (o *CommissionSiteURL) Build() (*url.URL, error) {
	var result url.URL

	var _path = "/sites/{id}"

	// Substitute the required path parameter; error if it was not provided.
	id := o.ID
	if id != "" {
		_path = strings.Replace(_path, "{id}", id, -1)
	} else {
		return nil, errors.New("ID is required on CommissionSiteURL")
	}

	_basePath := o._basePath
	if _basePath == "" {
		_basePath = "/"
	}
	result.Path = golangswaggerpaths.Join(_basePath, _path)

	return &result, nil
}

// Must is a helper function to panic when the url builder returns an error
func (o *CommissionSiteURL) Must(u *url.URL, err error) *url.URL {
	if err != nil {
		panic(err)
	}
	if u == nil {
		panic("url can't be nil")
	}
	return u
}

// String returns the string representation of the path with query string
func (o *CommissionSiteURL) String() string {
	return o.Must(o.Build()).String()
}

// BuildFull builds a full url with scheme, host, path and query string
func (o *CommissionSiteURL) BuildFull(scheme, host string) (*url.URL, error) {
	if scheme == "" {
		return nil, errors.New("scheme is required for a full url on CommissionSiteURL")
	}
	if host == "" {
		return nil, errors.New("host is required for a full url on CommissionSiteURL")
	}

	base, err := o.Build()
	if err != nil {
		return nil, err
	}

	base.Scheme = scheme
	base.Host = host
	return base, nil
}

// StringFull returns the string representation of a complete url
func (o *CommissionSiteURL) StringFull(scheme, host string) string {
	return o.Must(o.BuildFull(scheme, host)).String()
}
| ceftb/api | api/restapi/operations/commission_site_urlbuilder.go | GO | apache-2.0 | 2,521 |
namespace Code.Library
{
    using System;
    using System.Collections.Generic;
    using System.ComponentModel;
    using System.Data;

    /// <summary>
    /// All IEnumerable extensions.
    /// </summary>
    public static class IEnumerableExtensions
    {
        /// <summary>
        /// Converts a sequence into a <see cref="DataTable"/>: one column per public
        /// property of <typeparamref name="T"/> (nullable property types are unwrapped
        /// so the column holds the underlying type), one row per element, and null
        /// property values stored as <see cref="DBNull"/>.
        /// </summary>
        /// <typeparam name="T">The element type of the IEnumerable item.</typeparam>
        /// <param name="data">The sequence to convert; must not be null.</param>
        /// <returns>A <see cref="DataTable"/> mirroring the sequence.</returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="data"/> is null.</exception>
        public static DataTable ToDataTable<T>(this IEnumerable<T> data)
        {
            if (data == null)
            {
                // Fail fast with a clear message instead of a NullReferenceException below.
                throw new ArgumentNullException("data");
            }

            var properties = TypeDescriptor.GetProperties(typeof(T));
            var table = new DataTable();
            foreach (PropertyDescriptor prop in properties)
            {
                table.Columns.Add(prop.Name, Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType);
            }

            foreach (T item in data)
            {
                var row = table.NewRow();
                foreach (PropertyDescriptor prop in properties)
                {
                    row[prop.Name] = prop.GetValue(item) ?? DBNull.Value;
                }

                table.Rows.Add(row);
            }

            return table;
        }
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/ml/v1beta1/model_service.proto
package com.google.cloud.ml.api.v1beta1;
/**
 * Read-only accessor interface for {@code google.cloud.ml.v1beta1.Model},
 * implemented by both the message class and its builder.
 *
 * <p>Generated by the protocol buffer compiler — do not edit by hand; change
 * {@code model_service.proto} and regenerate instead.
 */
public interface ModelOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.cloud.ml.v1beta1.Model)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * Required. The name specified for the model when it was created.
   * The model name must be unique within the project it is created in.
   * </pre>
   *
   * <code>optional string name = 1;</code>
   */
  java.lang.String getName();
  /**
   * <pre>
   * Required. The name specified for the model when it was created.
   * The model name must be unique within the project it is created in.
   * </pre>
   *
   * <code>optional string name = 1;</code>
   */
  com.google.protobuf.ByteString
      getNameBytes();

  /**
   * <pre>
   * Optional. The description specified for the model when it was created.
   * </pre>
   *
   * <code>optional string description = 2;</code>
   */
  java.lang.String getDescription();
  /**
   * <pre>
   * Optional. The description specified for the model when it was created.
   * </pre>
   *
   * <code>optional string description = 2;</code>
   */
  com.google.protobuf.ByteString
      getDescriptionBytes();

  /**
   * <pre>
   * Output only. The default version of the model. This version will be used to
   * handle prediction requests that do not specify a version.
   * You can change the default version by calling
   * [projects.methods.versions.setDefault](/ml/reference/rest/v1beta1/projects.models.versions/setDefault).
   * </pre>
   *
   * <code>optional .google.cloud.ml.v1beta1.Version default_version = 3;</code>
   */
  boolean hasDefaultVersion();
  /**
   * <pre>
   * Output only. The default version of the model. This version will be used to
   * handle prediction requests that do not specify a version.
   * You can change the default version by calling
   * [projects.methods.versions.setDefault](/ml/reference/rest/v1beta1/projects.models.versions/setDefault).
   * </pre>
   *
   * <code>optional .google.cloud.ml.v1beta1.Version default_version = 3;</code>
   */
  com.google.cloud.ml.api.v1beta1.Version getDefaultVersion();
  /**
   * <pre>
   * Output only. The default version of the model. This version will be used to
   * handle prediction requests that do not specify a version.
   * You can change the default version by calling
   * [projects.methods.versions.setDefault](/ml/reference/rest/v1beta1/projects.models.versions/setDefault).
   * </pre>
   *
   * <code>optional .google.cloud.ml.v1beta1.Version default_version = 3;</code>
   */
  com.google.cloud.ml.api.v1beta1.VersionOrBuilder getDefaultVersionOrBuilder();
}
| speedycontrol/googleapis | output/com/google/cloud/ml/api/v1beta1/ModelOrBuilder.java | Java | apache-2.0 | 2,745 |
<?php
// Functional test for the say_hello() function exported by the zendapi
// extension: capture everything it echoes and compare against the expected
// transcript; exit non-zero on mismatch so the harness flags a failure.
ob_start();
// here we test argument passed
if (function_exists("say_hello")) {
    say_hello();              // no argument: uses the default name "zapi"
    say_hello("unicornteam");
    say_hello("zzu_softboy");
    say_hello(3.14); // here will convert into string
}
$ret = trim(ob_get_clean());
$expect = <<<'EOF'
hello, zapi
hello, unicornteam
hello, zzu_softboy
hello, 3.14
EOF;
if ($ret != $expect) {
    echo "got: ".$ret;
    exit(1);
}
| qcoreteam/zendapi | tests/lang/func/FunctionDefaultArgTest.phpt | PHP | apache-2.0 | 396 |
package com.lzy.imagepicker.adapter;
import android.support.annotation.IdRes;
import android.support.v7.widget.RecyclerView;
import android.view.View;
/**
 * Copyright (C) 2016, Shenzhen Hongniao Network Technology Co., Ltd. All rights reserved.
 *
 * <p>A {@link RecyclerView.ViewHolder} with a typed convenience lookup for
 * child views of the item view.
 *
 * <p>Created by Robi on 2017/02/21.
 *
 * @version 1.0.0
 */
public class ImageViewHolder extends RecyclerView.ViewHolder {

    public ImageViewHolder(View itemView) {
        super(itemView);
    }

    /**
     * Finds a child view of the item view by id and casts it to the caller's
     * expected type.
     *
     * @param resId the id of the view to look up
     * @param <T>   the expected view type; the cast is unchecked, so a wrong
     *              expectation fails with a {@link ClassCastException} at the
     *              call site
     * @return the child view, or null if no view with the given id exists
     */
    @SuppressWarnings("unchecked")
    public <T extends View> T v(@IdRes int resId) {
        return (T) itemView.findViewById(resId);
    }
}
| angcyo/RLibrary | imagepicker/src/main/java/com/lzy/imagepicker/adapter/ImageViewHolder.java | Java | apache-2.0 | 689 |
package autorest
import (
"fmt"
"net/http"
"time"
)
const (
	// The default delay between polling requests (only used if the http.Request lacks a well-formed
	// Retry-After header).
	DefaultPollingDelay = 60 * time.Second

	// The default total polling duration.
	DefaultPollingDuration = 10 * time.Minute
)

// PollingMode sets how, if at all, clients composed with Client will poll.
type PollingMode string

const (
	// Poll until reaching a maximum number of attempts
	PollUntilAttempts PollingMode = "poll-until-attempts"

	// Poll until a specified time.Duration has passed
	PollUntilDuration PollingMode = "poll-until-duration"

	// Do not poll at all
	DoNotPoll PollingMode = "not-at-all"
)

// RequestInspector defines a single method that returns a PrepareDecorator used to inspect the
// http.Request prior to sending.
type RequestInspector interface {
	WithInspection() PrepareDecorator
}

// ResponseInspector defines a single method that returns a ResponseDecorator used to inspect the
// http.Response prior to responding.
type ResponseInspector interface {
	ByInspecting() RespondDecorator
}

var (
	// Generated clients should compose using the DefaultClient instead of allocating a new Client
	// instance. Users can then establish widely used Client defaults by replacing or modifying the
	// DefaultClient before instantiating a generated client.
	DefaultClient = &Client{PollingMode: PollUntilDuration, PollingDuration: DefaultPollingDuration}
)
// Client is the base for autorest generated clients. It provides default, "do nothing"
// implementations of an Authorizer, RequestInspector, and ResponseInspector. It also returns the
// standard, undecorated http.Client as a default Sender. Lastly, it supports basic request polling,
// limited to a maximum number of attempts or a specified duration.
//
// Most customization of generated clients is best achieved by supplying a custom Authorizer, custom
// RequestInspector, and / or custom ResponseInspector. Users may log requests, implement circuit
// breakers (see https://msdn.microsoft.com/en-us/library/dn589784.aspx) or otherwise influence
// sending the request by providing a decorated Sender.
type Client struct {
	Authorizer        Authorizer        // adds authorization to outgoing requests; nil means none
	Sender            Sender            // transport used by Do; nil means a default http.Client
	RequestInspector  RequestInspector  // optional hook over outgoing requests
	ResponseInspector ResponseInspector // optional hook over incoming responses

	PollingMode     PollingMode   // whether and how to poll (attempts, duration, or not at all)
	PollingAttempts int           // max attempts, used when PollingMode is PollUntilAttempts
	PollingDuration time.Duration // max total time, used when PollingMode is PollUntilDuration
}
// ShouldPoll returns true if the client allows polling and the passed http.Response requires it,
// otherwise it returns false.
func (c *Client) ShouldPoll(resp *http.Response, codes ...int) bool {
	if c.DoNotPoll() {
		return false
	}
	return ResponseRequiresPolling(resp, codes...)
}
// PollAsNeeded is a convenience method that will poll if the passed http.Response requires it.
func (c *Client) PollAsNeeded(resp *http.Response, codes ...int) (*http.Response, error) {
	if !ResponseRequiresPolling(resp, codes...) {
		return resp, nil
	}

	// Derive the polling request from the response (e.g. its Location header).
	req, err := CreatePollingRequest(resp, c)
	if err != nil {
		return resp, fmt.Errorf("autorest: Unable to create polling request for response to %s (%v)",
			resp.Request.URL, err)
	}

	// Honor a server-suggested Retry-After delay, falling back to the default.
	delay := GetRetryDelay(resp, DefaultPollingDelay)

	if c.PollForAttempts() {
		return PollForAttempts(c, req, delay, c.PollingAttempts, codes...)
	} else if c.PollForDuration() {
		return PollForDuration(c, req, delay, c.PollingDuration, codes...)
	} else {
		return resp, fmt.Errorf("autorest: Polling for %s is required, but polling is disabled", req.URL)
	}
}
// DoNotPoll returns true if the client should not poll, false otherwise.
// An unset (empty) PollingMode counts as "do not poll".
func (c Client) DoNotPoll() bool {
	return c.PollingMode == "" || c.PollingMode == DoNotPoll
}
// PollForAttempts returns true if the PollingMode is set to PollUntilAttempts, false otherwise.
func (c Client) PollForAttempts() bool {
	return c.PollingMode == PollUntilAttempts
}
// PollForDuration returns true if the PollingMode is set to PollUntilDuration, false otherwise.
func (c Client) PollForDuration() bool {
	return c.PollingMode == PollUntilDuration
}
// Do is a convenience method that invokes the Sender of the Client. If no Sender is set, it will
// be set to the default http.Client.
func (c *Client) Do(r *http.Request) (*http.Response, error) {
	if c.Sender == nil {
		// NOTE(review): this lazy assignment mutates the Client, which is not
		// goroutine-safe if a single Client (e.g. DefaultClient) is shared
		// across goroutines — confirm intended usage.
		c.Sender = &http.Client{}
	}
	return c.Sender.Do(r)
}
// WithAuthorization is a convenience method that returns the WithAuthorization PrepareDecorator
// from the current Authorizer. If no Authorizer is set, it sets it to the NullAuthorizer.
func (c *Client) WithAuthorization() PrepareDecorator {
	if c.Authorizer == nil {
		// Lazily default to the no-op authorizer (note: mutates the Client).
		c.Authorizer = NullAuthorizer{}
	}
	return c.Authorizer.WithAuthorization()
}
// WithInspection is a convenience method that passes the request to the supplied RequestInspector,
// if present, or returns the WithNothing PrepareDecorator otherwise.
func (c *Client) WithInspection() PrepareDecorator {
	if c.RequestInspector != nil {
		return c.RequestInspector.WithInspection()
	}
	return WithNothing()
}
// ByInspecting is a convenience method that passes the response to the supplied ResponseInspector,
// if present, or returns the ByIgnoring RespondDecorator otherwise.
func (c *Client) ByInspecting() RespondDecorator {
	if c.ResponseInspector != nil {
		return c.ResponseInspector.ByInspecting()
	}
	return ByIgnoring()
}
| oaastest/go-autorest | autorest/client.go | GO | apache-2.0 | 5,327 |
# -*- coding: utf-8 -*-
from flask import Flask, jsonify, request, abort, make_response
from futu_server_api import *
from db import save_update_token
from db import delete_tokens
from db import list_cards
import logging
import logging.config
import json
app = Flask(__name__)
logging.config.fileConfig('./conf/log.ini')
no_db_logger = logging.getLogger()
def check_parameters(pjson):
    """Validate the request body and build a futu API client from it.

    Aborts the request with HTTP 400 if the body is missing or lacks any of
    the required keys; otherwise returns a ``client`` bound to the account,
    card and appid found in the body.
    """
    required = ('app_account', 'card', 'appid')
    if not pjson or any(key not in pjson for key in required):
        no_db_logger.info('No Parameter')
        abort(400)
    return client(pjson['app_account'], pjson['card'], pjson['appid'])
def log_handler(myjson, mytitle):
    """Build a log line describing an API response.

    Returns the warning text when the response carries a ``ClientWarning``,
    ``'SUCCESS'`` on a zero ``result_code``, and otherwise a failure line
    containing the error message and the current request parameters.
    ``mytitle`` is unused but kept for call-site compatibility.
    """
    if 'ClientWarning' in myjson:
        return '%s' % myjson['ClientWarning']
    if myjson['result_code'] == 0:
        return 'SUCCESS'
    return 'FAIL ,REASON OF FAILURE:%s ,PARAMETER:%s' % (myjson['error_msg'], request.json)
@app.route('/')
def hello_world():
    # Liveness endpoint; also logs that the server is reachable.
    no_db_logger.info('server start#####')
    return 'hello 22222222 world!'
@app.route('/api/v1/tradetoken', methods=['POST'])
def trade_token():
    """Unlock trading with the trade password and persist the trade token."""
    # NOTE(review): these keys are read before check_parameters validates the
    # body, so a missing key raises KeyError (HTTP 500) instead of a clean 400.
    trade_pswd = request.json['trade_pswd']
    account = request.json['app_account']
    card = request.json['card']
    appid = request.json['appid']
    cc = check_parameters(request.json)
    message = cc.get_trade_token(trade_pswd)
    if message['result_code'] != 0 and message['error_msg'] == 'didn\'t get accesstoken':
        no_db_logger.info('didn\'t get accesstoken')
        return json.dumps({'result_code':2,'error_msg':'didn\'t get accesstoken'}, ensure_ascii=False)
    if message['result_code'] == 0:
        token = message['data']['trade_token']
        # Persist the trade token (final True flags it as a trade token).
        save_update_token(account, appid, None, token, card, True)
    return jsonify(**message)
@app.route('/api/v1/account', methods=['POST'])
def get_account_detail():
    """Return the account details for the card identified in the request."""
    api_client = check_parameters(request.json)
    message = api_client.get_account_detail()
    no_db_logger.info(log_handler(message, '获取账户信息'))
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/account/cash', methods=['POST'])
def get_account_cash():
    """Return the cash position of the account identified in the request."""
    cc = check_parameters(request.json)
    message = cc.get_account_cash()
    logtext = log_handler(message, '获取账户现金')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/account/portfolio', methods=['POST'])
def get_account_portfolio():
    """Return the holdings (portfolio) of the account in the request."""
    cc = check_parameters(request.json)
    message = cc.get_account_portfolio()
    logtext = log_handler(message, '获取账户持仓')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/get_list_orders', methods=['POST'])
def get_list_orders():
    """Return the order list for the account in the request."""
    # NOTE(review): date_begin/date_end are read (making them required in the
    # body) but are never forwarded to cc.get_list_orders() — confirm whether
    # the underlying API accepts a date range and wire them through if so.
    date_begin = request.json['date_begin']
    date_end = request.json['date_end']
    cc = check_parameters(request.json)
    message = cc.get_list_orders()
    logtext = log_handler(message, '获取订单列表')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/get_list_trades', methods=['POST'])
def get_list_trades():
    """Return the trade (execution) list for the account in the request."""
    cc = check_parameters(request.json)
    message = cc.get_list_trades()
    logtext = log_handler(message, '获取交易列表')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/place_order', methods=['POST'])
def place_order():
    """Place an order for the account in the request.

    Body keys: ``code`` (instrument), ``quantity``, ``price``, ``side``
    (buy/sell) and ``type`` (order type); all are required.
    """
    code = request.json['code']
    quantity = request.json['quantity']
    price = request.json['price']
    side = request.json['side']
    ltype = request.json['type']
    cc = check_parameters(request.json)
    message = cc.place_order(code, quantity, price, side, ltype)
    logtext = log_handler(message, '下单')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False)
@app.route('/api/v1/change_order', methods=['POST'])
def change_order():
    """Amend an existing order's quantity and/or price."""
    order_id = request.json['order_id']
    quantity = request.json['quantity']
    price = request.json['price']
    cc = check_parameters(request.json)
    message = cc.change_order(order_id, quantity, price)
    logtext = log_handler(message, '改单')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False)
# NOTE(review): "cancle" is a typo for "cancel" in both the route and the
# function name, but both are part of the public API surface — coordinate with
# clients before renaming.
@app.route('/api/v1/cancle_order', methods=['POST'])
def cancle_order():
    """Cancel an existing order by id."""
    order_id = request.json['order_id']
    cc = check_parameters(request.json)
    message = cc.cancel_order(order_id)
    logtext = log_handler(message, '撤单')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False)
# NOTE(review): the route reads '/ap1/v1/...' — almost certainly a typo for
# '/api/v1/...', but changing it would break existing callers; fix only in
# coordination with them.
@app.route('/ap1/v1/save_token', methods=['POST'])
def save_token():
    """Insert or update a stored market token for an account/card pair."""
    account = request.json['app_account']
    appid = request.json['appid']
    market = request.json['market']
    token = request.json['token']
    card = request.json['card']
    card_desc = request.json['text']
    # False → this is an access token, not a trade token.
    DB_result = save_update_token(account, appid, market, token, card, False, card_desc)
    if DB_result == 'success':
        no_db_logger.info('token save success')
        return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
    else:
        no_db_logger.info('token save fail')
        return json.dumps({'result_code':1,'error_msg':'token保存失败'}, ensure_ascii=False)
@app.route('/api/v1/delete_token', methods=['POST'])
def delete_token():
    """Delete every stored token for the given account/appid pair."""
    appid = request.json['appid']
    account = request.json['app_account']
    if delete_tokens(account, appid) == 'success':
        no_db_logger.info('token delete success')
        return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
    no_db_logger.info('token delete fail')
    return json.dumps({'result_code':1,'error_msg':'token删除失败'}, ensure_ascii=False)
@app.route('/api/v1/list_card', methods=['POST'])
def list_card():
    """List all stored cards for the given account/appid pair."""
    appid = request.json['appid']
    account = request.json['app_account']
    cards = list_cards(account, appid)
    message = dict(cards=cards)
    # list_cards returns a list on success; anything else signals a DB error.
    if isinstance(cards, list):
        no_db_logger.info('list cards success')
        return json.dumps({'result_code':0,'error_msg':'','data':message}, ensure_ascii=False)
    else:
        no_db_logger.info('list cards fail')
        return json.dumps({'result_code':1,'error_msg':'查询账户卡号失败'}, ensure_ascii=False)
if __name__ == '__main__':
    # Development entry point; run under a proper WSGI server in production.
    app.run()
| zznn/futu-openAPI | app/mainapp.py | Python | apache-2.0 | 6,014 |
<?php
namespace empire\framework\session;
use \RuntimeException;
/**
 * Thrown when a session ID (SID) already exists.
 *
 * Extends RuntimeException, so callers are not forced to catch it; it signals
 * a programming or state error in session handling.
 *
 * @author Tobias Hornberger [tobias.hornberger@falsemirror.de]
 */
class SIDConflictException extends RuntimeException {
}
# Admin dashboard: cumulative signups per day, a paginated user list, and
# classification counts per day.
class DashboardController < ApplicationController
  before_filter :require_admin_user
  respond_to :html, :js

  def index
    grouped_users = ZooniverseUser.count(:group => "DATE(created_at)")
    @total_user_count = ZooniverseUser.count
    total = 0
    @total_users = {}
    # Sort by date before accumulating: the GROUP BY result hash carries no
    # ordering guarantee, and a running total taken over unordered days
    # produces a garbled cumulative curve.
    grouped_users.sort_by { |date, _count| date.to_s }.each do |date, value|
      total += value
      @total_users[date] = total
    end
    @users = ZooniverseUser.order('created_at DESC').page(params[:page])
    @classifications = Classification.count(:group => "DATE(created_at)")
  end
end
| zooniverse/The-Juggernaut | app/controllers/dashboard_controller.rb | Ruby | apache-2.0 | 545 |
import os
from django.core.management.color import supports_color
from django.utils import termcolors
class VerboseCommandMixin(object):
    """Mixin for management commands that prints colorized, dry-run-aware
    progress lines for file-system actions."""

    def __init__(self, *args, **kwargs):
        super(VerboseCommandMixin, self).__init__(*args, **kwargs)
        self.dry_run = False
        if supports_color():
            # Install one bold style per action verb on the style palette.
            for name, color in (('EXISTS', 'blue'),
                                ('APPEND', 'yellow'),
                                ('CREATE', 'green'),
                                ('REVERT', 'magenta'),
                                ('BACKUP', 'cyan')):
                setattr(self.style, name,
                        termcolors.make_style(fg=color, opts=('bold',)))

    def msg(self, action, path):
        """Print one action line; mutating actions are reported as withheld
        when dry_run is active."""
        withheld = self.dry_run and action in ('create', 'append', 'revert')
        styler = getattr(self.style, action.upper(), None)
        if styler is not None:
            action = styler(action)
        if withheld:
            action = self.style.NOTICE('did not ') + action
        self.stdout.write('\t{0:>25}\t{1:<}\n'.format(action, os.path.relpath(path)))

    def log(self, output):
        """Write output only when verbose mode is enabled."""
        if self.verbose:
            self.stdout.write(output)
| noslenfa/tdjangorest | uw/lib/python2.7/site-packages/generate_scaffold/management/verbosity.py | Python | apache-2.0 | 1,463 |
/**
* Copyright (c) Anton Johansson <antoon.johansson@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.antonjohansson.geolocation.framework.domain;
/**
 * Defines source data to be geolocated.
 */
public interface SourceData
{
    /**
     * Gets the IP address of this data.
     *
     * @return the IP address as a string; the interface does not constrain
     *         the format (IPv4 or IPv6)
     */
    String getAddress();
}
| anton-johansson/ip-to-geolocation-service | src/main/java/com/antonjohansson/geolocation/framework/domain/SourceData.java | Java | apache-2.0 | 859 |
import time
from pynfcreader.sessions.iso14443.iso14443a import Iso14443ASession
def test_iso_14443_a_card_1_generic(hydranfc_connection):
    # Hardware-in-the-loop test: drives a real ISO 14443-A card through a
    # HydraNFC reader — field reset, polling, then SELECT APDUs whose exact
    # raw responses (for this specific card) are pinned below.
    hn = Iso14443ASession(drv=hydranfc_connection, block_size=120)
    hn.connect()
    hn.field_off()
    time.sleep(0.1)  # let the field decay so the card fully powers off
    hn.field_on()
    hn.polling()
    # SELECT PPSE ("2PAY.SYS.DDF01") — lists the payment applications.
    r = hn.send_apdu("00 a4 04 00 0E 32 50 41 59 2E 53 59 53 2E 44 44 46 30 31 00")
    assert b'oW\x84\x0e2PAY.S.DDF01\xa5E\xbf\x0cBO\x07\xa0\x00\x00\x00B\x10\x10P\x02\x87\x01\x01\x9f(\x08@\x02\x00\x00\x00\x00a#O\x07\xa0\x00\x00\x00\x04\x10\nMASTERCARD\x02\x9f(\x08@\x00 \x00\x00\x00\x00' == r
    # SELECT the CB (Carte Bancaire) application AID A0 00 00 00 42 10 10.
    r = hn.send_apdu("00 a4 04 00 07 A0 00 00 00 42 10 10 00")
    assert b'o?\x84\x07\xa0\x00\x00\x00B\x104P\x02CB\x87\x01\x01\x9f\x11\x01\x12\x0eTransacti CB_-\x04fren\xbf\xdf`\x02\x0b\x14\x9fM\x02\x0b\x14\xdf\x04' == r
    # SELECT the Mastercard application AID A0 00 00 00 04 10 10.
    r = hn.send_apdu("00 a4 04 00 07 A0 00 00 00 04 10 10 00")
    assert b'o?\x84\x07\xa0\x00\x00\x00\x04\x104P\nMASTERCA\x87\x01\x02\x9f\x11\x01\x01\x9f\x12\nMTERCARD_-\x04fn\xbf\x0c\n\xdf`\x02\x0b\x14\x9fM\x14' == r
    hn.field_off()
| gvinet/pynfcreader | tests/tests_iso_14443_a_card_1_hydranfc_v2.py | Python | apache-2.0 | 1,077 |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* ProspectiveLineItem.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202102;
/**
* Represents a prospective line item to be forecasted.
*/
public class ProspectiveLineItem implements java.io.Serializable {
    /* The target of the forecast. If {@link LineItem#id} is null
     * or no line item exists with that ID,
     * then a forecast is computed for the subject, predicting
     * what would happen if it were added
     * to the network. If a line item already exists with
     * {@link LineItem#id}, the forecast is
     * computed for the subject, predicting what would happen
     * if the existing line item's settings
     * were modified to match the subject. */
    private com.google.api.ads.admanager.axis.v202102.LineItem lineItem;
    /* The target of the forecast if this prospective line item is
     * a proposal line item.
     *
     * <p>If {@link ProposalLineItem#id} is null or no proposal
     * line item exists with that ID,
     * then a forecast is computed for the subject, predicting
     * what would happen if it were added
     * to the network. If a proposal line item already exists
     * with {@link ProposalLineItem#id},
     * the forecast is computed for the subject, predicting
     * what would happen if the existing proposal
     * line item's settings were modified to match the subject.
     *
     * <p>A proposal line item can optionally correspond
     * to an order {@link LineItem}, in which case,
     * by forecasting a proposal line item, the corresponding
     * line item is implicitly ignored in the
     * forecasting.
     *
     * <p>Either {@link #lineItem} or {@link #proposalLineItem}
     * should be specified but not both. */
    private com.google.api.ads.admanager.axis.v202102.ProposalLineItem proposalLineItem;
    /* When set, the line item is assumed to be from this advertiser,
     * and unified blocking rules will
     * apply accordingly. If absent, line items without an
     * existing order won't be subject to unified
     * blocking rules. */
    private java.lang.Long advertiserId;
    public ProspectiveLineItem() {
    }
    public ProspectiveLineItem(
           com.google.api.ads.admanager.axis.v202102.LineItem lineItem,
           com.google.api.ads.admanager.axis.v202102.ProposalLineItem proposalLineItem,
           java.lang.Long advertiserId) {
           this.lineItem = lineItem;
           this.proposalLineItem = proposalLineItem;
           this.advertiserId = advertiserId;
    }
    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("advertiserId", getAdvertiserId())
            .add("lineItem", getLineItem())
            .add("proposalLineItem", getProposalLineItem())
            .toString();
    }
    /**
     * Gets the lineItem value for this ProspectiveLineItem.
     *
     * @return lineItem * The target of the forecast. If {@link LineItem#id} is null
     * or no line item exists with that ID,
     * then a forecast is computed for the subject, predicting
     * what would happen if it were added
     * to the network. If a line item already exists with
     * {@link LineItem#id}, the forecast is
     * computed for the subject, predicting what would happen
     * if the existing line item's settings
     * were modified to match the subject.
     */
    public com.google.api.ads.admanager.axis.v202102.LineItem getLineItem() {
        return lineItem;
    }
    /**
     * Sets the lineItem value for this ProspectiveLineItem.
     *
     * @param lineItem * The target of the forecast. If {@link LineItem#id} is null
     * or no line item exists with that ID,
     * then a forecast is computed for the subject, predicting
     * what would happen if it were added
     * to the network. If a line item already exists with
     * {@link LineItem#id}, the forecast is
     * computed for the subject, predicting what would happen
     * if the existing line item's settings
     * were modified to match the subject.
     */
    public void setLineItem(com.google.api.ads.admanager.axis.v202102.LineItem lineItem) {
        this.lineItem = lineItem;
    }
    /**
     * Gets the proposalLineItem value for this ProspectiveLineItem.
     *
     * @return proposalLineItem * The target of the forecast if this prospective line item is
     * a proposal line item.
     *
     * <p>If {@link ProposalLineItem#id} is null or no proposal
     * line item exists with that ID,
     * then a forecast is computed for the subject, predicting
     * what would happen if it were added
     * to the network. If a proposal line item already exists
     * with {@link ProposalLineItem#id},
     * the forecast is computed for the subject, predicting
     * what would happen if the existing proposal
     * line item's settings were modified to match the subject.
     *
     * <p>A proposal line item can optionally correspond
     * to an order {@link LineItem}, in which case,
     * by forecasting a proposal line item, the corresponding
     * line item is implicitly ignored in the
     * forecasting.
     *
     * <p>Either {@link #lineItem} or {@link #proposalLineItem}
     * should be specified but not both.
     */
    public com.google.api.ads.admanager.axis.v202102.ProposalLineItem getProposalLineItem() {
        return proposalLineItem;
    }
    /**
     * Sets the proposalLineItem value for this ProspectiveLineItem.
     *
     * @param proposalLineItem * The target of the forecast if this prospective line item is
     * a proposal line item.
     *
     * <p>If {@link ProposalLineItem#id} is null or no proposal
     * line item exists with that ID,
     * then a forecast is computed for the subject, predicting
     * what would happen if it were added
     * to the network. If a proposal line item already exists
     * with {@link ProposalLineItem#id},
     * the forecast is computed for the subject, predicting
     * what would happen if the existing proposal
     * line item's settings were modified to match the subject.
     *
     * <p>A proposal line item can optionally correspond
     * to an order {@link LineItem}, in which case,
     * by forecasting a proposal line item, the corresponding
     * line item is implicitly ignored in the
     * forecasting.
     *
     * <p>Either {@link #lineItem} or {@link #proposalLineItem}
     * should be specified but not both.
     */
    public void setProposalLineItem(com.google.api.ads.admanager.axis.v202102.ProposalLineItem proposalLineItem) {
        this.proposalLineItem = proposalLineItem;
    }
    /**
     * Gets the advertiserId value for this ProspectiveLineItem.
     *
     * @return advertiserId * When set, the line item is assumed to be from this advertiser,
     * and unified blocking rules will
     * apply accordingly. If absent, line items without an
     * existing order won't be subject to unified
     * blocking rules.
     */
    public java.lang.Long getAdvertiserId() {
        return advertiserId;
    }
    /**
     * Sets the advertiserId value for this ProspectiveLineItem.
     *
     * @param advertiserId * When set, the line item is assumed to be from this advertiser,
     * and unified blocking rules will
     * apply accordingly. If absent, line items without an
     * existing order won't be subject to unified
     * blocking rules.
     */
    public void setAdvertiserId(java.lang.Long advertiserId) {
        this.advertiserId = advertiserId;
    }
    // Cycle guard emitted by WSDL2Java: remembers the object currently being
    // compared so equals() terminates on cyclic object graphs.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof ProspectiveLineItem)) return false;
        ProspectiveLineItem other = (ProspectiveLineItem) obj;
        // NOTE(review): this null check is unreachable — the instanceof test
        // above already rejects null. Kept as emitted by the code generator.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.lineItem==null && other.getLineItem()==null) ||
             (this.lineItem!=null &&
              this.lineItem.equals(other.getLineItem()))) &&
            ((this.proposalLineItem==null && other.getProposalLineItem()==null) ||
             (this.proposalLineItem!=null &&
              this.proposalLineItem.equals(other.getProposalLineItem()))) &&
            ((this.advertiserId==null && other.getAdvertiserId()==null) ||
             (this.advertiserId!=null &&
              this.advertiserId.equals(other.getAdvertiserId())));
        __equalsCalc = null;
        return _equals;
    }
    // Re-entrancy guard for hashCode() on cyclic object graphs (see __equalsCalc).
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getLineItem() != null) {
            _hashCode += getLineItem().hashCode();
        }
        if (getProposalLineItem() != null) {
            _hashCode += getProposalLineItem().hashCode();
        }
        if (getAdvertiserId() != null) {
            _hashCode += getAdvertiserId().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }
    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ProspectiveLineItem.class, true);
    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "ProspectiveLineItem"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("lineItem");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "lineItem"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "LineItem"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("proposalLineItem");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "proposalLineItem"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "ProposalLineItem"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("advertiserId");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "advertiserId"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }
    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }
    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }
    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
| googleads/googleads-java-lib | modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202102/ProspectiveLineItem.java | Java | apache-2.0 | 13,510 |
/*
* Copyright [2016-2020] [George Papadakis (gpapadis@yahoo.gr)]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scify.jedai.textmodels;
import org.scify.jedai.utilities.enumerations.RepresentationModel;
import org.scify.jedai.utilities.enumerations.SimilarityMetric;
import gnu.trove.iterator.TObjectIntIterator;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import java.util.HashSet;
import java.util.Set;
/**
*
* @author G.A.P. II
*/
public abstract class BagModel extends AbstractModel {

    // Total number of term occurrences in this document (with multiplicity),
    // used to normalize term frequencies.
    protected float noOfTotalTerms;
    // Term -> occurrence count for this document.
    protected final TObjectIntMap<String> itemsFrequency;

    public BagModel(int dId, int n, RepresentationModel md, SimilarityMetric sMetric, String iName) {
        super(dId, n, md, sMetric, iName);

        itemsFrequency = new TObjectIntHashMap<>();
    }

    @Override
    public void finalizeModel() {
        // Nothing to finalize for plain bag-of-words models.
    }

    /**
     * Jaccard similarity over term multisets: sums the minimum frequency of
     * each shared term, divided by the total terms of both models minus that
     * overlap. Iterates over the smaller vector for efficiency.
     */
    protected float getEnhancedJaccardSimilarity(BagModel oModel) {
        TObjectIntMap<String> itemVector1 = itemsFrequency;
        TObjectIntMap<String> itemVector2 = oModel.getItemsFrequency();
        if (itemVector2.size() < itemVector1.size()) {
            itemVector1 = oModel.getItemsFrequency();
            itemVector2 = itemsFrequency;
        }

        float numerator = 0.0f;
        for (TObjectIntIterator<String> iterator = itemVector1.iterator(); iterator.hasNext();) {
            iterator.advance();
            numerator += Math.min(iterator.value(), itemVector2.get(iterator.key()));
        }

        float denominator = noOfTotalTerms + oModel.getNoOfTotalTerms() - numerator;
        return numerator / denominator;
    }

    /**
     * Shannon entropy (base 2) of the term-frequency distribution; optionally
     * normalized by log2(noOfTotalTerms).
     */
    @Override
    public float getEntropy(boolean normalized) {
        float entropy = 0.0f;
        for (TObjectIntIterator<String> iterator = itemsFrequency.iterator(); iterator.hasNext();) {
            iterator.advance();
            // p_i: relative frequency of the current term.
            float p_i = (iterator.value() / noOfTotalTerms);
            // log10(x)/log10(2) == log2(x).
            entropy -= (p_i * (Math.log10(p_i) / Math.log10(2.0d)));
        }

        if (normalized) {
            float maxEntropy = (float) Math.log10(noOfTotalTerms) / (float) Math.log10(2.0f);
            return entropy / maxEntropy;
        }

        return entropy;
    }

    public TObjectIntMap<String> getItemsFrequency() {
        return itemsFrequency;
    }

    /**
     * Set-based Jaccard similarity: |shared terms| / |union of terms|,
     * ignoring term frequencies.
     */
    protected float getJaccardSimilarity(BagModel oModel) {
        final Set<String> commonKeys = new HashSet<>(itemsFrequency.keySet());
        commonKeys.retainAll(oModel.getItemsFrequency().keySet());

        float numerator = commonKeys.size();
        float denominator = itemsFrequency.size() + oModel.getItemsFrequency().size() - numerator;
        return numerator / denominator;
    }

    protected float getNoOfTotalTerms() {
        return noOfTotalTerms;
    }

    @Override
    public Set<String> getSignatures() {
        return itemsFrequency.keySet();
    }

    /**
     * Dispatches to the similarity implementation selected by {@code simMetric}.
     *
     * @throws IllegalStateException if the configured metric is not supported
     *         by bag models.
     */
    @Override
    public float getSimilarity(ITextModel oModel) {
        switch (simMetric) {
            case COSINE_SIMILARITY:
                return getTfCosineSimilarity((BagModel) oModel);
            case ENHANCED_JACCARD_SIMILARITY:
                return getEnhancedJaccardSimilarity((BagModel) oModel);
            case GENERALIZED_JACCARD_SIMILARITY:
                return getTfGeneralizedJaccardSimilarity((BagModel) oModel);
            case JACCARD_SIMILARITY:
                return getJaccardSimilarity((BagModel) oModel);
            default:
                throw new IllegalStateException(
                        "The given similarity metric is incompatible with the bag representation model.");
        }
    }

    /**
     * Cosine similarity of the two TF vectors (each frequency normalized by
     * the model's total terms). Iterates over the smaller vector; the
     * numerator terms are symmetric, so the swap does not affect the result.
     */
    protected float getTfCosineSimilarity(BagModel oModel) {
        float totalTerms2 = oModel.getNoOfTotalTerms();

        TObjectIntMap<String> itemVector1 = itemsFrequency;
        TObjectIntMap<String> itemVector2 = oModel.getItemsFrequency();
        if (itemVector2.size() < itemVector1.size()) {
            itemVector1 = oModel.getItemsFrequency();
            itemVector2 = itemsFrequency;
        }

        float numerator = 0.0f;
        for (TObjectIntIterator<String> iterator = itemVector1.iterator(); iterator.hasNext();) {
            iterator.advance();
            numerator += iterator.value() * itemVector2.get(iterator.key()) / noOfTotalTerms / totalTerms2;
        }

        float denominator = getVectorMagnitude() * oModel.getVectorMagnitude();
        return numerator / denominator;
    }

    /**
     * Generalized (weighted) Jaccard similarity over normalized term
     * frequencies: sum of per-term minima divided by sum of per-term maxima.
     * Note that the totals are swapped together with the vectors so each
     * frequency is divided by its own model's total.
     */
    protected float getTfGeneralizedJaccardSimilarity(BagModel oModel) {
        float totalTerms1 = noOfTotalTerms;
        float totalTerms2 = oModel.getNoOfTotalTerms();
        TObjectIntMap<String> itemVector1 = itemsFrequency;
        TObjectIntMap<String> itemVector2 = oModel.getItemsFrequency();
        if (itemVector2.size() < itemVector1.size()) {
            itemVector1 = oModel.getItemsFrequency();
            itemVector2 = itemsFrequency;
            totalTerms1 = oModel.getNoOfTotalTerms();
            totalTerms2 = noOfTotalTerms;
        }

        float numerator = 0.0f;
        for (TObjectIntIterator<String> iterator = itemVector1.iterator(); iterator.hasNext(); ) {
            iterator.advance();
            numerator += Math.min(iterator.value() / totalTerms1, itemVector2.get(iterator.key()) / totalTerms2);
        }

        final Set<String> allKeys = new HashSet<>(itemVector1.keySet());
        allKeys.addAll(itemVector2.keySet());
        float denominator = 0.0f;
        for (String key : allKeys) {
            denominator += Math.max(itemVector1.get(key) / totalTerms1, itemVector2.get(key) / totalTerms2);
        }

        return numerator / denominator;
    }

    /**
     * Euclidean (L2) norm of the TF vector, each frequency normalized by
     * {@code noOfTotalTerms}; used as the denominator of the cosine metric.
     */
    protected float getVectorMagnitude() {
        float magnitude = 0.0f;
        for (TObjectIntIterator<String> iterator = itemsFrequency.iterator(); iterator.hasNext();) {
            iterator.advance();
            magnitude += Math.pow(iterator.value() / noOfTotalTerms, 2.0);
        }

        return (float) Math.sqrt(magnitude);
    }
}
| scify/JedAIToolkit | src/main/java/org/scify/jedai/textmodels/BagModel.java | Java | apache-2.0 | 6,720 |
<?php
// Renders rows of `joinjoin_true` for one species (by Chinese name) as an HTML table.
// Expects $animalChineseName, $begin, $limit and $db to be defined by the including script.
// NOTE(review): the query is still built by string concatenation — if
// $animalChineseName / $begin / $limit can come from user input, this must be
// rewritten with mysqli_prepare() and bound parameters to prevent SQL injection.
$query = "select * from `joinjoin_true` where joinjoin_true.species_chinesename = ".'"'.$animalChineseName.'"'." limit $begin,$limit;";
//连接数据库
$conn = @mysqli_connect("127.0.0.1","guest","guest",$db);
if(mysqli_connect_errno($conn))
{
    die("无法连接数据库,请联系服务器管理员");
}else{
    mysqli_set_charset($conn,"utf8");
    // Bug fix: the old code ran mysqli_real_escape_string() on the *entire*
    // query and then stripped every backslash again — that both defeated the
    // escaping and corrupted any query legitimately containing a backslash.
    // The query string is now used as built above.
}
$result = mysqli_query($conn,$query);

function printSQLresultAsTable(){
    global $conn;
    global $result;
    // 输出表头 (bug fix: the caption close tag was "<caption>" instead of "</caption>")
    echo <<<tableHead
<table class="table">
<caption>查询结果</caption>
<thead>
tableHead;
    echo '<tr>';
    $fetch_fields = mysqli_fetch_fields($result);
    foreach($fetch_fields as $column)
    {
        echo "<th class=\"warning\">$column->name</th>";
    }
    echo "</tr>";
    echo '</thead>';
    //逐行输出数据表
    echo "<tbody>";
    while($row=mysqli_fetch_array($result,MYSQLI_NUM))
    {
        // Column index 5 holds the TRUE/FALSE flag used to colour the row.
        if($row[5]=='TRUE')
        {
            echo '<tr class="success">';
        }else{
            echo '<tr class="danger">';
        }
        foreach($row as $value)
        {
            echo "<td>$value</td>";
        }
        echo "</tr>";
    }
    // Bug fix: these closing tags used to be echoed *outside* the function
    // (emitted at include time, before any row), and </table> was missing
    // entirely, leaving the markup unbalanced.
    echo "</tbody>";
    echo "</table>";
}

mysqli_close($conn);
?>
?> | 532867531/bysj_web | bysj1/mysql/sqlQueryInAndOut.inc.php | PHP | apache-2.0 | 1,236 |
#!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates new proposals.
To determine which proposals exist, run get_all_proposals.py.
"""
import uuid
# Import appropriate modules from the client library.
from googleads import ad_manager
# Placeholder IDs — replace these with real values before running the example.
ADVERTISER_ID = 'INSERT_ADVERTISER_ID_HERE'
PRIMARY_SALESPERSON_ID = 'INSERT_PRIMARY_SALESPERSON_ID_HERE'
SECONDARY_SALESPERSON_ID = 'INSERT_SECONDARY_SALESPERSON_ID_HERE'
PRIMARY_TRAFFICKER_ID = 'INSERT_PRIMARY_TRAFFICKER_ID_HERE'
def main(client, advertiser_id, primary_salesperson_id,
         secondary_salesperson_id, primary_trafficker_id):
  """Creates one proposal via the Ad Manager ProposalService and prints it.

  Args:
    client: an initialized Ad Manager client.
    advertiser_id: ID of the advertiser company for the proposal.
    primary_salesperson_id: user ID of the primary salesperson (75% split).
    secondary_salesperson_id: user ID of the secondary salesperson (25% split).
    primary_trafficker_id: user ID of the primary trafficker.
  """
  # Initialize appropriate services.
  proposal_service = client.GetService('ProposalService', version='v201811')
  network_service = client.GetService('NetworkService', version='v201811')

  # Create proposal objects.
  proposal = {
      # uuid4 keeps the name unique across repeated runs of this example.
      'name': 'Proposal #%s' % uuid.uuid4(),
      'advertiser': {
          'companyId': advertiser_id,
          'type': 'ADVERTISER'
      },
      # Splits are expressed in millipercent: 75000 == 75%.
      'primarySalesperson': {
          'userId': primary_salesperson_id,
          'split': '75000'
      },
      'secondarySalespeople': [{
          'userId': secondary_salesperson_id,
          'split': '25000'
      }],
      'primaryTraffickerId': primary_trafficker_id,
      'probabilityOfClose': '100000',
      # Budget is in micro-units of the network's currency.
      'budget': {
          'microAmount': '100000000',
          'currencyCode': network_service.getCurrentNetwork()['currencyCode']
      },
      'billingCap': 'CAPPED_CUMULATIVE',
      'billingSource': 'DFP_VOLUME'
  }

  # Add proposals.
  proposals = proposal_service.createProposals([proposal])

  # Display results.
  for proposal in proposals:
    print ('Proposal with id "%s" and name "%s" was created.'
           % (proposal['id'], proposal['name']))
if __name__ == '__main__':
  # Initialize client object. Credentials come from the googleads.yaml
  # storage file read by LoadFromStorage().
  ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
  main(ad_manager_client, ADVERTISER_ID, PRIMARY_SALESPERSON_ID,
       SECONDARY_SALESPERSON_ID, PRIMARY_TRAFFICKER_ID)
| Aloomaio/googleads-python-lib | examples/ad_manager/v201811/proposal_service/create_proposals.py | Python | apache-2.0 | 2,591 |
'use strict';

// NOTE(review): this file is Babel-compiled output (see the sourceMappingURL
// footer) — edit the ES6 source, not this artifact. The `url` import below
// appears unused in the compiled module; check the ES6 source.

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();

var _url = require('url');

var _url2 = _interopRequireDefault(_url);

var _qs = require('qs');

var _qs2 = _interopRequireDefault(_qs);

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

/**
 * Request is generated each time the user navigates.
 * @param {string} path
 * @param {object} query
 * @param {object} params
 *
 * @property {string} path - The request path.
 * @property {object} query - If the hash contains a query part, it is treated as a query string.
 * @property {object} params - An object containing properties mapped to the named route “parameters”.
 */
var Request = function () {
  function Request(path, query, params) {
    _classCallCheck(this, Request);

    this.path = path;
    this.query = query;
    this.params = params;
  }

  /**
   * create a new Request object
   * @param {string} path
   * @param {string} query - raw query string, parsed with qs
   * @param {array} keys - named-parameter descriptors from the route matcher
   * @param {array} results - regexp match results; group i+1 maps to keys[i]
   */

  _createClass(Request, null, [{
    key: 'create',
    value: function create(path, query, keys, results) {
      var params = Object.create(null);

      keys.forEach(function (key, index) {
        return params[key.name] = results[index + 1];
      });

      return new Request(path, _qs2.default.parse(query), params);
    }
  }]);

  return Request;
}();

exports.default = Request;
//# sourceMappingURL=Request.js.map | metasansana/arouca | lib/Request.js | JavaScript | apache-2.0 | 2,283 |
//
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
namespace Microsoft.PackageManagement.SwidTag.Test.Support {
using System.Linq;
public static class Event<T> where T : class {
private static T EmptyDelegate {
get {
return typeof (T).CreateEmptyDelegate() as T;
}
}
public static T Raise {
get {
return (XTask.CurrentExecutingTask.GetEventHandler(typeof (T)) as T) ?? EmptyDelegate;
}
}
public static T RaiseFirst {
get {
var dlg = XTask.CurrentExecutingTask.GetEventHandler(typeof (T));
return dlg != null ? dlg.GetInvocationList().FirstOrDefault() as T : EmptyDelegate;
}
}
}
} | fearthecowboy/Swidtag_deprecated | Test/Support/Event.cs | C# | apache-2.0 | 1,400 |
import content from "./content.md"
import image from "./image.png"
// Returns the imported markdown content concatenated with the image asset path.
export default function() {
  const combined = content + image
  return combined
}
| sebastian-software/rollup-plugin-rebase | test/flat-multi/index.js | JavaScript | apache-2.0 | 123 |
/*******************************************************************************
* Copyright 2015 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*******************************************************************************/
package uk.ac.ebi.phenotype.chart;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 *
 * @author tudose
 *
 * Implementation of Method 2 from http://en.wikipedia.org/wiki/Quartile
 * 1. Use the median to divide the ordered data set into two halves. If the median is a datum (as opposed to being the mean of the middle two data), include the median in both halves.
 * 2. The lower quartile value is the median of the lower half of the data. The upper quartile value is the median of the upper half of the data.
 */
public class PercentileComputation{

    // Views over the sorted copy: upper/lower halves per Method 2.
    private final List<Float> upperValues;
    private final List<Float> lowerValues;
    // Sorted copy of the input values; getMedian() relies on this ordering.
    private final List<Float> values;

    /**
     * @param val the observed values; must be non-empty. The list is copied,
     *            so the caller's list is neither reordered nor otherwise
     *            modified. (The previous implementation cast the argument to
     *            ArrayList — throwing ClassCastException for any other List
     *            implementation — and sorted the caller's list in place.)
     */
    public PercentileComputation(List<Float> val){
        List<Float> sortedValues = new ArrayList<>(val);
        Collections.sort(sortedValues);
        values = sortedValues;

        // Use the median to divide the ordered data set into two halves.
        // If the median is a datum (as opposed to being the mean of the middle two data), include the median in both halves.
        int n = sortedValues.size();
        if (n % 2 == 1){
            lowerValues = sortedValues.subList(0, (n + 1) / 2);
            upperValues = sortedValues.subList((n - 1) / 2, n);
        }
        else {
            lowerValues = sortedValues.subList(0, n / 2);
            upperValues = sortedValues.subList(n / 2, n);
        }
    }

    // The lower quartile value is the median of the lower half of the data. The upper quartile value is the median of the upper half of the data.
    public float getUpperQuartile() {
        return getMedian(upperValues);
    }

    public float getLowerQuartile() {
        return getMedian(lowerValues);
    }

    public float getMedian(){
        return getMedian(values);
    }

    /**
     * Median of an already-sorted list: the middle element for odd sizes,
     * the mean of the two middle elements for even sizes.
     */
    private Float getMedian(List<Float> list){
        int n = list.size();
        if (n % 2 == 1){
            return list.get((n - 1)/2);
        }
        else{
            return (list.get(n/2 - 1) + list.get(n/2)) / 2;
        }
    }
}
| mpi2/PhenotypeData | web/src/main/java/uk/ac/ebi/phenotype/chart/PercentileComputation.java | Java | apache-2.0 | 2,685 |
package ca.six.views.util;
import android.graphics.Color;
public class ColorsUtil {
public static boolean isLight(int color) {
return Math.sqrt(
Color.red(color) * Color.red(color) * 0.241 +
Color.green(color) * Color.green(color) * 0.691 +
Color.blue(color) * Color.blue(color) * 0.068) > 130;
}
public static int getBaseColor(int color) {
if (isLight(color)) {
return Color.BLACK;
}
return Color.WHITE;
}
} | songzhw/SixUiViews | SixUiViews/lib/src/main/java/ca/six/views/util/ColorsUtil.java | Java | apache-2.0 | 473 |
<?php
namespace Student\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * Subject
 *
 * Doctrine ORM entity mapped to the `subject` table; each subject belongs
 * to one SchoolClass via the `class_id` foreign key.
 *
 * @ORM\Table(name="subject", indexes={@ORM\Index(name="fk_class_id", columns={"class_id"})})
 * @ORM\Entity
 */
class Subject
{
    /**
     * Auto-generated primary key.
     *
     * @var integer
     *
     * @ORM\Column(name="id", type="integer", nullable=false)
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="IDENTITY")
     */
    private $id;

    /**
     * @var string
     *
     * @ORM\Column(name="name", type="string", length=30, nullable=true)
     */
    private $name;

    /**
     * @var string
     *
     * @ORM\Column(name="description", type="string", length=30, nullable=true)
     */
    private $description;

    /**
     * Owning school class (many subjects per class).
     *
     * @var \Student\Entity\SchoolClass
     *
     * @ORM\ManyToOne(targetEntity="Student\Entity\SchoolClass")
     * @ORM\JoinColumns({
     *   @ORM\JoinColumn(name="class_id", referencedColumnName="id")
     * })
     */
    private $class;



    /**
     * Get id
     *
     * @return integer
     */
    public function getId()
    {
        return $this->id;
    }

    /**
     * Set name
     *
     * @param string $name
     * @return Subject
     */
    public function setName($name)
    {
        $this->name = $name;

        return $this;
    }

    /**
     * Get name
     *
     * @return string
     */
    public function getName()
    {
        return $this->name;
    }

    /**
     * Set description
     *
     * @param string $description
     * @return Subject
     */
    public function setDescription($description)
    {
        $this->description = $description;

        return $this;
    }

    /**
     * Get description
     *
     * @return string
     */
    public function getDescription()
    {
        return $this->description;
    }

    /**
     * Set class
     *
     * @param \Student\Entity\SchoolClass $class
     * @return Subject
     */
    public function setClass(\Student\Entity\SchoolClass $class = null)
    {
        $this->class = $class;

        return $this;
    }

    /**
     * Get class
     *
     * @return \Student\Entity\SchoolClass
     */
    public function getClass()
    {
        return $this->class;
    }
}
| kuldeep-k/zf2doctrine | module/Student/src/Student/Entity/Subject.php | PHP | apache-2.0 | 2,185 |
/**
* Copyright 2013 Simeon Malchev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vibur.dbcp.cache;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A thin wrapper class which allows us to augment the returned {@code value} of a method invocation with some
* additional "state" information. The instances of this class are used as a cached {@code value} for method
* invocations in a {@link java.util.concurrent.ConcurrentMap} cache implementation, and their "state" is describing
* whether the object is currently available, in use or evicted.
*
* @see StatementInvocationCacheProvider
* @see ConnMethodDef
*
* @author Simeon Malchev
* @param <V> the type of the value object held in this ReturnVal
*/
public class ReturnVal<V> {

    /**
     * The 3 different states in which a {@code ReturnVal} object can be while being used as a cached value:
     */
    public static final int AVAILABLE = 0;
    public static final int IN_USE    = 1;
    public static final int EVICTED   = 2;

    // The wrapped method-invocation result; never null (enforced in the constructor).
    private final V value;
    // Mutable state cell shared with the cache; expected values are the constants above.
    private final AtomicInteger state;

    /**
     * @param value the cached method-invocation result; must not be {@code null}
     * @param state the state holder for this cached entry
     * @throws NullPointerException if {@code value} is {@code null}
     */
    public ReturnVal(V value, AtomicInteger state) {
        // Objects.requireNonNull gives the same NPE behavior as the previous
        // explicit check, but with a diagnostic message.
        this.value = Objects.requireNonNull(value, "value");
        this.state = state;
    }

    /** Returns the wrapped value; never {@code null}. */
    public V value() {
        return value;
    }

    /** Returns the state holder; see {@link #AVAILABLE}, {@link #IN_USE}, {@link #EVICTED}. */
    public AtomicInteger state() {
        return state;
    }
}
| tamalsen/vibur-dbcp | src/main/java/org/vibur/dbcp/cache/ReturnVal.java | Java | apache-2.0 | 1,964 |
package minerGCE;
import org.xml.sax.SAXException;
import ru.ispras.modis.NetBlox.parser.basicParsersAndUtils.Utils;
import ru.ispras.modis.NetBlox.parser.basicParsersAndUtils.XMLIntegerRangeStringProcessor;
import ru.ispras.modis.NetBlox.parser.xmlParser.CommonXMLParser;
import ru.ispras.modis.NetBlox.parser.xmlParser.XMLStringValueProcessor;
import ru.ispras.modis.NetBlox.scenario.RangeOfValues;
/**
 * SAX-based parser for the XML description of the GCE algorithm section of a
 * scenario file. Collects supplementary algorithm IDs, launch numbers and the
 * minimal clique size into a {@link DescriptionGCD_GCE}.
 */
public class GCEDescriptionParser extends CommonXMLParser {
    /** Parses the comma/delimiter-separated list of supplementary algorithm IDs. */
    class SupplementaryAlgosIdsProcessor extends XMLStringValueProcessor {
        @Override
        public void closeElement() {
            super.closeElement();

            String stringOfIds = getText();
            String[] ids = stringOfIds.split(Utils.DELIMITER);
            for (String stringId : ids) {
                // Validates each token before registering it on the description.
                Utils.checkWhetherIsWordInScenario(stringId, TAG_SUPPLEMENTARY_ALGOS_IDS, "algorithm");
                minerDescription.addSupplementaryAlgorithmId(stringId);
            }
        }
    }

    /** Parses the integer range of launch numbers; empty ranges are ignored. */
    class LaunchesProcessor extends XMLIntegerRangeStringProcessor {
        @Override
        public void closeElement() {
            super.closeElement();

            RangeOfValues<Integer> launchNumbers = getValues();
            if (launchNumbers != null && !launchNumbers.isEmpty()) {
                minerDescription.setLaunchNumbers(launchNumbers);
            }
        }
    }

    private static final String TAG_SUPPLEMENTARY_ALGOS_IDS = "supplementaryAlgosIds";
    private static final String TAG_LAUNCH_NUMBERS = "launchNumbers";
    private static final String TAG_MINIMAL_CLIQUE_SIZE = "minimalCliqueSize";

    // Kept as a field so endDocument() can read the accumulated text.
    private final XMLStringValueProcessor minimalCliqueSizeParser;

    // The description being populated; recreated on each startDocument().
    private DescriptionGCD_GCE minerDescription;

    public GCEDescriptionParser() {
        super();
        add(TAG_SUPPLEMENTARY_ALGOS_IDS, new SupplementaryAlgosIdsProcessor());
        add(TAG_LAUNCH_NUMBERS, new LaunchesProcessor());
        add(TAG_MINIMAL_CLIQUE_SIZE, minimalCliqueSizeParser = new XMLStringValueProcessor());
    }

    @Override
    public void startDocument() throws SAXException {
        super.startDocument();
        minerDescription = new DescriptionGCD_GCE();
    }

    @Override
    public void endDocument() throws SAXException {
        super.endDocument();
        // The minimal clique size element is optional; only set when present.
        String text = minimalCliqueSizeParser.getText();
        if (text != null && !text.isEmpty()) {
            minerDescription.setMinimalCliqueSize(Integer.parseInt(text));
        }
    }

    /** Returns the description built by the most recent parse. */
    public DescriptionGCD_GCE getParsedDescription() {
        return minerDescription;
    }
}
| ispras/NetBlox-plug-ins | graphMiners/GCE/src/minerGCE/GCEDescriptionParser.java | Java | apache-2.0 | 2,295 |
package integration
import (
"fmt"
"io"
"io/ioutil"
"reflect"
"testing"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/meta"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
kyaml "k8s.io/apimachinery/pkg/util/yaml"
"k8s.io/apiserver/pkg/admission"
"k8s.io/client-go/rest"
"k8s.io/kubernetes/pkg/api/legacyscheme"
kapi "k8s.io/kubernetes/pkg/apis/core"
kclientset "k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset"
buildv1 "github.com/openshift/api/build/v1"
legacyconfigv1 "github.com/openshift/api/legacyconfig/v1"
buildv1client "github.com/openshift/client-go/build/clientset/versioned"
buildutil "github.com/openshift/origin/pkg/build/util"
configapi "github.com/openshift/origin/pkg/cmd/server/apis/config"
configapilatest "github.com/openshift/origin/pkg/cmd/server/apis/config/latest"
serveradmission "github.com/openshift/origin/pkg/cmd/server/origin/admission"
testtypes "github.com/openshift/origin/test/integration/testing"
testutil "github.com/openshift/origin/test/util"
testserver "github.com/openshift/origin/test/util/server"
)
// setupAdmissionTest starts a master with setupConfig applied to the default
// master configuration and returns a cluster-admin internal Kubernetes client,
// the cluster-admin REST config, and a cleanup function (tears down the
// master's etcd) that the caller must defer.
func setupAdmissionTest(t *testing.T, setupConfig func(*configapi.MasterConfig)) (kclientset.Interface, *rest.Config, func()) {
	masterConfig, err := testserver.DefaultMasterOptions()
	if err != nil {
		t.Fatalf("error creating config: %v", err)
	}
	setupConfig(masterConfig)
	kubeConfigFile, err := testserver.StartConfiguredMasterAPI(masterConfig)
	if err != nil {
		t.Fatalf("error starting server: %v", err)
	}
	kubeClient, err := testutil.GetClusterAdminKubeInternalClient(kubeConfigFile)
	if err != nil {
		t.Fatalf("error getting client: %v", err)
	}
	clusterAdminConfig, err := testutil.GetClusterAdminClientConfig(kubeConfigFile)
	if err != nil {
		t.Fatalf("error getting openshift client: %v", err)
	}
	return kubeClient, clusterAdminConfig, func() {
		testserver.CleanupMasterEtcd(t, masterConfig)
	}
}
// testAdmissionPlugin sets a label with its name on the object getting admitted
// on create. The label value is labelValue when configured, "default" otherwise.
type testAdmissionPlugin struct {
	metav1.TypeMeta
	name       string // registered plugin name; also used as the label key
	labelValue string // optional value decoded from the plugin's config file
}
// Admit stamps the admitted object with a label keyed by the plugin's name.
// The value is the configured labelValue, falling back to "default" when no
// configuration was supplied.
func (p *testAdmissionPlugin) Admit(a admission.Attributes) error {
	accessor, err := meta.Accessor(a.GetObject())
	if err != nil {
		return err
	}
	labels := accessor.GetLabels()
	if labels == nil {
		labels = map[string]string{}
	}
	value := p.labelValue
	if len(value) == 0 {
		value = "default"
	}
	labels[p.name] = value
	accessor.SetLabels(labels)
	return nil
}
// Handles reports that this plugin only participates in create operations.
func (a *testAdmissionPlugin) Handles(operation admission.Operation) bool {
	switch operation {
	case admission.Create:
		return true
	default:
		return false
	}
}
// registerAdmissionPlugins registers a testAdmissionPlugin factory under each
// of the given names in the global origin admission plugin registry. When a
// plugin config reader is supplied at construction time, it is decoded as a
// TestPluginConfig and its Data field becomes the plugin's label value.
func registerAdmissionPlugins(t *testing.T, names ...string) {
	for _, name := range names {
		// Capture the loop variable so each factory closure keeps its own name.
		pluginName := name
		serveradmission.OriginAdmissionPlugins.Register(pluginName,
			func(config io.Reader) (admission.Interface, error) {
				plugin := &testAdmissionPlugin{
					name: pluginName,
				}
				// config can arrive as a typed-nil io.Reader; the reflect
				// check guards against that as well as a plain nil.
				if config != nil && !reflect.ValueOf(config).IsNil() {
					configData, err := ioutil.ReadAll(config)
					if err != nil {
						return nil, err
					}
					configData, err = kyaml.ToJSON(configData)
					if err != nil {
						return nil, err
					}
					configObj := &testtypes.TestPluginConfig{}
					err = runtime.DecodeInto(legacyscheme.Codecs.UniversalDecoder(), configData, configObj)
					if err != nil {
						return nil, err
					}
					plugin.labelValue = configObj.Data
				}
				return plugin, nil
			})
	}
}
// admissionTestPod returns a minimal single-container pod fixture used to
// exercise the Kubernetes admission chain.
func admissionTestPod() *kapi.Pod {
	return &kapi.Pod{
		ObjectMeta: metav1.ObjectMeta{
			Name: "test-pod",
		},
		Spec: kapi.PodSpec{
			Containers: []kapi.Container{
				{
					Name:  "foo",
					Image: "openshift/hello-openshift",
				},
			},
		},
	}
}
// admissionTestBuild returns a minimal docker-strategy Build fixture carrying
// the labels the build admission path expects (build config name and run policy).
func admissionTestBuild() *buildv1.Build {
	build := &buildv1.Build{ObjectMeta: metav1.ObjectMeta{
		Labels: map[string]string{
			buildutil.BuildConfigLabel:    "mock-build-config",
			buildutil.BuildRunPolicyLabel: string(buildv1.BuildRunPolicyParallel),
		},
	}}
	build.Name = "test-build"
	build.Spec.Source.Git = &buildv1.GitBuildSource{URI: "http://build.uri/build"}
	build.Spec.Strategy.DockerStrategy = &buildv1.DockerBuildStrategy{}
	build.Spec.Output.To = &corev1.ObjectReference{
		Kind: "DockerImage",
		Name: "namespace/image",
	}
	return build
}
// checkAdmissionObjectLabelsIncludesExcludes verifies that every key named in
// includes is present in labels and that none of the keys named in excludes
// are. Label values are not inspected.
func checkAdmissionObjectLabelsIncludesExcludes(labels map[string]string, includes, excludes []string) error {
	for _, key := range includes {
		_, present := labels[key]
		if !present {
			return fmt.Errorf("labels %v does not include expected label: %s", labels, key)
		}
	}
	for _, key := range excludes {
		_, present := labels[key]
		if present {
			return fmt.Errorf("labels %v includes unexpected label: %s", labels, key)
		}
	}
	return nil
}
// checkAdmissionObjectLabelValues verifies that labels carries every key/value
// pair in expected; labels not mentioned in expected are ignored.
func checkAdmissionObjectLabelValues(labels, expected map[string]string) error {
	for key, want := range expected {
		if got := labels[key]; got != want {
			return fmt.Errorf("unexpected label value in %v for %s. Expected: %s", labels, key, want)
		}
	}
	return nil
}
// registerAdmissionPluginTestConfigType registers TestPluginConfig with the
// config scheme (both internal and legacy v1 groups) so plugin config files
// can be round-tripped through the config codec.
func registerAdmissionPluginTestConfigType() {
	configapi.Scheme.AddKnownTypes(configapi.SchemeGroupVersion, &testtypes.TestPluginConfig{})
	configapi.Scheme.AddKnownTypes(legacyconfigv1.LegacySchemeGroupVersion, &testtypes.TestPluginConfig{})
}
// setupAdmissionPluginTestConfig serializes a TestPluginConfig carrying the
// given data value to a temp file and returns the file's path. Any failure is
// fatal to the test.
func setupAdmissionPluginTestConfig(t *testing.T, value string) string {
	configFile, err := ioutil.TempFile("", "admission-config")
	if err != nil {
		t.Fatalf("error creating temp file: %v", err)
	}
	// Close immediately; the content is written by path below.
	configFile.Close()
	configObj := &testtypes.TestPluginConfig{
		Data: value,
	}
	configContent, err := configapilatest.WriteYAML(configObj)
	if err != nil {
		t.Fatalf("error writing config: %v", err)
	}
	// Fix: this error was previously ignored, so a failed write surfaced only
	// as a confusing downstream admission failure instead of pointing here.
	if err := ioutil.WriteFile(configFile.Name(), configContent, 0644); err != nil {
		t.Fatalf("error writing config file %s: %v", configFile.Name(), err)
	}
	return configFile.Name()
}
// TestKubernetesAdmissionPluginOrderOverride verifies that PluginOrderOverride
// limits the Kubernetes admission chain to exactly the listed plugins: a pod
// must get labels from plugin1 and plugin2 but not from the unlisted plugin3.
func TestKubernetesAdmissionPluginOrderOverride(t *testing.T) {
	registerAdmissionPlugins(t, "plugin1", "plugin2", "plugin3")
	kubeClient, _, fn := setupAdmissionTest(t, func(config *configapi.MasterConfig) {
		config.AdmissionConfig.PluginOrderOverride = []string{"plugin1", "plugin2"}
	})
	defer fn()
	createdPod, err := kubeClient.Core().Pods(metav1.NamespaceDefault).Create(admissionTestPod())
	if err != nil {
		t.Fatalf("Unexpected error creating pod: %v", err)
	}
	if err = checkAdmissionObjectLabelsIncludesExcludes(createdPod.Labels, []string{"plugin1", "plugin2"}, []string{"plugin3"}); err != nil {
		t.Errorf("Error: %v", err)
	}
}
// TestKubernetesAdmissionPluginConfigFile verifies that a plugin config file
// referenced via Location reaches the plugin: plugin1 must label the pod with
// the configured value while the unconfigured plugin2 uses "default".
func TestKubernetesAdmissionPluginConfigFile(t *testing.T) {
	registerAdmissionPluginTestConfigType()
	configFile := setupAdmissionPluginTestConfig(t, "plugin1configvalue")
	registerAdmissionPlugins(t, "plugin1", "plugin2")
	kubeClient, _, fn := setupAdmissionTest(t, func(config *configapi.MasterConfig) {
		config.AdmissionConfig.PluginOrderOverride = []string{"plugin1", "plugin2"}
		config.AdmissionConfig.PluginConfig = map[string]*configapi.AdmissionPluginConfig{
			"plugin1": {
				Location: configFile,
			},
		}
	})
	defer fn()
	createdPod, err := kubeClient.Core().Pods(metav1.NamespaceDefault).Create(admissionTestPod())
	// Fix: the create error was previously never checked, so a failed create
	// caused a nil-pointer panic on createdPod.Labels below instead of a
	// proper test failure.
	if err != nil {
		t.Fatalf("Unexpected error creating pod: %v", err)
	}
	if err = checkAdmissionObjectLabelValues(createdPod.Labels, map[string]string{"plugin1": "plugin1configvalue", "plugin2": "default"}); err != nil {
		t.Errorf("Error: %v", err)
	}
}
// TestKubernetesAdmissionPluginEmbeddedConfig verifies that a plugin config
// embedded directly in the master config (Configuration field) reaches the
// plugin, labeling the pod with the embedded value.
func TestKubernetesAdmissionPluginEmbeddedConfig(t *testing.T) {
	registerAdmissionPluginTestConfigType()
	registerAdmissionPlugins(t, "plugin1", "plugin2")
	kubeClient, _, fn := setupAdmissionTest(t, func(config *configapi.MasterConfig) {
		config.AdmissionConfig.PluginOrderOverride = []string{"plugin1", "plugin2"}
		config.AdmissionConfig.PluginConfig = map[string]*configapi.AdmissionPluginConfig{
			"plugin1": {
				Configuration: &testtypes.TestPluginConfig{
					Data: "embeddedvalue1",
				},
			},
		}
	})
	defer fn()
	createdPod, err := kubeClient.Core().Pods(metav1.NamespaceDefault).Create(admissionTestPod())
	// Fix: the create error was previously never checked, so a failed create
	// caused a nil-pointer panic on createdPod.Labels below.
	if err != nil {
		t.Fatalf("Unexpected error creating pod: %v", err)
	}
	if err = checkAdmissionObjectLabelValues(createdPod.Labels, map[string]string{"plugin1": "embeddedvalue1", "plugin2": "default"}); err != nil {
		t.Errorf("Error: %v", err)
	}
}
// TestOpenshiftAdmissionPluginOrderOverride verifies that PluginOrderOverride
// also governs the OpenShift (build) admission chain: a created build must be
// labeled by plugin1 and plugin2 but not by the unlisted plugin3.
func TestOpenshiftAdmissionPluginOrderOverride(t *testing.T) {
	registerAdmissionPlugins(t, "plugin1", "plugin2", "plugin3")
	_, clusterAdminConfig, fn := setupAdmissionTest(t, func(config *configapi.MasterConfig) {
		config.AdmissionConfig.PluginOrderOverride = []string{"plugin1", "plugin2"}
	})
	defer fn()
	createdBuild, err := buildv1client.NewForConfigOrDie(clusterAdminConfig).Build().Builds(metav1.NamespaceDefault).Create(admissionTestBuild())
	// Fix: this used t.Errorf, which does not stop the test; a failed create
	// then panicked on the nil createdBuild below. Fatalf aborts cleanly.
	if err != nil {
		t.Fatalf("Unexpected error creating build: %v", err)
	}
	if err = checkAdmissionObjectLabelsIncludesExcludes(createdBuild.Labels, []string{"plugin1", "plugin2"}, []string{"plugin3"}); err != nil {
		t.Errorf("Error: %v", err)
	}
}
// TestOpenshiftAdmissionPluginConfigFile verifies that a plugin config file
// referenced via Location reaches a plugin on the OpenShift admission chain:
// plugin2 must label the build with the configured value.
func TestOpenshiftAdmissionPluginConfigFile(t *testing.T) {
	registerAdmissionPluginTestConfigType()
	configFile := setupAdmissionPluginTestConfig(t, "plugin2configvalue")
	registerAdmissionPlugins(t, "plugin1", "plugin2")
	_, clusterAdminConfig, fn := setupAdmissionTest(t, func(config *configapi.MasterConfig) {
		config.AdmissionConfig.PluginOrderOverride = []string{"plugin1", "plugin2"}
		config.AdmissionConfig.PluginConfig = map[string]*configapi.AdmissionPluginConfig{
			"plugin2": {
				Location: configFile,
			},
		}
	})
	defer fn()
	createdBuild, err := buildv1client.NewForConfigOrDie(clusterAdminConfig).Build().Builds(metav1.NamespaceDefault).Create(admissionTestBuild())
	// Fix: the create error was previously never checked, so a failed create
	// caused a nil-pointer panic on createdBuild.Labels below.
	if err != nil {
		t.Fatalf("Unexpected error creating build: %v", err)
	}
	if err = checkAdmissionObjectLabelValues(createdBuild.Labels, map[string]string{"plugin1": "default", "plugin2": "plugin2configvalue"}); err != nil {
		t.Errorf("Error: %v", err)
	}
}
// TestOpenshiftAdmissionPluginEmbeddedConfig verifies that an embedded plugin
// configuration reaches a plugin on the OpenShift admission chain: plugin2
// must label the build with the embedded value.
func TestOpenshiftAdmissionPluginEmbeddedConfig(t *testing.T) {
	registerAdmissionPluginTestConfigType()
	registerAdmissionPlugins(t, "plugin1", "plugin2")
	_, clusterAdminConfig, fn := setupAdmissionTest(t, func(config *configapi.MasterConfig) {
		config.AdmissionConfig.PluginOrderOverride = []string{"plugin1", "plugin2"}
		config.AdmissionConfig.PluginConfig = map[string]*configapi.AdmissionPluginConfig{
			"plugin2": {
				Configuration: &testtypes.TestPluginConfig{
					Data: "embeddedvalue2",
				},
			},
		}
	})
	defer fn()
	createdBuild, err := buildv1client.NewForConfigOrDie(clusterAdminConfig).Build().Builds(metav1.NamespaceDefault).Create(admissionTestBuild())
	// Fix: the create error was previously never checked, so a failed create
	// caused a nil-pointer panic on createdBuild.Labels below.
	if err != nil {
		t.Fatalf("Unexpected error creating build: %v", err)
	}
	if err = checkAdmissionObjectLabelValues(createdBuild.Labels, map[string]string{"plugin1": "default", "plugin2": "embeddedvalue2"}); err != nil {
		t.Errorf("Error: %v", err)
	}
}
// TestAlwaysPullImagesOn verifies that enabling the AlwaysPullImages admission
// plugin rewrites a pod's ImagePullPolicy from PullNever to PullAlways.
func TestAlwaysPullImagesOn(t *testing.T) {
	masterConfig, err := testserver.DefaultMasterOptions()
	if err != nil {
		t.Fatalf("error creating config: %v", err)
	}
	defer testserver.CleanupMasterEtcd(t, masterConfig)
	// A DefaultAdmissionConfig entry acts as the "enable" switch for the plugin.
	masterConfig.AdmissionConfig.PluginConfig = map[string]*configapi.AdmissionPluginConfig{
		"AlwaysPullImages": {
			Configuration: &configapi.DefaultAdmissionConfig{},
		},
	}
	kubeConfigFile, err := testserver.StartConfiguredMaster(masterConfig)
	if err != nil {
		t.Fatalf("error starting server: %v", err)
	}
	kubeClientset, err := testutil.GetClusterAdminKubeInternalClient(kubeConfigFile)
	if err != nil {
		t.Fatalf("error getting client: %v", err)
	}
	ns := &kapi.Namespace{}
	ns.Name = testutil.Namespace()
	_, err = kubeClientset.Core().Namespaces().Create(ns)
	if err != nil {
		t.Fatalf("error creating namespace: %v", err)
	}
	if err := testserver.WaitForPodCreationServiceAccounts(kubeClientset, testutil.Namespace()); err != nil {
		t.Fatalf("error getting client config: %v", err)
	}
	// The pod explicitly requests PullNever; the plugin must override it.
	testPod := &kapi.Pod{}
	testPod.GenerateName = "test"
	testPod.Spec.Containers = []kapi.Container{
		{
			Name:            "container",
			Image:           "openshift/origin-pod:notlatest",
			ImagePullPolicy: kapi.PullNever,
		},
	}
	actualPod, err := kubeClientset.Core().Pods(testutil.Namespace()).Create(testPod)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if actualPod.Spec.Containers[0].ImagePullPolicy != kapi.PullAlways {
		t.Errorf("expected %v, got %v", kapi.PullAlways, actualPod.Spec.Containers[0].ImagePullPolicy)
	}
}
// TestAlwaysPullImagesOff verifies that with the AlwaysPullImages plugin left
// at its default (disabled) state, the pod's requested PullNever policy is
// preserved.
func TestAlwaysPullImagesOff(t *testing.T) {
	masterConfig, kubeConfigFile, err := testserver.StartTestMaster()
	if err != nil {
		t.Fatalf("error starting server: %v", err)
	}
	defer testserver.CleanupMasterEtcd(t, masterConfig)
	kubeClientset, err := testutil.GetClusterAdminKubeInternalClient(kubeConfigFile)
	if err != nil {
		t.Fatalf("error getting client: %v", err)
	}
	ns := &kapi.Namespace{}
	ns.Name = testutil.Namespace()
	_, err = kubeClientset.Core().Namespaces().Create(ns)
	if err != nil {
		t.Fatalf("error creating namespace: %v", err)
	}
	if err := testserver.WaitForPodCreationServiceAccounts(kubeClientset, testutil.Namespace()); err != nil {
		t.Fatalf("error getting client config: %v", err)
	}
	testPod := &kapi.Pod{}
	testPod.GenerateName = "test"
	testPod.Spec.Containers = []kapi.Container{
		{
			Name:            "container",
			Image:           "openshift/origin-pod:notlatest",
			ImagePullPolicy: kapi.PullNever,
		},
	}
	actualPod, err := kubeClientset.Core().Pods(testutil.Namespace()).Create(testPod)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	// No plugin enabled: the requested policy must survive admission unchanged.
	if actualPod.Spec.Containers[0].ImagePullPolicy != kapi.PullNever {
		t.Errorf("expected %v, got %v", kapi.PullNever, actualPod.Spec.Containers[0].ImagePullPolicy)
	}
}
| PI-Victor/origin | test/integration/admissionconfig_test.go | GO | apache-2.0 | 13,297 |
/*
* Copyright 2011-2013, by Vladimir Kostyukov and Contributors.
*
* This file is part of la4j project (http://la4j.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributor(s): Maxim Samoylov
*
*/
package org.la4j.decomposition;
import org.la4j.Matrices;
import org.la4j.Matrix;
import org.la4j.matrix.SparseMatrix;
import org.la4j.Vector;
import org.la4j.Vectors;
import org.la4j.vector.DenseVector;
import org.la4j.vector.functor.VectorAccumulator;
/**
* This class represents Eigen decomposition of matrices. More details
* <p>
* <a href="http://mathworld.wolfram.com/EigenDecomposition.html"> here.</a>
* </p>
*/
public class EigenDecompositor extends AbstractDecompositor implements MatrixDecompositor {
/**
 * Creates an Eigen decompositor for the given matrix.
 *
 * @param matrix the matrix to decompose (must be square)
 */
public EigenDecompositor(Matrix matrix) {
    super(matrix);
}
/**
 * Performs the Eigen (EVD) decomposition of the underlying matrix.
 * <p>
 * See <a href="http://mathworld.wolfram.com/EigenDecomposition.html">
 * http://mathworld.wolfram.com/EigenDecomposition.html</a> for more
 * details.
 * </p>
 *
 * @return { V, D } — eigenvector matrix and (block-)diagonal eigenvalue matrix
 * @throws IllegalArgumentException if the matrix is not square
 */
@Override
public Matrix[] decompose() {
    // Symmetric matrices get the cheaper Jacobi-rotation routine.
    if (matrix.is(Matrices.SYMMETRIC_MATRIX)) {
        return decomposeSymmetricMatrix(matrix);
    }
    // The general routine requires a square matrix.
    if (matrix.rows() != matrix.columns()) {
        throw new IllegalArgumentException("Can't decompose rectangle matrix");
    }
    return decomposeNonSymmetricMatrix(matrix);
}
/**
 * Reports whether this decompositor can handle the given matrix,
 * i.e. whether the matrix is square.
 */
@Override
public boolean applicableTo(Matrix matrix) {
    int rows = matrix.rows();
    int columns = matrix.columns();
    return rows == columns;
}
/**
 * Returns the result of Eigen decomposition for <a
 * href="http://mathworld.wolfram.com/SymmetricMatrix.html"> symmetric</a>
 * matrix
 * <p>
 * See <a href="http://mathworld.wolfram.com/EigenDecomposition.html">
 * http://mathworld.wolfram.com/EigenDecomposition.html</a> for more
 * details.
 * </p>
 * <p>
 * Implementation: iterative Jacobi rotations. Each step rotates away the
 * largest off-diagonal element until the Euclidean norm of the per-row
 * off-diagonal masses stops changing (within Matrices.EPS).
 * </p>
 *
 * @param matrix
 * @return { V, D }
 */
private Matrix[] decomposeSymmetricMatrix(Matrix matrix) {
    Matrix d = matrix.copy();                        // working copy; converges to diagonal D
    Matrix v = SparseMatrix.identity(matrix.rows()); // accumulates the rotations -> V
    Vector r = generateR(d);                         // per-row off-diagonal sum of squares
    Matrix u = SparseMatrix.identity(matrix.rows()); // current Jacobi rotation, reused each step
    VectorAccumulator normAccumulator = Vectors.mkEuclideanNormAccumulator();
    double n = Matrices.EPS;
    double nn = r.fold(normAccumulator);
    int kk = 0; // pivot indices of the previous iteration (needed to reset u)
    int ll = 0;
    while (Math.abs(n - nn) > Matrices.EPS) {
        // Pivot: row k with the largest off-diagonal mass, then the largest
        // off-diagonal element (k, l) within that row.
        int k = findMax(r);
        int l = findMax(d, k);
        regenerateU(u, d, k, l, kk, ll);
        kk = k;
        ll = l;
        v = v.multiply(u);
        d = u.transpose().multiply(d.multiply(u));
        // Refresh the off-diagonal mass of the two pivot rows.
        r.set(k, generateRi(d, k));
        r.set(l, generateRi(d, l));
        n = nn;
        nn = r.fold(normAccumulator);
    }
    return new Matrix[] { v, d };
}
/**
 * Returns the index of the vector element with the largest absolute value.
 * Ties keep the earliest index.
 */
private int findMax(Vector vector) {
    int indexOfMax = 0;
    double max = vector.get(0);
    for (int i = 1; i < vector.length(); i++) {
        double candidate = vector.get(i);
        if (Math.abs(candidate) > Math.abs(max)) {
            max = candidate;
            indexOfMax = i;
        }
    }
    return indexOfMax;
}
/**
 * Returns the column index of the largest (in absolute value) off-diagonal
 * element of row i. The starting candidate is column 0 (or column 1 when
 * i == 0) so that the diagonal element itself is never selected.
 *
 * @param matrix the matrix whose row is scanned
 * @param i      the row index (its diagonal column is excluded)
 * @return the column index of the largest off-diagonal entry of row i
 */
private int findMax(Matrix matrix, int i) {
    double value = i > 0 ? matrix.get(i, 0) : matrix.get(i, 1);
    int result = i > 0 ? 0 : 1;
    for (int j = 0; j < matrix.columns(); j++) {
        if (i != j) {
            double v = matrix.get(i, j);
            if (Math.abs(value) < Math.abs(v)) {
                result = j;
                value = v;
            }
        }
    }
    return result;
}
/**
 * Builds a vector whose i-th entry is the sum of squares of the
 * off-diagonal elements of row i (see {@link #generateRi}).
 */
private Vector generateR(Matrix matrix) {
    int rows = matrix.rows();
    Vector result = DenseVector.zero(rows);
    for (int i = 0; i < rows; i++) {
        result.set(i, generateRi(matrix, i));
    }
    return result;
}
/**
 * Computes the sum of squares of the off-diagonal elements of row i.
 */
private double generateRi(Matrix matrix, int i) {
    double sumOfSquares = 0.0;
    for (int j = 0; j < matrix.columns(); j++) {
        if (j == i) {
            continue; // skip the diagonal element
        }
        double element = matrix.get(i, j);
        sumOfSquares += element * element;
    }
    return sumOfSquares;
}
/**
 * Rewrites u in place so that it is the Jacobi rotation annihilating the
 * (k, l) element of matrix. The entries touched by the previous rotation
 * (kk, ll) are first reset to the identity pattern so u can be reused
 * across iterations without a full rebuild.
 *
 * @param u      rotation matrix, reused between iterations
 * @param matrix current working matrix
 * @param k      pivot row of this iteration
 * @param l      pivot column of this iteration
 * @param kk     pivot row of the previous iteration
 * @param ll     pivot column of the previous iteration
 */
private void regenerateU(Matrix u, Matrix matrix, int k, int l, int kk, int ll) {
    // Undo the previous rotation's entries: u becomes the identity again.
    u.set(kk, kk, 1.0);
    u.set(ll, ll, 1.0);
    u.set(kk, ll, 0.0);
    u.set(ll, kk, 0.0);
    double alpha = 0.0;
    double beta = 0.0;
    if (Math.abs(matrix.get(k, k) - matrix.get(l, l)) < Matrices.EPS) {
        // (Nearly) equal diagonal entries: rotate by 45 degrees.
        alpha = beta = Math.sqrt(0.5);
    } else {
        double mu = 2 * matrix.get(k, l) / (matrix.get(k, k) - matrix.get(l, l));
        mu = 1.0 / Math.sqrt(1.0 + mu * mu);
        alpha = Math.sqrt(0.5 * (1.0 + mu));
        // NOTE(review): after the reassignment above, mu is always positive,
        // so Math.signum(mu) is always 1.0. Possibly the sign of the
        // *original* mu was intended — confirm against the Jacobi derivation.
        beta = Math.signum(mu) * Math.sqrt(0.5 * (1.0 - mu));
    }
    u.set(k, k, alpha);
    u.set(l, l, alpha);
    u.set(k, l, -beta);
    u.set(l, k, beta);
}
/**
 * Returns the result of Eigen decomposition for non-<a
 * href="http://mathworld.wolfram.com/SymmetricMatrix.html">symmetric</a>
 * matrix
 * <p>
 * See <a href="http://mathworld.wolfram.com/EigenDecomposition.html">
 * http://mathworld.wolfram.com/EigenDecomposition.html</a> for more
 * details.
 * </p>
 * <p>
 * Two stages (JAMA/EISPACK lineage): reduce to upper Hessenberg form
 * (orthes), then to real Schur form by shifted QR (hqr2). Complex
 * eigenvalue pairs appear as 2x2 blocks in the returned D.
 * </p>
 *
 * @param matrix
 * @return { V, D }
 */
private Matrix[] decomposeNonSymmetricMatrix(Matrix matrix) {
    Matrix A = matrix.copy();
    int n = matrix.columns();
    Matrix v = SparseMatrix.identity(n);
    Vector d = DenseVector.zero(n); // real parts of the eigenvalues
    Vector e = DenseVector.zero(n); // imaginary parts of the eigenvalues
    Matrix h = A.copy();            // working matrix, destroyed by the reduction
    Vector ort = DenseVector.zero(n); // scratch space for Householder vectors
    // Reduce to Hessenberg form.
    orthes(h, v, ort);
    // Reduce Hessenberg to real Schur form.
    hqr2(h, v, d, e);
    // Assemble D: real eigenvalues on the diagonal; a complex conjugate
    // pair (d[i] +/- e[i]*i) is encoded as a 2x2 block.
    Matrix dd = matrix.blankOfShape(n, n);
    for (int i = 0; i < n; i++) {
        dd.set(i, i, d.get(i));
        if (e.get(i) > 0) {
            dd.set(i, i + 1, e.get(i));
        } else if (e.get(i) < 0) {
            dd.set(i, i - 1, e.get(i));
        }
    }
    return new Matrix[] { v, dd };
}
// Nonsymmetric reduction to Hessenberg form.
/**
 * Reduces the matrix h to upper Hessenberg form using Householder
 * similarity transformations, accumulating the orthogonal factor in v.
 *
 * @param h   matrix to reduce (modified in place)
 * @param v   receives the accumulated orthogonal transformations
 * @param ort scratch vector holding the current Householder vector
 */
private void orthes(Matrix h, Matrix v, Vector ort) {
    // This is derived from the Algol procedures orthes and ortran,
    // by Martin and Wilkinson, Handbook for Auto. Comp.,
    // Vol.ii-Linear Algebra, and the corresponding
    // Fortran subroutines in EISPACK.
    int n = ort.length();
    int low = 0;
    int high = n - 1;
    for (int m = low + 1; m <= high - 1; m++) {
        // Scale column (1-norm of the part below the subdiagonal) to
        // avoid overflow/underflow in the reflector computation.
        double scale = 0.0;
        for (int i = m; i <= high; i++) {
            scale = scale + Math.abs(h.get(i, m - 1));
        }
        if (scale != 0.0) {
            // Compute Householder transformation.
            double hh = 0.0;
            for (int i = high; i >= m; i--) {
                ort.set(i, h.get(i, m - 1) / scale);
                hh += ort.get(i) * ort.get(i);
            }
            double g = Math.sqrt(hh);
            if (ort.get(m) > Matrices.EPS) {
                g = -g; // choose the sign that avoids cancellation
            }
            hh = hh - ort.get(m) * g;
            ort.updateAt(m, Vectors.asMinusFunction(g));
            // Apply Householder similarity transformation
            // H = (I-u*u'/h)*H*(I-u*u')/h)
            for (int j = m; j < n; j++) {
                double f = 0.0;
                for (int i = high; i >= m; i--) {
                    f += ort.get(i) * h.get(i, j);
                }
                f = f / hh;
                for (int i = m; i <= high; i++) {
                    h.updateAt(i, j, Matrices.asMinusFunction(f * ort.get(i)));
                }
            }
            for (int i = 0; i <= high; i++) {
                double f = 0.0;
                for (int j = high; j >= m; j--) {
                    f += ort.get(j) * h.get(i, j);
                }
                f = f / hh;
                for (int j = m; j <= high; j++) {
                    h.updateAt(i, j, Matrices.asMinusFunction(f * ort.get(j)));
                }
            }
            ort.set(m, scale * ort.get(m));
            h.set(m, m - 1, scale * g);
        }
    }
    // Accumulate transformations (Algol's ortran).
    for (int m = high - 1; m >= low + 1; m--) {
        if (Math.abs(h.get(m, m - 1)) > Matrices.EPS) {
            for (int i = m + 1; i <= high; i++) {
                ort.set(i, h.get(i, m - 1));
            }
            for (int j = m; j <= high; j++) {
                double g = 0.0;
                for (int i = m; i <= high; i++) {
                    g += ort.get(i) * v.get(i, j);
                }
                // Double division avoids possible underflow
                g = (g / ort.get(m)) / h.get(m, m - 1);
                for (int i = m; i <= high; i++) {
                    v.updateAt(i, j, Matrices.asPlusFunction(g * ort.get(i)));
                }
            }
        }
    }
}
// Nonsymmetric reduction from Hessenberg to real Schur form.
/**
 * Reduces the upper Hessenberg matrix H to real Schur form via the
 * shifted QR algorithm, accumulating transformations in V, then recovers
 * the eigenvectors by back substitution and back transformation.
 * Ported from the JAMA/EISPACK hqr2 routine.
 *
 * @param H upper Hessenberg matrix (destroyed during the reduction)
 * @param V orthogonal transformations accumulated so far (from orthes);
 *          on return holds the eigenvectors of the original matrix
 * @param d output: real parts of the eigenvalues
 * @param e output: imaginary parts of the eigenvalues
 */
private void hqr2(Matrix H, Matrix V, Vector d, Vector e) {
    // This is derived from the Algol procedure hqr2,
    // by Martin and Wilkinson, Handbook for Auto. Comp.,
    // Vol.ii-Linear Algebra, and the corresponding
    // Fortran subroutine in EISPACK.
    // Initialize
    int nn = e.length();
    int n = nn - 1;
    int low = 0;
    int high = nn - 1;
    double eps = Math.pow(2.0, -52.0); // machine epsilon for doubles
    double exshift = 0.0;              // accumulated exceptional shifts
    double p = 0;
    double q = 0;
    double r = 0;
    double s = 0;
    double z = 0;
    double t;
    double w;
    double x;
    double y;
    // Store roots isolated by balanc and compute matrix norm
    double norm = 0.0;
    for (int i = 0; i < nn; i++) {
        // non-short-circuit OR kept from the original EISPACK port
        if (i < low | i > high) {
            d.set(i, H.get(i, i));
            e.set(i, 0.0);
        }
        for (int j = Math.max(i - 1, 0); j < nn; j++) {
            norm = norm + Math.abs(H.get(i, j));
        }
    }
    // Outer loop over eigenvalue index
    int iter = 0;
    while (n >= low) {
        // Look for single small sub-diagonal element
        int l = n;
        while (l > low) {
            s = Math.abs(H.get(l - 1, l - 1))
                    + Math.abs(H.get(l, l));
            if (s == 0.0) {
                s = norm;
            }
            if (Math.abs(H.get(l, l - 1)) < eps * s) {
                break;
            }
            l--;
        }
        // Check for convergence
        // One root found
        if (l == n) {
            H.updateAt(n, n, Matrices.asPlusFunction(exshift));
            d.set(n, H.get(n, n));
            e.set(n, 0.0);
            n--;
            iter = 0;
        // Two roots found
        } else if (l == n - 1) {
            w = H.get(n, n - 1) * H.get(n - 1, n);
            p = (H.get(n - 1, n - 1) - H.get(n, n)) / 2.0;
            q = p * p + w;
            z = Math.sqrt(Math.abs(q));
            H.updateAt(n, n, Matrices.asPlusFunction(exshift));
            H.updateAt(n - 1, n - 1, Matrices.asPlusFunction(exshift));
            x = H.get(n, n);
            // Real pair
            if (q >= 0) {
                if (p >= 0) {
                    z = p + z;
                } else {
                    z = p - z;
                }
                d.set(n - 1, x + z);
                d.set(n, d.get(n - 1));
                if (z != 0.0) {
                    d.set(n, x - w / z);
                }
                e.set(n - 1, 0.0);
                e.set(n, 0.0);
                x = H.get(n, n - 1);
                s = Math.abs(x) + Math.abs(z);
                p = x / s;
                q = z / s;
                r = Math.sqrt(p * p + q * q);
                p = p / r;
                q = q / r;
                // Row modification
                for (int j = n - 1; j < nn; j++) {
                    z = H.get(n - 1, j);
                    H.set(n - 1, j, q * z + p * H.get(n, j));
                    H.set(n, j, q * H.get(n, j) - p * z);
                }
                // Column modification
                for (int i = 0; i <= n; i++) {
                    z = H.get(i, n - 1);
                    H.set(i, n - 1, q * z + p * H.get(i, n));
                    H.set(i, n, q * H.get(i, n) - p * z);
                }
                // Accumulate transformations
                for (int i = low; i <= high; i++) {
                    z = V.get(i, n - 1);
                    V.set(i, n - 1, q * z + p * V.get(i, n));
                    V.set(i, n, q * V.get(i, n) - p * z);
                }
            // Complex pair
            } else {
                d.set(n - 1, x + p);
                d.set(n, x + p);
                e.set(n - 1, z);
                e.set(n, -z);
            }
            n = n - 2;
            iter = 0;
        // No convergence yet
        } else {
            // Form shift
            x = H.get(n, n);
            y = 0.0;
            w = 0.0;
            if (l < n) {
                y = H.get(n - 1, n - 1);
                w = H.get(n, n - 1) * H.get(n - 1, n);
            }
            // Wilkinson's original ad hoc shift
            if (iter == 10) {
                exshift += x;
                for (int i = low; i <= n; i++) {
                    H.updateAt(i, i, Matrices.asMinusFunction(x));
                }
                s = Math.abs(H.get(n, n - 1))
                        + Math.abs(H.get(n - 1, n - 2));
                // 0.75 and -0.4375 are the exceptional-shift constants
                // carried over from EISPACK's hqr routine.
                x = y = 0.75 * s;
                w = -0.4375 * s * s;
            }
            // MATLAB's new ad hoc shift
            if (iter == 30) {
                s = (y - x) / 2.0;
                s = s * s + w;
                if (s > 0) {
                    s = Math.sqrt(s);
                    if (y < x) {
                        s = -s;
                    }
                    s = x - w / ((y - x) / 2.0 + s);
                    for (int i = low; i <= n; i++) {
                        H.updateAt(i, i, Matrices.asMinusFunction(s));
                    }
                    exshift += s;
                    x = y = w = 0.964;
                }
            }
            iter = iter + 1; // (Could check iteration count here.)
            // Look for two consecutive small sub-diagonal elements
            int m = n - 2;
            while (m >= l) {
                z = H.get(m, m);
                r = x - z;
                s = y - z;
                p = (r * s - w) / H.get(m + 1, m)
                        + H.get(m, m + 1);
                q = H.get(m + 1, m + 1) - z - r - s;
                r = H.get(m + 2, m + 1);
                s = Math.abs(p) + Math.abs(q) + Math.abs(r);
                p = p / s;
                q = q / s;
                r = r / s;
                if (m == l) {
                    break;
                }
                if (Math.abs(H.get(m, m - 1)) * (Math.abs(q) + Math.abs(r)) < eps
                        * (Math.abs(p) * (Math.abs(H.get(m - 1, m - 1))
                        + Math.abs(z) + Math.abs(H.get(m + 1, m + 1))))) {
                    break;
                }
                m--;
            }
            // Zero out entries that are negligible after the implicit step.
            for (int i = m + 2; i <= n; i++) {
                H.set(i, i - 2, 0.0);
                if (i > m + 2) {
                    H.set(i, i - 3, 0.0);
                }
            }
            // Double QR step involving rows l:n and columns m:n
            for (int k = m; k <= n - 1; k++) {
                boolean notlast = (k != n - 1);
                if (k != m) {
                    p = H.get(k, k - 1);
                    q = H.get(k + 1, k - 1);
                    r = (notlast ? H.get(k + 2, k - 1) : 0.0);
                    x = Math.abs(p) + Math.abs(q) + Math.abs(r);
                    if (x == 0.0) {
                        continue;
                    }
                    p = p / x;
                    q = q / x;
                    r = r / x;
                }
                s = Math.sqrt(p * p + q * q + r * r);
                if (p < 0) {
                    s = -s;
                }
                if (s != 0) {
                    if (k != m) {
                        H.set(k, k - 1, -s * x);
                    } else if (l != m) {
                        H.updateAt(k, k - 1, Matrices.INV_FUNCTION);
                    }
                    p = p + s;
                    x = p / s;
                    y = q / s;
                    z = r / s;
                    q = q / p;
                    r = r / p;
                    // Row modification
                    for (int j = k; j < nn; j++) {
                        p = H.get(k, j) + q * H.get(k + 1, j);
                        if (notlast) {
                            p = p + r * H.get(k + 2, j);
                            H.updateAt(k + 2, j,
                                    Matrices.asMinusFunction(p * z));
                        }
                        H.updateAt(k, j, Matrices.asMinusFunction(p * x));
                        H.updateAt(k + 1, j, Matrices.asMinusFunction(p * y));
                    }
                    // Column modification
                    for (int i = 0; i <= Math.min(n, k + 3); i++) {
                        p = x * H.get(i, k) + y
                                * H.get(i, k + 1);
                        if (notlast) {
                            p = p + z * H.get(i, k + 2);
                            H.updateAt(i, k + 2,
                                    Matrices.asMinusFunction(p * r));
                        }
                        H.updateAt(i, k, Matrices.asMinusFunction(p));
                        H.updateAt(i, k + 1, Matrices.asMinusFunction(p * q));
                    }
                    // Accumulate transformations
                    for (int i = low; i <= high; i++) {
                        p = x * V.get(i, k) + y
                                * V.get(i, k + 1);
                        if (notlast) {
                            p = p + z * V.get(i, k + 2);
                            V.updateAt(i, k + 2,
                                    Matrices.asMinusFunction(p * r));
                        }
                        V.updateAt(i, k, Matrices.asMinusFunction(p));
                        V.updateAt(i, k + 1, Matrices.asMinusFunction(p * q));
                    }
                } // (s != 0)
            } // k loop
        } // check convergence
    } // while (n >= low)
    // Backsubstitute to find vectors of upper triangular form
    if (norm == 0.0) {
        return;
    }
    for (n = nn - 1; n >= 0; n--) {
        p = d.get(n);
        q = e.get(n);
        // Real vector
        if (q == 0) {
            int l = n;
            H.set(n, n, 1.0);
            for (int i = n - 1; i >= 0; i--) {
                w = H.get(i, i) - p;
                r = 0.0;
                for (int j = l; j <= n; j++) {
                    r = r + H.get(i, j) * H.get(j, n);
                }
                if (e.get(i) < 0.0) {
                    z = w;
                    s = r;
                } else {
                    l = i;
                    if (e.get(i) == 0.0) {
                        if (w != 0.0) {
                            H.set(i, n, -r / w);
                        } else {
                            // Singular pivot: divide by a tiny multiple of
                            // the norm instead of zero.
                            H.set(i, n, -r / (eps * norm));
                        }
                    // Solve real equations
                    } else {
                        x = H.get(i, i + 1);
                        y = H.get(i + 1, i);
                        q = (d.get(i) - p) * (d.get(i) - p)
                                + e.get(i) * e.get(i);
                        t = (x * s - z * r) / q;
                        H.set(i, n, t);
                        if (Math.abs(x) > Math.abs(z)) {
                            H.set(i + 1, n, (-r - w * t) / x);
                        } else {
                            H.set(i + 1, n, (-s - y * t) / z);
                        }
                    }
                    // Overflow control
                    t = Math.abs(H.get(i, n));
                    if ((eps * t) * t > 1) {
                        for (int j = i; j <= n; j++) {
                            H.updateAt(j, n, Matrices.asDivFunction(t));
                        }
                    }
                }
            }
        // Complex vector
        } else if (q < 0) {
            int l = n - 1;
            // Last vector component imaginary so matrix is triangular
            if (Math.abs(H.get(n, n - 1))
                    > Math.abs(H.get(n - 1, n))) {
                H.set(n - 1, n - 1, q / H.get(n, n - 1));
                H.set(n - 1, n, -(H.get(n, n) - p)
                        / H.get(n, n - 1));
            } else {
                double[] cdiv = cdiv(0.0, -H.get(n - 1, n),
                        H.get(n - 1, n - 1) - p, q);
                H.set(n - 1, n - 1, cdiv[0]);
                H.set(n - 1, n, cdiv[1]);
            }
            H.set(n, n - 1, 0.0);
            H.set(n, n, 1.0);
            for (int i = n - 2; i >= 0; i--) {
                double ra;
                double sa;
                double vr;
                double vi;
                ra = 0.0;
                sa = 0.0;
                for (int j = l; j <= n; j++) {
                    ra = ra + H.get(i, j) * H.get(j, n - 1);
                    sa = sa + H.get(i, j) * H.get(j, n);
                }
                w = H.get(i, i) - p;
                if (e.get(i) < 0.0) {
                    z = w;
                    r = ra;
                    s = sa;
                } else {
                    l = i;
                    if (e.get(i) == 0) {
                        double[] cdiv = cdiv(-ra, -sa, w, q);
                        H.set(i, n - 1, cdiv[0]);
                        H.set(i, n, cdiv[1]);
                    } else {
                        // Solve complex equations
                        x = H.get(i, i + 1);
                        y = H.get(i + 1, i);
                        vr = (d.get(i) - p) * (d.get(i) - p)
                                + e.get(i) * e.get(i) - q * q;
                        vi = (d.get(i) - p) * 2.0 * q;
                        // non-short-circuit AND kept from the original port
                        if (vr == 0.0 & vi == 0.0) {
                            vr = eps
                                    * norm
                                    * (Math.abs(w) + Math.abs(q)
                                    + Math.abs(x) + Math.abs(y) + Math
                                    .abs(z));
                        }
                        double[] cdiv = cdiv(x * r - z * ra + q * sa,
                                x * s - z * sa - q * ra, vr, vi);
                        H.set(i, n - 1, cdiv[0]);
                        H.set(i, n, cdiv[1]);
                        if (Math.abs(x) > (Math.abs(z) + Math.abs(q))) {
                            H.set(i + 1, n - 1, (-ra - w
                                    * H.get(i, n - 1) + q
                                    * H.get(i, n)) / x);
                            H.set(i + 1, n, (-sa - w
                                    * H.get(i, n) - q
                                    * H.get(i, n - 1)) / x);
                        } else {
                            cdiv = cdiv(-r - y
                                    * H.get(i, n - 1), -s - y
                                    * H.get(i, n), z, q);
                            H.set(i + 1, n - 1, cdiv[0]);
                            H.set(i + 1, n, cdiv[1]);
                        }
                    }
                    // Overflow control
                    t = Math.max(Math.abs(H.get(i, n - 1)),
                            Math.abs(H.get(i, n)));
                    if ((eps * t) * t > 1) {
                        for (int j = i; j <= n; j++) {
                            H.updateAt(j, n - 1, Matrices.asDivFunction(t));
                            H.updateAt(j, n, Matrices.asDivFunction(t));
                        }
                    }
                }
            }
        }
    }
    // Vectors of isolated roots
    for (int i = 0; i < nn; i++) {
        if (i < low | i > high) {
            for (int j = i; j < nn; j++) {
                V.set(i, j, H.get(i, j));
            }
        }
    }
    // Back transformation to get eigenvectors of original matrix
    for (int j = nn - 1; j >= low; j--) {
        for (int i = low; i <= high; i++) {
            z = 0.0;
            for (int k = low; k <= Math.min(j, high); k++) {
                z = z + V.get(i, k) * H.get(k, j);
            }
            V.set(i, j, z);
        }
    }
}
/**
 * Complex scalar division (xr + xi*i) / (yr + yi*i) using Smith's
 * formula, which scales by the larger component of the divisor to
 * reduce the risk of overflow/underflow.
 *
 * @return a two-element array { real part, imaginary part }
 */
private double[] cdiv(double xr, double xi, double yr, double yi) {
    double real;
    double imag;
    if (Math.abs(yr) > Math.abs(yi)) {
        double ratio = yi / yr;
        double scale = yr + ratio * yi;
        real = (xr + ratio * xi) / scale;
        imag = (xi - ratio * xr) / scale;
    } else {
        double ratio = yr / yi;
        double scale = yi + ratio * yr;
        real = (ratio * xr + xi) / scale;
        imag = (ratio * xi - xr) / scale;
    }
    return new double[] { real, imag };
}
}
| vkostyukov/la4j | src/main/java/org/la4j/decomposition/EigenDecompositor.java | Java | apache-2.0 | 27,846 |
using System;
using eggVia.Core;
using EloBuddy;
using SharpDX;
using Color = System.Drawing.Color;
namespace eggVia.Modes
{
/// <summary>
/// Drawing helpers for cast indicators.
/// </summary>
internal class Casts : Model
{
    // Per-level line length (pixels), indexed by W.Level - 1.
    // "WLargura" is Portuguese for "W width" — presumably the W spell's
    // range per level; TODO confirm against the champion's spell data.
    public static int[] WLargura = {400, 500, 600, 700, 800};

    // TODO: automatically disable ult when there is no hero/minion and the hotkey is not pressed.
    /// <summary>
    /// Draw callback: renders a white horizontal line extending to the left
    /// of the cursor, with length taken from WLargura for the current W level.
    /// </summary>
    public static void OnEndDraw(EventArgs args)
    {
        var mPos = Game.CursorPos;
        Drawing.DrawLine(new Vector2(mPos.X, mPos.Y),
            new Vector2(mPos.X - WLargura[W.Level - 1], mPos.Y), 2f,
            Color.White);
    }
}
} | mrarticuno/ElobuddyPrivate | eggVia/eggVia/Modes/Casts.cs | C# | apache-2.0 | 605 |
package com.vmware.vim25;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for VirtualSCSIController complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="VirtualSCSIController">
* <complexContent>
* <extension base="{urn:vim25}VirtualController">
* <sequence>
* <element name="hotAddRemove" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/>
* <element name="sharedBus" type="{urn:vim25}VirtualSCSISharing"/>
* <element name="scsiCtlrUnitNumber" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE(review): this appears to be a JAXB (xjc) generated binding class for
// the vim25 WSDL type "VirtualSCSIController" — prefer regenerating from the
// schema over hand-editing.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "VirtualSCSIController", propOrder = {
    "hotAddRemove",
    "sharedBus",
    "scsiCtlrUnitNumber"
})
@XmlSeeAlso({
    VirtualLsiLogicController.class,
    ParaVirtualSCSIController.class,
    VirtualBusLogicController.class,
    VirtualLsiLogicSASController.class
})
public class VirtualSCSIController
    extends VirtualController
{

    // Optional in the schema (minOccurs="0"); null means "not specified".
    protected Boolean hotAddRemove;
    // Required by the schema.
    @XmlElement(required = true)
    protected VirtualSCSISharing sharedBus;
    // Optional in the schema (minOccurs="0"); null means "not specified".
    protected Integer scsiCtlrUnitNumber;

    /**
     * Gets the value of the hotAddRemove property.
     *
     * @return
     *     possible object is
     *     {@link Boolean }
     *
     */
    public Boolean isHotAddRemove() {
        return hotAddRemove;
    }

    /**
     * Sets the value of the hotAddRemove property.
     *
     * @param value
     *     allowed object is
     *     {@link Boolean }
     *
     */
    public void setHotAddRemove(Boolean value) {
        this.hotAddRemove = value;
    }

    /**
     * Gets the value of the sharedBus property.
     *
     * @return
     *     possible object is
     *     {@link VirtualSCSISharing }
     *
     */
    public VirtualSCSISharing getSharedBus() {
        return sharedBus;
    }

    /**
     * Sets the value of the sharedBus property.
     *
     * @param value
     *     allowed object is
     *     {@link VirtualSCSISharing }
     *
     */
    public void setSharedBus(VirtualSCSISharing value) {
        this.sharedBus = value;
    }

    /**
     * Gets the value of the scsiCtlrUnitNumber property.
     *
     * @return
     *     possible object is
     *     {@link Integer }
     *
     */
    public Integer getScsiCtlrUnitNumber() {
        return scsiCtlrUnitNumber;
    }

    /**
     * Sets the value of the scsiCtlrUnitNumber property.
     *
     * @param value
     *     allowed object is
     *     {@link Integer }
     *
     */
    public void setScsiCtlrUnitNumber(Integer value) {
        this.scsiCtlrUnitNumber = value;
    }

}
| jdgwartney/vsphere-ws | java/JAXWS/samples/com/vmware/vim25/VirtualSCSIController.java | Java | apache-2.0 | 3,215 |
__author__ = 'Javier'
class Project(object):
    """Immutable snapshot of a repository's popularity counters.

    All counters are coerced to ``int`` eagerly, so string values coming
    straight out of JSON behave like numbers.
    """

    def __init__(self, forks, stars, watchs):
        self._forks, self._stars, self._watchs = (
            int(forks), int(stars), int(watchs))

    @property
    def forks(self):
        """Number of forks of the repository."""
        return self._forks

    @property
    def stars(self):
        """Number of stars of the repository."""
        return self._stars

    @property
    def watchs(self):
        """Number of watchers of the repository."""
        return self._watchs
class GIndex(object):
    """Computes a weighted popularity index for a project."""

    def calculate(self, project):
        """Return the index: forks weigh three times stars or watchers."""
        weighted_forks = 3 * project.forks
        return weighted_forks + project.stars + project.watchs
class ProjectRepositoryService(object):
    """Looks up a single repository of a user through a connector.

    The connector must expose ``read_all(user)`` returning an iterable of
    repository dicts, each carrying at least a ``'name'`` key.
    """

    def __init__(self, conector):
        # NOTE: attribute name 'conector' (sic) is kept — it is public API.
        self.conector = conector
        self.project_factory = ProjectFactory()

    def find(self, user, repo_name):
        """Return the Project named ``repo_name`` owned by ``user``."""
        raw_json = self._read_repo(user, repo_name)
        return self.project_factory.build_from(raw_json)

    def _read_repo(self, user, repo_name):
        # First repository whose name matches, or None when absent.
        matches = (repo for repo in self.conector.read_all(user)
                   if repo['name'] == repo_name)
        return next(matches, None)
class ProjectFactory(object):
def build_from(self, json_project):
return Project(json_project['forks_count'],
json_project['watchers_count'],
json_project['stargazers_count']) | javierj/kobudo-katas | Kata-RestConsumer/gindex.py | Python | apache-2.0 | 1,259 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#nullable disable
using System;
using Microsoft.CodeAnalysis.Editor.Tagging;
using Microsoft.VisualStudio.Text.Editor;
namespace Microsoft.CodeAnalysis.Editor.Shared.Tagging
{
internal partial class TaggerEventSources
{
private class SelectionChangedEventSource : AbstractTaggerEventSource
{
private readonly ITextView _textView;
public SelectionChangedEventSource(ITextView textView, TaggerDelay delay)
: base(delay)
{
_textView = textView;
}
public override void Connect()
=> _textView.Selection.SelectionChanged += OnSelectionChanged;
public override void Disconnect()
=> _textView.Selection.SelectionChanged -= OnSelectionChanged;
private void OnSelectionChanged(object sender, EventArgs args)
=> RaiseChanged();
}
}
}
| brettfo/roslyn | src/EditorFeatures/Core/Shared/Tagging/EventSources/TaggerEventSources.SelectionChangedEventSource.cs | C# | apache-2.0 | 1,136 |
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.utilities.procedural;
import org.terasology.math.TeraMath;
import org.terasology.utilities.random.FastRandom;
/**
* A speed-improved simplex noise algorithm for Simplex noise in 2D, 3D and 4D.
* <br><br>
* Based on example code by Stefan Gustavson (stegu@itn.liu.se).
* Optimisations by Peter Eastman (peastman@drizzle.stanford.edu).
* Better rank ordering method by Stefan Gustavson in 2012.
* <br><br>
* This could be speeded up even further, but it's useful as it is.
* <br><br>
* Version 2012-03-09
* <br><br>
* This code was placed in the public domain by its original author,
* Stefan Gustavson. You may use it as you see fit, but
* attribution is appreciated.
* <br><br>
* See http://staffwww.itn.liu.se/~stegu/
* <br><br>
* msteiger: Introduced seed value
*/
public class SimplexNoise extends AbstractNoise implements Noise2D, Noise3D {
    /**
     * Multiply this with the gridDim provided and noise(x,x) will give tileable 1D noise which will tile
     * when x crosses a multiple of (this * gridDim)
     */
    public static final float TILEABLE1DMAGICNUMBER = 0.5773502691896258f;
    // Gradient directions used for 2D (x/y components only) and 3D noise.
    private static Grad[] grad3 = {
            new Grad(1, 1, 0), new Grad(-1, 1, 0), new Grad(1, -1, 0), new Grad(-1, -1, 0),
            new Grad(1, 0, 1), new Grad(-1, 0, 1), new Grad(1, 0, -1), new Grad(-1, 0, -1),
            new Grad(0, 1, 1), new Grad(0, -1, 1), new Grad(0, 1, -1), new Grad(0, -1, -1)};
    // Gradient directions used for 4D noise.
    private static Grad[] grad4 = {
            new Grad(0, 1, 1, 1), new Grad(0, 1, 1, -1), new Grad(0, 1, -1, 1), new Grad(0, 1, -1, -1),
            new Grad(0, -1, 1, 1), new Grad(0, -1, 1, -1), new Grad(0, -1, -1, 1), new Grad(0, -1, -1, -1),
            new Grad(1, 0, 1, 1), new Grad(1, 0, 1, -1), new Grad(1, 0, -1, 1), new Grad(1, 0, -1, -1),
            new Grad(-1, 0, 1, 1), new Grad(-1, 0, 1, -1), new Grad(-1, 0, -1, 1), new Grad(-1, 0, -1, -1),
            new Grad(1, 1, 0, 1), new Grad(1, 1, 0, -1), new Grad(1, -1, 0, 1), new Grad(1, -1, 0, -1),
            new Grad(-1, 1, 0, 1), new Grad(-1, 1, 0, -1), new Grad(-1, -1, 0, 1), new Grad(-1, -1, 0, -1),
            new Grad(1, 1, 1, 0), new Grad(1, 1, -1, 0), new Grad(1, -1, 1, 0), new Grad(1, -1, -1, 0),
            new Grad(-1, 1, 1, 0), new Grad(-1, 1, -1, 0), new Grad(-1, -1, 1, 0), new Grad(-1, -1, -1, 0)};
    // Skewing and unskewing factors for 2, 3, and 4 dimensions
    private static final float F2 = 0.5f * (float) (Math.sqrt(3.0f) - 1.0f);
    private static final float G2 = (3.0f - (float) Math.sqrt(3.0f)) / 6.0f;
    private static final float F3 = 1.0f / 3.0f;
    private static final float G3 = 1.0f / 6.0f;
    private static final float F4 = ((float) Math.sqrt(5.0f) - 1.0f) / 4.0f;
    private static final float G4 = (5.0f - (float) Math.sqrt(5.0f)) / 20.0f;
    // perm and permMod12 are doubled (length permCount * 2) so that the index
    // sums computed in the noise methods never need an explicit wrap.
    private final short[] perm;
    private final short[] permMod12;
    private final int permCount;
    /**
     * Initialize permutations with a given seed and grid dimension.
     *
     * @param seed a seed value used for permutation shuffling
     */
    public SimplexNoise(long seed) {
        this(seed, 256);
    }
    /**
     * Initialize permutations with a given seed and grid dimension.
     * Supports 1D tileable noise
     * @see SimplexNoise#TILEABLE1DMAGICNUMBER
     *
     * @param seed a seed value used for permutation shuffling
     * @param gridDim gridDim x gridDim will be the number of squares in the square grid formed after skewing the simplices belonging to one "tile"
     */
    public SimplexNoise(long seed, int gridDim) {
        FastRandom rand = new FastRandom(seed);
        permCount = gridDim;
        perm = new short[permCount * 2];
        permMod12 = new short[permCount * 2];
        short[] p = new short[permCount];
        // Initialize with all values [0..(permCount-1)]
        for (short i = 0; i < permCount; i++) {
            p[i] = i;
        }
        // Shuffle the array
        for (int i = 0; i < permCount; i++) {
            int j = rand.nextInt(permCount);
            short swap = p[i];
            p[i] = p[j];
            p[j] = swap;
        }
        // Duplicate the shuffled table and cache each value mod 12
        // (the size of grad3) for the 2D/3D gradient lookups.
        for (int i = 0; i < permCount * 2; i++) {
            perm[i] = p[i % permCount];
            permMod12[i] = (short) (perm[i] % 12);
        }
    }
    private static float dot(Grad g, float x, float y) {
        return g.x * x + g.y * y;
    }
    private static float dot(Grad g, float x, float y, float z) {
        return g.x * x + g.y * y + g.z * z;
    }
    private static float dot(Grad g, float x, float y, float z, float w) {
        return g.x * x + g.y * y + g.z * z + g.w * w;
    }
    /**
     * 2D simplex noise
     *
     * @param xin the x input coordinate
     * @param yin the y input coordinate
     * @return a noise value in the interval [-1,1]
     */
    @Override
    public float noise(float xin, float yin) {
        float n0;
        float n1;
        float n2; // Noise contributions from the three corners
        // Skew the input space to determine which simplex cell we're in
        float s = (xin + yin) * F2; // Hairy factor for 2D
        int i = TeraMath.floorToInt(xin + s);
        int j = TeraMath.floorToInt(yin + s);
        float t = (i + j) * G2;
        float xo0 = i - t; // Unskew the cell origin back to (x,y) space
        float yo0 = j - t;
        float x0 = xin - xo0; // The x,y distances from the cell origin
        float y0 = yin - yo0;
        // For the 2D case, the simplex shape is an equilateral triangle.
        // Determine which simplex we are in.
        int i1; // Offsets for second (middle) corner of simplex in (i,j) coords
        int j1;
        if (x0 > y0) { // lower triangle, XY order: (0,0)->(1,0)->(1,1)
            i1 = 1;
            j1 = 0;
        } else { // upper triangle, YX order: (0,0)->(0,1)->(1,1)
            i1 = 0;
            j1 = 1;
        }
        // A step of (1,0) in (i,j) means a step of (1-c,-c) in (x,y), and
        // a step of (0,1) in (i,j) means a step of (-c,1-c) in (x,y), where
        // c = (3-sqrt(3))/6
        float x1 = x0 - i1 + G2; // Offsets for middle corner in (x,y) unskewed coords
        float y1 = y0 - j1 + G2;
        float x2 = x0 - 1.0f + 2.0f * G2; // Offsets for last corner in (x,y) unskewed coords
        float y2 = y0 - 1.0f + 2.0f * G2;
        // Work out the hashed gradient indices of the three simplex corners
        int ii = Math.floorMod(i, permCount);
        int jj = Math.floorMod(j, permCount);
        int gi0 = permMod12[ii + perm[jj]];
        int gi1 = permMod12[ii + i1 + perm[jj + j1]];
        int gi2 = permMod12[ii + 1 + perm[jj + 1]];
        // Calculate the contribution from the three corners
        float t0 = 0.5f - x0 * x0 - y0 * y0;
        if (t0 < 0) {
            n0 = 0.0f;
        } else {
            t0 *= t0;
            n0 = t0 * t0 * dot(grad3[gi0], x0, y0); // (x,y) of grad3 used for 2D gradient
        }
        float t1 = 0.5f - x1 * x1 - y1 * y1;
        if (t1 < 0) {
            n1 = 0.0f;
        } else {
            t1 *= t1;
            n1 = t1 * t1 * dot(grad3[gi1], x1, y1);
        }
        float t2 = 0.5f - x2 * x2 - y2 * y2;
        if (t2 < 0) {
            n2 = 0.0f;
        } else {
            t2 *= t2;
            n2 = t2 * t2 * dot(grad3[gi2], x2, y2);
        }
        // Add contributions from each corner to get the final noise value.
        // The result is scaled to return values in the interval [-1,1].
        return 70.0f * (n0 + n1 + n2);
    }
    /**
     * 3D simplex noise
     *
     * @param xin the x input coordinate
     * @param yin the y input coordinate
     * @param zin the z input coordinate
     * @return a noise value in the interval [-1,1]
     */
    @Override
    public float noise(float xin, float yin, float zin) {
        float n0;
        float n1;
        float n2;
        float n3; // Noise contributions from the four corners
        // Skew the input space to determine which simplex cell we're in
        float s = (xin + yin + zin) * F3; // Very nice and simple skew factor for 3D
        int i = TeraMath.floorToInt(xin + s);
        int j = TeraMath.floorToInt(yin + s);
        int k = TeraMath.floorToInt(zin + s);
        float t = (i + j + k) * G3;
        float xo0 = i - t; // Unskew the cell origin back to (x,y,z) space
        float yo0 = j - t;
        float zo0 = k - t;
        float x0 = xin - xo0; // The x,y,z distances from the cell origin
        float y0 = yin - yo0;
        float z0 = zin - zo0;
        // For the 3D case, the simplex shape is a slightly irregular tetrahedron.
        // Determine which simplex we are in.
        int i1;
        int j1;
        int k1; // Offsets for second corner of simplex in (i,j,k) coords
        int i2;
        int j2;
        int k2; // Offsets for third corner of simplex in (i,j,k) coords
        if (x0 >= y0) {
            if (y0 >= z0) { // X Y Z order
                i1 = 1;
                j1 = 0;
                k1 = 0;
                i2 = 1;
                j2 = 1;
                k2 = 0;
            } else if (x0 >= z0) { // X Z Y order
                i1 = 1;
                j1 = 0;
                k1 = 0;
                i2 = 1;
                j2 = 0;
                k2 = 1;
            } else { // Z X Y order
                i1 = 0;
                j1 = 0;
                k1 = 1;
                i2 = 1;
                j2 = 0;
                k2 = 1;
            }
        } else { // x0<y0
            if (y0 < z0) { // Z Y X order
                i1 = 0;
                j1 = 0;
                k1 = 1;
                i2 = 0;
                j2 = 1;
                k2 = 1;
            } else if (x0 < z0) { // Y Z X order
                i1 = 0;
                j1 = 1;
                k1 = 0;
                i2 = 0;
                j2 = 1;
                k2 = 1;
            } else { // Y X Z order
                i1 = 0;
                j1 = 1;
                k1 = 0;
                i2 = 1;
                j2 = 1;
                k2 = 0;
            }
        }
        // A step of (1,0,0) in (i,j,k) means a step of (1-c,-c,-c) in (x,y,z),
        // a step of (0,1,0) in (i,j,k) means a step of (-c,1-c,-c) in (x,y,z), and
        // a step of (0,0,1) in (i,j,k) means a step of (-c,-c,1-c) in (x,y,z), where
        // c = 1/6.
        float x1 = x0 - i1 + G3; // Offsets for second corner in (x,y,z) coords
        float y1 = y0 - j1 + G3;
        float z1 = z0 - k1 + G3;
        float x2 = x0 - i2 + 2.0f * G3; // Offsets for third corner in (x,y,z) coords
        float y2 = y0 - j2 + 2.0f * G3;
        float z2 = z0 - k2 + 2.0f * G3;
        float x3 = x0 - 1.0f + 3.0f * G3; // Offsets for last corner in (x,y,z) coords
        float y3 = y0 - 1.0f + 3.0f * G3;
        float z3 = z0 - 1.0f + 3.0f * G3;
        // Work out the hashed gradient indices of the four simplex corners
        int ii = Math.floorMod(i, permCount);
        int jj = Math.floorMod(j, permCount);
        int kk = Math.floorMod(k, permCount);
        int gi0 = permMod12[ii + perm[jj + perm[kk]]];
        int gi1 = permMod12[ii + i1 + perm[jj + j1 + perm[kk + k1]]];
        int gi2 = permMod12[ii + i2 + perm[jj + j2 + perm[kk + k2]]];
        int gi3 = permMod12[ii + 1 + perm[jj + 1 + perm[kk + 1]]];
        // Calculate the contribution from the four corners
        float t0 = 0.6f - x0 * x0 - y0 * y0 - z0 * z0;
        if (t0 < 0) {
            n0 = 0.0f;
        } else {
            t0 *= t0;
            n0 = t0 * t0 * dot(grad3[gi0], x0, y0, z0);
        }
        float t1 = 0.6f - x1 * x1 - y1 * y1 - z1 * z1;
        if (t1 < 0) {
            n1 = 0.0f;
        } else {
            t1 *= t1;
            n1 = t1 * t1 * dot(grad3[gi1], x1, y1, z1);
        }
        float t2 = 0.6f - x2 * x2 - y2 * y2 - z2 * z2;
        if (t2 < 0) {
            n2 = 0.0f;
        } else {
            t2 *= t2;
            n2 = t2 * t2 * dot(grad3[gi2], x2, y2, z2);
        }
        float t3 = 0.6f - x3 * x3 - y3 * y3 - z3 * z3;
        if (t3 < 0) {
            n3 = 0.0f;
        } else {
            t3 *= t3;
            n3 = t3 * t3 * dot(grad3[gi3], x3, y3, z3);
        }
        // Add contributions from each corner to get the final noise value.
        // The result is scaled to stay just inside [-1,1]
        return 32.0f * (n0 + n1 + n2 + n3);
    }
    /**
     * 4D simplex noise, better simplex rank ordering method 2012-03-09
     *
     * @param xin the x input coordinate
     * @param yin the y input coordinate
     * @param zin the z input coordinate
     * @param win the w input coordinate
     * @return a noise value in the interval [-1,1]
     */
    public float noise(float xin, float yin, float zin, float win) {
        float n0;
        float n1;
        float n2;
        float n3;
        float n4; // Noise contributions from the five corners
        // Skew the (x,y,z,w) space to determine which cell of 24 simplices we're in
        float s = (xin + yin + zin + win) * F4; // Factor for 4D skewing
        int i = TeraMath.floorToInt(xin + s);
        int j = TeraMath.floorToInt(yin + s);
        int k = TeraMath.floorToInt(zin + s);
        int l = TeraMath.floorToInt(win + s);
        float t = (i + j + k + l) * G4; // Factor for 4D unskewing
        float xo0 = i - t; // Unskew the cell origin back to (x,y,z,w) space
        float yo0 = j - t;
        float zo0 = k - t;
        float wo0 = l - t;
        float x0 = xin - xo0; // The x,y,z,w distances from the cell origin
        float y0 = yin - yo0;
        float z0 = zin - zo0;
        float w0 = win - wo0;
        // For the 4D case, the simplex is a 4D shape I won't even try to describe.
        // To find out which of the 24 possible simplices we're in, we need to
        // determine the magnitude ordering of x0, y0, z0 and w0.
        // Six pair-wise comparisons are performed between each possible pair
        // of the four coordinates, and the results are used to rank the numbers.
        int rankx = 0;
        int ranky = 0;
        int rankz = 0;
        int rankw = 0;
        if (x0 > y0) {
            rankx++;
        } else {
            ranky++;
        }
        if (x0 > z0) {
            rankx++;
        } else {
            rankz++;
        }
        if (x0 > w0) {
            rankx++;
        } else {
            rankw++;
        }
        if (y0 > z0) {
            ranky++;
        } else {
            rankz++;
        }
        if (y0 > w0) {
            ranky++;
        } else {
            rankw++;
        }
        if (z0 > w0) {
            rankz++;
        } else {
            rankw++;
        }
        int i1;
        int j1;
        int k1;
        int l1; // The integer offsets for the second simplex corner
        int i2;
        int j2;
        int k2;
        int l2; // The integer offsets for the third simplex corner
        int i3;
        int j3;
        int k3;
        int l3; // The integer offsets for the fourth simplex corner
        // simplex[c] is a 4-vector with the numbers 0, 1, 2 and 3 in some order.
        // Many values of c will never occur, since e.g. x>y>z>w makes x<z, y<w and x<w
        // impossible. Only the 24 indices which have non-zero entries make any sense.
        // We use a thresholding to set the coordinates in turn from the largest magnitude.
        // Rank 3 denotes the largest coordinate.
        i1 = rankx >= 3 ? 1 : 0;
        j1 = ranky >= 3 ? 1 : 0;
        k1 = rankz >= 3 ? 1 : 0;
        l1 = rankw >= 3 ? 1 : 0;
        // Rank 2 denotes the second largest coordinate.
        i2 = rankx >= 2 ? 1 : 0;
        j2 = ranky >= 2 ? 1 : 0;
        k2 = rankz >= 2 ? 1 : 0;
        l2 = rankw >= 2 ? 1 : 0;
        // Rank 1 denotes the second smallest coordinate.
        i3 = rankx >= 1 ? 1 : 0;
        j3 = ranky >= 1 ? 1 : 0;
        k3 = rankz >= 1 ? 1 : 0;
        l3 = rankw >= 1 ? 1 : 0;
        // The fifth corner has all coordinate offsets = 1, so no need to compute that.
        float x1 = x0 - i1 + G4; // Offsets for second corner in (x,y,z,w) coords
        float y1 = y0 - j1 + G4;
        float z1 = z0 - k1 + G4;
        float w1 = w0 - l1 + G4;
        float x2 = x0 - i2 + 2.0f * G4; // Offsets for third corner in (x,y,z,w) coords
        float y2 = y0 - j2 + 2.0f * G4;
        float z2 = z0 - k2 + 2.0f * G4;
        float w2 = w0 - l2 + 2.0f * G4;
        float x3 = x0 - i3 + 3.0f * G4; // Offsets for fourth corner in (x,y,z,w) coords
        float y3 = y0 - j3 + 3.0f * G4;
        float z3 = z0 - k3 + 3.0f * G4;
        float w3 = w0 - l3 + 3.0f * G4;
        float x4 = x0 - 1.0f + 4.0f * G4; // Offsets for last corner in (x,y,z,w) coords
        float y4 = y0 - 1.0f + 4.0f * G4;
        float z4 = z0 - 1.0f + 4.0f * G4;
        float w4 = w0 - 1.0f + 4.0f * G4;
        // Work out the hashed gradient indices of the five simplex corners
        int ii = Math.floorMod(i, permCount);
        int jj = Math.floorMod(j, permCount);
        int kk = Math.floorMod(k, permCount);
        int ll = Math.floorMod(l, permCount);
        // % 32 because grad4 holds 32 gradient directions.
        int gi0 = perm[ii + perm[jj + perm[kk + perm[ll]]]] % 32;
        int gi1 = perm[ii + i1 + perm[jj + j1 + perm[kk + k1 + perm[ll + l1]]]] % 32;
        int gi2 = perm[ii + i2 + perm[jj + j2 + perm[kk + k2 + perm[ll + l2]]]] % 32;
        int gi3 = perm[ii + i3 + perm[jj + j3 + perm[kk + k3 + perm[ll + l3]]]] % 32;
        int gi4 = perm[ii + 1 + perm[jj + 1 + perm[kk + 1 + perm[ll + 1]]]] % 32;
        // Calculate the contribution from the five corners
        float t0 = 0.6f - x0 * x0 - y0 * y0 - z0 * z0 - w0 * w0;
        if (t0 < 0) {
            n0 = 0.0f;
        } else {
            t0 *= t0;
            n0 = t0 * t0 * dot(grad4[gi0], x0, y0, z0, w0);
        }
        float t1 = 0.6f - x1 * x1 - y1 * y1 - z1 * z1 - w1 * w1;
        if (t1 < 0) {
            n1 = 0.0f;
        } else {
            t1 *= t1;
            n1 = t1 * t1 * dot(grad4[gi1], x1, y1, z1, w1);
        }
        float t2 = 0.6f - x2 * x2 - y2 * y2 - z2 * z2 - w2 * w2;
        if (t2 < 0) {
            n2 = 0.f;
        } else {
            t2 *= t2;
            n2 = t2 * t2 * dot(grad4[gi2], x2, y2, z2, w2);
        }
        float t3 = 0.6f - x3 * x3 - y3 * y3 - z3 * z3 - w3 * w3;
        if (t3 < 0) {
            n3 = 0.0f;
        } else {
            t3 *= t3;
            n3 = t3 * t3 * dot(grad4[gi3], x3, y3, z3, w3);
        }
        float t4 = 0.6f - x4 * x4 - y4 * y4 - z4 * z4 - w4 * w4;
        if (t4 < 0) {
            n4 = 0.0f;
        } else {
            t4 *= t4;
            n4 = t4 * t4 * dot(grad4[gi4], x4, y4, z4, w4);
        }
        // Sum up and scale the result to cover the range [-1,1]
        return 27.0f * (n0 + n1 + n2 + n3 + n4);
    }
    // Inner class to speed up gradient computations
    // (array access is a lot slower than member access)
    private static class Grad {
        float x;
        float y;
        float z;
        float w;
        Grad(float x, float y, float z) {
            this.x = x;
            this.y = y;
            this.z = z;
        }
        Grad(float x, float y, float z, float w) {
            this.x = x;
            this.y = y;
            this.z = z;
            this.w = w;
        }
    }
}
| Malanius/Terasology | engine/src/main/java/org/terasology/utilities/procedural/SimplexNoise.java | Java | apache-2.0 | 19,875 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import uuid
import mox
from oslo.config import cfg
from quantumclient.v2_0 import client
from nova.compute import instance_types
from nova import context
from nova import exception
from nova.network import model
from nova.network import quantumv2
from nova.network.quantumv2 import api as quantumapi
from nova import test
from nova import utils
# Module-level handle on the global oslo.config configuration object.
CONF = cfg.CONF
# NOTE: Quantum client raises Exception which is discouraged by HACKING.
# We set this variable here and use it for assertions below to avoid
# the hacking checks until we can make quantum client throw a custom
# exception class instead.
QUANTUM_CLIENT_EXCEPTION = Exception
class MyComparator(mox.Comparator):
    """Structural mox comparator.

    Compares dicts by key/value (recursively) and lists/tuples by
    length-plus-membership, instead of relying on plain ``==``.
    """

    def __init__(self, lhs):
        self.lhs = lhs

    def _com_dict(self, lhs, rhs):
        # Equal size, every key present on the right, values recursively equal.
        if len(lhs) != len(rhs):
            return False
        return all(key in rhs and self._com(value, rhs[key])
                   for key, value in lhs.iteritems())

    def _com_list(self, lhs, rhs):
        # Equal size and order-insensitive membership of every element.
        if len(lhs) != len(rhs):
            return False
        return all(lhs_value in rhs for lhs_value in lhs)

    def _com(self, lhs, rhs):
        # None only matches None.
        if lhs is None:
            return rhs is None
        for container_type, comparer in ((dict, self._com_dict),
                                         (list, self._com_list),
                                         (tuple, self._com_list)):
            if isinstance(lhs, container_type):
                if not isinstance(rhs, container_type):
                    return False
                return comparer(lhs, rhs)
        return lhs == rhs

    def equals(self, rhs):
        return self._com(self.lhs, rhs)

    def __repr__(self):
        return str(self.lhs)
class TestQuantumClient(test.TestCase):
    """Tests for quantumv2.get_client() authentication wiring."""

    def test_withtoken(self):
        """A context carrying an auth token is passed straight to the client."""
        self.flags(quantum_url='http://anyhost/')
        self.flags(quantum_url_timeout=30)
        my_context = context.RequestContext('userid',
                                            'my_tenantid',
                                            auth_token='token')
        # Expect the client constructor to receive the token and timeout.
        self.mox.StubOutWithMock(client.Client, "__init__")
        client.Client.__init__(
            endpoint_url=CONF.quantum_url,
            token=my_context.auth_token,
            timeout=CONF.quantum_url_timeout,
            insecure=False).AndReturn(None)
        self.mox.ReplayAll()
        quantumv2.get_client(my_context)

    def test_withouttoken_keystone_connection_error(self):
        """Keystone auth strategy without a token must raise."""
        self.flags(quantum_auth_strategy='keystone')
        self.flags(quantum_url='http://anyhost/')
        my_context = context.RequestContext('userid', 'my_tenantid')
        self.assertRaises(QUANTUM_CLIENT_EXCEPTION,
                          quantumv2.get_client,
                          my_context)

    def test_withouttoken_keystone_not_auth(self):
        """With auth disabled, the client is built with auth_strategy=None."""
        self.flags(quantum_auth_strategy=None)
        self.flags(quantum_url='http://anyhost/')
        self.flags(quantum_url_timeout=30)
        my_context = context.RequestContext('userid', 'my_tenantid')
        self.mox.StubOutWithMock(client.Client, "__init__")
        client.Client.__init__(
            endpoint_url=CONF.quantum_url,
            auth_strategy=None,
            timeout=CONF.quantum_url_timeout,
            insecure=False).AndReturn(None)
        self.mox.ReplayAll()
        quantumv2.get_client(my_context)
class TestQuantumv2(test.TestCase):
    def setUp(self):
        """Build the mocked quantum client plus shared network/port/subnet
        and floating-ip fixtures used by the tests below."""
        super(TestQuantumv2, self).setUp()
        self.addCleanup(CONF.reset)
        # Every quantumv2.get_client() call returns the same mock client.
        self.mox.StubOutWithMock(quantumv2, 'get_client')
        self.moxed_client = self.mox.CreateMock(client.Client)
        quantumv2.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)
        self.context = context.RequestContext('userid', 'my_tenantid')
        setattr(self.context,
                'auth_token',
                'bff4a5a6b9eb4ea2a6efec6eefb77936')
        self.instance = {'project_id': '9d049e4b60b64716978ab415e6fbd5c0',
                         'uuid': str(uuid.uuid4()),
                         'display_name': 'test_instance',
                         'availability_zone': 'nova',
                         'security_groups': []}
        # nets1..nets3 grow by one tenant network each; nets4 belongs to
        # a different tenant.
        self.nets1 = [{'id': 'my_netid1',
                      'name': 'my_netname1',
                      'tenant_id': 'my_tenantid'}]
        self.nets2 = []
        self.nets2.append(self.nets1[0])
        self.nets2.append({'id': 'my_netid2',
                           'name': 'my_netname2',
                           'tenant_id': 'my_tenantid'})
        self.nets3 = self.nets2 + [{'id': 'my_netid3',
                                    'name': 'my_netname3',
                                    'tenant_id': 'my_tenantid'}]
        self.nets4 = [{'id': 'his_netid4',
                       'name': 'his_netname4',
                       'tenant_id': 'his_tenantid'}]
        self.nets = [self.nets1, self.nets2, self.nets3, self.nets4]
        self.port_address = '10.0.1.2'
        self.port_data1 = [{'network_id': 'my_netid1',
                           'device_id': 'device_id1',
                           'device_owner': 'compute:nova',
                           'id': 'my_portid1',
                           'fixed_ips': [{'ip_address': self.port_address,
                                          'subnet_id': 'my_subid1'}],
                           'mac_address': 'my_mac1', }]
        self.float_data1 = [{'port_id': 'my_portid1',
                             'fixed_ip_address': self.port_address,
                             'floating_ip_address': '172.0.1.2'}]
        self.dhcp_port_data1 = [{'fixed_ips': [{'ip_address': '10.0.1.9',
                                               'subnet_id': 'my_subid1'}]}]
        self.port_data2 = []
        self.port_data2.append(self.port_data1[0])
        self.port_data2.append({'network_id': 'my_netid2',
                                'device_id': 'device_id2',
                                'device_owner': 'compute:nova',
                                'id': 'my_portid2',
                                'fixed_ips': [{'ip_address': '10.0.2.2',
                                               'subnet_id': 'my_subid2'}],
                                'mac_address': 'my_mac2', })
        self.float_data2 = []
        self.float_data2.append(self.float_data1[0])
        self.float_data2.append({'port_id': 'my_portid2',
                                 'fixed_ip_address': '10.0.2.2',
                                 'floating_ip_address': '172.0.2.2'})
        # port_data3: a port with no fixed IP assigned.
        self.port_data3 = [{'network_id': 'my_netid1',
                           'device_id': 'device_id3',
                           'device_owner': 'compute:nova',
                           'id': 'my_portid3',
                           'fixed_ips': [],  # no fixed ip
                           'mac_address': 'my_mac3', }]
        self.subnet_data1 = [{'id': 'my_subid1',
                             'cidr': '10.0.1.0/24',
                             'network_id': 'my_netid1',
                             'gateway_ip': '10.0.1.1',
                             'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
        self.subnet_data2 = []
        self.subnet_data_n = [{'id': 'my_subid1',
                               'cidr': '10.0.1.0/24',
                               'network_id': 'my_netid1',
                               'gateway_ip': '10.0.1.1',
                               'dns_nameservers': ['8.8.1.1', '8.8.1.2']},
                              {'id': 'my_subid2',
                               'cidr': '20.0.1.0/24',
                               'network_id': 'my_netid2',
                               'gateway_ip': '20.0.1.1',
                               'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
        self.subnet_data2.append({'id': 'my_subid2',
                                  'cidr': '10.0.2.0/24',
                                  'network_id': 'my_netid2',
                                  'gateway_ip': '10.0.2.1',
                                  'dns_nameservers': ['8.8.2.1', '8.8.2.2']})
        # External (floating-ip pool) networks and one associated plus one
        # unassociated floating IP.
        self.fip_pool = {'id': '4fdbfd74-eaf8-4884-90d9-00bd6f10c2d3',
                         'name': 'ext_net',
                         'router:external': True,
                         'tenant_id': 'admin_tenantid'}
        self.fip_pool_nova = {'id': '435e20c3-d9f1-4f1b-bee5-4611a1dd07db',
                              'name': 'nova',
                              'router:external': True,
                              'tenant_id': 'admin_tenantid'}
        self.fip_unassociated = {'tenant_id': 'my_tenantid',
                                 'id': 'fip_id1',
                                 'floating_ip_address': '172.24.4.227',
                                 'floating_network_id': self.fip_pool['id'],
                                 'port_id': None,
                                 'fixed_ip_address': None,
                                 'router_id': None}
        fixed_ip_address = self.port_data2[1]['fixed_ips'][0]['ip_address']
        self.fip_associated = {'tenant_id': 'my_tenantid',
                               'id': 'fip_id2',
                               'floating_ip_address': '172.24.4.228',
                               'floating_network_id': self.fip_pool['id'],
                               'port_id': self.port_data2[1]['id'],
                               'fixed_ip_address': fixed_ip_address,
                               'router_id': 'router_id1'}
        self._returned_nw_info = []
        self.addCleanup(self.stubs.UnsetAll)
        self.addCleanup(self.mox.UnsetStubs)
        self.addCleanup(self.mox.VerifyAll)
    def _verify_nw_info(self, nw_inf, index=0):
        """Assert that entry ``index`` of a network-info list matches the
        fixtures built in setUp (addresses, names and ids use suffix
        ``index + 1``)."""
        id_suffix = index + 1
        self.assertEquals('10.0.%s.2' % id_suffix,
                          nw_inf.fixed_ips()[index]['address'])
        self.assertEquals('172.0.%s.2' % id_suffix,
                          nw_inf.fixed_ips()[index].floating_ip_addresses()[0])
        self.assertEquals('my_netname%s' % id_suffix,
                          nw_inf[index]['network']['label'])
        self.assertEquals('my_portid%s' % id_suffix, nw_inf[index]['id'])
        self.assertEquals('my_mac%s' % id_suffix, nw_inf[index]['address'])
        self.assertEquals('10.0.%s.0/24' % id_suffix,
                          nw_inf[index]['network']['subnets'][0]['cidr'])
        self.assertTrue(model.IP(address='8.8.%s.1' % id_suffix) in
                        nw_inf[index]['network']['subnets'][0]['dns'])
    def _get_instance_nw_info(self, number):
        """Record the expected client calls for ``number`` (1 or 2) ports,
        replay, invoke get_instance_nw_info() and verify each entry."""
        api = quantumapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(mox.IgnoreArg(),
                                          self.instance['uuid'],
                                          mox.IgnoreArg())
        port_data = number == 1 and self.port_data1 or self.port_data2
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': port_data})
        nets = number == 1 and self.nets1 or self.nets2
        self.moxed_client.list_networks(
            tenant_id=self.instance['project_id'],
            shared=False).AndReturn({'networks': nets})
        self.moxed_client.list_networks(
            shared=True).AndReturn({'networks': []})
        # Per port: expect floating-ip, subnet and DHCP-port lookups.
        for i in xrange(1, number + 1):
            float_data = number == 1 and self.float_data1 or self.float_data2
            for ip in port_data[i - 1]['fixed_ips']:
                float_data = [x for x in float_data
                              if x['fixed_ip_address'] == ip['ip_address']]
                self.moxed_client.list_floatingips(
                    fixed_ip_address=ip['ip_address'],
                    port_id=port_data[i - 1]['id']).AndReturn(
                        {'floatingips': float_data})
            subnet_data = i == 1 and self.subnet_data1 or self.subnet_data2
            self.moxed_client.list_subnets(
                id=mox.SameElementsAs(['my_subid%s' % i])).AndReturn(
                    {'subnets': subnet_data})
            self.moxed_client.list_ports(
                network_id=subnet_data[0]['network_id'],
                device_owner='network:dhcp').AndReturn(
                    {'ports': []})
        self.mox.ReplayAll()
        nw_inf = api.get_instance_nw_info(self.context, self.instance)
        for i in xrange(0, number):
            self._verify_nw_info(nw_inf, i)
    def test_get_instance_nw_info_1(self):
        # Test to get one port in one network and subnet.
        # Admin client requests must also resolve to the shared mock.
        quantumv2.get_client(mox.IgnoreArg(),
                             admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self._get_instance_nw_info(1)
    def test_get_instance_nw_info_2(self):
        # Test to get one port in each of two networks and subnets.
        # Admin client requests must also resolve to the shared mock.
        quantumv2.get_client(mox.IgnoreArg(),
                             admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self._get_instance_nw_info(2)
    def test_get_instance_nw_info_with_nets(self):
        # Test get instance_nw_info with networks passed in.
        # When networks are supplied, no list_networks call is expected.
        api = quantumapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(
            mox.IgnoreArg(),
            self.instance['uuid'], mox.IgnoreArg())
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': self.port_data1})
        port_data = self.port_data1
        for ip in port_data[0]['fixed_ips']:
            self.moxed_client.list_floatingips(
                fixed_ip_address=ip['ip_address'],
                port_id=port_data[0]['id']).AndReturn(
                    {'floatingips': self.float_data1})
        self.moxed_client.list_subnets(
            id=mox.SameElementsAs(['my_subid1'])).AndReturn(
                {'subnets': self.subnet_data1})
        self.moxed_client.list_ports(
            network_id='my_netid1',
            device_owner='network:dhcp').AndReturn(
                {'ports': self.dhcp_port_data1})
        quantumv2.get_client(mox.IgnoreArg(),
                             admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self.mox.ReplayAll()
        nw_inf = api.get_instance_nw_info(self.context,
                                          self.instance,
                                          networks=self.nets1)
        self._verify_nw_info(nw_inf, 0)
    def test_get_instance_nw_info_without_subnet(self):
        # Test get instance_nw_info for a port without subnet.
        # Uses port_data3, which has an empty fixed_ips list.
        api = quantumapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(
            mox.IgnoreArg(),
            self.instance['uuid'], mox.IgnoreArg())
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': self.port_data3})
        self.moxed_client.list_networks(
            shared=False,
            tenant_id=self.instance['project_id']).AndReturn(
                {'networks': self.nets1})
        self.moxed_client.list_networks(
            shared=True).AndReturn({'networks': []})
        quantumv2.get_client(mox.IgnoreArg(),
                             admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self.mox.ReplayAll()
        nw_inf = api.get_instance_nw_info(self.context,
                                          self.instance)
        id_suffix = 3
        # No fixed IPs and no subnets, but the port itself is reported.
        self.assertEquals(0, len(nw_inf.fixed_ips()))
        self.assertEquals('my_netname1', nw_inf[0]['network']['label'])
        self.assertEquals('my_portid%s' % id_suffix, nw_inf[0]['id'])
        self.assertEquals('my_mac%s' % id_suffix, nw_inf[0]['address'])
        self.assertEquals(0, len(nw_inf[0]['network']['subnets']))
    def test_refresh_quantum_extensions_cache(self):
        """The extensions cache is keyed by extension name."""
        api = quantumapi.API()
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': 'nvp-qos'}]})
        self.mox.ReplayAll()
        api._refresh_quantum_extensions_cache()
        self.assertEquals({'nvp-qos': {'name': 'nvp-qos'}}, api.extensions)
    def test_populate_quantum_extension_values_rxtx_factor(self):
        """With the nvp-qos extension present, the instance type's
        rxtx_factor is copied into the port request body."""
        api = quantumapi.API()
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': 'nvp-qos'}]})
        self.mox.ReplayAll()
        instance_type = instance_types.get_default_instance_type()
        instance_type['rxtx_factor'] = 1
        sys_meta = utils.dict_to_metadata(
            instance_types.save_instance_type_info({}, instance_type))
        instance = {'system_metadata': sys_meta}
        port_req_body = {'port': {}}
        api._populate_quantum_extension_values(instance, port_req_body)
        self.assertEquals(port_req_body['port']['rxtx_factor'], 1)
    def _stub_allocate_for_instance(self, net_idx=1, **kwargs):
        """Record all mox expectations for one allocate_for_instance run.

        net_idx selects the 1-based network fixture set; kwargs mirror
        the kwargs the caller will later pass to allocate_for_instance.
        The special '_break' kwarg stops recording at a named point
        ('pre_list_networks', 'net_id2', 'pre_get_instance_nw_info') so
        failure paths can be exercised.  Returns the API object with
        mox already in replay mode.
        """
        api = quantumapi.API()
        self.mox.StubOutWithMock(api, '_get_instance_nw_info')
        self.mox.StubOutWithMock(api, '_populate_quantum_extension_values')
        # Net idx is 1-based for compatibility with existing unit tests
        nets = self.nets[net_idx - 1]
        ports = {}
        fixed_ips = {}
        macs = kwargs.get('macs')
        if macs:
            macs = set(macs)
        req_net_ids = []
        if 'requested_networks' in kwargs:
            # Pre-created ports resolve to the port_data1 fixture; plain
            # network requests just remember any requested fixed IP.
            for id, fixed_ip, port_id in kwargs['requested_networks']:
                if port_id:
                    self.moxed_client.show_port(port_id).AndReturn(
                        {'port': {'id': 'my_portid1',
                                  'network_id': 'my_netid1',
                                  'mac_address': 'my_mac1'}})
                    ports['my_netid1'] = self.port_data1[0]
                    id = 'my_netid1'
                    if macs is not None:
                        # A pre-created port consumes its MAC from the pool.
                        macs.discard('my_mac1')
                else:
                    fixed_ips[id] = fixed_ip
                req_net_ids.append(id)
            expected_network_order = req_net_ids
        else:
            expected_network_order = [n['id'] for n in nets]
        if kwargs.get('_break') == 'pre_list_networks':
            self.mox.ReplayAll()
            return api
        search_ids = [net['id'] for net in nets if net['id'] in req_net_ids]
        # allocate_for_instance lists tenant-private then shared networks.
        mox_list_network_params = dict(tenant_id=self.instance['project_id'],
                                       shared=False)
        if search_ids:
            mox_list_network_params['id'] = mox.SameElementsAs(search_ids)
        self.moxed_client.list_networks(
            **mox_list_network_params).AndReturn({'networks': nets})
        mox_list_network_params = dict(shared=True)
        if search_ids:
            mox_list_network_params['id'] = mox.SameElementsAs(search_ids)
        self.moxed_client.list_networks(
            **mox_list_network_params).AndReturn({'networks': []})
        for net_id in expected_network_order:
            if kwargs.get('_break') == 'net_id2':
                self.mox.ReplayAll()
                return api
            port_req_body = {
                'port': {
                    'device_id': self.instance['uuid'],
                    'device_owner': 'compute:nova',
                },
            }
            port = ports.get(net_id, None)
            if port:
                # Existing port: expect an update binding it to the device.
                port_id = port['id']
                self.moxed_client.update_port(port_id,
                                              MyComparator(port_req_body)
                                              ).AndReturn(
                                                  {'port': port})
            else:
                # No pre-created port: expect a create, optionally with a
                # requested fixed IP and a MAC popped from the pool.
                fixed_ip = fixed_ips.get(net_id)
                if fixed_ip:
                    port_req_body['port']['fixed_ips'] = [{'ip_address':
                                                           fixed_ip}]
                port_req_body['port']['network_id'] = net_id
                port_req_body['port']['admin_state_up'] = True
                port_req_body['port']['tenant_id'] = \
                    self.instance['project_id']
                if macs:
                    port_req_body['port']['mac_address'] = macs.pop()
                res_port = {'port': {'id': 'fake'}}
                api._populate_quantum_extension_values(
                    self.instance, port_req_body).AndReturn(None)
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndReturn(res_port)
            if kwargs.get('_break') == 'pre_get_instance_nw_info':
                self.mox.ReplayAll()
                return api
        api._get_instance_nw_info(mox.IgnoreArg(),
                                  self.instance,
                                  networks=nets).AndReturn(
                                      self._returned_nw_info)
        self.mox.ReplayAll()
        return api
def _allocate_for_instance(self, net_idx=1, **kwargs):
api = self._stub_allocate_for_instance(net_idx, **kwargs)
return api.allocate_for_instance(self.context, self.instance, **kwargs)
    def test_allocate_for_instance_1(self):
        # Allocate one port in one network env.
        self._allocate_for_instance(1)
    def test_allocate_for_instance_2(self):
        # Allocate one port in two networks env.
        self._allocate_for_instance(2)
    def test_allocate_for_instance_accepts_macs_kwargs_None(self):
        # The macs kwarg should be accepted as None.
        self._allocate_for_instance(1, macs=None)
    def test_allocate_for_instance_accepts_macs_kwargs_set(self):
        # The macs kwarg should be accepted, as a set, the
        # _allocate_for_instance helper checks that the mac is used to create a
        # port.
        self._allocate_for_instance(1, macs=set(['ab:cd:ef:01:23:45']))
    def test_allocate_for_instance_accepts_only_portid(self):
        # Make sure allocate_for_instance works when only a portid is provided
        # (network id and fixed ip left as None in the requested tuple).
        self._returned_nw_info = self.port_data1
        result = self._allocate_for_instance(
            requested_networks=[(None, None, 'my_portid1')])
        self.assertEqual(self.port_data1, result)
    def test_allocate_for_instance_not_enough_macs_via_ports(self):
        # using a hypervisor MAC via a pre-created port will stop it being
        # used to dynamically create a port on a network. We put the network
        # first in requested_networks so that if the code were to not pre-check
        # requested ports, it would incorrectly assign the mac and not fail.
        requested_networks = [
            (self.nets2[1]['id'], None, None),
            (None, None, 'my_portid1')]
        # Stop recording before the port-create loop; with the only MAC
        # consumed by the pre-created port, PortNotFree must be raised.
        api = self._stub_allocate_for_instance(
            net_idx=2, requested_networks=requested_networks,
            macs=set(['my_mac1']),
            _break='net_id2')
        self.assertRaises(exception.PortNotFree,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks,
                          macs=set(['my_mac1']))
    def test_allocate_for_instance_not_enough_macs(self):
        # If not enough MAC addresses are available to allocate to networks, an
        # error should be raised.
        # We could pass in macs=set(), but that wouldn't tell us that
        # allocate_for_instance tracks used macs properly, so we pass in one
        # mac, and ask for two networks.
        requested_networks = [
            (self.nets2[1]['id'], None, None),
            (self.nets2[0]['id'], None, None)]
        api = self._stub_allocate_for_instance(
            net_idx=2, requested_networks=requested_networks,
            macs=set(['my_mac2']),
            _break='pre_get_instance_nw_info')
        self.assertRaises(exception.PortNotFree,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks,
                          macs=set(['my_mac2']))
    def test_allocate_for_instance_two_macs_two_networks(self):
        # If two MACs are available and two networks requested, two new ports
        # get made and no exceptions raised.
        requested_networks = [
            (self.nets2[1]['id'], None, None),
            (self.nets2[0]['id'], None, None)]
        self._allocate_for_instance(
            net_idx=2, requested_networks=requested_networks,
            macs=set(['my_mac2', 'my_mac1']))
    def test_allocate_for_instance_mac_conflicting_requested_port(self):
        # A pre-created port whose MAC is not in the allowed macs set must
        # be rejected with PortNotUsable before any networks are listed.
        requested_networks = [(None, None, 'my_portid1')]
        api = self._stub_allocate_for_instance(
            net_idx=1, requested_networks=requested_networks,
            macs=set(['unknown:mac']),
            _break='pre_list_networks')
        self.assertRaises(exception.PortNotUsable,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks,
                          macs=set(['unknown:mac']))
    def test_allocate_for_instance_with_requested_networks(self):
        # Request nets3 networks in a non-fixture order to exercise the
        # requested-network ordering path.
        requested_networks = [
            (net['id'], None, None)
            for net in (self.nets3[1], self.nets3[0], self.nets3[2])]
        self._allocate_for_instance(net_idx=3,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_with_requested_networks_with_fixedip(self):
        # A requested network may carry a fixed IP for the created port.
        requested_networks = [(self.nets1[0]['id'], '10.0.1.0/24', None)]
        self._allocate_for_instance(net_idx=1,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_with_requested_networks_with_port(self):
        # A request may name only a pre-created port.
        requested_networks = [(None, None, 'myportid1')]
        self._allocate_for_instance(net_idx=1,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_ex1(self):
        """verify we will delete created ports
        if we fail to allocate all net resources.

        Mox to raise exception when creating a second port.
        In this case, the code should delete the first created port.
        """
        api = quantumapi.API()
        self.mox.StubOutWithMock(api, '_populate_quantum_extension_values')
        self.moxed_client.list_networks(
            tenant_id=self.instance['project_id'],
            shared=False).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_networks(shared=True).AndReturn(
            {'networks': []})
        index = 0
        # First create_port succeeds, second raises; only the first
        # port should then be cleaned up below.
        for network in self.nets2:
            port_req_body = {
                'port': {
                    'network_id': network['id'],
                    'admin_state_up': True,
                    'device_id': self.instance['uuid'],
                    'device_owner': 'compute:nova',
                    'tenant_id': self.instance['project_id'],
                },
            }
            port = {'id': 'portid_' + network['id']}
            api._populate_quantum_extension_values(
                self.instance, port_req_body).AndReturn(None)
            if index == 0:
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndReturn({'port': port})
            else:
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndRaise(
                        Exception("fail to create port"))
            index += 1
        # Cleanup expectation: the successfully created first port is deleted.
        self.moxed_client.delete_port('portid_' + self.nets2[0]['id'])
        self.mox.ReplayAll()
        self.assertRaises(QUANTUM_CLIENT_EXCEPTION, api.allocate_for_instance,
                          self.context, self.instance)
    def test_allocate_for_instance_ex2(self):
        """verify we have no port to delete
        if we fail to allocate the first net resource.

        Mox to raise exception when creating the first port.
        In this case, the code should not delete any ports.
        """
        api = quantumapi.API()
        self.moxed_client.list_networks(
            tenant_id=self.instance['project_id'],
            shared=False).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_networks(shared=True).AndReturn(
            {'networks': []})
        port_req_body = {
            'port': {
                'network_id': self.nets2[0]['id'],
                'admin_state_up': True,
                'device_id': self.instance['uuid'],
                'tenant_id': self.instance['project_id'],
            },
        }
        # No delete_port expectation is recorded: nothing was created.
        self.moxed_client.create_port(
            MyComparator(port_req_body)).AndRaise(
                Exception("fail to create port"))
        self.mox.ReplayAll()
        self.assertRaises(QUANTUM_CLIENT_EXCEPTION, api.allocate_for_instance,
                          self.context, self.instance)
    def test_allocate_for_instance_no_port_or_network(self):
        # A (None, None, None) request must resolve to an empty network-id
        # list; BailOutEarly proves we got that far and stops the test.
        class BailOutEarly(Exception):
            pass
        api = quantumapi.API()
        self.mox.StubOutWithMock(api, '_get_available_networks')
        # Make sure we get an empty list and then bail out of the rest
        # of the function
        api._get_available_networks(self.context, self.instance['project_id'],
                                    []).AndRaise(BailOutEarly)
        self.mox.ReplayAll()
        self.assertRaises(BailOutEarly,
                          api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=[(None, None, None)])
    def test_allocate_for_instance_second_time(self):
        # Make sure that allocate_for_instance only returns ports that it
        # allocated during _that_ run.
        new_port = {'id': 'fake'}
        self._returned_nw_info = self.port_data1 + [new_port]
        nw_info = self._allocate_for_instance()
        # Pre-existing port_data1 ports must be filtered out of the result.
        self.assertEqual(nw_info, [new_port])
def _deallocate_for_instance(self, number):
port_data = number == 1 and self.port_data1 or self.port_data2
self.moxed_client.list_ports(
device_id=self.instance['uuid']).AndReturn(
{'ports': port_data})
for port in port_data:
self.moxed_client.delete_port(port['id'])
self.mox.ReplayAll()
api = quantumapi.API()
api.deallocate_for_instance(self.context, self.instance)
    def test_deallocate_for_instance_1(self):
        # Test to deallocate in one port env.
        self._deallocate_for_instance(1)
    def test_deallocate_for_instance_2(self):
        # Test to deallocate in two ports env.
        self._deallocate_for_instance(2)
def _test_deallocate_port_for_instance(self, number):
port_data = number == 1 and self.port_data1 or self.port_data2
self.moxed_client.delete_port(port_data[0]['id'])
nets = [port_data[0]['network_id']]
quantumv2.get_client(mox.IgnoreArg(), admin=True).AndReturn(
self.moxed_client)
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': port_data[1:]})
quantumv2.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
self.moxed_client)
self.moxed_client.list_networks(
tenant_id=self.instance['project_id'],
shared=False).AndReturn(
{'networks': [self.nets2[1]]})
self.moxed_client.list_networks(shared=True).AndReturn(
{'networks': []})
float_data = number == 1 and self.float_data1 or self.float_data2
for data in port_data[1:]:
for ip in data['fixed_ips']:
self.moxed_client.list_floatingips(
fixed_ip_address=ip['ip_address'],
port_id=data['id']).AndReturn(
{'floatingips': float_data[1:]})
for port in port_data[1:]:
self.moxed_client.list_subnets(id=['my_subid2']).AndReturn({})
self.mox.ReplayAll()
api = quantumapi.API()
nwinfo = api.deallocate_port_for_instance(self.context, self.instance,
port_data[0]['id'])
self.assertEqual(len(nwinfo), len(port_data[1:]))
if len(port_data) > 1:
self.assertEqual(nwinfo[0]['network']['id'], 'my_netid2')
    def test_deallocate_port_for_instance_1(self):
        # Test to deallocate the first and only port
        self._test_deallocate_port_for_instance(1)
    def test_deallocate_port_for_instance_2(self):
        # Test to deallocate the first port of two
        self._test_deallocate_port_for_instance(2)
    def test_list_ports(self):
        # list_ports must pass arbitrary search options straight through
        # to the quantum client.
        search_opts = {'parm': 'value'}
        self.moxed_client.list_ports(**search_opts)
        self.mox.ReplayAll()
        quantumapi.API().list_ports(self.context, **search_opts)
    def test_show_port(self):
        # show_port must forward the port id to the quantum client.
        self.moxed_client.show_port('foo')
        self.mox.ReplayAll()
        quantumapi.API().show_port(self.context, 'foo')
    def test_validate_networks(self):
        # Both requested networks exist (nets2), so validation succeeds
        # without raising.
        requested_networks = [('my_netid1', 'test', None),
                              ('my_netid2', 'test2', None)]
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
            tenant_id=self.context.project_id,
            shared=False).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
            shared=True).AndReturn(
                {'networks': []})
        self.mox.ReplayAll()
        api = quantumapi.API()
        api.validate_networks(self.context, requested_networks)
def test_validate_networks_ex_1(self):
requested_networks = [('my_netid1', 'test', None),
('my_netid2', 'test2', None)]
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
tenant_id=self.context.project_id,
shared=False).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2']),
shared=True).AndReturn(
{'networks': []})
self.mox.ReplayAll()
api = quantumapi.API()
try:
api.validate_networks(self.context, requested_networks)
except exception.NetworkNotFound as ex:
self.assertTrue("my_netid2" in str(ex))
def test_validate_networks_ex_2(self):
requested_networks = [('my_netid1', 'test', None),
('my_netid2', 'test2', None),
('my_netid3', 'test3', None)]
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2', 'my_netid3']),
tenant_id=self.context.project_id,
shared=False).AndReturn(
{'networks': self.nets1})
self.moxed_client.list_networks(
id=mox.SameElementsAs(['my_netid1', 'my_netid2', 'my_netid3']),
shared=True).AndReturn(
{'networks': []})
self.mox.ReplayAll()
api = quantumapi.API()
try:
api.validate_networks(self.context, requested_networks)
except exception.NetworkNotFound as ex:
self.assertTrue("my_netid2, my_netid3" in str(ex))
    def _mock_list_ports(self, port_data=None):
        """Mock a list_ports lookup by fixed IP and enter replay mode.

        Returns the address used for the lookup so callers can pass it
        to the API under test.  port_data defaults to the two-port
        fixture.
        """
        if port_data is None:
            port_data = self.port_data2
        address = self.port_address
        self.moxed_client.list_ports(
            fixed_ips=MyComparator('ip_address=%s' % address)).AndReturn(
                {'ports': port_data})
        self.mox.ReplayAll()
        return address
    def test_get_instance_uuids_by_ip_filter(self):
        # An IP regex filter should map matching ports back to the
        # device (instance) ids of the fixture ports.
        self._mock_list_ports()
        filters = {'ip': '^10\\.0\\.1\\.2$'}
        api = quantumapi.API()
        result = api.get_instance_uuids_by_ip_filter(self.context, filters)
        self.assertEquals('device_id1', result[0]['instance_uuid'])
        self.assertEquals('device_id2', result[1]['instance_uuid'])
    def test_get_fixed_ip_by_address_fails_for_no_ports(self):
        # No port owns the address -> FixedIpNotFoundForAddress.
        address = self._mock_list_ports(port_data=[])
        api = quantumapi.API()
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          api.get_fixed_ip_by_address,
                          self.context, address)
    def test_get_fixed_ip_by_address_succeeds_for_1_port(self):
        # Exactly one port owns the address -> its device id is returned.
        address = self._mock_list_ports(port_data=self.port_data1)
        api = quantumapi.API()
        result = api.get_fixed_ip_by_address(self.context, address)
        self.assertEquals('device_id1', result['instance_uuid'])
    def test_get_fixed_ip_by_address_fails_for_more_than_1_port(self):
        # Multiple ports own the address -> ambiguity error.
        address = self._mock_list_ports()
        api = quantumapi.API()
        self.assertRaises(exception.FixedIpAssociatedWithMultipleInstances,
                          api.get_fixed_ip_by_address,
                          self.context, address)
    def _get_available_networks(self, prv_nets, pub_nets, req_ids=None):
        """Exercise _get_available_networks with the given fixtures.

        Records one tenant-private and one shared list_networks call
        (optionally filtered by req_ids) and asserts the API returns the
        concatenation of both result sets.
        """
        api = quantumapi.API()
        nets = prv_nets + pub_nets
        mox_list_network_params = dict(tenant_id=self.instance['project_id'],
                                       shared=False)
        if req_ids:
            mox_list_network_params['id'] = req_ids
        self.moxed_client.list_networks(
            **mox_list_network_params).AndReturn({'networks': prv_nets})
        mox_list_network_params = dict(shared=True)
        if req_ids:
            mox_list_network_params['id'] = req_ids
        self.moxed_client.list_networks(
            **mox_list_network_params).AndReturn({'networks': pub_nets})
        self.mox.ReplayAll()
        rets = api._get_available_networks(self.context,
                                           self.instance['project_id'],
                                           req_ids)
        self.assertEqual(rets, nets)
    def test_get_available_networks_all_private(self):
        # Only tenant-private networks exist.
        self._get_available_networks(prv_nets=self.nets2, pub_nets=[])
    def test_get_available_networks_all_public(self):
        # Only shared (public) networks exist.
        self._get_available_networks(prv_nets=[], pub_nets=self.nets2)
    def test_get_available_networks_private_and_public(self):
        # Mixed private and shared networks are concatenated.
        self._get_available_networks(prv_nets=self.nets1, pub_nets=self.nets4)
    def test_get_available_networks_with_network_ids(self):
        prv_nets = [self.nets3[0]]
        pub_nets = [self.nets3[-1]]
        # specify only first and last network
        req_ids = [net['id'] for net in (self.nets3[0], self.nets3[-1])]
        self._get_available_networks(prv_nets, pub_nets, req_ids)
    def test_get_floating_ip_pools(self):
        # Floating IP pools are the external networks; only their names
        # are returned.
        api = quantumapi.API()
        search_opts = {'router:external': True}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
        self.mox.ReplayAll()
        pools = api.get_floating_ip_pools(self.context)
        expected = [{'name': self.fip_pool['name']},
                    {'name': self.fip_pool_nova['name']}]
        self.assertEqual(expected, pools)
def _get_expected_fip_model(self, fip_data, idx=0):
expected = {'id': fip_data['id'],
'address': fip_data['floating_ip_address'],
'pool': self.fip_pool['name'],
'project_id': fip_data['tenant_id'],
'fixed_ip_id': fip_data['port_id'],
'fixed_ip':
{'address': fip_data['fixed_ip_address']},
'instance': ({'uuid': self.port_data2[idx]['device_id']}
if fip_data['port_id']
else None)}
return expected
    def _test_get_floating_ip(self, fip_data, idx=0, by_address=False):
        """Exercise get_floating_ip / get_floating_ip_by_address.

        by_address chooses between the two lookups; the port lookup is
        only expected when the floating IP is bound to a port.
        """
        api = quantumapi.API()
        fip_id = fip_data['id']
        net_id = fip_data['floating_network_id']
        address = fip_data['floating_ip_address']
        if by_address:
            self.moxed_client.list_floatingips(floating_ip_address=address).\
                AndReturn({'floatingips': [fip_data]})
        else:
            self.moxed_client.show_floatingip(fip_id).\
                AndReturn({'floatingip': fip_data})
        self.moxed_client.show_network(net_id).\
            AndReturn({'network': self.fip_pool})
        if fip_data['port_id']:
            self.moxed_client.show_port(fip_data['port_id']).\
                AndReturn({'port': self.port_data2[idx]})
        self.mox.ReplayAll()
        expected = self._get_expected_fip_model(fip_data, idx)
        if by_address:
            fip = api.get_floating_ip_by_address(self.context, address)
        else:
            fip = api.get_floating_ip(self.context, fip_id)
        self.assertEqual(expected, fip)
    def test_get_floating_ip_unassociated(self):
        # Lookup by id, floating IP not bound to a port.
        self._test_get_floating_ip(self.fip_unassociated, idx=0)
    def test_get_floating_ip_associated(self):
        # Lookup by id, floating IP bound to a port.
        self._test_get_floating_ip(self.fip_associated, idx=1)
    def test_get_floating_ip_by_address(self):
        # Lookup by address, unassociated.
        self._test_get_floating_ip(self.fip_unassociated, idx=0,
                                   by_address=True)
    def test_get_floating_ip_by_address_associated(self):
        # Lookup by address, associated.
        self._test_get_floating_ip(self.fip_associated, idx=1,
                                   by_address=True)
    def test_get_floating_ip_by_address_not_found(self):
        # No floating IP matches the address -> FloatingIpNotFoundForAddress.
        api = quantumapi.API()
        address = self.fip_unassociated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': []})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          api.get_floating_ip_by_address,
                          self.context, address)
    def test_get_floating_ip_by_address_multiple_found(self):
        # More than one match -> FloatingIpMultipleFoundForAddress.
        api = quantumapi.API()
        address = self.fip_unassociated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated] * 2})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpMultipleFoundForAddress,
                          api.get_floating_ip_by_address,
                          self.context, address)
    def test_get_floating_ips_by_project(self):
        # All of the project's floating IPs are returned, converted to
        # the nova model via pool and port lookups.
        api = quantumapi.API()
        project_id = self.context.project_id
        self.moxed_client.list_floatingips(tenant_id=project_id).\
            AndReturn({'floatingips': [self.fip_unassociated,
                                       self.fip_associated]})
        search_opts = {'router:external': True}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
        self.moxed_client.list_ports(tenant_id=project_id).\
            AndReturn({'ports': self.port_data2})
        self.mox.ReplayAll()
        expected = [self._get_expected_fip_model(self.fip_unassociated),
                    self._get_expected_fip_model(self.fip_associated, idx=1)]
        fips = api.get_floating_ips_by_project(self.context)
        self.assertEqual(expected, fips)
    def _test_get_instance_id_by_floating_address(self, fip_data,
                                                  associated=False):
        """Map a floating IP address to the owning instance id.

        An associated floating IP resolves through its port to the
        port's device_id; an unassociated one resolves to None.
        """
        api = quantumapi.API()
        address = fip_data['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [fip_data]})
        if associated:
            self.moxed_client.show_port(fip_data['port_id']).\
                AndReturn({'port': self.port_data2[1]})
        self.mox.ReplayAll()
        if associated:
            expected = self.port_data2[1]['device_id']
        else:
            expected = None
        fip = api.get_instance_id_by_floating_address(self.context, address)
        self.assertEqual(expected, fip)
    def test_get_instance_id_by_floating_address(self):
        # Unassociated floating IP -> None.
        self._test_get_instance_id_by_floating_address(self.fip_unassociated)
    def test_get_instance_id_by_floating_address_associated(self):
        # Associated floating IP -> the port's device id.
        self._test_get_instance_id_by_floating_address(self.fip_associated,
                                                       associated=True)
    def test_allocate_floating_ip(self):
        # Allocating by pool name must resolve the pool to its network id
        # and create a floating IP on it.
        api = quantumapi.API()
        pool_name = self.fip_pool['name']
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context, 'ext_net')
        self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
def test_allocate_floating_ip_with_pool_id(self):
api = quantumapi.API()
pool_name = self.fip_pool['name']
pool_id = self.fip_pool['id']
search_opts = {'router:external': True,
'fields': 'id',
'id': pool_id}
self.moxed_client.list_networks(**search_opts).\
AndReturn({'networks': [self.fip_pool]})
self.moxed_client.create_floatingip(
{'floatingip': {'floating_network_id': pool_id}}).\
AndReturn({'floatingip': self.fip_unassociated})
self.mox.ReplayAll()
fip = api.allocate_floating_ip(self.context, pool_id)
self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
    def test_allocate_floating_ip_with_default_pool(self):
        # With no pool argument the configured default pool (fip_pool_nova)
        # is used for the lookup and allocation.
        api = quantumapi.API()
        pool_name = self.fip_pool_nova['name']
        pool_id = self.fip_pool_nova['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool_nova]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context)
        self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
    def test_release_floating_ip(self):
        # An unassociated floating IP can be deleted.
        api = quantumapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fip_id = self.fip_unassociated['id']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.delete_floatingip(fip_id)
        self.mox.ReplayAll()
        api.release_floating_ip(self.context, address)
    def test_release_floating_ip_associated(self):
        # Releasing a floating IP that is still bound to a port must fail.
        api = quantumapi.API()
        address = self.fip_associated['floating_ip_address']
        fip_id = self.fip_associated['id']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpAssociated,
                          api.release_floating_ip, self.context, address)
    def _setup_mock_for_refresh_cache(self, api):
        """Record the expectations for a network-info cache refresh.

        Stubs _get_instance_nw_info and the DB cache update that any
        operation mutating an instance's networking is expected to
        trigger.
        """
        nw_info = self.mox.CreateMock(model.NetworkInfo)
        nw_info.json()
        self.mox.StubOutWithMock(api, '_get_instance_nw_info')
        api._get_instance_nw_info(mox.IgnoreArg(), self.instance).\
            AndReturn(nw_info)
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(mox.IgnoreArg(),
                                          self.instance['uuid'],
                                          mox.IgnoreArg())
    def test_associate_floating_ip(self):
        # Associating must find the instance's port owning the fixed IP,
        # update the floating IP to point at it, and refresh the cache.
        api = quantumapi.API()
        address = self.fip_associated['floating_ip_address']
        fixed_address = self.fip_associated['fixed_ip_address']
        fip_id = self.fip_associated['id']
        search_opts = {'device_owner': 'compute:nova',
                       'device_id': self.instance['uuid']}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[1]]})
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': self.fip_associated['port_id'],
                                    'fixed_ip_address': fixed_address}})
        self._setup_mock_for_refresh_cache(api)
        self.mox.ReplayAll()
        api.associate_floating_ip(self.context, self.instance,
                                  address, fixed_address)
def test_associate_floating_ip_not_found_fixed_ip(self):
api = quantumapi.API()
address = self.fip_associated['floating_ip_address']
fixed_address = self.fip_associated['fixed_ip_address']
fip_id = self.fip_associated['id']
search_opts = {'device_owner': 'compute:nova',
'device_id': self.instance['uuid']}
self.moxed_client.list_ports(**search_opts).\
AndReturn({'ports': [self.port_data2[0]]})
self.mox.ReplayAll()
self.assertRaises(exception.FixedIpNotFoundForAddress,
api.associate_floating_ip, self.context,
self.instance, address, fixed_address)
    def test_disassociate_floating_ip(self):
        # Disassociating clears the floating IP's port_id and refreshes
        # the instance's network-info cache.
        api = quantumapi.API()
        address = self.fip_associated['floating_ip_address']
        fip_id = self.fip_associated['id']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': None}})
        self._setup_mock_for_refresh_cache(api)
        self.mox.ReplayAll()
        api.disassociate_floating_ip(self.context, self.instance, address)
def test_add_fixed_ip_to_instance(self):
api = quantumapi.API()
self._setup_mock_for_refresh_cache(api)
network_id = 'my_netid1'
search_opts = {'network_id': network_id}
self.moxed_client.list_subnets(
**search_opts).AndReturn({'subnets': self.subnet_data_n})
zone = 'compute:%s' % self.instance['availability_zone']
search_opts = {'device_id': self.instance['uuid'],
'device_owner': 'compute:nova',
'network_id': network_id}
self.moxed_client.list_ports(
**search_opts).AndReturn({'ports': self.port_data1})
port_req_body = {
'port': {
'fixed_ips': [{'subnet_id': 'my_subid1'},
{'subnet_id': 'my_subid1'}],
},
}
port = self.port_data1[0]
port['fixed_ips'] = [{'subnet_id': 'my_subid1'}]
self.moxed_client.update_port('my_portid1',
MyComparator(port_req_body)).AndReturn({'port': port})
self.mox.ReplayAll()
api.add_fixed_ip_to_instance(self.context, self.instance, network_id)
    def test_remove_fixed_ip_from_instance(self):
        # Removing a fixed IP finds the owning port by device/owner/IP
        # and updates it with an empty fixed_ips list.
        api = quantumapi.API()
        self._setup_mock_for_refresh_cache(api)
        address = '10.0.0.3'
        zone = 'compute:%s' % self.instance['availability_zone']
        search_opts = {'device_id': self.instance['uuid'],
                       'device_owner': zone,
                       'fixed_ips': 'ip_address=%s' % address}
        self.moxed_client.list_ports(
            **search_opts).AndReturn({'ports': self.port_data1})
        port_req_body = {
            'port': {
                'fixed_ips': [],
            },
        }
        port = self.port_data1[0]
        port['fixed_ips'] = []
        self.moxed_client.update_port('my_portid1',
            MyComparator(port_req_body)).AndReturn({'port': port})
        self.mox.ReplayAll()
        api.remove_fixed_ip_from_instance(self.context, self.instance, address)
    def test_list_floating_ips_without_l3_support(self):
        # A 404 from the floating-IP extension (no L3 support) must be
        # swallowed and reported as an empty list, not propagated.
        api = quantumapi.API()
        QuantumNotFound = quantumv2.exceptions.QuantumClientException(
            status_code=404)
        self.moxed_client.list_floatingips(
            fixed_ip_address='1.1.1.1', port_id=1).AndRaise(QuantumNotFound)
        self.mox.ReplayAll()
        quantumv2.get_client('fake')
        floatingips = api._get_floating_ips_by_fixed_and_port(
            self.moxed_client, '1.1.1.1', 1)
        self.assertEqual(floatingips, [])
class TestQuantumv2ModuleMethods(test.TestCase):
    """Tests for module-level helpers of the quantum v2 API module."""

    def test_ensure_requested_network_ordering_no_preference_ids(self):
        # No preferred ordering: a plain id list is accepted unchanged.
        networks = [1, 2, 3]
        quantumapi._ensure_requested_network_ordering(lambda net: net,
                                                      networks, None)

    def test_ensure_requested_network_ordering_no_preference_hashes(self):
        # No preferred ordering: dict entries keep their original order.
        networks = [{'id': 3}, {'id': 1}, {'id': 2}]
        quantumapi._ensure_requested_network_ordering(lambda net: net['id'],
                                                      networks, None)
        self.assertEqual(networks, [{'id': 3}, {'id': 1}, {'id': 2}])

    def test_ensure_requested_network_ordering_with_preference(self):
        # A preferred id ordering re-sorts the list in place.
        networks = [{'id': 3}, {'id': 1}, {'id': 2}]
        quantumapi._ensure_requested_network_ordering(lambda net: net['id'],
                                                      networks, [1, 2, 3])
        self.assertEqual(networks, [{'id': 1}, {'id': 2}, {'id': 3}])
| zestrada/nova-cs498cc | nova/tests/network/test_quantumv2.py | Python | apache-2.0 | 55,258 |
<?php
// .-----------------------------------------------------------------------------------
// | Software: [HDPHP framework]
// | Version: 2013.01
// | Site: http://www.hdphp.com
// |-----------------------------------------------------------------------------------
// | Author: 向军 <houdunwangxj@gmail.com>
// | Copyright (c) 2012-2013, http://houdunwang.com. All Rights Reserved.
// |-----------------------------------------------------------------------------------
// | License: http://www.apache.org/licenses/LICENSE-2.0
// '-----------------------------------------------------------------------------------
/**
 * File upload handling class.
 * @package tools_class
 * @author 后盾向军 <houdunwangxj@gmail.com>
 */
class Upload
{
    // Allowed upload extensions (stored upper-cased)
    public $ext = array();
    // Per-extension upload size limits
    public $size;
    // Upload destination directory (trailing slash)
    public $path;
    // Last error message
    public $error;
    // Whether to generate thumbnails for images
    public $thumbOn;
    // Thumbnail generation parameters
    public $thumb = array();
    // Whether to stamp a watermark on images
    public $waterMarkOn;
    // Info records of successfully uploaded files
    public $uploadedFile = array();
    /**
     * Constructor.
     * @param string $path upload directory; defaults to C("UPLOAD_PATH")
     * @param array $ext allowed file types, e.g. array('jpg','jpeg','png','doc');
     *                   defaults to the keys of C("UPLOAD_EXT_SIZE")
     * @param array $size allowed sizes per type, e.g. array('jpg'=>200000,'rar'=>'39999');
     *                    defaults to C("UPLOAD_EXT_SIZE")
     * @param bool $waterMarkOn whether to watermark images; defaults to C("WATER_ON")
     * @param bool $thumbOn whether to generate thumbnails; defaults to C("UPLOAD_THUMB_ON")
     * @param array $thumb thumbnail parameters: 1 width, 2 height, 3 resize rule
     */
    public function __construct($path = '', $ext = array(), $size = array(), $waterMarkOn = null, $thumbOn = null, $thumb = array())
    {
        $path = empty($path) ? C("UPLOAD_PATH") : $path; // upload directory
        $this->path = rtrim(str_replace('\\', '/', $path), '/') . '/';
        $_ext = empty($ext) ? array_keys(C("UPLOAD_EXT_SIZE")) : $ext; // allowed types
        foreach ($_ext as $v) {
            $this->ext[] = strtoupper($v);
        }
        $this->size = $size ? $size : array_change_key_case_d(C("UPLOAD_EXT_SIZE"), 1);
        $this->waterMarkOn = is_null($waterMarkOn) ? C("WATER_ON") : $waterMarkOn;
        $this->thumbOn = is_null($thumbOn) ? C("UPLOAD_THUMB_ON") : $thumbOn;
        $this->thumb = $thumb;
    }
    /**
     * Upload the files found in $_FILES to the server.
     * May upload just one named field from $_FILES.
     * @param null $fieldName form field name of the upload; null means all fields
     * @return array|bool info records of uploaded files, or false on setup failure
     * <code>
     * Example 1:
     * $upload= new Upload();
     * $upload->upload();
     * Example 2:
     * $upload= new Upload();
     * $upload->upload("pic");
     * </code>
     */
    public function upload($fieldName = null)
    {
        if (!$this->checkDir($this->path)) {
            $this->error = $this->path . '图片上传目录创建失败或不可写';
            return false;
        }
        $files = $this->format($fieldName);
        // validate each file; invalid ones are skipped, not fatal
        foreach ($files as $v) {
            $info = pathinfo($v ['name']);
            $v ["ext"] = isset($info ["extension"]) ? $info['extension'] : '';
            $v['filename'] = isset($info['filename']) ? $info['filename'] : '';
            if (!$this->checkFile($v)) {
                continue;
            }
            $uploadedFile = $this->save($v);
            if ($uploadedFile) {
                $this->uploadedFile [] = $uploadedFile;
            }
        }
        return $this->uploadedFile;
    }
/**
 * 储存文件
 * @param array $file 标准化后的单个文件信息数组(含tmp_name/ext/filename等)
 * @return array|bool 成功返回保存后的文件信息数组,失败返回false并设置error
 */
private function save($file)
{
    $is_img = 0;
    // 随机数+时间戳组成新文件名,避免重名覆盖
    $uploadFileName = mt_rand(1, 9999) . time() . "." . $file['ext'];
    $filePath = $this->path . $uploadFileName;
    // 扩展名是图片且内容确为图片时,存入图片子目录
    if (in_array(strtolower($file ['ext']), array("jpeg", "jpg", "bmp", "gif", "png")) && getimagesize($file ['tmp_name'])) {
        $imgDir = C("UPLOAD_IMG_DIR") ? C("UPLOAD_IMG_DIR") . "/" : "";
        $filePath = $this->path . $imgDir . $uploadFileName;
        if (!$this->checkDir($this->path . $imgDir)) {
            $this->error = '图片上传目录创建失败或不可写';
            return false;
        }
        $is_img = 1;
    }
    if (!move_uploaded_file($file ['tmp_name'], $filePath)) {
        // 修复:原代码误调用$this->error()方法,传入的字符串不匹配任何
        // UPLOAD_ERR_*错误码,导致error始终不会被设置;这里应直接赋值
        $this->error = '移动临时文件失败';
        return false;
    }
    if (!$is_img) {
        $filePath = ltrim(str_replace(ROOT_PATH, '', $filePath), '/');
        $arr = array("path" => $filePath, 'fieldname' => $file['fieldname'], 'image' => 0);
    } else {
        //处理图像类型文件
        $img = new image ();
        $imgInfo = getimagesize($filePath);
        //对原图进行缩放
        if (C("UPLOAD_IMG_RESIZE_ON") && ($imgInfo[0] > C("UPLOAD_IMG_MAX_WIDTH") || $imgInfo[1] > C("UPLOAD_IMG_MAX_HEIGHT"))) {
            $img->thumb($filePath, $uploadFileName, C("UPLOAD_IMG_MAX_WIDTH"), C("UPLOAD_IMG_MAX_HEIGHT"), 5, $this->path);
        }
        //生成缩略图
        if ($this->thumbOn) {
            $args = array();
            if (empty($this->thumb)) {
                array_unshift($args, $filePath);
            } else {
                array_unshift($args, $filePath, "", "");
                $args = array_merge($args, $this->thumb);
            }
            $thumbFile = call_user_func_array(array($img, "thumb"), $args);
        }
        //加水印
        if ($this->waterMarkOn) {
            $img->water($filePath);
        }
        $filePath = trim(str_replace(ROOT_PATH, '', $filePath), '/');
        if ($this->thumbOn) {
            $thumbFile = trim(str_replace(ROOT_PATH, '', $thumbFile), '/');
            $arr = array("path" => $filePath, "thumb" => $thumbFile, 'fieldname' => $file['fieldname'], 'image' => 1);
        } else {
            $arr = array("path" => $filePath, 'fieldname' => $file['fieldname'], 'image' => 1);
        }
    }
    $arr['path'] = preg_replace('@\./@', '', $arr['path']);
    //上传时间
    $arr['uptime'] = time();
    $info = pathinfo($filePath);
    $arr['fieldname'] = $file['fieldname'];
    $arr['basename'] = $info['basename'];
    $arr['filename'] = $info['filename'];//新文件名
    $arr['name'] = $file['filename'];//旧文件名
    $arr['size'] = $file['size'];
    $arr['ext'] = $file['ext'];
    $dir= str_ireplace("\\", "/", dirname($arr['path']));
    // 目录统一以"/"结尾(substr偏移量用整数-1,原代码误用字符串"-1")
    $arr['dir']=substr($dir, -1) == "/" ? $dir : $dir . "/";
    return $arr;
}
/**
 * 将上传文件整理为标准数组:每个元素对应一个文件,
 * 并带有fieldname(表单字段名)
 * @param string|null $fieldName 只处理该字段;为null时处理全部$_FILES
 * @return array|bool 标准化后的文件信息数组;无文件时返回false并设置error
 */
private function format($fieldName)
{
    if ($fieldName == null) {
        $files = $_FILES;
    } else if (isset($_FILES[$fieldName])) {
        $files[$fieldName] = $_FILES[$fieldName];
    }
    if (!isset($files)) {
        $this->error = '没有任何文件上传';
        return false;
    }
    $info = array();
    foreach ($files as $name => $item) {
        if (is_array($item ['name'])) {
            // 多文件字段(name="pic[]"):按下标拆分为独立的文件记录
            $count = count($item ['name']);
            for ($i = 0; $i < $count; $i++) {
                $record = array();
                foreach ($item as $attr => $values) {
                    $record [$attr] = $values [$i];
                }
                $record ['fieldname'] = $name; //字段名
                $info [] = $record;
            }
        } else {
            // 单文件字段:直接附加字段名
            $item ['fieldname'] = $name; //字段名
            $info [] = $item;
        }
    }
    return $info;
}
/**
 * 验证目录:必要时创建目录,并要求其可写
 * @param string $path 目录
 * @return bool
 */
private function checkDir($path)
{
    // &&表达式本身即为布尔值,无需再写三元运算
    return Dir::create($path) && is_writeable($path);
}
/**
 * 验证单个上传文件:错误码、扩展名、图片合法性、大小、来源
 * @param array $file 标准化后的单个文件信息数组
 * @return bool 通过验证返回true;否则设置error并返回false
 */
private function checkFile($file)
{
    // PHP上传阶段已产生错误码时,转换为可读错误信息
    if ($file ['error'] != 0) {
        $this->error($file ['error']);
        return false;
    }
    $ext = strtoupper($file ['ext']);
    // 大小限制:按扩展名配置了数组时取对应项,否则使用统一限制值
    $ext_size = is_array($this->size) && isset($this->size[$ext]) ? $this->size[$ext] : $this->size;
    if (!in_array($ext, $this->ext)) {
        $this->error = '文件类型不允许';
        return false;
    }
    // MIME声明为图片但内容无法被识别为图片时拒绝
    if (strstr(strtolower($file['type']), "image") && !getimagesize($file['tmp_name'])) {
        $this->error = '上传内容不是一个合法图片';
        return false;
    }
    if ($file ['size'] > $ext_size) {
        $this->error = '上传文件大于' . get_size($ext_size);
        return false;
    }
    // 防伪造:必须是本次HTTP POST上传产生的临时文件
    if (!is_uploaded_file($file ['tmp_name'])) {
        $this->error = '非法文件';
        return false;
    }
    return true;
}
/**
 * 将PHP上传错误码($_FILES['...']['error'])转换为可读错误信息
 * 并保存到$this->error
 * @param int $error PHP上传错误码(UPLOAD_ERR_*)
 */
private function error($error)
{
    switch ($error) {
        case UPLOAD_ERR_INI_SIZE :
            $this->error = '上传文件超过PHP.INI配置文件允许的大小';
            break;
        case UPLOAD_ERR_FORM_SIZE :
            $this->error = '文件超过表单限制大小';
            break;
        case UPLOAD_ERR_PARTIAL :
            $this->error = '文件只上有部分上传';
            break;
        case UPLOAD_ERR_NO_FILE :
            $this->error = '没有上传文件';
            break;
        case UPLOAD_ERR_NO_TMP_DIR :
            $this->error = '没有上传临时文件夹';
            break;
        case UPLOAD_ERR_CANT_WRITE :
            $this->error = '写入临时文件夹出错';
            break;
        case UPLOAD_ERR_EXTENSION :
            // 原switch缺少该错误码分支
            $this->error = '文件上传被PHP扩展中断';
            break;
        default :
            // 兜底:避免遇到未知错误码时error保持为空
            $this->error = '未知上传错误';
            break;
    }
}
/**
 * 返回上传时发生的错误原因
 * 注意:批量上传时error只保留最近一次失败的原因
 * @return string 错误信息
 */
public function getError()
{
    return $this->error;
}
} | 13808796047/hdcms | hd/HDPHP/hdphp/Extend/Tool/Upload.class.php | PHP | apache-2.0 | 9,980 |
#!/usr/bin/env python
"""
.. py:currentmodule:: FileFormat.SimulationParameters
.. moduleauthor:: Hendrix Demers <hendrix.demers@mail.mcgill.ca>
MCXRay simulation parameters input file.
"""
# Script information for the file.
__author__ = "Hendrix Demers (hendrix.demers@mail.mcgill.ca)"
__version__ = ""
__date__ = ""
__copyright__ = "Copyright (c) 2012 Hendrix Demers"
__license__ = ""
# Subversion informations for the file.
__svnRevision__ = "$Revision$"
__svnDate__ = "$Date$"
__svnId__ = "$Id$"
# Standard library modules.
import copy
# Third party modules.
# Local modules.
# Project modules
import pymcxray.FileFormat.MCXRayModel as MCXRayModel
import pymcxray.FileFormat.Version as Version
# Globals and constants variables.
KEY_BASE_FILENAME = "BaseFileName"
KEY_NUMBER_ELECTRONS = "ElectronNbr"
KEY_NUMBER_PHOTONS = "PhotonNbr"
KEY_NUMBER_WINDOWS = "WindowNbr"
KEY_NUMBER_FILMS_X = "FilmNbrX"
KEY_NUMBER_FILMS_Y = "FilmNbrY"
KEY_NUMBER_FILMS_Z = "FilmNbrZ"
KEY_NUMBER_CHANNELS = "SpectraChannel"
KEY_ENERGY_CHANNEL_WIDTH = "EnergyChannelWidth"
KEY_SPECTRA_INTERPOLATION_MODEL = "SpectraInterpolation"
KEY_VOXEL_SIMPLIFICATION = "VoxelSimplification"
KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR = "ElasticCrossSectionScalingFactor"
KEY_ENERGY_LOSS_SCALING_FACTOR = "EnergyLossScalingFactor"
class SimulationParameters(object):
    """Read/write helper for the MCXRay simulation parameters input file.

    Parameter values are stored in ``self._parameters`` keyed by the
    ``KEY_*`` constants; which keys are read/written depends on
    ``self.version``.
    """

    def __init__(self):
        # Deep copy so mutating this object's version never affects the
        # module-level CURRENT_VERSION singleton.
        self.version = copy.deepcopy(Version.CURRENT_VERSION)
        self._keys = self._createKeys()
        self._parameters = {}
        self.defaultValues()

    def _createKeys(self):
        """Return the ordered list of parameter keys for the current version."""
        keys = []
        keys.append(KEY_BASE_FILENAME)
        keys.append(KEY_NUMBER_ELECTRONS)
        keys.append(KEY_NUMBER_PHOTONS)
        keys.append(KEY_NUMBER_WINDOWS)
        keys.append(KEY_NUMBER_FILMS_X)
        keys.append(KEY_NUMBER_FILMS_Y)
        keys.append(KEY_NUMBER_FILMS_Z)
        # Old files store a channel count; newer files store a channel width.
        if self.version == Version.BEFORE_VERSION:
            keys.append(KEY_NUMBER_CHANNELS)
        else:
            keys.append(KEY_ENERGY_CHANNEL_WIDTH)
        keys.append(KEY_SPECTRA_INTERPOLATION_MODEL)
        keys.append(KEY_VOXEL_SIMPLIFICATION)
        if self.version >= Version.VERSION_1_4_4:
            keys.append(KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR)
            keys.append(KEY_ENERGY_LOSS_SCALING_FACTOR)
        return keys

    def defaultValues(self):
        """Reset all parameters to their default values."""
        baseFilenameRef = r"Results\McXRay"
        self.baseFilename = baseFilenameRef
        self.numberElectrons = 1000
        self.numberPhotons = 10000
        self.numberWindows = 64
        self.numberFilmsX = 128
        self.numberFilmsY = 128
        self.numberFilmsZ = 128
        self.numberChannels = 1024
        self.energyChannelWidth_eV = 5.0
        self.spectrumInterpolationModel = MCXRayModel.SpectrumInterpolationModel.TYPE_LINEAR_DOUBLE
        self.voxelSimplification = None
        self.elasticCrossSectionScalingFactor = 1.0
        self.energyLossScalingFactor = 1.0

    def _createExtractMethod(self):
        """Map each key to the callable used to parse its value from text."""
        extractMethods = {}
        extractMethods[KEY_BASE_FILENAME] = str
        extractMethods[KEY_NUMBER_ELECTRONS] = int
        extractMethods[KEY_NUMBER_PHOTONS] = int
        extractMethods[KEY_NUMBER_WINDOWS] = int
        extractMethods[KEY_NUMBER_FILMS_X] = int
        extractMethods[KEY_NUMBER_FILMS_Y] = int
        extractMethods[KEY_NUMBER_FILMS_Z] = int
        extractMethods[KEY_NUMBER_CHANNELS] = int
        extractMethods[KEY_ENERGY_CHANNEL_WIDTH] = float
        extractMethods[KEY_SPECTRA_INTERPOLATION_MODEL] = self._extractSpectrumInterpolationModel
        extractMethods[KEY_VOXEL_SIMPLIFICATION] = bool
        extractMethods[KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR] = float
        extractMethods[KEY_ENERGY_LOSS_SCALING_FACTOR] = float
        return extractMethods

    def _createFormatMethod(self):
        """Map each key to the printf-style format used when writing it."""
        formatMethods = {}
        formatMethods[KEY_BASE_FILENAME] = "%s"
        formatMethods[KEY_NUMBER_ELECTRONS] = "%i"
        formatMethods[KEY_NUMBER_PHOTONS] = "%i"
        formatMethods[KEY_NUMBER_WINDOWS] = "%i"
        formatMethods[KEY_NUMBER_FILMS_X] = "%i"
        formatMethods[KEY_NUMBER_FILMS_Y] = "%i"
        formatMethods[KEY_NUMBER_FILMS_Z] = "%i"
        formatMethods[KEY_NUMBER_CHANNELS] = "%i"
        formatMethods[KEY_ENERGY_CHANNEL_WIDTH] = "%s"
        formatMethods[KEY_SPECTRA_INTERPOLATION_MODEL] = "%s"
        formatMethods[KEY_VOXEL_SIMPLIFICATION] = "%s"
        formatMethods[KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR] = "%.5f"
        formatMethods[KEY_ENERGY_LOSS_SCALING_FACTOR] = "%.5f"
        return formatMethods

    def _extractSpectrumInterpolationModel(self, text):
        """Parse the interpolation-model id into its model wrapper object."""
        model = MCXRayModel.SpectrumInterpolationModel(int(text))
        return model

    def read(self, filepath):
        """Read parameters from *filepath*, honouring the file's version."""
        self.version.readFromFile(filepath)
        # Context manager closes the handle deterministically; the previous
        # implementation leaked the open file object.
        with open(filepath, 'r') as inputFile:
            lines = inputFile.readlines()
        extractMethods = self._createExtractMethod()
        for line in lines:
            line = line.strip()
            for key in self._keys:
                if line.startswith(key):
                    # Values follow the last '=' on a "key=value" line.
                    items = line.split('=')
                    self._parameters[key] = extractMethods[key](items[-1])

    def write(self, filepath):
        """Write header, version and all parameters to *filepath*."""
        formatMethods = self._createFormatMethod()
        keys = self._createKeys()
        # Context manager flushes and closes the file; the previous
        # implementation never closed it.
        with open(filepath, 'w') as outputFile:
            self._writeHeader(outputFile)
            self.version.writeLine(outputFile)
            for key in keys:
                if key == KEY_SPECTRA_INTERPOLATION_MODEL:
                    value = formatMethods[key] % (self._parameters[key].getModel())
                else:
                    value = formatMethods[key] % (self._parameters[key])
                # Skip unset (None) parameters such as voxelSimplification.
                if value is not None and value != "None":
                    line = "%s=%s\n" % (key, value)
                    outputFile.write(line)

    def _writeHeader(self, outputFile):
        """Write the version-appropriate comment header block."""
        if self._parameters[KEY_VOXEL_SIMPLIFICATION] is not None:
            headerLines = [ "********************************************************************************",
                            "*** SIMULATION PARAMETERS",
                            "***",
                            "*** BaseFileName = All output files will be named using this term",
                            "*** ElectronNbr = Total number of electrons to simulate",
                            "*** PhotonNbr = Total number of photons to simulate in EDS",
                            "*** WindowNbr = Number of energy windows in PhiRo computations",
                            "*** FilmNbrX = Number of X layers in PhiRo computations",
                            "*** FilmNbrY = Number of Y layers in PhiRo computations",
                            "*** FilmNbrZ = Number of Z layers in PhiRo computations",
                            "*** SpectraChannel = Number of channels in spectraa",
                            "*** SpectraInterpolation = Interpolation type for spectras",
                            "*** VoxelSimplification = Use only middle voxel of trajectories to store energy",
                            "***",
                            "********************************************************************************"]
        elif self.version == Version.BEFORE_VERSION:
            headerLines = [ "********************************************************************************",
                            "*** SIMULATION PARAMETERS",
                            "***",
                            "*** BaseFileName = All output files will be named using this term",
                            "*** ElectronNbr = Total number of electrons to simulate",
                            "*** PhotonNbr = Total number of photons to simulate in EDS",
                            "*** WindowNbr = Number of energy windows in PhiRo computations",
                            "*** FilmNbrX = Number of X layers in PhiRo computations",
                            "*** FilmNbrY = Number of Y layers in PhiRo computations",
                            "*** FilmNbrZ = Number of Z layers in PhiRo computations",
                            "*** SpectraChannel = Number of channels in spectraa",
                            "*** SpectraInterpolation = Interpolation type for spectras",
                            "***",
                            "********************************************************************************"]
        elif self.version >= Version.VERSION_1_4_4:
            headerLines = [ "********************************************************************************",
                            "*** SIMULATION PARAMETERS",
                            "***",
                            "*** BaseFileName = All output files will be named using this term",
                            "*** ElectronNbr = Total number of electrons to simulate",
                            "*** PhotonNbr = Total number of photons to simulate in EDS",
                            "*** WindowNbr = Number of energy windows in Spectrum computations",
                            "*** FilmNbrX = Number of X layers in Spectrum computations",
                            "*** FilmNbrY = Number of Y layers in Spectrum computations",
                            "*** FilmNbrZ = Number of Z layers in Spectrum computations",
                            "*** EnergyChannelWidth in eV",
                            "*** SpectraInterpolation = Interpolation type for spectra",
                            "*** ElasticCrossSectionScalingFactor",
                            "*** EnergyLossScalingFactor",
                            "***",
                            "********************************************************************************"]
        else:
            headerLines = [ "********************************************************************************",
                            "*** SIMULATION PARAMETERS",
                            "***",
                            "*** BaseFileName = All output files will be named using this term",
                            "*** ElectronNbr = Total number of electrons to simulate",
                            "*** PhotonNbr = Total number of photons to simulate in EDS",
                            "*** WindowNbr = Number of energy windows in Spectrum computations",
                            "*** FilmNbrX = Number of X layers in Spectrum computations",
                            "*** FilmNbrY = Number of Y layers in Spectrum computations",
                            "*** FilmNbrZ = Number of Z layers in Spectrum computations",
                            "*** EnergyChannelWidth in eV",
                            "*** SpectraInterpolation = Interpolation type for spectra",
                            "***",
                            "********************************************************************************"]
        for line in headerLines:
            outputFile.write(line+'\n')

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, version):
        self._version = version

    @property
    def baseFilename(self):
        return self._parameters[KEY_BASE_FILENAME]

    @baseFilename.setter
    def baseFilename(self, baseFilename):
        self._parameters[KEY_BASE_FILENAME] = baseFilename

    @property
    def numberElectrons(self):
        return self._parameters[KEY_NUMBER_ELECTRONS]

    @numberElectrons.setter
    def numberElectrons(self, numberElectrons):
        self._parameters[KEY_NUMBER_ELECTRONS] = numberElectrons

    @property
    def numberPhotons(self):
        return self._parameters[KEY_NUMBER_PHOTONS]

    @numberPhotons.setter
    def numberPhotons(self, numberPhotons):
        self._parameters[KEY_NUMBER_PHOTONS] = numberPhotons

    @property
    def numberWindows(self):
        return self._parameters[KEY_NUMBER_WINDOWS]

    @numberWindows.setter
    def numberWindows(self, numberWindows):
        self._parameters[KEY_NUMBER_WINDOWS] = numberWindows

    @property
    def numberFilmsX(self):
        return self._parameters[KEY_NUMBER_FILMS_X]

    @numberFilmsX.setter
    def numberFilmsX(self, numberFilmsX):
        self._parameters[KEY_NUMBER_FILMS_X] = numberFilmsX

    @property
    def numberFilmsY(self):
        return self._parameters[KEY_NUMBER_FILMS_Y]

    @numberFilmsY.setter
    def numberFilmsY(self, numberFilmsY):
        self._parameters[KEY_NUMBER_FILMS_Y] = numberFilmsY

    @property
    def numberFilmsZ(self):
        return self._parameters[KEY_NUMBER_FILMS_Z]

    @numberFilmsZ.setter
    def numberFilmsZ(self, numberFilmsZ):
        self._parameters[KEY_NUMBER_FILMS_Z] = numberFilmsZ

    @property
    def numberChannels(self):
        return self._parameters[KEY_NUMBER_CHANNELS]

    @numberChannels.setter
    def numberChannels(self, numberChannels):
        self._parameters[KEY_NUMBER_CHANNELS] = numberChannels

    @property
    def energyChannelWidth_eV(self):
        return self._parameters[KEY_ENERGY_CHANNEL_WIDTH]

    @energyChannelWidth_eV.setter
    def energyChannelWidth_eV(self, energyChannelWidth_eV):
        self._parameters[KEY_ENERGY_CHANNEL_WIDTH] = energyChannelWidth_eV

    @property
    def spectrumInterpolationModel(self):
        # Stored as a model wrapper object; expose the raw model id.
        return self._parameters[KEY_SPECTRA_INTERPOLATION_MODEL].getModel()

    @spectrumInterpolationModel.setter
    def spectrumInterpolationModel(self, spectrumInterpolationModel):
        self._parameters[KEY_SPECTRA_INTERPOLATION_MODEL] = MCXRayModel.SpectrumInterpolationModel(spectrumInterpolationModel)

    @property
    def voxelSimplification(self):
        return self._parameters.get(KEY_VOXEL_SIMPLIFICATION, None)

    @voxelSimplification.setter
    def voxelSimplification(self, voxelSimplification):
        self._parameters[KEY_VOXEL_SIMPLIFICATION] = voxelSimplification

    @property
    def elasticCrossSectionScalingFactor(self):
        return self._parameters[KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR]

    @elasticCrossSectionScalingFactor.setter
    def elasticCrossSectionScalingFactor(self, elasticCrossSectionScalingFactor):
        self._parameters[KEY_ELASTIC_CROSS_SECTION_SCALING_FACTOR] = elasticCrossSectionScalingFactor

    @property
    def energyLossScalingFactor(self):
        return self._parameters[KEY_ENERGY_LOSS_SCALING_FACTOR]

    @energyLossScalingFactor.setter
    def energyLossScalingFactor(self, energyLossScalingFactor):
        self._parameters[KEY_ENERGY_LOSS_SCALING_FACTOR] = energyLossScalingFactor
| drix00/pymcxray | pymcxray/FileFormat/SimulationParameters.py | Python | apache-2.0 | 15,335 |
package org.glamey.training.designmodel.responsibility_chain;
import lombok.Builder;
import lombok.Getter;
import java.math.BigDecimal;
/**
 * Request carrier for the chain-of-responsibility example: who is applying,
 * for what subject, and for how much money.
 *
 * <p>Instances are created through the Lombok-generated builder
 * ({@code Applicant.builder()...build()}); {@code @Getter} generates the
 * read accessors, so the object is effectively read-only after construction.
 *
 * @author zhouyang.zhou. 2017.08.14.16.
 */
@Getter
@Builder
public class Applicant {
    // Name of the applicant.
    private String name;
    // Subject / purpose of the application.
    private String subject;
    // Requested amount; BigDecimal to avoid floating-point money errors.
    private BigDecimal money;
}
| glameyzhou/training | distribute/src/main/java/org/glamey/training/designmodel/responsibility_chain/Applicant.java | Java | apache-2.0 | 315 |
package br.com.cmabreu.action;
import java.io.IOException;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts2.StrutsStatics;
import org.apache.struts2.convention.annotation.Action;
import org.apache.struts2.convention.annotation.InterceptorRef;
import org.apache.struts2.convention.annotation.ParentPackage;
import org.apache.struts2.convention.annotation.Result;
import com.opensymphony.xwork2.ActionContext;
@Action(value="getUnitDetail", results= {
    @Result(name="ok", type="httpheader", params={"status", "200"}) },
    interceptorRefs= { @InterceptorRef("seguranca") }
)
@ParentPackage("default")
public class GetUnitDetailAction {

    /**
     * Answers the "getUnitDetail" AJAX request: writes the response body
     * directly to the servlet response and returns the "ok" result, which
     * is mapped to a bare HTTP 200 header result.
     *
     * <p>NOTE(review): {@code resposta} is always the empty string, so the
     * client currently receives an empty body — presumably a placeholder
     * until the unit-detail lookup is implemented; confirm before relying
     * on this endpoint.
     *
     * @return always "ok" (even when writing the response fails)
     */
    public String execute(){
        String resposta = "";
        try {
            // Obtain the raw servlet response from the Struts action context.
            HttpServletResponse response = (HttpServletResponse)ActionContext.getContext().get(StrutsStatics.HTTP_RESPONSE);
            response.setCharacterEncoding("UTF-8");
            response.getWriter().write(resposta);
        } catch (IOException ex) {
            System.out.println("[GetUnitDetailAction] Erro respondendo AJAX.");
        }
        return "ok";
    }
}
| icemagno/mapview | src/main/java/br/com/cmabreu/action/GetUnitDetailAction.java | Java | apache-2.0 | 1,116 |
package org.hl7.fhir.instance.model.api;
import ca.uhn.fhir.model.api.annotation.SearchParamDefinition;
import ca.uhn.fhir.rest.gclient.TokenClientParam;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
 * An IBaseResource that has a FHIR version of DSTU3 or higher
 */
public interface IAnyResource extends IBaseResource {

    /**
     * Search parameter constant for <b>_language</b>
     */
    @SearchParamDefinition(name="_language", path="", description="The language of the resource", type="string" )
    String SP_RES_LANGUAGE = "_language";

    /**
     * Search parameter constant for <b>_id</b>
     */
    @SearchParamDefinition(name="_id", path="", description="The ID of the resource", type="token" )
    String SP_RES_ID = "_id";

    /**
     * <b>Fluent Client</b> search parameter constant for <b>_id</b>
     * <p>
     * Description: <b>the _id of a resource</b><br>
     * Type: <b>string</b><br>
     * Path: <b>Resource._id</b><br>
     * </p>
     */
    TokenClientParam RES_ID = new TokenClientParam(IAnyResource.SP_RES_ID);

    /**
     * Returns the resource id as a plain string.
     */
    String getId();

    /**
     * Returns the resource id as a structured id type.
     */
    IIdType getIdElement();

    /**
     * Returns the resource language as a primitive string element.
     */
    IPrimitiveType<String> getLanguageElement();

    /**
     * Returns the user data value previously stored under the given name,
     * or null if none was stored.
     */
    Object getUserData(String name);

    /**
     * Sets the resource id and returns this resource for chaining.
     */
    IAnyResource setId(String theId);

    /**
     * Stores an arbitrary user data value under the given name.
     */
    void setUserData(String name, Object value);
}
| SingingTree/hapi-fhir | hapi-fhir-base/src/main/java/org/hl7/fhir/instance/model/api/IAnyResource.java | Java | apache-2.0 | 1,831 |
package ru.job4j.chess.firuges.black;
import ru.job4j.chess.ImpossibleMoveException;
import ru.job4j.chess.firuges.Figure;
import ru.job4j.chess.firuges.Cell;
/**
 * Black Knight.
 * @author Natasha Panchina (panchinanata25@gmail.com)
 * @version 1
 * @since 18.08.2018
 */
public class KnightBlack extends Figure {

    public KnightBlack(final Cell position) {
        super(position);
    }

    public Cell position() {
        return this.position;
    }

    /**
     * Builds the path of the move. A knight jumps over other figures, so
     * the path consists of the destination cell only.
     *
     * @param source cell the figure moves from
     * @param dest   cell the figure moves to
     * @return a single-element array containing the destination cell
     * @throws ImpossibleMoveException if the move is not a legal knight move
     */
    @Override
    public Cell[] way(Cell source, Cell dest) throws ImpossibleMoveException {
        if (!this.isMove(source, dest)) {
            throw new ImpossibleMoveException();
        }
        Cell[] steps = new Cell[]{dest};
        return steps;
    }

    @Override
    public Figure copy(Cell dest) {
        return new KnightBlack(dest);
    }

    /**
     * A knight moves in an "L" shape: exactly two cells along one axis and
     * exactly one cell along the other.
     *
     * <p>Fixes the previous check, which required only
     * {@code |dx - dy| == 1} together with {@code dx == 2} or
     * {@code dy == 2} and therefore wrongly accepted displacements such as
     * (2,3) and (3,2).
     *
     * @param source start cell
     * @param dest   destination cell
     * @return true if the displacement is a legal knight move
     */
    @Override
    public boolean isMove(Cell source, Cell dest) {
        int deltax = Math.abs(source.getX() - dest.getX());
        int deltay = Math.abs(source.getY() - dest.getY());
        return (deltax == 2 && deltay == 1) || (deltax == 1 && deltay == 2);
    }
}
| NatashaPanchina/npanchina | chess/src/main/java/ru/job4j/chess/firuges/black/KnightBlack.java | Java | apache-2.0 | 1,234 |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hemf.hemfplus.record;
import org.apache.poi.util.Internal;
@Internal
/**
 * EMF+ record types as defined by the MS-EMFPLUS specification
 * (section 2.1.1.1, RecordType enumeration). Ids run sequentially
 * from 0x4001 (header) to 0x403A (setTSClip); each id is paired with
 * the class used to parse that record.
 */
public enum HemfPlusRecordType {
    header(0x4001, HemfPlusHeader.class),
    endOfFile(0x4002, UnimplementedHemfPlusRecord.class),
    comment(0x4003, UnimplementedHemfPlusRecord.class),
    getDC(0x4004, UnimplementedHemfPlusRecord.class),
    multiFormatStart(0x4005, UnimplementedHemfPlusRecord.class),
    multiFormatSection(0x4006, UnimplementedHemfPlusRecord.class),
    multiFormatEnd(0x4007, UnimplementedHemfPlusRecord.class),
    object(0x4008, UnimplementedHemfPlusRecord.class),
    clear(0x4009, UnimplementedHemfPlusRecord.class),
    fillRects(0x400A, UnimplementedHemfPlusRecord.class),
    drawRects(0x400B, UnimplementedHemfPlusRecord.class),
    fillPolygon(0x400C, UnimplementedHemfPlusRecord.class),
    drawLines(0x400D, UnimplementedHemfPlusRecord.class),
    fillEllipse(0x400E, UnimplementedHemfPlusRecord.class),
    drawEllipse(0x400F, UnimplementedHemfPlusRecord.class),
    fillPie(0x4010, UnimplementedHemfPlusRecord.class),
    drawPie(0x4011, UnimplementedHemfPlusRecord.class),
    drawArc(0x4012, UnimplementedHemfPlusRecord.class),
    fillRegion(0x4013, UnimplementedHemfPlusRecord.class),
    fillPath(0x4014, UnimplementedHemfPlusRecord.class),
    drawPath(0x4015, UnimplementedHemfPlusRecord.class),
    fillClosedCurve(0x4016, UnimplementedHemfPlusRecord.class),
    drawClosedCurve(0x4017, UnimplementedHemfPlusRecord.class),
    drawCurve(0x4018, UnimplementedHemfPlusRecord.class),
    drawBeziers(0x4019, UnimplementedHemfPlusRecord.class),
    drawImage(0x401A, UnimplementedHemfPlusRecord.class),
    drawImagePoints(0x401B, UnimplementedHemfPlusRecord.class),
    drawString(0x401C, UnimplementedHemfPlusRecord.class),
    setRenderingOrigin(0x401D, UnimplementedHemfPlusRecord.class),
    setAntiAliasMode(0x401E, UnimplementedHemfPlusRecord.class),
    setTextRenderingHint(0x401F, UnimplementedHemfPlusRecord.class),
    setTextContrast(0x4020, UnimplementedHemfPlusRecord.class),
    setInterpolationMode(0x4021, UnimplementedHemfPlusRecord.class),
    setPixelOffsetMode(0x4022, UnimplementedHemfPlusRecord.class),
    setComositingMode(0x4023, UnimplementedHemfPlusRecord.class),
    setCompositingQuality(0x4024, UnimplementedHemfPlusRecord.class),
    save(0x4025, UnimplementedHemfPlusRecord.class),
    restore(0x4026, UnimplementedHemfPlusRecord.class),
    beginContainer(0x4027, UnimplementedHemfPlusRecord.class),
    // Fixed: was 0x428, which breaks the sequential 0x4001-0x403A range;
    // MS-EMFPLUS defines EmfPlusBeginContainerNoParams as 0x4028.
    beginContainerNoParams(0x4028, UnimplementedHemfPlusRecord.class),
    endContainer(0x4029, UnimplementedHemfPlusRecord.class),
    setWorldTransform(0x402A, UnimplementedHemfPlusRecord.class),
    resetWorldTransform(0x402B, UnimplementedHemfPlusRecord.class),
    multiplyWorldTransform(0x402C, UnimplementedHemfPlusRecord.class),
    translateWorldTransform(0x402D, UnimplementedHemfPlusRecord.class),
    scaleWorldTransform(0x402E, UnimplementedHemfPlusRecord.class),
    rotateWorldTransform(0x402F, UnimplementedHemfPlusRecord.class),
    setPageTransform(0x4030, UnimplementedHemfPlusRecord.class),
    resetClip(0x4031, UnimplementedHemfPlusRecord.class),
    setClipRect(0x4032, UnimplementedHemfPlusRecord.class),
    setClipRegion(0x4033, UnimplementedHemfPlusRecord.class),
    setClipPath(0x4034, UnimplementedHemfPlusRecord.class),
    offsetClip(0x4035, UnimplementedHemfPlusRecord.class),
    drawDriverstring(0x4036, UnimplementedHemfPlusRecord.class),
    strokeFillPath(0x4037, UnimplementedHemfPlusRecord.class),
    serializableObject(0x4038, UnimplementedHemfPlusRecord.class),
    setTSGraphics(0x4039, UnimplementedHemfPlusRecord.class),
    setTSClip(0x403A, UnimplementedHemfPlusRecord.class);

    /** Record type id as stored in the EMF+ stream. */
    public final long id;
    /** Parser class associated with this record type. */
    public final Class<? extends HemfPlusRecord> clazz;

    HemfPlusRecordType(long id, Class<? extends HemfPlusRecord> clazz) {
        this.id = id;
        this.clazz = clazz;
    }

    /**
     * Looks up a record type by its stream id.
     *
     * @param id record type id read from the EMF+ stream
     * @return the matching record type, or null if the id is unknown
     */
    public static HemfPlusRecordType getById(long id) {
        for (HemfPlusRecordType wrt : values()) {
            if (wrt.id == id) return wrt;
        }
        return null;
    }
}
| lvweiwolf/poi-3.16 | src/scratchpad/src/org/apache/poi/hemf/hemfplus/record/HemfPlusRecordType.java | Java | apache-2.0 | 5,033 |
package com.yakami.light.view.fragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.yakami.light.BuildConfig;
import com.yakami.light.R;
import com.yakami.light.view.fragment.base.BaseFragment;
import butterknife.Bind;
import butterknife.ButterKnife;
/**
* Created by Yakami on 2016/8/5, enjoying it!
*/
public class AboutFragment extends BaseFragment {
@Bind(R.id.tv_about) TextView mAbout;
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_about, container, false);
ButterKnife.bind(this,view);
mAbout.setText("版本: " + BuildConfig.VERSION_NAME + "\n" + mRes.getString(R.string.author));
mAbout.append("\ngithub地址:https://github.com/hanFengSan/light");
return view;
}
}
| hanFengSan/light | app/src/main/java/com/yakami/light/view/fragment/AboutFragment.java | Java | apache-2.0 | 958 |
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def rotateRight(self, head, k):
        """Rotate a singly linked list to the right by k places.

        :type head: ListNode
        :type k: int
        :rtype: ListNode
        """
        if not head:
            return None
        # First pass: count the nodes.
        p = head
        listLen = 0
        while p:
            p = p.next
            listLen += 1
        # Rotating by a multiple of the length is a no-op, so reduce k.
        k = k % listLen
        if k == 0:
            return head
        # Two pointers kept k nodes apart: when p2 reaches the tail,
        # p1 is the node just before the new head.
        p1 = head
        p2 = head
        # range (not the Python-2-only xrange) keeps this 2/3 compatible.
        for _ in range(k):
            p2 = p2.next
        assert p2
        while p2.next:
            p1 = p1.next
            p2 = p2.next
        # Splice: cut before the new head, reattach the old head at the tail.
        newHead = p1.next
        p1.next = None
        p2.next = head
        return newHead
from utils import *
# Demo/smoke test: rotating [1, 2, 3, 4, 5] right by 2 should print [4, 5, 1, 2, 3].
printlist(Solution().rotateRight(makelist(1,2 ,3 ,4 ,5), 2))
package com.adaptris.util.text.mime;
import static com.adaptris.util.text.mime.MimeConstants.HEADER_CONTENT_ENCODING;
import static org.apache.commons.lang3.StringUtils.isBlank;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import javax.mail.MessagingException;
import javax.mail.internet.InternetHeaders;
import javax.mail.internet.MimeUtility;
/**
 * Static helper methods for MIME content handling.
 */
public class MimeUtils {

    /** Static utility class; not meant to be instantiated. */
    private MimeUtils() {
    }

    /**
     * Encodes the given bytes with the given content-transfer-encoding.
     *
     * <p>Side effect: when {@code encoding} is non-blank, the
     * {@code Content-Transfer-Encoding} header is set on {@code hdrs}.
     *
     * @param data raw bytes to encode
     * @param encoding the MIME transfer encoding (e.g. "base64"); may be
     *        blank, in which case no header is set
     * @param hdrs headers updated with the encoding that was applied
     * @return the encoded bytes
     * @throws MessagingException if the encoding is not supported
     * @throws IOException if writing the encoded data fails
     */
    public static byte[] encodeData(byte[] data, String encoding, InternetHeaders hdrs)
            throws MessagingException, IOException {
        if (!isBlank(encoding)) {
            hdrs.setHeader(HEADER_CONTENT_ENCODING, encoding);
        }
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // try-with-resources closes (and flushes) the encoding stream so the
        // byte array is complete before it is read back.
        try (OutputStream encodedOut = MimeUtility.encode(out, encoding)) {
            encodedOut.write(data);
        }
        return out.toByteArray();
    }
}
| adaptris/interlok | interlok-core/src/main/java/com/adaptris/util/text/mime/MimeUtils.java | Java | apache-2.0 | 855 |
/*
* #%L
* wcm.io
* %%
* Copyright (C) 2019 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.handler.mediasource.dam.impl;
import static com.day.cq.dam.api.DamConstants.PREFIX_ASSET_WEB;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import com.day.cq.dam.api.Asset;
import com.day.cq.dam.api.Rendition;
import io.wcm.handler.media.testcontext.AppAemContext;
import io.wcm.testing.mock.aem.junit5.AemContext;
import io.wcm.testing.mock.aem.junit5.AemContextExtension;
@ExtendWith(AemContextExtension.class)
class DamAutoCroppingTest {

    // AEM mock context preconfigured for this application's media handling.
    private final AemContext context = AppAemContext.newAemContext();

    /**
     * An asset that has a web rendition should yield that rendition as the
     * basis for cropping.
     */
    @Test
    @SuppressWarnings("null")
    void testGetWebRenditionForCropping() {
        Asset asset = context.create().asset("/content/dam/asset1.jpg", 160, 90, "image/jpeg");
        Rendition webRendition = context.create().assetRendition(asset, PREFIX_ASSET_WEB + ".80.45.jpg", 80, 45, "image/jpeg");

        RenditionMetadata result = DamAutoCropping.getWebRenditionForCropping(asset);
        assertEquals(webRendition.getPath(), result.getRendition().getPath());
    }

    /**
     * Without any renditions there is no web rendition to crop from, so the
     * lookup should return null.
     */
    @Test
    void testGetWebRenditionNotExisting() {
        Asset assetWithoutRenditions = context.create().asset("/content/dam/asset2.jpg", 160, 90, "image/jpeg");
        assertNull(DamAutoCropping.getWebRenditionForCropping(assetWithoutRenditions));
    }
}
| cnagel/wcm-io-handler | media/src/test/java/io/wcm/handler/mediasource/dam/impl/DamAutoCroppingTest.java | Java | apache-2.0 | 2,026 |
/// <reference types='jest' />
import * as React from 'react';
import ServicesPanel from '../ServicesPanel';
import Cases from './ServicesPanel.cases';
import { shallow, mount, render } from 'enzyme';
// NOTE(review): most assertions in this suite are commented out, so the specs
// below (except the first) currently pass vacuously and only verify that the
// component mounts. TODO: re-enable the disabled assertions or delete them.
describe('ServicesPanel', () => {
  let servicesPanel:any;

  // Mount a fresh panel with the default fixture before every spec.
  beforeEach(()=>{
    servicesPanel = mount(<ServicesPanel {...Cases['Default']}/>)
  });

  // The only spec with live assertions: checks the main child components render.
  it('should render correctly', () => {
    expect(servicesPanel.find('Dropdown').length).toEqual(1);
    expect(servicesPanel.find('ServicesList').length).toEqual(1);
    expect(servicesPanel.find('Input').length).toEqual(1);
  });

  it('should render the roles dropdown with the services prop roles as items', () => {
    // let roles: string[] = [];
    // servicesPanel.props().services.forEach((service:any) => {
    //   if (roles.indexOf(service.roleId) === -1) {
    //     roles.push(service.roleId);
    //   }
    // });
    // let dropDownItemsTexts = servicesPanel.find('DropdownItem .role').map((node:any) => { return node.text()});
    // expect(JSON.stringify(roles.sort())).toEqual(JSON.stringify(dropDownItemsTexts.sort()));
  });

  it('should render only services with the selected role', () => {
    // let activeRole = 'Mollis.';
    // servicesPanel.setState({
    //   activeRole: activeRole
    // });
    // let allHaveActiveRole = servicesPanel.find('ServiceCard').map((node:any) => {
    //   return node.props().service.roles.indexOf(activeRole) > -1;
    // }).every((value:boolean) => {
    //   return value === true;
    // });
    // expect(allHaveActiveRole).toEqual(true);
  });

  it('should render only services with the selected role when a search value is entered by user', () => {
    // let activeRole = 'Mollis.';
    // servicesPanel.setState({
    //   activeRole: activeRole,
    //   searhValue: 'test'
    // });
    // let allHaveActiveRole = servicesPanel.find('ServiceCard').map((node:any) => {
    //   return node.props().service.roles.indexOf(activeRole) > -1;
    // }).every((value:boolean) => {
    //   return value === true;
    // });
    // expect(allHaveActiveRole).toEqual(true);
  });

  it('should render the Not found services message when search is performed an it returns no services', () => {
    // servicesPanel.setState({
    //   searchValue:'mycrazytestsearchcriteria',
    //   activeRole:'Mollis.'
    // });
    // expect(servicesPanel.find('h3').first().text()).toEqual('No services found');
  });

  it('should render the right search indicator in search bar depending on searching state', () => {
    // servicesPanel.setState({loadingSearch:false, searchValue:''});
    // expect(servicesPanel.find('.rowHead').first().find('i').length).toEqual(0);
    // servicesPanel.setState({loadingSearch:false, searchValue:'mysearchcriteria'});
    // expect(servicesPanel.find('.rowHead').first().find('i').length).toEqual(1);
    // expect(servicesPanel.find('.rowHead').first().find('i').html()).toContain('fa fa-times fa-lg');
    // servicesPanel.setState({loadingSearch:true, searchValue:'mysearchcriteria'});
    // expect(servicesPanel.find('.rowHead').first().find('i').length).toEqual(1);
    // expect(servicesPanel.find('.rowHead').first().find('i').html()).toContain('fa fa-pulse fa-spinner');
  });

  it(`should set the right searching state after a second
      depending on the searchOnChangeMethod actioned by user input`, (done) => {
    // Immediately completes; the real async assertions below are disabled.
    done();
    // servicesPanel.instance().searchOnChange({target:{value:'mysearchcriteria'}, persist: () => {}});
    // expect(servicesPanel.state().searchValue).toEqual('mysearchcriteria');
    // expect(servicesPanel.state().loadingSearch).toEqual(true);
    // try {
    //   setTimeout(()=>{
    //     expect(servicesPanel.state().loadingSearch).toEqual(false);
    //     done();
    //   }, 2000);
    // }catch (e){
    //   done.fail(e);
    // }
  });
});
package com.indoqa.daisy.cocoon.controller;
import org.apache.cocoon.rest.controller.annotation.SitemapParameter;
import org.apache.cocoon.rest.controller.response.RestResponse;
import org.apache.cocoon.rest.controller.response.URLResponse;
/**
 * Controller that resolves a Daisy document part addressed by path and
 * redirects the client to the rendered HTML representation of that part.
 */
public class DocumentPartByPathController extends AbstractDocumentController {

    /** Name of the document part to render, injected from the sitemap. */
    @SitemapParameter
    private String part;

    @Override
    public RestResponse sendSuccessResponse(String id) throws Exception {
        // Redirect to the HTML rendering of the requested part of document <id>.
        String targetUrl = String.format("/default/doc/id/%s/part/%s.html", id, this.part);
        return new URLResponse(targetUrl);
    }
}
| reinhard/daisy-integration | daisy-cocoon/src/main/java/com/indoqa/daisy/cocoon/controller/DocumentPartByPathController.java | Java | apache-2.0 | 557 |
package whelk.gui;
import whelk.PortableScript;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
/**
 * Wizard card that lets the user pick a serialized {@link PortableScript}
 * from disk. The chosen script's comment is displayed, and the deserialized
 * object is passed on as the parameter for the next card (the run step).
 */
public class SelectScriptPanel extends WizardCard implements ActionListener
{
    Wizard wizard;
    // Displays the comment/description of the currently selected script.
    JLabel description = new JLabel();
    public SelectScriptPanel(Wizard wizard)
    {
        super(wizard);
        this.wizard = wizard;
        Box vBox = Box.createVerticalBox();
        // Button label is Swedish for "Open script file".
        JButton loadButton = new JButton("Öppna script-fil");
        loadButton.addActionListener(this);
        loadButton.setActionCommand("load");
        vBox.add(loadButton);
        vBox.add(Box.createVerticalStrut(10));
        // "Valt script:" is Swedish for "Selected script:".
        vBox.add(new JLabel("Valt script:"));
        vBox.add(description);
        this.add(vBox);
    }
    @Override
    void onShow(Object parameterFromPreviousCard)
    {
        // "Next" stays disabled until a valid script file has been loaded.
        setNextCard(Wizard.RUN);
        disableNext();
    }
    @Override
    public void actionPerformed(ActionEvent actionEvent)
    {
        if (actionEvent.getActionCommand().equals("load"))
        {
            JFileChooser chooser = new JFileChooser();
            chooser.setPreferredSize(new Dimension(1024, 768));
            int returnVal = chooser.showOpenDialog(wizard);
            if(returnVal == JFileChooser.APPROVE_OPTION)
            {
                File chosenFile = chooser.getSelectedFile();
                // try-with-resources closes the stream on every path.
                try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(chosenFile)))
                {
                    Object loaded = ois.readObject();
                    // Files that do not contain a PortableScript are silently
                    // ignored: "next" simply remains disabled.
                    if (loaded instanceof PortableScript)
                    {
                        PortableScript loadedScript = (PortableScript) loaded;
                        description.setText(loadedScript.comment);
                        setParameterForNextCard(loaded);
                        enableNext();
                    }
                } catch (IOException | ClassNotFoundException ioe)
                {
                    // Unreadable or corrupt files abort the whole wizard.
                    Wizard.exitFatal(ioe.getMessage());
                }
            }
        }
    }
}
| libris/librisxl | gui-whelktool/src/main/java/whelk/gui/SelectScriptPanel.java | Java | apache-2.0 | 2,204 |
/**
* Copyright (C) 2013 metrics-statsd contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.readytalk.metrics;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Metered;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import javax.annotation.concurrent.NotThreadSafe;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Locale;
import java.util.Map;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;
/**
 * A reporter which publishes metric values to a StatsD server.
 *
 * @see <a href="https://github.com/etsy/statsd">StatsD</a>
 */
@NotThreadSafe
public class StatsDReporter extends ScheduledReporter {
  private static final Logger LOG = LoggerFactory.getLogger(StatsDReporter.class);

  private final StatsD statsD;
  private final String prefix;
  private final String suffix;

  private StatsDReporter(final MetricRegistry registry,
                         final StatsD statsD,
                         final String prefix,
                         final String suffix,
                         final TimeUnit rateUnit,
                         final TimeUnit durationUnit,
                         final MetricFilter filter) {
    super(registry, "statsd-reporter", filter, rateUnit, durationUnit);
    this.statsD = statsD;
    this.prefix = prefix;
    this.suffix = suffix;
  }

  /**
   * Returns a new {@link Builder} for {@link StatsDReporter}.
   *
   * @param registry the registry to report
   * @return a {@link Builder} instance for a {@link StatsDReporter}
   */
  public static Builder forRegistry(final MetricRegistry registry) {
    return new Builder(registry);
  }

  /**
   * A builder for {@link StatsDReporter} instances. Defaults to not using a prefix,
   * converting rates to events/second, converting durations to milliseconds, and not
   * filtering metrics.
   */
  @NotThreadSafe
  public static final class Builder {
    private final MetricRegistry registry;
    private String prefix;
    private String suffix;
    private TimeUnit rateUnit;
    private TimeUnit durationUnit;
    private MetricFilter filter;

    private Builder(final MetricRegistry registry) {
      this.registry = registry;
      this.prefix = null;
      // Explicit for consistency with the other defaults.
      this.suffix = null;
      this.rateUnit = TimeUnit.SECONDS;
      this.durationUnit = TimeUnit.MILLISECONDS;
      this.filter = MetricFilter.ALL;
    }

    /**
     * Prefix all metric names with the given string.
     *
     * @param _prefix the prefix for all metric names
     * @return {@code this}
     */
    public Builder prefixedWith(@Nullable final String _prefix) {
      this.prefix = _prefix;
      return this;
    }

    /**
     * Suffix metric names with the given string; it is substituted into any
     * {@code %s} placeholder a metric name contains (see {@code suffix(String)}).
     *
     * @param _suffix the suffix for all metric names
     * @return {@code this}
     */
    public Builder suffixedWith(@Nullable final String _suffix) {
      this.suffix = _suffix;
      return this;
    }

    /**
     * Convert rates to the given time unit.
     *
     * @param _rateUnit a unit of time
     * @return {@code this}
     */
    public Builder convertRatesTo(final TimeUnit _rateUnit) {
      this.rateUnit = _rateUnit;
      return this;
    }

    /**
     * Convert durations to the given time unit.
     *
     * @param _durationUnit a unit of time
     * @return {@code this}
     */
    public Builder convertDurationsTo(final TimeUnit _durationUnit) {
      this.durationUnit = _durationUnit;
      return this;
    }

    /**
     * Only report metrics which match the given filter.
     *
     * @param _filter a {@link MetricFilter}
     * @return {@code this}
     */
    public Builder filter(final MetricFilter _filter) {
      this.filter = _filter;
      return this;
    }

    /**
     * Builds a {@link StatsDReporter} with the given properties, sending metrics to StatsD at the given host and port.
     *
     * @param host the hostname of the StatsD server.
     * @param port the port of the StatsD server. This is typically 8125.
     * @return a {@link StatsDReporter}
     */
    public StatsDReporter build(final String host, final int port) {
      return build(new StatsD(host, port));
    }

    /**
     * Builds a {@link StatsDReporter} with the given properties, sending metrics using the
     * given {@link StatsD} client.
     *
     * @param statsD a {@link StatsD} client
     * @return a {@link StatsDReporter}
     */
    public StatsDReporter build(final StatsD statsD) {
      return new StatsDReporter(registry, statsD, prefix, suffix, rateUnit, durationUnit, filter);
    }
  }

  @Override
  @SuppressWarnings("rawtypes") //Metrics 3.0 interface specifies the raw Gauge type
  public void report(final SortedMap<String, Gauge> gauges,
                     final SortedMap<String, Counter> counters,
                     final SortedMap<String, Histogram> histograms,
                     final SortedMap<String, Meter> meters,
                     final SortedMap<String, Timer> timers) {
    try {
      statsD.connect();
      for (Map.Entry<String, Gauge> entry : gauges.entrySet()) {
        reportGauge(entry.getKey(), entry.getValue());
      }
      for (Map.Entry<String, Counter> entry : counters.entrySet()) {
        reportCounter(entry.getKey(), entry.getValue());
      }
      for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
        reportHistogram(entry.getKey(), entry.getValue());
      }
      for (Map.Entry<String, Meter> entry : meters.entrySet()) {
        reportMetered(entry.getKey(), entry.getValue());
      }
      for (Map.Entry<String, Timer> entry : timers.entrySet()) {
        reportTimer(entry.getKey(), entry.getValue());
      }
    } catch (IOException e) {
      // Fix: the message previously had no "{}" placeholder, so the statsD
      // argument was silently dropped by SLF4J; 'e' stays the throwable.
      LOG.warn("Unable to report to StatsD {}", statsD, e);
    } finally {
      try {
        statsD.close();
      } catch (IOException e) {
        LOG.debug("Error disconnecting from StatsD {}", statsD, e);
      }
    }
  }

  /** Sends the timer's snapshot percentiles/latencies, then its rate data. */
  private void reportTimer(final String name, final Timer timer) {
    final Snapshot snapshot = timer.getSnapshot();
    String suffixedName = suffix(name);
    statsD.send(prefix(suffixedName, "max"), formatNumber(convertDuration(snapshot.getMax())));
    statsD.send(prefix(suffixedName, "mean"), formatNumber(convertDuration(snapshot.getMean())));
    statsD.send(prefix(suffixedName, "min"), formatNumber(convertDuration(snapshot.getMin())));
    statsD.send(prefix(suffixedName, "stddev"), formatNumber(convertDuration(snapshot.getStdDev())));
    statsD.send(prefix(suffixedName, "p50"), formatNumber(convertDuration(snapshot.getMedian())));
    statsD.send(prefix(suffixedName, "p75"), formatNumber(convertDuration(snapshot.get75thPercentile())));
    statsD.send(prefix(suffixedName, "p95"), formatNumber(convertDuration(snapshot.get95thPercentile())));
    statsD.send(prefix(suffixedName, "p98"), formatNumber(convertDuration(snapshot.get98thPercentile())));
    statsD.send(prefix(suffixedName, "p99"), formatNumber(convertDuration(snapshot.get99thPercentile())));
    statsD.send(prefix(suffixedName, "p999"), formatNumber(convertDuration(snapshot.get999thPercentile())));
    reportMetered(name, timer);
  }

  /** Sends sample count plus 1/5/15-minute and mean rates for any metered metric. */
  private void reportMetered(final String name, final Metered meter) {
    String suffixedName = suffix(name);
    statsD.send(prefix(suffixedName, "samples"), formatNumber(meter.getCount()));
    statsD.send(prefix(suffixedName, "m1_rate"), formatNumber(convertRate(meter.getOneMinuteRate())));
    statsD.send(prefix(suffixedName, "m5_rate"), formatNumber(convertRate(meter.getFiveMinuteRate())));
    statsD.send(prefix(suffixedName, "m15_rate"), formatNumber(convertRate(meter.getFifteenMinuteRate())));
    statsD.send(prefix(suffixedName, "mean_rate"), formatNumber(convertRate(meter.getMeanRate())));
  }

  /** Sends the histogram's count and snapshot statistics (no unit conversion). */
  private void reportHistogram(final String name, final Histogram histogram) {
    final Snapshot snapshot = histogram.getSnapshot();
    String suffixedName = suffix(name);
    statsD.send(prefix(suffixedName, "samples"), formatNumber(histogram.getCount()));
    statsD.send(prefix(suffixedName, "max"), formatNumber(snapshot.getMax()));
    statsD.send(prefix(suffixedName, "mean"), formatNumber(snapshot.getMean()));
    statsD.send(prefix(suffixedName, "min"), formatNumber(snapshot.getMin()));
    statsD.send(prefix(suffixedName, "stddev"), formatNumber(snapshot.getStdDev()));
    statsD.send(prefix(suffixedName, "p50"), formatNumber(snapshot.getMedian()));
    statsD.send(prefix(suffixedName, "p75"), formatNumber(snapshot.get75thPercentile()));
    statsD.send(prefix(suffixedName, "p95"), formatNumber(snapshot.get95thPercentile()));
    statsD.send(prefix(suffixedName, "p98"), formatNumber(snapshot.get98thPercentile()));
    statsD.send(prefix(suffixedName, "p99"), formatNumber(snapshot.get99thPercentile()));
    statsD.send(prefix(suffixedName, "p999"), formatNumber(snapshot.get999thPercentile()));
  }

  private void reportCounter(final String name, final Counter counter) {
    statsD.send(prefix(suffix(name)), formatNumber(counter.getCount()));
  }

  @SuppressWarnings("rawtypes") //Metrics 3.0 passes us the raw Gauge type
  private void reportGauge(final String name, final Gauge gauge) {
    // Non-numeric gauge values are skipped (format(...) returns null).
    final String value = format(gauge.getValue());
    if (value != null) {
      statsD.send(prefix(suffix(name)), value);
    }
  }

  /**
   * Formats a gauge value for transmission.
   *
   * @return the formatted number, or {@code null} if the value is not a
   *         supported numeric type
   */
  @Nullable
  private String format(final Object o) {
    if (o instanceof Float) {
      return formatNumber(((Float) o).doubleValue());
    } else if (o instanceof Double) {
      return formatNumber((Double) o);
    } else if (o instanceof Byte) {
      return formatNumber(((Byte) o).longValue());
    } else if (o instanceof Short) {
      return formatNumber(((Short) o).longValue());
    } else if (o instanceof Integer) {
      return formatNumber(((Integer) o).longValue());
    } else if (o instanceof Long) {
      return formatNumber((Long) o);
    } else if (o instanceof BigInteger) {
      return formatNumber((BigInteger) o);
    } else if (o instanceof BigDecimal) {
      return formatNumber(((BigDecimal) o).doubleValue());
    }
    return null;
  }

  /** Joins the configured prefix and the given name components with dots. */
  private String prefix(final String... components) {
    return MetricRegistry.name(prefix, components);
  }

  /**
   * Substitutes the configured suffix into a metric name's {@code %s}
   * placeholder; names without a placeholder (or no configured suffix) are
   * returned unchanged.
   */
  private String suffix(String name) {
    if (suffix == null || suffix.isEmpty() || !name.contains("%s")) {
      return name;
    }
    return String.format(name, suffix);
  }

  private String formatNumber(final BigInteger n) {
    return String.valueOf(n);
  }

  private String formatNumber(final long n) {
    return Long.toString(n);
  }

  private String formatNumber(final double v) {
    // Locale.US keeps the decimal separator a '.' regardless of JVM locale.
    return String.format(Locale.US, "%2.2f", v);
  }
}
| researchgate/metrics-statsd | metrics3-statsd/src/main/java/com/readytalk/metrics/StatsDReporter.java | Java | apache-2.0 | 11,572 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.alexaforbusiness.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.alexaforbusiness.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * GetNetworkProfileRequestMarshaller
 *
 * <p>Generated marshaller that writes the fields of a
 * {@link GetNetworkProfileRequest} into the request payload. Do not edit by
 * hand; regenerate with the AWS SDK code generator.</p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class GetNetworkProfileRequestMarshaller {

    // Binds the request's NetworkProfileArn to the "NetworkProfileArn"
    // payload member.
    private static final MarshallingInfo<String> NETWORKPROFILEARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("NetworkProfileArn").build();

    // Stateless, so a single shared instance suffices.
    private static final GetNetworkProfileRequestMarshaller instance = new GetNetworkProfileRequestMarshaller();

    public static GetNetworkProfileRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetNetworkProfileRequest getNetworkProfileRequest, ProtocolMarshaller protocolMarshaller) {
        if (getNetworkProfileRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getNetworkProfileRequest.getNetworkProfileArn(), NETWORKPROFILEARN_BINDING);
        } catch (Exception e) {
            // Wrap every marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| aws/aws-sdk-java | aws-java-sdk-alexaforbusiness/src/main/java/com/amazonaws/services/alexaforbusiness/model/transform/GetNetworkProfileRequestMarshaller.java | Java | apache-2.0 | 2,087 |
test = {
'name': 'Question 2',
'points': 2,
'suites': [
{
'type': 'sqlite',
'setup': r"""
sqlite> .open hw1.db
""",
'cases': [
{
'code': r"""
sqlite> select * from colors;
red|primary
blue|primary
green|secondary
yellow|primary
""",
},
{
'code': r"""
sqlite> select color from colors;
red
blue
green
yellow
""",
},
],
}
]
}
| jackzhao-mj/ok-client | demo/sqlite/tests/q2.py | Python | apache-2.0 | 548 |
<?php
/**
* The Template for displaying all single posts.
*
* @package volta
*/
get_header(); ?>

	<div id="primary" class="content-area">
		<main id="main" class="site-main" role="main">

		<?php // Standard single-post loop: render content, then optional extras. ?>
		<?php while ( have_posts() ) : the_post(); ?>
			<?php get_template_part( 'content', 'single' ); ?>
			<?php // Author bio and prev/next navigation are opt-in via Customizer settings. ?>
			<?php if( get_theme_mod('volta_show_author_post') == 'yes' ) { volta_author_bio(); } ?>
			<?php if( get_theme_mod('volta_show_nav_post') == 'yes' ) { volta_post_nav(); } ?>
			<?php
				// If comments are open or we have at least one comment, load up the comment template
				if ( comments_open() || '0' != get_comments_number() ) :
					comments_template();
				endif;
			?>
		<?php endwhile; // end of the loop. ?>

		</main><!-- #main -->
	</div><!-- #primary -->

<?php get_sidebar(); ?>
<?php get_footer(); ?> | AKDiamond/iitbazaar | wp-content/themes/volta/single.php | PHP | apache-2.0 | 864 |
package org.gradle.test.performance.mediummonolithicjavaproject.p282;
/**
 * Generated fixture bean used in Gradle build-performance experiments: ten
 * String properties, each with a trivial getter/setter pair. No behavior
 * beyond plain field access.
 */
public class Production5650 {
    private String property0;
    public String getProperty0() {
        return property0;
    }
    public void setProperty0(String value) {
        property0 = value;
    }
    private String property1;
    public String getProperty1() {
        return property1;
    }
    public void setProperty1(String value) {
        property1 = value;
    }
    private String property2;
    public String getProperty2() {
        return property2;
    }
    public void setProperty2(String value) {
        property2 = value;
    }
    private String property3;
    public String getProperty3() {
        return property3;
    }
    public void setProperty3(String value) {
        property3 = value;
    }
    private String property4;
    public String getProperty4() {
        return property4;
    }
    public void setProperty4(String value) {
        property4 = value;
    }
    private String property5;
    public String getProperty5() {
        return property5;
    }
    public void setProperty5(String value) {
        property5 = value;
    }
    private String property6;
    public String getProperty6() {
        return property6;
    }
    public void setProperty6(String value) {
        property6 = value;
    }
    private String property7;
    public String getProperty7() {
        return property7;
    }
    public void setProperty7(String value) {
        property7 = value;
    }
    private String property8;
    public String getProperty8() {
        return property8;
    }
    public void setProperty8(String value) {
        property8 = value;
    }
    private String property9;
    public String getProperty9() {
        return property9;
    }
    public void setProperty9(String value) {
        property9 = value;
    }
} | oehme/analysing-gradle-performance | my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p282/Production5650.java | Java | apache-2.0 | 1,891 |
package com.gaojice.diskviewer.processor;
import java.io.File;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import com.gaojice.diskviewer.dao.DiskFileDao;
import com.gaojice.diskviewer.entity.DiskFile;
/**
 * Runnable that records one file-system entry as a DiskFile row and, for
 * directories, submits one new FileProcessor task per child to the shared
 * Spring task executor (recursive fan-out).
 *
 * NOTE(review): task submission is unbounded — a large tree enqueues one task
 * per directory entry; confirm the executor's queue capacity can absorb this.
 */
public class FileProcessor implements Runnable {
    private DiskFileDao diskFileDao;
    // File or directory this task is responsible for.
    private File root;
    // Parent row for the inserted entry; presumably null for the scan root —
    // TODO confirm at the call site.
    private DiskFile p;
    private org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor taskExecutor;
    public FileProcessor(DiskFileDao diskFileDao, File root, DiskFile p, ThreadPoolTaskExecutor taskExecutor) {
        super();
        this.diskFileDao = diskFileDao;
        this.root = root;
        this.p = p;
        this.taskExecutor = taskExecutor;
    }
    public void setTaskExecutor(ThreadPoolTaskExecutor taskExecutor) {
        this.taskExecutor = taskExecutor;
    }
    public void setDiskFileDao(DiskFileDao diskFileDao) {
        this.diskFileDao = diskFileDao;
    }
    public void setRoot(File root) {
        this.root = root;
    }
    public void setP(DiskFile p) {
        this.p = p;
    }
    public void run() {
        DiskFile diskFile = new DiskFile();
        diskFile.setParent(p);
        diskFile.setName(root.getName());
        if (root.isDirectory()) {
            // Directories: stored under their absolute path (overwriting the
            // simple name set above), type "D", size 0.
            diskFile.setName(root.getAbsolutePath());
            diskFile.setType("D");
            diskFile.setSize(0L);
            diskFileDao.insert(diskFile);
            // listFiles() returns null on I/O error or permission denial;
            // such directories are recorded but their children are skipped.
            File[] children = root.listFiles();
            if (children != null) {
                for (File child : children) {
                    FileProcessor fileProcessor = new FileProcessor(diskFileDao, child, diskFile, taskExecutor);
                    taskExecutor.execute(fileProcessor);
                }
            }
        } else {
            // Regular files: type "F" with the file's byte length.
            diskFile.setType("F");
            diskFile.setSize(root.length());
            diskFileDao.insert(diskFile);
        }
    }
}
| gaojice/diskviewer | src/main/java/com/gaojice/diskviewer/processor/FileProcessor.java | Java | apache-2.0 | 1,628 |
<?php
namespace App\Http\Controllers;
use Illuminate\Foundation\Auth\Access\AuthorizesRequests;
use Illuminate\Foundation\Auth\Access\AuthorizesResources;
use Illuminate\Foundation\Bus\DispatchesJobs;
use Illuminate\Foundation\Validation\ValidatesRequests;
use Illuminate\Routing\Controller as BaseController;
class Controller extends BaseController
{
    use AuthorizesRequests, AuthorizesResources, DispatchesJobs, ValidatesRequests;

    /** @var int|null Cached id of the authenticated user (null while unset or guest). */
    private $user_id = null;

    /**
     * Resolve and memoize the id of the currently authenticated user.
     *
     * @return int|null the user's id, or null when the request is from a guest
     */
    public function getUserId()
    {
        if ($this->user_id != null) {
            return $this->user_id;
        }

        // Guests have no id; authenticated users are cached for later calls.
        $this->user_id = auth()->guest() ? null : auth()->user()->id;

        return $this->user_id;
    }
}
| theballkyo/Isad-2015 | src/app/Http/Controllers/Controller.php | PHP | apache-2.0 | 670 |
// Copyright 2017, Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using Google.Api.Gax;
using Google.Api.Gax.Grpc;
using Google.Protobuf.WellKnownTypes;
using Grpc.Core;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Threading;
using System.Threading.Tasks;
namespace Google.Cloud.Language.V1
{
/// <summary>
/// Settings for a <see cref="LanguageServiceClient"/>.
/// </summary>
public sealed partial class LanguageServiceSettings : ServiceSettingsBase
{
/// <summary>
/// Get a new instance of the default <see cref="LanguageServiceSettings"/>.
/// </summary>
/// <returns>
/// A new instance of the default <see cref="LanguageServiceSettings"/>.
/// </returns>
public static LanguageServiceSettings GetDefault() => new LanguageServiceSettings();
/// <summary>
/// Constructs a new <see cref="LanguageServiceSettings"/> object with default settings.
/// </summary>
public LanguageServiceSettings() { }
private LanguageServiceSettings(LanguageServiceSettings existing) : base(existing)
{
GaxPreconditions.CheckNotNull(existing, nameof(existing));
AnalyzeSentimentSettings = existing.AnalyzeSentimentSettings;
AnalyzeEntitiesSettings = existing.AnalyzeEntitiesSettings;
AnalyzeEntitySentimentSettings = existing.AnalyzeEntitySentimentSettings;
AnalyzeSyntaxSettings = existing.AnalyzeSyntaxSettings;
AnnotateTextSettings = existing.AnnotateTextSettings;
OnCopy(existing);
}
partial void OnCopy(LanguageServiceSettings existing);
/// <summary>
/// The filter specifying which RPC <see cref="StatusCode"/>s are eligible for retry
/// for "Idempotent" <see cref="LanguageServiceClient"/> RPC methods.
/// </summary>
/// <remarks>
/// The eligible RPC <see cref="StatusCode"/>s for retry for "Idempotent" RPC methods are:
/// <list type="bullet">
/// <item><description><see cref="StatusCode.DeadlineExceeded"/></description></item>
/// <item><description><see cref="StatusCode.Unavailable"/></description></item>
/// </list>
/// </remarks>
public static Predicate<RpcException> IdempotentRetryFilter { get; } =
RetrySettings.FilterForStatusCodes(StatusCode.DeadlineExceeded, StatusCode.Unavailable);
/// <summary>
/// The filter specifying which RPC <see cref="StatusCode"/>s are eligible for retry
/// for "NonIdempotent" <see cref="LanguageServiceClient"/> RPC methods.
/// </summary>
/// <remarks>
/// The eligible RPC <see cref="StatusCode"/>s for retry for "NonIdempotent" RPC methods are:
/// <list type="bullet">
/// <item><description><see cref="StatusCode.Unavailable"/></description></item>
/// </list>
/// </remarks>
public static Predicate<RpcException> NonIdempotentRetryFilter { get; } =
RetrySettings.FilterForStatusCodes(StatusCode.Unavailable);
/// <summary>
/// "Default" retry backoff for <see cref="LanguageServiceClient"/> RPC methods.
/// </summary>
/// <returns>
/// The "Default" retry backoff for <see cref="LanguageServiceClient"/> RPC methods.
/// </returns>
/// <remarks>
/// The "Default" retry backoff for <see cref="LanguageServiceClient"/> RPC methods is defined as:
/// <list type="bullet">
/// <item><description>Initial delay: 100 milliseconds</description></item>
/// <item><description>Maximum delay: 60000 milliseconds</description></item>
/// <item><description>Delay multiplier: 1.3</description></item>
/// </list>
/// </remarks>
public static BackoffSettings GetDefaultRetryBackoff() => new BackoffSettings(
delay: TimeSpan.FromMilliseconds(100),
maxDelay: TimeSpan.FromMilliseconds(60000),
delayMultiplier: 1.3
);
/// <summary>
/// "Default" timeout backoff for <see cref="LanguageServiceClient"/> RPC methods.
/// </summary>
/// <returns>
/// The "Default" timeout backoff for <see cref="LanguageServiceClient"/> RPC methods.
/// </returns>
/// <remarks>
/// The "Default" timeout backoff for <see cref="LanguageServiceClient"/> RPC methods is defined as:
/// <list type="bullet">
/// <item><description>Initial timeout: 60000 milliseconds</description></item>
/// <item><description>Timeout multiplier: 1.0</description></item>
/// <item><description>Maximum timeout: 60000 milliseconds</description></item>
/// </list>
/// </remarks>
public static BackoffSettings GetDefaultTimeoutBackoff() => new BackoffSettings(
delay: TimeSpan.FromMilliseconds(60000),
maxDelay: TimeSpan.FromMilliseconds(60000),
delayMultiplier: 1.0
);
/// <summary>
/// <see cref="CallSettings"/> for synchronous and asynchronous calls to
/// <c>LanguageServiceClient.AnalyzeSentiment</c> and <c>LanguageServiceClient.AnalyzeSentimentAsync</c>.
/// </summary>
/// <remarks>
/// The default <c>LanguageServiceClient.AnalyzeSentiment</c> and
/// <c>LanguageServiceClient.AnalyzeSentimentAsync</c> <see cref="RetrySettings"/> are:
/// <list type="bullet">
/// <item><description>Initial retry delay: 100 milliseconds</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds</description></item>
/// <item><description>Initial timeout: 60000 milliseconds</description></item>
/// <item><description>Timeout multiplier: 1.0</description></item>
/// <item><description>Timeout maximum delay: 60000 milliseconds</description></item>
/// </list>
/// Retry will be attempted on the following response status codes:
/// <list>
/// <item><description><see cref="StatusCode.DeadlineExceeded"/></description></item>
/// <item><description><see cref="StatusCode.Unavailable"/></description></item>
/// </list>
/// Default RPC expiration is 600000 milliseconds.
/// </remarks>
public CallSettings AnalyzeSentimentSettings { get; set; } = CallSettings.FromCallTiming(
CallTiming.FromRetry(new RetrySettings(
retryBackoff: GetDefaultRetryBackoff(),
timeoutBackoff: GetDefaultTimeoutBackoff(),
totalExpiration: Expiration.FromTimeout(TimeSpan.FromMilliseconds(600000)),
retryFilter: IdempotentRetryFilter
)));
/// <summary>
/// <see cref="CallSettings"/> for synchronous and asynchronous calls to
/// <c>LanguageServiceClient.AnalyzeEntities</c> and <c>LanguageServiceClient.AnalyzeEntitiesAsync</c>.
/// </summary>
/// <remarks>
/// The default <c>LanguageServiceClient.AnalyzeEntities</c> and
/// <c>LanguageServiceClient.AnalyzeEntitiesAsync</c> <see cref="RetrySettings"/> are:
/// <list type="bullet">
/// <item><description>Initial retry delay: 100 milliseconds</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds</description></item>
/// <item><description>Initial timeout: 60000 milliseconds</description></item>
/// <item><description>Timeout multiplier: 1.0</description></item>
/// <item><description>Timeout maximum delay: 60000 milliseconds</description></item>
/// </list>
/// Retry will be attempted on the following response status codes:
/// <list>
/// <item><description><see cref="StatusCode.DeadlineExceeded"/></description></item>
/// <item><description><see cref="StatusCode.Unavailable"/></description></item>
/// </list>
/// Default RPC expiration is 600000 milliseconds.
/// </remarks>
public CallSettings AnalyzeEntitiesSettings { get; set; } = CallSettings.FromCallTiming(
CallTiming.FromRetry(new RetrySettings(
retryBackoff: GetDefaultRetryBackoff(),
timeoutBackoff: GetDefaultTimeoutBackoff(),
totalExpiration: Expiration.FromTimeout(TimeSpan.FromMilliseconds(600000)),
retryFilter: IdempotentRetryFilter
)));
/// <summary>
/// <see cref="CallSettings"/> for synchronous and asynchronous calls to
/// <c>LanguageServiceClient.AnalyzeEntitySentiment</c> and <c>LanguageServiceClient.AnalyzeEntitySentimentAsync</c>.
/// </summary>
/// <remarks>
/// The default <c>LanguageServiceClient.AnalyzeEntitySentiment</c> and
/// <c>LanguageServiceClient.AnalyzeEntitySentimentAsync</c> <see cref="RetrySettings"/> are:
/// <list type="bullet">
/// <item><description>Initial retry delay: 100 milliseconds</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds</description></item>
/// <item><description>Initial timeout: 60000 milliseconds</description></item>
/// <item><description>Timeout multiplier: 1.0</description></item>
/// <item><description>Timeout maximum delay: 60000 milliseconds</description></item>
/// </list>
/// Retry will be attempted on the following response status codes:
/// <list>
/// <item><description><see cref="StatusCode.DeadlineExceeded"/></description></item>
/// <item><description><see cref="StatusCode.Unavailable"/></description></item>
/// </list>
/// Default RPC expiration is 600000 milliseconds.
/// </remarks>
public CallSettings AnalyzeEntitySentimentSettings { get; set; } = CallSettings.FromCallTiming(
CallTiming.FromRetry(new RetrySettings(
retryBackoff: GetDefaultRetryBackoff(),
timeoutBackoff: GetDefaultTimeoutBackoff(),
totalExpiration: Expiration.FromTimeout(TimeSpan.FromMilliseconds(600000)),
retryFilter: IdempotentRetryFilter
)));
/// <summary>
/// <see cref="CallSettings"/> for synchronous and asynchronous calls to
/// <c>LanguageServiceClient.AnalyzeSyntax</c> and <c>LanguageServiceClient.AnalyzeSyntaxAsync</c>.
/// </summary>
/// <remarks>
/// The default <c>LanguageServiceClient.AnalyzeSyntax</c> and
/// <c>LanguageServiceClient.AnalyzeSyntaxAsync</c> <see cref="RetrySettings"/> are:
/// <list type="bullet">
/// <item><description>Initial retry delay: 100 milliseconds</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds</description></item>
/// <item><description>Initial timeout: 60000 milliseconds</description></item>
/// <item><description>Timeout multiplier: 1.0</description></item>
/// <item><description>Timeout maximum delay: 60000 milliseconds</description></item>
/// </list>
/// Retry will be attempted on the following response status codes:
/// <list>
/// <item><description><see cref="StatusCode.DeadlineExceeded"/></description></item>
/// <item><description><see cref="StatusCode.Unavailable"/></description></item>
/// </list>
/// Default RPC expiration is 600000 milliseconds.
/// </remarks>
public CallSettings AnalyzeSyntaxSettings { get; set; } = CallSettings.FromCallTiming(
CallTiming.FromRetry(new RetrySettings(
retryBackoff: GetDefaultRetryBackoff(),
timeoutBackoff: GetDefaultTimeoutBackoff(),
totalExpiration: Expiration.FromTimeout(TimeSpan.FromMilliseconds(600000)),
retryFilter: IdempotentRetryFilter
)));
/// <summary>
/// <see cref="CallSettings"/> for synchronous and asynchronous calls to
/// <c>LanguageServiceClient.AnnotateText</c> and <c>LanguageServiceClient.AnnotateTextAsync</c>.
/// </summary>
/// <remarks>
/// The default <c>LanguageServiceClient.AnnotateText</c> and
/// <c>LanguageServiceClient.AnnotateTextAsync</c> <see cref="RetrySettings"/> are:
/// <list type="bullet">
/// <item><description>Initial retry delay: 100 milliseconds</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds</description></item>
/// <item><description>Initial timeout: 60000 milliseconds</description></item>
/// <item><description>Timeout multiplier: 1.0</description></item>
/// <item><description>Timeout maximum delay: 60000 milliseconds</description></item>
/// </list>
/// Retry will be attempted on the following response status codes:
/// <list type="bullet">
/// <item><description><see cref="StatusCode.DeadlineExceeded"/></description></item>
/// <item><description><see cref="StatusCode.Unavailable"/></description></item>
/// </list>
/// Default RPC expiration is 600000 milliseconds.
/// </remarks>
public CallSettings AnnotateTextSettings { get; set; } = CallSettings.FromCallTiming(
CallTiming.FromRetry(new RetrySettings(
retryBackoff: GetDefaultRetryBackoff(),
timeoutBackoff: GetDefaultTimeoutBackoff(),
totalExpiration: Expiration.FromTimeout(TimeSpan.FromMilliseconds(600000)),
retryFilter: IdempotentRetryFilter
)));
/// <summary>
/// Creates a deep clone of this object, with all the same property values.
/// </summary>
/// <returns>A deep clone of this <see cref="LanguageServiceSettings"/> object.</returns>
public LanguageServiceSettings Clone()
{
    // The copy constructor carries over every property value of this instance.
    return new LanguageServiceSettings(this);
}
}
/// <summary>
/// LanguageService client wrapper, for convenient use.
/// </summary>
public abstract partial class LanguageServiceClient
{
// NOTE(review): the Document-based convenience overloads below all delegate to the
// request-object overloads; only the request-object overloads are overridden by
// LanguageServiceClientImpl, which is why the base implementations of those (and
// GrpcClient) throw NotImplementedException.
/// <summary>
/// The default endpoint for the LanguageService service, which is a host of "language.googleapis.com" and a port of 443.
/// </summary>
public static ServiceEndpoint DefaultEndpoint { get; } = new ServiceEndpoint("language.googleapis.com", 443);
/// <summary>
/// The default LanguageService scopes.
/// </summary>
/// <remarks>
/// The default LanguageService scopes are:
/// <list type="bullet">
/// <item><description>"https://www.googleapis.com/auth/cloud-platform"</description></item>
/// </list>
/// </remarks>
public static IReadOnlyList<string> DefaultScopes { get; } = new ReadOnlyCollection<string>(new string[] {
"https://www.googleapis.com/auth/cloud-platform",
});
// Shared pool of channels keyed by endpoint; channels are obtained lazily by the
// endpoint-based Create/CreateAsync factories and drained by ShutdownDefaultChannelsAsync.
private static readonly ChannelPool s_channelPool = new ChannelPool(DefaultScopes);
// Note: we could have parameterless overloads of Create and CreateAsync,
// documented to just use the default endpoint, settings and credentials.
// Pros:
// - Might be more reassuring on first use
// - Allows method group conversions
// Con: overloads!
/// <summary>
/// Asynchronously creates a <see cref="LanguageServiceClient"/>, applying defaults for all unspecified settings,
/// and creating a channel connecting to the given endpoint with application default credentials where
/// necessary.
/// </summary>
/// <param name="endpoint">Optional <see cref="ServiceEndpoint"/>.</param>
/// <param name="settings">Optional <see cref="LanguageServiceSettings"/>.</param>
/// <returns>The task representing the created <see cref="LanguageServiceClient"/>.</returns>
public static async Task<LanguageServiceClient> CreateAsync(ServiceEndpoint endpoint = null, LanguageServiceSettings settings = null)
{
Channel channel = await s_channelPool.GetChannelAsync(endpoint ?? DefaultEndpoint).ConfigureAwait(false);
return Create(channel, settings);
}
/// <summary>
/// Synchronously creates a <see cref="LanguageServiceClient"/>, applying defaults for all unspecified settings,
/// and creating a channel connecting to the given endpoint with application default credentials where
/// necessary.
/// </summary>
/// <param name="endpoint">Optional <see cref="ServiceEndpoint"/>.</param>
/// <param name="settings">Optional <see cref="LanguageServiceSettings"/>.</param>
/// <returns>The created <see cref="LanguageServiceClient"/>.</returns>
public static LanguageServiceClient Create(ServiceEndpoint endpoint = null, LanguageServiceSettings settings = null)
{
Channel channel = s_channelPool.GetChannel(endpoint ?? DefaultEndpoint);
return Create(channel, settings);
}
/// <summary>
/// Creates a <see cref="LanguageServiceClient"/> which uses the specified channel for remote operations.
/// </summary>
/// <param name="channel">The <see cref="Channel"/> for remote operations. Must not be null.</param>
/// <param name="settings">Optional <see cref="LanguageServiceSettings"/>.</param>
/// <returns>The created <see cref="LanguageServiceClient"/>.</returns>
public static LanguageServiceClient Create(Channel channel, LanguageServiceSettings settings = null)
{
GaxPreconditions.CheckNotNull(channel, nameof(channel));
LanguageService.LanguageServiceClient grpcClient = new LanguageService.LanguageServiceClient(channel);
return new LanguageServiceClientImpl(grpcClient, settings);
}
/// <summary>
/// Shuts down any channels automatically created by <see cref="Create(ServiceEndpoint, LanguageServiceSettings)"/>
/// and <see cref="CreateAsync(ServiceEndpoint, LanguageServiceSettings)"/>. Channels which weren't automatically
/// created are not affected.
/// </summary>
/// <remarks>After calling this method, further calls to <see cref="Create(ServiceEndpoint, LanguageServiceSettings)"/>
/// and <see cref="CreateAsync(ServiceEndpoint, LanguageServiceSettings)"/> will create new channels, which could
/// in turn be shut down by another call to this method.</remarks>
/// <returns>A task representing the asynchronous shutdown operation.</returns>
public static Task ShutdownDefaultChannelsAsync() => s_channelPool.ShutdownChannelsAsync();
/// <summary>
/// The underlying gRPC LanguageService client.
/// </summary>
public virtual LanguageService.LanguageServiceClient GrpcClient
{
// The base class has no client; LanguageServiceClientImpl overrides this property.
get { throw new NotImplementedException(); }
}
/// <summary>
/// Analyzes the sentiment of the provided text.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeSentimentResponse> AnalyzeSentimentAsync(
Document document,
CallSettings callSettings = null) => AnalyzeSentimentAsync(
new AnalyzeSentimentRequest
{
Document = GaxPreconditions.CheckNotNull(document, nameof(document)),
},
callSettings);
/// <summary>
/// Analyzes the sentiment of the provided text.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="cancellationToken">
/// A <see cref="CancellationToken"/> to use for this RPC.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeSentimentResponse> AnalyzeSentimentAsync(
Document document,
CancellationToken cancellationToken) => AnalyzeSentimentAsync(
document,
CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Analyzes the sentiment of the provided text.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual AnalyzeSentimentResponse AnalyzeSentiment(
Document document,
CallSettings callSettings = null) => AnalyzeSentiment(
new AnalyzeSentimentRequest
{
Document = GaxPreconditions.CheckNotNull(document, nameof(document)),
},
callSettings);
/// <summary>
/// Analyzes the sentiment of the provided text.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeSentimentResponse> AnalyzeSentimentAsync(
AnalyzeSentimentRequest request,
CallSettings callSettings = null)
{
// Overridden in LanguageServiceClientImpl to perform the RPC.
throw new NotImplementedException();
}
/// <summary>
/// Analyzes the sentiment of the provided text.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual AnalyzeSentimentResponse AnalyzeSentiment(
AnalyzeSentimentRequest request,
CallSettings callSettings = null)
{
// Overridden in LanguageServiceClientImpl to perform the RPC.
throw new NotImplementedException();
}
/// <summary>
/// Finds named entities (currently proper names and common nouns) in the text
/// along with entity types, salience, mentions for each entity, and
/// other properties.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeEntitiesResponse> AnalyzeEntitiesAsync(
Document document,
EncodingType encodingType,
CallSettings callSettings = null) => AnalyzeEntitiesAsync(
new AnalyzeEntitiesRequest
{
Document = GaxPreconditions.CheckNotNull(document, nameof(document)),
EncodingType = encodingType,
},
callSettings);
/// <summary>
/// Finds named entities (currently proper names and common nouns) in the text
/// along with entity types, salience, mentions for each entity, and
/// other properties.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="cancellationToken">
/// A <see cref="CancellationToken"/> to use for this RPC.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeEntitiesResponse> AnalyzeEntitiesAsync(
Document document,
EncodingType encodingType,
CancellationToken cancellationToken) => AnalyzeEntitiesAsync(
document,
encodingType,
CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Finds named entities (currently proper names and common nouns) in the text
/// along with entity types, salience, mentions for each entity, and
/// other properties.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual AnalyzeEntitiesResponse AnalyzeEntities(
Document document,
EncodingType encodingType,
CallSettings callSettings = null) => AnalyzeEntities(
new AnalyzeEntitiesRequest
{
Document = GaxPreconditions.CheckNotNull(document, nameof(document)),
EncodingType = encodingType,
},
callSettings);
/// <summary>
/// Finds named entities (currently proper names and common nouns) in the text
/// along with entity types, salience, mentions for each entity, and
/// other properties.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeEntitiesResponse> AnalyzeEntitiesAsync(
AnalyzeEntitiesRequest request,
CallSettings callSettings = null)
{
// Overridden in LanguageServiceClientImpl to perform the RPC.
throw new NotImplementedException();
}
/// <summary>
/// Finds named entities (currently proper names and common nouns) in the text
/// along with entity types, salience, mentions for each entity, and
/// other properties.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual AnalyzeEntitiesResponse AnalyzeEntities(
AnalyzeEntitiesRequest request,
CallSettings callSettings = null)
{
// Overridden in LanguageServiceClientImpl to perform the RPC.
throw new NotImplementedException();
}
/// <summary>
/// Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes
/// sentiment associated with each entity and its mentions.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeEntitySentimentResponse> AnalyzeEntitySentimentAsync(
Document document,
EncodingType encodingType,
CallSettings callSettings = null) => AnalyzeEntitySentimentAsync(
new AnalyzeEntitySentimentRequest
{
Document = GaxPreconditions.CheckNotNull(document, nameof(document)),
EncodingType = encodingType,
},
callSettings);
/// <summary>
/// Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes
/// sentiment associated with each entity and its mentions.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="cancellationToken">
/// A <see cref="CancellationToken"/> to use for this RPC.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeEntitySentimentResponse> AnalyzeEntitySentimentAsync(
Document document,
EncodingType encodingType,
CancellationToken cancellationToken) => AnalyzeEntitySentimentAsync(
document,
encodingType,
CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes
/// sentiment associated with each entity and its mentions.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual AnalyzeEntitySentimentResponse AnalyzeEntitySentiment(
Document document,
EncodingType encodingType,
CallSettings callSettings = null) => AnalyzeEntitySentiment(
new AnalyzeEntitySentimentRequest
{
Document = GaxPreconditions.CheckNotNull(document, nameof(document)),
EncodingType = encodingType,
},
callSettings);
/// <summary>
/// Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes
/// sentiment associated with each entity and its mentions.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeEntitySentimentResponse> AnalyzeEntitySentimentAsync(
AnalyzeEntitySentimentRequest request,
CallSettings callSettings = null)
{
// Overridden in LanguageServiceClientImpl to perform the RPC.
throw new NotImplementedException();
}
/// <summary>
/// Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes
/// sentiment associated with each entity and its mentions.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual AnalyzeEntitySentimentResponse AnalyzeEntitySentiment(
AnalyzeEntitySentimentRequest request,
CallSettings callSettings = null)
{
// Overridden in LanguageServiceClientImpl to perform the RPC.
throw new NotImplementedException();
}
/// <summary>
/// Analyzes the syntax of the text and provides sentence boundaries and
/// tokenization along with part of speech tags, dependency trees, and other
/// properties.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeSyntaxResponse> AnalyzeSyntaxAsync(
Document document,
EncodingType encodingType,
CallSettings callSettings = null) => AnalyzeSyntaxAsync(
new AnalyzeSyntaxRequest
{
Document = GaxPreconditions.CheckNotNull(document, nameof(document)),
EncodingType = encodingType,
},
callSettings);
/// <summary>
/// Analyzes the syntax of the text and provides sentence boundaries and
/// tokenization along with part of speech tags, dependency trees, and other
/// properties.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="cancellationToken">
/// A <see cref="CancellationToken"/> to use for this RPC.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeSyntaxResponse> AnalyzeSyntaxAsync(
Document document,
EncodingType encodingType,
CancellationToken cancellationToken) => AnalyzeSyntaxAsync(
document,
encodingType,
CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Analyzes the syntax of the text and provides sentence boundaries and
/// tokenization along with part of speech tags, dependency trees, and other
/// properties.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual AnalyzeSyntaxResponse AnalyzeSyntax(
Document document,
EncodingType encodingType,
CallSettings callSettings = null) => AnalyzeSyntax(
new AnalyzeSyntaxRequest
{
Document = GaxPreconditions.CheckNotNull(document, nameof(document)),
EncodingType = encodingType,
},
callSettings);
/// <summary>
/// Analyzes the syntax of the text and provides sentence boundaries and
/// tokenization along with part of speech tags, dependency trees, and other
/// properties.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnalyzeSyntaxResponse> AnalyzeSyntaxAsync(
AnalyzeSyntaxRequest request,
CallSettings callSettings = null)
{
// Overridden in LanguageServiceClientImpl to perform the RPC.
throw new NotImplementedException();
}
/// <summary>
/// Analyzes the syntax of the text and provides sentence boundaries and
/// tokenization along with part of speech tags, dependency trees, and other
/// properties.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual AnalyzeSyntaxResponse AnalyzeSyntax(
AnalyzeSyntaxRequest request,
CallSettings callSettings = null)
{
// Overridden in LanguageServiceClientImpl to perform the RPC.
throw new NotImplementedException();
}
/// <summary>
/// A convenience method that provides all syntax, sentiment, and entity
/// features in one call.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="features">
/// The enabled features.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnnotateTextResponse> AnnotateTextAsync(
Document document,
AnnotateTextRequest.Types.Features features,
EncodingType encodingType,
CallSettings callSettings = null) => AnnotateTextAsync(
new AnnotateTextRequest
{
Document = GaxPreconditions.CheckNotNull(document, nameof(document)),
Features = GaxPreconditions.CheckNotNull(features, nameof(features)),
EncodingType = encodingType,
},
callSettings);
/// <summary>
/// A convenience method that provides all syntax, sentiment, and entity
/// features in one call.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="features">
/// The enabled features.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="cancellationToken">
/// A <see cref="CancellationToken"/> to use for this RPC.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnnotateTextResponse> AnnotateTextAsync(
Document document,
AnnotateTextRequest.Types.Features features,
EncodingType encodingType,
CancellationToken cancellationToken) => AnnotateTextAsync(
document,
features,
encodingType,
CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// A convenience method that provides all syntax, sentiment, and entity
/// features in one call.
/// </summary>
/// <param name="document">
/// Input document.
/// </param>
/// <param name="features">
/// The enabled features.
/// </param>
/// <param name="encodingType">
/// The encoding type used by the API to calculate offsets.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual AnnotateTextResponse AnnotateText(
Document document,
AnnotateTextRequest.Types.Features features,
EncodingType encodingType,
CallSettings callSettings = null) => AnnotateText(
new AnnotateTextRequest
{
Document = GaxPreconditions.CheckNotNull(document, nameof(document)),
Features = GaxPreconditions.CheckNotNull(features, nameof(features)),
EncodingType = encodingType,
},
callSettings);
/// <summary>
/// A convenience method that provides all syntax, sentiment, and entity
/// features in one call.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<AnnotateTextResponse> AnnotateTextAsync(
AnnotateTextRequest request,
CallSettings callSettings = null)
{
// Overridden in LanguageServiceClientImpl to perform the RPC.
throw new NotImplementedException();
}
/// <summary>
/// A convenience method that provides all syntax, sentiment, and entity
/// features in one call.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual AnnotateTextResponse AnnotateText(
AnnotateTextRequest request,
CallSettings callSettings = null)
{
// Overridden in LanguageServiceClientImpl to perform the RPC.
throw new NotImplementedException();
}
}
/// <summary>
/// LanguageService client wrapper implementation, for convenient use.
/// </summary>
public sealed partial class LanguageServiceClientImpl : LanguageServiceClient
{
// One ApiCall wrapper per RPC; each is built in the constructor from the
// corresponding per-method CallSettings of the effective settings object.
private readonly ApiCall<AnalyzeSentimentRequest, AnalyzeSentimentResponse> _callAnalyzeSentiment;
private readonly ApiCall<AnalyzeEntitiesRequest, AnalyzeEntitiesResponse> _callAnalyzeEntities;
private readonly ApiCall<AnalyzeEntitySentimentRequest, AnalyzeEntitySentimentResponse> _callAnalyzeEntitySentiment;
private readonly ApiCall<AnalyzeSyntaxRequest, AnalyzeSyntaxResponse> _callAnalyzeSyntax;
private readonly ApiCall<AnnotateTextRequest, AnnotateTextResponse> _callAnnotateText;
/// <summary>
/// Constructs a client wrapper for the LanguageService service, with the specified gRPC client and settings.
/// </summary>
/// <param name="grpcClient">The underlying gRPC client.</param>
/// <param name="settings">The base <see cref="LanguageServiceSettings"/> used within this client.</param>
public LanguageServiceClientImpl(LanguageService.LanguageServiceClient grpcClient, LanguageServiceSettings settings)
{
    GrpcClient = grpcClient;
    // Use default settings when the caller passed null.
    LanguageServiceSettings effective = settings ?? LanguageServiceSettings.GetDefault();
    ClientHelper helper = new ClientHelper(effective);
    // Wire up one ApiCall per RPC, pairing the async/sync gRPC stubs with the
    // per-method CallSettings from the effective settings.
    _callAnalyzeSentiment = helper.BuildApiCall<AnalyzeSentimentRequest, AnalyzeSentimentResponse>(
        GrpcClient.AnalyzeSentimentAsync, GrpcClient.AnalyzeSentiment, effective.AnalyzeSentimentSettings);
    _callAnalyzeEntities = helper.BuildApiCall<AnalyzeEntitiesRequest, AnalyzeEntitiesResponse>(
        GrpcClient.AnalyzeEntitiesAsync, GrpcClient.AnalyzeEntities, effective.AnalyzeEntitiesSettings);
    _callAnalyzeEntitySentiment = helper.BuildApiCall<AnalyzeEntitySentimentRequest, AnalyzeEntitySentimentResponse>(
        GrpcClient.AnalyzeEntitySentimentAsync, GrpcClient.AnalyzeEntitySentiment, effective.AnalyzeEntitySentimentSettings);
    _callAnalyzeSyntax = helper.BuildApiCall<AnalyzeSyntaxRequest, AnalyzeSyntaxResponse>(
        GrpcClient.AnalyzeSyntaxAsync, GrpcClient.AnalyzeSyntax, effective.AnalyzeSyntaxSettings);
    _callAnnotateText = helper.BuildApiCall<AnnotateTextRequest, AnnotateTextResponse>(
        GrpcClient.AnnotateTextAsync, GrpcClient.AnnotateText, effective.AnnotateTextSettings);
    // Give hand-written partial-class code a chance to run extra construction logic.
    OnConstruction(grpcClient, effective, helper);
}
// Partial method hook: a hand-written part of this partial class may implement it
// to run additional logic at the end of the constructor. No-op if unimplemented.
partial void OnConstruction(LanguageService.LanguageServiceClient grpcClient, LanguageServiceSettings effectiveSettings, ClientHelper clientHelper);
/// <summary>
/// The underlying gRPC LanguageService client.
/// </summary>
// Assigned exactly once, in the constructor.
public override LanguageService.LanguageServiceClient GrpcClient { get; }
// Partial modifier methods contain '_' to ensure no name conflicts with RPC methods.
// Each hook lets hand-written partial-class code adjust the request and/or the
// CallSettings (both passed by ref) before the corresponding RPC is invoked.
partial void Modify_AnalyzeSentimentRequest(ref AnalyzeSentimentRequest request, ref CallSettings settings);
partial void Modify_AnalyzeEntitiesRequest(ref AnalyzeEntitiesRequest request, ref CallSettings settings);
partial void Modify_AnalyzeEntitySentimentRequest(ref AnalyzeEntitySentimentRequest request, ref CallSettings settings);
partial void Modify_AnalyzeSyntaxRequest(ref AnalyzeSyntaxRequest request, ref CallSettings settings);
partial void Modify_AnnotateTextRequest(ref AnnotateTextRequest request, ref CallSettings settings);
/// <summary>
/// Analyzes the sentiment of the provided text.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override Task<AnalyzeSentimentResponse> AnalyzeSentimentAsync(AnalyzeSentimentRequest request, CallSettings callSettings = null)
{
    // Let the partial modifier hook adjust the request/settings before the call.
    Modify_AnalyzeSentimentRequest(ref request, ref callSettings);
    return _callAnalyzeSentiment.Async(request, callSettings);
}
/// <summary>
/// Analyzes the sentiment of the provided text.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override AnalyzeSentimentResponse AnalyzeSentiment(AnalyzeSentimentRequest request, CallSettings callSettings = null)
{
    // Let the partial modifier hook adjust the request/settings before the call.
    Modify_AnalyzeSentimentRequest(ref request, ref callSettings);
    return _callAnalyzeSentiment.Sync(request, callSettings);
}
/// <summary>
/// Finds named entities (currently proper names and common nouns) in the text
/// along with entity types, salience, mentions for each entity, and other properties.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override Task<AnalyzeEntitiesResponse> AnalyzeEntitiesAsync(AnalyzeEntitiesRequest request, CallSettings callSettings = null)
{
    // Let the partial modifier hook adjust the request/settings before the call.
    Modify_AnalyzeEntitiesRequest(ref request, ref callSettings);
    return _callAnalyzeEntities.Async(request, callSettings);
}
/// <summary>
/// Finds named entities (currently proper names and common nouns) in the text
/// along with entity types, salience, mentions for each entity, and other properties.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override AnalyzeEntitiesResponse AnalyzeEntities(AnalyzeEntitiesRequest request, CallSettings callSettings = null)
{
    // Let the partial modifier hook adjust the request/settings before the call.
    Modify_AnalyzeEntitiesRequest(ref request, ref callSettings);
    return _callAnalyzeEntities.Sync(request, callSettings);
}
/// <summary>
/// Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes
/// sentiment associated with each entity and its mentions.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override Task<AnalyzeEntitySentimentResponse> AnalyzeEntitySentimentAsync(AnalyzeEntitySentimentRequest request, CallSettings callSettings = null)
{
    // Let the partial modifier hook adjust the request/settings before the call.
    Modify_AnalyzeEntitySentimentRequest(ref request, ref callSettings);
    return _callAnalyzeEntitySentiment.Async(request, callSettings);
}
/// <summary>
/// Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes
/// sentiment associated with each entity and its mentions.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override AnalyzeEntitySentimentResponse AnalyzeEntitySentiment(AnalyzeEntitySentimentRequest request, CallSettings callSettings = null)
{
    // Let the partial modifier hook adjust the request/settings before the call.
    Modify_AnalyzeEntitySentimentRequest(ref request, ref callSettings);
    return _callAnalyzeEntitySentiment.Sync(request, callSettings);
}
/// <summary>
/// Analyzes the syntax of the text and provides sentence boundaries and
/// tokenization along with part of speech tags, dependency trees, and other properties.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override Task<AnalyzeSyntaxResponse> AnalyzeSyntaxAsync(AnalyzeSyntaxRequest request, CallSettings callSettings = null)
{
    // Let the partial modifier hook adjust the request/settings before the call.
    Modify_AnalyzeSyntaxRequest(ref request, ref callSettings);
    return _callAnalyzeSyntax.Async(request, callSettings);
}
/// <summary>
/// Analyzes the syntax of the text and provides sentence boundaries and
/// tokenization along with part of speech tags, dependency trees, and other
/// properties.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public override AnalyzeSyntaxResponse AnalyzeSyntax(
    AnalyzeSyntaxRequest request,
    CallSettings callSettings = null)
{
    // Give partial-class hooks a chance to adjust the request/settings before dispatch.
    Modify_AnalyzeSyntaxRequest(ref request, ref callSettings);
    // Issue the RPC synchronously (blocks until the response arrives).
    return _callAnalyzeSyntax.Sync(request, callSettings);
}
/// <summary>
/// A convenience method that provides all syntax, sentiment, and entity
/// features in one call.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public override Task<AnnotateTextResponse> AnnotateTextAsync(
    AnnotateTextRequest request,
    CallSettings callSettings = null)
{
    // Give partial-class hooks a chance to adjust the request/settings before dispatch.
    Modify_AnnotateTextRequest(ref request, ref callSettings);
    // Issue the RPC asynchronously through the pre-configured call wrapper.
    return _callAnnotateText.Async(request, callSettings);
}
/// <summary>
/// A convenience method that provides all syntax, sentiment, and entity
/// features in one call.
/// </summary>
/// <param name="request">
/// The request object containing all of the parameters for the API call.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public override AnnotateTextResponse AnnotateText(
    AnnotateTextRequest request,
    CallSettings callSettings = null)
{
    // Give partial-class hooks a chance to adjust the request/settings before dispatch.
    Modify_AnnotateTextRequest(ref request, ref callSettings);
    // Issue the RPC synchronously (blocks until the response arrives).
    return _callAnnotateText.Sync(request, callSettings);
}
}
// Partial classes to enable page-streaming
}
| mbrukman/gcloud-dotnet | apis/Google.Cloud.Language.V1.Experimental/Google.Cloud.Language.V1.Experimental/LanguageServiceClient.cs | C# | apache-2.0 | 56,098 |
<?php
namespace Admin\Controller;
use Admin\Controller\BaseController;
class FlagController extends BaseController
{
    /**
     * Toggle a flag on or off.
     *
     * Reads `id` and `is_enable` from the request. Both are cast to int
     * before use because they are untrusted user input that ends up in a
     * query condition.
     *
     * Responds via success()/error() (which terminate the request).
     *
     * @method is_enable
     * @return void
     */
    public function is_enable()
    {
        // Default to "enabled" when the switch value is omitted.
        $is_enable = intval(I('is_enable', 1));
        $id = intval(I('id', 0));
        if (empty($id)) {
            $this->error('数据错误');
        }
        $model = D('Flag');
        $map = array('id' => $id);
        $result = $model->where($map)->setField('is_enable', $is_enable);
        // setField() returns false on failure; 0 affected rows is not an error.
        if ($result !== false) {
            $this->success('操作成功');
        } else {
            $this->error('操作失败');
        }
    }

    /**
     * Pre-delete hook: refuse to delete a flag that is still referenced
     * by at least one article mapping.
     *
     * Responds via success()/error() (which terminate the request).
     *
     * @return void
     */
    public function _before_del()
    {
        // Cast the untrusted request value before using it in the query map.
        $flag_id = intval(I('flag_id', 0));
        $map = array('flag_id' => $flag_id);
        $info = D('ArticleFlagMap')->_get($map);
        if ($info) {
            $this->error('还有文章应用此标识,请删除对应文章后再删除');
        } else {
            $this->success('操作成功');
        }
    }
}
| a3147972/blog | App/Admin/Controller/FlagController.class.php | PHP | apache-2.0 | 1,008 |
// jQuery List DragSort v0.4
// Website: http://dragsort.codeplex.com/
// License: http://dragsort.codeplex.com/license
(function($) {
$.fn.dragsort = function(options) {
var opts = $.extend({}, $.fn.dragsort.defaults, options);
var lists = [];
var list = null, lastPos = null;
if (this.selector)
$("head").append("<style type='text/css'>" + (this.selector.split(",").join(" " + opts.dragSelector + ",") + " " + opts.dragSelector) + " { cursor: pointer; }</style>");
this.each(function(i, cont) {
if ($(cont).is("table") && $(cont).children().size() == 1 && $(cont).children().is("tbody"))
cont = $(cont).children().get(0);
var newList = {
draggedItem: null,
placeHolderItem: null,
pos: null,
offset: null,
offsetLimit: null,
scroll: null,
container: cont,
init: function() {
$(this.container).attr("data-listIdx", i).mousedown(this.grabItem).find(opts.dragSelector).css("cursor", "pointer");
$(this.container).children(opts.itemSelector).each(function(j) { $(this).attr("data-itemIdx", j); });
},
grabItem: function(e) {
if (e.which != 1 || $(e.target).is(opts.dragSelectorExclude))
return;
var elm = e.target;
while (!$(elm).is("[data-listIdx='" + $(this).attr("data-listIdx") + "'] " + opts.dragSelector)) {
if (elm == this) return;
elm = elm.parentNode;
}
if (list != null && list.draggedItem != null)
list.dropItem();
$(e.target).css("cursor", "move");
list = lists[$(this).attr("data-listIdx")];
list.draggedItem = $(elm).closest(opts.itemSelector);
var mt = parseInt(list.draggedItem.css("marginTop"));
var ml = parseInt(list.draggedItem.css("marginLeft"));
list.offset = list.draggedItem.offset();
list.offset.top = e.pageY - list.offset.top + (isNaN(mt) ? 0 : mt) - 1;
list.offset.left = e.pageX - list.offset.left + (isNaN(ml) ? 0 : ml) - 1;
if (!opts.dragBetween) {
var containerHeight = $(list.container).outerHeight() == 0 ? Math.max(1, Math.round(0.5 + $(list.container).children(opts.itemSelector).size() * list.draggedItem.outerWidth() / $(list.container).outerWidth())) * list.draggedItem.outerHeight() : $(list.container).outerHeight();
list.offsetLimit = $(list.container).offset();
list.offsetLimit.right = list.offsetLimit.left + $(list.container).outerWidth() - list.draggedItem.outerWidth();
list.offsetLimit.bottom = list.offsetLimit.top + containerHeight - list.draggedItem.outerHeight();
}
var h = list.draggedItem.height();
var w = list.draggedItem.width();
var orig = list.draggedItem.attr("style");
list.draggedItem.attr("data-origStyle", orig ? orig : "");
if (opts.itemSelector == "tr") {
list.draggedItem.children().each(function() { $(this).width($(this).width()); });
list.placeHolderItem = list.draggedItem.clone().attr("data-placeHolder", true);
list.draggedItem.after(list.placeHolderItem);
list.placeHolderItem.children().each(function() { $(this).css({ borderWidth:0, width: $(this).width() + 1, height: $(this).height() + 1 }).html(" "); });
} else {
list.draggedItem.after(opts.placeHolderTemplate);
list.placeHolderItem = list.draggedItem.next().css({ height: h, width: w }).attr("data-placeHolder", true);
}
list.draggedItem.css({ position: "absolute", opacity: 0.8, "z-index": 999, height: h, width: w });
$(lists).each(function(i, l) { l.createDropTargets(); l.buildPositionTable(); });
list.scroll = { moveX: 0, moveY: 0, maxX: $(document).width() - $(window).width(), maxY: $(document).height() - $(window).height() };
list.scroll.scrollY = window.setInterval(function() {
if (opts.scrollContainer != window) {
$(opts.scrollContainer).scrollTop($(opts.scrollContainer).scrollTop() + list.scroll.moveY);
return;
}
var t = $(opts.scrollContainer).scrollTop();
if (list.scroll.moveY > 0 && t < list.scroll.maxY || list.scroll.moveY < 0 && t > 0) {
$(opts.scrollContainer).scrollTop(t + list.scroll.moveY);
list.draggedItem.css("top", list.draggedItem.offset().top + list.scroll.moveY + 1);
}
}, 10);
list.scroll.scrollX = window.setInterval(function() {
if (opts.scrollContainer != window) {
$(opts.scrollContainer).scrollLeft($(opts.scrollContainer).scrollLeft() + list.scroll.moveX);
return;
}
var l = $(opts.scrollContainer).scrollLeft();
if (list.scroll.moveX > 0 && l < list.scroll.maxX || list.scroll.moveX < 0 && l > 0) {
$(opts.scrollContainer).scrollLeft(l + list.scroll.moveX);
list.draggedItem.css("left", list.draggedItem.offset().left + list.scroll.moveX + 1);
}
}, 10);
list.setPos(e.pageX, e.pageY);
$(document).bind("selectstart", list.stopBubble); //stop ie text selection
$(document).bind("mousemove", list.swapItems);
$(document).bind("mouseup", list.dropItem);
if (opts.scrollContainer != window)
$(window).bind("DOMMouseScroll mousewheel", list.wheel);
return false; //stop moz text selection
},
setPos: function(x, y) {
var top = y - this.offset.top;
var left = x - this.offset.left;
if (!opts.dragBetween) {
top = Math.min(this.offsetLimit.bottom, Math.max(top, this.offsetLimit.top));
left = Math.min(this.offsetLimit.right, Math.max(left, this.offsetLimit.left));
}
this.draggedItem.parents().each(function() {
if ($(this).css("position") != "static" && (!$.browser.mozilla || $(this).css("display") != "table")) {
var offset = $(this).offset();
top -= offset.top;
left -= offset.left;
return false;
}
});
if (opts.scrollContainer == window) {
y -= $(window).scrollTop();
x -= $(window).scrollLeft();
y = Math.max(0, y - $(window).height() + 5) + Math.min(0, y - 5);
x = Math.max(0, x - $(window).width() + 5) + Math.min(0, x - 5);
} else {
var cont = $(opts.scrollContainer);
var offset = cont.offset();
y = Math.max(0, y - cont.height() - offset.top) + Math.min(0, y - offset.top);
x = Math.max(0, x - cont.width() - offset.left) + Math.min(0, x - offset.left);
}
list.scroll.moveX = x == 0 ? 0 : x * opts.scrollSpeed / Math.abs(x);
list.scroll.moveY = y == 0 ? 0 : y * opts.scrollSpeed / Math.abs(y);
this.draggedItem.css({ top: top, left: left });
},
wheel: function(e) {
if (($.browser.safari || $.browser.mozilla) && list && opts.scrollContainer != window) {
var cont = $(opts.scrollContainer);
var offset = cont.offset();
if (e.pageX > offset.left && e.pageX < offset.left + cont.width() && e.pageY > offset.top && e.pageY < offset.top + cont.height()) {
var delta = e.detail ? e.detail * 5 : e.wheelDelta / -2;
cont.scrollTop(cont.scrollTop() + delta);
e.preventDefault();
}
}
},
buildPositionTable: function() {
var item = this.draggedItem == null ? null : this.draggedItem.get(0);
var pos = [];
$(this.container).children(opts.itemSelector).each(function(i, elm) {
if (elm != item) {
var loc = $(elm).offset();
loc.right = loc.left + $(elm).width();
loc.bottom = loc.top + $(elm).height();
loc.elm = elm;
pos.push(loc);
}
});
this.pos = pos;
},
dropItem: function() {
if (list.draggedItem == null)
return;
$(list.container).find(opts.dragSelector).css("cursor", "pointer");
list.placeHolderItem.before(list.draggedItem);
//list.draggedItem.attr("style", "") doesn't work on IE8 and jQuery 1.5 or lower
//list.draggedItem.removeAttr("style") doesn't work on chrome and jQuery 1.6 (works jQuery 1.5 or lower)
var orig = list.draggedItem.attr("data-origStyle");
list.draggedItem.attr("style", orig);
if (orig == "")
list.draggedItem.removeAttr("style");
list.draggedItem.removeAttr("data-origStyle");
list.placeHolderItem.remove();
$("[data-dropTarget]").remove();
window.clearInterval(list.scroll.scrollY);
window.clearInterval(list.scroll.scrollX);
var changed = false;
$(lists).each(function() {
$(this.container).children(opts.itemSelector).each(function(j) {
if (parseInt($(this).attr("data-itemIdx")) != j) {
changed = true;
$(this).attr("data-itemIdx", j);
}
});
});
if (changed)
opts.dragEnd.apply(list.draggedItem);
list.draggedItem = null;
$(document).unbind("selectstart", list.stopBubble);
$(document).unbind("mousemove", list.swapItems);
$(document).unbind("mouseup", list.dropItem);
if (opts.scrollContainer != window)
$(window).unbind("DOMMouseScroll mousewheel", list.wheel);
return false;
},
stopBubble: function() { return false; },
swapItems: function(e) {
if (list.draggedItem == null)
return false;
list.setPos(e.pageX, e.pageY);
var ei = list.findPos(e.pageX, e.pageY);
var nlist = list;
for (var i = 0; ei == -1 && opts.dragBetween && i < lists.length; i++) {
ei = lists[i].findPos(e.pageX, e.pageY);
nlist = lists[i];
}
if (ei == -1 || $(nlist.pos[ei].elm).attr("data-placeHolder"))
return false;
if (lastPos == null || lastPos.top > list.draggedItem.offset().top || lastPos.left > list.draggedItem.offset().left)
$(nlist.pos[ei].elm).before(list.placeHolderItem);
else
$(nlist.pos[ei].elm).after(list.placeHolderItem);
$(lists).each(function(i, l) { l.createDropTargets(); l.buildPositionTable(); });
lastPos = list.draggedItem.offset();
return false;
},
findPos: function(x, y) {
for (var i = 0; i < this.pos.length; i++) {
if (this.pos[i].left < x && this.pos[i].right > x && this.pos[i].top < y && this.pos[i].bottom > y)
return i;
}
return -1;
},
createDropTargets: function() {
if (!opts.dragBetween)
return;
$(lists).each(function() {
var ph = $(this.container).find("[data-placeHolder]");
var dt = $(this.container).find("[data-dropTarget]");
if (ph.size() > 0 && dt.size() > 0)
dt.remove();
else if (ph.size() == 0 && dt.size() == 0) {
//list.placeHolderItem.clone().removeAttr("data-placeHolder") crashes in IE7 and jquery 1.5.1 (doesn't in jquery 1.4.2 or IE8)
$(this.container).append(list.placeHolderItem.removeAttr("data-placeHolder").clone().attr("data-dropTarget", true));
list.placeHolderItem.attr("data-placeHolder", true);
}
});
}
};
newList.init();
lists.push(newList);
});
return this;
};
$.fn.dragsort.defaults = {
itemSelector: "li",
dragSelector: "li",
dragSelectorExclude: "input, textarea, a[href]",
dragEnd: function() { },
dragBetween: false,
placeHolderTemplate: "<li> </li>",
scrollContainer: window,
scrollSpeed: 5
};
})(jQuery);
| coraldane/ops-meta | static/javascript/jquery.plugin/jquery.dragsort.js | JavaScript | apache-2.0 | 11,046 |
# Copyright 2022 The ML Collections Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for config_path."""
from absl.testing import absltest
from absl.testing import parameterized
from ml_collections.config_flags import config_path
from ml_collections.config_flags.tests import fieldreference_config
from ml_collections.config_flags.tests import mock_config
class ConfigPathTest(parameterized.TestCase):
  """Exercises error handling and type resolution in config_path."""

  def test_list_extra_index(self):
    """Tries to index a non-indexable list element."""
    cfg = mock_config.get_config()
    with self.assertRaises(IndexError):
      config_path.get_value('dict.list[0][0]', cfg)

  def test_list_out_of_range_get(self):
    """Tries to access out-of-range value in list."""
    cfg = mock_config.get_config()
    with self.assertRaises(IndexError):
      config_path.get_value('dict.list[2][1]', cfg)

  def test_list_out_of_range_set(self):
    """Tries to override out-of-range value in list."""
    cfg = mock_config.get_config()
    with self.assertRaises(IndexError):
      config_path.set_value('dict.list[2][1]', cfg, -1)

  def test_reading_non_existing_key(self):
    """Tests reading non existing key from config."""
    cfg = mock_config.get_config()
    with self.assertRaises(KeyError):
      config_path.set_value('dict.not_existing_key', cfg, 1)

  def test_reading_setting_existing_key_in_dict(self):
    """Tests setting non existing key from dict inside config."""
    cfg = mock_config.get_config()
    with self.assertRaises(KeyError):
      config_path.set_value('dict.not_existing_key.key', cfg, 1)

  def test_empty_key(self):
    """Tests calling an empty key update."""
    cfg = mock_config.get_config()
    with self.assertRaises(ValueError):
      config_path.set_value('', cfg, None)

  def test_field_reference_types(self):
    """Tests whether types of FieldReference fields are valid."""
    cfg = fieldreference_config.get_config()
    # Both references are expected to resolve to int.
    expected = {'ref_nodefault': int, 'ref': int}
    actual = {path: config_path.get_type(path, cfg) for path in expected}
    self.assertEqual(expected, actual)

  @parameterized.parameters(
      ('float', float),
      ('integer', int),
      ('string', str),
      ('bool', bool),
      ('dict', dict),
      ('dict.float', float),
      ('dict.list', list),
      ('list', list),
      ('list[0]', int),
      ('object.float', float),
      ('object.integer', int),
      ('object.string', str),
      ('object.bool', bool),
      ('object.dict', dict),
      ('object.dict.float', float),
      ('object.dict.list', list),
      ('object.list', list),
      ('object.list[0]', int),
      ('object.tuple', tuple),
      ('object_reference.float', float),
      ('object_reference.integer', int),
      ('object_reference.string', str),
      ('object_reference.bool', bool),
      ('object_reference.dict', dict),
      ('object_reference.dict.float', float),
      ('object_copy.float', float),
      ('object_copy.integer', int),
      ('object_copy.string', str),
      ('object_copy.bool', bool),
      ('object_copy.dict', dict),
      ('object_copy.dict.float', float),
  )
  def test_types(self, path, path_type):
    """Tests whether various types of objects are valid."""
    cfg = mock_config.get_config()
    self.assertEqual(path_type, config_path.get_type(path, cfg))
# Allow running this test file directly (python config_path_test.py).
if __name__ == '__main__':
  absltest.main()
| google/ml_collections | ml_collections/config_flags/tests/config_path_test.py | Python | apache-2.0 | 4,017 |
#!/usr/bin/python2.7
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create an API definition by interpreting a discovery document.
This module interprets a discovery document to create a tree of classes which
represent the API structure in a way that is useful for generating a library.
For each discovery element (e.g. schemas, resources, methods, ...) there is
a class to represent it which is directly usable in the templates. The
instances of those classes are annotated with extra variables for use
in the template which are language specific.
The current way to make use of this class is to create a programming language
specific subclass of Api, which adds annotations and template variables
appropriate for that language.
TODO(user): Refactor this so that the API can be loaded first, then annotated.
"""
__author__ = 'aiuto@google.com (Tony Aiuto)'
import json
import logging
import operator
import urlparse
from googleapis.codegen import data_types
from googleapis.codegen import template_objects
from googleapis.codegen import utilities
from googleapis.codegen.api_exception import ApiException
from googleapis.codegen.schema import Schema
from googleapis.codegen.utilities import convert_size
# Fallbacks used when the discovery document does not specify its own
# service host or ownership information.
_DEFAULT_SERVICE_HOST = 'www.googleapis.com'
_DEFAULT_OWNER_DOMAIN = 'google.com'
_DEFAULT_OWNER_NAME = 'Google'

# Owner domains that are normalized to the canonical 'google.com'.
_RECOGNIZED_GOOGLE_DOMAINS = (
    'google.com',
    'googleapis.com',
    'googleplex.com'
    )

# Recognized names of request and response fields used for paging.
_PAGE_TOKEN_NAMES = ('pageToken', 'nextPageToken')

_LOGGER = logging.getLogger('codegen')
class Api(template_objects.CodeObject):
  """An API definition.

  This class holds a discovery centric definition of an API. It contains
  members such as "resources" and "schemas" which relate directly to discovery
  concepts. It defines several properties that can be used in code generation
  templates:
    name: The API name.
    version: The API version.
    versionNoDots: The API version with all '.' characters replaced with '_'.
        This is typically used in class names.
    versionNoDash: The API version with all '-' characters replaced with '_'.
        This is typically used in file names where '-' has meaning.
    authScopes: The list of the OAuth scopes used by this API.
    dataWrapper: True if the API definition contains the 'dataWrapper' feature.
    methods: The list of top level API methods.
    models: The list of API data models, both from the schema section of
        discovery and from anonymous objects defined in method definitions.
    parameters: The list of global method parameters (applicable to all methods)
    resources: The list of API resources
  """
  def __init__(self, discovery_doc, language=None):
    """Builds the whole API model from a parsed discovery document.

    Args:
      discovery_doc: (dict) The parsed discovery document.
      language: (str) Target language name, kept for language subclasses.
    """
    super(Api, self).__init__(discovery_doc, self,
                              wire_name=discovery_doc['name'])
    name = self.values['name']
    self._validator.ValidateApiName(name)
    # NOTE(review): 'freebase' appears to be exempted from version
    # validation — confirm before tightening.
    if name != 'freebase':
      self._validator.ValidateApiVersion(self.values['version'])
    canonical_name = self.values.get('canonicalName') or name
    if not self.values.get('canonicalName'):
      self.values['canonicalName'] = canonical_name

    self._class_name = self.ToClassName(canonical_name, self)
    # Guard against language implementor not taking care of spaces
    self._class_name = self._class_name.replace(' ', '')
    self._NormalizeOwnerInformation()
    self._language = language
    self._template_dir = None
    self._surface_features = {}
    self._schemas = {}
    self._methods_by_name = {}
    self._all_methods = []

    # Derived template values: class name and version spellings safe for
    # identifiers ('.' -> '_') and file names ('-' -> '_').
    self.SetTemplateValue('className', self._class_name)
    self.SetTemplateValue('versionNoDots',
                          self.values['version'].replace('.', '_'))
    self.SetTemplateValue('versionNoDash',
                          self.values['version'].replace('-', '_'))
    self.SetTemplateValue('dataWrapper',
                          'dataWrapper' in discovery_doc.get('features', []))
    self.values.setdefault('title', name)
    self.values.setdefault('exponentialBackoffDefault', False)
    if not self.values.get('revision'):
      self.values['revision'] = 'snapshot'
    self._NormalizeUrlComponents()

    # Information for variant subtypes, a dictionary of the format:
    #
    #  { 'wireName': {'discriminant': discriminant, 'value': value,
    #                 'schema': schema},
    #    ... }
    #
    # ... where wireName is the name of variant subtypes, discriminant
    # the field name of the discriminant, value the discriminant value
    # for this variant, and schema the base schema.
    #
    # This information cannot be stored in the referred schema at
    # reading time because at the time we read it from the base
    # schema, the referenced variant schemas may not yet be loaded. So
    # we first store it here, and after all schemas have been loaded,
    # update the schema template properties.
    self._variant_info = {}

    # Build data types and methods
    self._SetupModules()
    self.void_type = data_types.Void(self)
    self._BuildSchemaDefinitions()
    self._BuildResourceDefinitions()
    self.SetTemplateValue('resources', self._resources)

    # Make data models part of the api dictionary
    self.SetTemplateValue('models', self.ModelClasses())

    # Replace methods dict with Methods
    self._top_level_methods = []
    method_dict = self.values.get('methods') or {}
    for name in sorted(method_dict):
      self._top_level_methods.append(Method(self, name, method_dict[name]))
    self.SetTemplateValue('methods', self._top_level_methods)

    # Global parameters
    self._parameters = []
    param_dict = self.values.get('parameters') or {}
    for name in sorted(param_dict):
      parameter = Parameter(self, name, param_dict[name], self)
      self._parameters.append(parameter)
      # The 'alt' parameter is exposed separately for templates.
      if name == 'alt':
        self.SetTemplateValue('alt', parameter)
    self.SetTemplateValue('parameters', self._parameters)

    # Auth scopes
    self._authscopes = []
    if (self.values.get('auth') and
        self.values['auth'].get('oauth2') and
        self.values['auth']['oauth2'].get('scopes')):
      for value, auth_dict in sorted(
          self.values['auth']['oauth2']['scopes'].iteritems()):
        self._authscopes.append(AuthScope(self, value, auth_dict))
      self.SetTemplateValue('authscopes', self._authscopes)
  @property
  def all_schemas(self):
    """The dictionary of all the schema objects found in the API.

    Keyed by schema name, or by dotted path (e.g. 'Activity.person') for
    nested anonymous types registered via DataTypeFromJson.
    """
    return self._schemas
  def _SetupModules(self):
    """Compute and set the module(s) which this API belongs under.

    Creates three nested modules and publishes them as template values:
    'containingModule' (owner-level), 'module' (API-level), and the default
    module for generated data models.
    """
    # The containing module is based on the owner information.
    path = self.values.get('modulePath') or self.values.get('packagePath')
    self._containing_module = template_objects.Module(
        package_path=path,
        owner_name=self.values.get('owner'),
        owner_domain=self.values.get('ownerDomain'))
    self.SetTemplateValue('containingModule', self._containing_module)

    # The API is a child of the containing_module
    base = self.values['name']
    # TODO(user): Introduce a breaking change where we always prefer
    # canonicalName.
    if self.values.get('packagePath'):
      # Lowercase the canonical name only for non-cloud-endpoints Google APIs.
      # This is to avoid breaking changes to existing Google-owned Cloud
      # Endpoints APIs.
      if self.values.get('rootUrl').find('.googleapis.com') > 0:
        base = self.values.get('canonicalName').lower() or base
      else:
        base = self.values.get('canonicalName') or base

    # Optionally nest the API module under a version sub-module.
    if self.values.get('version_module'):
      base = '%s/%s' % (base, self.values['versionNoDots'])
    self._module = template_objects.Module(package_path=base,
                                           parent=self._containing_module)
    self.SetTemplateValue('module', self._module)

    # The default module for data models defined by this API.
    self._model_module = template_objects.Module(package_path=None,
                                                 parent=self._module)
def _BuildResourceDefinitions(self):
"""Loop over the resources in the discovery doc and build definitions."""
self._resources = []
def_dict = self.values.get('resources') or {}
for name in sorted(def_dict):
resource = Resource(self, name, def_dict[name], parent=self)
self._resources.append(resource)
  def _BuildSchemaDefinitions(self):
    """Loop over the schemas in the discovery doc and build definitions.

    After all schemas are loaded, late-binds the variant information that
    __init__ collected in self._variant_info onto the variant schemas
    (superclass, discriminant value, filtered properties).
    """
    schemas = self.values.get('schemas')
    if schemas:
      for name in sorted(schemas):
        def_dict = schemas[name]
        # Upgrade the string format schema to a dict.
        if isinstance(def_dict, unicode):
          def_dict = json.loads(def_dict)
        self._schemas[name] = self.DataTypeFromJson(def_dict, name)

    # Late bind info for variant types, and mark the discriminant
    # field and value.
    for name, info in self._variant_info.iteritems():
      if name not in self._schemas:
        # The error will be reported elsewhere
        continue
      schema = self._schemas[name]
      for prop in schema.values.get('properties'):
        if prop.values['wireName'] == info['discriminant']:
          # Filter out the discriminant property as it is already
          # contained in the base type.
          schema.SetTemplateValue(
              'properties',
              [p for p in schema.values.get('properties') if p != prop])
          break
      else:
        # for/else: no property matched the expected discriminant name.
        logging.warn("Variant schema '%s' for base schema '%s' "
                     "has not the expected discriminant property '%s'.",
                     name, info['schema'].values['wireName'],
                     info['discriminant'])
      schema.SetTemplateValue('superClass', info['schema'].class_name)
      # TODO(user): baseType is for backwards compatability only. It should
      # have always been a different name. When the old Java generators roll
      # off, remove it.
      schema.SetTemplateValue('baseType', info['schema'].class_name)
      schema.SetTemplateValue('discriminantValue', info['value'])
  def _NormalizeOwnerInformation(self):
    """Ensure that owner and ownerDomain are set to sane values.

    Falls back to the rootUrl hostname for the domain, collapses known
    Google domains to 'google.com', and derives ownerName/owner when the
    discovery document omits them.
    """
    owner_domain = self.get('ownerDomain', '')
    if not owner_domain:
      root_url = self.get('rootUrl')
      if root_url:
        owner_domain = urlparse.urlparse(root_url).hostname
        # Normalize google domains.
        if any(owner_domain.endswith(d) for d in _RECOGNIZED_GOOGLE_DOMAINS):
          owner_domain = 'google.com'
    if owner_domain:
      owner_domain = utilities.SanitizeDomain(owner_domain)
    else:
      owner_domain = _DEFAULT_OWNER_DOMAIN

    self.SetTemplateValue('ownerDomain', owner_domain)
    if not self.get('ownerName'):
      if owner_domain == _DEFAULT_OWNER_DOMAIN:
        owner_name = _DEFAULT_OWNER_NAME
      else:
        # e.g. 'example.com' -> 'example_com'.
        owner_name = owner_domain.replace('.', '_')
      self.SetTemplateValue('ownerName', owner_name)
    if not self.get('owner'):
      self.SetTemplateValue('owner', self['ownerName'].lower())
  def _NormalizeUrlComponents(self):
    """Sets template values concerning the path to the service.

    Sets rootUrl and servicePath from the values given or defaults based on what
    is available. Verifies them for safeness.  The hierarchy of the possible
    inputs is:
      use rootUrl + servicePath as the best choice if it exists (v1new)
      or rpcPath
      or use baseUrl (v1)
      or use basePath (v1)
      or restBasePath (v0.3)
      or default to 'api/version'

    Raises:
      ValueError: if the values available are inconsistent or disallowed.
    """
    # If both rootUrl and servicePath exist, they equal what is in baseUrl.
    root_url = self.values.get('rootUrl')
    service_path = self.values.get('servicePath')
    rpc_path = self.values.get('rpcPath')
    if root_url:
      # oauth2 has a servicePath of "". This is wierd but OK for that API, but
      # it means we must explicitly check against None.
      if service_path is not None:
        base_url = root_url + service_path
      elif rpc_path:
        base_url = rpc_path
      else:
        raise ValueError('Neither servicePath nor rpcPath is defined.')
    else:
      base_url = self.values.get('baseUrl')

    # If we have a full path ('https://superman.appspot.com/kryptonite/hurts'),
    # then go with that, otherwise just use the various things which might
    # hint at the servicePath.
    best_path = (base_url
                 or self.values.get('basePath')
                 or self.values.get('restBasePath')
                 or '/%s/%s/' % (self.values['name'], self.values['version']))
    # Reject path traversal in the discovery-provided path.
    if best_path.find('..') >= 0:
      raise ValueError('api path must not contain ".." (%s)' % best_path)
    # And let urlparse to the grunt work of normalizing and parsing.
    url_parts = urlparse.urlparse(best_path)
    scheme = url_parts.scheme or 'https'
    service_host = url_parts.netloc or _DEFAULT_SERVICE_HOST
    base_path = url_parts.path
    # NOTE(review): self._api is this object itself — confirm against
    # CodeObject's constructor contract.
    if not root_url:
      self._api.SetTemplateValue('rootUrl', '%s://%s/' % (scheme, service_host))
    if service_path is None:
      self._api.SetTemplateValue('servicePath', base_path[1:])

    # Make sure template writers do not revert
    self._api.DeleteTemplateValue('baseUrl')
    self._api.DeleteTemplateValue('basePath')
    self._api.DeleteTemplateValue('serviceHost')
def ModelClasses(self):
"""Return all the model classes."""
ret = set(
s for s in self._schemas.itervalues()
if isinstance(s, Schema) or isinstance(s, data_types.MapDataType))
return sorted(ret, key=operator.attrgetter('class_name'))
def TopLevelModelClasses(self):
"""Return the models which are not children of another model."""
return [m for m in self.ModelClasses() if not m.parent]
  def DataTypeFromJson(self, type_dict, default_name, parent=None,
                       wire_name=None):
    """Returns a schema object represented by a JSON Schema dictionary.

    Evaluate a JSON schema dictionary and return an appropriate schema object.
    If a data type is defined in-line, then create the schema dynamically. If
    the schema is a $ref to another, return the previously created schema or
    a lazy reference.

    If the type_dict is None, a blank schema will be created.

    Args:
      type_dict: A dict of the form expected of a request or response member
        of a method description.   See the Discovery specification for more.
      default_name: The unique name to give the schema if we have to create it.
      parent: The schema where I was referenced. If we cannot determine that
        this is a top level schema, set the parent to this.
      wire_name: The name which will identify objects of this type in data on
        the wire.

    Returns:
      A Schema object.
    """

    # new or not initialized, create a fresh one
    schema = Schema.Create(self, default_name, type_dict or {}, wire_name,
                           parent)

    # Only put it in our by-name list if it is a real object
    if isinstance(schema, Schema) or isinstance(schema, data_types.MapDataType):
      # Use the path to the schema as a key. This means that an anonymous class
      # for the 'person' property under the schema 'Activity' will have the
      # unique name 'Activity.person', rather than 'ActivityPerson'.
      path = '.'.join(
          [a.values.get('wireName', '<anon>') for a in schema.full_path])
      _LOGGER.debug('DataTypeFromJson: add %s to cache', path)
      self._schemas[path] = schema
    return schema
def AddMethod(self, method):
"""Add a new method to the set of all methods."""
self._all_methods.append(method)
self._methods_by_name[method.values['rpcMethod']] = method
def MethodByName(self, method_name):
"""Find a method by name.
Args:
method_name: (str) the full RPC name of a method defined by this API.
Returns:
Method object or None if not found.
"""
return self._methods_by_name.get(method_name)
def SchemaByName(self, schema_name):
"""Find a schema by name.
Args:
schema_name: (str) name of a schema defined by this API.
Returns:
Schema object or None if not found.
"""
return self._schemas.get(schema_name, None)
  def SetVariantInfo(self, ref, discriminant, value, schema):
    """Sets variant info for the given reference.

    Args:
      ref: (str) The schema reference being registered as a variant.
      discriminant: (str) Name of the discriminant property.
      value: Discriminant value associated with this variant.
      schema: (Schema) The base schema this variant belongs to.
    """
    # A second registration for the same ref means two different base schemas
    # claim the same variant; warn, then overwrite with the newest entry.
    if ref in self._variant_info:
      # NOTE(review): this path assumes self._base_type has an entry for ref;
      # confirm, otherwise the warning itself would raise KeyError.
      logging.warning("Base type of '%s' changed from '%s' to '%s'. "
                      "This is an indication that a variant schema is used "
                      "from multiple base schemas and may result in an "
                      "inconsistent model.",
                      ref, self._base_type[ref].wireName, schema.wireName)
    self._variant_info[ref] = {'discriminant': discriminant, 'value': value,
                               'schema': schema}
  def VisitAll(self, func):
    """Visit all nodes of an API tree and apply a function to each.

    Walks a tree and calls a function on each element of it. This should be
    called after the API is fully loaded.

    Args:
      func: (function) Method to call on each object.
    """
    _LOGGER.debug('Applying function to all nodes')
    # Modules first, then resources (recursively), top level methods,
    # parameters, schemas, and finally auth scopes.
    func(self._containing_module)
    func(self._module)
    func(self._model_module)
    for resource in self.values['resources']:
      self._VisitResource(resource, func)
    # Top level methods
    for method in self.values['methods']:
      self._VisitMethod(method, func)
    for parameter in self.values['parameters']:
      # Visit both the parameter itself and its underlying data type.
      func(parameter)
      func(parameter.data_type)
    for schema in self._schemas.values():
      self._VisitSchema(schema, func)
    for scope in self.GetTemplateValue('authscopes') or []:
      func(scope)
def _VisitMethod(self, method, func):
"""Visit a method, calling a function on every child.
Args:
method: (Method) The Method to visit.
func: (function) Method to call on each object.
"""
func(method)
for parameter in method.parameters:
func(parameter)
def _VisitResource(self, resource, func):
"""Visit a resource tree, calling a function on every child.
Calls down recursively to sub resources.
Args:
resource: (Resource) The Resource to visit.
func: (function) Method to call on each object.
"""
func(resource)
for method in resource.values['methods']:
self._VisitMethod(method, func)
for r in resource.values['resources']:
self._VisitResource(r, func)
  def _VisitSchema(self, schema, func):
    """Visit a schema tree, calling a function on every child.

    Args:
      schema: (Schema) The Schema to visit.
      func: (function) Method to call on each object.
    """
    func(schema)
    func(schema.module)
    for prop in schema.values.get('properties', []):
      func(prop)
    # NOTE(review): this iterates self.children (the Api's children), not
    # schema.children - confirm this is intentional and not a typo.
    for child in self.children:
      func(child)
# Do not warn about unused arguments, pylint: disable=unused-argument
def ToClassName(self, s, element, element_type=None):
"""Convert a name to a suitable class name in the target language.
This default implementation camel cases the string, which is appropriate
for some languages. Subclasses are encouraged to override this.
Args:
s: (str) A rosy name of data element.
element: (object) The object we are making a class name for.
element_type: (str) Deprecated. The kind of object we are making a class
name for. E.g. resource, method, schema.
TODO(user): replace type in favor of class of element, but that will
require changing the place where we call ToClassName with no element.
Returns:
A name suitable for use as a class in the generator's target language.
"""
return utilities.CamelCase(s).replace(' ', '')
def NestedClassNameForProperty(self, name, schema):
"""Returns the class name of an object nested in a property."""
# TODO(user): This functionality belongs in the language model, but
# because of the way the api is bootstrapped, that isn't available when we
# need it. When language model is available from the start, this should be
# moved.
return '%s%s' % (schema.class_name, utilities.CamelCase(name))
  @property
  def class_name(self):
    """(str) The generated class name ('className' template value)."""
    return self.values['className']

  @property
  def model_module(self):
    """The model module for this API."""
    return self._model_module

  @property
  def containing_module(self):
    """The module containing this API's module."""
    return self._containing_module

  @property
  def all_methods(self):
    """All the methods in the entire API."""
    return self._all_methods

  @property
  def top_level_methods(self):
    """All the methods at the API top level (not in a resource)."""
    return self._top_level_methods
class Resource(template_objects.CodeObject):
  """A REST resource: a named collection of methods and sub resources."""

  def __init__(self, api, name, def_dict, parent=None):
    """Creates a Resource.

    Args:
      api: (Api) The Api which owns this Resource.
      name: (string) The discovery name of the Resource.
      def_dict: (dict) The discovery dictionary for this Resource.
      parent: (CodeObject) The resource containing this method, if any. Top
        level resources have the API as a parent.
    """
    super(Resource, self).__init__(def_dict, api, parent=parent, wire_name=name)
    self.ValidateName(name)
    class_name = api.ToClassName(name, self, element_type='resource')
    self.SetTemplateValue('className', class_name)
    # Replace methods dict with Method objects, sorted by name for
    # deterministic output. Distinct loop variables avoid shadowing the
    # 'name' parameter (the original code reused it).
    self._methods = []
    method_dict = self.values.get('methods') or {}
    for method_name in sorted(method_dict):
      self._methods.append(
          Method(api, method_name, method_dict[method_name], parent=self))
    self.SetTemplateValue('methods', self._methods)
    # Recursively build sub resources.
    self._resources = []
    r_def_dict = self.values.get('resources') or {}
    for resource_name in sorted(r_def_dict):
      self._resources.append(
          Resource(api, resource_name, r_def_dict[resource_name], parent=self))
    self.SetTemplateValue('resources', self._resources)

  @property
  def methods(self):
    """(list of Method) The methods defined directly on this resource."""
    return self._methods

  @property
  def methods_dict(self):
    """(dict) This resource's methods keyed by their discovery wireName."""
    return {method['wireName']: method for method in self._methods}
class AuthScope(template_objects.CodeObject):
  """The definition of an auth scope.

  An AuthScope defines these template values
    value: The scope url
    name: a sanitized version of the value, transformed so it generally can
      be used as an identifier in code. Deprecated, use constantName
    description: the description of the scope.

  It also provides a template property which can be used after a language
  binding is set.
    constantName: A transformation of the value so it is suitable as a
      constant name in the specific language.
  """
  GOOGLE_PREFIX = 'https://www.googleapis.com/auth/'
  HTTPS_PREFIX = 'https://'

  def __init__(self, api, value, def_dict):
    """Construct an auth scope.

    Args:
      api: (Api) The Api which owns this Property
      value: (string) The unique identifier of this scope, often a URL
      def_dict: (dict) The discovery dictionary for this auth scope.
    """
    super(AuthScope, self).__init__(def_dict, api, wire_name=value)
    self._module = api.module
    self.SetTemplateValue('value', value)
    # Normalize away trailing slashes before deriving identifier names.
    while value.endswith('/'):
      value = value[:-1]
    if 'description' not in self.values:
      self.SetTemplateValue('description', value)
    # Strip the common prefix to get a unique identifying name
    if value.startswith(AuthScope.GOOGLE_PREFIX):
      scope_id = value[len(AuthScope.GOOGLE_PREFIX):]
    elif value.startswith(AuthScope.HTTPS_PREFIX):
      # some common scopes are just a URL
      scope_id = value[len(AuthScope.HTTPS_PREFIX):]
    else:
      scope_id = value
    # We preserve the value stripped of the most common prefixes so we can
    # use it for building constantName in templates.
    self.SetTemplateValue('lastPart', scope_id)
    # replace all non alphanumeric with '_' to form 'name'
    name = ''.join([(c if c.isalnum() else '_') for c in scope_id.upper()])
    self.SetTemplateValue('name', name)

  @property
  def constantName(self):  # pylint: disable=g-bad-name
    """Overrides default behavior of constantName."""
    return self._language_model.ApplyPolicy('constant', self,
                                            self.values['lastPart'])
class Method(template_objects.CodeObject):
  """The definition of a method."""

  def __init__(self, api, name, def_dict, parent=None):
    """Construct a method.

    Methods in REST discovery are inside of a resource. Note that the method
    name and id are calculable from each other. id will always be equal to
    api_name.resource_name[.sub_resource...].method_name. At least it should
    be, as that is the transformation Discovery makes from the API definition,
    which is essentially a flat list of methods, into a hierarchy of resources.

    Args:
      api: (Api) The Api which owns this Method.
      name: (string) The discovery name of the Method.
      def_dict: (dict) The discovery dictionary for this Method.
      parent: (CodeObject) The resource containing this Method, if any.

    Raises:
      ApiException: If the httpMethod type is not one we know how to
          handle.
    """
    super(Method, self).__init__(def_dict, api, parent=(parent or api))
    # TODO(user): Fix java templates to name vs. wireName correctly. Then
    # change the __init__ to have wire_name=def_dict.get('id') or name
    # then eliminate this line.
    self.SetTemplateValue('wireName', name)
    self.ValidateName(name)
    class_name = api.ToClassName(name, self, element_type='method')
    if parent and class_name == parent.values['className']:
      # Some languages complain when the collection name is the same as the
      # method name.
      class_name = '%sRequest' % class_name
    # The name is the key of the dict defining use. The id field is what you
    # have to use to call the method via RPC. That is unique, name might not be.
    self.SetTemplateValue('name', name)
    # Fix up very old discovery, which does not have an id.
    if 'id' not in self.values:
      self.values['id'] = name
    self.SetTemplateValue('className', class_name)
    # Discovery documents may omit httpMethod; POST is the assumed default.
    http_method = def_dict.get('httpMethod', 'POST').upper()
    self.SetTemplateValue('httpMethod', http_method)
    self.SetTemplateValue('rpcMethod',
                          def_dict.get('rpcMethod') or def_dict['id'])
    rest_path = def_dict.get('path') or def_dict.get('restPath')
    # TODO(user): if rest_path is not set, raise a good error and fail fast.
    self.SetTemplateValue('restPath', rest_path)
    # Figure out the input and output types and schemas for this method.
    expected_request = self.values.get('request')
    if expected_request:
      # TODO(user): RequestBody is only used if the schema is anonymous.
      # When we go to nested models, this could be a nested class off the
      # Method, making it unique without the silly name. Same for ResponseBody.
      request_schema = api.DataTypeFromJson(expected_request,
                                            '%sRequestContent' % name,
                                            parent=self)
      self.SetTemplateValue('requestType', request_schema)
    expected_response = def_dict.get('response') or def_dict.get('returns')
    if expected_response:
      response_schema = api.DataTypeFromJson(expected_response,
                                             '%sResponse' % name,
                                             parent=self)
      # 'get' methods link the response schema back to the owning resource.
      if self.values['wireName'] == 'get':
        response_schema.values['associatedResource'] = parent
      self.SetTemplateValue('responseType', response_schema)
    else:
      self.SetTemplateValue('responseType', api.void_type)
    # Make sure we can handle this method type and do any fixups.
    if http_method not in ['DELETE', 'GET', 'OPTIONS', 'PATCH', 'POST', 'PUT',
                           'PROPFIND', 'PROPPATCH', 'REPORT']:
      raise ApiException('Unknown HTTP method: %s' % http_method, def_dict)
    # GET requests never carry a body; discard any request schema.
    if http_method == 'GET':
      self.SetTemplateValue('requestType', None)
    # Replace parameters dict with Parameters. We try to order them by their
    # position in the request path so that the generated code can track the
    # more human readable definition, rather than the order of the parameters
    # in the discovery doc.
    order = self.values.get('parameterOrder', [])
    req_parameters = []
    opt_parameters = []
    # NOTE(review): this loop intentionally reuses (and shadows) the 'name'
    # and 'def_dict' arguments of __init__; both are done being used above.
    for name, def_dict in self.values.get('parameters', {}).iteritems():
      param = Parameter(api, name, def_dict, self)
      if name == 'alt':
        # Treat the alt parameter differently
        self.SetTemplateValue('alt', param)
        continue
      # Standard params are part of the generic request class
      # We want to push all parameters that aren't declared inside
      # parameterOrder after those that are.
      if param.values['wireName'] in order:
        req_parameters.append(param)
      else:
        # optional parameters are appended in the order they're declared.
        opt_parameters.append(param)
    # Python 2 cmp-style sorts: required params follow parameterOrder.
    # pylint: disable=g-long-lambda
    req_parameters.sort(lambda x, y: cmp(order.index(x.values['wireName']),
                                         order.index(y.values['wireName'])))
    # sort optional parameters by name to avoid code churn
    opt_parameters.sort(lambda x, y: cmp(x.values['wireName'], y.values['wireName']))
    req_parameters.extend(opt_parameters)
    self.SetTemplateValue('parameters', req_parameters)
    self._InitMediaUpload(parent)
    self._InitPageable(api)
    api.AddMethod(self)

  def _InitMediaUpload(self, parent):
    """Extract media upload template values from the 'mediaUpload' dict."""
    media_upload = self.values.get('mediaUpload')
    if media_upload:
      if parent:
        parent.SetTemplateValue('isMedia', True)
      # Get which MIME Media Ranges are accepted for media uploads to this
      # method.
      accepted_mime_ranges = media_upload.get('accept')
      self.SetTemplateValue('accepted_mime_ranges', accepted_mime_ranges)
      max_size = media_upload.get('maxSize')
      self.SetTemplateValue('max_size', max_size)
      self.SetTemplateValue('max_size_bytes',
                            convert_size.ConvertSize(max_size))
      # Find which upload protocols are supported.
      upload_protocols = media_upload['protocols']
      for upload_protocol in upload_protocols:
        self._SetUploadTemplateValues(
            upload_protocol, upload_protocols[upload_protocol])

  def _InitPageable(self, api):
    """Mark this method pageable when request/response carry page tokens."""
    response_type = self.values.get('responseType')
    if response_type == api.void_type:
      return
    # A pageable method needs a next-page token in the response...
    next_page_token_name = self.FindPageToken(
        response_type.values.get('properties'))
    if not next_page_token_name:
      return
    # ...and a page token in either the parameters or the request body.
    is_page_token_parameter = True
    page_token_name = self.FindPageToken(self.optional_parameters)
    if not page_token_name:
      # page token may be field of request body instead of query parameter
      is_page_token_parameter = False
      request_type = self.values.get('requestType')
      if request_type:
        page_token_name = self.FindPageToken(
            request_type.values.get('properties'))
    if not page_token_name:
      return
    self.SetTemplateValue('isPageable', True)
    # 'Standard' paging means pageToken parameter + nextPageToken response.
    self.SetTemplateValue('isPagingStyleStandard',
                          (is_page_token_parameter and
                           page_token_name == 'pageToken' and
                           next_page_token_name == 'nextPageToken'))

  def _SetUploadTemplateValues(self, upload_protocol, protocol_dict):
    """Sets upload specific template values.

    Args:
      upload_protocol: (str) The name of the upload protocol. Eg: 'simple' or
        'resumable'.
      protocol_dict: (dict) The dictionary that corresponds to this upload
        protocol. It typically contains keys like 'path', 'multipart' etc.
    """
    self.SetTemplateValue('%s_upload_supported' % upload_protocol, True)
    upload_path = protocol_dict.get('path')
    if upload_path:
      self.SetTemplateValue('%s_upload_path' % upload_protocol, upload_path)
    self.SetTemplateValue('%s_upload_multipart' % upload_protocol,
                          protocol_dict.get('multipart', False))

  @property
  def media_upload_parameters(self):
    """The raw 'mediaUpload' discovery dict, or None."""
    return self.values.get('mediaUpload')

  @property
  def parameters(self):
    """(list of Parameter) All parameters, required first."""
    return self.values['parameters']

  @property
  def optional_parameters(self):
    """(list of Parameter) Parameters not marked required."""
    return [p for p in self.values['parameters'] if not p.required]

  @property
  def required_parameters(self):
    """(list of Parameter) Parameters marked required."""
    return [p for p in self.values['parameters'] if p.required]

  @property
  def path_parameters(self):
    """(list of Parameter) Parameters bound into the URL path."""
    return [p for p in self.values['parameters'] if p.location == 'path']

  @property
  def query_parameters(self):
    """(list of Parameter) Parameters sent in the query string."""
    return [p for p in self.values['parameters'] if p.location == 'query']

  @staticmethod
  def FindCodeObjectWithWireName(things, wire_name):
    """Looks for an element having the given wire_name.

    Args:
      things: (array of DataType) List of parameters or properties to search.
      wire_name: (str) The wireName we are looking to find.

    Returns:
      None or element with the given wire_name.
    """
    if not things: return None
    for e in things:
      if e.values['wireName'] == wire_name: return e
    return None

  @staticmethod
  def FindPageToken(things):
    """Looks for an element with a wireName like a page token.

    Args:
      things: (array of DataType) List of parameters or properties to search.

    Returns:
      None or page token name found.
    """
    for token_name in _PAGE_TOKEN_NAMES:
      if Method.FindCodeObjectWithWireName(things, token_name):
        return token_name
    return None

  #
  # Expose some properties with the naming convention we use in templates
  #
  def optionalParameters(self):  # pylint: disable=g-bad-name
    return self.optional_parameters

  def requiredParameters(self):  # pylint: disable=g-bad-name
    return self.required_parameters

  def pathParameters(self):  # pylint: disable=g-bad-name
    return self.path_parameters

  def queryParameters(self):  # pylint: disable=g-bad-name
    return self.query_parameters
class Parameter(template_objects.CodeObject):
  """The definition of a method parameter."""

  def __init__(self, api, name, def_dict, method):
    """Construct a Parameter.

    Args:
      api: (Api) The Api which owns this Parameter.
      name: (str) The discovery name of the parameter.
      def_dict: (dict) The discovery dictionary for this parameter.
      method: (Method) The method this parameter belongs to.
    """
    super(Parameter, self).__init__(def_dict, api, parent=method,
                                    wire_name=name)
    self.ValidateName(name)
    # NOTE(review): 'schema' is assigned the Api object, not a Schema -
    # confirm this is intentional before relying on it.
    self.schema = api
    # TODO(user): Deal with dots in names better. What we should do is:
    # For x.y, x.z create a little class X, with members y and z. Then
    # have the constructor method take an X.
    self._repeated = self.values.get('repeated', False)
    self._required = self.values.get('required', False)
    # Location defaults to 'query' when discovery gives neither form.
    self._location = (self.values.get('location')
                      or self.values.get('restParameterType')
                      or 'query')
    # Resolve the underlying data type: $ref, array, enum, or primitive.
    # TODO(user): Why not just use Schema.Create here?
    referenced_schema = self.values.get('$ref')
    if referenced_schema:
      # Use the already-built schema when available; otherwise leave a lazy
      # reference to be resolved later.
      self._data_type = (api.SchemaByName(referenced_schema) or
                         data_types.SchemaReference(referenced_schema, api))
    elif def_dict.get('type') == 'array':
      self._data_type = Schema.Create(api, name, def_dict, name, method)
    elif self.values.get('enum'):
      self._data_type = data_types.Enum(def_dict,
                                        api,
                                        name,
                                        self.values.get('enum'),
                                        self.values.get('enumDescriptions'),
                                        parent=method)
      self.SetTemplateValue('enumType', self._data_type)
    else:
      self._data_type = data_types.PrimitiveDataType(def_dict, api, parent=self)
    # Repeated parameters are wrapped in an array of the base type.
    if self._repeated:
      self._data_type = data_types.ArrayDataType(name, self._data_type,
                                                 parent=self)

  @property
  def repeated(self):
    """(bool) Whether the parameter may be given multiple times."""
    return self._repeated

  @property
  def required(self):
    """(bool) Whether the parameter must be supplied."""
    return self._required

  @property
  def location(self):
    """(str) Where the parameter is sent: 'path' or 'query'."""
    return self._location

  @property
  def code_type(self):
    """The target-language type of the underlying data type."""
    return self._data_type.code_type

  @property
  def data_type(self):
    """(DataType) The resolved data type of this parameter."""
    return self._data_type
| bshaffer/google-api-php-client-services | generator/src/googleapis/codegen/api.py | Python | apache-2.0 | 37,026 |
package com.vaguehope.onosendai.payload;
import android.content.Context;
import android.content.Intent;
import com.vaguehope.onosendai.config.Account;
import com.vaguehope.onosendai.model.Meta;
import com.vaguehope.onosendai.model.MetaType;
import com.vaguehope.onosendai.model.Tweet;
import com.vaguehope.onosendai.ui.PostActivity;
import com.vaguehope.onosendai.util.EqualHelper;
/**
 * Payload that launches {@link PostActivity} to compose a reply to the owning
 * tweet using the given account.
 */
public class AddCommentPayload extends Payload {

	private final Account account;

	public AddCommentPayload (final Account account, final Tweet ownerTweet) {
		super(ownerTweet, null, PayloadType.COMMENT);
		this.account = account;
	}

	@Override
	public String getTitle () {
		return "Add Comment"; //ES
	}

	@Override
	public boolean intentable () {
		return true;
	}

	/**
	 * Builds the intent that opens the post screen pre-targeted at the owner
	 * tweet (account id plus the tweet's uid/sid).
	 */
	@Override
	public Intent toIntent (final Context context) {
		final Intent intent = new Intent(context, PostActivity.class);
		intent.putExtra(PostActivity.ARG_ACCOUNT_ID, this.account.getId());
		intent.putExtra(PostActivity.ARG_IN_REPLY_TO_UID, getOwnerTweet().getUid());
		intent.putExtra(PostActivity.ARG_IN_REPLY_TO_SID, getOwnerTweet().getSid());
		// If the owner tweet carries a REPLYTO meta, forward that sid as the
		// alternative reply target.
		final Meta replyToId = getOwnerTweet().getFirstMetaOfType(MetaType.REPLYTO);
		if (replyToId != null) intent.putExtra(PostActivity.ARG_ALT_REPLY_TO_SID, replyToId.getData());
		return intent;
	}

	@Override
	public int hashCode () {
		// Flattened from a nested ternary for readability; behaviour is
		// unchanged: hash of the account id when present, otherwise 0.
		if (this.account == null || this.account.getId() == null) return 0;
		return this.account.getId().hashCode();
	}

	@Override
	public boolean equals (final Object o) {
		if (o == null) return false;
		if (o == this) return true;
		if (!(o instanceof AddCommentPayload)) return false;
		final AddCommentPayload that = (AddCommentPayload) o;
		return EqualHelper.equal(this.getOwnerTweet(), that.getOwnerTweet()) &&
				EqualHelper.equal(this.account, that.account);
	}

}
| haku/Onosendai | src/main/java/com/vaguehope/onosendai/payload/AddCommentPayload.java | Java | apache-2.0 | 1,811 |
/*******************************************************************************
* PathVisio, a tool for data visualization and analysis using biological pathways
* Copyright 2006-2019 BiGCaT Bioinformatics
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package org.pathvisio.core.view;
/**
 * Utility class for creating and destroying LinkAnchors around a rectangular element.
 */
public class DefaultLinkAnchorDelegate extends AbstractLinkAnchorDelegate
{
	private final Graphics parent;
	private final VPathway canvas;

	DefaultLinkAnchorDelegate(Graphics parent)
	{
		this.parent = parent;
		this.canvas = parent.getDrawing();
	}

	// -1 means "not initialized"; forces a rebuild on first showLinkAnchors().
	private int numLinkanchorsH = -1;
	private int numLinkanchorsV = -1;

	// Elements narrower/shorter than this get one anchor per side.
	private static final int MIN_SIZE_LA = 25;

	// Used when the pathway does not specify a valid "NumberOfAnchors".
	private static final int DEFAULT_NUM_ANCHORS = 3;

	public void showLinkAnchors()
	{
		// Groups may explicitly forbid linking.
		if (parent instanceof Group &&
				parent.gdata.getGroupStyle().isDisallowLinks())
		{
			return;
		}
		//Number of link anchors depends on the size of the object
		//If the width/height is large enough, there will be three link anchors per side,
		//Otherwise there will be only one link anchor per side
		String anchorsCnt = parent.gdata.getDynamicProperty("NumberOfAnchors");
		int numAnchors = DEFAULT_NUM_ANCHORS;
		if (anchorsCnt != null) {
			try {
				numAnchors = Integer.parseInt(anchorsCnt);
			}
			catch (NumberFormatException e) {
				// A malformed dynamic property in the pathway file should not
				// abort rendering; fall back to the default anchor count.
				numAnchors = DEFAULT_NUM_ANCHORS;
			}
		}
		int numH = parent.gdata.getMWidth() < MIN_SIZE_LA ? 1 : numAnchors;
		int numV = parent.gdata.getMHeight() < MIN_SIZE_LA ? 1 : numAnchors;
		// Only rebuild when the required anchor count changed.
		if(numH != numLinkanchorsH || numV != numLinkanchorsV)
		{
			linkAnchors.clear();
			// Anchors are placed at evenly spaced relative coordinates in
			// [-1, 1] along each side.
			double deltaH = 2.0/(numH + 1);
			for(int i = 1; i <= numH; i++) {
				linkAnchors.add(new LinkAnchor(canvas, parent, parent.gdata, -1 + i * deltaH, -1));
				linkAnchors.add(new LinkAnchor(canvas, parent, parent.gdata, -1 + i * deltaH, 1));
			}
			double deltaV = 2.0/(numV + 1);
			for(int i = 1; i <= numV; i++) {
				linkAnchors.add(new LinkAnchor(canvas, parent, parent.gdata, -1, -1 + i * deltaV));
				linkAnchors.add(new LinkAnchor(canvas, parent, parent.gdata, 1, -1 + i * deltaV));
			}
			numLinkanchorsH = numH;
			numLinkanchorsV = numV;
		}
	}

	public void hideLinkAnchors()
	{
		super.hideLinkAnchors();
		// Reset so the next showLinkAnchors() rebuilds from scratch.
		numLinkanchorsV = -1;
		numLinkanchorsH = -1;
	}
}
| PathVisio/pathvisio | modules/org.pathvisio.core/src/org/pathvisio/core/view/DefaultLinkAnchorDelegate.java | Java | apache-2.0 | 2,844 |
package finalWeb.controller;
import org.springframework.stereotype.Controller;
/**
 * Placeholder Spring MVC controller; no request mappings are defined yet.
 */
@Controller
public class MainController {
}
| cocainism/Sham | FinalWeb/src/main/java/finalWeb/controller/MainController.java | Java | apache-2.0 | 140 |
/*
* Copyright (c) 2013-2015 Josef Hardi <josef.hardi@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.obidea.semantika.database.datatype;
import java.sql.Types;
import java.util.HashMap;
import com.obidea.semantika.datatype.DataType;
/**
 * A mapping specification from SQL datatypes to XML Schema datatypes.
 * Source: {@link http://www.w3.org/2001/sw/rdb2rdf/wiki/Mapping_SQL_datatypes_to_XML_Schema_datatypes}
 */
public final class SqlTypeToXmlType
{
   /** Lookup table from JDBC {@link java.sql.Types} codes to XML type URIs. */
   private static HashMap<Integer, String> mTypeMapping;

   static {
      mTypeMapping = new HashMap<Integer, String>();
      // Types.BINARY / Types.JAVA_OBJECT are left unmapped on purpose (the
      // hexBinary entries below were disabled); get() rejects them.
      // mCoreMapping.put(Types.BINARY, DataType.HEX_BINARY);
      // mCoreMapping.put(Types.JAVA_OBJECT, DataType.HEX_BINARY);
      mTypeMapping.put(Types.NUMERIC, DataType.DECIMAL);
      mTypeMapping.put(Types.DECIMAL, DataType.DECIMAL);
      mTypeMapping.put(Types.BIGINT, DataType.LONG);
      mTypeMapping.put(Types.INTEGER, DataType.INTEGER);
      mTypeMapping.put(Types.SMALLINT, DataType.SHORT);
      mTypeMapping.put(Types.TINYINT, DataType.BYTE);
      mTypeMapping.put(Types.REAL, DataType.FLOAT);
      mTypeMapping.put(Types.FLOAT, DataType.FLOAT);
      mTypeMapping.put(Types.DOUBLE, DataType.DOUBLE);
      mTypeMapping.put(Types.CHAR, DataType.STRING);
      mTypeMapping.put(Types.VARCHAR, DataType.STRING);
      mTypeMapping.put(Types.NCHAR, DataType.STRING);
      mTypeMapping.put(Types.NVARCHAR, DataType.STRING);
      mTypeMapping.put(Types.LONGVARCHAR, DataType.STRING);
      mTypeMapping.put(Types.LONGNVARCHAR, DataType.STRING);
      mTypeMapping.put(Types.DATE, DataType.DATE);
      mTypeMapping.put(Types.TIME, DataType.TIME);
      mTypeMapping.put(Types.TIMESTAMP, DataType.DATE_TIME);
      mTypeMapping.put(Types.BOOLEAN, DataType.BOOLEAN);
      mTypeMapping.put(Types.BIT, DataType.BOOLEAN);
      mTypeMapping.put(Types.OTHER, DataType.STRING);
   }

   /** Static utility class; not meant to be instantiated. */
   private SqlTypeToXmlType()
   {
      // nothing to do
   }

   /**
    * Return the corresponding XML type given the SQL type.
    *
    * @param sqlType
    *           The JDBC SQL type (see {@link java.sql.Types}).
    * @return a URI string representing the XML type.
    * @throws UnsupportedSqlDataTypeException
    *            if the data type has no corresponding XML type.
    */
   public static String get(int sqlType)
   {
      String toReturn = mTypeMapping.get(sqlType);
      if (toReturn == null) {
         throw new UnsupportedSqlDataTypeException(sqlType);
      }
      return toReturn;
   }
}
| obidea/semantika | src/main/java/com/obidea/semantika/database/datatype/SqlTypeToXmlType.java | Java | apache-2.0 | 2,978 |
/*
* Copyright (C) 2017 the original author or authors.
*
* This file is part of jBB Application Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*/
package org.jbb.system.impl.database.provider;
import org.jbb.lib.db.DbProperties;
import org.jbb.lib.db.provider.H2InMemoryProvider;
import org.jbb.system.api.database.DatabaseProvider;
import org.jbb.system.api.database.DatabaseSettings;
import org.jbb.system.api.database.h2.H2InMemorySettings;
import org.springframework.stereotype.Component;
import lombok.RequiredArgsConstructor;
/**
 * Database-provider manager for the H2 in-memory provider: exposes the
 * provider identity and maps its settings to/from {@link DbProperties}.
 */
@Component
@RequiredArgsConstructor
public class H2InMemoryManager implements DatabaseProviderManager<H2InMemorySettings> {

    // Provider discriminator value shared with the jbb-lib-db H2 provider.
    public static final String PROVIDER_PROPERTY_VALUE = H2InMemoryProvider.PROVIDER_VALUE;

    // Injected through the Lombok-generated constructor (@RequiredArgsConstructor).
    private final DbProperties dbProperties;

    @Override
    public DatabaseProvider getProviderName() {
        return DatabaseProvider.H2_IN_MEMORY;
    }

    @Override
    public H2InMemorySettings getCurrentProviderSettings() {
        // Settings are read straight from the current DbProperties values.
        return H2InMemorySettings.builder()
                .databaseName(dbProperties.h2InMemoryDbName())
                .build();
    }

    @Override
    public void setProviderSettings(DatabaseSettings newDatabaseSettings) {
        // Persist only the H2 in-memory portion of the full settings object.
        H2InMemorySettings newProviderSettings = newDatabaseSettings
                .getH2InMemorySettings();
        dbProperties.setProperty(DbProperties.H2_IN_MEMORY_DB_NAME_KEY,
                newProviderSettings.getDatabaseName());
    }
}
| jbb-project/jbb | domain-services/jbb-system/src/main/java/org/jbb/system/impl/database/provider/H2InMemoryManager.java | Java | apache-2.0 | 1,603 |
package com.wearit.shike.web.model.dao.weather;
import java.util.List;
import com.wearit.shike.web.model.weather.TrackWeather;
import com.wearit.shike.web.model.weather.Weather;
public interface WeatherDao {
	/**
	 * Lists every record stored in the Weather table.
	 *
	 * @return all stored weather rows
	 */
	public List<Weather> getAllWeather();
	/**
	 * Adds weather information to the Weather table.
	 *
	 * @param tw
	 *            the track/weather association to persist
	 */
	public void addTrackWeather(TrackWeather tw);
	/**
	 * Extracts the weather information of a track.
	 *
	 * @param idt
	 *            id of the virtual track whose weather is requested
	 * @return the weather entries associated with the requested track
	 */
	public List<Weather> getTrackWeather(int idt);
	/**
	 * Extracts a single piece of weather information for a track.
	 *
	 * @param _idt
	 *            id of the virtual track whose weather is requested
	 * @param date
	 *            date of the weather forecast
	 * @return the single weather entry for the requested track, or null when
	 *         no matching entry is stored
	 */
	public Weather getSingleTrackWeather(int _idt, long date);
	/**
	 * Removes a record from the Weather table.
	 *
	 * @param id
	 *            id of the weather row to delete
	 * @param order
	 *            forecastOrder of the weather row to delete
	 */
	public void deleteWeather(int id, long order);
} | ZeitnotSWE/sHike | Codice/Web/src/main/java/com/wearit/shike/web/model/dao/weather/WeatherDao.java | Java | apache-2.0 | 1,519 |
package compute
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.11.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
import (
"github.com/Azure/azure-sdk-for-go/Godeps/_workspace/src/github.com/Azure/go-autorest/autorest"
"net/http"
"net/url"
)
// VirtualMachineImagesClient is the client for the VirtualMachineImages
// methods of the Compute service. It embeds ManagementClient, which supplies
// the base URI, subscription id and the autorest send machinery.
type VirtualMachineImagesClient struct {
	ManagementClient
}
// NewVirtualMachineImagesClient creates an instance of the
// VirtualMachineImagesClient client using the package default base URI.
func NewVirtualMachineImagesClient(subscriptionID string) VirtualMachineImagesClient {
	return NewVirtualMachineImagesClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewVirtualMachineImagesClientWithBaseURI creates an instance of the
// VirtualMachineImagesClient client against a caller-supplied base URI
// (e.g. a sovereign or test endpoint).
func NewVirtualMachineImagesClientWithBaseURI(baseURI string, subscriptionID string) VirtualMachineImagesClient {
	return VirtualMachineImagesClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// Get gets a virtual machine image. The location, publisher, offer, skus and
// version path components together identify a single image.
//
// The call follows the standard go-autorest Prepare -> Send -> Respond
// pipeline; each stage wraps its error with client/method context.
func (client VirtualMachineImagesClient) Get(location string, publisherName string, offer string, skus string, version string) (result VirtualMachineImage, ae error) {
	req, err := client.GetPreparer(location, publisherName, offer, skus, version)
	if err != nil {
		return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "Get", "Failure preparing request")
	}
	resp, err := client.GetSender(req)
	if err != nil {
		// Preserve the raw response (may be nil) for the caller to inspect.
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "Get", "Failure sending request")
	}
	result, err = client.GetResponder(resp)
	if err != nil {
		ae = autorest.NewErrorWithError(err, "compute/VirtualMachineImagesClient", "Get", "Failure responding to request")
	}
	return
}
// GetPreparer prepares the Get request: every path component is query-escaped
// and substituted into the versions URL template below.
func (client VirtualMachineImagesClient) GetPreparer(location string, publisherName string, offer string, skus string, version string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"location":       url.QueryEscape(location),
		"offer":          url.QueryEscape(offer),
		"publisherName":  url.QueryEscape(publisherName),
		"skus":           url.QueryEscape(skus),
		"subscriptionId": url.QueryEscape(client.SubscriptionID),
		"version":        url.QueryEscape(version),
	}
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}
	return autorest.Prepare(&http.Request{},
		autorest.AsJSON(),
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPath("/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions/{version}"),
		autorest.WithPathParameters(pathParameters),
		autorest.WithQueryParameters(queryParameters))
}
// GetSender sends the Get request; http.StatusOK is the only status treated
// as success. The method will close the http.Response Body if it receives
// an error.
func (client VirtualMachineImagesClient) GetSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, http.StatusOK)
}
// GetResponder handles the response to the Get request: it checks for a 200
// status, unmarshals the JSON body into a VirtualMachineImage and always
// closes the http.Response Body.
func (client VirtualMachineImagesClient) GetResponder(resp *http.Response) (result VirtualMachineImage, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		autorest.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
// List gets a list of virtual machine images.
//
// filter is the filter to apply on the operation. top limits the number of
// returned items and orderby selects the ordering expression.
func (client VirtualMachineImagesClient) List(location string, publisherName string, offer string, skus string, filter string, top int, orderby string) (result VirtualMachineImageResourceList, ae error) {
	// Build the request; a preparation failure means nothing was sent yet.
	preparedReq, prepErr := client.ListPreparer(location, publisherName, offer, skus, filter, top, orderby)
	if prepErr != nil {
		ae = autorest.NewErrorWithError(prepErr, "compute/VirtualMachineImagesClient", "List", "Failure preparing request")
		return result, ae
	}
	// Issue the request; attach the raw response to the result even on failure.
	rawResp, sendErr := client.ListSender(preparedReq)
	if sendErr != nil {
		result.Response = autorest.Response{Response: rawResp}
		ae = autorest.NewErrorWithError(sendErr, "compute/VirtualMachineImagesClient", "List", "Failure sending request")
		return result, ae
	}
	// Decode the response body into the typed result.
	var respondErr error
	result, respondErr = client.ListResponder(rawResp)
	if respondErr != nil {
		ae = autorest.NewErrorWithError(respondErr, "compute/VirtualMachineImagesClient", "List", "Failure responding to request")
	}
	return result, ae
}
// ListPreparer prepares the List request.
func (client VirtualMachineImagesClient) ListPreparer(location string, publisherName string, offer string, skus string, filter string, top int, orderby string) (*http.Request, error) {
	// Placeholders substituted into the URL path; values are query-escaped.
	pathParameters := map[string]interface{}{
		"location":       url.QueryEscape(location),
		"offer":          url.QueryEscape(offer),
		"publisherName":  url.QueryEscape(publisherName),
		"skus":           url.QueryEscape(skus),
		"subscriptionId": url.QueryEscape(client.SubscriptionID),
	}
	// NOTE(review): the optional $filter/$orderby/$top values are always sent,
	// even when zero-valued — confirm the service tolerates empty values.
	queryParameters := map[string]interface{}{
		"$filter":     filter,
		"$orderby":    orderby,
		"$top":        top,
		"api-version": APIVersion,
	}
	preparer := autorest.CreatePreparer(
		autorest.AsJSON(),
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPath("/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions"),
		autorest.WithPathParameters(pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{})
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineImagesClient) ListSender(req *http.Request) (*http.Response, error) {
	// http.StatusOK is the only status code treated as success.
	resp, err := client.Send(req, http.StatusOK)
	return resp, err
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client VirtualMachineImagesClient) ListResponder(resp *http.Response) (result VirtualMachineImageResourceList, err error) {
	// Decorator order matters: inspect, check status, unmarshal, then close.
	// The payload is a bare JSON array, hence unmarshalling into result.Value.
	decorators := []autorest.RespondDecorator{
		client.ByInspecting(),
		autorest.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result.Value),
		autorest.ByClosing(),
	}
	err = autorest.Respond(resp, decorators...)
	result.Response = autorest.Response{Response: resp}
	return result, err
}
// ListOffers gets a list of virtual machine image offers for the given
// location and publisher.
func (client VirtualMachineImagesClient) ListOffers(location string, publisherName string) (result VirtualMachineImageResourceList, ae error) {
	// Build the request; a preparation failure means nothing was sent yet.
	preparedReq, prepErr := client.ListOffersPreparer(location, publisherName)
	if prepErr != nil {
		ae = autorest.NewErrorWithError(prepErr, "compute/VirtualMachineImagesClient", "ListOffers", "Failure preparing request")
		return result, ae
	}
	// Issue the request; attach the raw response to the result even on failure.
	rawResp, sendErr := client.ListOffersSender(preparedReq)
	if sendErr != nil {
		result.Response = autorest.Response{Response: rawResp}
		ae = autorest.NewErrorWithError(sendErr, "compute/VirtualMachineImagesClient", "ListOffers", "Failure sending request")
		return result, ae
	}
	// Decode the response body into the typed result.
	var respondErr error
	result, respondErr = client.ListOffersResponder(rawResp)
	if respondErr != nil {
		ae = autorest.NewErrorWithError(respondErr, "compute/VirtualMachineImagesClient", "ListOffers", "Failure responding to request")
	}
	return result, ae
}
// ListOffersPreparer prepares the ListOffers request.
func (client VirtualMachineImagesClient) ListOffersPreparer(location string, publisherName string) (*http.Request, error) {
	// Placeholders substituted into the URL path; values are query-escaped.
	pathParameters := map[string]interface{}{
		"location":       url.QueryEscape(location),
		"publisherName":  url.QueryEscape(publisherName),
		"subscriptionId": url.QueryEscape(client.SubscriptionID),
	}
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}
	preparer := autorest.CreatePreparer(
		autorest.AsJSON(),
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPath("/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers"),
		autorest.WithPathParameters(pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{})
}
// ListOffersSender sends the ListOffers request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineImagesClient) ListOffersSender(req *http.Request) (*http.Response, error) {
	// http.StatusOK is the only status code treated as success.
	resp, err := client.Send(req, http.StatusOK)
	return resp, err
}
// ListOffersResponder handles the response to the ListOffers request. The method always
// closes the http.Response Body.
func (client VirtualMachineImagesClient) ListOffersResponder(resp *http.Response) (result VirtualMachineImageResourceList, err error) {
	// Decorator order matters: inspect, check status, unmarshal, then close.
	// The payload is a bare JSON array, hence unmarshalling into result.Value.
	decorators := []autorest.RespondDecorator{
		client.ByInspecting(),
		autorest.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result.Value),
		autorest.ByClosing(),
	}
	err = autorest.Respond(resp, decorators...)
	result.Response = autorest.Response{Response: resp}
	return result, err
}
// ListPublishers gets a list of virtual machine image publishers for the
// given location.
func (client VirtualMachineImagesClient) ListPublishers(location string) (result VirtualMachineImageResourceList, ae error) {
	// Build the request; a preparation failure means nothing was sent yet.
	preparedReq, prepErr := client.ListPublishersPreparer(location)
	if prepErr != nil {
		ae = autorest.NewErrorWithError(prepErr, "compute/VirtualMachineImagesClient", "ListPublishers", "Failure preparing request")
		return result, ae
	}
	// Issue the request; attach the raw response to the result even on failure.
	rawResp, sendErr := client.ListPublishersSender(preparedReq)
	if sendErr != nil {
		result.Response = autorest.Response{Response: rawResp}
		ae = autorest.NewErrorWithError(sendErr, "compute/VirtualMachineImagesClient", "ListPublishers", "Failure sending request")
		return result, ae
	}
	// Decode the response body into the typed result.
	var respondErr error
	result, respondErr = client.ListPublishersResponder(rawResp)
	if respondErr != nil {
		ae = autorest.NewErrorWithError(respondErr, "compute/VirtualMachineImagesClient", "ListPublishers", "Failure responding to request")
	}
	return result, ae
}
// ListPublishersPreparer prepares the ListPublishers request.
func (client VirtualMachineImagesClient) ListPublishersPreparer(location string) (*http.Request, error) {
	// Placeholders substituted into the URL path; values are query-escaped.
	pathParameters := map[string]interface{}{
		"location":       url.QueryEscape(location),
		"subscriptionId": url.QueryEscape(client.SubscriptionID),
	}
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}
	preparer := autorest.CreatePreparer(
		autorest.AsJSON(),
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPath("/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers"),
		autorest.WithPathParameters(pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{})
}
// ListPublishersSender sends the ListPublishers request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineImagesClient) ListPublishersSender(req *http.Request) (*http.Response, error) {
	// http.StatusOK is the only status code treated as success.
	resp, err := client.Send(req, http.StatusOK)
	return resp, err
}
// ListPublishersResponder handles the response to the ListPublishers request. The method always
// closes the http.Response Body.
func (client VirtualMachineImagesClient) ListPublishersResponder(resp *http.Response) (result VirtualMachineImageResourceList, err error) {
	// Decorator order matters: inspect, check status, unmarshal, then close.
	// The payload is a bare JSON array, hence unmarshalling into result.Value.
	decorators := []autorest.RespondDecorator{
		client.ByInspecting(),
		autorest.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result.Value),
		autorest.ByClosing(),
	}
	err = autorest.Respond(resp, decorators...)
	result.Response = autorest.Response{Response: resp}
	return result, err
}
// ListSkus gets a list of virtual machine image skus for the given location,
// publisher and offer.
func (client VirtualMachineImagesClient) ListSkus(location string, publisherName string, offer string) (result VirtualMachineImageResourceList, ae error) {
	// Build the request; a preparation failure means nothing was sent yet.
	preparedReq, prepErr := client.ListSkusPreparer(location, publisherName, offer)
	if prepErr != nil {
		ae = autorest.NewErrorWithError(prepErr, "compute/VirtualMachineImagesClient", "ListSkus", "Failure preparing request")
		return result, ae
	}
	// Issue the request; attach the raw response to the result even on failure.
	rawResp, sendErr := client.ListSkusSender(preparedReq)
	if sendErr != nil {
		result.Response = autorest.Response{Response: rawResp}
		ae = autorest.NewErrorWithError(sendErr, "compute/VirtualMachineImagesClient", "ListSkus", "Failure sending request")
		return result, ae
	}
	// Decode the response body into the typed result.
	var respondErr error
	result, respondErr = client.ListSkusResponder(rawResp)
	if respondErr != nil {
		ae = autorest.NewErrorWithError(respondErr, "compute/VirtualMachineImagesClient", "ListSkus", "Failure responding to request")
	}
	return result, ae
}
// ListSkusPreparer prepares the ListSkus request.
func (client VirtualMachineImagesClient) ListSkusPreparer(location string, publisherName string, offer string) (*http.Request, error) {
	// Placeholders substituted into the URL path; values are query-escaped.
	pathParameters := map[string]interface{}{
		"location":       url.QueryEscape(location),
		"offer":          url.QueryEscape(offer),
		"publisherName":  url.QueryEscape(publisherName),
		"subscriptionId": url.QueryEscape(client.SubscriptionID),
	}
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}
	preparer := autorest.CreatePreparer(
		autorest.AsJSON(),
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPath("/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus"),
		autorest.WithPathParameters(pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{})
}
// ListSkusSender sends the ListSkus request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineImagesClient) ListSkusSender(req *http.Request) (*http.Response, error) {
	// http.StatusOK is the only status code treated as success.
	resp, err := client.Send(req, http.StatusOK)
	return resp, err
}
// ListSkusResponder handles the response to the ListSkus request. The method always
// closes the http.Response Body.
func (client VirtualMachineImagesClient) ListSkusResponder(resp *http.Response) (result VirtualMachineImageResourceList, err error) {
	// Decorator order matters: inspect, check status, unmarshal, then close.
	// The payload is a bare JSON array, hence unmarshalling into result.Value.
	decorators := []autorest.RespondDecorator{
		client.ByInspecting(),
		autorest.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result.Value),
		autorest.ByClosing(),
	}
	err = autorest.Respond(resp, decorators...)
	result.Response = autorest.Response{Response: resp}
	return result, err
}
| paulmey/azure-sdk-for-go | arm/compute/virtualmachineimages.go | GO | apache-2.0 | 14,509 |
// Wire up the color picker and all popup open/close handlers once the page
// widgets have had a second to render.
setTimeout(function () {
    $('.color-box').colpick({
        layout: 'hex',
        submit: 0,
        colorScheme: 'dark',
        onChange: function (hsb, hex, rgb, el, bySetColor) {
            $(el).css('background', '#' + hex);
            // Only mirror the value into the text box when the user picked it,
            // not when it was set programmatically via colpickSetColor.
            if (!bySetColor) {
                $(el).val(hex);
            }
        }
    });

    // Fades in the given popup together with the shared dimmed background.
    function openPopup(selector) {
        $(selector).fadeIn("fast");
        $(".popup-background").fadeIn("fast");
    }

    $("#batch-input").click(function () {
        openPopup("#batch");
    });
    $(".close-popup").click(function () {
        $(".popup").fadeOut("fast");
        $(".popup-background").fadeOut("fast");
    });
    $("a.debug").click(function () {
        $("#debug").css("visibility", "visible");
    });
    $("a.save-version").click(function () {
        openPopup("#save");
    });
    $("a.leave").click(function () {
        openPopup("#message");
    });
    $("a.topline").click(function () {
        // The top line banner auto-dismisses after five seconds.
        $("#topline").fadeIn("fast");
        $("#topline").delay(5000).fadeOut("fast");
    });
    $("a.win-edit").click(function () {
        openPopup("#markdown");
    });
    $("a.search-button").click(function () {
        $("#search").css("visibility", "visible");
    });
}, 1000);
| emksaz/choicefrom | development/de-app/script/popup.js | JavaScript | apache-2.0 | 1,411 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.integration;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StoreQueryParameters;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.InvalidStateStoreException;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.QueryableStoreType;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.test.IntegrationTest;
import org.apache.kafka.test.TestCondition;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.getStore;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.safeUniqueTestName;
import static org.apache.kafka.streams.integration.utils.IntegrationTestUtils.startApplicationAndWaitUntilRunning;
import static org.apache.kafka.streams.state.QueryableStoreTypes.keyValueStore;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
@Category({IntegrationTest.class})
public class StoreQueryIntegrationTest {

    private static final Logger LOG = LoggerFactory.getLogger(StoreQueryIntegrationTest.class);

    private static final int NUM_BROKERS = 1;
    // Each KafkaStreams instance gets a unique application-server port; the
    // tests use the port's parity to tell the two instances apart.
    private static int port = 0;
    private static final String INPUT_TOPIC_NAME = "input-topic";
    private static final String TABLE_NAME = "source-table";

    public final EmbeddedKafkaCluster cluster = new EmbeddedKafkaCluster(NUM_BROKERS);

    @Rule
    public TestName testName = new TestName();

    private final List<KafkaStreams> streamsToCleanup = new ArrayList<>();
    private final MockTime mockTime = cluster.time;

    @Before
    public void before() throws InterruptedException, IOException {
        cluster.start();
        // Two partitions so a key can belong to one partition and not the other.
        cluster.createTopic(INPUT_TOPIC_NAME, 2, 1);
    }

    @After
    public void after() {
        for (final KafkaStreams kafkaStreams : streamsToCleanup) {
            kafkaStreams.close();
        }
        cluster.stop();
    }

    /**
     * Builds the topology shared by every test in this class: a cache-disabled
     * table over the input topic whose downstream peek releases the given
     * semaphore once per processed record, letting tests await processing of a
     * whole batch of messages.
     */
    private StreamsBuilder builderForSourceTable(final Semaphore semaphore) {
        final StreamsBuilder builder = new StreamsBuilder();
        builder.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()),
                Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME)
                        .withCachingDisabled())
                .toStream()
                .peek((k, v) -> semaphore.release());
        return builder;
    }

    /**
     * With default store query parameters, only the instance hosting the
     * active task for the key's partition may serve it; the standby must not.
     */
    @Test
    public void shouldQueryOnlyActivePartitionStoresByDefault() throws Exception {
        final int batch1NumMessages = 100;
        final int key = 1;
        final Semaphore semaphore = new Semaphore(0);

        final StreamsBuilder builder = builderForSourceTable(semaphore);

        final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
        final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
        final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);

        startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));

        produceValueRange(key, 0, batch1NumMessages);

        // Assert that all messages in the first batch were processed in a timely manner
        assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
        until(() -> {
            // The partitioner pins the key to partition 0.
            final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, (topic, somekey, value, numPartitions) -> 0);

            final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
            final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(TABLE_NAME, kafkaStreams1, queryableStoreType);
            final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(TABLE_NAME, kafkaStreams2, queryableStoreType);

            // Instance 1 was configured with an odd application-server port.
            final boolean kafkaStreams1IsActive = (keyQueryMetadata.activeHost().port() % 2) == 1;

            try {
                if (kafkaStreams1IsActive) {
                    assertThat(store1.get(key), is(notNullValue()));
                    assertThat(store2.get(key), is(nullValue()));
                } else {
                    assertThat(store1.get(key), is(nullValue()));
                    assertThat(store2.get(key), is(notNullValue()));
                }
                return true;
            } catch (final InvalidStateStoreException exception) {
                // A rebalance may still be in flight; retry until RUNNING.
                assertThat(
                    exception.getMessage(),
                    containsString("Cannot get state store source-table because the stream thread is PARTITIONS_ASSIGNED, not RUNNING")
                );
                LOG.info("Streams wasn't running. Will try again.");
                return false;
            }
        });
    }

    /**
     * Querying a specific partition must succeed on the instance that actively
     * hosts that partition and fail (or return nothing) on any other.
     */
    @Test
    public void shouldQuerySpecificActivePartitionStores() throws Exception {
        final int batch1NumMessages = 100;
        final int key = 1;
        final Semaphore semaphore = new Semaphore(0);

        final StreamsBuilder builder = builderForSourceTable(semaphore);

        final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
        final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
        final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);

        startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));

        produceValueRange(key, 0, batch1NumMessages);

        // Assert that all messages in the first batch were processed in a timely manner
        assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
        until(() -> {
            final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, (topic, somekey, value, numPartitions) -> 0);

            //key belongs to this partition
            final int keyPartition = keyQueryMetadata.partition();

            //key doesn't belongs to this partition
            final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
            final boolean kafkaStreams1IsActive = (keyQueryMetadata.activeHost().port() % 2) == 1;

            final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> storeQueryParam =
                StoreQueryParameters.<ReadOnlyKeyValueStore<Integer, Integer>>fromNameAndType(TABLE_NAME, keyValueStore())
                    .withPartition(keyPartition);
            ReadOnlyKeyValueStore<Integer, Integer> store1 = null;
            ReadOnlyKeyValueStore<Integer, Integer> store2 = null;
            if (kafkaStreams1IsActive) {
                store1 = getStore(kafkaStreams1, storeQueryParam);
            } else {
                store2 = getStore(kafkaStreams2, storeQueryParam);
            }

            if (kafkaStreams1IsActive) {
                assertThat(store1, is(notNullValue()));
                assertThat(store2, is(nullValue()));
            } else {
                assertThat(store2, is(notNullValue()));
                assertThat(store1, is(nullValue()));
            }

            final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> storeQueryParam2 =
                StoreQueryParameters.<ReadOnlyKeyValueStore<Integer, Integer>>fromNameAndType(TABLE_NAME, keyValueStore())
                    .withPartition(keyDontBelongPartition);

            try {
                // Assert that key is not served when wrong specific partition is requested
                // If kafkaStreams1 is active for keyPartition, kafkaStreams2 would be active for keyDontBelongPartition
                // So, in that case, store3 would be null and the store4 would not return the value for key as wrong partition was requested
                if (kafkaStreams1IsActive) {
                    assertThat(store1.get(key), is(notNullValue()));
                    assertThat(getStore(kafkaStreams2, storeQueryParam2).get(key), is(nullValue()));
                    final InvalidStateStoreException exception =
                        assertThrows(InvalidStateStoreException.class, () -> getStore(kafkaStreams1, storeQueryParam2).get(key));
                    assertThat(
                        exception.getMessage(),
                        containsString("The specified partition 1 for store source-table does not exist.")
                    );
                } else {
                    assertThat(store2.get(key), is(notNullValue()));
                    assertThat(getStore(kafkaStreams1, storeQueryParam2).get(key), is(nullValue()));
                    final InvalidStateStoreException exception =
                        assertThrows(InvalidStateStoreException.class, () -> getStore(kafkaStreams2, storeQueryParam2).get(key));
                    assertThat(
                        exception.getMessage(),
                        containsString("The specified partition 1 for store source-table does not exist.")
                    );
                }
                return true;
            } catch (final InvalidStateStoreException exception) {
                // A rebalance may still be in flight; retry until RUNNING.
                assertThat(
                    exception.getMessage(),
                    containsString("Cannot get state store source-table because the stream thread is PARTITIONS_ASSIGNED, not RUNNING")
                );
                LOG.info("Streams wasn't running. Will try again.");
                return false;
            }
        });
    }

    /**
     * With stale stores enabled, both the active and the standby instance must
     * be able to serve the key.
     */
    @Test
    public void shouldQueryAllStalePartitionStores() throws Exception {
        final int batch1NumMessages = 100;
        final int key = 1;
        final Semaphore semaphore = new Semaphore(0);

        final StreamsBuilder builder = builderForSourceTable(semaphore);

        final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
        final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
        final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);

        startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));

        produceValueRange(key, 0, batch1NumMessages);

        // Assert that all messages in the first batch were processed in a timely manner
        assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));

        final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();

        // Assert that both active and standby are able to query for a key
        TestUtils.waitForCondition(() -> {
            final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(TABLE_NAME, kafkaStreams1, true, queryableStoreType);
            return store1.get(key) != null;
        }, "store1 cannot find results for key");
        TestUtils.waitForCondition(() -> {
            final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(TABLE_NAME, kafkaStreams2, true, queryableStoreType);
            return store2.get(key) != null;
        }, "store2 cannot find results for key");
    }

    /**
     * With stale stores enabled and a specific partition requested, both
     * instances serve the key's partition; querying the other partition
     * returns no value for the key on either instance.
     */
    @Test
    public void shouldQuerySpecificStalePartitionStores() throws Exception {
        final int batch1NumMessages = 100;
        final int key = 1;
        final Semaphore semaphore = new Semaphore(0);

        final StreamsBuilder builder = builderForSourceTable(semaphore);

        final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
        final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
        final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);

        startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));

        produceValueRange(key, 0, batch1NumMessages);

        // Assert that all messages in the first batch were processed in a timely manner
        assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
        final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, (topic, somekey, value, numPartitions) -> 0);

        //key belongs to this partition
        final int keyPartition = keyQueryMetadata.partition();

        //key doesn't belongs to this partition
        final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
        final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();

        // Assert that both active and standby are able to query for a key
        final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> param = StoreQueryParameters
            .fromNameAndType(TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyPartition);
        TestUtils.waitForCondition(() -> {
            final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(kafkaStreams1, param);
            return store1.get(key) != null;
        }, "store1 cannot find results for key");
        TestUtils.waitForCondition(() -> {
            final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(kafkaStreams2, param);
            return store2.get(key) != null;
        }, "store2 cannot find results for key");

        final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> otherParam = StoreQueryParameters
            .fromNameAndType(TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyDontBelongPartition);
        final ReadOnlyKeyValueStore<Integer, Integer> store3 = getStore(kafkaStreams1, otherParam);
        final ReadOnlyKeyValueStore<Integer, Integer> store4 = getStore(kafkaStreams2, otherParam);

        // The key lives only in keyPartition, so the other partition has no value for it.
        assertThat(store3.get(key), is(nullValue()));
        assertThat(store4.get(key), is(nullValue()));
    }

    /**
     * Same as {@link #shouldQuerySpecificStalePartitionStores()} but with two
     * stream threads per instance, verifying partition-specific stale queries
     * across threads.
     */
    @Test
    public void shouldQuerySpecificStalePartitionStoresMultiStreamThreads() throws Exception {
        final int batch1NumMessages = 100;
        final int key = 1;
        final Semaphore semaphore = new Semaphore(0);
        final int numStreamThreads = 2;

        final StreamsBuilder builder = builderForSourceTable(semaphore);

        final Properties streamsConfiguration1 = streamsConfiguration();
        streamsConfiguration1.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);

        final Properties streamsConfiguration2 = streamsConfiguration();
        streamsConfiguration2.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);

        final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration1);
        final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration2);
        final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);

        startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));

        assertTrue(kafkaStreams1.localThreadsMetadata().size() > 1);
        assertTrue(kafkaStreams2.localThreadsMetadata().size() > 1);

        produceValueRange(key, 0, batch1NumMessages);

        // Assert that all messages in the first batch were processed in a timely manner
        assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));

        final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, new IntegerSerializer());

        //key belongs to this partition
        final int keyPartition = keyQueryMetadata.partition();

        //key doesn't belongs to this partition
        final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
        final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();

        // Assert that both active and standby are able to query for a key
        final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> param = StoreQueryParameters
            .fromNameAndType(TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyPartition);
        TestUtils.waitForCondition(() -> {
            final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(kafkaStreams1, param);
            return store1.get(key) != null;
        }, "store1 cannot find results for key");
        TestUtils.waitForCondition(() -> {
            final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(kafkaStreams2, param);
            return store2.get(key) != null;
        }, "store2 cannot find results for key");

        final StoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> otherParam = StoreQueryParameters
            .fromNameAndType(TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyDontBelongPartition);
        final ReadOnlyKeyValueStore<Integer, Integer> store3 = getStore(kafkaStreams1, otherParam);
        final ReadOnlyKeyValueStore<Integer, Integer> store4 = getStore(kafkaStreams2, otherParam);

        // The key lives only in keyPartition, so the other partition has no value for it.
        assertThat(store3.get(key), is(nullValue()));
        assertThat(store4.get(key), is(nullValue()));
    }

    /**
     * Polls the condition every 500 ms until it returns true or the default
     * timeout elapses. A RuntimeException from the condition propagates;
     * checked exceptions are wrapped.
     *
     * NOTE(review): if the condition never succeeds the method returns
     * silently after the deadline, so the calling test would pass spuriously —
     * consider failing on timeout.
     */
    private static void until(final TestCondition condition) {
        boolean success = false;
        final long deadline = System.currentTimeMillis() + IntegrationTestUtils.DEFAULT_TIMEOUT;
        while (!success && System.currentTimeMillis() < deadline) {
            try {
                success = condition.conditionMet();
                Thread.sleep(500L);
            } catch (final RuntimeException e) {
                throw e;
            } catch (final Exception e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Creates a KafkaStreams instance from the builder and registers it for
     * closing in {@link #after()}.
     */
    private KafkaStreams createKafkaStreams(final StreamsBuilder builder, final Properties config) {
        final KafkaStreams streams = new KafkaStreams(builder.build(config), config);
        streamsToCleanup.add(streams);
        return streams;
    }

    /**
     * Synchronously produces records (key, start) .. (key, endExclusive - 1)
     * to the input topic.
     */
    private void produceValueRange(final int key, final int start, final int endExclusive) {
        final Properties producerProps = new Properties();
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers());
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);

        IntegrationTestUtils.produceKeyValuesSynchronously(
            INPUT_TOPIC_NAME,
            IntStream.range(start, endExclusive)
                .mapToObj(i -> KeyValue.pair(key, i))
                .collect(Collectors.toList()),
            producerProps,
            mockTime);
    }

    /**
     * Base Streams configuration: one standby replica (so a second instance
     * hosts a standby of every store) and a per-instance application-server
     * port drawn from the incrementing {@link #port} counter.
     */
    private Properties streamsConfiguration() {
        final String safeTestName = safeUniqueTestName(getClass(), testName);
        final Properties config = new Properties();
        config.put(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, StreamsConfig.OPTIMIZE);
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "app-" + safeTestName);
        config.put(StreamsConfig.APPLICATION_SERVER_CONFIG, "localhost:" + (++port));
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers());
        config.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
        config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        config.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
        config.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 100);
        config.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, 200);
        config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 1000);
        config.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100);
        return config;
    }
}
| Chasego/kafka | streams/src/test/java/org/apache/kafka/streams/integration/StoreQueryIntegrationTest.java | Java | apache-2.0 | 23,416 |
/*
Copyright 2014-2016 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.safariservices;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSError;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
@Generated
@Library("SafariServices")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
/* NOTE(review): MOE/NatJ-generated Objective-C binding for SafariServices'
 * SFContentBlockerManager. Method bodies are native stubs resolved by the
 * NatJ runtime; do not hand-edit signatures or selector strings. */
public class SFContentBlockerManager extends NSObject {
    static {
        // Registers this binding class's natives with the NatJ runtime.
        NatJ.register();
    }

    @Generated
    protected SFContentBlockerManager(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native SFContentBlockerManager alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native SFContentBlockerManager allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    // Asynchronously queries the enabled/disabled state of the content blocker
    // extension identified by its bundle identifier; result is delivered to the
    // completion block.
    @Generated
    @Selector("getStateOfContentBlockerWithIdentifier:completionHandler:")
    public static native void getStateOfContentBlockerWithIdentifierCompletionHandler(String identifier,
            @ObjCBlock(name = "call_getStateOfContentBlockerWithIdentifierCompletionHandler") Block_getStateOfContentBlockerWithIdentifierCompletionHandler completionHandler);

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    @Generated
    @Owned
    @Selector("new")
    public static native SFContentBlockerManager new_objc();

    // Asks Safari to reload the rule list of the given content blocker
    // extension; completion block receives a non-null NSError on failure.
    @Generated
    @Selector("reloadContentBlockerWithIdentifier:completionHandler:")
    public static native void reloadContentBlockerWithIdentifierCompletionHandler(String identifier,
            @ObjCBlock(name = "call_reloadContentBlockerWithIdentifierCompletionHandler") Block_reloadContentBlockerWithIdentifierCompletionHandler completionHandler);

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    @Generated
    @Selector("init")
    public native SFContentBlockerManager init();

    /** Callback shape for the get-state query: receives the blocker state and an error. */
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_getStateOfContentBlockerWithIdentifierCompletionHandler {
        @Generated
        void call_getStateOfContentBlockerWithIdentifierCompletionHandler(SFContentBlockerState state, NSError error);
    }

    /** Callback shape for the reload request: receives only an error (null on success). */
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_reloadContentBlockerWithIdentifierCompletionHandler {
        @Generated
        void call_reloadContentBlockerWithIdentifierCompletionHandler(NSError error);
    }
}
| multi-os-engine/moe-core | moe.apple/moe.platform.ios/src/main/java/apple/safariservices/SFContentBlockerManager.java | Java | apache-2.0 | 6,153 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six import string_types
import random
import re
import json
import numpy as np
import traceback
from cognitive import stim_generator as sg
import cognitive.constants as const
_R_MEAN = 123.68
_G_MEAN = 116.78
_B_MEAN = 103.94
def convert_to_grid(xy_coord, prefs):
  """Target grid activity for x-y coordinates: Gaussian bumps, row-normalized.

  Args:
    xy_coord : numpy 2-D array (batch_size, 2)
    prefs: numpy 2-D array (n_out_pnt, 2). x and y preferences.

  Returns:
    activity: numpy array (batch_size, n_out_pnt); each row sums to 1.
  """
  two_sigma_sq = 0.02  # 2 * sigma^2 of the Gaussian tuning curve
  dx = xy_coord[:, 0:1] - prefs[:, 0]
  dy = xy_coord[:, 1:2] - prefs[:, 1]
  activity = np.exp(-(dx ** 2 + dy ** 2) / two_sigma_sq)
  # Normalize each row so the target activities form a distribution.
  return (activity.T / activity.sum(axis=1)).T
def map_sentence2ints(sentence):
  """Tokenize a sentence and encode it as an int32 array of vocabulary indices."""
  tokens = re.findall(r"[\w']+|[.,!?;]", sentence)
  return np.fromiter(
      (const.INPUTVOCABULARY.index(t) for t in tokens),
      dtype=np.int32, count=len(tokens))
def preprocess(in_imgs_, vis_type):
  """Pre-process an image batch in place and return it.

  VGG-style networks get per-channel mean subtraction; all other visual
  types get scaling to [0, 1] followed by global mean subtraction.
  """
  if vis_type in ('vgg', 'vgg_pretrain'):
    in_imgs_ -= np.array([_R_MEAN, _G_MEAN, _B_MEAN], dtype=np.float32)
  else:
    in_imgs_ /= 255.
    in_imgs_ -= np.mean(in_imgs_)
  return in_imgs_
def tasks_to_rules(tasks):
  """Encode task instructions as padded index arrays.

  Args:
    tasks: a list of tg.Task instances or string rules, length is batch_size.

  Returns:
    in_rule: int64 array (MAXSEQLENGTH, batch_size), zero-padded word indices.
    seq_length: int64 array (batch_size,), actual instruction lengths.
  """
  batch_size = len(tasks)
  in_rule = np.zeros((const.MAXSEQLENGTH, batch_size), dtype=np.int64)
  seq_length = np.zeros((batch_size,), dtype=np.int64)
  for col, task in enumerate(tasks):
    words = re.findall(r"[\w']+|[.,!?;]", str(task))
    seq_length[col] = len(words)
    for row, word in enumerate(words):
      in_rule[row, col] = const.INPUTVOCABULARY.index(word)
  return in_rule, seq_length
def set_outputs_from_tasks(n_epoch, tasks, objsets,
                           out_pnt_xy, out_word,
                           mask_pnt, mask_word):
  """Fill per-(epoch, task) targets and loss masks in place.

  The flat index j runs over epochs (outer loop) then tasks (inner loop),
  matching the (n_epoch * batch_size) row layout built by generate_batch.

  Args:
    n_epoch: int, number of epochs.
    tasks: list of tg.Task instances, length batch_size.
    objsets: list of object sets, parallel to tasks.
    out_pnt_xy: float array (n_epoch * batch_size, 2), written in place.
    out_word: int array (n_epoch * batch_size,), written in place.
    mask_pnt: float array (n_epoch * batch_size,), written in place.
    mask_word: float array (n_epoch * batch_size,), written in place.

  Raises:
    TypeError: when a task target has an unknown type.
  """
  j = 0
  for epoch_now in range(n_epoch):
    for task, objset in zip(tasks, objsets):
      # Evaluate the task on its object set at the current epoch.
      target = task(objset, epoch_now)
      if target is const.INVALID:
        # For invalid target, no loss is used. Everything remains zero.
        pass
      elif isinstance(target, sg.Loc):
        # minimize point loss
        out_pnt_xy[j, :] = target.value
        mask_pnt[j] = 1.
      elif isinstance(target, bool) or isinstance(target, sg.Attribute):
        if isinstance(target, bool):
          target = 'true' if target else 'false'
        else:
          target = target.value
        # For boolean target, only minimize word loss
        out_word[j] = const.OUTPUTVOCABULARY.index(target)
        mask_word[j] = 1.
      else:
        raise TypeError('Unknown target type.')
      j += 1
def set_outputs_from_targets(n_epoch, objsets,
                             out_pnt_xy, out_word,
                             mask_pnt, mask_word):
  """Fill targets and loss masks in place from precomputed objset targets.

  Row j = epoch_now * len(objsets) + objset index, matching the
  (n_epoch * batch_size) layout used by generate_batch.

  Raises:
    TypeError: when a stored target has an unknown type.
  """
  n_objsets = len(objsets)
  for epoch_now in range(n_epoch):
    for i, objset in enumerate(objsets):
      j = epoch_now * n_objsets + i
      target = objset.targets[epoch_now]
      if target == 'invalid':
        # No loss for invalid targets; row stays all-zero.
        continue
      if isinstance(target, (list, tuple)):
        assert len(target) == 2, "Expected 2-D target. Got " + str(target)
        # Point target: enable only the point loss.
        out_pnt_xy[j, :] = target
        mask_pnt[j] = 1.
      elif isinstance(target, string_types):
        # Word target: enable only the word loss.
        out_word[j] = const.OUTPUTVOCABULARY.index(target)
        mask_word[j] = 1.
      else:
        raise TypeError('Unknown target type: %s %s' % (type(target), target))
def generate_batch(tasks,
                   n_epoch=30,
                   img_size=224,
                   objsets=None,
                   n_distractor=1,
                   average_memory_span=2):
  """Generate a batch of trials.

  Return numpy arrays to feed the tensorflow placeholders.

  Args:
    tasks: a list of tg.Task instances, length is batch_size.
    n_epoch: int, number of epochs
    img_size: int, image size
    objsets: None or list of ObjectSet/StaticObjectSet instances
    n_distractor: int, number of distractors to add
    average_memory_span: int, the average number of epochs by which an object
      need to be held in working memory, if needed at all

  Returns:
    All variables are numpy array of float32
    in_imgs: (n_epoch*batch_size, img_size, img_size, 3)
    in_rule: (max_seq_length, batch_size) the rule language input, type int32
    seq_length: (batch_size,) the length of each task instruction
    out_pnt: (n_epoch*batch_size, n_out_pnt)
    out_pnt_xy: (n_epoch*batch_size, 2)
    out_word: (n_epoch*batch_size, n_out_word)
    mask_pnt: (n_epoch*batch_size)
    mask_word: (n_epoch*batch_size)

  Raises:
    TypeError: when target type is incorrect.
  """
  batch_size = len(tasks)

  # Generate object sets on the fly unless precomputed ones were supplied.
  if objsets is None:
    objsets = list()
    for task in tasks:
      objsets.append(
          task.generate_objset(n_epoch,
                               n_distractor=n_distractor,
                               average_memory_span=average_memory_span))

  # All object sets must agree with the requested epoch count.
  max_objset_epoch = max([objset.n_epoch for objset in objsets])
  assert max_objset_epoch == n_epoch, '%d != %d' % (max_objset_epoch, n_epoch)

  in_imgs = sg.render(objsets, img_size)
  # The rendered images are batch major
  in_imgs = np.reshape(in_imgs, [batch_size, n_epoch, img_size, img_size, 3])
  # Swap to time major
  in_imgs = np.swapaxes(in_imgs, 0, 1)

  # Outputs and masks, one row per (epoch, task) pair; zero means "no loss".
  out_pnt_xy = np.zeros((n_epoch * batch_size, 2), dtype=np.float32)
  out_word = np.zeros((n_epoch * batch_size), dtype=np.int64)

  mask_pnt = np.zeros((n_epoch * batch_size), dtype=np.float32)
  mask_word = np.zeros((n_epoch * batch_size), dtype=np.float32)

  # Static object sets carry precomputed answers; live tasks are evaluated.
  if isinstance(objsets[0], sg.StaticObjectSet):
    set_outputs_from_targets(n_epoch, objsets,
                             out_pnt_xy, out_word,
                             mask_pnt, mask_word)
  else:
    set_outputs_from_tasks(n_epoch, tasks, objsets,
                           out_pnt_xy, out_word,
                           mask_pnt, mask_word)

  # Process outputs
  out_pnt = convert_to_grid(out_pnt_xy, const.PREFS)

  # Generate rule inputs, padded to maximum number of words in a sentence
  in_rule, seq_length = tasks_to_rules(tasks)

  return (in_imgs, in_rule, seq_length, out_pnt, out_pnt_xy, out_word, mask_pnt,
          mask_word)
def static_objsets_from_examples(examples):
  """Build sg.StaticObjectSet instances from decoded json examples.

  Args:
    examples: an iterable of dictionaries decoded from json examples.

  Returns:
    A list of StaticObjectSet objects, one per example.
  """
  static_objsets = []
  for e in examples:
    # Flatten every multi-epoch object dict into its static objects.
    objs = []
    for multi_epoch_obj in e['objects']:
      objs.extend(sg.static_objects_from_dict(multi_epoch_obj))
    static_objsets.append(sg.StaticObjectSet(n_epoch=e['epochs'],
                                             static_objects=objs,
                                             targets=e['answers']))
  return static_objsets
def json_to_feeds(json_examples):
  """Decode json example strings and convert them to feed arrays.

  Args:
    json_examples: one json string or a list of json strings.

  Returns:
    The tuple produced by generate_batch, with the list of example families
    appended as the last element.
  """
  if isinstance(json_examples, string_types):
    json_examples = [json_examples]

  examples, families, rules = [], [], []
  for je in json_examples:
    try:
      e = json.loads(je)
    except (ValueError, TypeError):
      # Surface malformed json with a traceback, then re-raise.
      traceback.print_exc()
      raise
    rules.append(e['question'])
    examples.append(e)
    families.append(e['family'])

  epochs = examples[0]['epochs']
  static_objsets = static_objsets_from_examples(examples)
  # n_distractor / average_memory_span are unused when objsets are given.
  values = generate_batch(rules, n_epoch=epochs,
                          img_size=112, objsets=static_objsets,
                          n_distractor=0,
                          average_memory_span=0)
  return values + (families,)
def generate_feeds(tasks, hparams, dataparams=None):
  """Generate feed dict values for the placeholders.

  Args:
    tasks: a list of tg.Task instances, length is batch_size.
    hparams: hyperparameters in tf.HParams format; n_epoch may be an int or a
      (low, high) pair sampled inclusively.
    dataparams: dictionary with 'n_distractor' and 'average_memory_span'.

  Returns:
    The tuple from generate_batch: in_imgs, in_rule, seq_length, out_pnt,
    out_pnt_xy, out_word, mask_pnt, mask_word.
  """
  n_epoch = hparams.n_epoch
  if not isinstance(n_epoch, int):
    # A range was given: draw a number of epochs uniformly (inclusive).
    n_epoch = random.randrange(n_epoch[0], n_epoch[1] + 1)
  return generate_batch(
      tasks,
      n_epoch=n_epoch,
      img_size=112,
      n_distractor=dataparams['n_distractor'],
      average_memory_span=dataparams['average_memory_span']
  )
| google/cog | cognitive/train_utils.py | Python | apache-2.0 | 9,504 |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.admanager.jaxws.v202108;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
*
* Contains third party auto-pixeling settings for cross-sell Partners.
*
*
* <p>Java class for ThirdPartyMeasurementSettings complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ThirdPartyMeasurementSettings">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="viewabilityPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyViewabilityIntegrationPartner" minOccurs="0"/>
* <element name="viewabilityClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="viewabilityReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherViewabilityPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyViewabilityIntegrationPartner" minOccurs="0"/>
* <element name="publisherViewabilityClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherViewabilityReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="brandLiftPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyBrandLiftIntegrationPartner" minOccurs="0"/>
* <element name="brandLiftClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="brandLiftReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="reachPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyReachIntegrationPartner" minOccurs="0"/>
* <element name="reachClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="reachReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherReachPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyReachIntegrationPartner" minOccurs="0"/>
* <element name="publisherReachClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherReachReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ThirdPartyMeasurementSettings", propOrder = {
"viewabilityPartner",
"viewabilityClientId",
"viewabilityReportingId",
"publisherViewabilityPartner",
"publisherViewabilityClientId",
"publisherViewabilityReportingId",
"brandLiftPartner",
"brandLiftClientId",
"brandLiftReportingId",
"reachPartner",
"reachClientId",
"reachReportingId",
"publisherReachPartner",
"publisherReachClientId",
"publisherReachReportingId"
})
/* NOTE(review): JAXB-generated value holder; field names must match the
 * propOrder declared in the @XmlType annotation above. Keep hand edits to
 * comments only. */
public class ThirdPartyMeasurementSettings {

    // Viewability measurement settings.
    @XmlSchemaType(name = "string")
    protected ThirdPartyViewabilityIntegrationPartner viewabilityPartner;
    protected String viewabilityClientId;
    protected String viewabilityReportingId;
    // Publisher-side viewability settings.
    @XmlSchemaType(name = "string")
    protected ThirdPartyViewabilityIntegrationPartner publisherViewabilityPartner;
    protected String publisherViewabilityClientId;
    protected String publisherViewabilityReportingId;
    // Brand lift measurement settings.
    @XmlSchemaType(name = "string")
    protected ThirdPartyBrandLiftIntegrationPartner brandLiftPartner;
    protected String brandLiftClientId;
    protected String brandLiftReportingId;
    // Reach measurement settings.
    @XmlSchemaType(name = "string")
    protected ThirdPartyReachIntegrationPartner reachPartner;
    protected String reachClientId;
    protected String reachReportingId;
    // Publisher-side reach settings.
    @XmlSchemaType(name = "string")
    protected ThirdPartyReachIntegrationPartner publisherReachPartner;
    protected String publisherReachClientId;
    protected String publisherReachReportingId;

    /**
     * Gets the value of the viewabilityPartner property.
     *
     * @return
     *     possible object is
     *     {@link ThirdPartyViewabilityIntegrationPartner }
     *
     */
    public ThirdPartyViewabilityIntegrationPartner getViewabilityPartner() {
        return viewabilityPartner;
    }

    /**
     * Sets the value of the viewabilityPartner property.
     *
     * @param value
     *     allowed object is
     *     {@link ThirdPartyViewabilityIntegrationPartner }
     *
     */
    public void setViewabilityPartner(ThirdPartyViewabilityIntegrationPartner value) {
        this.viewabilityPartner = value;
    }

    /**
     * Gets the value of the viewabilityClientId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getViewabilityClientId() {
        return viewabilityClientId;
    }

    /**
     * Sets the value of the viewabilityClientId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setViewabilityClientId(String value) {
        this.viewabilityClientId = value;
    }

    /**
     * Gets the value of the viewabilityReportingId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getViewabilityReportingId() {
        return viewabilityReportingId;
    }

    /**
     * Sets the value of the viewabilityReportingId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setViewabilityReportingId(String value) {
        this.viewabilityReportingId = value;
    }

    /**
     * Gets the value of the publisherViewabilityPartner property.
     *
     * @return
     *     possible object is
     *     {@link ThirdPartyViewabilityIntegrationPartner }
     *
     */
    public ThirdPartyViewabilityIntegrationPartner getPublisherViewabilityPartner() {
        return publisherViewabilityPartner;
    }

    /**
     * Sets the value of the publisherViewabilityPartner property.
     *
     * @param value
     *     allowed object is
     *     {@link ThirdPartyViewabilityIntegrationPartner }
     *
     */
    public void setPublisherViewabilityPartner(ThirdPartyViewabilityIntegrationPartner value) {
        this.publisherViewabilityPartner = value;
    }

    /**
     * Gets the value of the publisherViewabilityClientId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getPublisherViewabilityClientId() {
        return publisherViewabilityClientId;
    }

    /**
     * Sets the value of the publisherViewabilityClientId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setPublisherViewabilityClientId(String value) {
        this.publisherViewabilityClientId = value;
    }

    /**
     * Gets the value of the publisherViewabilityReportingId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getPublisherViewabilityReportingId() {
        return publisherViewabilityReportingId;
    }

    /**
     * Sets the value of the publisherViewabilityReportingId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setPublisherViewabilityReportingId(String value) {
        this.publisherViewabilityReportingId = value;
    }

    /**
     * Gets the value of the brandLiftPartner property.
     *
     * @return
     *     possible object is
     *     {@link ThirdPartyBrandLiftIntegrationPartner }
     *
     */
    public ThirdPartyBrandLiftIntegrationPartner getBrandLiftPartner() {
        return brandLiftPartner;
    }

    /**
     * Sets the value of the brandLiftPartner property.
     *
     * @param value
     *     allowed object is
     *     {@link ThirdPartyBrandLiftIntegrationPartner }
     *
     */
    public void setBrandLiftPartner(ThirdPartyBrandLiftIntegrationPartner value) {
        this.brandLiftPartner = value;
    }

    /**
     * Gets the value of the brandLiftClientId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getBrandLiftClientId() {
        return brandLiftClientId;
    }

    /**
     * Sets the value of the brandLiftClientId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setBrandLiftClientId(String value) {
        this.brandLiftClientId = value;
    }

    /**
     * Gets the value of the brandLiftReportingId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getBrandLiftReportingId() {
        return brandLiftReportingId;
    }

    /**
     * Sets the value of the brandLiftReportingId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setBrandLiftReportingId(String value) {
        this.brandLiftReportingId = value;
    }

    /**
     * Gets the value of the reachPartner property.
     *
     * @return
     *     possible object is
     *     {@link ThirdPartyReachIntegrationPartner }
     *
     */
    public ThirdPartyReachIntegrationPartner getReachPartner() {
        return reachPartner;
    }

    /**
     * Sets the value of the reachPartner property.
     *
     * @param value
     *     allowed object is
     *     {@link ThirdPartyReachIntegrationPartner }
     *
     */
    public void setReachPartner(ThirdPartyReachIntegrationPartner value) {
        this.reachPartner = value;
    }

    /**
     * Gets the value of the reachClientId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getReachClientId() {
        return reachClientId;
    }

    /**
     * Sets the value of the reachClientId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setReachClientId(String value) {
        this.reachClientId = value;
    }

    /**
     * Gets the value of the reachReportingId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getReachReportingId() {
        return reachReportingId;
    }

    /**
     * Sets the value of the reachReportingId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setReachReportingId(String value) {
        this.reachReportingId = value;
    }

    /**
     * Gets the value of the publisherReachPartner property.
     *
     * @return
     *     possible object is
     *     {@link ThirdPartyReachIntegrationPartner }
     *
     */
    public ThirdPartyReachIntegrationPartner getPublisherReachPartner() {
        return publisherReachPartner;
    }

    /**
     * Sets the value of the publisherReachPartner property.
     *
     * @param value
     *     allowed object is
     *     {@link ThirdPartyReachIntegrationPartner }
     *
     */
    public void setPublisherReachPartner(ThirdPartyReachIntegrationPartner value) {
        this.publisherReachPartner = value;
    }

    /**
     * Gets the value of the publisherReachClientId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getPublisherReachClientId() {
        return publisherReachClientId;
    }

    /**
     * Sets the value of the publisherReachClientId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setPublisherReachClientId(String value) {
        this.publisherReachClientId = value;
    }

    /**
     * Gets the value of the publisherReachReportingId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getPublisherReachReportingId() {
        return publisherReachReportingId;
    }

    /**
     * Sets the value of the publisherReachReportingId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setPublisherReachReportingId(String value) {
        this.publisherReachReportingId = value;
    }

}
| googleads/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202108/ThirdPartyMeasurementSettings.java | Java | apache-2.0 | 13,628 |
package it.unimi.di.law.bubing.sieve;
/*
* Copyright (C) 2010-2017 Paolo Boldi, Massimo Santini, and Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import it.unimi.dsi.sux4j.mph.AbstractHashFunction;
import java.io.IOException;
//RELEASE-STATUS: DIST
/** A sieve that simply (and immediately) copies {@linkplain #enqueue(Object, Object) enqueued keys} to the {@linkplain #setNewFlowRecevier(it.unimi.di.law.bubing.sieve.AbstractSieve.NewFlowReceiver) new flow receiver}.
*
* <p>Note that instances of this class call {@link AbstractSieve.NewFlowReceiver#prepareToAppend()} in the constructor only, and
* {@link AbstractSieve.NewFlowReceiver#noMoreAppend()} in the method {@link #close()} only.
*/
public final class IdentitySieve<K, V> extends AbstractSieve<K, V> {

    /**
     * Creates an identity sieve that forwards every enqueued key to the given receiver.
     *
     * @param newFlowReceiver the receiver that will be handed every enqueued key.
     * @param keySerDeser the key serializer/deserializer.
     * @param valueSerDeser the value serializer/deserializer.
     * @param hashingStrategy the hashing strategy.
     * @param updateStrategy the update strategy.
     * @throws IOException if {@link NewFlowReceiver#prepareToAppend()} fails.
     */
    public IdentitySieve(final NewFlowReceiver<K> newFlowReceiver, final ByteSerializerDeserializer<K> keySerDeser, final ByteSerializerDeserializer<V> valueSerDeser, final AbstractHashFunction<K> hashingStrategy, final UpdateStrategy<K, V> updateStrategy) throws IOException {
        super(keySerDeser, valueSerDeser, hashingStrategy, updateStrategy);
        setNewFlowRecevier(newFlowReceiver);
        // Per the class contract, prepareToAppend() is invoked here only;
        // the matching noMoreAppend() is invoked by close() only.
        newFlowReceiver.prepareToAppend();
    }

    /** Immediately appends the key to the new flow receiver (hash 0, value ignored) and returns {@code false}. */
    @Override
    public boolean enqueue(K key, V value) throws IOException {
        newFlowReceiver.append(0, key);
        return false;
    }

    /** Signals the receiver that no more keys will be appended. */
    @Override
    public void close() throws IOException {
        newFlowReceiver.noMoreAppend();
    }

    /** No-op: keys are forwarded eagerly, so there is nothing to flush. */
    @Override
    public void flush() throws IOException, InterruptedException {}
}
| LAW-Unimi/BUbiNG | src/it/unimi/di/law/bubing/sieve/IdentitySieve.java | Java | apache-2.0 | 2,029 |
(function () {
'use strict';
angular.module('Pedal2Play')
.directive('logoutModal', function ()
{
return {
restrict: 'E',
templateUrl: 'partials/logout.modal.html'
};
});
})(); | kaelvofraga/Pedal-to-Play-App | www/js/directives/logout-modal.js | JavaScript | apache-2.0 | 230 |
/**
 * Enumerates the player's lifecycle states.
 *
 * Numeric enum: members are auto-assigned 0..3 in declaration order, so do
 * not reorder without checking any persisted/serialized values.
 */
export enum PlayerState {
    LoadingAssets, // = 0: assets are being loaded
    Ready,         // = 1: loading finished
    Disconnected,  // = 2
    Idle           // = 3
} | dweng0/wildflower | source/interface/assets/playerstate.ts | TypeScript | apache-2.0 | 120 |
/* Copyright 2013-present Barefoot Networks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Antonin Bas (antonin@barefootnetworks.com)
*
*/
#include <gtest/gtest.h>
#include <memory>
#include <string>
#include <mutex>
#include <thread>
#include <condition_variable>
#include <chrono>
#include <cassert>
#include "bm_sim/learning.h"
using std::chrono::milliseconds;
using std::chrono::duration_cast;
using std::this_thread::sleep_for;
// Test double for LearnWriter: a bounded, in-memory rendezvous buffer.
// A writer fills the buffer via send()/send_msgs(), which then blocks further
// writes until a reader drains it with read(). Synchronization is one mutex
// plus two condition variables toggling between CAN_WRITE and CAN_READ.
class MemoryAccessor : public LearnWriter {
public:
  enum class Status { CAN_READ, CAN_WRITE };
public:
  MemoryAccessor(size_t max_size)
    : max_size(max_size), status(Status::CAN_WRITE) {
    buffer_.reserve(max_size);
  }

  // Appends len bytes; blocks until the buffer is writable.
  // Returns -1 if the payload exceeds max_size, 0 on success.
  int send(const char *buffer, size_t len) const override {
    if(len > max_size) return -1;
    std::unique_lock<std::mutex> lock(mutex);
    while(status != Status::CAN_WRITE) {
      can_write.wait(lock);
    }
    buffer_.insert(buffer_.end(), buffer, buffer + len);
    status = Status::CAN_READ;
    can_read.notify_one();
    return 0;
  }

  // Appends all message fragments as a single atomic write.
  // Returns -1 if the combined payload exceeds max_size, 0 on success.
  int send_msgs(
      const std::initializer_list<TransportIface::MsgBuf> &msgs
  ) const override
  {
    size_t len = 0;
    for(const auto &msg : msgs) {
      len += msg.len;
    }
    if(len > max_size) return -1;
    std::unique_lock<std::mutex> lock(mutex);
    while(status != Status::CAN_WRITE) {
      can_write.wait(lock);
    }
    for(const auto &msg : msgs) {
      buffer_.insert(buffer_.end(), msg.buf, msg.buf + msg.len);
    }
    status = Status::CAN_READ;
    can_read.notify_one();
    return 0;
  }

  // Blocks until data is available, copies up to len bytes into dst, clears
  // the buffer and makes it writable again. Returns 0.
  int read(char *dst, size_t len) const {
    len = (len > max_size) ? max_size : len;
    std::unique_lock<std::mutex> lock(mutex);
    while(status != Status::CAN_READ) {
      can_read.wait(lock);
    }
    // Fix: clamp to the number of bytes actually buffered. Previously, a
    // caller requesting more bytes than were written (e.g. read(buffer,
    // sizeof(buffer))) made std::copy read past the end of buffer_ — UB.
    if(len > buffer_.size()) len = buffer_.size();
    std::copy(buffer_.begin(), buffer_.begin() + len, dst);
    buffer_.clear();
    status = Status::CAN_WRITE;
    can_write.notify_one();
    return 0;
  }

  Status check_status() {
    std::unique_lock<std::mutex> lock(mutex);
    return status;
  }

private:
  // dirty trick (mutable) to make sure that send() const is override
  mutable std::vector<char> buffer_;
  size_t max_size;
  mutable Status status;
  mutable std::mutex mutex;
  mutable std::condition_variable can_write;
  mutable std::condition_variable can_read;
};
// Google Test fixture for learning tests
class LearningTest : public ::testing::Test {
protected:
  typedef std::chrono::high_resolution_clock clock;

protected:
  PHVFactory phv_factory;
  HeaderType testHeaderType;
  header_id_t testHeader1{0}, testHeader2{1};

  // Shared rendezvous buffer the LearnEngine writes learn samples into.
  std::shared_ptr<MemoryAccessor> learn_writer;

  char buffer[4096];

  // used exclusively for callback mode
  LearnEngine::msg_hdr_t cb_hdr;
  bool cb_written;
  mutable std::mutex cb_written_mutex;
  mutable std::condition_variable cb_written_cv;

  LearnEngine learn_engine;

  clock::time_point start;

  // Builds a header type with a 16-bit and a 48-bit field, instantiated
  // twice (test1, test2) in the PHV factory.
  LearningTest()
    : testHeaderType("test_t", 0),
      learn_writer(new MemoryAccessor(4096)) {
    testHeaderType.push_back_field("f16", 16);
    testHeaderType.push_back_field("f48", 48);
    phv_factory.push_back_header("test1", testHeader1, testHeaderType);
    phv_factory.push_back_header("test2", testHeader2, testHeaderType);
  }

  virtual void SetUp() {
    Packet::set_phv_factory(phv_factory);
    // Timing reference for timeout-related assertions.
    start = clock::now();
  }

  virtual void TearDown() {
    Packet::unset_phv_factory();
  }

  // Creates a learn list keyed on field test1.f16, wired to learn_writer.
  void learn_on_test1_f16(LearnEngine::list_id_t list_id,
                          size_t max_samples, unsigned timeout_ms) {
    learn_engine.list_create(list_id, max_samples, timeout_ms);
    learn_engine.list_set_learn_writer(list_id, learn_writer);
    learn_engine.list_push_back_field(list_id, testHeader1, 0); // test1.f16
    learn_engine.list_init(list_id);
  }

  Packet get_pkt() {
    // dummy packet, won't be parsed
    return Packet(0, 0, 0, 128, PacketBuffer(256));
  }

  // Instance side of the callback: records the header and payload under the
  // mutex and signals any waiter.
  void learn_cb_(LearnEngine::msg_hdr_t msg_hdr, size_t size,
                 std::unique_ptr<char []> data) {
    std::unique_lock<std::mutex> lock(cb_written_mutex);
    std::copy(&data[0], &data[size], buffer);
    cb_hdr = msg_hdr;
    cb_written = true;
    cb_written_cv.notify_one();
  }

  // Static trampoline passed to the learn engine; cookie is the fixture.
  static void learn_cb(LearnEngine::msg_hdr_t msg_hdr, size_t size,
                       std::unique_ptr<char []> data, void *cookie) {
    assert(size <= sizeof(buffer));
    ((LearningTest *) cookie)->learn_cb_(msg_hdr, size, std::move(data));
  }
};
// With max_samples == 1, a single learn() must flush immediately: the
// message header and the 16-bit sample value are checked byte by byte.
TEST_F(LearningTest, OneSample) {
  LearnEngine::list_id_t list_id = 1;
  size_t max_samples = 1; unsigned timeout_ms = 100;
  learn_on_test1_f16(list_id, max_samples, timeout_ms);

  Packet pkt = get_pkt();
  Field &f = pkt.get_phv()->get_field(testHeader1, 0);
  f.set("0xaba");

  learn_engine.learn(list_id, pkt);
  learn_writer->read(buffer, sizeof(buffer));

  LearnEngine::msg_hdr_t *msg_hdr = (LearnEngine::msg_hdr_t *) buffer;
  const char *data = buffer + sizeof(LearnEngine::msg_hdr_t);
  ASSERT_EQ(0, msg_hdr->switch_id);
  ASSERT_EQ(list_id, msg_hdr->list_id);
  ASSERT_EQ(0u, msg_hdr->buffer_id);
  ASSERT_EQ(1u, msg_hdr->num_samples);
  // 0xaba as a big-endian 16-bit value: 0x0a, 0xba.
  ASSERT_EQ((char) 0xa, data[0]);
  ASSERT_EQ((char) 0xba, data[1]);
}
// With max_samples == 2 but only one sample learned, the buffer is not full;
// the message must be flushed when the 100ms timeout fires.
TEST_F(LearningTest, OneSampleTimeout) {
LearnEngine::list_id_t list_id = 1;
size_t max_samples = 2; unsigned timeout_ms = 100;
learn_on_test1_f16(list_id, max_samples, timeout_ms);
Packet pkt = get_pkt();
Field &f = pkt.get_phv()->get_field(testHeader1, 0);
f.set("0xaba");
learn_engine.learn(list_id, pkt);
// blocks until the timeout-triggered transmission happens
learn_writer->read(buffer, sizeof(buffer));
clock::time_point end = clock::now();
LearnEngine::msg_hdr_t *msg_hdr = (LearnEngine::msg_hdr_t *) buffer;
const char *data = buffer + sizeof(LearnEngine::msg_hdr_t);
ASSERT_EQ(0, msg_hdr->switch_id);
ASSERT_EQ(list_id, msg_hdr->list_id);
ASSERT_EQ(0u, msg_hdr->buffer_id);
ASSERT_EQ(1u, msg_hdr->num_samples);
ASSERT_EQ((char) 0xa, data[0]);
ASSERT_EQ((char) 0xba, data[1]);
// check that the timeout was on time :)
// NOTE(review): +/-20ms tolerance may be flaky on heavily loaded CI hosts
unsigned int elapsed = duration_cast<milliseconds>(end - start).count();
ASSERT_GT(elapsed, timeout_ms - 20u);
ASSERT_LT(elapsed, timeout_ms + 20u);
}
// timeout_ms == 0 disables the timeout: a partially filled buffer
// (1 of 2 samples) must never be transmitted on its own.
TEST_F(LearningTest, NoTimeout) {
LearnEngine::list_id_t list_id = 1;
size_t max_samples = 2; unsigned timeout_ms = 0;
learn_on_test1_f16(list_id, max_samples, timeout_ms);
Packet pkt = get_pkt();
Field &f = pkt.get_phv()->get_field(testHeader1, 0);
f.set("0xaba");
learn_engine.learn(list_id, pkt);
ASSERT_NE(MemoryAccessor::Status::CAN_READ, learn_writer->check_status());
sleep_for(milliseconds(1000));
// if we still cannot read after 1s, that means that nothing was written
// i.e. no timeout happened
// 1s was chosen arbitrarily
ASSERT_NE(MemoryAccessor::Status::CAN_READ, learn_writer->check_status());
}
// A learn list whose sample is a constant (not a packet field): one learn
// fills the single-sample buffer and the constant bytes are transmitted.
// Uses a local buffer/writer sized exactly to one message.
TEST_F(LearningTest, OneSampleConstData) {
LearnEngine::list_id_t list_id = 1;
size_t max_samples = 1;
unsigned int timeout_ms = 500;
char buffer[sizeof(LearnEngine::msg_hdr_t) + 2];  // shadows the fixture buffer
std::shared_ptr<MemoryAccessor> learn_writer(new
MemoryAccessor(sizeof(buffer)));
learn_engine.list_create(list_id, max_samples, timeout_ms);
learn_engine.list_set_learn_writer(list_id, learn_writer);
learn_engine.list_push_back_constant(list_id, "0xaba"); // 2 bytes
learn_engine.list_init(list_id);
Packet pkt = get_pkt();
learn_engine.learn(list_id, pkt);
learn_writer->read(buffer, sizeof(buffer));
LearnEngine::msg_hdr_t *msg_hdr = (LearnEngine::msg_hdr_t *) buffer;
const char *data = buffer + sizeof(LearnEngine::msg_hdr_t);
ASSERT_EQ(0, msg_hdr->switch_id);
ASSERT_EQ(list_id, msg_hdr->list_id);
ASSERT_EQ(0u, msg_hdr->buffer_id);
ASSERT_EQ(1u, msg_hdr->num_samples);
ASSERT_EQ((char) 0xa, data[0]);
ASSERT_EQ((char) 0xba, data[1]);
}
// Two distinct samples in a 3-sample buffer: nothing is sent while the
// buffer is partially full (checked at 100ms), then the 200ms timeout
// flushes both samples in one message.
TEST_F(LearningTest, TwoSampleTimeout) {
LearnEngine::list_id_t list_id = 1;
size_t max_samples = 3; unsigned timeout_ms = 200;
learn_on_test1_f16(list_id, max_samples, timeout_ms);
Packet pkt = get_pkt();
Field &f = pkt.get_phv()->get_field(testHeader1, 0);
f.set("0xaba");
learn_engine.learn(list_id, pkt);
sleep_for(milliseconds(100));
ASSERT_NE(MemoryAccessor::Status::CAN_READ, learn_writer->check_status());
f.set("0xabb");
learn_engine.learn(list_id, pkt);
learn_writer->read(buffer, sizeof(buffer));
clock::time_point end = clock::now();
LearnEngine::msg_hdr_t *msg_hdr = (LearnEngine::msg_hdr_t *) buffer;
const char *data = buffer + sizeof(LearnEngine::msg_hdr_t);
ASSERT_EQ(0u, msg_hdr->buffer_id);
ASSERT_EQ(2u, msg_hdr->num_samples);
// samples are laid out back to back: 0x0aba then 0x0abb
ASSERT_EQ((char) 0xa, data[0]);
ASSERT_EQ((char) 0xba, data[1]);
ASSERT_EQ((char) 0xa, data[2]);
ASSERT_EQ((char) 0xbb, data[3]);
// check that the timeout was on time :)
unsigned int elapsed = duration_cast<milliseconds>(end - start).count();
ASSERT_GT(elapsed, timeout_ms - 20u);
ASSERT_LT(elapsed, timeout_ms + 20u);
}
// The learn filter must suppress duplicates: learning the same value twice
// counts once, so a second distinct value is needed to fill the buffer.
TEST_F(LearningTest, Filter) {
LearnEngine::list_id_t list_id = 1;
size_t max_samples = 2; unsigned timeout_ms = 0;
learn_on_test1_f16(list_id, max_samples, timeout_ms);
Packet pkt = get_pkt();
Field &f = pkt.get_phv()->get_field(testHeader1, 0);
f.set("0xaba");
learn_engine.learn(list_id, pkt);
learn_engine.learn(list_id, pkt);  // duplicate, filtered out
sleep_for(milliseconds(100));
ASSERT_NE(MemoryAccessor::Status::CAN_READ, learn_writer->check_status());
f.set("0xabb");
learn_engine.learn(list_id, pkt);
learn_writer->read(buffer, sizeof(buffer));
LearnEngine::msg_hdr_t *msg_hdr = (LearnEngine::msg_hdr_t *) buffer;
const char *data = buffer + sizeof(LearnEngine::msg_hdr_t);
ASSERT_EQ(0u, msg_hdr->buffer_id);
ASSERT_EQ(2u, msg_hdr->num_samples);
ASSERT_EQ((char) 0xa, data[0]);
ASSERT_EQ((char) 0xba, data[1]);
ASSERT_EQ((char) 0xa, data[2]);
ASSERT_EQ((char) 0xbb, data[3]);
}
// Acking a sample removes it from the filter: until then re-learning the
// same value is suppressed; after ack(list, buffer, sample) the value can
// be learned again and the buffer id is incremented.
TEST_F(LearningTest, FilterAck) {
LearnEngine::list_id_t list_id = 1;
size_t max_samples = 1; unsigned timeout_ms = 0;
learn_on_test1_f16(list_id, max_samples, timeout_ms);
Packet pkt = get_pkt();
Field &f = pkt.get_phv()->get_field(testHeader1, 0);
f.set("0xaba");
LearnEngine::msg_hdr_t *msg_hdr = (LearnEngine::msg_hdr_t *) buffer;
const char *data = buffer + sizeof(LearnEngine::msg_hdr_t);
learn_engine.learn(list_id, pkt);
learn_writer->read(buffer, sizeof(buffer));
ASSERT_EQ(0u, msg_hdr->buffer_id);
ASSERT_EQ(1u, msg_hdr->num_samples);
ASSERT_EQ((char) 0xa, data[0]);
ASSERT_EQ((char) 0xba, data[1]);
learn_engine.learn(list_id, pkt); // cannot learn a second time
sleep_for(milliseconds(100));
ASSERT_NE(MemoryAccessor::Status::CAN_READ, learn_writer->check_status());
learn_engine.ack(list_id, 0, 0); // ack and learn again
learn_engine.learn(list_id, pkt);
learn_writer->read(buffer, sizeof(buffer));
ASSERT_EQ(1u, msg_hdr->buffer_id); // buffer id was incremented
ASSERT_EQ(1u, msg_hdr->num_samples);
ASSERT_EQ((char) 0xa, data[0]);
ASSERT_EQ((char) 0xba, data[1]);
}
// Same as FilterAck but acks a list of sample ids ({0, 1}) in one call;
// both values become learnable again afterwards.
TEST_F(LearningTest, FilterAcks) {
LearnEngine::list_id_t list_id = 1;
size_t max_samples = 2; unsigned timeout_ms = 0;
learn_on_test1_f16(list_id, max_samples, timeout_ms);
LearnEngine::msg_hdr_t *msg_hdr = (LearnEngine::msg_hdr_t *) buffer;
Packet pkt = get_pkt();
Field &f = pkt.get_phv()->get_field(testHeader1, 0);
f.set("0xaba");
learn_engine.learn(list_id, pkt);
f.set("0xabb");
learn_engine.learn(list_id, pkt);
learn_writer->read(buffer, sizeof(buffer));
ASSERT_EQ(0u, msg_hdr->buffer_id);
ASSERT_EQ(2u, msg_hdr->num_samples);
learn_engine.learn(list_id, pkt); // cannot learn a second time
sleep_for(milliseconds(100));
ASSERT_NE(MemoryAccessor::Status::CAN_READ, learn_writer->check_status());
learn_engine.ack(list_id, 0, {0, 1}); // ack both samples and learn again
f.set("0xaba");
learn_engine.learn(list_id, pkt);
f.set("0xabb");
learn_engine.learn(list_id, pkt);
learn_writer->read(buffer, sizeof(buffer));
ASSERT_EQ(1u, msg_hdr->buffer_id); // buffer id was incremented
ASSERT_EQ(2u, msg_hdr->num_samples);
}
// Same scenario as FilterAcks but uses ack_buffer() to clear the whole
// buffer from the filter in one call.
TEST_F(LearningTest, FilterAckBuffer) {
LearnEngine::list_id_t list_id = 1;
size_t max_samples = 2; unsigned timeout_ms = 0;
learn_on_test1_f16(list_id, max_samples, timeout_ms);
LearnEngine::msg_hdr_t *msg_hdr = (LearnEngine::msg_hdr_t *) buffer;
Packet pkt = get_pkt();
Field &f = pkt.get_phv()->get_field(testHeader1, 0);
f.set("0xaba");
learn_engine.learn(list_id, pkt);
f.set("0xabb");
learn_engine.learn(list_id, pkt);
learn_writer->read(buffer, sizeof(buffer));
ASSERT_EQ(0u, msg_hdr->buffer_id);
ASSERT_EQ(2u, msg_hdr->num_samples);
learn_engine.learn(list_id, pkt); // cannot learn a second time
sleep_for(milliseconds(100));
ASSERT_NE(MemoryAccessor::Status::CAN_READ, learn_writer->check_status());
learn_engine.ack_buffer(list_id, 0); // ack whole buffer
f.set("0xaba");
learn_engine.learn(list_id, pkt);
f.set("0xabb");
learn_engine.learn(list_id, pkt);
learn_writer->read(buffer, sizeof(buffer));
ASSERT_EQ(1u, msg_hdr->buffer_id); // buffer id was incremented
ASSERT_EQ(2u, msg_hdr->num_samples);
}
// Callback transport instead of the shared-memory writer: the static
// learn_cb trampoline copies the message into the fixture buffer and the
// test waits on the condition variable before checking header and payload.
TEST_F(LearningTest, OneSampleCbMode) {
LearnEngine::list_id_t list_id = 1;
size_t max_samples = 1; unsigned timeout_ms = 100;
learn_engine.list_create(list_id, max_samples, timeout_ms);
learn_engine.list_set_learn_cb(list_id, LearningTest::learn_cb, this);
learn_engine.list_push_back_field(list_id, testHeader1, 0); // test1.f16
learn_engine.list_init(list_id);
cb_written = false;
Packet pkt = get_pkt();
Field &f = pkt.get_phv()->get_field(testHeader1, 0);
f.set("0xaba");
learn_engine.learn(list_id, pkt);
// wait (loop guards against spurious wakeups) for the callback to fire
std::unique_lock<std::mutex> lock(cb_written_mutex);
while(!cb_written) {
cb_written_cv.wait(lock);
}
const char *data = buffer;  // callback mode: buffer holds the samples only
ASSERT_EQ(0, cb_hdr.switch_id);
ASSERT_EQ(list_id, cb_hdr.list_id);
ASSERT_EQ(0u, cb_hdr.buffer_id);
ASSERT_EQ(1u, cb_hdr.num_samples);
ASSERT_EQ((char) 0xa, data[0]);
ASSERT_EQ((char) 0xba, data[1]);
}
| notmem/ops-p4dp | submodules/bm/tests/test_learning.cpp | C++ | apache-2.0 | 14,432 |
'use strict';

/**
 * Controller for the "create organization" screen.
 *
 * Requires an active session, creates an organization through the
 * Organizations service, and on success shows a confirmation message before
 * redirecting back to the organizations list after 2 seconds.
 */
angular.module('angularPHP')
  .controller('CreateorganizationCtrl', function ($scope, Session, Organizations, $timeout, $location) {
    Session.require();
    $scope.name = "";
    $scope.msg = "";
    $scope.error = "";

    // Invoked by the form; clears previous feedback, then delegates to the
    // Organizations service with success/error callbacks.
    $scope.create = function() {
      $scope.error = "";
      $scope.msg = "";
      Organizations.create($scope.name, function() {
        $scope.msg = "The organization was created successfully";
        // BUG FIX: was '$scope.$apply;' — a bare property access that never
        // ran. The service callback fires outside Angular's digest cycle,
        // so $apply() must actually be invoked for the message to render.
        $scope.$apply();
        $timeout(function() {
          $location.path("/organizations");
        }, 2000);
      }, function(error) {
        $scope.error = error;
        // Same fix as above for the error path.
        $scope.$apply();
      });
    };
  });
| smwa/angular-momentum | app/scripts/controllers/main/createorganization.js | JavaScript | apache-2.0 | 690 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reach oracle element used for configuration."""
import dataclasses
from pyreach.gyms import reach_element
@dataclasses.dataclass(frozen=True)
class ReachOracle(reach_element.ReachElement):
  """A Reach Oracle configuration class.

  Attributes:
    reach_name: The name of the Oracle.
    task_code: The task code string.
    intent: The intention of the task. This argument is optional and defaults
      to an empty string.
    success_type: The type of success. This argument is optional and defaults
      to an empty string.
    is_synchronous: If True, the next Gym observation will synchronize all
      observation elements that have this flag set; otherwise the next
      observation is asynchronous. This argument is optional and defaults to
      False.
  """
  task_code: str
  intent: str = ""
  success_type: str = ""
  is_synchronous: bool = False
| google-research/pyreach | pyreach/gyms/oracle_element.py | Python | apache-2.0 | 1,448 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticbeanstalk.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Request to disassociate the operations role from an environment.
* </p>
*
* @see <a
* href="http://docs.aws.amazon.com/goto/WebAPI/elasticbeanstalk-2010-12-01/DisassociateEnvironmentOperationsRole"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DisassociateEnvironmentOperationsRoleRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The name of the environment from which to disassociate the operations role.
* </p>
*/
private String environmentName;
/**
* <p>
* The name of the environment from which to disassociate the operations role.
* </p>
*
* @param environmentName
* The name of the environment from which to disassociate the operations role.
*/
public void setEnvironmentName(String environmentName) {
this.environmentName = environmentName;
}
/**
* <p>
* The name of the environment from which to disassociate the operations role.
* </p>
*
* @return The name of the environment from which to disassociate the operations role.
*/
public String getEnvironmentName() {
return this.environmentName;
}
/**
* <p>
* The name of the environment from which to disassociate the operations role.
* </p>
*
* @param environmentName
* The name of the environment from which to disassociate the operations role.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DisassociateEnvironmentOperationsRoleRequest withEnvironmentName(String environmentName) {
setEnvironmentName(environmentName);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getEnvironmentName() != null)
sb.append("EnvironmentName: ").append(getEnvironmentName());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DisassociateEnvironmentOperationsRoleRequest == false)
return false;
DisassociateEnvironmentOperationsRoleRequest other = (DisassociateEnvironmentOperationsRoleRequest) obj;
if (other.getEnvironmentName() == null ^ this.getEnvironmentName() == null)
return false;
if (other.getEnvironmentName() != null && other.getEnvironmentName().equals(this.getEnvironmentName()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getEnvironmentName() == null) ? 0 : getEnvironmentName().hashCode());
return hashCode;
}
@Override
public DisassociateEnvironmentOperationsRoleRequest clone() {
return (DisassociateEnvironmentOperationsRoleRequest) super.clone();
}
}
| aws/aws-sdk-java | aws-java-sdk-elasticbeanstalk/src/main/java/com/amazonaws/services/elasticbeanstalk/model/DisassociateEnvironmentOperationsRoleRequest.java | Java | apache-2.0 | 4,243 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simplesystemsmanagement.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.simplesystemsmanagement.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* DocumentDescription JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DocumentDescriptionJsonUnmarshaller implements Unmarshaller<DocumentDescription, JsonUnmarshallerContext> {

    /**
     * Reads a DocumentDescription object from the JSON token stream held by the
     * given context. Fields are matched by name at one nesting level below the
     * current depth; unknown fields are skipped. Returns {@code null} when the
     * value at the current position is a JSON null.
     */
    public DocumentDescription unmarshall(JsonUnmarshallerContext context) throws Exception {
        DocumentDescription documentDescription = new DocumentDescription();
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // a JSON null maps to a null result object
        if (token == VALUE_NULL) {
            return null;
        }

        // walk the token stream until this object's closing brace is reached
        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("Sha1", targetDepth)) {
                    context.nextToken();
                    documentDescription.setSha1(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Hash", targetDepth)) {
                    context.nextToken();
                    documentDescription.setHash(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("HashType", targetDepth)) {
                    context.nextToken();
                    documentDescription.setHashType(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Name", targetDepth)) {
                    context.nextToken();
                    documentDescription.setName(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("DisplayName", targetDepth)) {
                    context.nextToken();
                    documentDescription.setDisplayName(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("VersionName", targetDepth)) {
                    context.nextToken();
                    documentDescription.setVersionName(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Owner", targetDepth)) {
                    context.nextToken();
                    documentDescription.setOwner(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("CreatedDate", targetDepth)) {
                    context.nextToken();
                    // dates arrive as epoch seconds, hence the unixTimestamp unmarshaller
                    documentDescription.setCreatedDate(DateJsonUnmarshallerFactory.getInstance("unixTimestamp").unmarshall(context));
                }
                if (context.testExpression("Status", targetDepth)) {
                    context.nextToken();
                    documentDescription.setStatus(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("StatusInformation", targetDepth)) {
                    context.nextToken();
                    documentDescription.setStatusInformation(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("DocumentVersion", targetDepth)) {
                    context.nextToken();
                    documentDescription.setDocumentVersion(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Description", targetDepth)) {
                    context.nextToken();
                    documentDescription.setDescription(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Parameters", targetDepth)) {
                    context.nextToken();
                    documentDescription.setParameters(new ListUnmarshaller<DocumentParameter>(DocumentParameterJsonUnmarshaller.getInstance())

                    .unmarshall(context));
                }
                if (context.testExpression("PlatformTypes", targetDepth)) {
                    context.nextToken();
                    documentDescription.setPlatformTypes(new ListUnmarshaller<String>(context.getUnmarshaller(String.class))

                    .unmarshall(context));
                }
                if (context.testExpression("DocumentType", targetDepth)) {
                    context.nextToken();
                    documentDescription.setDocumentType(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("SchemaVersion", targetDepth)) {
                    context.nextToken();
                    documentDescription.setSchemaVersion(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("LatestVersion", targetDepth)) {
                    context.nextToken();
                    documentDescription.setLatestVersion(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("DefaultVersion", targetDepth)) {
                    context.nextToken();
                    documentDescription.setDefaultVersion(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("DocumentFormat", targetDepth)) {
                    context.nextToken();
                    documentDescription.setDocumentFormat(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("TargetType", targetDepth)) {
                    context.nextToken();
                    documentDescription.setTargetType(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Tags", targetDepth)) {
                    context.nextToken();
                    documentDescription.setTags(new ListUnmarshaller<Tag>(TagJsonUnmarshaller.getInstance())

                    .unmarshall(context));
                }
                if (context.testExpression("AttachmentsInformation", targetDepth)) {
                    context.nextToken();
                    documentDescription.setAttachmentsInformation(new ListUnmarshaller<AttachmentInformation>(AttachmentInformationJsonUnmarshaller
                            .getInstance())

                    .unmarshall(context));
                }
                if (context.testExpression("Requires", targetDepth)) {
                    context.nextToken();
                    documentDescription.setRequires(new ListUnmarshaller<DocumentRequires>(DocumentRequiresJsonUnmarshaller.getInstance())

                    .unmarshall(context));
                }
                if (context.testExpression("Author", targetDepth)) {
                    context.nextToken();
                    documentDescription.setAuthor(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("ReviewInformation", targetDepth)) {
                    context.nextToken();
                    documentDescription.setReviewInformation(new ListUnmarshaller<ReviewInformation>(ReviewInformationJsonUnmarshaller.getInstance())

                    .unmarshall(context));
                }
                if (context.testExpression("ApprovedVersion", targetDepth)) {
                    context.nextToken();
                    documentDescription.setApprovedVersion(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("PendingReviewVersion", targetDepth)) {
                    context.nextToken();
                    documentDescription.setPendingReviewVersion(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("ReviewStatus", targetDepth)) {
                    context.nextToken();
                    documentDescription.setReviewStatus(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Category", targetDepth)) {
                    context.nextToken();
                    documentDescription.setCategory(new ListUnmarshaller<String>(context.getUnmarshaller(String.class))

                    .unmarshall(context));
                }
                if (context.testExpression("CategoryEnum", targetDepth)) {
                    context.nextToken();
                    documentDescription.setCategoryEnum(new ListUnmarshaller<String>(context.getUnmarshaller(String.class))

                    .unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // stop once we are back at (or above) the depth we started from
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return documentDescription;
    }

    // lazily-created singleton; the instance is stateless so the benign race
    // on first use cannot corrupt anything
    private static DocumentDescriptionJsonUnmarshaller instance;

    public static DocumentDescriptionJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new DocumentDescriptionJsonUnmarshaller();
        return instance;
    }
}
| aws/aws-sdk-java | aws-java-sdk-ssm/src/main/java/com/amazonaws/services/simplesystemsmanagement/model/transform/DocumentDescriptionJsonUnmarshaller.java | Java | apache-2.0 | 10,561 |
package com.pluralsight.orderfulfillment.customer;
/**
* Domain object for a Customer
*
* @author Michael Hoffman, Pluralsight
*
*/
public class Customer {

   private long id;
   private String firstName;
   private String lastName;
   private String email;

   /** Creates an empty customer with default field values. */
   public Customer() {
   }

   /**
    * Creates a fully populated customer.
    *
    * @param id        unique identifier
    * @param firstName given name
    * @param lastName  family name
    * @param email     contact email address
    */
   public Customer(long id, String firstName, String lastName, String email) {
      this.id = id;
      this.firstName = firstName;
      this.lastName = lastName;
      this.email = email;
   }

   /** @return the id */
   public long getId() {
      return id;
   }

   /** @param id the id to set */
   public void setId(long id) {
      this.id = id;
   }

   /** @return the firstName */
   public String getFirstName() {
      return firstName;
   }

   /** @param firstName the firstName to set */
   public void setFirstName(String firstName) {
      this.firstName = firstName;
   }

   /** @return the lastName */
   public String getLastName() {
      return lastName;
   }

   /** @param lastName the lastName to set */
   public void setLastName(String lastName) {
      this.lastName = lastName;
   }

   /** @return the email */
   public String getEmail() {
      return email;
   }

   /** @param email the email to set */
   public void setEmail(String email) {
      this.email = email;
   }

   /**
    * Renders the customer as {@code Customer [id=..., firstName=..., ...]},
    * omitting any name/email field that is null.
    */
   @Override
   public String toString() {
      StringBuilder text = new StringBuilder("Customer [id=").append(id).append(", ");
      if (firstName != null) {
         text.append("firstName=").append(firstName).append(", ");
      }
      if (lastName != null) {
         text.append("lastName=").append(lastName).append(", ");
      }
      if (email != null) {
         text.append("email=").append(email);
      }
      return text.append("]").toString();
   }

}
| mikevoxcap/spring4-sample | src/main/java/com/pluralsight/orderfulfillment/customer/Customer.java | Java | apache-2.0 | 2,192 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import apache_beam as beam
import logging
from typing import List, Dict, Any
from uploaders.google_ads.customer_match.abstract_uploader import GoogleAdsCustomerMatchAbstractUploaderDoFn
from uploaders import utils as utils
from models.execution import DestinationType, AccountConfig
from models.oauth_credentials import OAuthCredentials
class GoogleAdsCustomerMatchMobileUploaderDoFn(GoogleAdsCustomerMatchAbstractUploaderDoFn):
  """Customer Match uploader keyed by mobile advertising id."""

  def get_list_definition(self, account_config: AccountConfig,
                          destination_metadata: List[str]) -> Dict[str, Any]:
    """Builds the user-list definition for a MOBILE_ADVERTISING_ID list.

    Args:
      account_config: account-level configuration; supplies the default app id.
      destination_metadata: metadata list where entry 0 is the list name and
        the optional entry 3, when non-empty, overrides the default app id.

    Returns:
      The user list definition dict expected by the Google Ads API.
    """
    list_name = destination_metadata[0]
    # A non-empty 4th metadata entry overrides the account-level app id.
    custom_app_id = destination_metadata[3] if len(destination_metadata) >= 4 else ''
    app_id = custom_app_id if len(custom_app_id) > 0 else account_config.app_id
    return {
        'membership_status': 'OPEN',
        'name': list_name,
        'description': 'List created automatically by Megalista',
        'membership_life_span': 10000,
        'crm_based_user_list': {
            # valid key types: CONTACT_INFO, CRM_ID, MOBILE_ADVERTISING_ID
            'upload_key_type': 'MOBILE_ADVERTISING_ID',
            'data_source_type': 'FIRST_PARTY',
            'app_id': app_id
        }
    }

  def get_row_keys(self) -> List[str]:
    """Rows for this destination carry a single 'mobile_id' column."""
    return ['mobile_id']

  def get_action_type(self) -> DestinationType:
    return DestinationType.ADS_CUSTOMER_MATCH_MOBILE_DEVICE_ID_UPLOAD
| google/megalista | megalista_dataflow/uploaders/google_ads/customer_match/mobile_uploader.py | Python | apache-2.0 | 1,938 |
package com.example.wk.ybeg.fragment;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.example.wk.ybeg.R;
/**
* Created by WK on 2016/11/25.
*/
public class GengDuo extends BaseFragment {

    /** Log tag, derived from the class name. */
    public static final String TAG = GengDuo.class.getSimpleName();

    /**
     * Inflates the gengduo ("more") layout, caches it in the {@code view}
     * field (presumably declared by {@code BaseFragment} — not declared here)
     * and returns it to the fragment framework.
     */
    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        View root = inflater.inflate(R.layout.gengduo, container, false);
        view = root;
        return root;
    }
}
| wkadzz/YbEg | app/src/main/java/com/example/wk/ybeg/fragment/GengDuo.java | Java | apache-2.0 | 626 |
//# sourceMappingURL=audio.component.js.map | reTHINK-project/dev-smart-contextual-assistance-app | src/app/components/rethink/hypertyResource/audio/audio.component.js | JavaScript | apache-2.0 | 43 |
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is "SMS Library for the Java platform".
*
* The Initial Developer of the Original Code is Markus Eriksson.
* Portions created by the Initial Developer are Copyright (C) 2002
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.marre.mime.encoder;
import java.io.IOException;
import java.io.OutputStream;
import org.marre.mime.MimeBodyPart;
/**
* Interface for all mime encoders.
*
* @author Markus Eriksson
* @version $Id$
*/
public interface MimeEncoder
{
    /**
     * Writes the content-type of the message to the given stream.
     *
     * @param os
     *            The stream to write to
     * @param msg
     *            The message to get the content-type from
     * @throws IOException
     *             Thrown if we fail to write the content-type to the stream
     */
    void writeContentType(OutputStream os, MimeBodyPart msg) throws IOException;

    /**
     * Writes the headers of the message to the given stream.
     *
     * @param os
     *            The stream to write to
     * @param msg
     *            The message to get the headers from
     * @throws IOException
     *             Thrown if we fail to write the headers to the stream
     */
    void writeHeaders(OutputStream os, MimeBodyPart msg) throws IOException;

    /**
     * Writes the body of the message to the given stream.
     *
     * @param os
     *            The stream to write to
     * @param msg
     *            The message to get the data from
     * @throws IOException
     *             Thrown if we fail to write the body to the stream
     */
    void writeBody(OutputStream os, MimeBodyPart msg) throws IOException;
}
| Lihuanghe/CMPPGate | src/main/java/org/marre/mime/encoder/MimeEncoder.java | Java | apache-2.0 | 3,133 |
// Copyright 2015 Patrick Putnam
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef ENGINE_BATCHED_TASK_HPP_
#define ENGINE_BATCHED_TASK_HPP_
#ifdef DEBUG_MODE
#define DEBUGGING 0
#endif // DEBUG_MODE
#include "qtlsim_logger.hpp"
#include <boost/property_tree/ptree.hpp>
#include "clotho/genetics/population_growth_toolkit.hpp"
#include "clotho/data_spaces/allele_space/allele_space_vector.hpp"
#include "clotho/data_spaces/allele_space/allele_generator_vector.hpp"
#include "clotho/data_spaces/phenotype_evaluator/trait_space_vector.hpp"
//#include "clotho/data_spaces/phenotype_evaluator/trait_space_generator.hpp"
#include "clotho/data_spaces/phenotype_evaluator/trait_accumulator.hpp"
#include "clotho/data_spaces/free_space/free_space_mts.hpp"
#ifdef USE_BATCH_JOBS
#include "clotho/data_spaces/crossover/batch_crossover_mts.hpp"
#ifdef USE_CROSSOVER_EVENT_POOLING
#define CROSSOVER_TYPE clotho::genetics::BatchCrossoverMTWE
#else
#define CROSSOVER_TYPE clotho::genetics::BatchCrossoverMT
#endif // USE_CROSSOVER_EVENT_POOLING
#include "clotho/data_spaces/phenotype_evaluator/batch_phenotype_mts.hpp"
#define PHENOTYPE_TYPE clotho::genetics::BatchPhenotypeMT
#else
#include "clotho/data_spaces/crossover/crossover_mt.hpp"
#define CROSSOVER_TYPE clotho::genetics::CrossoverMT
#include "clotho/data_spaces/phenotype_evaluator/phenotype_mt.hpp"
#define PHENOTYPE_TYPE clotho::genetics::PhenotypeMT
#endif // USE_BATCH_JOBS
#include "clotho/data_spaces/population_space/population_spaces.hpp"
#include "clotho/data_spaces/selection/selection.hpp"
#include "clotho/data_spaces/mutation/batch_mutation_mt.hpp"
#include "clotho/data_spaces/fitness/general_fitness.hpp"
#include "clotho/utility/state_object.hpp"
#include "clotho/data_spaces/task/thread_pool.hpp"
// Tag type used to select the batched-task (thread pool, batched jobs) specialization of Engine.
struct batched_task {} ;
/**
 * Batched-task specialization of the QTLSim simulation Engine.
 *
 * Runs a forward-time population-genetics simulation in which each
 * generation's phases (fixed-allele analysis, crossover, mutation,
 * phenotype evaluation, fitness) are dispatched as batches of jobs to a
 * shared thread pool.  Two population buffers (m_pop0/m_pop1) alternate
 * between the parent and child roles each generation.
 *
 * Template parameters:
 *   RNG       - pseudo random number generator type
 *   RealType  - floating point type for allele positions and trait weights
 *   BlockType - unsigned integer block type backing the genome bit vectors
 *   SizeType  - index/count type
 */
template < class RNG, class RealType, class BlockType, class SizeType >
class Engine< RNG, RealType, BlockType, SizeType, batched_task > {
public:
    typedef Engine< RNG, RealType, BlockType, SizeType, batched_task > self_type;

    typedef RealType position_type;
    typedef RealType weight_type;
    typedef weight_type * phenotype_type;
    typedef BlockType block_type;
    typedef RNG random_engine_type;
    typedef SizeType size_type;

    typedef clotho::genetics::thread_pool< RNG > thread_pool_type;
    typedef clotho::genetics::AlleleSpace< position_type, size_type > allele_type;

    // Population memory layout is selected at compile time.
#ifdef USE_ROW_MODIFICATION
    typedef clotho::genetics::population_space_row_modified< block_type, weight_type > sequence_space_type;
#else
    typedef clotho::genetics::population_space_row< block_type, weight_type > sequence_space_type;
#endif // USE_ROW_MODIFICATION

    typedef clotho::genetics::trait_space_vector< weight_type > trait_space_type;
    typedef clotho::genetics::FreeSpaceAnalyzerMT< sequence_space_type, size_type > free_space_type;
    typedef clotho::genetics::mutation_allocator< random_engine_type, size_type > mutation_alloc_type;
    typedef clotho::genetics::BatchMutationMT< random_engine_type, sequence_space_type, allele_type, free_space_type, trait_space_type > mutation_type;
    typedef clotho::genetics::GeneralFitness fitness_type;
    typedef clotho::genetics::SelectionGenerator< random_engine_type, clotho::genetics::fitness_selection< fitness_type > > selection_type;

    // CROSSOVER_TYPE and PHENOTYPE_TYPE are macros resolved by USE_BATCH_JOBS above.
    typedef CROSSOVER_TYPE< random_engine_type, sequence_space_type, allele_type > crossover_type;
    typedef PHENOTYPE_TYPE< sequence_space_type, trait_space_type > phenotype_eval_type;

    typedef std::shared_ptr< ipopulation_growth_generator > population_growth_generator_type;
    typedef std::shared_ptr< ipopulation_growth > population_growth_type;

    // allows the state_getter specialization below to read protected members
    friend struct clotho::utility::state_getter< self_type >;

    /**
     * Construct the engine from a property-tree configuration.
     *
     * Resolves the configured population growth model; when no model matches,
     * the toolkit is asked to emit its available configurations into `config`
     * instead.  Finally sizes generation 0 via init(0).
     *
     * @param rng    shared random engine (not owned)
     * @param config simulation configuration; may be augmented with defaults
     */
    Engine( random_engine_type * rng, boost::property_tree::ptree & config ) :
        m_rand( rng )
        , m_parent( &m_pop0 )
        , m_child( &m_pop1 )
        , m_trait_space( config )
        , m_fixed_traits( config )
        , m_thread_pool( rng, config )
        , m_free_space( )
        , select_gen( rng, config )
        , cross_gen( rng, config )
        , mutate_gen( rng, config )
        , m_fit( config )
        , m_generation( 0 )
        , m_pop_growth()
    {
        population_growth_generator_type tmp = population_growth_toolkit::getInstance()->get_tool( config );
        if( tmp ) {
            m_pop_growth = tmp->generate();
            if( m_pop_growth ) {
                // record the selected growth model on stderr for the run log
                m_pop_growth->log( std::cerr );
                std::cerr << std::endl;
            }
        } else {
            population_growth_toolkit::getInstance()->tool_configurations( config );
        }

        init(0);
    }

    // Current generation index; incremented once by init() and once per simulate().
    size_t getGeneration() const {
        return m_generation;
    }

    /**
     * Size and clear both population buffers for generation 0.
     *
     * @param aN initial allele capacity of each population
     */
    void init( size_t aN ) {
        size_t pN = 0;
        if( m_pop_growth ) {
            // growth model maps (current size, generation) -> next size
            pN = m_pop_growth->operator()( pN, m_generation );
        }

        m_pop0.grow( pN, aN, m_trait_space.trait_count() );
        m_pop1.grow( pN, aN, m_trait_space.trait_count() );

        m_pop1.clear();
        m_pop0.clear();

        m_fit.resize( pN );

#ifdef USE_ROW_VECTOR
        m_pop1.getSequenceSpace().fill_empty();
        m_pop1.getSequenceSpace().finalize();
#endif // USE_ROW_VECTOR
        ++m_generation;
    }

    /**
     * Advance the simulation by one generation.
     *
     * Phases: swap parent/child buffers; determine next population size and
     * new mutation count; migrate fixed alleles out of the parent; resize the
     * child; select mates; crossover; mutate; evaluate phenotypes; compute
     * fitness.  Per-phase timings and space metrics are accumulated for
     * getPerformanceResults().
     */
    void simulate( ) {
        std::swap( m_child, m_parent ); // use the current child population as the parent population for the next round

        // At the start of each round m_fit already holds the fitness of the
        // "then child / now parent" population computed at the end of the
        // previous round.
        size_t pN = select_gen.individual_count();
        if( m_pop_growth ) {
            pN = m_pop_growth->operator()( pN, m_generation );
        }

        // number of new mutations to introduce across the 2N haploid genomes
        size_type pM = mutate_gen.generateNewMutation( 2 * pN );

        timer_type fix_time;
        size_type free_count = updateFixedAlleles( m_parent ); // update the fixed alleles with those of parent population
        fix_time.stop();

        size_t all_size = child_max_alleles( m_allele_space.size(), free_count, pM ); // rescale allele space for child population given free space from parent population and new allele count (pM)

#ifdef DEBUGGING
        BOOST_LOG_TRIVIAL( debug ) << "Generation " << m_generation << ": " << pN << " individuals; " << pM << " new alleles";
        BOOST_LOG_TRIVIAL( debug ) << "Free space: " << free_count << "; alleles: " << m_allele_space.size();
        BOOST_LOG_TRIVIAL( debug ) << "Rescaling child population to be: " << pN << " individuals x " << all_size << " alleles";
        std::cerr << "Generation " << m_generation << ": " << pN << " individuals; " << pM << " new alleles" << std::endl;
        std::cerr << "Rescaling child population to be: " << pN << " individuals x " << all_size << " alleles" << std::endl;
#endif // DEBUGGING

        m_child->grow( pN, all_size, m_trait_space.trait_count() ); // grow the child population accordingly

        select_gen.update( m_fit, pN );

        timer_type xover_time;
        cross_gen( select_gen, m_parent, m_child, &m_allele_space, m_thread_pool );
        xover_time.stop();

        timer_type mutate_time;
        mutate_gen( m_child, &m_allele_space, &m_trait_space, &m_free_space, pM, m_generation, m_thread_pool, 1 );
        mutate_time.stop();

        timer_type pheno_time;
        if( !m_trait_space.isAllNeutral() ) {
            m_pheno( m_parent, m_child, &m_trait_space, m_thread_pool );
        } else {
            // all traits neutral: every individual receives the same phenotype
            m_pheno.constant_phenotype( m_child, &m_trait_space );
        }
        pheno_time.stop();

        m_fit( m_pheno );

        // accumulate per-phase timings and space metrics for later reporting
        clotho::utility::add_value_array( fix_times, fix_time );
        clotho::utility::add_value_array( xover_times, xover_time );
        clotho::utility::add_value_array( mutate_times, mutate_time );
        clotho::utility::add_value_array( pheno_times, pheno_time );

        clotho::utility::add_value_array( free_sizes, free_count );
        clotho::utility::add_value_array( var_sizes, m_free_space.variable_count() );
        clotho::utility::add_value_array( fixed_sizes, m_free_space.fixed_size() );

        ++m_generation;
    }

    // Most recently produced generation (valid after simulate()).
    sequence_space_type * getChildPopulation() const {
        return m_child;
    }

    // Generation that parented the most recent simulate() round.
    sequence_space_type * getParentPopulation() const {
        return m_parent;
    }

    /**
     * Export accumulated per-generation timing and memory metrics into `log`.
     */
    void getPerformanceResults( boost::property_tree::ptree & log ) {
        log.put_child( "performance.mutate", mutate_times );
        log.put_child( "performance.crossover", xover_times );
        log.put_child( "performance.fixed", fix_times );
        log.put_child( "performance.phenotypes", pheno_times );

        log.put_child( "memory.free_count", free_sizes );
        log.put_child( "memory.variable_count", var_sizes );
        log.put_child( "memory.fixed_count", fixed_sizes );
    }

    // Segregating (non-fixed) allele metadata.
    allele_type * getAlleleSpace() {
        return &m_allele_space;
    }

    virtual ~Engine() { }

protected:

    /**
     * estimate the maximum number of alleles in the child
     *
     * N_parent - number of alleles in the parent population
     * F_parent - number of free alleles in the parent population
     * M_child - number of new alleles to be added the child population
     */
    size_t child_max_alleles( size_t N_parent, size_t F_parent, size_t M_child ) const {
#ifdef DEBUGGING
        BOOST_LOG_TRIVIAL(info) << "Parent alleles: " << N_parent << "; Free: " << F_parent << "; New Alleles: " << M_child;
        std::cerr << "Parent alleles: " << N_parent << "; Free: " << F_parent << "; New Alleles: " << M_child << std::endl;
#endif // DEBUGGING
        if( F_parent >= M_child ) {
            // if there are more free alleles in the parent generation
            // than there are new alleles to be added to the child generation
            // then do not adjust scale of the allele space
            return N_parent;
        } else {
            return N_parent + (M_child - F_parent);
        }
    }

    /**
     * Analyze the parent population's sequence space, migrate newly fixed
     * alleles (and their trait rows) into the fixed-allele stores, and report
     * how many columns are free for reuse.
     *
     * @param ss parent population to analyze; fixed columns are cleared in place
     * @return number of free (reusable) allele columns in `ss`
     */
    size_type updateFixedAlleles( sequence_space_type * ss ) {
        m_free_space( ss, m_thread_pool ); // analyze the parent population sequence space

        typedef typename free_space_type::iterator fixed_iterator;
        typedef typename trait_space_type::iterator trait_iterator;

        fixed_iterator fix_it = m_free_space.fixed_begin();
        fixed_iterator fix_end = m_free_space.fixed_end();

        while( fix_it != fix_end ) {
            size_type fixed_index = *fix_it++;

            ss->remove_fixed_allele( fixed_index );

            // move the allele's metadata and trait weights to the fixed stores
            m_fixed.append( m_allele_space, fixed_index );

            trait_iterator tstart = m_trait_space.begin( fixed_index ), tend = m_trait_space.end( fixed_index );
            m_fixed_traits.append( tstart, tend );
        }

#ifdef DEBUGGING
        // sanity check: every column reported free must actually be clear
        typedef typename free_space_type::iterator free_iterator;
        free_iterator fr_it = m_free_space.free_begin();
        free_iterator fr_end = m_free_space.free_end();
        unsigned int j = 0;
        while( fr_it != fr_end ) {
            size_type i = *fr_it++;
            if( !ss->freeColumn( i ) ) {
                assert(false);
            }
            ++j;
        }
        assert( j == m_free_space.free_size() );
#endif // DEBUGGING

        return m_free_space.free_size();
    }

    random_engine_type * m_rand;              // shared RNG (not owned)

    allele_type m_allele_space, m_fixed;      // segregating and fixed allele metadata
    sequence_space_type m_pop0, m_pop1;       // double-buffered populations
    sequence_space_type * m_parent, * m_child; // aliases into m_pop0/m_pop1, swapped each round
    trait_space_type m_trait_space, m_fixed_traits;
    thread_pool_type m_thread_pool;
    phenotype_eval_type m_pheno;
    free_space_type m_free_space;

    selection_type select_gen;
    crossover_type cross_gen;
    mutation_type mutate_gen;
    fitness_type m_fit;

    size_t m_generation;

    population_growth_type m_pop_growth;

    // per-generation timing and memory metrics (see getPerformanceResults)
    boost::property_tree::ptree fix_times, mutate_times, xover_times, pheno_times;
    boost::property_tree::ptree free_sizes, var_sizes, fixed_sizes;
};
namespace clotho {
namespace utility {

/**
 * state_getter specialization for the batched-task Engine.
 *
 * Serializes the engine's current state (phenotypes, free space, segregating
 * alleles, traits, and fixed alleles) into a boost property tree by
 * delegating each sub-component to its own state_getter.
 */
template < class RNG, class RealType, class BlockType, class SizeType >
struct state_getter< Engine< RNG, RealType, BlockType, SizeType, batched_task > > {
    typedef Engine< RNG, RealType, BlockType, SizeType, batched_task > object_type;

    void operator()( boost::property_tree::ptree & s, object_type & obj ) {
        state_getter< typename object_type::phenotype_eval_type > pheno_state;
        state_getter< typename object_type::free_space_type >     free_state;
        state_getter< typename object_type::trait_space_type >    trait_state;
        state_getter< typename object_type::allele_type >         allele_state;

        boost::property_tree::ptree pheno_tree, free_tree, trait_tree, fixed_tree, allele_tree;

        pheno_state( pheno_tree, obj.m_pheno );
        free_state( free_tree, obj.m_free_space );
        trait_state( trait_tree, obj.m_trait_space );
        allele_state( fixed_tree, obj.m_fixed );
        allele_state( allele_tree, obj.m_allele_space );

        // attach each subtree under its well-known key
        s.put_child( "phenotypes", pheno_tree );
        s.put_child( "free_space", free_tree );
        s.put_child( "allele_space", allele_tree );
        s.put_child( "trait_space", trait_tree );
        s.put_child( "fixed_alleles", fixed_tree );
    }
};

}   // namespace utility
}   // namespace clotho
#endif // ENGINE_BATCHED_TASK_HPP_
| putnampp/clotho | examples/QTLSim/engine_batched_task.hpp | C++ | apache-2.0 | 16,287 |
package com.transcend.monitor.transform;
import java.util.Date;
import java.util.Map;
import java.util.TimeZone;
import org.slf4j.Logger;
import com.msi.tough.core.Appctx;
import com.msi.tough.core.DateHelper;
import com.msi.tough.monitor.common.MonitorConstants;
import com.msi.tough.query.ErrorResponse;
import com.msi.tough.query.QueryFaults;
import com.transcend.monitor.message.GetMetricStatisticsMessage.GetMetricStatisticsRequest;
import com.transcend.monitor.message.MetricAlarmMessage.Statistic;
import com.transcend.monitor.message.MetricAlarmMessage.Unit;
/**
 * Unmarshals the query-string parameters of a CloudWatch-style
 * GetMetricStatistics request into its protobuf request message.
 * <p>
 * Validates that: at least one statistic is supplied, the start time precedes
 * the end time, the period is a positive multiple of 60 seconds, and the
 * number of requested datapoints does not exceed {@link #MAX_DATAPOINTS}.
 */
public class GetMetricStatisticsRequestUnmarshaller extends BaseMonitorUnmarshaller<GetMetricStatisticsRequest>
{
    /** Maximum number of datapoints a single request may cover. */
    public static final int MAX_DATAPOINTS = 100;

    private final static Logger logger = Appctx
            .getLogger(GetMetricStatisticsRequestUnmarshaller.class.getName());

    private static GetMetricStatisticsRequestUnmarshaller instance;

    /**
     * @return the shared unmarshaller instance, lazily created.
     */
    public static synchronized GetMetricStatisticsRequestUnmarshaller getInstance()
    {
        // synchronized guards the lazy initialization against concurrent callers;
        // the previous unsynchronized check could create multiple instances.
        if (instance == null)
        {
            instance = new GetMetricStatisticsRequestUnmarshaller();
        }
        return instance;
    }

    /**
     * Builds a {@code GetMetricStatisticsRequest} from the raw parameter map.
     *
     * @param in request parameters keyed by name, values as sent on the wire
     * @return the populated request message
     */
    @Override
    public GetMetricStatisticsRequest unmarshall(Map<String, String[]> in)
    {
        final GetMetricStatisticsRequest.Builder req =
                GetMetricStatisticsRequest.newBuilder();
        req.setPeriod(MarshallingUtils.unmarshallInteger(in,
                MonitorConstants.NODE_PERIOD,
                logger));
        req.setStartTime(MarshallingUtils.unmarshallString(in,
                MonitorConstants.NODE_STARTTIME,
                logger));
        req.setEndTime(MarshallingUtils.unmarshallString(in,
                MonitorConstants.NODE_ENDTIME,
                logger));
        req.setMetricName(MarshallingUtils.unmarshallString(in,
                MonitorConstants.NODE_METRICNAME,
                logger));

        // Unit is optional; absence maps to Unit.None
        String unit = MarshallingUtils.unmarshallString(in,
                MonitorConstants.NODE_UNIT, null,
                logger);
        req.setUnit(unit == null ? Unit.None : Unit.valueOf(unit));

        // Collect Statistics.member.1, Statistics.member.2, ... until the first gap
        int i = 0;
        while (true)
        {
            i++;
            final String n[] = in.get("Statistics.member." + i);
            if (n == null)
            {
                break;
            }
            try {
                req.addStatistic(Statistic.valueOf(n[0]));
            } catch (Exception e) {
                // unrecognized statistic name
                throw QueryFaults.InvalidParameterValue();
            }
        }
        if (req.getStatisticCount() == 0) {
            throw ErrorResponse.missingParameter();
        }

        req.addAllDimension(unmarshallDimensions(in));

        // Timestamps are ISO8601 and interpreted as GMT
        Date start = DateHelper.getCalendarFromISO8601String(req.getStartTime(),
                TimeZone.getTimeZone("GMT")).getTime();
        Date end = DateHelper.getCalendarFromISO8601String(req.getEndTime(),
                TimeZone.getTimeZone("GMT")).getTime();
        if (!start.before(end)) {
            throw QueryFaults.InvalidParameterValue();
        }
        // Period is expressed in seconds and must be a positive multiple of 60
        if (req.getPeriod() < 60 || req.getPeriod() % 60 != 0) {
            throw QueryFaults.InvalidParameterValue();
        }

        long timeDelta = end.getTime() -
                start.getTime();
        // timeDelta is in milliseconds and the period in seconds, so the number
        // of requested datapoints is (timeDelta / 1000) / period.  The previous
        // formula divided by an extra factor of 60, which under-counted the
        // datapoints and effectively raised the limit 60-fold.
        long numPoints = timeDelta / 1000L / req.getPeriod();
        if (numPoints > MAX_DATAPOINTS) {
            throw QueryFaults.InvalidParameterCombination("You have requested" +
                    " up to " + numPoints + " datapoints, which exceeds the " +
                    "limit of " + MAX_DATAPOINTS + ".");
        }
        return super.unmarshall(req.buildPartial(), in);
    }
}
| TranscendComputing/TopStackMetricSearch | src/com/transcend/monitor/transform/GetMetricStatisticsRequestUnmarshaller.java | Java | apache-2.0 | 3,650 |
/*
* Copyright (C) 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android.result.supplement;
import android.content.Context;
import android.os.AsyncTask;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.method.LinkMovementMethod;
import android.text.style.URLSpan;
import android.util.Log;
import android.widget.TextView;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import com.google.zxing.client.result.ISBNParsedResult;
import com.google.zxing.client.result.ParsedResult;
import com.google.zxing.client.result.ProductParsedResult;
import com.google.zxing.client.result.URIParsedResult;
import com.google.zxing.client.android.common.executor.AsyncTaskExecInterface;
import com.google.zxing.client.android.common.executor.AsyncTaskExecManager;
import com.google.zxing.client.android.history.HistoryManager;
/**
 * Base class for background tasks that fetch supplemental information about a
 * scanned barcode (product data, book info, page titles, ...) and append it to
 * a result {@link TextView}, recording the retrieved text in the scan history.
 * <p>
 * The target view and history manager are held via {@link WeakReference}s so a
 * still-running task cannot leak the enclosing activity.
 */
public abstract class SupplementalInfoRetriever extends AsyncTask<Object,Object,Object> {

  private static final String TAG = "SupplementalInfo";

  /**
   * Kicks off the retrievers appropriate for the given result type, if any.
   */
  public static void maybeInvokeRetrieval(TextView textView,
                                          ParsedResult result,
                                          HistoryManager historyManager,
                                          Context context) {
    AsyncTaskExecInterface taskExec = new AsyncTaskExecManager().build();
    if (result instanceof URIParsedResult) {
      URIParsedResult uriResult = (URIParsedResult) result;
      taskExec.execute(new URIResultInfoRetriever(textView, uriResult, historyManager, context));
      taskExec.execute(new TitleRetriever(textView, uriResult, historyManager));
    } else if (result instanceof ProductParsedResult) {
      String productID = ((ProductParsedResult) result).getProductID();
      taskExec.execute(new ProductResultInfoRetriever(textView, productID, historyManager, context));
      int idLength = productID.length();
      if (idLength == 12) {
        taskExec.execute(new AmazonInfoRetriever(textView, "UPC", productID, historyManager, context));
      } else if (idLength == 13) {
        taskExec.execute(new AmazonInfoRetriever(textView, "EAN", productID, historyManager, context));
      }
    } else if (result instanceof ISBNParsedResult) {
      String isbn = ((ISBNParsedResult) result).getISBN();
      taskExec.execute(new ProductResultInfoRetriever(textView, isbn, historyManager, context));
      taskExec.execute(new BookResultInfoRetriever(textView, isbn, historyManager, context));
      taskExec.execute(new AmazonInfoRetriever(textView, "ISBN", isbn, historyManager, context));
    }
  }

  private final WeakReference<TextView> textViewRef;
  private final WeakReference<HistoryManager> historyManagerRef;
  private final List<Spannable> newContents;
  private final List<String[]> newHistories;

  SupplementalInfoRetriever(TextView textView, HistoryManager historyManager) {
    textViewRef = new WeakReference<TextView>(textView);
    historyManagerRef = new WeakReference<HistoryManager>(historyManager);
    newContents = new ArrayList<Spannable>();
    newHistories = new ArrayList<String[]>();
  }

  @Override
  public final Object doInBackground(Object... args) {
    try {
      retrieveSupplementalInfo();
    } catch (IOException e) {
      // network failure is non-fatal; the scan result is still usable
      Log.w(TAG, e);
    }
    return null;
  }

  @Override
  protected final void onPostExecute(Object arg) {
    // Runs on the UI thread: flush accumulated text into the view, if it
    // (and the history manager) are still alive.
    TextView textView = textViewRef.get();
    if (textView != null) {
      for (Spannable content : newContents) {
        textView.append(content);
      }
      textView.setMovementMethod(LinkMovementMethod.getInstance());
    }
    HistoryManager historyManager = historyManagerRef.get();
    if (historyManager != null) {
      for (String[] text : newHistories) {
        historyManager.addHistoryItemDetails(text[0], text[1]);
      }
    }
  }

  abstract void retrieveSupplementalInfo() throws IOException;

  /**
   * Queues a piece of retrieved text (optionally linkified) for display and
   * for the scan history. Safe to call from the background thread.
   */
  final void append(String itemID, String source, String[] newTexts, String linkURL) {
    StringBuilder combined = new StringBuilder();
    if (source != null) {
      combined.append(source).append(' ');
    }
    int linkStart = combined.length();
    for (int i = 0; i < newTexts.length; i++) {
      if (i == 0) {
        combined.append(newTexts[i]);
      } else {
        combined.append(" [");
        combined.append(newTexts[i]);
        combined.append(']');
      }
    }
    int linkEnd = combined.length();
    String newText = combined.toString();
    Spannable content = new SpannableString(newText + "\n\n");
    if (linkURL != null) {
      // Strangely, some Android browsers don't seem to register to handle HTTP:// or HTTPS://.
      // Lower-case these as it should always be OK to lower-case these schemes.
      if (linkURL.startsWith("HTTP://")) {
        linkURL = "http" + linkURL.substring(4);
      } else if (linkURL.startsWith("HTTPS://")) {
        linkURL = "https" + linkURL.substring(5);
      }
      content.setSpan(new URLSpan(linkURL), linkStart, linkEnd, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
    }
    newContents.add(content);
    newHistories.add(new String[] {itemID, newText});
  }

  static void maybeAddText(String text, Collection<String> texts) {
    if (text != null && text.length() > 0) {
      texts.add(text);
    }
  }

  static void maybeAddTextSeries(Collection<String> textSeries, Collection<String> texts) {
    if (textSeries == null || textSeries.isEmpty()) {
      return;
    }
    StringBuilder joined = new StringBuilder();
    for (String text : textSeries) {
      if (joined.length() > 0) {
        joined.append(", ");
      }
      joined.append(text);
    }
    texts.add(joined.toString());
  }
}
| bestdpf/2dbarcode | android/src/com/google/zxing/client/android/result/supplement/SupplementalInfoRetriever.java | Java | apache-2.0 | 6,505 |
/*
* Copyright (c) 2004 - 2012 Eike Stepper (Berlin, Germany) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eike Stepper - initial API and implementation
*/
package org.eclipse.emf.cdo.spi.common.id;
import java.io.IOException;
import java.util.Arrays;
import org.eclipse.emf.cdo.common.id.CDOIDUtil;
import org.eclipse.net4j.util.CheckUtil;
import org.eclipse.net4j.util.io.ExtendedDataInput;
import org.eclipse.net4j.util.io.ExtendedDataOutput;
/**
* @author Eike Stepper
* @since 4.1
* @noextend This interface is not intended to be extended by clients.
*/
public abstract class AbstractCDOIDByteArray extends AbstractCDOID
{
public static final String NULL_VALUE = null;
private static final long serialVersionUID = 1L;
private byte[] value;
public AbstractCDOIDByteArray()
{
}
public AbstractCDOIDByteArray(byte[] value)
{
CheckUtil.checkArg(value != null && value.length == 16, "Illegal UUID value");
this.value = value;
}
public byte[] getByteArrayValue()
{
return value;
}
public String toURIFragment()
{
return CDOIDUtil.encodeUUID(value);
}
@Override
public void read(String fragmentPart)
{
value = CDOIDUtil.decodeUUID(fragmentPart);
}
@Override
public void read(ExtendedDataInput in) throws IOException
{
value = in.readByteArray();
}
@Override
public void write(ExtendedDataOutput out) throws IOException
{
out.writeByteArray(value);
}
@Override
public boolean equals(Object obj)
{
if (obj == this)
{
return true;
}
if (obj != null && obj.getClass() == getClass())
{
AbstractCDOIDByteArray that = (AbstractCDOIDByteArray)obj;
return Arrays.equals(value, that.value);
}
return false;
}
@Override
public int hashCode()
{
return getClass().hashCode() ^ Arrays.hashCode(value);
}
}
| IHTSDO/snow-owl | dependencies/org.eclipse.emf.cdo.common/src/org/eclipse/emf/cdo/spi/common/id/AbstractCDOIDByteArray.java | Java | apache-2.0 | 2,099 |
package com.errorplayer.lala_weather.db;
import org.litepal.crud.DataSupport;
/**
* Created by linze on 2017/7/7.
*/
/**
 * LitePal model representing a city row in the local weather database.
 * Each city belongs to a province (via provinceId) and carries the code used
 * to query the weather service.
 */
public class City extends DataSupport {
    // primary key assigned by LitePal
    private int id;
    // display name of the city
    private String cityName;
    // weather-service identifier for this city
    private int cityCode;
    // id of the province this city belongs to
    private int provinceId;
    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }
    public String getCityName() {
        return cityName;
    }
    public void setCityName(String cityName) {
        this.cityName = cityName;
    }
    public int getCityCode() {
        return cityCode;
    }
    public void setCityCode(int cityCode) {
        this.cityCode = cityCode;
    }
    public int getProvinceId() {
        return provinceId;
    }
    public void setProvinceId(int provinceId) {
        this.provinceId = provinceId;
    }
}
| errorplayer/LaLa-Weather | app/src/main/java/com/errorplayer/lala_weather/db/City.java | Java | apache-2.0 | 845 |
/*
* www.yiji.com Inc.
* Copyright (c) 2016 All Rights Reserved
*/
package com.falcon.suitagent.web;
/*
* 修订记录:
* guqiu@yiji.com 2016-07-26 13:54 创建
*/
import com.falcon.suitagent.config.AgentConfiguration;
import com.falcon.suitagent.plugins.metrics.MetricsCommon;
import lombok.extern.slf4j.Slf4j;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
/**
* @author guqiu@yiji.com
*/
/**
 * Minimal HTTP/1.1 response writer for the agent's embedded management
 * endpoint. Supports the mock-service control paths (/mock/...), a /version
 * path, and a 404 fallback for everything else.
 */
@Slf4j
public class Response {

    Request request;
    OutputStream output;

    public Response(OutputStream output) {
        this.output = output;
    }

    public void setRequest(Request request) {
        this.request = request;
    }

    /**
     * Writes a fixed 404 response.
     *
     * @throws IOException if the response cannot be written to the stream
     */
    public void send_404() throws IOException {
        String errorMessage = "HTTP/1.1 404 File Not Found\r\n" +
                "Content-Type: text/html\r\n" +
                "Content-Length: 23\r\n" +
                "\r\n" +
                "<h1>File Not Found</h1>";
        output.write(errorMessage.getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Writes a 200 response carrying the given HTML body.
     * <p>
     * The body is encoded as UTF-8 and Content-Length is computed from the
     * encoded byte length; the previous implementation used the character
     * count, which under-reports the length for multi-byte characters and
     * truncates the response on strict clients.
     *
     * @param html response body
     * @throws IOException if the response cannot be written to the stream
     */
    public void send(String html) throws IOException {
        byte[] body = html.getBytes(StandardCharsets.UTF_8);
        String header = "HTTP/1.1 200 OK\r\n" +
                "Content-Type: text/html; charset=utf-8\r\n" +
                "Content-Length: " + body.length + "\r\n" +
                "\r\n";
        output.write(header.getBytes(StandardCharsets.UTF_8));
        output.write(body);
    }

    /**
     * Dispatches the parsed request path:
     * <pre>
     *   /mock/list
     *   /mock/{add|remove}/{serviceType}/{serviceName}
     *   /version
     * </pre>
     * Any other path receives a 404.
     *
     * @throws IOException if the response cannot be written to the stream
     */
    public void doRequest() throws IOException {
        List<String> urlPath = request.getUrlPath();
        if (urlPath.size() >= 1 && "mock".equals(urlPath.get(0))) {
            if (urlPath.size() < 2) {
                send("error! must have option");
                return;
            }
            String msg = "";
            String option = urlPath.get(1);
            if ("list".equals(option)) {
                msg = MetricsCommon.getMockServicesList();
            } else if (urlPath.size() != 4) {
                // Assign the error page instead of calling send() here: the
                // previous code sent the error AND then fell through to
                // send(msg), emitting two responses on one connection.
                msg = "<h3>error! url path must be match : /mock/{option}/{serviceType}/{serviceName}</h3>";
            } else {
                String type = urlPath.get(2);
                String server = urlPath.get(3);
                if ("add".equals(option)) {
                    MetricsCommon.addMockService(type, server);
                    msg = String.format("<h2>add mock server %s:%s success</h2>", type, server);
                } else if ("remove".equals(option)) {
                    MetricsCommon.removeMockService(type, server);
                    msg = String.format("<h2>remove mock server %s:%s success</h2>", type, server);
                }
            }
            send(msg);
        } else if (urlPath.size() >= 1 && "version".equals(urlPath.get(0))) {
            send("Version " + AgentConfiguration.VERSION);
        } else {
            send_404();
        }
    }
}
| DevopsJK/SuitAgent | src/main/java/com/falcon/suitagent/web/Response.java | Java | apache-2.0 | 2,763 |
# py2exe build script: packages the game into a windowed Windows executable.
__author__ = 'Autio'

from distutils.core import setup
import py2exe  # side-effect import: registers the "py2exe" command with distutils

# windows=[...] builds a GUI (no console window) executable from the main script
setup(windows=['ShitCrimson.py'])
| ArchBang85/S_Crimson | Setup.py | Python | apache-2.0 | 104 |
package pl.openrest.filters.query;
import java.io.Serializable;
import org.springframework.data.mapping.PersistentProperty;
import pl.openrest.predicate.parser.FilterPart;
import pl.openrest.predicate.parser.PredicateParts;
/**
 * Fluent builder assembling a {@link PredicateContext} from filter trees,
 * entity ids, pre-parsed predicate parts, and any statically declared filters.
 * <p>
 * All {@code with*} methods return {@code this} for chaining; call
 * {@link #build()} to produce the final context.
 */
public interface PredicateContextBuilder {

    /** Adds a parsed filter tree to the context. */
    PredicateContextBuilder withFilterTree(FilterPart tree);

    /** Constrains the context to the entity identified by {@code id} via {@code idProperty}. */
    PredicateContextBuilder withId(PersistentProperty<?> idProperty, Serializable id);

    /** Adds already-parsed predicate parts to the context. */
    PredicateContextBuilder withPredicateParts(PredicateParts predicateParts);

    /** Applies the statically configured filters. */
    PredicateContextBuilder withStaticFilters();

    /** @return the assembled predicate context */
    PredicateContext<?> build();
}
| konik32/openrest | openrest-filters/src/main/java/pl/openrest/filters/query/PredicateContextBuilder.java | Java | apache-2.0 | 587 |
def test_dummy_request():
    """NoEmulator unwraps the text list and defaults a missing time to None."""
    from rasa.nlu.emulators.no_emulator import NoEmulator

    emulator = NoEmulator()

    without_time = emulator.normalise_request_json({"text": ["arb text"]})
    assert without_time == {"text": "arb text", "time": None}

    with_time = emulator.normalise_request_json(
        {"text": ["arb text"], "time": "1499279161658"}
    )
    assert with_time == {"text": "arb text", "time": "1499279161658"}
def test_dummy_response():
    """NoEmulator returns response payloads unchanged."""
    from rasa.nlu.emulators.no_emulator import NoEmulator

    payload = {"intent": "greet", "text": "hi", "entities": {}, "confidence": 1.0}
    assert NoEmulator().normalise_response_json(payload) == payload
def test_emulators_can_handle_missing_data():
    """The LUIS emulator tolerates a response that lacks any intent result."""
    from rasa.nlu.emulators.luis import LUISEmulator

    emulator = LUISEmulator()
    normalised = emulator.normalise_response_json(
        {"text": "this data doesn't contain an intent result"}
    )

    prediction = normalised["prediction"]
    assert prediction["topIntent"] is None
    assert prediction["intents"] == {}
| RasaHQ/rasa_nlu | tests/nlu/emulators/test_no_emulator.py | Python | apache-2.0 | 948 |
package org.inferred.freevisitor.tests.j8;
/** Concrete visitable subtype used by the Java 8 FreeVisitor integration tests. */
public class TypeA extends VisitableType {}
| inferred/freevisitor | src/it/j8/main/java/org/inferred/freevisitor/tests/j8/TypeA.java | Java | apache-2.0 | 88 |