text stringlengths 1 1.05M |
|---|
<reponame>mxsolopov/picture-advanced-js
// Swap each block's image for its hover variant and hide the block's
// detail paragraphs while the cursor is over it.
const pictureSize = (imgSelector) => {
  // Image blocks matching the selector.
  const blocks = document.querySelectorAll(imgSelector);

  // Show the hover variant of the block's image and hide its details.
  function showImg(block) {
    const img = block.querySelector('img');
    // "name.png" -> "name-1.png"
    img.src = img.src.slice(0, -4) + '-1.png';
    block.querySelectorAll('p:not(.sizes-hit)').forEach((p) => {
      p.style.display = 'none';
    });
  }

  // Restore the original image and show the details again.
  function hideImg(block) {
    const img = block.querySelector('img');
    // "name-1.png" -> "name.png"
    img.src = img.src.slice(0, -6) + '.png';
    block.querySelectorAll('p:not(.sizes-hit)').forEach((p) => {
      p.style.display = 'block';
    });
  }

  blocks.forEach((block) => {
    // Bug fix: mouseover/mouseout bubble from the block's children, so moving
    // the cursor onto a child re-fired showImg and appended another "-1"
    // ("x-1.png" -> "x-1-1.png"), breaking the image path. mouseenter and
    // mouseleave fire exactly once per hover of the block.
    block.addEventListener('mouseenter', () => showImg(block));
    block.addEventListener('mouseleave', () => hideImg(block));
  });
};

export default pictureSize;
import uuid
class ApiGatewayMethod:
    """Minimal container for API Gateway method definitions attached to one URL."""

    def __init__(self, url):
        # Base URL the methods belong to.
        self.url = url
        # Method-definition dicts, kept in insertion order.
        self.methods = []

    def add_method(self, http_method, integration, authorization_type):
        """Create a method-definition dict, record it, and return it."""
        entry = {
            "HTTPMethod": http_method,
            "Integration": integration,
            "AuthorizationType": authorization_type,
        }
        self.methods.append(entry)
        return entry

    def override_property(self, method, property_name, new_value):
        """Set (or overwrite) a single property on a previously created method."""
        method[property_name] = new_value
# --- Demonstration of usage ---
# Build a gateway rooted at a placeholder URL, register a GET method, then
# override one of its properties with a freshly generated authorizer id.
# (Bug fix: the original final line carried a stray trailing "|" token that
# made the file a syntax error.)
api_gateway_url = "{URL}/v1"
api_gateway = ApiGatewayMethod(api_gateway_url)
events_integration = "events_integration"
api_authorizer = "api_authorizer"
get_events = api_gateway.add_method("GET", events_integration, "CUSTOM")
api_gateway.override_property(get_events, "AuthorizerId", str(uuid.uuid4()))
print(api_gateway.methods)
<gh_stars>0
import threading
import typing
from hydrus.core import HydrusData
from hydrus.core import HydrusExceptions
from hydrus.core import HydrusGlobals as HG
from hydrus.core import HydrusSerialisable
from hydrus.core import HydrusTags
from hydrus.client import ClientSearch
# Client API basic-permission identifiers.
CLIENT_API_PERMISSION_ADD_URLS = 0
CLIENT_API_PERMISSION_ADD_FILES = 1
CLIENT_API_PERMISSION_ADD_TAGS = 2
CLIENT_API_PERMISSION_SEARCH_FILES = 3
CLIENT_API_PERMISSION_MANAGE_PAGES = 4
CLIENT_API_PERMISSION_MANAGE_COOKIES = 5
CLIENT_API_PERMISSION_MANAGE_DATABASE = 6
CLIENT_API_PERMISSION_ADD_NOTES = 7

ALLOWED_PERMISSIONS = (
    CLIENT_API_PERMISSION_ADD_FILES,
    CLIENT_API_PERMISSION_ADD_TAGS,
    CLIENT_API_PERMISSION_ADD_URLS,
    CLIENT_API_PERMISSION_SEARCH_FILES,
    CLIENT_API_PERMISSION_MANAGE_PAGES,
    CLIENT_API_PERMISSION_MANAGE_COOKIES,
    CLIENT_API_PERMISSION_MANAGE_DATABASE,
    CLIENT_API_PERMISSION_ADD_NOTES,
)

# Human-readable label for each basic permission.
basic_permission_to_str_lookup = {
    CLIENT_API_PERMISSION_ADD_URLS: 'add urls for processing',
    CLIENT_API_PERMISSION_ADD_FILES: 'import files',
    CLIENT_API_PERMISSION_ADD_TAGS: 'add tags to files',
    CLIENT_API_PERMISSION_SEARCH_FILES: 'search for files',
    CLIENT_API_PERMISSION_MANAGE_PAGES: 'manage pages',
    CLIENT_API_PERMISSION_MANAGE_COOKIES: 'manage cookies',
    CLIENT_API_PERMISSION_MANAGE_DATABASE: 'manage database',
    CLIENT_API_PERMISSION_ADD_NOTES: 'add notes to files',
}

# Cached search results live for four hours (seconds).
SEARCH_RESULTS_CACHE_TIMEOUT = 4 * 3600

# Session keys expire after one day (seconds).
SESSION_EXPIRY = 86400

api_request_dialog_open = False
last_api_permissions_request = None
class APIManager( HydrusSerialisable.SerialisableBase ):
    """Tracks Client API access keys, their permissions objects, and the
    short-lived session keys issued against them.

    Only the permissions objects are serialised; session keys are ephemeral.
    """

    SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_CLIENT_API_MANAGER
    SERIALISABLE_NAME = 'Client API Manager'
    SERIALISABLE_VERSION = 1

    def __init__( self ):
        HydrusSerialisable.SerialisableBase.__init__( self )
        self._dirty = False
        # access key (bytes) -> APIPermissions object
        self._access_keys_to_permissions = {}
        # session key (bytes) -> ( access key, expiry timestamp )
        self._session_keys_to_access_keys_and_expirys = {}
        self._lock = threading.Lock()
        # Prune expired state on the controller's periodic memory pulse.
        HG.client_controller.sub( self, 'MaintainMemory', 'memory_maintenance_pulse' )

    def _GetSerialisableInfo( self ):
        serialisable_api_permissions_objects = [ api_permissions.GetSerialisableTuple() for api_permissions in self._access_keys_to_permissions.values() ]
        return serialisable_api_permissions_objects

    def _InitialiseFromSerialisableInfo( self, serialisable_info ):
        serialisable_api_permissions_objects = serialisable_info
        api_permissions_objects = [ HydrusSerialisable.CreateFromSerialisableTuple( serialisable_api_permissions ) for serialisable_api_permissions in serialisable_api_permissions_objects ]
        self._access_keys_to_permissions = { api_permissions.GetAccessKey() : api_permissions for api_permissions in api_permissions_objects }

    def _SetDirty( self ):
        self._dirty = True

    def AddAccess( self, api_permissions ):
        """Register (or replace) the permissions object under its access key."""
        with self._lock:
            self._access_keys_to_permissions[ api_permissions.GetAccessKey() ] = api_permissions
            self._SetDirty()

    def DeleteAccess( self, access_keys ):
        """Remove any of the given access keys that are currently registered."""
        with self._lock:
            for access_key in access_keys:
                if access_key in self._access_keys_to_permissions:
                    del self._access_keys_to_permissions[ access_key ]
            self._SetDirty()

    def GenerateSessionKey( self, access_key ):
        """Mint a new session key for the access key, valid for SESSION_EXPIRY seconds."""
        session_key = HydrusData.GenerateKey()
        with self._lock:
            self._session_keys_to_access_keys_and_expirys[ session_key ] = ( access_key, HydrusData.GetNow() + SESSION_EXPIRY )
        return session_key

    def GetAccessKey( self, session_key ):
        """Resolve a session key to its access key, refreshing the session's expiry.

        Raises DataMissing for unknown keys and SessionException for expired ones.
        """
        with self._lock:
            if session_key not in self._session_keys_to_access_keys_and_expirys:
                raise HydrusExceptions.DataMissing( 'Did not find an entry for that session key!' )
            ( access_key, session_expiry ) = self._session_keys_to_access_keys_and_expirys[ session_key ]
            if HydrusData.TimeHasPassed( session_expiry ):
                # Bug fix: the expired entry is keyed by the session key, not by
                # its expiry timestamp; the old code raised KeyError here and
                # never actually removed the expired session.
                del self._session_keys_to_access_keys_and_expirys[ session_key ]
                raise HydrusExceptions.SessionException( 'That session key has expired!' )
            # Successful use extends the session by another SESSION_EXPIRY.
            self._session_keys_to_access_keys_and_expirys[ session_key ] = ( access_key, HydrusData.GetNow() + SESSION_EXPIRY )
        return access_key

    def GetAllPermissions( self ):
        # Take the lock for a consistent snapshot, matching the other accessors.
        with self._lock:
            return list( self._access_keys_to_permissions.values() )

    def GetPermissions( self, access_key ):
        with self._lock:
            if access_key not in self._access_keys_to_permissions:
                raise HydrusExceptions.DataMissing( 'Did not find an entry for that access key!' )
            return self._access_keys_to_permissions[ access_key ]

    def IsDirty( self ):
        with self._lock:
            return self._dirty

    def MaintainMemory( self ):
        """Forward the periodic memory pulse to every permissions object."""
        with self._lock:
            for api_permissions in self._access_keys_to_permissions.values():
                api_permissions.MaintainMemory()

    def OverwriteAccess( self, api_permissions ):
        self.AddAccess( api_permissions )

    def SetClean( self ):
        with self._lock:
            self._dirty = False

    def SetPermissions( self, api_permissions_objects ):
        """Replace the whole access-key table with the given permissions objects."""
        with self._lock:
            self._access_keys_to_permissions = { api_permissions.GetAccessKey() : api_permissions for api_permissions in api_permissions_objects }
            self._SetDirty()

HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_CLIENT_API_MANAGER ] = APIManager
class APIPermissions( HydrusSerialisable.SerialisableBaseNamed ):
    """A named set of Client API permissions tied to a single access key.

    Also caches the hash_ids of the key's last search so later file requests
    can be checked against what the key was actually allowed to see.
    """

    SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_CLIENT_API_PERMISSIONS
    SERIALISABLE_NAME = 'Client API Permissions'
    SERIALISABLE_VERSION = 1

    def __init__( self, name = 'new api permissions', access_key = None, basic_permissions = None, search_tag_filter = None ):
        if access_key is None:
            access_key = HydrusData.GenerateKey()
        if basic_permissions is None:
            basic_permissions = set()
        if search_tag_filter is None:
            search_tag_filter = HydrusTags.TagFilter()
        HydrusSerialisable.SerialisableBaseNamed.__init__( self, name )
        self._access_key = access_key
        self._basic_permissions = set( basic_permissions )
        self._search_tag_filter = search_tag_filter
        # hash_ids from the most recent search; None means nothing cached
        self._last_search_results = None
        self._search_results_timeout = 0
        self._lock = threading.Lock()

    def _GetSerialisableInfo( self ):
        serialisable_access_key = self._access_key.hex()
        serialisable_basic_permissions = list( self._basic_permissions )
        serialisable_search_tag_filter = self._search_tag_filter.GetSerialisableTuple()
        return ( serialisable_access_key, serialisable_basic_permissions, serialisable_search_tag_filter )

    def _HasPermission( self, permission ):
        return permission in self._basic_permissions

    def _InitialiseFromSerialisableInfo( self, serialisable_info ):
        ( serialisable_access_key, serialisable_basic_permissions, serialisable_search_tag_filter ) = serialisable_info
        self._access_key = bytes.fromhex( serialisable_access_key )
        self._basic_permissions = set( serialisable_basic_permissions )
        self._search_tag_filter = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_search_tag_filter )

    def CheckAtLeastOnePermission( self, permissions ):
        """Raise InsufficientCredentialsException unless at least one of the given permissions is held."""
        with self._lock:
            if True not in ( self._HasPermission( permission ) for permission in permissions ):
                # fixed error-message typo: 'one these' -> 'one of these'
                raise HydrusExceptions.InsufficientCredentialsException( 'You need at least one of these permissions: {}'.format( ', '.join( basic_permission_to_str_lookup[ permission ] for permission in permissions ) ) )

    def CheckCanSearchTags( self, tags ):
        """Raise unless the tag filter allows everything or at least one given tag."""
        with self._lock:
            if self._search_tag_filter.AllowsEverything():
                return
            if len( tags ) > 0:
                filtered_tags = self._search_tag_filter.Filter( tags )
                if len( filtered_tags ) > 0:
                    return
            raise HydrusExceptions.InsufficientCredentialsException( 'You do not have permission to do this search. Your tag search permissions are: {}'.format( self._search_tag_filter.ToPermittedString() ) )

    def CheckCanSeeAllFiles( self ):
        with self._lock:
            if not ( self._HasPermission( CLIENT_API_PERMISSION_SEARCH_FILES ) and self._search_tag_filter.AllowsEverything() ):
                raise HydrusExceptions.InsufficientCredentialsException( 'You do not have permission to see all files, so you cannot do this.' )

    def CheckPermission( self, permission ):
        # HasPermission takes the lock internally, so no lock is taken here.
        if not self.HasPermission( permission ):
            raise HydrusExceptions.InsufficientCredentialsException( 'You do not have permission to: {}'.format( basic_permission_to_str_lookup[ permission ] ) )

    def CheckPermissionToSeeFiles( self, hash_ids ):
        """Raise unless every given hash_id appeared in the cached last search results."""
        with self._lock:
            if self._search_tag_filter.AllowsEverything():
                return
            if self._last_search_results is None:
                raise HydrusExceptions.BadRequestException( 'It looks like those search results are no longer available--please run the search again!' )
            num_files_asked_for = len( hash_ids )
            num_files_allowed_to_see = len( self._last_search_results.intersection( hash_ids ) )
            if num_files_allowed_to_see != num_files_asked_for:
                error_text = 'You do not seem to have access to all those files! You asked to see {} files, but you were only authorised to see {} of them!'
                error_text = error_text.format( HydrusData.ToHumanInt( num_files_asked_for ), HydrusData.ToHumanInt( num_files_allowed_to_see ) )
                raise HydrusExceptions.InsufficientCredentialsException( error_text )
            # a successful check keeps the cached results alive a while longer
            self._search_results_timeout = HydrusData.GetNow() + SEARCH_RESULTS_CACHE_TIMEOUT

    def FilterTagPredicateResponse( self, predicates: typing.List[ ClientSearch.Predicate ] ):
        """Return only the predicates whose tag value passes the search tag filter."""
        with self._lock:
            if self._search_tag_filter.AllowsEverything():
                return predicates
            return [ predicate for predicate in predicates if self._search_tag_filter.TagOK( predicate.GetValue() ) ]

    def GenerateNewAccessKey( self ):
        with self._lock:
            self._access_key = HydrusData.GenerateKey()

    def GetAccessKey( self ):
        with self._lock:
            return self._access_key

    def GetAdvancedPermissionsString( self ):
        with self._lock:
            p_strings = []
            if self._HasPermission( CLIENT_API_PERMISSION_SEARCH_FILES ):
                p_strings.append( 'Can search: {}'.format( self._search_tag_filter.ToPermittedString() ) )
            return ''.join( p_strings )

    def GetBasicPermissions( self ):
        with self._lock:
            return self._basic_permissions

    def GetBasicPermissionsString( self ):
        with self._lock:
            l = sorted( ( basic_permission_to_str_lookup[ p ] for p in self._basic_permissions ) )
            return ', '.join( l )

    def GetSearchTagFilter( self ):
        with self._lock:
            return self._search_tag_filter

    def HasPermission( self, permission ):
        with self._lock:
            return self._HasPermission( permission )

    def MaintainMemory( self ):
        """Drop the cached search results once their timeout has passed."""
        with self._lock:
            if self._last_search_results is not None and HydrusData.TimeHasPassed( self._search_results_timeout ):
                self._last_search_results = None

    def SetLastSearchResults( self, hash_ids ):
        """Cache the hash_ids of the latest search (skipped when the filter allows everything)."""
        with self._lock:
            if self._search_tag_filter.AllowsEverything():
                return
            self._last_search_results = set( hash_ids )
            self._search_results_timeout = HydrusData.GetNow() + SEARCH_RESULTS_CACHE_TIMEOUT

    def SetSearchTagFilter( self, search_tag_filter ):
        with self._lock:
            self._search_tag_filter = search_tag_filter

    def ToHumanString( self ):
        s = 'API Permissions ({}): '.format( self._name )
        basic_string = self.GetBasicPermissionsString()
        advanced_string = self.GetAdvancedPermissionsString()
        # Bug fix: this previously read `len( basic_string ) == ''`, comparing
        # an int to a string, which is always False, so the 'no permissions'
        # branch could never run.
        if len( basic_string ) == 0:
            s += 'does not have permission to do anything'
        else:
            s += basic_string
            if len( advanced_string ) > 0:
                s += ': {}'.format( advanced_string )
        return s

HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_CLIENT_API_PERMISSIONS ] = APIPermissions
|
#!/bin/bash
# Download the MeSH RDF datasets into a target directory and decompress them.
if [[ -z "$1" ]]; then
  echo "Provide a target directory to store downloaded files as argument. E.g.: /data/kraken-download/datasets"
  exit 1
fi
# Quote the path so directories containing spaces work, and abort if the cd
# fails so the downloads never land in the wrong place.
mkdir -p "$1"
cd "$1" || exit 1
# MeSH Medical Subject Headings
wget -N -a download.log ftp://nlmpubs.nlm.nih.gov/online/mesh/rdf/mesh.nt.gz
wget -N -a download.log ftp://nlmpubs.nlm.nih.gov/online/mesh/rdf/void_1.0.0.ttl
wget -N -a download.log ftp://nlmpubs.nlm.nih.gov/online/mesh/rdf/vocabulary_1.0.0.ttl
find . -name "*.gz" -exec gzip -d {} +
|
package main.unused.classTypeChanged;

/**
 * Empty marker annotation.
 *
 * <p>NOTE(review): judging by the package name ({@code unused.classTypeChanged}),
 * this is presumably a fixture for exercising "class type changed" detection
 * (a type that became an annotation) — confirm against the surrounding harness.</p>
 */
public @interface ClassTypeChangedA2C {
}
|
<gh_stars>1-10
export class Users {
id: number;
owner: any;
editor: any;
seq: number;
status: number;
meta: any;
title: string;
description: string;
title_short: string;
constructor() {
this.id = this.id || 0;
this.status = this.status || 2;
}
} |
import android.app.Activity;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
public class MainActivity extends Activity {
private SensorManager sensorManager;
private Sensor stepSensor;
private SensorEventListener stepListener;
TextView stepCounter;
int stepsTaken = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
initSteps();
findViewById(R.id.start_button).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
startTracking();
}
});
findViewById(R.id.stop_button).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
stopTracking();
}
});
}
private void initSteps(){
stepCounter = findViewById(R.id.step_counter);
sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
stepSensor = sensorManager.getDefaultSensor(Sensor.TYPE_STEP_COUNTER);
if (stepSensor == null) {
stepCounter.setText("No Step Sensor Found!");
} else {
stepListener = new SensorEventListener() {
public void onSensorChanged(SensorEvent sensorEvent) {
stepsTaken++;
displaySteps();
}
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
};
}
private void startTracking(){
sensorManager.registerListener(stepListener, stepSensor, SensorManager.SENSOR_DELAY_UI);
}
private void stopTracking(){
sensorManager.unregisterListener(stepListener);
}
private void displaySteps(){
stepCounter.setText("Steps taken: " + stepsTaken);
}
} |
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'uri'
require 'logger'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# Application Migration simplifies the migration of applications from Oracle Cloud Infrastructure Classic to Oracle Cloud Infrastructure.
# You can use Application Migration API to migrate applications, such as Oracle Java Cloud Service, SOA Cloud Service, and Integration Classic
# instances, to Oracle Cloud Infrastructure. For more information, see
# [Overview of Application Migration](/iaas/application-migration/appmigrationoverview.htm).
class ApplicationMigration::ApplicationMigrationClient
# Client used to make HTTP requests.
# @return [OCI::ApiClient]
attr_reader :api_client
# Fully qualified endpoint URL
# @return [String]
attr_reader :endpoint
# The default retry configuration to apply to all operations in this service client. This can be overridden
# on a per-operation basis. The default retry configuration value is `nil`, which means that an operation
# will not perform any retries
# @return [OCI::Retry::RetryConfig]
attr_reader :retry_config
# The region, which will usually correspond to a value in {OCI::Regions::REGION_ENUM}.
# @return [String]
attr_reader :region
# rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Layout/EmptyLines, Metrics/PerceivedComplexity
# Creates a new ApplicationMigrationClient.
# Notes:
# If a config is not specified, then the global OCI.config will be used.
#
# This client is not thread-safe
#
# Either a region or an endpoint must be specified. If an endpoint is specified, it will be used instead of the
# region. A region may be specified in the config or via or the region parameter. If specified in both, then the
# region parameter will be used.
# @param [Config] config A Config object.
# @param [String] region A region used to determine the service endpoint. This will usually
# correspond to a value in {OCI::Regions::REGION_ENUM}, but may be an arbitrary string.
# @param [String] endpoint The fully qualified endpoint URL
# @param [OCI::BaseSigner] signer A signer implementation which can be used by this client. If this is not provided then
# a signer will be constructed via the provided config. One use case of this parameter is instance principals authentication,
# so that the instance principals signer can be provided to the client
# @param [OCI::ApiClientProxySettings] proxy_settings If your environment requires you to use a proxy server for outgoing HTTP requests
# the details for the proxy can be provided in this parameter
# @param [OCI::Retry::RetryConfig] retry_config The retry configuration for this service client. This represents the default retry configuration to
# apply across all operations. This can be overridden on a per-operation basis. The default retry configuration value is `nil`, which means that an operation
# will not perform any retries
def initialize(config: nil, region: nil, endpoint: nil, signer: nil, proxy_settings: nil, retry_config: nil)
  # If the signer is an InstancePrincipalsSecurityTokenSigner or SecurityTokenSigner and no config was supplied (they are self-sufficient signers)
  # then create a dummy config to pass to the ApiClient constructor. If customers wish to create a client which uses instance principals
  # and has config (either populated programmatically or loaded from a file), they must construct that config themselves and then
  # pass it to this constructor.
  #
  # If there is no signer (or the signer is not an instance principals signer) and no config was supplied, this is not valid
  # so try and load the config from the default file.
  config = OCI::Config.validate_and_build_config_with_signer(config, signer)

  # No signer supplied: build one from the (now validated) config file auth.
  signer = OCI::Signer.config_file_auth_builder(config) if signer.nil?

  @api_client = OCI::ApiClient.new(config, signer, proxy_settings: proxy_settings)
  @retry_config = retry_config

  # An explicit endpoint wins over any region; otherwise derive the endpoint
  # from the first region found among the parameter, the config, and the signer.
  if endpoint
    @endpoint = endpoint + '/20191031'
  else
    region ||= config.region
    region ||= signer.region if signer.respond_to?(:region)
    self.region = region
  end
  logger.info "ApplicationMigrationClient endpoint set to '#{@endpoint}'." if logger
end
# rubocop:enable Metrics/AbcSize, Metrics/CyclomaticComplexity, Layout/EmptyLines, Metrics/PerceivedComplexity
# Set the region used to determine the service endpoint. This will usually
# correspond to a value in {OCI::Regions::REGION_ENUM}, but may be an
# arbitrary string.
def region=(new_region)
  @region = new_region

  raise 'A region must be specified.' if !@region

  endpoint_template = 'https://applicationmigration.{region}.oci.{secondLevelDomain}'
  @endpoint = OCI::Regions.get_service_endpoint_for_template(@region, endpoint_template) + '/20191031'

  logger.info "ApplicationMigrationClient endpoint set to '#{@endpoint} from region #{@region}'." if logger
end
# @return [Logger] The logger for this client. May be nil.
def logger
  # Delegates to the logger configured on the underlying ApiClient's config.
  @api_client.config.logger
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Cancels the specified work request. When you cancel a work request, it causes the in-progress task to be canceled.
# For example, if the create migration work request is in the accepted or in progress state for a long time, you can cancel the work request.
#
# When you cancel a work request, the state of the work request changes to cancelling, and then to the cancelled state.
#
# @param [String] work_request_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the work request.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :if_match For optimistic concurrency control. In the `PUT` or `DELETE` call for a resource, set the `if-match`
# parameter to the value of the etag from a previous `GET` or `POST` response for that resource. The resource
# will be updated or deleted only if the etag you provide matches the resource's current etag value.
#
# @return [Response] A Response object with data of type nil
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/cancel_work_request.rb.html) to see an example of how to use cancel_work_request API.
def cancel_work_request(work_request_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#cancel_work_request.' if logger

  # Both a nil and a blank work request id are caller errors.
  raise "Missing the required parameter 'work_request_id' when calling cancel_work_request." if work_request_id.nil?
  raise "Parameter value for 'work_request_id' must not be blank" if OCI::Internal::Util.blank_string?(work_request_id)

  path = '/workRequests/{workRequestId}'.sub('{workRequestId}', work_request_id.to_s)
  operation_signing_strategy = :standard

  # rubocop:disable Style/NegatedIf
  # Query Params
  query_params = {}

  # Header Params
  header_params = {}
  header_params[:accept] = 'application/json'
  header_params[:'content-type'] = 'application/json'
  header_params[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  header_params[:'if-match'] = opts[:if_match] if opts[:if_match]
  # rubocop:enable Style/NegatedIf

  # DELETE carries no body.
  post_body = nil

  # Per-operation retry config (from opts) overrides the client-level default.
  # rubocop:disable Metrics/BlockLength
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#cancel_work_request') do
    @api_client.call_api(
      :DELETE,
      path,
      endpoint,
      header_params: header_params,
      query_params: query_params,
      operation_signing_strategy: operation_signing_strategy,
      body: post_body
    )
  end
  # rubocop:enable Metrics/BlockLength
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Moves the specified migration into a different compartment within the same tenancy. For information about moving resources between compartments,
# see [Moving Resources to a Different Compartment](https://docs.cloud.oracle.com/iaas/Content/Identity/Tasks/managingcompartments.htm#moveRes).
#
# @param [String] migration_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the migration.
# @param [OCI::ApplicationMigration::Models::ChangeCompartmentDetails] change_migration_compartment_details The updated compartment details
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :if_match For optimistic concurrency control. In the `PUT` or `DELETE` call for a resource, set the `if-match`
# parameter to the value of the etag from a previous `GET` or `POST` response for that resource. The resource
# will be updated or deleted only if the etag you provide matches the resource's current etag value.
#
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :opc_retry_token A token that uniquely identifies a request so it can be retried in case of a timeout or
# server error without risk of retrying the same action. Retry tokens expire after
# 24 hours, but can be invalidated before then due to conflicting operations. For example,
# if a resource has been deleted and purged from the system, then a retry of the original
# creation request may be rejected.
#
# @return [Response] A Response object with data of type nil
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/change_migration_compartment.rb.html) to see an example of how to use change_migration_compartment API.
def change_migration_compartment(migration_id, change_migration_compartment_details, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#change_migration_compartment.' if logger

  # Both parameters are required; a blank migration id is also a caller error.
  raise "Missing the required parameter 'migration_id' when calling change_migration_compartment." if migration_id.nil?
  raise "Missing the required parameter 'change_migration_compartment_details' when calling change_migration_compartment." if change_migration_compartment_details.nil?
  raise "Parameter value for 'migration_id' must not be blank" if OCI::Internal::Util.blank_string?(migration_id)

  path = '/migrations/{migrationId}/actions/changeCompartment'.sub('{migrationId}', migration_id.to_s)
  operation_signing_strategy = :standard

  # rubocop:disable Style/NegatedIf
  # Query Params
  query_params = {}

  # Header Params
  header_params = {}
  header_params[:accept] = 'application/json'
  header_params[:'content-type'] = 'application/json'
  header_params[:'if-match'] = opts[:if_match] if opts[:if_match]
  header_params[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  header_params[:'opc-retry-token'] = opts[:opc_retry_token] if opts[:opc_retry_token]
  # rubocop:enable Style/NegatedIf
  # A retry token is always sent so retried POSTs are not applied twice;
  # generate one when the caller did not supply their own.
  header_params[:'opc-retry-token'] ||= OCI::Retry.generate_opc_retry_token

  post_body = @api_client.object_to_http_body(change_migration_compartment_details)

  # Per-operation retry config (from opts) overrides the client-level default.
  # rubocop:disable Metrics/BlockLength
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#change_migration_compartment') do
    @api_client.call_api(
      :POST,
      path,
      endpoint,
      header_params: header_params,
      query_params: query_params,
      operation_signing_strategy: operation_signing_strategy,
      body: post_body
    )
  end
  # rubocop:enable Metrics/BlockLength
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Moves the specified source into a different compartment within the same tenancy. For information about moving resources
# between compartments, see [Moving Resources to a Different Compartment](https://docs.cloud.oracle.com/iaas/Content/Identity/Tasks/managingcompartments.htm#moveRes).
#
# @param [String] source_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the source.
# @param [OCI::ApplicationMigration::Models::ChangeCompartmentDetails] change_source_compartment_details The updated compartment details
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :if_match For optimistic concurrency control. In the `PUT` or `DELETE` call for a resource, set the `if-match`
# parameter to the value of the etag from a previous `GET` or `POST` response for that resource. The resource
# will be updated or deleted only if the etag you provide matches the resource's current etag value.
#
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :opc_retry_token A token that uniquely identifies a request so it can be retried in case of a timeout or
# server error without risk of retrying the same action. Retry tokens expire after
# 24 hours, but can be invalidated before then due to conflicting operations. For example,
# if a resource has been deleted and purged from the system, then a retry of the original
# creation request may be rejected.
#
# @return [Response] A Response object with data of type nil
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/change_source_compartment.rb.html) to see an example of how to use change_source_compartment API.
def change_source_compartment(source_id, change_source_compartment_details, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#change_source_compartment.' if logger

  # Both parameters are required; a blank source id is also a caller error.
  raise "Missing the required parameter 'source_id' when calling change_source_compartment." if source_id.nil?
  raise "Missing the required parameter 'change_source_compartment_details' when calling change_source_compartment." if change_source_compartment_details.nil?
  raise "Parameter value for 'source_id' must not be blank" if OCI::Internal::Util.blank_string?(source_id)

  path = '/sources/{sourceId}/actions/changeCompartment'.sub('{sourceId}', source_id.to_s)
  operation_signing_strategy = :standard

  # rubocop:disable Style/NegatedIf
  # Query Params
  query_params = {}

  # Header Params
  header_params = {}
  header_params[:accept] = 'application/json'
  header_params[:'content-type'] = 'application/json'
  header_params[:'if-match'] = opts[:if_match] if opts[:if_match]
  header_params[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  header_params[:'opc-retry-token'] = opts[:opc_retry_token] if opts[:opc_retry_token]
  # rubocop:enable Style/NegatedIf
  # A retry token is always sent so retried POSTs are not applied twice;
  # generate one when the caller did not supply their own.
  header_params[:'opc-retry-token'] ||= OCI::Retry.generate_opc_retry_token

  post_body = @api_client.object_to_http_body(change_source_compartment_details)

  # Per-operation retry config (from opts) overrides the client-level default.
  # rubocop:disable Metrics/BlockLength
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#change_source_compartment') do
    @api_client.call_api(
      :POST,
      path,
      endpoint,
      header_params: header_params,
      query_params: query_params,
      operation_signing_strategy: operation_signing_strategy,
      body: post_body
    )
  end
  # rubocop:enable Metrics/BlockLength
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Creates a migration. A migration represents the end-to-end workflow of moving an application from a source environment to Oracle Cloud
# Infrastructure. Each migration moves a single application to Oracle Cloud Infrastructure. For more information,
# see [Manage Migrations](https://docs.cloud.oracle.com/iaas/application-migration/manage_migrations.htm).
#
# When you create a migration, provide the required information to let Application Migration access the source environment.
# Application Migration uses this information to access the application in the source environment and discover application artifacts.
#
# All Oracle Cloud Infrastructure resources, including migrations, get an Oracle-assigned, unique ID called an Oracle Cloud Identifier (OCID).
# When you create a resource, you can find its OCID in the response. You can also retrieve a resource's OCID by using a List API operation on
# that resource type, or by viewing the resource in the Console. For more information, see Resource Identifiers.
#
# After you send your request, a migration is created in the compartment that contains the source. The new migration's lifecycle state
# will temporarily be <code>CREATING</code> and the state of the migration will be <code>DISCOVERING_APPLICATION</code>. During this phase,
# Application Migration sets the template for the <code>serviceConfig</code> and <code>applicationConfig</code> fields of the migration.
# When this operation is complete, the state of the migration changes to <code>MISSING_CONFIG_VALUES</code>.
# Next, you'll need to update the migration to provide configuration values. Before updating the
# migration, ensure that its state has changed to <code>MISSING_CONFIG_VALUES</code>.
#
# To track the progress of this operation, you can monitor the status of the Create Migration and Discover Application work requests
# by using the <code>{#get_work_request get_work_request}</code> REST API operation on the work request or by viewing the status of the work request in
# the console.
#
# @param [OCI::ApplicationMigration::Models::CreateMigrationDetails] create_migration_details The properties for creating a migration.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :opc_retry_token A token that uniquely identifies a request so it can be retried in case of a timeout or
# server error without risk of retrying the same action. Retry tokens expire after
# 24 hours, but can be invalidated before then due to conflicting operations. For example,
# if a resource has been deleted and purged from the system, then a retry of the original
# creation request may be rejected.
#
# @return [Response] A Response object with data of type {OCI::ApplicationMigration::Models::Migration Migration}
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/create_migration.rb.html) to see an example of how to use create_migration API.
# Creates a migration resource and returns the parsed Migration model.
def create_migration(create_migration_details, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#create_migration.' if logger

  # The request body is mandatory for this POST.
  raise "Missing the required parameter 'create_migration_details' when calling create_migration." if create_migration_details.nil?

  resolved_path = '/migrations'
  signing_strategy = :standard
  query = {}

  # Base headers plus caller-supplied optional ones.
  headers = {
    accept: 'application/json',
    'content-type': 'application/json'
  }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  headers[:'opc-retry-token'] = opts[:opc_retry_token] if opts[:opc_retry_token]
  # Ensure a retry token is always present so the create is idempotent on retry.
  headers[:'opc-retry-token'] ||= OCI::Retry.generate_opc_retry_token

  body_payload = @api_client.object_to_http_body(create_migration_details)

  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#create_migration') do
    @api_client.call_api(
      :POST,
      resolved_path,
      endpoint,
      header_params: headers,
      query_params: query,
      operation_signing_strategy: signing_strategy,
      body: body_payload,
      return_type: 'OCI::ApplicationMigration::Models::Migration'
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Creates a source in the specified compartment. In Application Migration, a source refers to the environment from which the application
# is being migrated. For more information, see [Manage Sources](https://docs.cloud.oracle.com/iaas/application-migration/manage_sources.htm).
#
# All Oracle Cloud Infrastructure resources, including sources, get an Oracle-assigned, unique ID called an Oracle Cloud Identifier (OCID).
# When you create a resource, you can find its OCID in the response. You can also retrieve a resource's OCID by using a List API operation
# on that resource type, or by viewing the resource in the Console.
#
# After you send your request, a source is created in the specified compartment. The new source's lifecycle state will temporarily be
# <code>CREATING</code>. Application Migration connects to the source environment with the authentication credentials that you have provided.
# If the connection is established, the status of the source changes to <code>ACTIVE</code> and Application Migration fetches the list of
# applications that are available for migration in the source environment.
#
# To track the progress of the operation, you can monitor the status of the Create Source work request by using the
# <code>{#get_work_request get_work_request}</code> REST API operation on the work request or by viewing the status of the work request in the console.
#
# Ensure that the state of the source has changed to <code>ACTIVE</code>, before you retrieve the list of applications from
# the source environment using the <code>{#list_source_applications list_source_applications}</code> REST API call.
#
# @param [OCI::ApplicationMigration::Models::CreateSourceDetails] create_source_details The properties for creating a source.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :opc_retry_token A token that uniquely identifies a request so it can be retried in case of a timeout or
# server error without risk of retrying the same action. Retry tokens expire after
# 24 hours, but can be invalidated before then due to conflicting operations. For example,
# if a resource has been deleted and purged from the system, then a retry of the original
# creation request may be rejected.
#
# @return [Response] A Response object with data of type {OCI::ApplicationMigration::Models::Source Source}
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/create_source.rb.html) to see an example of how to use create_source API.
# Creates a source resource and returns the parsed Source model.
def create_source(create_source_details, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#create_source.' if logger

  # The request body is mandatory for this POST.
  raise "Missing the required parameter 'create_source_details' when calling create_source." if create_source_details.nil?

  resolved_path = '/sources'
  signing_strategy = :standard
  query = {}

  # Base headers plus caller-supplied optional ones.
  headers = {
    accept: 'application/json',
    'content-type': 'application/json'
  }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  headers[:'opc-retry-token'] = opts[:opc_retry_token] if opts[:opc_retry_token]
  # Ensure a retry token is always present so the create is idempotent on retry.
  headers[:'opc-retry-token'] ||= OCI::Retry.generate_opc_retry_token

  body_payload = @api_client.object_to_http_body(create_source_details)

  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#create_source') do
    @api_client.call_api(
      :POST,
      resolved_path,
      endpoint,
      header_params: headers,
      query_params: query,
      operation_signing_strategy: signing_strategy,
      body: body_payload,
      return_type: 'OCI::ApplicationMigration::Models::Source'
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Deletes the specified migration.
#
# If you have migrated the application or for any other reason if you no longer require a migration, then you can delete the
# relevant migration. You can delete a migration, irrespective of its state. If any work request is being processed for the migration
# that you want to delete, then the associated work requests are cancelled and then the migration is deleted.
#
# @param [String] migration_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the migration.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :if_match For optimistic concurrency control. In the `PUT` or `DELETE` call for a resource, set the `if-match`
# parameter to the value of the etag from a previous `GET` or `POST` response for that resource. The resource
# will be updated or deleted only if the etag you provide matches the resource's current etag value.
#
# @return [Response] A Response object with data of type nil
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/delete_migration.rb.html) to see an example of how to use delete_migration API.
# Deletes a single migration by OCID.
def delete_migration(migration_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#delete_migration.' if logger

  raise "Missing the required parameter 'migration_id' when calling delete_migration." if migration_id.nil?
  raise "Parameter value for 'migration_id' must not be blank" if OCI::Internal::Util.blank_string?(migration_id)

  resolved_path = '/migrations/{migrationId}'.sub('{migrationId}', migration_id.to_s)
  signing_strategy = :standard
  query = {}

  headers = {
    accept: 'application/json',
    'content-type': 'application/json'
  }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  # if-match enables optimistic concurrency control on the delete.
  headers[:'if-match'] = opts[:if_match] if opts[:if_match]

  # DELETE carries no request body.
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#delete_migration') do
    @api_client.call_api(
      :DELETE,
      resolved_path,
      endpoint,
      header_params: headers,
      query_params: query,
      operation_signing_strategy: signing_strategy,
      body: nil
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Deletes the specified source.
#
# Before deleting a source, you must delete all the migrations associated with the source.
# If you have migrated all the required applications in a source or for any other reason you no longer require a source, then you can
# delete the relevant source.
#
# @param [String] source_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the source.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :if_match For optimistic concurrency control. In the `PUT` or `DELETE` call for a resource, set the `if-match`
# parameter to the value of the etag from a previous `GET` or `POST` response for that resource. The resource
# will be updated or deleted only if the etag you provide matches the resource's current etag value.
#
# @return [Response] A Response object with data of type nil
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/delete_source.rb.html) to see an example of how to use delete_source API.
# Deletes a single source by OCID.
def delete_source(source_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#delete_source.' if logger

  raise "Missing the required parameter 'source_id' when calling delete_source." if source_id.nil?
  raise "Parameter value for 'source_id' must not be blank" if OCI::Internal::Util.blank_string?(source_id)

  resolved_path = '/sources/{sourceId}'.sub('{sourceId}', source_id.to_s)
  signing_strategy = :standard
  query = {}

  headers = {
    accept: 'application/json',
    'content-type': 'application/json'
  }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  # if-match enables optimistic concurrency control on the delete.
  headers[:'if-match'] = opts[:if_match] if opts[:if_match]

  # DELETE carries no request body.
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#delete_source') do
    @api_client.call_api(
      :DELETE,
      resolved_path,
      endpoint,
      header_params: headers,
      query_params: query,
      operation_signing_strategy: signing_strategy,
      body: nil
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Retrieves details of the specified migration.
# @param [String] migration_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the migration.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @return [Response] A Response object with data of type {OCI::ApplicationMigration::Models::Migration Migration}
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/get_migration.rb.html) to see an example of how to use get_migration API.
# Fetches one migration by OCID and returns the parsed Migration model.
def get_migration(migration_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#get_migration.' if logger

  raise "Missing the required parameter 'migration_id' when calling get_migration." if migration_id.nil?
  raise "Parameter value for 'migration_id' must not be blank" if OCI::Internal::Util.blank_string?(migration_id)

  resolved_path = '/migrations/{migrationId}'.sub('{migrationId}', migration_id.to_s)
  signing_strategy = :standard
  query = {}

  headers = {
    accept: 'application/json',
    'content-type': 'application/json'
  }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]

  # GET carries no request body.
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#get_migration') do
    @api_client.call_api(
      :GET,
      resolved_path,
      endpoint,
      header_params: headers,
      query_params: query,
      operation_signing_strategy: signing_strategy,
      body: nil,
      return_type: 'OCI::ApplicationMigration::Models::Migration'
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Retrieves details of the specified source. Specify the OCID of the source for which you want to retrieve details.
#
# @param [String] source_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the source.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @return [Response] A Response object with data of type {OCI::ApplicationMigration::Models::Source Source}
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/get_source.rb.html) to see an example of how to use get_source API.
# Fetches one source by OCID and returns the parsed Source model.
def get_source(source_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#get_source.' if logger

  raise "Missing the required parameter 'source_id' when calling get_source." if source_id.nil?
  raise "Parameter value for 'source_id' must not be blank" if OCI::Internal::Util.blank_string?(source_id)

  resolved_path = '/sources/{sourceId}'.sub('{sourceId}', source_id.to_s)
  signing_strategy = :standard
  query = {}

  headers = {
    accept: 'application/json',
    'content-type': 'application/json'
  }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]

  # GET carries no request body.
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#get_source') do
    @api_client.call_api(
      :GET,
      resolved_path,
      endpoint,
      header_params: headers,
      query_params: query,
      operation_signing_strategy: signing_strategy,
      body: nil,
      return_type: 'OCI::ApplicationMigration::Models::Source'
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Gets the details of the specified work request.
# @param [String] work_request_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the work request.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @return [Response] A Response object with data of type {OCI::ApplicationMigration::Models::WorkRequest WorkRequest}
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/get_work_request.rb.html) to see an example of how to use get_work_request API.
# Fetches one work request by OCID and returns the parsed WorkRequest model.
def get_work_request(work_request_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#get_work_request.' if logger

  raise "Missing the required parameter 'work_request_id' when calling get_work_request." if work_request_id.nil?
  raise "Parameter value for 'work_request_id' must not be blank" if OCI::Internal::Util.blank_string?(work_request_id)

  resolved_path = '/workRequests/{workRequestId}'.sub('{workRequestId}', work_request_id.to_s)
  signing_strategy = :standard
  query = {}

  headers = {
    accept: 'application/json',
    'content-type': 'application/json'
  }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]

  # GET carries no request body.
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#get_work_request') do
    @api_client.call_api(
      :GET,
      resolved_path,
      endpoint,
      header_params: headers,
      query_params: query,
      operation_signing_strategy: signing_strategy,
      body: nil,
      return_type: 'OCI::ApplicationMigration::Models::WorkRequest'
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Retrieves details of all the migrations that are available in the specified compartment.
#
# @param [String] compartment_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a compartment. Retrieves details of objects in the specified compartment.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) on which to query for a migration.
#
# @option opts [Integer] :limit The number of items returned in a paginated `List` call. For information about pagination, see
# [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @option opts [String] :page The value of the `opc-next-page` response header from the preceding `List` call.
# For information about pagination, see [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @option opts [String] :sort_order The sort order, either `ASC` (ascending) or `DESC` (descending).
# (default to ASC)
# @option opts [String] :sort_by Specifies the field on which to sort.
# By default, `TIMECREATED` is ordered descending.
# By default, `DISPLAYNAME` is ordered ascending. Note that you can sort only on one field.
# (default to DISPLAYNAME)
# Allowed values are: TIMECREATED, DISPLAYNAME
# @option opts [String] :display_name Display name on which to query.
# @option opts [String] :lifecycle_state This field is not supported. Do not use.
# @return [Response] A Response object with data of type Array<{OCI::ApplicationMigration::Models::MigrationSummary MigrationSummary}>
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/list_migrations.rb.html) to see an example of how to use list_migrations API.
# Lists migrations in a compartment, with optional filtering and sorting.
def list_migrations(compartment_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#list_migrations.' if logger

  raise "Missing the required parameter 'compartment_id' when calling list_migrations." if compartment_id.nil?

  # Reject unknown enum values up front so a bad request never leaves the client.
  if opts[:sort_order] && !OCI::ApplicationMigration::Models::SORT_ORDERS_ENUM.include?(opts[:sort_order])
    raise 'Invalid value for "sort_order", must be one of the values in OCI::ApplicationMigration::Models::SORT_ORDERS_ENUM.'
  end
  if opts[:sort_by] && !%w[TIMECREATED DISPLAYNAME].include?(opts[:sort_by])
    raise 'Invalid value for "sort_by", must be one of TIMECREATED, DISPLAYNAME.'
  end
  if opts[:lifecycle_state] && !OCI::ApplicationMigration::Models::MIGRATION_LIFECYCLE_STATES_ENUM.include?(opts[:lifecycle_state])
    raise 'Invalid value for "lifecycle_state", must be one of the values in OCI::ApplicationMigration::Models::MIGRATION_LIFECYCLE_STATES_ENUM.'
  end

  resolved_path = '/migrations'
  signing_strategy = :standard

  # Translate snake_case option names to camelCase wire-format query keys;
  # only options the caller actually set are forwarded.
  query = { compartmentId: compartment_id }
  {
    id: :id,
    limit: :limit,
    page: :page,
    sortOrder: :sort_order,
    sortBy: :sort_by,
    displayName: :display_name,
    lifecycleState: :lifecycle_state
  }.each do |wire_key, opt_key|
    query[wire_key] = opts[opt_key] if opts[opt_key]
  end

  headers = {
    accept: 'application/json',
    'content-type': 'application/json'
  }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]

  # GET carries no request body.
  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#list_migrations') do
    @api_client.call_api(
      :GET,
      resolved_path,
      endpoint,
      header_params: headers,
      query_params: query,
      operation_signing_strategy: signing_strategy,
      body: nil,
      return_type: 'Array<OCI::ApplicationMigration::Models::MigrationSummary>'
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Retrieves details of all the applications associated with the specified source.
# This list is generated dynamically by interrogating the source and the list changes as applications are started or
# stopped in the source environment.
#
# @param [String] source_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the source.
# @param [String] compartment_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a compartment. Retrieves details of objects in the specified compartment.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [Integer] :limit The number of items returned in a paginated `List` call. For information about pagination, see
# [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @option opts [String] :page The value of the `opc-next-page` response header from the preceding `List` call.
# For information about pagination, see [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @option opts [String] :sort_order The sort order, either `ASC` (ascending) or `DESC` (descending).
# (default to ASC)
# @option opts [String] :sort_by Specifies the field on which to sort.
# By default, `TIMECREATED` is ordered descending.
# By default, `DISPLAYNAME` is ordered ascending. Note that you can sort only on one field.
# (default to DISPLAYNAME)
# Allowed values are: TIMECREATED, DISPLAYNAME
# @option opts [String] :display_name Resource name on which to query.
# @return [Response] A Response object with data of type Array<{OCI::ApplicationMigration::Models::SourceApplicationSummary SourceApplicationSummary}>
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/list_source_applications.rb.html) to see an example of how to use list_source_applications API.
# Lists the applications available for migration in the given source.
# Validates inputs, builds the query, and issues a retried GET.
def list_source_applications(source_id, compartment_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#list_source_applications.' if logger

  # Required / enum-constrained parameter validation, fail-fast.
  raise "Missing the required parameter 'source_id' when calling list_source_applications." if source_id.nil?
  raise "Missing the required parameter 'compartment_id' when calling list_source_applications." if compartment_id.nil?
  raise 'Invalid value for "sort_order", must be one of the values in OCI::ApplicationMigration::Models::SORT_ORDERS_ENUM.' if opts[:sort_order] && !OCI::ApplicationMigration::Models::SORT_ORDERS_ENUM.include?(opts[:sort_order])
  raise 'Invalid value for "sort_by", must be one of TIMECREATED, DISPLAYNAME.' if opts[:sort_by] && !%w[TIMECREATED DISPLAYNAME].include?(opts[:sort_by])
  raise "Parameter value for 'source_id' must not be blank" if OCI::Internal::Util.blank_string?(source_id)

  resource_path = '/sources/{sourceId}/applications'.sub('{sourceId}', source_id.to_s)

  # Mandatory compartment plus whichever optional filters the caller set
  # (truthy check mirrors the generated `if opts[...]` guards).
  params = { compartmentId: compartment_id }
  params.merge!(
    {
      limit: opts[:limit],
      page: opts[:page],
      sortOrder: opts[:sort_order],
      sortBy: opts[:sort_by],
      displayName: opts[:display_name]
    }.select { |_k, v| v }
  )

  headers = { accept: 'application/json', 'content-type': 'application/json' }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]

  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#list_source_applications') do
    @api_client.call_api(
      :GET,
      resource_path,
      endpoint,
      header_params: headers,
      query_params: params,
      operation_signing_strategy: :standard,
      body: nil,
      return_type: 'Array<OCI::ApplicationMigration::Models::SourceApplicationSummary>'
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Retrieves details of all the sources that are available in the specified compartment and match the specified query criteria.
# If you don't specify any query criteria, then details of all the sources are displayed.
# To filter the retrieved results, you can pass one or more of the following query parameters, by appending them to the URI
# as shown in the following example.
#
# @param [String] compartment_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a compartment. Retrieves details of objects in the specified compartment.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) on which to query for a source.
#
# @option opts [Integer] :limit The number of items returned in a paginated `List` call. For information about pagination, see
# [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @option opts [String] :page The value of the `opc-next-page` response header from the preceding `List` call.
# For information about pagination, see [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @option opts [String] :sort_order The sort order, either `ASC` (ascending) or `DESC` (descending).
# (default to ASC)
# @option opts [String] :sort_by Specifies the field on which to sort.
# By default, `TIMECREATED` is ordered descending.
# By default, `DISPLAYNAME` is ordered ascending. Note that you can sort only on one field.
# (default to DISPLAYNAME)
# Allowed values are: TIMECREATED, DISPLAYNAME
# @option opts [String] :display_name Display name on which to query.
# @option opts [String] :lifecycle_state Retrieves details of sources in the specified lifecycle state.
# @return [Response] A Response object with data of type Array<{OCI::ApplicationMigration::Models::SourceSummary SourceSummary}>
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/list_sources.rb.html) to see an example of how to use list_sources API.
# Lists sources in a compartment, optionally filtered by id, display name,
# lifecycle state, and sorted/paginated per the caller's options.
def list_sources(compartment_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#list_sources.' if logger

  # Required / enum-constrained parameter validation, fail-fast.
  raise "Missing the required parameter 'compartment_id' when calling list_sources." if compartment_id.nil?
  raise 'Invalid value for "sort_order", must be one of the values in OCI::ApplicationMigration::Models::SORT_ORDERS_ENUM.' if opts[:sort_order] && !OCI::ApplicationMigration::Models::SORT_ORDERS_ENUM.include?(opts[:sort_order])
  raise 'Invalid value for "sort_by", must be one of TIMECREATED, DISPLAYNAME.' if opts[:sort_by] && !%w[TIMECREATED DISPLAYNAME].include?(opts[:sort_by])
  raise 'Invalid value for "lifecycle_state", must be one of the values in OCI::ApplicationMigration::Models::SOURCE_LIFECYCLE_STATES_ENUM.' if opts[:lifecycle_state] && !OCI::ApplicationMigration::Models::SOURCE_LIFECYCLE_STATES_ENUM.include?(opts[:lifecycle_state])

  # Mandatory compartment plus optional truthy filters.
  params = { compartmentId: compartment_id }
  params.merge!(
    {
      id: opts[:id],
      limit: opts[:limit],
      page: opts[:page],
      sortOrder: opts[:sort_order],
      sortBy: opts[:sort_by],
      displayName: opts[:display_name],
      lifecycleState: opts[:lifecycle_state]
    }.select { |_k, v| v }
  )

  headers = { accept: 'application/json', 'content-type': 'application/json' }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]

  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#list_sources') do
    @api_client.call_api(
      :GET,
      '/sources',
      endpoint,
      header_params: headers,
      query_params: params,
      operation_signing_strategy: :standard,
      body: nil,
      return_type: 'Array<OCI::ApplicationMigration::Models::SourceSummary>'
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Retrieves details of the errors encountered while executing an operation that is tracked by the specified work request.
#
# @param [String] work_request_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the work request.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [Integer] :limit The number of items returned in a paginated `List` call. For information about pagination, see
# [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @option opts [String] :page The value of the `opc-next-page` response header from the preceding `List` call.
# For information about pagination, see [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @option opts [String] :sort_order The sort order, either `ASC` (ascending) or `DESC` (descending).
# (default to ASC)
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @return [Response] A Response object with data of type Array<{OCI::ApplicationMigration::Models::WorkRequestError WorkRequestError}>
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/list_work_request_errors.rb.html) to see an example of how to use list_work_request_errors API.
# Lists the errors recorded against the given work request.
def list_work_request_errors(work_request_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#list_work_request_errors.' if logger

  # Required / enum-constrained parameter validation, fail-fast.
  raise "Missing the required parameter 'work_request_id' when calling list_work_request_errors." if work_request_id.nil?
  raise 'Invalid value for "sort_order", must be one of the values in OCI::ApplicationMigration::Models::SORT_ORDERS_ENUM.' if opts[:sort_order] && !OCI::ApplicationMigration::Models::SORT_ORDERS_ENUM.include?(opts[:sort_order])
  raise "Parameter value for 'work_request_id' must not be blank" if OCI::Internal::Util.blank_string?(work_request_id)

  resource_path = '/workRequests/{workRequestId}/errors'.sub('{workRequestId}', work_request_id.to_s)

  # Only pagination/sort options are sent, and only when truthy.
  params = {
    limit: opts[:limit],
    page: opts[:page],
    sortOrder: opts[:sort_order]
  }.select { |_k, v| v }

  headers = { accept: 'application/json', 'content-type': 'application/json' }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]

  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#list_work_request_errors') do
    @api_client.call_api(
      :GET,
      resource_path,
      endpoint,
      header_params: headers,
      query_params: params,
      operation_signing_strategy: :standard,
      body: nil,
      return_type: 'Array<OCI::ApplicationMigration::Models::WorkRequestError>'
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Retrieves logs for the specified work request.
#
# @param [String] work_request_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the work request.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [Integer] :limit The number of items returned in a paginated `List` call. For information about pagination, see
# [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @option opts [String] :page The value of the `opc-next-page` response header from the preceding `List` call.
# For information about pagination, see [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @option opts [String] :sort_order The sort order, either `ASC` (ascending) or `DESC` (descending).
# (default to ASC)
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @return [Response] A Response object with data of type Array<{OCI::ApplicationMigration::Models::WorkRequestLogEntry WorkRequestLogEntry}>
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/list_work_request_logs.rb.html) to see an example of how to use list_work_request_logs API.
# Lists the log entries recorded against the given work request.
def list_work_request_logs(work_request_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#list_work_request_logs.' if logger

  # Required / enum-constrained parameter validation, fail-fast.
  raise "Missing the required parameter 'work_request_id' when calling list_work_request_logs." if work_request_id.nil?
  raise 'Invalid value for "sort_order", must be one of the values in OCI::ApplicationMigration::Models::SORT_ORDERS_ENUM.' if opts[:sort_order] && !OCI::ApplicationMigration::Models::SORT_ORDERS_ENUM.include?(opts[:sort_order])
  raise "Parameter value for 'work_request_id' must not be blank" if OCI::Internal::Util.blank_string?(work_request_id)

  resource_path = '/workRequests/{workRequestId}/logs'.sub('{workRequestId}', work_request_id.to_s)

  # Only pagination/sort options are sent, and only when truthy.
  params = {
    limit: opts[:limit],
    page: opts[:page],
    sortOrder: opts[:sort_order]
  }.select { |_k, v| v }

  headers = { accept: 'application/json', 'content-type': 'application/json' }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]

  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#list_work_request_logs') do
    @api_client.call_api(
      :GET,
      resource_path,
      endpoint,
      header_params: headers,
      query_params: params,
      operation_signing_strategy: :standard,
      body: nil,
      return_type: 'Array<OCI::ApplicationMigration::Models::WorkRequestLogEntry>'
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Retrieves details of all the work requests and match the specified query criteria. To filter the retrieved results, you can pass one or more of the following query parameters, by appending them to the URI as shown in the following example.
#
# @param [String] compartment_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a compartment. Retrieves details of objects in the specified compartment.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :resource_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) for a resource. Retrieves details of the specified resource.
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [Integer] :limit The number of items returned in a paginated `List` call. For information about pagination, see
# [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @option opts [String] :page The value of the `opc-next-page` response header from the preceding `List` call.
# For information about pagination, see [List Pagination](https://docs.cloud.oracle.com/iaas/Content/API/Concepts/usingapi.htm#nine).
#
# @return [Response] A Response object with data of type Array<{OCI::ApplicationMigration::Models::WorkRequestSummary WorkRequestSummary}>
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/list_work_requests.rb.html) to see an example of how to use list_work_requests API.
# Lists work requests in a compartment, optionally scoped to one resource.
def list_work_requests(compartment_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#list_work_requests.' if logger

  raise "Missing the required parameter 'compartment_id' when calling list_work_requests." if compartment_id.nil?

  # Mandatory compartment plus optional truthy filters.
  params = { compartmentId: compartment_id }
  params.merge!(
    {
      resourceId: opts[:resource_id],
      limit: opts[:limit],
      page: opts[:page]
    }.select { |_k, v| v }
  )

  headers = { accept: 'application/json', 'content-type': 'application/json' }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]

  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#list_work_requests') do
    @api_client.call_api(
      :GET,
      '/workRequests',
      endpoint,
      header_params: headers,
      query_params: params,
      operation_signing_strategy: :standard,
      body: nil,
      return_type: 'Array<OCI::ApplicationMigration::Models::WorkRequestSummary>'
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Starts migrating the specified application to Oracle Cloud Infrastructure.
#
# Before sending this request, ensure that you have provided configuration details to update the migration and the state of the migration
# is <code>READY</code>.
#
# After you send this request, the migration's state will temporarily be <code>MIGRATING</code>.
#
# To track the progress of the operation, you can monitor the status of the Migrate Application work request by using the
# <code>{#get_work_request get_work_request}</code> REST API operation on the work request or by viewing the status of the work request in the console.
# When this work request is processed successfully, Application Migration creates the required resources in the target environment
# and the state of the migration changes to <code>MIGRATION_SUCCEEDED</code>.
#
# @param [String] migration_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the migration.
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :if_match For optimistic concurrency control. In the `PUT` or `DELETE` call for a resource, set the `if-match`
# parameter to the value of the etag from a previous `GET` or `POST` response for that resource. The resource
# will be updated or deleted only if the etag you provide matches the resource's current etag value.
#
# @option opts [String] :opc_retry_token A token that uniquely identifies a request so it can be retried in case of a timeout or
# server error without risk of retrying the same action. Retry tokens expire after
# 24 hours, but can be invalidated before then due to conflicting operations. For example,
# if a resource has been deleted and purged from the system, then a retry of the original
# creation request may be rejected.
#
# @return [Response] A Response object with data of type nil
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/migrate_application.rb.html) to see an example of how to use migrate_application API.
# Starts migrating the specified application to OCI via a retried POST.
def migrate_application(migration_id, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#migrate_application.' if logger

  raise "Missing the required parameter 'migration_id' when calling migrate_application." if migration_id.nil?
  raise "Parameter value for 'migration_id' must not be blank" if OCI::Internal::Util.blank_string?(migration_id)

  resource_path = '/migrations/{migrationId}/actions/migrate'.sub('{migrationId}', migration_id.to_s)

  headers = { accept: 'application/json', 'content-type': 'application/json' }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  headers[:'if-match'] = opts[:if_match] if opts[:if_match]
  # Use the caller-supplied retry token when present; otherwise generate one
  # so retries of this POST remain idempotent on the server side.
  headers[:'opc-retry-token'] = opts[:opc_retry_token] || OCI::Retry.generate_opc_retry_token

  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#migrate_application') do
    @api_client.call_api(
      :POST,
      resource_path,
      endpoint,
      header_params: headers,
      query_params: {},
      operation_signing_strategy: :standard,
      body: nil
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# Updates the configuration details for the specified migration.
#
# When you create a migration, Application Migration sets the template for the <code>serviceConfig</code> and <code>applicationConfig</code>
# attributes of the migration.
# When you update the migration, you must provide values for these fields to specify configuration information for the application in the
# target environment.
#
#
#
# Before updating the migration, complete the following tasks:
# <ol>
# <li>Identify the migration that you want to update and ensure that the migration is in the <code>MISSING_CONFIG_VALUES</code> state.</li>
# <li>Get details of the migration using the <code>GetMigration</code> command. This returns the template for the <code>serviceConfig</code>
# and <code>applicationConfig</code> attributes of the migration.</li>
# <li>You must fill out the required details for the <code>serviceConfig</code> and <code>applicationConfig</code> attributes.
# The <code>isRequired</code> attribute of a configuration property indicates whether it is mandatory to provide a value.</li>
# <li>You can provide values for the optional configuration properties or you can delete the optional properties for which you do not
# provide values. Note that you cannot add any property that is not present in the template.</li>
# </ol>
#
# To update the migration, pass the configuration values in the request body. The information that you must provide depends on the type
# of application that you are migrating. For reference information about configuration fields, see
# [Provide Configuration Information](https://docs.cloud.oracle.com/iaas/application-migration/manage_migrations.htm#provide_configuration_details).
#
# To track the progress of the operation, you can monitor the status of the Update Migration work request by using the
# <code>{#get_work_request get_work_request}</code> REST API operation on the work request or by viewing the status of the work request in the console.
#
# When the migration has been updated, the state of the migration changes to <code>READY</code>. After updating the migration,
# you can start the migration whenever you are ready.
#
# @param [String] migration_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the migration.
# @param [OCI::ApplicationMigration::Models::UpdateMigrationDetails] update_migration_details Updated configuration for the migration.
#
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :opc_retry_token A token that uniquely identifies a request so it can be retried in case of a timeout or
# server error without risk of retrying the same action. Retry tokens expire after
# 24 hours, but can be invalidated before then due to conflicting operations. For example,
# if a resource has been deleted and purged from the system, then a retry of the original
# creation request may be rejected.
#
# @option opts [String] :if_match For optimistic concurrency control. In the `PUT` or `DELETE` call for a resource, set the `if-match`
# parameter to the value of the etag from a previous `GET` or `POST` response for that resource. The resource
# will be updated or deleted only if the etag you provide matches the resource's current etag value.
#
# @return [Response] A Response object with data of type nil
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/update_migration.rb.html) to see an example of how to use update_migration API.
# Updates the configuration of the specified migration via a retried PUT.
def update_migration(migration_id, update_migration_details, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#update_migration.' if logger

  raise "Missing the required parameter 'migration_id' when calling update_migration." if migration_id.nil?
  raise "Missing the required parameter 'update_migration_details' when calling update_migration." if update_migration_details.nil?
  raise "Parameter value for 'migration_id' must not be blank" if OCI::Internal::Util.blank_string?(migration_id)

  resource_path = '/migrations/{migrationId}'.sub('{migrationId}', migration_id.to_s)

  headers = { accept: 'application/json', 'content-type': 'application/json' }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  # Use the caller-supplied retry token when present; otherwise generate one.
  headers[:'opc-retry-token'] = opts[:opc_retry_token] || OCI::Retry.generate_opc_retry_token
  headers[:'if-match'] = opts[:if_match] if opts[:if_match]

  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#update_migration') do
    @api_client.call_api(
      :PUT,
      resource_path,
      endpoint,
      header_params: headers,
      query_params: {},
      operation_signing_strategy: :standard,
      body: @api_client.object_to_http_body(update_migration_details)
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines
# You can update the authorization details to access the source environment from which you want to migrate applications to Oracle Cloud
# Infrastructure. You can also update the description and tags of a source.
#
# **Warning:** Oracle recommends that you avoid using any confidential information when you supply string values using the API.
#
# @param [String] source_id The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the source.
# @param [OCI::ApplicationMigration::Models::UpdateSourceDetails] update_source_details Updated configuration for the source.
#
# @param [Hash] opts the optional parameters
# @option opts [OCI::Retry::RetryConfig] :retry_config The retry configuration to apply to this operation. If no key is provided then the service-level
# retry configuration defined by {#retry_config} will be used. If an explicit `nil` value is provided then the operation will not retry
# @option opts [String] :opc_request_id Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a
# particular request, please provide the request ID.
#
# @option opts [String] :if_match For optimistic concurrency control. In the `PUT` or `DELETE` call for a resource, set the `if-match`
# parameter to the value of the etag from a previous `GET` or `POST` response for that resource. The resource
# will be updated or deleted only if the etag you provide matches the resource's current etag value.
#
# @return [Response] A Response object with data of type nil
# @note Click [here](https://docs.cloud.oracle.com/en-us/iaas/tools/ruby-sdk-examples/latest/applicationmigration/update_source.rb.html) to see an example of how to use update_source API.
# Updates the authorization details / description / tags of a source
# via a retried PUT.
def update_source(source_id, update_source_details, opts = {})
  logger.debug 'Calling operation ApplicationMigrationClient#update_source.' if logger

  raise "Missing the required parameter 'source_id' when calling update_source." if source_id.nil?
  raise "Missing the required parameter 'update_source_details' when calling update_source." if update_source_details.nil?
  raise "Parameter value for 'source_id' must not be blank" if OCI::Internal::Util.blank_string?(source_id)

  resource_path = '/sources/{sourceId}'.sub('{sourceId}', source_id.to_s)

  headers = { accept: 'application/json', 'content-type': 'application/json' }
  headers[:'opc-request-id'] = opts[:opc_request_id] if opts[:opc_request_id]
  headers[:'if-match'] = opts[:if_match] if opts[:if_match]

  OCI::Retry.make_retrying_call(applicable_retry_config(opts), call_name: 'ApplicationMigrationClient#update_source') do
    @api_client.call_api(
      :PUT,
      resource_path,
      endpoint,
      header_params: headers,
      query_params: {},
      operation_signing_strategy: :standard,
      body: @api_client.object_to_http_body(update_source_details)
    )
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Style/IfUnlessModifier, Metrics/ParameterLists
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines
private

# Returns the retry configuration to use for a call. An explicit
# :retry_config entry in +opts+ always wins (even when its value is nil,
# which disables retries); otherwise the client-level default applies.
def applicable_retry_config(opts = {})
  opts.key?(:retry_config) ? opts[:retry_config] : @retry_config
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
|
<gh_stars>100-1000
from flake8_plugin_utils import assert_error, assert_not_error
from flake8_pytest_style.config import DEFAULT_CONFIG
from flake8_pytest_style.errors import UseFixturesWithoutParameters
from flake8_pytest_style.visitors import MarksVisitor
def test_ok():
    # usefixtures with at least one argument is the valid form; no diagnostic.
    code = """
        import pytest

        @pytest.mark.usefixtures('a')
        def test_something():
            pass
    """
    assert_not_error(MarksVisitor, code, config=DEFAULT_CONFIG)
def test_ok_another_mark_with_parens():
    # The check targets usefixtures only; other marks with empty parens pass.
    code = """
        import pytest

        @pytest.mark.foo()
        def test_something():
            pass
    """
    assert_not_error(MarksVisitor, code, config=DEFAULT_CONFIG)
def test_ok_another_mark_no_parens():
    # Other marks without parentheses also pass when mark_parentheses is off.
    code = """
        import pytest

        @pytest.mark.foo
        def test_something():
            pass
    """
    config = DEFAULT_CONFIG._replace(mark_parentheses=False)
    assert_not_error(MarksVisitor, code, config=config)
def test_error_with_parens():
    # usefixtures() with no arguments is flagged.
    code = """
        import pytest

        @pytest.mark.usefixtures()
        def test_something():
            pass
    """
    assert_error(
        MarksVisitor, code, UseFixturesWithoutParameters, config=DEFAULT_CONFIG
    )
def test_error_no_parens():
    # Bare usefixtures (no parens, hence no arguments) is flagged even when
    # mark_parentheses enforcement is disabled.
    code = """
        import pytest

        @pytest.mark.usefixtures
        def test_something():
            pass
    """
    config = DEFAULT_CONFIG._replace(mark_parentheses=False)
    assert_error(MarksVisitor, code, UseFixturesWithoutParameters, config=config)
|
<reponame>dennisdrew/mysharepal
import MinistryService from './MinistryService'
import validateMinistryId from './MinistryIdValidator'
// Minimum number of characters required for a ministry name.
const MINIMUM_NAME_LENGTH = 3
// Only allow alphanumeric and space
const MINISTRY_NAME_REGEX = /^[A-Za-z0-9 ]+$/
// Since ministry ID follow AB001, if we are at AB999 we have to throw an error to preserve ID convention of 5 char length
const MAX_COLLIDED_PREFIXES = 999
// Number of digits in the numeric suffix of a ministry ID (e.g. the "001" in "AB001").
const SUFFIX_LENGTH = 3
/**
 * Validate the proposed name, build a ministry model with a freshly
 * generated ID, and persist it.
 * @param {string} name Ministry display name
 * @returns the stored ministry record
 */
export async function createMinistry(name) {
  validateMinistryName(name)
  const ministryModel = await createNewMinistryModel(name)
  return await MinistryService.addMinistry(ministryModel)
}
/** Look up a single ministry by its ID. */
export async function getMinistry(mid) {
  const ministry = await MinistryService.getMinistry(mid)
  return ministry
}
/** Persist changes to an existing ministry identified by `mid`. */
export async function updateMinistry(mid, ministry) {
  const updated = await MinistryService.updateMinistry(mid, ministry)
  return updated
}
/**
 * Throw when the proposed ministry name is missing, shorter than the
 * minimum length, or contains characters outside [A-Za-z0-9 ].
 * @param {string} name Candidate ministry name
 */
function validateMinistryName(name) {
  const tooShort = !name || !name.length || name.length < MINIMUM_NAME_LENGTH
  if (tooShort) {
    throw new Error('Invalid name. Please enter at least 3 letters for your ministry name.')
  }
  const hasOnlyAllowedChars = MINISTRY_NAME_REGEX.test(name)
  if (!hasOnlyAllowedChars) {
    throw new Error('You entered an invalid ministry name. Please remove any special characters.')
  }
}
/**
 * Build the `{ name, id }` ministry model for persistence. The generated
 * ID is sanity-checked against the shared validator before use.
 * @param {string} name Ministry display name
 */
async function createNewMinistryModel(name) {
  const newId = await generateMinistryId(name)
  try {
    validateMinistryId(newId)
  } catch (error) {
    // For some reason, our code for generating ministry ID didn't match the validator
    // Throw a more generic error but still give enough context to investigate when reported
    // (Otherwise, user will think they entered their ministry *name* incorrectly)
    throw new Error('Failed to create ministry: Unable to generate ministry code ' + newId + '. This error has been reported and will be investigated.')
  }
  return {
    name,
    id: newId
  }
}
/** Fetch the IDs already in use and derive a fresh unique ID from the name. */
async function generateMinistryId(name) {
  const usedIds = await MinistryService.listMinistryIds()
  console.log('Obtained existing ministry IDs: ' + usedIds)
  return generateMinistryIdFromName(name, usedIds)
}
/**
 * Given the ministry name, generate a unique ministry ID.
 *
 * The ID is the first two letters of the name, upper-cased, followed by a
 * zero-padded incremental index: one past the highest index already in use
 * for that two-letter prefix (e.g. YW ministries go YW000, YW001, ...).
 *
 * @param {Name of the ministry} name
 * @param {Existing ministry IDs to avoid} existingIds
 */
function generateMinistryIdFromName(name, existingIds) {
  const prefix = name.toUpperCase().substring(0, 2)
  const nextIndex = getFirstAvailableSuffixInt(prefix, existingIds)
  const mid = prefix + padIntToString(SUFFIX_LENGTH, nextIndex)
  console.log('Generated ministry ID: ' + mid)
  return mid
}
/**
 * Return the numeric suffix to use next for the given two-letter prefix:
 * 0 when no existing ID shares the prefix, otherwise one past the highest
 * suffix already taken. Throws once the highest suffix reaches the cap,
 * since the 5-character ID convention cannot go beyond 999.
 */
function getFirstAvailableSuffixInt(prefix, existingIds) {
  const hasAnyIds = existingIds && existingIds.length >= 1
  if (!hasAnyIds) {
    return 0
  }
  const matching = existingIds.filter(id => id.startsWith(prefix))
  if (!matching || !matching.length || matching.length < 1) {
    return 0
  }
  const [highestSuffix] = idsToIntSuffixes(matching)
  if (highestSuffix >= MAX_COLLIDED_PREFIXES) {
    throw new Error('No more available ministry IDs for prefix: ' + prefix)
  }
  return highestSuffix + 1
}
/**
 * Strip the two-letter prefix from each ID, parse the remaining digits,
 * and return the numeric suffixes sorted in descending order.
 */
function idsToIntSuffixes(ids) {
  const suffixes = ids.map(id => parseInt(id.substring(2)))
  suffixes.sort((left, right) => right - left)
  return suffixes
}
/**
 * Left-pad a non-negative integer with zeros to the requested length.
 * Values that already have `toStringLength` or more digits are returned
 * unpadded.
 *
 * Fixes the original loop, which compared `integer < 10 * i` instead of
 * using powers of ten and therefore fell off the end of the loop and
 * returned `undefined` for inputs like (3, 35) or (3, 123).
 *
 * @param {number} toStringLength Desired minimum length of the result
 * @param {number} integer Non-negative integer to format
 * @returns {string} Zero-padded decimal representation
 */
function padIntToString(toStringLength, integer) {
  return `${integer}`.padStart(toStringLength, '0')
}
#!/bin/sh
#
# Generate protobuf markdown documentation, Go bindings and Python bindings
# for the onos proto tree. The repetitive per-proto protoc invocations are
# factored into gen_doc/gen_go helpers; every output path, markdown name,
# import path and plugin choice is preserved from the original script.

proto_path="./proto:${GOPATH}/src/github.com/gogo/protobuf/protobuf:${GOPATH}/src/github.com/gogo/protobuf:${GOPATH}/src"

### Documentation generation

# gen_doc OUT_DIR MD_NAME PROTO...
# Render the given proto file(s) as markdown into OUT_DIR/MD_NAME.
gen_doc() {
    doc_out="$1"
    doc_md="$2"
    shift 2
    protoc --proto_path="$proto_path" \
        --doc_out="$doc_out" \
        --doc_opt=markdown,"$doc_md" \
        "$@"
}

# e2sub
gen_doc docs/onos/e2sub endpoint.md proto/onos/e2sub/endpoint/endpoint.proto
gen_doc docs/onos/e2sub subscription.md proto/onos/e2sub/subscription/subscription.proto
gen_doc docs/onos/e2sub task.md proto/onos/e2sub/task/task.proto

# e2t
gen_doc docs/onos/e2t admin.md proto/onos/e2t/admin/admin.proto
gen_doc docs/onos/e2t e2.md proto/onos/e2t/e2/v1beta1/*.proto

# a1t
gen_doc docs/onos/a1t a1.md proto/onos/a1t/a1/*.proto

# topo
gen_doc docs/onos/topo topo.md proto/onos/topo/topo.proto

# config
gen_doc docs/onos/config admin.md proto/onos/config/admin/admin.proto
gen_doc docs/onos/config diags.md proto/onos/config/diags/diags.proto
gen_doc docs/onos/config change_types.md proto/onos/config/change/types.proto
gen_doc docs/onos/config device_change.md proto/onos/config/change/device/types.proto
gen_doc docs/onos/config network_change.md proto/onos/config/change/network/types.proto
gen_doc docs/onos/config snapshot_types.md proto/onos/config/snapshot/types.proto
gen_doc docs/onos/config device_snapshot.md proto/onos/config/snapshot/device/types.proto
gen_doc docs/onos/config network_snapshot.md proto/onos/config/snapshot/network/types.proto

## onos-config v2 API
gen_doc docs/onos/config/v2 value.md proto/onos/config/v2/value.proto
gen_doc docs/onos/config/v2 transaction.md proto/onos/config/v2/transaction.proto
gen_doc docs/onos/config/v2 proposal.md proto/onos/config/v2/proposal.proto
gen_doc docs/onos/config/v2 configuration.md proto/onos/config/v2/configuration.proto

#configmodel
gen_doc docs/onos/configmodel registry.md proto/onos/configmodel/registry.proto

# kpimon
gen_doc docs/onos/kpimon kpimon.md proto/onos/kpimon/kpimon.proto
# pci
gen_doc docs/onos/pci pci.md proto/onos/pci/pci.proto
# mlb
gen_doc docs/onos/mlb mlb.md proto/onos/mlb/mlb.proto
# rsm
gen_doc docs/onos/rsm rsm.md proto/onos/rsm/rsm.proto

# ransim
gen_doc docs/onos/ransim metrics.md proto/onos/ransim/metrics/metrics.proto
gen_doc docs/onos/ransim model.md proto/onos/ransim/model/model.proto
gen_doc docs/onos/ransim trafficsim.md proto/onos/ransim/trafficsim/trafficsim.proto
gen_doc docs/onos/ransim types.md proto/onos/ransim/types/types.proto

### Go Protobuf code generation

# Proto-file -> Go-package mappings handed to the gogo generators.
go_import_paths="Mgoogle/protobuf/any.proto=github.com/gogo/protobuf/types"
go_import_paths="${go_import_paths},Mgoogle/protobuf/timestamp.proto=github.com/gogo/protobuf/types"
go_import_paths="${go_import_paths},Mgoogle/protobuf/duration.proto=github.com/gogo/protobuf/types"
go_import_paths="${go_import_paths},Mgoogle/protobuf/empty.proto=github.com/gogo/protobuf/types"
go_import_paths="${go_import_paths},Monos/config/device/types.proto=github.com/onosproject/onos-api/go/onos/config/device"
go_import_paths="${go_import_paths},Monos/config/admin/admin.proto=github.com/onosproject/onos-api/go/onos/config/admin"
go_import_paths="${go_import_paths},Monos/config/change/types.proto=github.com/onosproject/onos-api/go/onos/config/change"
go_import_paths="${go_import_paths},Monos/config/change/device/types.proto=github.com/onosproject/onos-api/go/onos/config/change/device"
go_import_paths="${go_import_paths},Monos/config/change/network/types.proto=github.com/onosproject/onos-api/go/onos/config/change/network"
go_import_paths="${go_import_paths},Monos/config/snapshot/types.proto=github.com/onosproject/onos-api/go/onos/config/snapshot"
go_import_paths="${go_import_paths},Monos/config/snapshot/device/types.proto=github.com/onosproject/onos-api/go/onos/config/snapshot/device"
go_import_paths="${go_import_paths},Monos/ransim/types/types.proto=github.com/onosproject/onos-api/go/onos/ransim/types"
go_import_paths="${go_import_paths},Monos/config/v2/object.proto=github.com/onosproject/onos-api/go/onos/config/v2"
go_import_paths="${go_import_paths},Monos/config/v2/failure.proto=github.com/onosproject/onos-api/go/onos/config/v2"
go_import_paths="${go_import_paths},Monos/config/v2/value.proto=github.com/onosproject/onos-api/go/onos/config/v2"
go_import_paths="${go_import_paths},Monos/config/v2/transaction.proto=github.com/onosproject/onos-api/go/onos/config/v2"
go_import_paths="${go_import_paths},Monos/config/v2/proposal.proto=github.com/onosproject/onos-api/go/onos/config/v2"
go_import_paths="${go_import_paths},Monos/config/v2/configuration.proto=github.com/onosproject/onos-api/go/onos/config/v2"

# gen_go PLUGIN IMPORT_PATH PROTO...
# Generate Go bindings with the given gogo plugin (gogofaster or gogo)
# and Go import path, emitting into ./go.
gen_go() {
    go_plugin="$1"
    go_import_path="$2"
    shift 2
    protoc --proto_path="$proto_path" \
        "--${go_plugin}_out=${go_import_paths},import_path=${go_import_path},plugins=grpc:./go" \
        "$@"
}

# topo and UE-NIB
gen_go gogofaster onos/topo proto/onos/topo/*.proto
gen_go gogofaster onos/uenib proto/onos/uenib/*.proto

# e2sub
gen_go gogofaster onos/e2sub/endpoint proto/onos/e2sub/endpoint/*.proto
gen_go gogofaster onos/e2sub/subscription proto/onos/e2sub/subscription/*.proto
gen_go gogofaster onos/e2sub/task proto/onos/e2sub/task/*.proto

# e2t
gen_go gogofaster onos/e2t/admin proto/onos/e2t/admin/*.proto
gen_go gogofaster onos/e2t/e2 proto/onos/e2t/e2/*.proto
gen_go gogofaster onos/e2t/e2/v1beta1 proto/onos/e2t/e2/v1beta1/*.proto

# a1t
gen_go gogofaster onos/a1t/a1 proto/onos/a1t/a1/*.proto

# config
gen_go gogofaster onos/config/change proto/onos/config/change/*.proto
gen_go gogofaster onos/config/change/device proto/onos/config/change/device/*.proto
gen_go gogofaster onos/config/change/network proto/onos/config/change/network/*.proto
gen_go gogofaster onos/config/snapshot proto/onos/config/snapshot/*.proto
gen_go gogofaster onos/config/snapshot/device proto/onos/config/snapshot/device/*.proto
gen_go gogofaster onos/config/snapshot/network proto/onos/config/snapshot/network/*.proto
gen_go gogofaster onos/config/diags proto/onos/config/diags/*.proto

# onos-config v2 API
gen_go gogofaster onos/config/v2 proto/onos/config/v2/*.proto

#configmodel
gen_go gogofaster onos/configmodel proto/onos/configmodel/*.proto

# admin.proto cannot be generated with fast marshaler/unmarshaler because it uses gnmi.ModelData
gen_go gogo onos/config/admin proto/onos/config/admin/*.proto

# kpimon
gen_go gogofaster onos/kpimon proto/onos/kpimon/*.proto
# pci
gen_go gogofaster onos/pci proto/onos/pci/*.proto
# mlb
gen_go gogofaster onos/mlb proto/onos/mlb/*.proto
# rsm
gen_go gogofaster onos/rsm proto/onos/rsm/*.proto
# mho
gen_go gogofaster onos/mho proto/onos/mho/*.proto

# ransim
gen_go gogofaster onos/ransim/metrics proto/onos/ransim/metrics/*.proto
gen_go gogofaster onos/ransim/model proto/onos/ransim/model/*.proto
gen_go gogofaster onos/ransim/trafficsim proto/onos/ransim/trafficsim/*.proto
gen_go gogofaster onos/ransim/types proto/onos/ransim/types/*.proto

# perf
# NOTE(review): import_path=onos/topo here reproduces the original script;
# confirm whether onos/perf was intended.
gen_go gogofaster onos/topo proto/onos/perf/perf.proto

### Python Protobuf code generation

mkdir -p ./python
protoc --proto_path="$proto_path" \
    --python_betterproto_out=./python \
    $(find proto -name "*.proto" | sort)

# FIXME: come up with a better way to patch python files; this is too brittle
# git apply ./build/bin/patches/*.patch
|
#!/bin/bash
# Build (and optionally release) the rl-baselines-zoo Docker image on top of
# the matching stable-baselines parent image.
#
# Environment:
#   USE_GPU  "True" to build on the GPU parent image; anything else uses CPU
#   RELEASE  "True" to push the resulting tags to the registry
PARENT=stablebaselines/stable-baselines
TAG=stablebaselines/rl-baselines-zoo
VERSION=v2.10.0

# CPU images carry a "-cpu" suffix on both the parent and the target tag.
if [[ ${USE_GPU} == "True" ]]; then
  PARENT="${PARENT}:${VERSION}"
else
  PARENT="${PARENT}-cpu:${VERSION}"
  TAG="${TAG}-cpu"
fi

docker build --build-arg PARENT_IMAGE=${PARENT} --build-arg USE_GPU=${USE_GPU} -t ${TAG}:${VERSION} . -f docker/Dockerfile
# Also mark this build as the "latest" tag.
docker tag ${TAG}:${VERSION} ${TAG}:latest

if [[ ${RELEASE} == "True" ]]; then
  docker push ${TAG}:${VERSION}
  docker push ${TAG}:latest
fi
|
import React, { Component } from 'react';
class Alphabet extends Component {
render() {
let alphabet = "zyxwvutsrqponmlkjihgfedcba";
return (
<div>
<h2>Alphabet in Reverse Order</h2>
<p>{alphabet}</p>
</div>
);
}
}
export default Alphabet; |
# Copyright (c) 2020 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
# NOTE(review): this shebang follows the license header rather than being on
# line 1 of the file -- confirm the interpreter is selected elsewhere.
set -euxo pipefail

# Locate apt-get; fall back to a fixed path when it is not on PATH.
APT_PATH=`which apt-get` || true
apt_get=${APT_PATH:-"/usr/local/bin/apt-get"}

# Cmake executable
CMAKE_INSTALL_DIR="/opt/cmake"
export PATH=:${CMAKE_INSTALL_DIR}/bin:${PATH}

# packagecloud install scripts for the fdio release and hicn repositories.
PACKAGECLOUD_RELEASE_REPO_DEB="https://packagecloud.io/install/repositories/fdio/release/script.deb.sh"
PACKAGECLOUD_RELEASE_REPO_RPM="https://packagecloud.io/install/repositories/fdio/release/script.rpm.sh"
PACKAGECLOUD_HICN_REPO_DEB="https://packagecloud.io/install/repositories/fdio/hicn/script.deb.sh"
PACKAGECLOUD_HICN_REPO_RPM="https://packagecloud.io/install/repositories/fdio/hicn/script.rpm.sh"

VPP_GIT_REPO="https://github.com/FDio/vpp"
VPP_BRANCH="stable/2005"

# Figure out what system we are running on
if [ -f /etc/os-release ]; then
  . /etc/os-release
else
  echo "ERROR: System configuration not recognized. Build failed"
  exit 1
fi

# Derive the VPP package version string from the current git tag, e.g.
# "v20.05-12-gabcdef" -> "20.05-release".
VERSION_REGEX="s/v([0-9]+).([0-9]+)(.*)?-([0-9]+)-(g[0-9a-f]+)/\1.\2-release/g"
VPP_VERSION_DEB=$(git describe --long --match "v*" | sed -E ${VERSION_REGEX})
VPP_VERSION_RPM="${VPP_VERSION_DEB}.x86_64"

# Build dependencies installed from the distribution archives.
DEPS_UBUNTU=("build-essential"
             "doxygen"
             "curl"
             "cmake"
             "libasio-dev"
             "libconfig-dev"
             "libconfig++-dev"
             "libcurl4-openssl-dev"
             "libevent-dev"
             "libssl-dev"
             "ninja-build"
             "python3-ply")

# Dependencies installed from the fdio packagecloud repositories, pinned to
# the VPP version derived above.
DEPS_UBUNTU_PKGCLOUD=("libparc-dev"
                      "libmemif-dev"
                      "libmemif"
                      "vpp=${VPP_VERSION_DEB}"
                      "vpp-dev=${VPP_VERSION_DEB}"
                      "libvppinfra=${VPP_VERSION_DEB}"
                      "libvppinfra-dev=${VPP_VERSION_DEB}"
                      "vpp-plugin-core=${VPP_VERSION_DEB}"
                      "libparc-dev")

# collectd release tarball used to obtain the plugin headers.
COLLECTD_SOURCE="https://github.com/collectd/collectd/releases/download/collectd-5.12.0/collectd-5.12.0.tar.bz2"
# Download, unpack and configure/build collectd 5.12.0 so that its headers
# are available, then export COLLECTD_HOME pointing at the source tree.
function install_collectd_headers() {
    curl -OL ${COLLECTD_SOURCE}
    tar -xf collectd-5.12.0.tar.bz2
    pushd collectd-5.12.0
    ./configure && make -j$(nproc)
    popd
    export COLLECTD_HOME=${PWD}/collectd-5.12.0/src
}
# Register the fdio release and hicn packagecloud repositories for the
# detected distribution: deb scripts on ubuntu, rpm scripts on centos;
# any other distribution aborts the build.
function setup_fdio_repo() {
    DISTRIB_ID=${ID}
    if [ "${DISTRIB_ID}" == "ubuntu" ]; then
        curl -s ${PACKAGECLOUD_RELEASE_REPO_DEB} | sudo bash
        curl -s ${PACKAGECLOUD_HICN_REPO_DEB} | sudo bash
    elif [ "${DISTRIB_ID}" == "centos" ]; then
        curl -s ${PACKAGECLOUD_RELEASE_REPO_RPM} | sudo bash
        curl -s ${PACKAGECLOUD_HICN_REPO_RPM} | sudo bash
    else
        echo "Distribution ${DISTRIB_ID} is not supported"
        exit 1
    fi
}
# Install dependencies
# NOTE(review): only the apt/Ubuntu path is implemented even though
# DISTRIB_ID is computed -- a centos branch would need yum/dnf.
function install_deps() {
    DISTRIB_ID=${ID}
    echo ${DEPS_UBUNTU[@]} | xargs sudo ${apt_get} install -y --allow-unauthenticated --no-install-recommends
}
# Install the packagecloud-hosted dependencies (VPP & friends) via apt.
# NOTE(review): like install_deps, this only covers the Ubuntu path.
function install_pkgcloud_deps() {
    DISTRIB_ID=${ID}
    echo ${DEPS_UBUNTU_PKGCLOUD[@]} | xargs sudo ${apt_get} install -y --allow-unauthenticated --no-install-recommends
}
# Call a function once
# Runs the given command only if it has not completed before; a marker file
# under /tmp records completion and is only written when the command
# succeeds (with `set -e`, a failure aborts before the marker is written).
function call_once() {
    # OP_NAME is the name of the function
    OP_NAME=${1}
    # If function was already called return
    [[ -f /tmp/${OP_NAME} ]] && return 0
    # Otherwise call the function
    ${@}
    # And mark the function as called if no error occurred
    echo ${OP_NAME} > /tmp/${OP_NAME}
}
# Full environment setup: package repositories, distro and packagecloud
# dependencies, and the collectd headers. Each step runs at most once.
function setup() {
    echo DISTRIBUTION: ${PRETTY_NAME}
    # export variables depending on the platform we are running
    call_once setup_fdio_repo
    call_once install_deps
    call_once install_pkgcloud_deps
    call_once install_collectd_headers
}
# Reduced setup: distro dependencies and collectd headers only (no fdio
# repositories or packagecloud packages). Each step runs at most once.
function setup_extras() {
    echo DISTRIBUTION: ${PRETTY_NAME}
    # export variables depending on the platform we are running
    call_once install_deps
    call_once install_collectd_headers
}
|
import React from 'react'
import PropTypes from 'prop-types'
import classNames from 'classnames'
const FilterColumn = (props) => {
const {
className,
modifier,
children
} = props
const modifiedClassNames = classNames('filter-panel__column', className, modifier)
return (
<div className={modifiedClassNames}>
{children}
</div>
)
}
// Runtime prop validation: className and modifier accept anything the
// `classnames` helper understands (string, array of strings, or object map).
FilterColumn.propTypes = {
  className: PropTypes.oneOfType([
    PropTypes.string,
    PropTypes.arrayOf(PropTypes.string),
    PropTypes.object
  ]),
  modifier: PropTypes.oneOfType([
    PropTypes.string,
    PropTypes.arrayOf(PropTypes.string),
    PropTypes.object
  ]),
  children: PropTypes.node
}

export default FilterColumn
|
#include "core/pin.hpp"
#include "drivers/systick.hpp"
#include "drivers/gpio.hpp"
// Busy-wait for the given number of system ticks.
//
// Uses the elapsed-delta form (now - start < ticks) instead of computing an
// absolute end time: with unsigned arithmetic this remains correct when the
// tick counter wraps around, whereas the original `now < start + ticks`
// comparison could return immediately (or spin far too long) near the wrap
// point.
static void delay(unsigned ticks)
{
    const unsigned start = drivers::Systick::ticks();
    while (drivers::Systick::ticks() - start < ticks)
    {
    }
}
int main()
{
    // NOTE(review): argument meaning (clock frequency, tick rate) inferred
    // from the values -- confirm against drivers::Systick::init.
    drivers::Systick::init(8000000, 1000);

    // LED on port C, pin 13.
    Pin<drivers::GPIOC, 13> led;

    // Blink forever: toggle the LED every 500 ticks.
    while (true)
    {
        led.toggle();
        delay(500);
    }
}
// @flow
import axios from 'axios';
import iconv from 'iconv-lite';
import type { Station } from 'types/abfahrten';
// Query the bahn.de "ajax-getstop" suggestion endpoint and return the
// matching stations for the given search term.
export default async function(searchTerm: string): Promise<Station[]> {
  // Fetch raw bytes and decode manually -- the endpoint replies in latin-1.
  const buffer = (await axios.get(`http://reiseauskunft.bahn.de/bin/ajax-getstop.exe/dn?S=${searchTerm}*`, {
    responseType: 'arraybuffer',
  })).data;
  const rawReply = iconv.decode(buffer, 'latin-1');
  // Strip the fixed-length wrapper around the JSON payload (8 characters of
  // prefix, 22 of suffix).
  // NOTE(review): these offsets are magic numbers tied to the current
  // response format -- verify against a live response if parsing breaks.
  const stringReply = rawReply.substring(8, rawReply.length - 22);
  const stations = JSON.parse(stringReply).suggestions;
  return stations
    // Drop entries whose value is entirely upper-case.
    // NOTE(review): presumably these are meta/aggregate suggestions rather
    // than stations -- confirm intent.
    .filter(s => s.value !== s.value.toUpperCase())
    .map(s => ({
      title: s.value,
      // extId is zero-padded; the parse/toString round-trip normalizes it.
      id: Number.parseInt(s.extId, 10).toString(),
    }));
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Check that all logs are terminated with '\n'
#
# Some logs are continued over multiple lines. They should be explicitly
# commented with /* Continued */
#
# There are some instances of LogPrintf() in comments. Those can be
# ignored
export LC_ALL=C

# Collect LogPrint/LogPrintf call sites in *.cpp whose text does not contain
# a '\n"', excluding explicitly continued lines and bare, argument-less
# mentions (which appear in comments).
UNTERMINATED_LOGS=$(git grep --extended-regexp "LogPrintf?\(" -- "*.cpp" | \
    grep -v '\\n"' | \
    grep -v "/\* Continued \*/" | \
    grep -v "LogPrint()" | \
    grep -v "LogPrintf()")

# Fail the check and print the offending lines when any remain.
if [[ ${UNTERMINATED_LOGS} != "" ]]; then
    echo "All calls to LogPrintf() and LogPrint() should be terminated with \\n"
    echo
    echo "${UNTERMINATED_LOGS}"
    exit 1
fi
|
<filename>app/js/src/state/base/reducer.js
/**
* @fileoverview
*
* Reducers for App
*/
goog.provide("app.state.BaseReducers");
/**
 * Namespace object holding the base reducers for the app state.
 * @enum {Function}
 */
app.state.BaseReducers = {};
/**
 * Reducer for the "loaded" flag: replaces the state with the action payload
 * on IS_LOADED, otherwise returns the state unchanged.
 * @param {Object} state Current State
 * @param {{type:string, payload:*}} action Action to update state
 * @return {*} The next state
 */
app.state.BaseReducers.loaded = function ( state, action ) {
  return action.type === 'IS_LOADED' ? action.payload : state;
};
/**
 * Reducer for the "loading" flag: replaces the state with the action payload
 * on IS_LOADING, otherwise returns the state unchanged.
 * @param {Object} state Current State
 * @param {{type:string, payload:*}} action Action to update state
 * @return {*} The next state
 */
app.state.BaseReducers.loading = function ( state, action ) {
  return action.type === 'IS_LOADING' ? action.payload : state;
};
/**
 * Reducer for the current user: replaces the state with the action payload
 * on IS_USER, otherwise returns the state unchanged.
 * @param {Object} state Current State
 * @param {{type:string, payload:*}} action Action to update state
 * @return {*} The next state
 */
app.state.BaseReducers.user = function ( state, action ) {
  return action.type === 'IS_USER' ? action.payload : state;
};
package org.hzero.sso.saml.token;
import java.util.Collection;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.SpringSecurityCoreVersion;
/**
 * Authentication token produced by the SAML SSO flow.
 *
 * Adds no behavior over {@link UsernamePasswordAuthenticationToken}; the
 * subclass exists so SAML-originated authentications can be distinguished
 * from ordinary username/password ones by type.
 */
public class CustomSamlAuthenticationToken extends UsernamePasswordAuthenticationToken {

    private static final long serialVersionUID = SpringSecurityCoreVersion.SERIAL_VERSION_UID;

    /** Creates a token from principal and credentials (unauthenticated form of the superclass constructor). */
    public CustomSamlAuthenticationToken(Object principal, Object credentials) {
        super(principal, credentials);
    }

    /** Creates a token carrying granted authorities (authenticated form of the superclass constructor). */
    public CustomSamlAuthenticationToken(Object principal, Object credentials,
                                         Collection<? extends GrantedAuthority> authorities) {
        super(principal, credentials, authorities);
    }
}
|
# Code to create a virtual assistant to answer questions about a car and its features

# Imports
import spacy
import random  # NOTE(review): imported but unused in this file
from tts_engine import tts

# Load the small English spaCy pipeline used for tokenization/lemmatization.
nlp = spacy.load("en_core_web_sm")

# Canned question -> answer pairs the assistant can serve.
dialogue = {
    "What features does the car have?": "The car is equipped with a powerful engine, advanced safety features and an intuitive infotainment system.",
    "What engine does the car have?": "The car is powered by a 2.0L turbocharged engine that delivers powerful performance and optimal fuel efficiency.",
    "Does the car have advanced safety features?": "Yes, the car is equipped with advanced safety features such as blind spot monitoring, lane departure warning and autonomous emergency braking."
}
# Function to process inputs and return responses
def process_input(user_input):
    """Map a user utterance to a canned response via lemma lookup.

    NOTE(review): the lookup key is the space-joined sequence of token
    lemmas, which will generally differ from the raw question keys in
    ``dialogue`` (lemmatized words, space before punctuation), so this
    will usually fall through to the default answer -- verify intent.
    """
    # Get the tokens
    tokens = nlp(user_input)
    intent = None
    entities = []
    # Loop through the tokens
    for token in tokens:
        # Get the lemma for the token
        lemma = token.lemma_
        if lemma == "-PRON-":
            lemma = token.text
        # Get the intent and entities
        # NOTE(review): `intent` is overwritten on every iteration and never
        # read after the loop -- it ends up as the last token's POS tag.
        intent = token.pos_
        entities.append(lemma)
    # Get the response from the dialogue
    response = dialogue.get(' '.join(entities), "I'm sorry, I don't understand.")
    # Return the response
    return response
# Function to start the conversation
def start_conversation(bot_response):
    """Emit a bot utterance on both stdout and the TTS engine."""
    print(bot_response)
    tts(bot_response)
# Greet the user first. (In the original script this call was placed AFTER
# the infinite loop below and was therefore unreachable.)
start_conversation("Hi, I'm your virtual assistant. How can I help you?")

# Start the conversation loop: read a line, look up a response, then emit it
# on both stdout and the TTS engine. Runs until interrupted.
while True:
    # Get the user input
    user_input = input("You: ")
    # Process the user input
    bot_response = process_input(user_input)
    # Print the bot response
    print(bot_response)
    # Speak the bot response
    tts(bot_response)
/* eslint-disable react/prop-types */
import React, { ReactNode, ComponentType } from 'react';
import { Chip, ChipGroup, FormGroup } from '@patternfly/react-core';
import useFieldApi from '@data-driven-forms/react-form-renderer/use-field-api';
/** Props for the InitialChips field component. */
export interface InitialChipsProps {
  /** Field name registered with data-driven-forms. */
  name: string;
  /** Label rendered on the surrounding form group. */
  label: ReactNode;
}

/** Shape of one selected item stored in the form field's value array. */
export interface InitialChipsValue {
  id: string;
  name: ReactNode;
}
/**
 * Renders the current value of a data-driven-forms field as a group of
 * removable chips. Clicking a chip removes that item from the field value.
 */
const InitialChips: ComponentType<InitialChipsProps> = ({ name, label }) => {
  const {
    input: { value, onChange }
  } = useFieldApi<InitialChipsValue[]>({ name });

  // Drop the clicked item from the field value.
  const handleRemove = (id: string) =>
    onChange(value.filter((item) => item.id !== id));

  // Render nothing when there is no selection. The original guard was
  // `value?.length === 0`, which let an undefined/null value fall through
  // to `value.map` below and crash; treat "missing" and "empty" the same.
  if (!value || value.length === 0) {
    return null;
  }

  return (
    <FormGroup fieldId={name} label={label}>
      <ChipGroup>
        {value.map(({ name, id }) => (
          <Chip key={id} onClick={() => handleRemove(id)}>
            {name}
          </Chip>
        ))}
      </ChipGroup>
    </FormGroup>
  );
};

export default InitialChips;
|
<gh_stars>0
package com.twu.biblioteca;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.*;
/**
 * Unit tests for BookList: listing counts, borrowing and returning books
 * by id and by name.
 *
 * NOTE(review): several test names misspell "Available" as "Availabel";
 * renaming them is out of scope for a documentation-only pass.
 */
public class BookListTest {

    private BookList bookList;

    // Fresh list before every test so borrow/return state cannot leak.
    @Before
    public void init(){
        this.bookList = new BookList();
    }

    // Both listing styles report 9 entries on a fresh list.
    @Test
    public void printDetailList(){
        assertThat(bookList.printDetailList(), equalTo(9));
    }

    @Test
    public void printOverviewList(){
        assertThat(bookList.printOverviewList(), equalTo(9));
    }

    // Borrowing by id succeeds for an available book, fails otherwise.
    @Test
    public void isBookAvailabelById(){
        assertThat(bookList.borrowBookById("#003"), equalTo(true));
        assertThat(bookList.borrowBookById("#006"), equalTo(false));
    }

    // Borrowing by name succeeds for an available book, fails otherwise.
    @Test
    public void isBookAvailabelByName(){
        assertThat(bookList.borrowBookByName("The Goblet of Fire"), equalTo(true));
        assertThat(bookList.borrowBookByName("The Half-Blood Prince"), equalTo(false));
    }

    // Borrowing flips availability from "Available" to "Unavailable".
    @Test
    public void borrowBook(){
        assertThat(bookList.findBookByName("The Goblet of Fire").getIsAvailable(), equalTo("Available"));
        bookList.borrowBook(bookList.findBookById("#004"));
        assertThat(bookList.findBookByName("The Goblet of Fire").getIsAvailable(), equalTo("Unavailable"));
    }

    // Returning flips availability back to "Available".
    @Test
    public void returnBook(){
        assertThat(bookList.findBookById("#006").getIsAvailable(), equalTo("Unavailable"));
        bookList.returnBook(bookList.findBookById("#006"));
        assertThat(bookList.findBookById("#006").getIsAvailable(), equalTo("Available"));
    }

    // Borrowing by id also shrinks the detail listing count.
    @Test
    public void testBorrowABookByid(){
        assertThat(bookList.printDetailList(), equalTo(9));
        assertThat(bookList.findBookById("#004").getIsAvailable(), equalTo("Available"));
        bookList.borrowBookById("#004");
        assertThat(bookList.findBookById("#004").getIsAvailable(), equalTo("Unavailable"));
        assertThat(bookList.printDetailList(), equalTo(8));
    }

    // Borrowing by name also shrinks the detail listing count.
    @Test
    public void testBorrowABookByName(){
        assertThat(bookList.printDetailList(), equalTo(9));
        assertThat(bookList.findBookByName("The Order of the Phoenix").getIsAvailable(),
                equalTo("Available"));
        bookList.borrowBookByName("The Order of the Phoenix");
        assertThat(bookList.findBookByName("The Order of the Phoenix").getIsAvailable(),
                equalTo("Unavailable"));
        assertThat(bookList.printDetailList(), equalTo(8));
    }

    // Returning by id grows the detail listing count.
    @Test
    public void testReturnABook(){
        assertThat(bookList.printDetailList(), equalTo(9));
        assertThat(bookList.findBookById("#006").getIsAvailable(), equalTo("Unavailable"));
        bookList.returnBookById("#006");
        assertThat(bookList.findBookById("#006").getIsAvailable(), equalTo("Available"));
        assertThat(bookList.printDetailList(), equalTo(10));
    }
}
<reponame>rafax/sourcegraph
package dbstore
import (
"github.com/sourcegraph/sourcegraph/internal/database/dbtesting"
"github.com/sourcegraph/sourcegraph/internal/database/dbutil"
"github.com/sourcegraph/sourcegraph/internal/observation"
)
// init gives this package's test databases a distinct name suffix so they
// do not collide with other packages' test databases.
func init() {
	dbtesting.DBNameSuffix = "oss-codeintel"
}
// testStore returns a Store backed by the given test database handle and
// the shared test observation context.
// NOTE(review): the meaning of the nil third argument is not visible here;
// confirm against NewWithDB.
func testStore(db dbutil.DB) *Store {
	return NewWithDB(db, &observation.TestContext, nil)
}
|
package com.attendancefortjit.tjitattendance;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.widget.TextView;
/**
 * Created by RJ on 01/04/16.
 *
 * Screen pointing the user at a downloadable timetable.
 */
public class Timetable extends Activity {

    // Prompt text and the link view from the layout.
    TextView timetable, timetablelink;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.timetable);
        timetable = (TextView) findViewById(R.id.tvtimetable);
        // NOTE(review): timetablelink is looked up but never configured in
        // this class -- presumably its text/link comes from the layout XML;
        // confirm.
        timetablelink = (TextView) findViewById(R.id.tvtimetablelink);
        timetable.setText("Please click on the link below to download the current timetable:");
    }

    // Back navigates to the Welcome screen and finishes this activity.
    @Override
    public void onBackPressed() {
        super.onBackPressed();
        Intent intent = new Intent(Timetable.this,Welcome.class);
        startActivity(intent);
        finish();
    }
}
|
#!/bin/sh
# Clone the icon-set repositories used by the project into the current
# directory. The repository list lives in one place instead of five
# copy-pasted clone commands.
for repo in \
    feathericons/feather \
    tabler/tabler-icons \
    iconic/open-iconic \
    refactoringui/heroicons \
    lucaburgio/iconoir; do
    git clone "git@github.com:${repo}.git"
done
|
/**
 * Compute n! recursively.
 *
 * Rejects negative and non-integer inputs up front: the original recursed
 * past zero on such inputs (n - 1 never reaches exactly 0), overflowing the
 * call stack instead of failing with a clear error.
 *
 * @param {number} n Non-negative integer
 * @returns {number} n factorial
 * @throws {RangeError} when n is negative or not an integer
 */
function factorial(n) {
  if (!Number.isInteger(n) || n < 0) {
    throw new RangeError('factorial is only defined for non-negative integers');
  }
  if (n === 0) {
    return 1;
  }
  return n * factorial(n - 1);
}
#!/bin/sh -xv
# Add and deploy the auth category to the aws-amplify-cypress-auth test app
# using the development build of the Amplify CLI (amplify-dev).
cd ../aws-amplify-cypress-auth
amplify-dev add auth
amplify-dev push --yes
echo "executed all Amplify commands"
|
// Code generated by mockery v1.0.0. DO NOT EDIT.
package mocks
import (
packet "github.com/danilarff86/miio-go/protocol/packet"
mock "github.com/stretchr/testify/mock"
)
// Crypto is an autogenerated mock type for the Crypto type
// NOTE(review): this file is mockery-generated (see "DO NOT EDIT" header);
// regenerate with mockery rather than hand-editing the dispatch code below.
type Crypto struct {
	mock.Mock
}

// Decrypt provides a mock function with given fields: data
func (_m *Crypto) Decrypt(data []byte) ([]byte, error) {
	// Record the call and fetch the return values configured via .On(...).
	ret := _m.Called(data)

	// Return value 0: computed by a configured func, or taken verbatim.
	var r0 []byte
	if rf, ok := ret.Get(0).(func([]byte) []byte); ok {
		r0 = rf(data)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]byte)
		}
	}

	// Return value 1: the configured error (possibly nil).
	var r1 error
	if rf, ok := ret.Get(1).(func([]byte) error); ok {
		r1 = rf(data)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}

// Encrypt provides a mock function with given fields: data
// Same dispatch pattern as Decrypt.
func (_m *Crypto) Encrypt(data []byte) ([]byte, error) {
	ret := _m.Called(data)

	var r0 []byte
	if rf, ok := ret.Get(0).(func([]byte) []byte); ok {
		r0 = rf(data)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]byte)
		}
	}

	var r1 error
	if rf, ok := ret.Get(1).(func([]byte) error); ok {
		r1 = rf(data)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}

// NewPacket provides a mock function with given fields: data
// Same dispatch pattern as Decrypt, returning a *packet.Packet.
func (_m *Crypto) NewPacket(data []byte) (*packet.Packet, error) {
	ret := _m.Called(data)

	var r0 *packet.Packet
	if rf, ok := ret.Get(0).(func([]byte) *packet.Packet); ok {
		r0 = rf(data)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*packet.Packet)
		}
	}

	var r1 error
	if rf, ok := ret.Get(1).(func([]byte) error); ok {
		r1 = rf(data)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}

// VerifyPacket provides a mock function with given fields: pkt
func (_m *Crypto) VerifyPacket(pkt *packet.Packet) error {
	ret := _m.Called(pkt)

	var r0 error
	if rf, ok := ret.Get(0).(func(*packet.Packet) error); ok {
		r0 = rf(pkt)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
|
<reponame>mdsd-team-1/photos-metamodeling
/**
*/
package PhotosMetaModel.impl;
import PhotosMetaModel.AppAccess;
import PhotosMetaModel.PhotosMetaModelPackage;
import org.eclipse.emf.ecore.EClass;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>App Access</b></em>'.
 * <!-- end-user-doc -->
 *
 * @generated
 */
// NOTE(review): EMF-generated class with no structural features of its own;
// it only binds the AppAccess interface to its metamodel EClass.  Regenerate
// from the model rather than hand-editing.
public class AppAccessImpl extends FunctionalitiesImpl implements AppAccess {
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected AppAccessImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * Identifies this object's EClass within the metamodel.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return PhotosMetaModelPackage.Literals.APP_ACCESS;
	}

} //AppAccessImpl
|
<reponame>dfelski/spring-boot-axon-docker-example
package peanuts.web;
import org.axonframework.commandhandling.gateway.CommandGateway;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import peanuts.FillBowlCommand;
import peanuts.PlaceNewBowlOnTableCommand;
import peanuts.TakeOutPeanutsCommand;
import java.util.UUID;
/**
 * REST endpoints for manipulating peanut bowls; each handler dispatches a
 * command to the bowl aggregate through the Axon command gateway.
 */
@RestController
class PeanutBowlController {

    /** Gateway used to dispatch commands synchronously (sendAndWait). */
    private final CommandGateway commandGateway;

    // Constructor injection instead of field injection: the dependency is
    // explicit and final, and the controller can be built in plain unit
    // tests without a Spring context.
    @Autowired
    PeanutBowlController(CommandGateway commandGateway) {
        this.commandGateway = commandGateway;
    }

    /** Places a new bowl with a random id and returns a confirmation message. */
    @PostMapping("/bowl")
    String create(){
        UUID id = UUID.randomUUID();
        commandGateway.sendAndWait(new PlaceNewBowlOnTableCommand(id));
        return "Oh, look! A new bowl on the table! "+id;
    }

    /** Adds the requested amount of peanuts to the bowl with the given id. */
    @PutMapping("/bowl/{id}/peanuts")
    void fill(@PathVariable("id") UUID id, @RequestBody FillingDTO fillingDTO){
        commandGateway.sendAndWait(new FillBowlCommand(id, fillingDTO.getAmount()));
    }

    /** Takes the requested amount of peanuts out of the bowl with the given id. */
    @DeleteMapping("/bowl/{id}/peanuts")
    void takeOut(@PathVariable("id") UUID id, @RequestBody TakeOutDTO takeOutDTO){
        commandGateway.sendAndWait(new TakeOutPeanutsCommand(id, takeOutDTO.getAmount()));
    }
}
|
// Module-private namespace for the documentation loader.
var internals = {};

$(document).ready(function () {
    // Fetch the documentation JSON from the server and hand it to the
    // loader, which builds the sidebar navigation and the record list.
    $.get( "docs", function( data ) {
        // load documentation
        // var sofaDocs = JSON.parse(data);
        // NOTE(review): internals.load is declared with a single parameter,
        // so the callback passed here is ignored — confirm intent.
        internals.load(data.docs, function (err, requestsDropDownBox) {});
        // Debug output: dump one known method signature into the page.
        $( "body" )
            .append( "Data: " + JSON.stringify(data.docs.requests.user.methods.test.signature) );
    }, "json" );
});
// Build the sidebar navigation and render the documentation records for the
// three plugin types exposed by the server: requests, tools and promises.
//
// Fix: the original async.waterfall steps signalled next() once per group
// (and the "tools" step once per record of the last group), so the waterfall
// callback could fire several times whenever a section had more than one
// group.  Each step now renders everything and calls next() exactly once.
// The near-identical copy-paste for the three sections is folded into two
// helpers.
internals.load = function (docs) {

    // Build the dropdown <li> entries plus the documentation records for
    // every group of one plugin type.
    var buildSection = function (sectionDocs, pluginType) {
        var groupNames = Object.keys(sectionDocs);
        var options = '';
        var documentation = [];
        for (var i = 0; i < groupNames.length; ++i) {
            documentation.push({
                name: groupNames[i],
                records: internals.loadGroupDocumentation(sectionDocs[groupNames[i]], groupNames[i], pluginType)
            });
            options += '<li><a href="#">' + groupNames[i].toUpperCase() + '</a>' +
                '<ul>' +
                internals.loadGroup(sectionDocs[groupNames[i]], groupNames[i], pluginType) +
                '</ul>' +
                '</li>';
        }
        return {
            optgroup: '<ul class="dropdown-menu">' + options + '</ul>',
            documentation: documentation
        };
    };

    var requests = buildSection(docs.requests, 'requests');
    var tools = buildSection(docs.tools, 'tools');
    var promises = buildSection(docs.promises, 'promises');

    // ** make documentation object
    internals.documentation = {
        requests: requests.documentation,
        tools: tools.documentation,
        promises: promises.documentation
    };

    var dropDownMenu = '<ul class="nav nav-tabs">' +
        '<li class="dropdown">' +
        '<a class="dropdown-toggle" data-toggle="dropdown" href="#">Requests <span class="caret"></span></a>' +
        requests.optgroup +
        '</li>' +
        '<li class="dropdown">' +
        '<a class="dropdown-toggle" data-toggle="dropdown" href="#">Tools<span class="caret"></span></a>' +
        tools.optgroup +
        '</li>' +
        '<li class="dropdown">' +
        '<a class="dropdown-toggle" data-toggle="dropdown" href="#">Promises<span class="caret"></span></a>' +
        promises.optgroup +
        '</li>' +
        '</ul>';

    // Insert Sidebar Navigation HTML
    $( ".sidebar" ).append(dropDownMenu);

    // Append one record partial per documentation record, then signal the
    // waterfall exactly once (empty sections just fall through to next()).
    var renderSection = function (groups, next) {
        for (var i = 0; i < groups.length; ++i) {
            for (var j = 0; j < groups[i].records.length; ++j) {
                var htmlPartial = window.recordTemplate({ record: groups[i].records[j] });
                $( "#wrap" ).append(htmlPartial);
            }
        }
        next();
    };

    // Insert Documentation, one section after the other.
    async.waterfall([function (next) {
        renderSection(internals.documentation.requests, next);
    }, function (next) {
        renderSection(internals.documentation.tools, next);
    }, function (next) {
        renderSection(internals.documentation.promises, next);
    }], function (err) {
        console.log('waterfall done');
    });
};
// Build one sidebar <li> link per method in the group; the anchor target is
// "#<pluginType>-<groupName>-<methodName>" and matches the ids rendered by
// the record template.
internals.loadGroup = function (toolgroup, groupName, pluginType) {
    var html = '';
    Object.keys(toolgroup.methods).forEach(function (key) {
        var methodName = toolgroup.methods[key].name;
        html += '<li><a href="#' +
            pluginType + '-' + groupName + '-' + methodName +
            '">' +
            methodName +
            '</a></li>';
    });
    return html;
};
// Turn one group's methods into flat documentation records, tagged with the
// plugin type and group name so the template can build unique anchors.
internals.loadGroupDocumentation = function (group, groupName, pluginType) {
    return Object.keys(group.methods).map(function (key) {
        var method = group.methods[key];
        return {
            pluginType: pluginType,
            groupName: groupName,
            name: method.name,
            signature: method.signature,
            comment: method.comment
        };
    });
};
|
<filename>src/domain/gestion.entity.ts
import { Entity, Column, ManyToOne } from 'typeorm';
import { BaseEntity } from './base/base.entity';
import Solicitud from './solicitud.entity';
/** A management action ("gestion") recorded against a request. */
@Entity('gestion')
export default class Gestion extends BaseEntity {
  /** Free-text description of the action performed. */
  @Column({ name: 'detalle' })
  detalle: string;

  /**
   * Date of the action.  Typed `string | Date` instead of the original
   * `any`: TypeORM hydrates `date` columns as 'YYYY-MM-DD' strings by
   * default, while callers may assign a Date before saving.
   */
  @Column({ type: 'date' })
  fecha: string | Date;

  /** Optional observation attached to the action. */
  @Column({ nullable: true })
  observacion: string;

  /** Privacy flag; nullable in the schema. */
  @Column({ type: 'boolean', nullable: true })
  privado: boolean;

  /** Owning request.  Arrow form `() => Solicitud` drops the unused `type`
   *  parameter and tolerates circular imports. */
  @ManyToOne(() => Solicitud)
  solicitud: Solicitud;

  /** Convenience FK value populated by some queries; not a mapped column. */
  solicitudId?: string;
}
|
# Icon attribute inspected for each supported criteria type.
_CRITERIA_FIELDS = {
    'number': 'value',
    'percentile': 'percentile',
    'percent': 'percent',
}

# Predicate per supported operator: the icon passes when the predicate holds
# for (icon attribute, criteria value).
_CRITERIA_OPS = {
    '>=': lambda a, b: a >= b,
    '<': lambda a, b: a < b,
    '<=': lambda a, b: a <= b,
}


def _satisfies(icon, criteria):
    """Return True if ``icon`` meets one ``criteria`` dict.

    Unknown criteria types or operators are treated as satisfied, matching
    the original nine-branch ladder, which only ever *cleared* the flag for
    the recognised type/operator combinations.
    """
    field = _CRITERIA_FIELDS.get(criteria['type'])
    op = _CRITERIA_OPS.get(criteria['criteria'])
    if field is None or op is None:
        return True
    return op(icon[field], criteria['value'])


def filterIcons(iconSet, iconsData):
    """Return the icons from ``iconsData`` that satisfy every criterion.

    :param iconSet: dict with an ``'icons'`` list of criteria dicts, each
        holding ``'type'`` ('number' | 'percentile' | 'percent'),
        ``'criteria'`` ('>=' | '<' | '<=') and ``'value'``.
    :param iconsData: iterable of icon dicts carrying ``'value'``,
        ``'percentile'`` and/or ``'percent'`` keys as needed.
    :return: list of icons (original dicts, original order) passing all
        criteria; an empty criteria list keeps every icon.
    """
    return [icon for icon in iconsData
            if all(_satisfies(icon, c) for c in iconSet['icons'])]
#!/bin/bash
# Builds the python `coverage` tool if it is not already on PATH, runs the
# RAVEN framework tests under it (including the interactive UI tests,
# headless via Xvfb when available) and produces a combined HTML report.

BUILD_DIR=${BUILD_DIR:=$HOME/raven_libs/build}
INSTALL_DIR=${INSTALL_DIR:=$HOME/raven_libs/pylibs}
PYTHON_CMD=${PYTHON_CMD:=python}
JOBS=${JOBS:=1}
mkdir -p $BUILD_DIR
mkdir -p $INSTALL_DIR
DOWNLOADER='curl -C - -L -O '

SCRIPT_DIRNAME=`dirname $0`
SCRIPT_DIR=`(cd $SCRIPT_DIRNAME; pwd)`

ORIGPYTHONPATH="$PYTHONPATH"

# Prepend any site-packages dir under INSTALL_DIR to PYTHONPATH.
update_python_path ()
{
    if ls -d $INSTALL_DIR/*/python*/site-packages/
    then
        export PYTHONPATH=`ls -d $INSTALL_DIR/*/python*/site-packages/`:"$ORIGPYTHONPATH"
    fi
}

update_python_path
PATH=$INSTALL_DIR/bin:$PATH

if which coverage
then
    echo coverage already available, skipping building it.
else
    # Connectivity probe only; the result is informational.
    if curl http://www.energy.gov > /dev/null
    then
        echo Successfully got data from the internet
    else
        echo Could not connect to internet
    fi

    cd $BUILD_DIR
    #SHA256=56e448f051a201c5ebbaa86a5efd0ca90d327204d8b059ab25ad0f35fbfd79f1
    $DOWNLOADER https://files.pythonhosted.org/packages/35/fe/e7df7289d717426093c68d156e0fd9117c8f4872b6588e8a8928a0f68424/coverage-4.5.1.tar.gz
    # Bug fix: the archive downloaded above is coverage-4.5.1, but the script
    # previously extracted and built coverage-3.7.1, so a fresh build always
    # failed here.  Extract/build now match the downloaded version.
    tar -xvzf coverage-4.5.1.tar.gz
    cd coverage-4.5.1
    (unset CC CXX; $PYTHON_CMD setup.py install --prefix=$INSTALL_DIR)
fi

update_python_path

cd $SCRIPT_DIR

cd tests/framework
#coverage help run
FRAMEWORK_DIR=`(cd ../../framework && pwd)`

source $SCRIPT_DIR/scripts/establish_conda_env.sh --quiet --load

# get display var
DISPLAY_VAR=`(echo $DISPLAY)`
# reset it
export DISPLAY=

EXTRA="--rcfile=$FRAMEWORK_DIR/../tests/framework/.coveragerc --source=$FRAMEWORK_DIR --parallel-mode --omit=$FRAMEWORK_DIR/contrib/*"
export COVERAGE_FILE=`pwd`/.coverage

coverage erase
($FRAMEWORK_DIR/../run_tests "$@" --python-command="coverage run $EXTRA " || echo run_test done but some tests failed)

#get DISPLAY BACK
DISPLAY=$DISPLAY_VAR

# Run the interactive UI tests headless under Xvfb when available; without
# it, try anyway — partial coverage is still useful if the UI fails.
if which Xvfb
then
    Xvfb :8888 &
    xvfbPID=$!
    oldDisplay=$DISPLAY
    export DISPLAY=:8888
    cd $FRAMEWORK_DIR/../tests/framework/PostProcessors/TopologicalPostProcessor
    coverage run $EXTRA $FRAMEWORK_DIR/Driver.py test_topology_ui.xml interactiveCheck || true
    cd $FRAMEWORK_DIR/../tests/framework/PostProcessors/DataMiningPostProcessor/Clustering/
    coverage run $EXTRA $FRAMEWORK_DIR/Driver.py hierarchical_ui.xml interactiveCheck || true
    kill -9 $xvfbPID || true
    export DISPLAY=$oldDisplay
else
    ## Try these tests anyway, we can get some coverage out of them even if the
    ## UI fails or is unavailable.
    cd $FRAMEWORK_DIR/../tests/framework/PostProcessors/TopologicalPostProcessor
    coverage run $EXTRA $FRAMEWORK_DIR/Driver.py test_topology_ui.xml interactiveCheck || true
    cd $FRAMEWORK_DIR/../tests/framework/PostProcessors/DataMiningPostProcessor/Clustering/
    coverage run $EXTRA $FRAMEWORK_DIR/Driver.py hierarchical_ui.xml interactiveCheck || true
fi

## Go to the final directory and generate the html documents
cd $SCRIPT_DIR/tests/
pwd
rm -f .cov_dirs
for FILE in `find . -name '.coverage.*'`; do dirname $FILE; done | sort | uniq > .cov_dirs
coverage combine `cat .cov_dirs`
coverage html
|
//
// GD_SecondTableView.h
// allrichstore
//
// Created by zhaozhe on 16/11/17.
// Copyright © 2016年 allrich88. All rights reserved.
//
#import <UIKit/UIKit.h>
// Empty UITableView subclass used as a dedicated type / extension point.
// NOTE(review): no behaviour is added here — confirm whether customisations
// live in a category or the .m file before assuming this is intentional.
@interface GD_SecondTableView : UITableView

@end
|
// Re-export the bundled (minified) Gauge implementation as this module's
// default export.
export { default } from "./gauge.min";
|
#!/usr/bin/env bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Shared functions to build and test Python package for TensorFlow on MacOS
# ==============================================================================
set -e
set -x
source tensorflow/tools/ci_build/release/common.sh
# die MESSAGE... — print MESSAGE to stderr and abort with exit status 1.
function die() {
  echo "$@" >&2
  exit 1
}
# Write an entry to the sponge key-value store for this job.
# $1 = key, $2 = value; appended as one CSV row.
write_to_sponge() {
  # The location of the key-value CSV file sponge imports.
  TF_SPONGE_CSV="${KOKORO_ARTIFACTS_DIR}/custom_sponge_config.csv"
  echo "$1","$2" >> "${TF_SPONGE_CSV}"
}
# Runs bazel build and saves the wheel files in PIP_WHL_DIR.
# $1 = output directory; remaining args are forwarded to build_pip_package.
# Builds inside a throwaway virtualenv so the configure step and pip deps do
# not pollute the host interpreter.
function bazel_build_wheel {
  if [[ -z "${1}" ]]; then
    die "Missing wheel file path to install and test build"
  fi
  PIP_WHL_DIR=$1
  shift
  PIP_WHL_FLAGS=$@
  mkdir -p "${PIP_WHL_DIR}"
  PIP_WHL_DIR=$(realpath "${PIP_WHL_DIR}") # Get absolute path
  VENV_DIR=".tf-venv"
  rm -rf "${VENV_DIR}"

  # Set up and install MacOS pip dependencies.
  python -m venv ${VENV_DIR} && source ${VENV_DIR}/bin/activate
  install_macos_pip_deps

  # Update .tf_configure.bazelrc with venv python path for bazel
  export PYTHON_BIN_PATH="$(which python)"
  yes "" | ./configure

  # Build the pip package
  bazel build \
    --config=release_cpu_macos \
    --action_env=PYENV_VERSION="${PYENV_VERSION}" \
    --action_env=PYTHON_BIN_PATH="${PYTHON_BIN_PATH}" \
    //tensorflow/tools/pip_package:build_pip_package \
    || die "Error: Bazel build failed"

  # Build the wheel
  ./bazel-bin/tensorflow/tools/pip_package/build_pip_package ${PIP_WHL_DIR} ${PIP_WHL_FLAGS}

  # Set wheel path and verify that there is only one .whl file in the path.
  WHL_PATH=$(ls "${PIP_WHL_DIR}"/*.whl)
  if [[ $(echo "${WHL_PATH}" | wc -w) -ne 1 ]]; then
    die "ERROR: Failed to find exactly one built TensorFlow .whl file in "\
"directory: ${PIP_WHL_DIR}"
  fi

  # Print the size of the wheel file and log to sponge.
  WHL_SIZE=$(ls -l "${WHL_PATH}" | awk '{print $5}')
  echo "Size of the PIP wheel file built: ${WHL_SIZE}"
  write_to_sponge TF_INFO_WHL_SIZE "${WHL_SIZE}"

  # Build the wheel (with cpu flag)
  ./bazel-bin/tensorflow/tools/pip_package/build_pip_package ${PIP_WHL_DIR} ${PIP_WHL_FLAGS} --cpu
  # Retag the produced wheels for an older macOS platform tag.
  for WHL_PATH in $(ls "${PIP_WHL_DIR}"/*.whl); do
    # change 10_15 to 10_14
    NEW_WHL_PATH=${WHL_PATH/macosx_10_15/macosx_10_14}
    mv "${WHL_PATH}" "${NEW_WHL_PATH}"
    WHL_PATH=${NEW_WHL_PATH}
  done

  # Deactivate Virtual Env
  deactivate || source deactivate
  rm -rf ${VENV_DIR}
  # Reset Python bin path
  export PYTHON_BIN_PATH="$(which python)"
}
# Installs the built wheel ($1) into a fresh virtualenv, smoke-tests the
# import, then runs the bazel python test suite against the installed pip
# package (not the source tree).
function bazel_test_wheel {
  if [[ -z "${1}" ]]; then
    die "Missing wheel file path to install and test build"
  fi

  WHL_PATH=$1

  # Create new Virtual Env for Testing
  VENV_DIR=".tf-venv"
  rm -rf "${VENV_DIR}"
  python -m venv ${VENV_DIR} && source ${VENV_DIR}/bin/activate
  export PYTHON_BIN_PATH="$(which python)"

  # Create Temp Dir to run the test (so the source tree's tensorflow/ dir
  # does not shadow the installed package).
  TMP_DIR=$(mktemp -d)
  pushd "${TMP_DIR}"
  pip install "${WHL_PATH}"

  # Run a quick check on tensorflow installation.
  RET_VAL=$(python -c "import tensorflow as tf; t1=tf.constant([1,2,3,4]); t2=tf.constant([5,6,7,8]); print(tf.add(t1,t2).shape)")

  # Check result to see if tensorflow is properly installed.
  if ! [[ ${RET_VAL} == *'(4,)'* ]]; then
    die "PIP test on virtualenv (non-clean) FAILED"
  fi

  # Return to original directory.
  popd
  rm -rf "${TMP_DIR}"

  # Run Bazel Test
  # e.g. "3.8" -> "38", used to filter version-specific no-run tags.
  PY_MAJ_MINOR_VER=$(python -c "print(__import__('sys').version)" 2>&1 | awk '{ print $1 }' | head -n 1 | awk -F'.' '{printf "%s%s\n", $1, $2 }')
  TF_TEST_FLAGS="--define=no_tensorflow_py_deps=true --test_lang_filters=py --test_output=errors --verbose_failures=true --keep_going --test_env=TF2_BEHAVIOR=1"
  TF_TEST_FILTER_TAGS="-nopip,-no_pip,-nomac,-no_mac,-no_oss,-oss_serial,-no_oss_py${PY_MAJ_MINOR_VER},-v1only,-gpu,-tpu,-benchmark-test"
  BAZEL_PARALLEL_TEST_FLAGS="--local_test_jobs=$(sysctl -n hw.ncpu)"

  # Install additional test requirements
  # TODO - Add these to setup.py test requirements
  pip install portpicker~=1.4.0 scipy~=1.7.2

  # bazel_pip/ symlinks the source tests so they resolve against the
  # installed wheel rather than the checkout.
  PIP_TEST_PREFIX=bazel_pip
  TEST_ROOT=$(pwd)/${PIP_TEST_PREFIX}
  rm -rf "$TEST_ROOT"
  mkdir -p "$TEST_ROOT"
  ln -s "$(pwd)"/tensorflow "$TEST_ROOT"/tensorflow

  bazel clean
  yes "" | ./configure

  # Adding quotes around TF_TEST_FLAGS variable leads to additional quotes in variable replacement
  # shellcheck disable=SC2086
  bazel test --build_tests_only \
    ${TF_TEST_FLAGS} \
    "${BAZEL_PARALLEL_TEST_FLAGS}" \
    --test_tag_filters="${TF_TEST_FILTER_TAGS}" \
    -k -- //bazel_pip/tensorflow/python/...

  unlink "${TEST_ROOT}"/tensorflow

  # Deactivate Virtual Env
  deactivate || source deactivate
  rm -rf ${VENV_DIR}
  # Reset Python bin path
  export PYTHON_BIN_PATH="$(which python)"
}
# upload_nightly_wheel WHL_PATH — smoke-test the built wheel and, only if the
# test passes, upload it to PyPI via twine.
#
# Fix: the original captured $? after running the smoke test, but this file
# runs under `set -e` (see top), so a failing test aborted the script before
# RETVAL was ever inspected — the failure branch was unreachable.  Running
# the test directly as the `if` condition keeps set -e from firing and makes
# both branches reachable.
function upload_nightly_wheel {
  if [[ -z "${1}" ]]; then
    die "Missing wheel file path"
  fi
  WHL_PATH=$1

  # test the whl pip package
  chmod +x tensorflow/tools/ci_build/builds/nightly_release_smoke_test.sh
  if ./tensorflow/tools/ci_build/builds/nightly_release_smoke_test.sh ${WHL_PATH}; then
    echo "Basic PIP test PASSED, Uploading package: ${WHL_PATH}"
    python -m pip install 'twine ~= 3.2.0'
    python -m twine upload -r pypi-warehouse "${WHL_PATH}"
  else
    die "Basic PIP test FAILED, will not upload ${WHL_PATH} package"
  fi
}
|
const { BrowserWindow } = require('electron');
// Build (but do not show) the "About" window.
//
// Fix: the original assigned `createAboutWindow` and `aboutWindow` without
// const/let/var, creating implicit globals — a ReferenceError under strict
// mode ('use strict' / ES modules) and a leak otherwise.
const createAboutWindow = () => {
  const aboutWindow = new BrowserWindow({
    title: 'About ImageShrink',
    width: 300,
    height: 300,
    icon: `./assets/icons/Icon_128x128.png`,
    resizable: false,
    backgroundColor: 'white',
  })

  aboutWindow.loadFile('./app/about.html')

  return aboutWindow;
}

module.exports = { createAboutWindow }
<form action="/submit_post" method="post">
<div>
<label>Title</label>
<input type="text" name="title" />
</div>
<div>
<label>Description</label>
<textarea name="description"></textarea>
</div>
<div>
<label>Author</label>
<input type="text" name="author" />
</div>
<div>
<label>Category</label>
<select name="category">
<option value="">--Select--</option>
<option value="tech">Tech</option>
<option value="lifestyle">Lifestyle</option>
<option value="health">Health</option>
</select>
</div>
<div>
<input type="submit" value="Submit" />
</div>
</form> |
package domaine.bizz.interfaces;
import domaine.dto.DocumentDto;
import util.AppUtil;
public interface DocumentBizz extends DocumentDto {

    /**
     * Checks that every required field is valid before insertion.
     * Delegates to AppUtil, which fails with the given (French, user-facing)
     * message when the corresponding field is missing or invalid.
     */
    default void checkBeforeInsert() {
        AppUtil.checkObject(getGenre(), "De quel genre est ce document");
        AppUtil.checkString(getNom(), "Il faut un nom à ce document");
        AppUtil.checkObject(getProgramme(), "Pour quel programme est ce document");
        AppUtil.checkObject(getTypeProgramme(), "Pour quel type programme est ce document");
    }
}
|
#!/bin/bash
# SLURM job script: runs one hyper-parameter configuration of meta.py with a
# leaky-ReLU (slope 0.01) activation on a single core.
#SBATCH -J Act_leakyrelu-0.01_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1     # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00             # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins

#module load intel python/3.5

# Positional args: activation, seed?, optimizer, and tuned hyper-parameters.
# NOTE(review): argument meanings are defined by meta.py — confirm there.
python3 /home/se55gyhe/Act_func/progs/meta.py leakyrelu-0.01 1 Adadelta 1 0.3321876549651387 469 1.082125163166774 lecun_uniform PE-infersent
|
#!/bin/bash
# Build the Groovy 2.3.7 image from the Dockerfile in this directory.
docker build --tag="kurron/groovy:2.3.7" .
|
<filename>ci/log.ts
// tslint:disable-next-line: no-implicit-dependencies
import chalk from 'chalk';
// tslint:disable: no-console
// Each logger prefixes its message, colours it with chalk and writes it
// through the matching console channel.
const makeLogger =
	( sink: ( msg: string ) => void, colour: ( s: string ) => string, tag: string ) =>
	( str: string ) => sink( colour( `${tag}${str}` ) );

export const logStep = makeLogger( console.log, chalk.cyan, '==> ' );
export const logError = makeLogger( console.error, chalk.red, '[ERR] ' );
export const logDebug = makeLogger( console.debug, chalk.magenta, '[DBG] ' );
export const logInfo = makeLogger( console.info, chalk.blue, '[INF] ' );
|
#!/bin/bash
# Run dieharder test id 11 (-d 11) against generator id 42 (-g 42) with a
# fixed seed (-S) so the run is reproducible.  See `dieharder -l` and
# `dieharder -g -1` for the test/generator id mappings.
dieharder -d 11 -g 42 -S 1204488103
|
#!/bin/bash
# Provisioning script: install MySQL server non-interactively (no debconf
# prompts) and always exit successfully.
export DEBIAN_FRONTEND=noninteractive
apt-get -qq update
apt-get install -qq -y mysql-server
exit 0
|
package org.felix.ml.sampling.util;
import org.felix.ml.sampling.exception.ExpreException;

import org.apache.commons.jexl3.*;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import static java.lang.String.format;
/**
*
* @8
*/
/**
 * Evaluates JEXL expressions against a variable map, caching compiled
 * expressions so each distinct expression string is parsed only once.
 */
public class ExpressionUtil {

	private static final JexlEngine jexl = new JexlBuilder().create();

	// Compiled-expression cache.  ConcurrentHashMap because this static map
	// is shared by all callers of a utility class; the plain HashMap used
	// before could be corrupted under concurrent access.  A duplicate
	// compile during a race is harmless (last write wins).
	private static final Map<String, JexlExpression> expressionMap = new ConcurrentHashMap<>();

	/**
	 * Evaluates {@code expression} with the given variables in scope.
	 * The whole variable map is also exposed to the expression as {@code _ctx}.
	 *
	 * @throws ExpreException wrapping any evaluation failure
	 */
	public static Object eval(String expression, Map<String, Object> varMap) throws ExpreException {
		JexlExpression e = getExpression(expression);
		JexlContext jc = new MapContext(varMap);
		jc.set("_ctx", varMap);
		try {
			return e.evaluate(jc);
		} catch (Exception excep) {
			throw new ExpreException(ExpreException.evalError, format("error evaluate:%s", expression), excep);
		}
	}

	/**
	 * Returns the compiled form of {@code expression}, compiling and caching
	 * it on first use.
	 *
	 * @throws ExpreException wrapping any parse failure
	 */
	public static JexlExpression getExpression(String expression) throws ExpreException {
		JexlExpression e = expressionMap.get(expression);
		if (e == null) {
			try {
				e = jexl.createExpression(expression);
			} catch (Exception excep) {
				throw new ExpreException(ExpreException.initError, format("error evaluate:%s", expression), excep);
			}
			expressionMap.put(expression, e);
		}
		return e;
	}
}
|
def string_match(string1, string2):
    """Return the longest common *subsequence* (LCS) of two strings.

    Note: despite the name (kept for compatibility) and the original
    comments, this computes the longest common subsequence — characters need
    not be contiguous — not the longest common substring.

    Ties are broken exactly as before: when moving through the DP table the
    backtrack prefers decrementing ``i`` when ``l[i-1][j] > l[i][j-1]``.

    :param string1: first string (may be empty).
    :param string2: second string (may be empty).
    :return: the LCS as a string; '' when there is no common subsequence.
    """
    m, n = len(string1), len(string2)
    # l[i][j] = LCS length of string1[:i] and string2[:j]
    l = [[0] * (n + 1) for _ in range(m + 1)]
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            if string1[i - 1] == string2[j - 1]:
                l[i][j] = l[i - 1][j - 1] + 1
            else:
                l[i][j] = max(l[i - 1][j], l[i][j - 1])

    # Backtrack from the bottom-right corner, collecting matched characters
    # in reverse order (replaces the original's pre-sized list with a dummy
    # sentinel slot).
    chars = []
    i, j = m, n
    while i > 0 and j > 0:
        if string1[i - 1] == string2[j - 1]:
            chars.append(string1[i - 1])
            i -= 1
            j -= 1
        elif l[i - 1][j] > l[i][j - 1]:
            i -= 1
        else:
            j -= 1
    return "".join(reversed(chars))
#!/usr/bin/env bash
#
# Copyright (c) 2019-2020 The Bitcoin and Qogecoin Core Authors
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# This script runs all contrib/devtools/extended-lint-*.sh files, and fails if
# any exit with a non-zero status code.

# This script is intentionally locale dependent by not setting "export LC_ALL=C"
# in order to allow for the executed lint scripts to opt in or opt out of locale
# dependence themselves.

# -u: treat unset variables as errors (no -e: each lint's status is checked
# explicitly below so the failing script can be named).
set -u

SCRIPTDIR=$(dirname "${BASH_SOURCE[0]}")
LINTALL=$(basename "${BASH_SOURCE[0]}")

# Run every sibling extended-lint-*.sh, skipping this driver script itself.
for f in "${SCRIPTDIR}"/extended-lint-*.sh; do
  if [ "$(basename "$f")" != "$LINTALL" ]; then
    if ! "$f"; then
      echo "^---- failure generated from $f"
      exit 1
    fi
  fi
done
import pandas as pd
import numpy as np
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier

# Read financial product data
data = pd.read_csv("data.csv")

# Preprocess the data
# NOTE(review): apply(fit_transform) label-encodes EVERY column, including
# any numeric ones, and a fresh encoding is fit per column — confirm this is
# intended rather than encoding only categorical columns.
encoder = LabelEncoder()
encoded_data = data.apply(encoder.fit_transform)

# Split the data for training (80/20 holdout; 'default' is the label column).
# NOTE(review): no random_state is set, so the split — and the accuracy
# below — varies run to run.
features = encoded_data.drop('default', axis=1)
labels = encoded_data['default']
x_train, x_test, y_train, y_test = train_test_split(features, labels, test_size=0.2)

# Create a random forest model
model = RandomForestClassifier(n_estimators=10, max_depth=20)
model.fit(x_train, y_train)

# Compute the model accuracy on the test set
accuracy = model.score(x_test, y_test)
print("Accuracy: ", accuracy)
// Number of button presses, broadcast in the BLE advertising payload.
var presses = 0;
// Advertise manufacturer-specific data carrying the current press count.
// NOTE(review): 0x0590 is the manufacturer id used throughout Espruino's
// BLE examples — confirm it is the intended company identifier.
NRF.setAdvertising({},{manufacturer: 0x0590, manufacturerData:[presses]});
setWatch(function() {
  presses++;
  // Refresh the advertisement so scanners observe the new count.
  NRF.setAdvertising({},{manufacturer: 0x0590, manufacturerData:[presses]});
}, BTN, {edge:"rising", repeat:1, debounce:20});
import { createReducer, on } from '@ngrx/store';
import { EntityState, EntityAdapter, createEntityAdapter } from '@ngrx/entity';
import * as DomainsCardsActions from './domains-cards.actions';
import { DomainsCardsEntity } from './domains-cards.models';
export const DOMAINS_CARDS_FEATURE_KEY = 'domainsCards';

// Entity-adapter state for the cards, plus UI flags:
// - selectedIds: ids of currently selected cards (used for swapping);
// - initialized: set when a new game's cards have been placed;
// - loaded: set when a saved game's cards have been loaded;
// - errorMsg: last load/action error, if any.
export interface DomainsCardsState extends EntityState<DomainsCardsEntity> {
  selectedIds: string[];
  initialized: boolean;
  loaded: boolean;
  errorMsg?: string;
}

export interface DomainsCardsPartialState {
  readonly [DOMAINS_CARDS_FEATURE_KEY]: DomainsCardsState;
}

export const domainsCardsAdapter: EntityAdapter<DomainsCardsEntity> =
  createEntityAdapter<DomainsCardsEntity>();

export const initialDomainsCardsState: DomainsCardsState =
  domainsCardsAdapter.getInitialState({
    selectedIds: [],
    initialized: false,
    loaded: false,
  });

export const domainsCardsReducer = createReducer(
  initialDomainsCardsState,
  // Starting a new game: cards will be re-placed, so drop the flag.
  on(DomainsCardsActions.initDomainsCardsNewGame, (state) => ({
    ...state,
    initialized: false,
  })),
  // Loading a saved game: reset the loaded flag and any stale error.
  on(DomainsCardsActions.initDomainsCardsSavedGame, (state) => ({
    ...state,
    loaded: false,
    errorMsg: undefined,
  })),
  // Replace the whole collection with the loaded cards.
  on(DomainsCardsActions.loadDomainsCardsSuccess, (state, { domainsCards }) =>
    domainsCardsAdapter.setAll(domainsCards, { ...state, loaded: true })
  ),
  on(DomainsCardsActions.loadDomainsCardsFailure, (state, { error }) => ({
    ...state,
    errorMsg: error,
  })),
  // Replace the whole collection with the freshly placed cards.
  on(
    DomainsCardsActions.setDomainsCardsInitialized,
    (state, { domainsCards }) =>
      domainsCardsAdapter.setAll(domainsCards, { ...state, initialized: true })
  ),
  on(DomainsCardsActions.updateDomainCard, (state, { update }) =>
    domainsCardsAdapter.updateOne(update, state)
  ),
  on(DomainsCardsActions.updateDomainsCards, (state, { updates }) =>
    domainsCardsAdapter.updateMany(updates, state)
  ),
  on(DomainsCardsActions.addDomainCard, (state, { domainCard }) =>
    domainsCardsAdapter.addOne(domainCard, state)
  ),
  // Toggle a card in/out of the selection.
  on(DomainsCardsActions.toggleDomainCardSelection, (state, { id }) => {
    const foundId = state.selectedIds.find((selectedId) => selectedId === id);
    let newSelectedIds;
    if (foundId === undefined) {
      newSelectedIds = [...state.selectedIds, id];
    } else {
      newSelectedIds = state.selectedIds.filter(
        (selectedId) => selectedId !== foundId
      );
    }
    return {
      ...state,
      selectedIds: newSelectedIds,
    };
  }),
  on(DomainsCardsActions.clearDomainCardSelection, (state) => ({
    ...state,
    selectedIds: [],
  })),
  // Swap the board positions (col/row) of the two selected cards.
  // Assumes exactly two cards are selected; throws otherwise-impossible
  // undefined lookups to surface programming errors early.
  on(DomainsCardsActions.swapSelectedCards, (state) => {
    const card0 = state.entities[state.selectedIds[0]];
    const card1 = state.entities[state.selectedIds[1]];
    if (card0 === undefined || card1 === undefined) {
      throw new Error(
        `Something went wrong, card0 and card1 shouldn't be undefined at this point.`
      );
    }
    const newCard0 = { ...card0, col: card1.col, row: card1.row };
    const newCard1 = { ...card1, col: card0.col, row: card0.row };
    return {
      ...state,
      entities: {
        ...state.entities,
        [card0.id]: newCard0,
        [card1.id]: newCard1,
      },
    };
  }),
  on(DomainsCardsActions.removeDomainCard, (state, { id }) =>
    domainsCardsAdapter.removeOne(id, state)
  ),
  on(DomainsCardsActions.setDomainsCardsError, (state, { error }) => ({
    ...state,
    errorMsg: error,
  }))
);
|
<filename>components/common/Navbar/Navbar.tsx
import { FC } from 'react'
import Link from 'next/link'
import { Logo, Container } from '@components/ui'
import { Searchbar, UserNav } from '@components/common'
import NavbarRoot from './NavbarRoot'
import s from './Navbar.module.css'
import Categories from '../Categories'
interface Props {
  /** Category tree forwarded to the <Categories> menus.
   *  NOTE(review): typed `any` upstream — tighten once the shape is known. */
  navChildren: any
}

// Typed as FC<Props> so the `FC` import at the top of the file is actually
// used (it was previously imported but unused) and the component gets the
// standard function-component contract.
const Navbar: FC<Props> = ({ navChildren }) => {
  return (
    <NavbarRoot>
      <Container>
        <div className="relative flex flex-row justify-between py-4 align-center md:py-6">
          <div className="flex items-center flex-1">
            <Link href="/">
              <a className={s.logo} aria-label="Logo">
                <Logo />
              </a>
            </Link>
            <nav className="hidden ml-6 space-x-4 lg:block">
              <Categories navChildren={navChildren} />
            </nav>
          </div>

          <div className="justify-center flex-1 hidden lg:flex">
            <Searchbar />
          </div>

          <div className="flex justify-end flex-1 space-x-8">
            <UserNav />
          </div>
        </div>

        {/* Collapsed (mobile) layout: search bar and categories stacked. */}
        <div
          className="flex pb-4 lg:px-6 lg:hidden"
          style={{ flexDirection: 'column', alignItems: 'center' }}
        >
          <Searchbar id="mobile-search" />
          <Categories navChildren={navChildren} />
        </div>
      </Container>
    </NavbarRoot>
  )
}

export default Navbar
|
<reponame>bukinr/mdepx
/*-
* Copyright (c) 2019 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#ifndef _ARM_NORDICSEMI_NRF5340_NET_CORE_H_
#define _ARM_NORDICSEMI_NRF5340_NET_CORE_H_
#include <arm/nordicsemi/nrf9160_cryptocell.h>
#include <arm/nordicsemi/nrf9160_dppi.h>
#include <arm/nordicsemi/nrf9160_egu.h>
#include <arm/nordicsemi/nrf9160_gpiote.h>
#include <arm/nordicsemi/nrf9160_i2s.h>
#include <arm/nordicsemi/nrf9160_kmu.h>
#include <arm/nordicsemi/nrf9160_pdm.h>
#include <arm/nordicsemi/nrf9160_power.h>
#include <arm/nordicsemi/nrf9160_pwm.h>
#include <arm/nordicsemi/nrf9160_rtc.h>
#include <arm/nordicsemi/nrf9160_saadc.h>
#include <arm/nordicsemi/nrf9160_spim.h>
#include <arm/nordicsemi/nrf9160_spis.h>
#include <arm/nordicsemi/nrf9160_spu.h>
#include <arm/nordicsemi/nrf9160_timer.h>
#include <arm/nordicsemi/nrf9160_twim.h>
#include <arm/nordicsemi/nrf9160_twis.h>
#include <arm/nordicsemi/nrf9160_uicr.h>
#include <arm/nordicsemi/nrf9160_wdt.h>
#include <arm/nordicsemi/nrf_gpio.h>
#include <arm/nordicsemi/nrf_ipc.h>
#include <arm/nordicsemi/nrf_reset.h>
#include <arm/nordicsemi/nrf_uarte.h>
/* Peripheral base addresses for the nRF5340 network core. */
#define NRF_DCNF	0x41000000 /* Domain configuration */
#define NRF_VREQCTRL	0x41004000 /* Voltage request control */
#define NRF_CLOCK	0x41005000 /* Clock control */
#define NRF_POWER	0x41005000 /* Power control */
#define NRF_RESET	0x41005000 /* Reset status */
#define NRF_CTRLAP	0x41006000 /* Control access port CPU side */
#define NRF_RADIO	0x41008000 /* 2.4 GHz radio */
#define NRF_RNG		0x41009000 /* Random number generator */
#define NRF_GPIOTE	0x4100A000 /* GPIO tasks and events */
#define NRF_WDT		0x4100B000 /* Watchdog timer */
#define NRF_TIMER0	0x4100C000 /* Timer 0 */
#define NRF_ECB		0x4100D000 /* AES ECB mode encryption */
#define NRF_AAR		0x4100E000 /* Accelerated address resolver */
#define NRF_CCM		0x4100E000 /* AES CCM mode encryption */
#define NRF_DPPIC	0x4100F000 /* DPPI controller */
#define NRF_TEMP	0x41010000 /* Temperature sensor */
#define NRF_RTC0	0x41011000 /* Real-time counter 0 */
#define NRF_IPC		0x41012000 /* Interprocessor communication */
#define NRF_SPIM0	0x41013000 /* SPI master 0 */
#define NRF_SPIS0	0x41013000 /* SPI slave 0 */
#define NRF_TWIM0	0x41013000 /* Two-wire interface master 0 */
#define NRF_TWIS0	0x41013000 /* Two-wire interface slave 0 */
#define NRF_UARTE0	0x41013000 /* Universal asynchronous receiver/transmitter */
#define NRF_EGU0	0x41014000 /* Event generator unit 0 */
#define NRF_RTC1	0x41016000 /* Real-time counter 1 */
#define NRF_TIMER1	0x41018000 /* Timer 1 */
#define NRF_TIMER2	0x41019000 /* Timer 2 */
#define NRF_SWI0	0x4101A000 /* Software interrupt 0 */
#define NRF_SWI1	0x4101B000 /* Software interrupt 1 */
#define NRF_SWI2	0x4101C000 /* Software interrupt 2 */
#define NRF_SWI3	0x4101D000 /* Software interrupt 3 */
#define NRF_ACL		0x41080000 /* Access control lists */
#define NRF_NVMC	0x41080000 /* Non-Volatile Memory Controller */
#define NRF_VMC		0x41081000 /* Volatile memory controller */
#define NRF_P0		0x418C0500 /* General purpose input and output */
#define NRF_P1		0x418C0800 /* General purpose input and output */
#define NRF_FICR	0x01FF0000 /* Factory information configuration */
#define NRF_UICR	0x01FF8000 /* User information configuration */
#define NRF_CTI		0xE0042000 /* Cross-trigger interface */

/*
 * NOTE(review): NRF_FICR/NRF_UICR above use 0x01FFx000 while BASE_FICR/
 * BASE_UICR below use 0x00FFx000 — presumably two bus views of the same
 * registers; confirm against the nRF5340 datasheet. Redundant duplicate
 * redefinitions of BASE_UICR/BASE_FICR (identical values) were removed.
 */
#define BASE_FICR	0x00FF0000 /* Factory information configuration */
#define BASE_UICR	0x00FF8000 /* User information configuration */
#define BASE_TAD	0xE0080000 /* Trace and debug control */
#define BASE_SCS	0xE000E000 /* System Control Space */
#define BASE_SCS_NS	0xE002E000 /* System Control Space Non-Secure */

/* Peripheral instance IDs (offsets into the peripheral address space). */
#define ID_DCNF		0
#define ID_VREQCTRL	4
#define ID_CLOCK	5
#define ID_POWER	5
#define ID_RESET	5
#define ID_CTRLAP	6
#define ID_RADIO	8
#define ID_RNG		9
#define ID_GPIOTE	10
#define ID_WDT		11
#define ID_TIMER0	12
#define ID_ECB		13
#define ID_AAR		14
#define ID_CCM		14
#define ID_DPPIC	15
#define ID_TEMP		16
#define ID_RTC0		17
#define ID_IPC		18
#define ID_SPIM0	19
#define ID_SPIS0	19
#define ID_TWIM0	19
#define ID_TWIS0	19
#define ID_UARTE0	19
#define ID_EGU0		20
#define ID_RTC1		22
#define ID_TIMER1	24
#define ID_TIMER2	25
#define ID_SWI0		26
#define ID_SWI1		27
#define ID_SWI2		28
#define ID_SWI3		29
#define ID_ACL		128
#define ID_NVMC		128
#define ID_VMC		129
#define ID_P0		192
#define ID_P1		192

#endif /* !_ARM_NORDICSEMI_NRF5340_NET_CORE_H_ */
|
import { runPipeline } from '@sitecore-jss/sitecore-pipelines';
import { GeneratePipelineArgs } from '../../manifest.types';
// Run the configured `generateMedia` pipeline over the manifest items and
// return the media it produced.
const generateMedia = async ({ items, templates, pipelines }: any) => {
  const { routes, nonRoutes } = items;
  const pipeline = {
    ...pipelines.generateMedia,
    args: {
      ...pipelines.generateMedia.args,
      routes,
      content: nonRoutes,
      templates,
      pipelines,
    },
  };
  const result = await runPipeline(pipeline);
  return result.media;
};
export default async (args: GeneratePipelineArgs) => {
const { items, templates } = args.pipelineResult;
const media = await generateMedia({ items, templates, pipelines: args.pipelines });
return {
...args,
pipelineResult: {
...args.pipelineResult,
media: [...(args.pipelineResult.media as any[]), ...media],
},
};
};
|
<filename>crypto/kdf/phc.go
package kdf
import (
"encoding/base64"
"strconv"
"strings"
"github.com/pkg/errors"
)
// phcEncoding is the base64 alphabet (standard alphabet, no padding) used to
// encode/decode the salt and hash fields. It follows the PHC string format:
//
//	https://github.com/P-H-C/phc-string-format/blob/master/phc-sf-spec.md
var phcEncoding = base64.RawStdEncoding
// phcAtoi parses value as a base-10 integer, falling back to the default n
// when value is the empty string.
func phcAtoi(value string, n int) (int, error) {
	if len(value) == 0 {
		return n, nil
	}
	return strconv.Atoi(value)
}
// phcParamsToMap parses a comma-separated "k=v" parameter list into a map.
// A token with no '=' is stored with an empty value.
func phcParamsToMap(s string) map[string]string {
	pairs := strings.Split(s, ",")
	out := make(map[string]string, len(pairs))
	for _, pair := range pairs {
		kv := strings.SplitN(pair, "=", 2)
		value := ""
		if len(kv) == 2 {
			value = kv[1]
		}
		out[kv[0]] = value
	}
	return out
}
// phcEncode builds a PHC-formatted string ($id[$params][$salt][$hash]) from
// the given components; empty params/salt/hash fields are omitted.
func phcEncode(identifier, params string, salt, hash []byte) string {
	segments := []string{identifier}
	if params != "" {
		segments = append(segments, params)
	}
	if len(salt) > 0 {
		segments = append(segments, phcEncoding.EncodeToString(salt))
	}
	if len(hash) > 0 {
		segments = append(segments, phcEncoding.EncodeToString(hash))
	}
	out := ""
	for _, segment := range segments {
		out += "$" + segment
	}
	return out
}
// phcDecode splits a PHC-formatted hash string into its components:
// identifier, optional version, parameter string, salt and hash.
//
// Recognized layouts (fields separated by '$', leading '$' required):
//
//	$id
//	$id$params
//	$id$salt$hash
//	$id$params$salt$hash
//	$id$v=<dec>$params$salt$hash
//
// bcrypt strings are special-cased: only the identifier is validated and the
// entire input is returned as the hash bytes.
// NOTE(review): bcryptHash is declared elsewhere in this package.
func phcDecode(s string) (id string, version int, params string, salt []byte, hash []byte, err error) {
	// At most 6 fields are meaningful; SplitN keeps any extra '$' inside the
	// final field.
	subs := strings.SplitN(s, "$", 6)
	// Must start with '$' (empty first field) and carry an identifier;
	// bcrypt strings must have exactly 4 fields.
	if subs[0] != "" || len(subs) < 2 || (subs[1] == bcryptHash && len(subs) != 4) {
		return "", 0, "", nil, nil, errors.New("cannot decode password hash")
	}
	// Special case for bcrypt
	// return just the id and the full hash
	if subs[1] == bcryptHash {
		return bcryptHash, 0, "", nil, []byte(s), nil
	}
	switch len(subs) {
	case 6: // id + version + params + salt + hash
		// version: v=<dec>
		m := phcParamsToMap(subs[2])
		if version, err = phcAtoi(m["v"], 0); err != nil {
			return "", 0, "", nil, nil, err
		}
		if hash, err = phcEncoding.DecodeString(subs[5]); err != nil {
			return "", 0, "", nil, nil, err
		}
		if salt, err = phcEncoding.DecodeString(subs[4]); err != nil {
			return "", 0, "", nil, nil, err
		}
		id, params = subs[1], subs[3]
	case 5: // id + params + salt + hash
		if hash, err = phcEncoding.DecodeString(subs[4]); err != nil {
			return "", 0, "", nil, nil, err
		}
		if salt, err = phcEncoding.DecodeString(subs[3]); err != nil {
			return "", 0, "", nil, nil, err
		}
		id, params = subs[1], subs[2]
	case 4: // id + salt + hash
		if hash, err = phcEncoding.DecodeString(subs[3]); err != nil {
			return "", 0, "", nil, nil, err
		}
		if salt, err = phcEncoding.DecodeString(subs[2]); err != nil {
			return "", 0, "", nil, nil, err
		}
		id = subs[1]
	case 3: // id + params
		id, params = subs[1], subs[2]
	case 2: // id
		id = subs[1]
	default:
		return "", 0, "", nil, nil, errors.New("cannot decode password hash")
	}
	// Named return values carry the decoded components.
	return
}
|
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash -eu
#
# Copyright 2021 The On Combining Bags to Better Learn from Label Proportions Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
# Run each of the seven R method scripts sequentially, appending combined
# stdout/stderr of each run to its matching per-method log file.
for method_index in 1 2 3 4 5 6 7; do
  Rscript "./Code/RCode/RCodeMethod${method_index}.R" 2>&1 \
    | tee -a "./Code/RCode/RCodeMethod${method_index}.log"
done
|
<filename>codes/src/main/java/org/glamey/training/codes/hash/demo/CacheNode.java
package org.glamey.training.codes.hash.demo;

import org.glamey.training.codes.hash.consistent.ShardInfo;

import lombok.Getter;
import lombok.Setter;

/**
 * A cache node participating in consistent hashing, identified by "ip:port"
 * and weighted via the value passed to the parent {@link ShardInfo}.
 *
 * @author yang.zhou 2019.11.04.18
 */
public class CacheNode extends ShardInfo<CacheResource> {

    /** Host address of the cache node. */
    @Getter
    @Setter
    private String ip;

    /** TCP port of the cache node. */
    @Getter
    @Setter
    private int port;

    public CacheNode(int weight) {
        super(weight);
    }

    /** Node name used as the hash-ring key: "ip:port". */
    @Override
    public String getName() {
        return ip + ":" + port;
    }

    /** Creates the resource handle bound to this node. */
    @Override
    public CacheResource createResource() {
        return new CacheResource(this);
    }
}
|
import React, {useState} from 'react';
import './Header.css';
const monthNames = [ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" ];
const dayNames = [ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ];
const Header = () => {
const [today, setDate] = useState(new Date());
return (
<div className="header-wrapper">
<div className="date-wrapper">
<div className="day">
{today.getDate()}
</div>
<div className="date-inner-wrapper">
<div className="month">
{monthNames[today.getMonth()]}
</div>
<div className="year">
{today.getFullYear()}
</div>
</div>
</div>
<div className="day-name">
{dayNames[today.getDay()]}
</div>
</div>
);
}
export default Header;
|
#!/usr/bin/env bash

# Print usage help for this script.
usage() {
  cat <<END
./build.sh [-h] -u some-token-value -t your-build-tag -p path-to-your-dockerfile
Used to build a docker image containing the Pivnet Cloud Foundry Autoscaler plugin.
In order to download it, you will need a uaa token from the Pivotal Network. This can
be obtained through the Pivotal Network UI, and will be tied to your Pivotal Network
account.
-u = uaa_token, the Pivotal Network token the script will exchange for your access token,
     which can then be passed safely to the Dockerfile's args because it will expire.
-t = the docker build tag you want passed to the script
-p = the path to your Dockerfile. If left blank, the script will assume you are executing
     from the working directory and will assign the output of pwd to this parameter
END
}

# Parse flags. Note: '-h' is a bare flag (no argument) — the option string
# previously read ":h:u:t:p:", which wrongly made -h consume the next word.
while getopts ":hu:t:p:" opt; do
  case $opt in
    h) usage
       exit 0
       ;;
    u) uaa_token=$OPTARG
       ;;
    t) docker_tag=$OPTARG
       ;;
    p) docker_path=$OPTARG
       ;;
    \?) usage
        exit 0
        ;;
  esac
done
shift $((OPTIND -1))

# Default the Dockerfile path to the current directory when -p was omitted.
if [ -z ${docker_path+x} ]; then docker_path=$(pwd); fi;

# Exchange the refresh token for a short-lived access token.
# NOTE(review): parsing JSON with sed is fragile — breaks if the response
# contains extra fields; consider jq if available.
access_token=$(curl -X POST https://network.pivotal.io/api/v2/authentication/access_tokens -d "{\"refresh_token\":\"$uaa_token\"}" -s 2>&1 | sed 's/{"access_token":"\(.*\)"}/\1/')

docker build -t "$docker_tag" "$docker_path" --build-arg "token=$access_token"
sleep 5m # this kills the API credential that would appear in the container history by waiting for it to expire
docker push "$docker_tag"
def travellingSalesmanProblem(graph, s):
    """Return the minimum-weight Hamiltonian cycle cost starting/ending at s.

    graph is a square adjacency matrix of edge weights; s is the start
    vertex index. Uses exact brute force over all permutations of the
    remaining vertices (O(n!) — fine for small instances like the demo).

    The previous implementation greedily hopped to vertex[0] and did not
    include the return edge; for the demo graph it returned 0 instead of
    the true optimum (18), and its "Output: 13" comment was also wrong.
    """
    from itertools import permutations

    # All vertices except the start vertex.
    vertex = [i for i in range(len(graph)) if i != s]

    min_path = float("inf")
    for perm in permutations(vertex):
        # Walk s -> perm[0] -> ... -> perm[-1] -> s and total the weights.
        current_pathweight = 0
        k = s
        for j in perm:
            current_pathweight += graph[k][j]
            k = j
        current_pathweight += graph[k][s]  # close the cycle
        min_path = min(min_path, current_pathweight)
    return min_path


# driver program to test the above function
if __name__ == "__main__":
    # matrix representation of graph
    graph = [[0, 2, 3, 8],
             [2, 0, 5, 7],
             [3, 5, 0, 6],
             [8, 7, 6, 0]]
    s = 0
    print(travellingSalesmanProblem(graph, s))
    # Output: 18
import { Service } from 'egg'
import { CreateOptions } from 'sequelize'
/**
 * CRUD service for the User model.
 */
export default class User extends Service {
  /**
   * Page through users, newest first.
   * @returns rows for the requested page plus the total count.
   */
  public async list({ offset = 0, limit = 10 }: { offset: number; limit: number; }): Promise<{ rows: any[]; count: number }> {
    return this.ctx.model.User.findAndCountAll({
      offset,
      limit,
      order: [[ 'created_at', 'desc' ], [ 'id', 'desc' ]]
    })
  }

  /**
   * Fetch a single user by primary key.
   * Throws an HTTP 404 via ctx.throw when the user does not exist.
   */
  public async find(id: number): Promise<any> {
    const user = await this.ctx.model.User.findByPk(id)
    if (!user) {
      this.ctx.throw(404, 'user not found')
    }
    return user!
  }

  /** Create a new user record. */
  public async create(user: CreateOptions): Promise<any> {
    return this.ctx.model.User.create(user)
  }

  /**
   * Apply partial updates to an existing user.
   * Delegates the lookup (and 404 handling) to find().
   */
  public async update({ id, updates }: { id: number; updates: object }): Promise<any> {
    const user = await this.find(id)
    return user.update(updates)
  }

  /**
   * Delete a user. Delegates the lookup (and 404 handling) to find().
   */
  public async del(id: number): Promise<void> {
    const user = await this.find(id)
    return user.destroy()
  }
}
|
<gh_stars>1-10
package ed.ac.uk.kanren
import scala.collection.mutable.{Map => MutMap}
object Trace{
  /** Factory: wrap goal `g` (passed by name, evaluated lazily) in a Trace. */
  def apply(name:String)(g: => Goal) : Trace =
    new Trace(name)(g)

  // Per-name instance counters so repeated traces with the same name get
  // distinct suffixes (name0, name1, ...).
  // NOTE(review): MutMap access is unsynchronized — assumes single-threaded use.
  private val nextIntMap = MutMap[String, Int]()

  /** Returns 0, 1, 2, ... on successive calls for the same name. */
  private def nextInt(name:String): Int = {
    nextIntMap(name) = nextIntMap.getOrElse(name, -1) + 1
    nextIntMap(name)
  }
}
/**
 * A Goal decorator that logs BUILD/ENTER/LEAVE events to stderr when
 * Kanren.TRACE is set, delegating evaluation to the wrapped goal `g`.
 * Each instance gets a unique "name<n>" label from Trace.nextInt.
 */
class Trace(name:String)(g: => Goal) extends Goal{
  // Sequence number assigned eagerly at construction time.
  private val n = Trace.nextInt(name)
  if(Kanren.TRACE)
    System.err.println(s"BUILD $name$n")

  /** Apply the wrapped goal, logging the substitution in and the stream out. */
  override def apply(s: Substitution): SearchStream[Substitution] = {
    if(Kanren.TRACE)
      System.err.println(s"ENTER $name$n\t -> $s")
    val res = g(s)
    if(Kanren.TRACE)
      System.err.println(s"LEAVE $name$n\t -> $res")
    res
  }
}
|
#! /bin/bash
# SLURM batch job: runs the rexi_fd solver (m=128, t=14 threads, 28 MPI ranks)
# on the LRZ mpp2 cluster. The #SBATCH lines below are scheduler directives
# and must not be edited casually.
#SBATCH -o /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2015_12_30_scalability_rexi_fd/run_rexi_fd_m0128_t014_n0128_r0028_a1.txt
###SBATCH -e /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2015_12_30_scalability_rexi_fd/run_rexi_fd_m0128_t014_n0128_r0028_a1.err
#SBATCH -J rexi_fd_m0128_t014_n0128_r0028_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=28
#SBATCH --cpus-per-task=14
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=03:00:00

#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1

# Pin OpenMP threads; 14 threads per MPI rank to match --cpus-per-task.
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=14

echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo

# Load the toolchain expected by the SWEET build.
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5

cd /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2015_12_30_scalability_rexi_fd
cd ../../../
. local_software/env_vars.sh

# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T14"

# Launch: 28 ranks, 2 per node, 14 OpenMP threads each; `time -p` records
# wall/cpu time for the benchmark.
time -p mpiexec.hydra -genv OMP_NUM_THREADS 14 -envall -ppn 2 -n 28 ./build/rexi_fd_m_tyes_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 128 -U 0 -S 0 --use-specdiff-for-complex-array 0 --rexi-h 0.8 --timestepping-mode 1 --staggering 0 --rexi-m=128 -C -5.0
|
#!/bin/bash
# Fetch dependencies, build the Go server from the ./go sources, then run it
# bound to port 80 on localhost.
go get -u ./go/
go build ./go/server.go ./go/util.go ./go/rankings.go
# Grant the binary permission to bind a privileged port (80) without root.
sudo setcap "cap_net_bind_service=+ep" ./server
./server localhost:80
|
<filename>tools/data/custom/prepare_annot.py<gh_stars>1-10
import json
from os import makedirs
from os.path import exists, join
from argparse import ArgumentParser
from collections import defaultdict
from random import shuffle
# Maps raw gesture annotation labels to numeric class ids. Several visual
# variants of the same digit (e.g. "_hand_to_the_camera", middle/big-finger
# forms) intentionally collapse onto the same class id.
CLASS_MAP = {
    'digit_0': 0,
    'digit_1': 1,
    'digit_1_hand_to_the_camera': 1,
    'digit_2': 2,
    'digit_2_hand_to_the_camera': 2,
    'digit_3': 3,
    'digit_3_hand_to_the_camera': 3,
    'digit_3_middle_fingers': 3,
    'digit_3_middle_fingers_hand_to_the_camera': 3,
    'digit_3_with_big_finger': 3,
    'digit_3_with_big_finger_hand_to_the_camera': 3,
    'digit_4': 4,
    'digit_4_hand_to_the_camera': 4,
    'digit_5': 5,
    'digit_5_hand_to_the_camera': 5,
    'thumb_up': 6,
    'thumb_down': 7,
    'sliding_two_fingers_up': 8,
    'sliding_two_fingers_down': 9,
    'sliding_two_fingers_left': 10,
    'sliding_two_fingers_right': 11,
}
def load_videos_info(file_path):
    """Load the videos-info JSON file, lower-casing every top-level key."""
    with open(file_path) as input_stream:
        raw = json.load(input_stream)
    return {name.lower(): info for name, info in raw.items()}
def update_samples_info(records, file_path, class_map):
    """Merge per-sample info from file_path into `records` and return the
    merged list.

    Each valid line in the file has 7 space-separated fields; only the name
    (field 0) and fps (field 6) are used. For every matched record the
    annotation window is flattened onto the record, fps is clamped to
    [5, 30], and the string label is mapped to a class id via class_map.
    Records whose annotation is None are dropped.

    NOTE: mutates the entries of `records` in place (deletes 'annot',
    rewrites 'label').
    """
    out_records = []
    with open(file_path) as input_stream:
        for line in input_stream:
            line_parts = line.strip().split(' ')
            # Skip malformed lines rather than failing.
            if len(line_parts) != 7:
                continue

            name, _, _, _, _, _, fps = line_parts
            name = name.lower()
            # Clamp fps to a sane range.
            fps = max(5.0, min(30.0, float(fps)))

            assert name in records, f'Cannot find \"{name}\" in records'
            record = records[name]

            video_annot = record['annot']
            if video_annot is None:
                continue

            # The clip must lie fully inside the video extent.
            assert video_annot['clip_start'] >= video_annot['video_start']
            assert video_annot['clip_end'] <= video_annot['video_end']

            # Flatten the annotation dict onto the record itself.
            record['video_start'] = video_annot['video_start']
            record['video_end'] = video_annot['video_end']
            record['clip_start'] = video_annot['clip_start']
            record['clip_end'] = video_annot['clip_end']
            record['fps'] = fps
            del record['annot']

            # Replace the string label with its numeric class id.
            label = record['label']
            assert label in class_map, f'Cannot find {label} in class_map'
            record['label'] = class_map[label]

            record['name'] = name
            out_records.append(record)
    return out_records
def split_train_val(records, test_ratio):
    """Split records into train/test lists, holding out whole users.

    Roughly test_ratio of the user ids (at least one, never all) are put in
    the test split so that no user appears in both splits.
    """
    by_user = defaultdict(list)
    for record in records:
        by_user[record['user_id']].append(record)

    user_ids = list(by_user.keys())
    num_test_ids = max(1, int(test_ratio * float(len(user_ids))))
    assert 0 < num_test_ids < len(user_ids)

    shuffle(user_ids)
    held_out = set(user_ids[:num_test_ids])

    train_records, test_records = [], []
    for user_id, user_records in by_user.items():
        target = test_records if user_id in held_out else train_records
        target.extend(user_records)
    return train_records, test_records
def dump_annot(records, out_path):
    """Write records to out_path, one space-separated line per record:
    name label clip_start clip_end video_start video_end fps
    """
    field_order = ('name', 'label', 'clip_start', 'clip_end',
                   'video_start', 'video_end', 'fps')
    with open(out_path, 'w') as output_stream:
        for record in records:
            line = ' '.join(str(record[field]) for field in field_order)
            output_stream.write(line + '\n')
def main():
    """CLI entry point: merge video/sample info, split by user, and dump
    train.txt / test.txt annotation files into --output_dir."""
    parser = ArgumentParser()
    parser.add_argument('--videos_info', '-iv', type=str, required=True)
    parser.add_argument('--samples_info', '-is', type=str, required=True)
    parser.add_argument('--output_dir', '-o', type=str, required=True)
    parser.add_argument('--test_ratio', '-t', type=float, required=False, default=0.2)
    args = parser.parse_args()

    assert exists(args.videos_info)
    assert exists(args.samples_info)

    if not exists(args.output_dir):
        makedirs(args.output_dir)

    # Load and merge the two metadata sources.
    records = load_videos_info(args.videos_info)
    print(f'Loaded {len(records)} video records')

    records = update_samples_info(records, args.samples_info, CLASS_MAP)
    print(f'Merged {len(records)} final records')

    # User-disjoint train/test split.
    train_records, test_records = split_train_val(records, test_ratio=args.test_ratio)
    print(f'Split on {len(train_records)} train and {len(test_records)} test records')

    train_out_path = join(args.output_dir, 'train.txt')
    dump_annot(train_records, train_out_path)
    print(f'Train annotation is dumped to: {train_out_path}')

    test_out_path = join(args.output_dir, 'test.txt')
    dump_annot(test_records, test_out_path)
    print(f'Test annotation is dumped to: {test_out_path}')


if __name__ == '__main__':
    main()
#!/bin/bash
# Copyright (C) 2020 Intel Corporation
#
# Release build dispatcher: builds the service-runtime docker images and
# Debian packages. Exactly one build target is selected by the first CLI
# argument (see the case statement at the bottom); each build_* function
# terminates the script with exit 0.

SCRIPT_DIR=$(dirname $(readlink -e $0))

# Image tags shared by the build targets below.
BASE_IMAGE=service_runtime_base:ubuntu_20.04
OPENVINO_IMAGE=service_runtime_inference_engine:ubuntu_20.04
DEVEL_IMAGE=service_runtime_devel:latest
DEMO_IMAGE=service_runtime_demo:latest

# Build the Ubuntu base image.
build_base_image()
{
    cd $SCRIPT_DIR
    ./docker_build.sh .config docker/ docker/Dockerfile.base_image \
        $BASE_IMAGE
    exit 0
}

# Build the OpenVINO inference-engine image.
build_openvino_image()
{
    cd $SCRIPT_DIR
    ./docker_build.sh .config docker/ docker/Dockerfile.inference_engine \
        $OPENVINO_IMAGE
    exit 0
}

# Build the main service_runtime image. The tag is persisted in
# docker_image_tag so repeated builds reuse the same tag; the resulting
# image id is written to docker_image for packaging.
build_image()
{
    cd $SCRIPT_DIR
    docker_image_tag_file=package/service_runtime/docker_image_tag
    docker_image_id=package/service_runtime/docker_image
    if [ -e $docker_image_tag_file ]; then
        TAG=$(cat $docker_image_tag_file)
    else
        TAG=v1_$(date +%Y%m%d_%H%M%S)
        printf "%s" ${TAG} > $docker_image_tag_file
    fi
    # The tag file is baked into the image rootfs as well.
    cp -a $docker_image_tag_file docker/app_rootfs/
    IMAGE=service_runtime:${TAG}
    ./docker_build.sh .config docker/ docker/Dockerfile.image $IMAGE \
        --iidfile $docker_image_id
    docker tag $IMAGE service_runtime:latest
    exit 0
}

# Build the development image.
build_devel_image()
{
    cd $SCRIPT_DIR
    ./docker_build.sh .config docker/ docker/Dockerfile.devel $DEVEL_IMAGE
    exit 0
}

# Build the demo image; its id is also recorded for packaging.
build_demo_image()
{
    cd $SCRIPT_DIR
    docker_image_id=package/service_runtime/docker_image
    ./docker_build.sh .config docker/ docker/Dockerfile.demo $DEMO_IMAGE \
        --iidfile $docker_image_id
    exit 0
}

# Build the service_runtime Debian package (unsigned).
build_package()
{
    cd $SCRIPT_DIR/package/service_runtime
    dpkg-buildpackage -us -uc -ui -i
    exit 0
}

# Build the models Debian package (unsigned).
build_models_package()
{
    cd $SCRIPT_DIR/package/models
    dpkg-buildpackage -us -uc -ui -i
    exit 0
}

# Dispatch on (abbreviated) target name; unknown targets print usage.
case "$1" in
    b* | base_image*)
        build_base_image
        ;;
    inf* | inference_engine_image)
        build_openvino_image
        ;;
    im* | image)
        build_image
        ;;
    dev* | devel_image)
        build_devel_image
        ;;
    demo* | demo_image)
        build_demo_image
        ;;
    p* | package)
        build_package
        ;;
    m* | models_package)
        build_models_package
        ;;
    *)
        printf "Usage: ./release_build.sh {base_image|"
        printf "inference_engine_image|image|devel_image|demo_image|package|models_package}\n"
        exit 1
esac
|
<gh_stars>1-10
// Route-aware wrapper: injects react-router props (history/location/match)
// into EditProjectModal before re-exporting it as the default.
import { EditProjectModal } from "./EditProjectModal";
import {withRouter} from "react-router-dom";

export default withRouter(EditProjectModal);
|
from django.urls import reverse
def generate_user_api_urls(obj):
    """Build the per-user API endpoint URLs, keyed by endpoint name."""
    kwargs = {"pk": obj.pk}
    endpoint_names = (
        "data_downloads",
        "details",
        "change_email",
        "change_password",
        "edit_details",
    )
    return {
        name: reverse("misago:api:user-%s" % name.replace("_", "-"), kwargs=kwargs)
        for name in endpoint_names
    }
def maze_heuristic_algorithm(maze):
    """Greedy best-first walk from maze.start to maze.end.

    At each step the unvisited neighbor closest to the goal (per
    maze.distance) is chosen; dead ends backtrack by popping the path.
    The returned list is the sequence of nodes walked (including
    backtracked-from nodes' predecessors), ending at maze.end.

    NOTE(review): assumes `maze` exposes start, end, neighbors(node) and
    distance(a, b) — confirm against the maze class. If a dead end is hit
    while `solution` is empty, solution.pop() raises IndexError; presumably
    mazes passed in always contain a path. This is a heuristic walk, not a
    shortest-path search.
    """
    # Initialize an empty list to store the solution
    solution = []
    # Initialize an empty list to store visited nodes
    visited = []
    # Set the current node to the start position
    current = maze.start
    # Loop until the current node is the end position
    while current != maze.end:
        # Add the current node to the visited list
        visited.append(current)
        # Create a list containing the possible moves
        possible_moves = [node for node in maze.neighbors(current) if node not in visited]
        # If there are no possible moves, go back to the last visited node
        if not possible_moves:
            current = solution.pop()
        else:
            # Select the move whose straight-line distance to the goal is smallest
            best_move = sorted(possible_moves, key=lambda x: maze.distance(x, maze.end))[0]
            solution.append(current)
            current = best_move
    # Add the end position to the solution
    solution.append(current)
    # Return the solution
    return solution
// Wallet operation modules. Each submodule groups one category of wallet
// actions; all public items are re-exported flat at the bottom so callers
// can use them without the module path.

mod reconcile {
    /// Reconcile recorded transactions against the ledger.
    pub fn reconcile_transactions() {
        // Implementation
    }
}

mod transfer {
    /// Transfer funds between wallets.
    pub fn transfer_funds() {
        // Implementation
    }
}

mod wallet_summary {
    /// Retrieve a summary of a wallet's balances.
    pub fn retrieve_wallet_summary() {
        // Implementation
    }
}

mod withdraw {
    /// Withdraw funds from a wallet.
    pub fn withdraw_funds() {
        // Implementation
    }
}

// Re-export the modules' items for easy, flat access.
pub use reconcile::*;
pub use transfer::*;
pub use wallet_summary::*;
pub use withdraw::*;
#!/bin/bash
# SGE batch job: runs the breeding-GAM R script for Setophaga americana,
# region E. The '#$' lines are Grid Engine directives — leave them intact.
#$ -cwd #directory (current working dir)
#$ -o br-gam-Setophaga_americana-E.joblog #jobname
#$ -j y #combine STDOUT STDERR
#$ -pe shared 4 #number of cores - for entire node:
#$ -M cyoungl@mail #mail address
#$ -m ea #email at end and abort times
#$ -l h_data=8G,h_rt=24:00:00 #resource request - run time in hours

#load modules
source /u/local/Modules/default/init/modules.sh
module load R/3.6.1
echo `hostname`

#export lib
export R_LIBS=/u/home/c/cyoungfl/R/x86_64-pc-linux-gnu-library/3.6

#run script - time call return info on memory usage
/usr/bin/time -apv Rscript /u/home/c/cyoungfl/Bird_Phenology/Scripts/7-br-GAM/7-br-GAM.R Setophaga_americana E
|
<filename>gulimall-product/src/main/java/com/littlejenny/gulimall/product/service/CategoryService.java
package com.littlejenny.gulimall.product.service;

import com.baomidou.mybatisplus.extension.service.IService;
import com.littlejenny.common.utils.PageUtils;
import com.littlejenny.gulimall.product.entity.CategoryEntity;
import com.littlejenny.gulimall.product.vo.Catelog2VO;

import java.util.List;
import java.util.Map;

/**
 * 商品三级分类 (three-level product category service).
 *
 * @author littlejenny
 * @email <EMAIL>
 * @date 2021-07-16 15:11:54
 */
public interface CategoryService extends IService<CategoryEntity> {

    /** Page through categories using the given query parameters. */
    PageUtils queryPage(Map<String, Object> params);

    /** Return all categories assembled into a parent/child tree. */
    List<CategoryEntity> listWithTree();

    /** Remove the given category menu ids (with referential checks expected). */
    void removeMenuByIds(List<Long> catIds);

    /** Resolve the full category id path (root..leaf) for an attribute group. */
    Long[] getCategoryPath(Long attrGroupId);

    /** Update a category and any denormalized copies of its details. */
    void updateDetailByID(CategoryEntity category);

    /** Return all top-level (level-1) categories. */
    List<CategoryEntity> getAllLevelOne();

    /** Return the level-2 catalog entries grouped by level-1 category id. */
    Map<String, List<Catelog2VO>> getCatalogJson();
}
|
import * as parseArgs from 'minimist';
import * as fs from 'fs';
import { resolve as resolvePath } from 'path';
/**
 * Validate and normalize CLI arguments.
 *
 * Accepted flags:
 *   --port <1-65535>      (default 8000)
 *   --bindaddress <addr>  (default 127.0.0.1)
 *   --docker              boolean flag
 *   --notebooks <path>    required, or pass the path as the first positional
 *
 * @returns the sanitized { notebooks, port, bindaddress, docker } settings.
 * @throws Error on any invalid, missing, or unknown parameter.
 */
export async function sanitizeParameters(rawargv: string[]) {
  const argv = parseArgs(rawargv, {
    boolean: 'docker',
    string: ['notebooks', 'bindaddress'],
  });

  // --port
  let port = 8000;
  if ('port' in argv) {
    if (!argv.port.toString().match(/^\d+$/g)) {
      throw new Error("Invalid port");
    }
    port = parseInt(argv.port, 10);
    if (port <= 0 || port > 65535) {
      throw new Error("Port is out of range");
    }
  }

  // --bindaddress
  let bindaddress = '127.0.0.1';
  if ('bindaddress' in argv) {
    bindaddress = argv.bindaddress;
  }
  if (bindaddress.trim() === '') {
    throw new Error('--bindaddress is invalid.')
  }

  // --docker (already coerced to a boolean by the parser options above)
  const docker = argv.docker;

  // --notebooks: the flag wins; otherwise fall back to the first positional.
  // minimist parses a numeric-looking positional as a number, so coerce to
  // string before trimming (previously this crashed with a TypeError).
  let notebooks;
  if (!("notebooks" in argv) || typeof argv.notebooks !== "string" || argv.notebooks.trim() === '') {
    if (argv['_'].length > 0) {
      notebooks = String(argv['_'].shift()).trim();
    } else {
      throw new Error("--notebooks path/to/notebooks is required if path not provided as argument.");
    }
  } else {
    notebooks = argv.notebooks;
  }
  notebooks = resolvePath(notebooks);
  if (!fs.existsSync(notebooks)) {
    throw new Error("Notebooks path does not exist.");
  }
  if (!fs.statSync(notebooks).isDirectory()) {
    throw new Error("Notebooks path is not a directory.");
  }

  // Reject leftover positionals (the consumed one was shifted off above).
  if (argv['_'].length > 0) {
    // ex: node . "abcdef"
    throw new Error("Unknown argument(s): " + argv['_'].join(', '));
  }
  // Reject unknown flags.
  const known = ['notebooks', 'port', 'bindaddress', 'docker'];
  const unknown = Object.keys(argv).filter((key) => key !== '_' && known.indexOf(key) === -1);
  if (unknown.length > 0) {
    throw new Error("Unknown parameter(s): " + unknown.join(', '));
  }

  return { notebooks, port, bindaddress, docker };
}
|
# mcscript_init.csh
#
# Initialization code to source from .cshrc.
# NOTE(review): despite the .csh name above, the syntax below (export,
# function) is bash/sh — the 1/27/16 note says this is the (ba)sh version.
#
# Mark A. Caprio
# University of Notre Dame
#
# 1/27/16 (pjf): Created (ba)sh version.

# Put the mcscript tools on PATH.
export PATH=${MCSCRIPT_DIR}/tools:${PATH}
# export PYTHONPATH=${MCSCRIPT_DIR}:${PYTHONPATH}

#alias cdr='cd ${MCSCRIPT_WORK_HOME}/${MCSCRIPT_RUN_PREFIX}\!*'
# cdr NNNN: cd into the work directory for run NNNN.
cdr() {
  cd ${MCSCRIPT_WORK_HOME}/${MCSCRIPT_RUN_PREFIX}$1
}
|
import pandas as pd
import numpy as np
import json
from tests.utils import confound_filename, pipeline_null
class ConfoundsGenerator:
    """Generates a synthetic fMRIPrep-style confounds table for testing."""

    # Distribution parameters for the synthetic signals: means/stds for the
    # tissue signals, linear-model coefficients for DVARS, and noise scales
    # for the motion/compcor regressors. Units mirror the real confound
    # ranges being mimicked — TODO confirm against actual fMRIPrep output.
    _params = {
        'csf_mean': 4500,
        'csf_std': 30,
        'gs_mean': 4600,
        'gs_std': 30,
        'wm_mean': 4700,
        'wm_std': 30,
        'dvars_slope': 30,
        'dvars_intercept': 20,
        'dvars_noise': 6,
        'tcompcor_meta_std': 0.05,
        'acompcor_meta_std': 0.05,
        'aroma_std': 0.05,
        'trans_meta_std': 0.02,
        'trans_meta_mean': 0.005,
        'rot_meta_std': 0.001,
        'rot_meta_mean': 0,
        'sv_slope': 8000
    }
    def __init__(self, n_volumes=20, *, n_tcompcor=10, n_acompcor=300,
                 n_aroma=0, seed=0):
        # n_volumes: number of time points (rows) in the generated table.
        # n_tcompcor / n_acompcor / n_aroma: regressor counts per family.
        # seed: seeds numpy's *global* RNG so generation is reproducible.
        self._n_volumes = n_volumes
        self._n_tcompcor = n_tcompcor
        self._n_acompcor = n_acompcor
        self._n_aroma = n_aroma
        self._seed = seed
        self._df = pd.DataFrame()   # accumulates the confound columns
        self._meta = {}             # accumulates per-regressor metadata
        np.random.seed(seed)
        self._create_all()
    def __repr__(self):
        # Reproducible constructor-style representation.
        return f'{self.__class__.__name__}' + \
            f'(n_volumes={self._n_volumes}, n_tcompcor={self._n_tcompcor}, ' + \
            f'n_acompcor={self._n_acompcor}, n_aroma={self._n_aroma}, ' + \
            f'seed={self._seed})'
    @property
    def confounds(self):
        """The generated confounds table (pandas DataFrame)."""
        return self._df
    @property
    def confounds_meta(self):
        """Per-regressor metadata dict keyed by confound name."""
        return self._meta
    @property
    def mean_fd(self):
        """Mean framewise displacement across volumes."""
        return self.confounds['framewise_displacement'].mean()
    @property
    def max_fd(self):
        """Maximum framewise displacement across volumes."""
        return self.confounds['framewise_displacement'].max()
    @property
    def relevant_acompcors(self):
        '''Returns computed list of 10 acompcor regressors (5 for both wm and
        csf) with highest explained variance'''
        # Too few acompcor components to pick a meaningful top-5 per tissue.
        if self._n_acompcor < 15:
            return []

        acompcors_filtered = []
        for tissue in ['CSF', 'WM']:
            # (name, variance) pairs for regressors tagged with this mask.
            acompcors = [(key, val['VarianceExplained'])
                         for key, val in self.confounds_meta.items()
                         if val.get('Mask') == tissue]
            # Keep the 5 highest-variance regressor names per tissue.
            acompcors.sort(key=lambda x: x[1], reverse=True)
            acompcors = [acompcor[0] for acompcor in acompcors[:5]]
            acompcors_filtered.extend(acompcors)
        return acompcors_filtered
def get_outlier_scans(self, fd_thr, dvars_thr):
outliers = (
(self._df['framewise_displacement'] >= fd_thr) |
(self._df['std_dvars'] >= dvars_thr)
)
return list(outliers[outliers].index)
def _create_tissue_signals(self):
tissue_signals = {}
tissue_signals['csf'] = (self._params['csf_mean']
+ self._params['csf_std']
* np.random.randn(self._n_volumes, ))
tissue_signals['white_matter'] = (self._params['wm_mean']
+ self._params['wm_std']
* np.random.randn(self._n_volumes, ))
tissue_signals['global_signal'] = (self._params['gs_mean']
+ self._params['gs_std']
* np.random.randn(self._n_volumes, ))
for conf_name, signal in tissue_signals.items():
self._df[conf_name] = signal
self._df[conf_name + '_power2'] = np.power(signal, 2)
self._df[conf_name + '_derivative1'] = np.diff(signal, prepend=np.nan)
self._df[conf_name + '_derivative1_power2'] = np.power(np.diff(signal, prepend=np.nan), 2)
def _create_motion_params(self):
motion_params = {}
for axis in 'xyz':
motion_params['trans_' + axis] = np.cumsum(
np.random.randn(self._n_volumes)
* (self._params['trans_meta_mean']
+ np.random.rand() * self._params['trans_meta_std']))
motion_params['rot_' + axis] = np.cumsum(
np.random.randn(self._n_volumes)
* (self._params['rot_meta_mean']
+ np.random.rand() * self._params['rot_meta_std']))
for conf_name, signal in motion_params.items():
self._df[conf_name] = signal
self._df[conf_name + '_power2'] = np.power(signal, 2)
self._df[conf_name + '_derivative1'] = np.diff(signal, prepend=np.nan)
self._df[conf_name + '_derivative1_power2'] = np.power(np.diff(signal, prepend=np.nan), 2)
def _create_framewise_displacement(self):
self._df['framewise_displacement'] = (
self._df['trans_x'].diff().abs() +
self._df['trans_y'].diff().abs() +
self._df['trans_z'].diff().abs() +
50 * self._df['rot_x'].diff().abs() +
50 * self._df['rot_y'].diff().abs() +
50 * self._df['rot_z'].diff().abs()
)
def _create_dvars(self):
self._df['dvars'] = (self._params['dvars_slope'] * self._df['framewise_displacement']
+ self._params['dvars_intercept']
+ self._params['dvars_noise'] * np.random.randn(self._n_volumes))
# Note: this is not nipype implementation of std_dvars
self._df['std_dvars'] = self._df['dvars'] / self._df['dvars'].mean()
def _create_tcompcors(self):
for i in range(self._n_tcompcor):
self._df[f't_comp_cor_{i:02}'] = (self._params['tcompcor_meta_std']
* np.random.randn()
* np.random.randn(self._n_volumes))
self._meta[f't_comp_cor_{i:02}'] = {
'Method': 'tCompCor',
'Retained': True
}
def _create_acompcors(self):
variance_acompcor = (np.arange(self._n_acompcor, 0, -1)
/ np.sum(np.arange(self._n_acompcor, 0, -1)))
variance_acompcor_cum = np.cumsum(variance_acompcor)
for i in range(self._n_acompcor):
self._df[f'a_comp_cor_{i:02}'] = (self._params['acompcor_meta_std']
* np.random.randn()
* np.random.randn(self._n_volumes))
self._meta[f'a_comp_cor_{i:02}'] = {
'CumulativeVarianceExplained': variance_acompcor_cum[i],
'Method': 'tCompCor',
'Mask': ['combined', 'CSF', 'WM'][i%3],
'Retained': True,
'SingularValue': self._params['sv_slope'] * variance_acompcor[i],
'VarianceExplained': variance_acompcor[i]
}
def _create_cosine_functions(self):
hfcut = 1 / 128 # low pass filter
t_r = 2 # repetition time
n_cosine = int(np.floor(2 * self._n_volumes * hfcut * t_r))
for i in range(n_cosine):
self._df[f'cosine{i:02}'] = np.cos(np.linspace(0, (i+1)*np.pi,
num=self._n_volumes))
def _create_motion_outliers(self):
outlier_scans = self.get_outlier_scans(fd_thr=0.5, dvars_thr=1.5)
for i, scan in enumerate(outlier_scans):
spike_regressor = np.zeros((self._n_volumes, ))
spike_regressor[scan] = 1.
self._df[f'motion_outlier_{i:02}'] = spike_regressor
def _create_aroma_regressors(self):
for i in range(self._n_aroma):
self._df[f'aroma_motion_{i:02}'] = (self._params['aroma_std']
* np.random.randn()
* np.random.randn(self._n_volumes))
def meta_to_json(self, filename):
with open(filename, 'w') as f:
json.dump(self._meta, f, sort_keys=True, indent=4, separators=(',', ': '))
def _create_all(self):
self._create_tissue_signals()
self._create_motion_params()
self._create_framewise_displacement()
self._create_dvars()
self._create_tcompcors()
self._create_acompcors()
self._create_cosine_functions()
self._create_motion_outliers()
self._create_aroma_regressors() |
#!/bin/bash
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Abort on the first failing command, including failures inside pipelines.
set -eo pipefail

# Start the releasetool reporter
python3 -m pip install gcp-releasetool
python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script

# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
python3 -m pip install --upgrade twine wheel setuptools

# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1

# Move into the package, build the distribution and upload.
# The PyPI password is read from the Kokoro keystore mounted into the build.
TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password")
cd github/python-containeranalysis
python3 setup.py sdist bdist_wheel
twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/*
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.core.filter.impl;
import org.hibernate.validator.spi.nodenameprovider.JavaBeanProperty;
import org.hibernate.validator.spi.nodenameprovider.Property;
import org.hibernate.validator.spi.nodenameprovider.PropertyNodeNameProvider;
import com.fasterxml.jackson.databind.BeanDescription;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.introspect.BeanPropertyDefinition;
import io.vertx.core.json.jackson.DatabindCodec;
/**
* hibernate validator will cache the resolved data<br>
* no need to worry about performance problem
*/
/**
 * Resolves a bean property's serialized (JSON) name via Jackson so that
 * validation messages report the wire name rather than the Java field name.<br>
 * hibernate validator will cache the resolved data<br>
 * no need to worry about performance problem
 */
public class JacksonPropertyNodeNameProvider implements PropertyNodeNameProvider {
  @Override
  public String getName(Property property) {
    // Only JavaBean properties can be introspected through Jackson; anything
    // else falls back to the raw property name.
    return property instanceof JavaBeanProperty
        ? getJavaBeanPropertyName((JavaBeanProperty) property)
        : property.getName();
  }

  private String getJavaBeanPropertyName(JavaBeanProperty property) {
    ObjectMapper objectMapper = DatabindCodec.mapper();
    JavaType beanType = objectMapper.constructType(property.getDeclaringClass());
    BeanDescription beanDescription =
        objectMapper.getSerializationConfig().introspect(beanType);
    // Find the Jackson property whose internal (Java) name matches and return
    // its external name; fall back to the original name when nothing matches.
    for (BeanPropertyDefinition definition : beanDescription.findProperties()) {
      if (definition.getInternalName().equals(property.getName())) {
        return definition.getName();
      }
    }
    return property.getName();
  }
}
|
/// Returns the element that occurs most often across both arrays combined,
/// or `nil` when both arrays are empty.
///
/// Fixes over the previous version: the guard inspected only `array1`, so a
/// non-empty `array2` with an empty `array1` wrongly returned `nil`; the
/// unused `lastElement` binding is gone.  On ties the element appearing
/// first in `array1 + array2` wins (unchanged behavior).
/// Note: counting is O(n^2); fine for the small inputs used here.
func mostCommonElement<T: Comparable>(array1: [T], array2: [T]) -> T? {
    let combined = array1 + array2
    guard var mostCommon = combined.first else {
        return nil
    }
    var mostCommonCount = 0
    for element in combined {
        let count = combined.filter({ $0 == element }).count
        if count > mostCommonCount {
            mostCommonCount = count
            mostCommon = element
        }
    }
    return mostCommon
}
mostCommonElement(array1: [1, 3, 4], array2: [2, 3, 5]) // returns 3
#!/usr/bin/env bash
# Run `dotnet format` over every source and test project.
# Fail fast: stop at the first project that fails to format.
set -euo pipefail

BASEDIR=$(dirname "$0")
src="$BASEDIR/src"
test="$BASEDIR/test"

# Quote the paths so a checkout under a directory with spaces still works.
dotnet format "$src/Language"
dotnet format "$src/Language.SyntaxTree"
dotnet format "$src/Language.Utf8"
dotnet format "$src/Language.Visitors"
dotnet format "$test/Language.Tests"
<filename>Example/JobsGlobleDef/Pods/JobsGlobleDef/JobsGlobleDefCore/Classes/MacroDef/MacroDef_Cor/MacroDef_Cor.h
//
//  MacroDef_Cor.h
//  UBallLive
//
//  Created by Jobs on 2020/10/9.
//
#ifndef MacroDef_Cor_h
#define MacroDef_Cor_h
#pragma mark ======================================== Colors ========================================
#define kTableViewBackgroundColor HEXCOLOR(0xf6f5fa)
///Common colors
#define kClearColor [UIColor clearColor]
#define kBlackColor [UIColor blackColor]
#define kBlueColor [UIColor blueColor]
#define kWhiteColor [UIColor whiteColor]
#define kCyanColor [UIColor cyanColor]
#define kGrayColor [UIColor grayColor]
#define kOrangeColor [UIColor orangeColor]
#define kRedColor [UIColor redColor]
#define KBrownColor [UIColor brownColor]
#define KDarkGrayColor [UIColor darkGrayColor]
#define KDarkTextColor [UIColor darkTextColor]
#define KYellowColor [UIColor yellowColor]
#define KPurpleColor [UIColor purpleColor]
#define KLightTextColor [UIColor lightTextColor]
#define KLightGrayColor [UIColor lightGrayColor]
#define KGreenColor [UIColor greenColor]
#define KMagentaColor [UIColor magentaColor]
///System colors
/* Some colors that are used by system elements and applications.
 * These return named colors whose values may vary between different contexts and releases.
 * Do not make assumptions about the color spaces or actual colors used.
 */
#define KSystemRedColor [UIColor systemRedColor]
#define KSystemGreenColor [UIColor systemGreenColor]
#define KSystemBlueColor [UIColor systemBlueColor]
#define KSystemOrangeColor [UIColor systemOrangeColor]
#define KSystemYellowColor [UIColor systemYellowColor]
#define KSystemPinkColor [UIColor systemPinkColor]
#define KSystemPurpleColor [UIColor systemPurpleColor]
#define KSystemTealColor [UIColor systemTealColor]
#define KSystemIndigoColor [UIColor systemIndigoColor]
#define KSystemGrayColor [UIColor systemGrayColor]
/* The numbered variations, systemGray2 through systemGray6, are grays which increasingly
 * trend away from systemGray and in the direction of systemBackgroundColor.
 *
 * In UIUserInterfaceStyleLight: systemGray1 is slightly lighter than systemGray.
 * systemGray2 is lighter than that, and so on.
 * In UIUserInterfaceStyleDark: systemGray1 is slightly darker than systemGray.
 * systemGray2 is darker than that, and so on.
 */
#define KSystemGray2Color [UIColor systemGray2Color]
#define KSystemGray3Color [UIColor systemGray3Color]
#define KSystemGray4Color [UIColor systemGray4Color]
#define KSystemGray5Color [UIColor systemGray5Color]
#define KSystemGray6Color [UIColor systemGray6Color]
/* Foreground colors for static text and related elements.
 */
#define KLabelColor [UIColor labelColor]
#define KSecondaryLabelColor [UIColor secondaryLabelColor]
#define KTertiaryLabelColor [UIColor tertiaryLabelColor]
#define KQuaternaryLabelColor [UIColor quaternaryLabelColor]
/* Foreground color for standard system links.
 */
#define KLinkColor [UIColor linkColor]
#define KPlaceholderTextColor [UIColor placeholderTextColor]
#define KSeparatorColor [UIColor separatorColor]
#define KOpaqueSeparatorColor [UIColor opaqueSeparatorColor]
///Background colors
/* We provide two design systems (also known as "stacks") for structuring an iOS app's backgrounds.
 *
 * Each stack has three "levels" of background colors. The first color is intended to be the
 * main background, farthest back. Secondary and tertiary colors are layered on top
 * of the main background, when appropriate.
 *
 * Inside of a discrete piece of UI, choose a stack, then use colors from that stack.
 * We do not recommend mixing and matching background colors between stacks.
 * The foreground colors above are designed to work in both stacks.
 *
 * 1. systemBackground
 *    Use this stack for views with standard table views, and designs which have a white
 *    primary background in light mode.
 */
#define KSystemBackgroundColor [UIColor systemBackgroundColor]
#define KSecondarySystemBackgroundColor [UIColor secondarySystemBackgroundColor]
#define KTertiarySystemBackgroundColor [UIColor tertiarySystemBackgroundColor]
/* 2. systemGroupedBackground
 *    Use this stack for views with grouped content, such as grouped tables and
 *    platter-based designs. These are like grouped table views, but you may use these
 *    colors in places where a table view wouldn't make sense.
 */
#define KSystemGroupedBackgroundColor [UIColor systemGroupedBackgroundColor]
#define KSecondarySystemGroupedBackgroundColor [UIColor secondarySystemGroupedBackgroundColor]
#define KTertiarySystemGroupedBackgroundColor [UIColor tertiarySystemGroupedBackgroundColor]
///Fill colors
/* Fill colors for UI elements.
 * These are meant to be used over the background colors, since their alpha component is less than 1.
 *
 * systemFillColor is appropriate for filling thin and small shapes.
 * Example: The track of a slider.
 */
#define KSystemFillColor [UIColor systemFillColor]
/* secondarySystemFillColor is appropriate for filling medium-size shapes.
 * Example: The background of a switch.
 */
#define KSecondarySystemFillColor [UIColor secondarySystemFillColor]
/* tertiarySystemFillColor is appropriate for filling large shapes.
 * Examples: Input fields, search bars, buttons.
 */
#define KTertiarySystemFillColor [UIColor tertiarySystemFillColor]
/* quaternarySystemFillColor is appropriate for filling large areas containing complex content.
 * Example: Expanded table cells.
 */
#define KQuaternarySystemFillColor [UIColor quaternarySystemFillColor]
///Other colors
/* lightTextColor is always light, and darkTextColor is always dark, regardless of the current UIUserInterfaceStyle.
 * When possible, we recommend using `labelColor` and its variants, instead.
 * NOTE(review): the two macros below repeat the identical definitions from
 * the "Common colors" section above; harmless, since identical object-like
 * macro redefinitions are allowed.
 */
#define KLightTextColor [UIColor lightTextColor]
#define KDarkTextColor [UIColor darkTextColor]
/* NOTE(review): this comment originally read "systemGroupedBackgroundColor is
 * now the same as systemGroupedBackgroundColor"; the macro below duplicates
 * the identical definition from the Background colors section (harmless).
 */
#define KSystemGroupedBackgroundColor [UIColor systemGroupedBackgroundColor]
#define KViewFlipsideBackgroundColor [UIColor viewFlipsideBackgroundColor]
#define KScrollViewTexturedBackgroundColor [UIColor scrollViewTexturedBackgroundColor]
#define KUnderPageBackgroundColor [UIColor underPageBackgroundColor]
///RGB colors
#define RGBA_SAMECOLOR(x,a) [UIColor colorWithRed:(x)/255.0 green:(x)/255.0 blue:(x)/255.0 alpha:a]
#define RGB_SAMECOLOR(x) [UIColor colorWithRed:(x)/255.0 green:(x)/255.0 blue:(x)/255.0 alpha:1]
#define RGBA_COLOR(r,g,b,a) [UIColor colorWithRed:(r)/255.0 green:(g)/255.0 blue:(b)/255.0 alpha:a]
#define RGB_COLOR(r,g,b) [UIColor colorWithRed:(r)/255.0 green:(g)/255.0 blue:(b)/255.0 alpha:1]
///Random color
#define RandomColor [UIColor colorWithRed:arc4random_uniform(256) / 255.0 \
                                    green:arc4random_uniform(256) / 255.0 \
                                     blue:arc4random_uniform(256) / 255.0 \
                                    alpha:1] \
///Hexadecimal colors
#define HEXCOLOR(hexValue) [UIColor colorWithRed:((float)((hexValue & 0xFF0000) >> 16))/255.0 green:((float)((hexValue & 0xFF00) >> 8))/255.0 blue:((float)(hexValue & 0xFF))/255.0 alpha:1]
#define HEXCOLOR_ALPHA(hexValue, al) [UIColor colorWithRed:((float)((hexValue & 0xFF0000) >> 16))/255.0 green:((float)((hexValue & 0xFF00) >> 8))/255.0 blue:((float)(hexValue & 0xFF))/255.0 alpha:al]
#endif /* MacroDef_Cor_h */
|
import L from 'leaflet';
import axios from 'axios';
// Create the map and fit it to the bounding box of the area of interest.
var map = L.map('map', {
    zoomControl: true,
    maxZoom: 28,
    minZoom: 1
}).fitBounds([
    [-10.27244102712632, 11.78521081569146],
    [4.808373156945728, 32.29619908606451]
]);

// OpenStreetMap base layer, on its own pane so it stacks below the overlays.
map.createPane('pane_OpenStreetMap_1');
map.getPane('pane_OpenStreetMap_1').style.zIndex = 401;
L.tileLayer('https://tile.openstreetmap.org/{z}/{x}/{y}.png', {
    pane: 'pane_OpenStreetMap_1',
    opacity: 1.0,
    attribution: '',
    minZoom: 1,
    maxZoom: 28,
    minNativeZoom: 0,
    maxNativeZoom: 19
}).addTo(map);

// Per-feature hook: currently just logs the click event for debugging.
function handleParc(feature, layer) {
    layer.on({
        click: (e) => console.log(e),
    })
}

var bounds_group = new L.featureGroup([]);

// Province-boundary overlay pane, above the base layer (402 > 401).
map.createPane('pane_limiteprovince');
map.getPane('pane_limiteprovince').style.zIndex = 402;
// map.getPane('pane_limiteprovince').style['mix-blend-mode'] = 'normal';
var provincelayer = L.geoJSON(null, {
    onEachFeature: handleParc,
    pane: 'pane_limiteprovince',
    layerName: 'provincelayer',
    style: {
        color: "#00008c",
        opacity: 0.6,
        fillColor: '#333333',
        fillOpacity: 1
    }
}).addTo(map)

// Fetch the province boundaries and feed them into the (initially empty)
// GeoJSON layer.  The endpoint returns the geometry as a JSON *string*,
// hence the extra JSON.parse.
axios.get('/limiteProvinceRdc').then(data => {
    var dta = data.data.limitprovince;
    var pdta = JSON.parse(dta)
    provincelayer.addData(pdta);
    provincelayer.addTo(map)
    console.log(map);
    // L.control.layers(provincelayer).addTo(bounds_group);
    // // bounds_group.addLayer(provincelayer)
    // map.addLayer(provincelayer)
    // console.log(map)
})
TERMUX_PKG_HOMEPAGE=https://cfengine.com/
TERMUX_PKG_DESCRIPTION="CFEngine is a configuration management technology."
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_MAINTAINER="@craigcomstock"
TERMUX_PKG_VERSION=1:3.18.0
TERMUX_PKG_REVISION=3
# Three sources: cfengine core, plus pinned masterfiles and libntech commits.
TERMUX_PKG_SRCURL=(https://github.com/cfengine/core/archive/${TERMUX_PKG_VERSION:2}.zip
		   https://github.com/cfengine/masterfiles/archive/12b52c25e03439341aa7a6a5c7917efa06826f8d.zip
		   https://github.com/cfengine/libntech/archive/118d6e4bf5ae2611236fe3883b422d50f10da45c.zip)
TERMUX_PKG_SHA256=(846f4cf2a6154817c730b847cacc6f9aacd32c51abc00c137f56650d85e47134
		   9372e0c65322dc85c5f6f95be175ac0858c94d5ffb54317e8e332ddac634657a
		   49e03c1daf913bbe370a56aac03b0d2a7250d108c91b39780487304b3e6ac047)
TERMUX_PKG_DEPENDS="liblmdb, openssl, libandroid-glob, pcre, libyaml, libxml2"
# core doesn't work with out-of-tree builds
TERMUX_PKG_BUILD_IN_SRC=true
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="--with-workdir=$TERMUX_PREFIX/var/lib/cfengine --without-pam --without-selinux-policy --without-systemd-service --with-lmdb=$TERMUX_PREFIX --with-openssl=$TERMUX_PREFIX --with-yaml=$TERMUX_PREFIX --with-pcre=$TERMUX_PREFIX --with-prefix=$TERMUX_PREFIX --with-libxml2=$TERMUX_PREFIX"

termux_step_post_get_source() {
	# commit-based zips from github include the commit sha so rename to normalize for later steps
	mv masterfiles-* masterfiles
	rm -rf libntech
	mv libntech-* libntech
}

termux_step_pre_configure() {
	# ${TERMUX_PKG_VERSION:2} strips the "1:" epoch prefix from the version.
	export EXPLICIT_VERSION=${TERMUX_PKG_VERSION:2}
	export LDFLAGS+=" -landroid-glob"
	# NO_CONFIGURE=1: generate the configure scripts here; the build system
	# runs configure itself in a later step.
	NO_CONFIGURE=1 ./autogen.sh $TERMUX_PKG_EXTRA_CONFIGURE_ARGS --prefix=$TERMUX_PREFIX/var/lib/cfengine --bindir=$TERMUX_PREFIX/bin
	# masterfiles are configured and installed immediately (policy files only).
	cd masterfiles
	./autogen.sh --prefix=$TERMUX_PREFIX/var/lib/cfengine --bindir=$TERMUX_PREFIX/bin
	make install
}

termux_step_create_debscripts() {
	# postinst: generate a CFEngine host key on first install if none exists.
	cat << EOF > ./postinst
#!$TERMUX_PREFIX/bin/sh
# Generate a host key
if [ ! -f $TERMUX_PREFIX/var/lib/cfengine/ppkeys/localhost.priv ]; then
    $TERMUX_PREFIX/bin/cf-key >/dev/null || :
fi
EOF
}
|
import { without } from 'lodash';
import React from 'react';
import { SUPPORTED_LOCALES } from '../../features/i18n';
import { useI18nFeature } from '../../features/i18n/context';
import Container from '../container/Container';
import LanguageSwitcher from '../language-switcher/LanguageSwitcher';
import styles from './Layout.module.scss';
interface IProps {
  children: React.ReactNode;
  header?: React.ReactNode;
}

/**
 * Page shell: renders the language switcher on top, then the optional
 * header and the page content wrapped in a Container.
 */
export const Layout = ({ children, header }: IProps) => {
  const i18n = useI18nFeature();
  const { locale, translate } = i18n.store;

  // Every supported locale except the active one becomes a switcher option.
  const otherLocales = without(SUPPORTED_LOCALES, locale);

  return (
    <>
      <LanguageSwitcher
        currentLanguage={{
          code: locale,
          title: translate(`locales.${locale}`),
        }}
        languageOptions={otherLocales.map((code) => ({
          code,
          title: translate(`locales.${code}`),
        }))}
        onLanguageSwitch={(nextLocale) =>
          i18n.actions.switchLocale.trigger({ locale: nextLocale })
        }
      />
      <div className={styles.content}>
        {header}
        <Container>{children}</Container>
      </div>
    </>
  );
};
|
<reponame>schinmayee/nimbus
/*
* Copyright 2013 Stanford University.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* - Neither the name of the copyright holders nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* Author: <NAME> <<EMAIL>>
*/
#include <sstream>
#include <string>
#include <vector>
#include "applications/physbam/water//app_utils.h"
#include "applications/physbam/water//data_names.h"
#include "applications/physbam/water//job_names.h"
#include "applications/physbam/water//physbam_utils.h"
#include "applications/physbam/water//reg_def.h"
#include "src/shared/dbg.h"
#include "src/shared/nimbus.h"
#include "src/worker/worker_thread.h"
#include "applications/physbam/water//projection/job_projection_main.h"
namespace application {

// Constructs the job and records the owning application.
JobProjectionMain::JobProjectionMain(nimbus::Application *app) {
  set_application(app);
};

// Required by nimbus: produce a fresh instance for job dispatch.
nimbus::Job* JobProjectionMain::Clone() {
  return new JobProjectionMain(application());
}

// Deserializes frame/time/dt from the job parameter blob and delegates the
// actual job spawning to SpawnJobs().
void JobProjectionMain::Execute(
    nimbus::Parameter params,
    const nimbus::DataArray& da) {
  dbg(APP_LOG, "Executing PROJECTION_MAIN job\n");
  // Get parameters: frame, time
  InitConfig init_config;
  std::string params_str(params.ser_data().data_ptr_raw(),
                         params.ser_data().size());
  LoadParameter(params_str, &init_config);
  T dt = init_config.dt;
  const int& frame = init_config.frame;
  const T& time = init_config.time;
  dbg(APP_LOG, "Frame %i and time %f in PROJECTION_MAIN job\n",
      frame, time);
  SpawnJobs(frame, time, dt, da, init_config.global_region);
}

// Spawns the projection pipeline for one step: boundary conditions (two
// parts), matrix construction, global + local initialization, and finally
// the first PROJECTION_LOOP_ITERATION job.  Stages are separated by
// MarkEndOfStage(); read/write sets define the data dependencies the
// scheduler uses to order jobs.
void JobProjectionMain::SpawnJobs(
    int frame, T time, T dt, const nimbus::DataArray& da,
    const nimbus::GeometricRegion& global_region) {
  struct timeval start_time;
  gettimeofday(&start_time, NULL);
  // nimbus::JobQuery job_query(this);
  int projection_job_num = 5;
  std::vector<nimbus::job_id_t> projection_job_ids;
  GetNewJobID(&projection_job_ids, projection_job_num);
  nimbus::IDSet<nimbus::logical_data_id_t> read, write;
  nimbus::IDSet<nimbus::job_id_t> before, after;

  // Parameter blob shared by the non-partitioned jobs (whole global region).
  nimbus::Parameter default_params;
  std::string default_params_str;
  SerializeParameter(
      frame, time, dt, kPNAInt,
      global_region, global_region,
      kPNAInt, &default_params_str);
  default_params.set_ser_data(SerializedData(default_params_str));

  std::vector<nimbus::Parameter> default_part_params;
  default_part_params.resize(kProjAppPartNum);

  StartTemplate("projection_main");

  // One parameter blob per partition, carrying that partition's region.
  for (uint64_t i = 0; i < kProjAppPartNum; ++i) {
    std::string default_params_str;
    SerializeParameter(
        frame, time, dt, kPNAInt,
        global_region, ph.map()["kProjRegY2W0Central"][i],
        kPNAInt, &default_params_str);
    default_part_params[i].set_ser_data(SerializedData(default_params_str));
  }

  // Allocate job ids for every per-partition job family up front.
  int construct_matrix_job_num = kProjAppPartNum;
  std::vector<nimbus::job_id_t> construct_matrix_job_ids;
  GetNewJobID(&construct_matrix_job_ids, construct_matrix_job_num);

  int local_initialize_job_num = kProjAppPartNum;
  std::vector<nimbus::job_id_t> local_initialize_job_ids;
  GetNewJobID(&local_initialize_job_ids, local_initialize_job_num);

  int calculate_boundary_condition_part_one_job_num = kProjAppPartNum;
  std::vector<nimbus::job_id_t> calculate_boundary_condition_part_one_job_ids;
  GetNewJobID(&calculate_boundary_condition_part_one_job_ids,
              calculate_boundary_condition_part_one_job_num);

  int calculate_boundary_condition_part_two_job_num = kProjAppPartNum;
  std::vector<nimbus::job_id_t> calculate_boundary_condition_part_two_job_ids;
  GetNewJobID(&calculate_boundary_condition_part_two_job_ids,
              calculate_boundary_condition_part_two_job_num);

  // Stage: boundary conditions, part one (one job per partition).
  for (int index = 0;
       index < calculate_boundary_condition_part_one_job_num;
       ++index) {
    read.clear();
    LoadLdoIdsInSet(&read, ph.map()["kProjRegY2W3Outer"][index], APP_FACE_VEL, APP_PHI, NULL);
    LoadLdoIdsInSet(&read, ph.map()["kProjRegY2W1Outer"][index],
                    APP_DIVERGENCE, APP_PSI_D, APP_PSI_N,
                    APP_FILLED_REGION_COLORS, APP_PRESSURE, NULL);
    LoadLdoIdsInSet(&read, ph.map()["kProjRegY2W0Central"][index],
                    APP_U_INTERFACE, NULL);
    write.clear();
    LoadLdoIdsInSet(&write, ph.map()["kProjRegY2W3CentralWGB"][index], APP_FACE_VEL, APP_PHI, NULL);
    LoadLdoIdsInSet(&write, ph.map()["kProjRegY2W1CentralWGB"][index],
                    APP_DIVERGENCE, APP_PSI_D, APP_PSI_N,
                    APP_FILLED_REGION_COLORS, APP_PRESSURE, NULL);
    LoadLdoIdsInSet(&write, ph.map()["kProjRegY2W0Central"][index], APP_U_INTERFACE, NULL);
    before.clear();
    StageJobAndLoadBeforeSet(&before, PROJECTION_CALCULATE_BOUNDARY_CONDITION_PART_ONE,
                             calculate_boundary_condition_part_one_job_ids[index],
                             read, write);
    SpawnComputeJob(PROJECTION_CALCULATE_BOUNDARY_CONDITION_PART_ONE,
                    calculate_boundary_condition_part_one_job_ids[index],
                    read, write, before, after,
                    default_part_params[index], true,
                    ph.map()["kProjRegY2W3Central"][index]);
  }
  MarkEndOfStage();

  // Stage: boundary conditions, part two (same read/write shape as part one).
  for (int index = 0;
       index < calculate_boundary_condition_part_two_job_num;
       ++index) {
    read.clear();
    LoadLdoIdsInSet(&read, ph.map()["kProjRegY2W3Outer"][index], APP_FACE_VEL, APP_PHI, NULL);
    LoadLdoIdsInSet(&read, ph.map()["kProjRegY2W1Outer"][index],
                    APP_DIVERGENCE, APP_PSI_D, APP_PSI_N,
                    APP_FILLED_REGION_COLORS, APP_PRESSURE, NULL);
    LoadLdoIdsInSet(&read, ph.map()["kProjRegY2W0Central"][index], APP_U_INTERFACE, NULL);
    write.clear();
    LoadLdoIdsInSet(&write, ph.map()["kProjRegY2W3CentralWGB"][index], APP_FACE_VEL, APP_PHI, NULL);
    LoadLdoIdsInSet(&write, ph.map()["kProjRegY2W1CentralWGB"][index],
                    APP_DIVERGENCE, APP_PSI_D, APP_PSI_N,
                    APP_FILLED_REGION_COLORS, APP_PRESSURE, NULL);
    LoadLdoIdsInSet(&write, ph.map()["kProjRegY2W0Central"][index],
                    APP_U_INTERFACE, NULL);
    before.clear();
    StageJobAndLoadBeforeSet(&before, PROJECTION_CALCULATE_BOUNDARY_CONDITION_PART_TWO,
                             calculate_boundary_condition_part_two_job_ids[index],
                             read, write);
    SpawnComputeJob(PROJECTION_CALCULATE_BOUNDARY_CONDITION_PART_TWO,
                    calculate_boundary_condition_part_two_job_ids[index],
                    read, write, before, after,
                    default_part_params[index], true,
                    ph.map()["kProjRegY2W3Central"][index]);
  }
  MarkEndOfStage();

  // Construct matrix.
  for (int index = 0; index < construct_matrix_job_num; ++index) {
    read.clear();
    LoadLdoIdsInSet(&read, ph.map()["kProjRegY2W3Outer"][index], APP_FACE_VEL, APP_PHI, NULL);
    LoadLdoIdsInSet(&read, ph.map()["kProjRegY2W1Outer"][index],
                    APP_DIVERGENCE, APP_PSI_D, APP_PSI_N,
                    APP_FILLED_REGION_COLORS, APP_PRESSURE, NULL);
    LoadLdoIdsInSet(&read, ph.map()["kProjRegY2W0Central"][index], APP_U_INTERFACE, NULL);
    write.clear();
    LoadLdoIdsInSet(&write, ph.map()["kProjRegY2W3CentralWGB"][index], APP_FACE_VEL, APP_PHI, NULL);
    LoadLdoIdsInSet(&write, ph.map()["kProjRegY2W1CentralWGB"][index],
                    APP_DIVERGENCE, APP_PSI_D, APP_PSI_N,
                    APP_FILLED_REGION_COLORS, APP_PRESSURE, NULL);
    // In addition to the shared data, matrix construction produces the
    // per-partition linear-system data (A, b, index maps, sizes, tolerance).
    LoadLdoIdsInSet(&write, ph.map()["kProjRegY2W0Central"][index],
                    APP_U_INTERFACE, APP_MATRIX_A,
                    APP_VECTOR_B, APP_PROJECTION_LOCAL_TOLERANCE,
                    APP_INDEX_M2C, APP_INDEX_C2M,
                    APP_PROJECTION_LOCAL_N, APP_PROJECTION_INTERIOR_N,
                    NULL);
    before.clear();
    StageJobAndLoadBeforeSet(&before, PROJECTION_CONSTRUCT_MATRIX,
                             construct_matrix_job_ids[index],
                             read, write);
    SpawnComputeJob(PROJECTION_CONSTRUCT_MATRIX,
                    construct_matrix_job_ids[index],
                    read, write, before, after,
                    default_part_params[index], true,
                    ph.map()["kProjRegY2W3Central"][index]);
  }
  MarkEndOfStage();

  // Global initialize.  Reduces per-partition sizes/tolerances into the
  // global problem size, tolerance and iteration budget.
  read.clear();
  LoadLdoIdsInSet(&read, ph.map()["kRegW0Central"][0],
                  APP_PROJECTION_INTERIOR_N, APP_PROJECTION_LOCAL_TOLERANCE,
                  NULL);
  write.clear();
  LoadLdoIdsInSet(&write, ph.map()["kRegW0Central"][0],
                  APP_PROJECTION_GLOBAL_N,
                  APP_PROJECTION_GLOBAL_TOLERANCE,
                  APP_PROJECTION_DESIRED_ITERATIONS, NULL);
  before.clear();
  StageJobAndLoadBeforeSet(&before, PROJECTION_GLOBAL_INITIALIZE,
                           projection_job_ids[3],
                           read, write);
  SpawnComputeJob(PROJECTION_GLOBAL_INITIALIZE,
                  projection_job_ids[3],
                  read, write, before, after,
                  default_params, true,
                  ph.map()["kRegW3Central"][0]);
  // Global initialize is a job that serves as a bottleneck.
  MarkEndOfStage();

  // Local initialize.
  for (int index = 0; index < local_initialize_job_num; ++index) {
    read.clear();
    LoadLdoIdsInSet(&read, ph.map()["kProjRegY2W0Central"][index],
                    APP_PROJECTION_LOCAL_N, APP_PROJECTION_INTERIOR_N,
                    APP_INDEX_M2C,
                    APP_INDEX_C2M,
                    APP_VECTOR_B,
                    APP_MATRIX_A, NULL);
    LoadLdoIdsInSet(&read, ph.map()["kProjRegY2W1Outer"][index], APP_PRESSURE,
                    NULL);
    write.clear();
    LoadLdoIdsInSet(&write, ph.map()["kProjRegY2W0Central"][index],
                    APP_VECTOR_B, APP_PROJECTION_LOCAL_RESIDUAL, APP_MATRIX_C,
                    APP_VECTOR_TEMP, APP_VECTOR_Z,
                    APP_VECTOR_PRESSURE,
                    NULL);
    LoadLdoIdsInSet(&write, ph.map()["kProjRegY2W1CentralWGB"][index],
                    APP_VECTOR_P_META_FORMAT, NULL);
    before.clear();
    StageJobAndLoadBeforeSet(&before, PROJECTION_LOCAL_INITIALIZE,
                             local_initialize_job_ids[index],
                             read, write);
    SpawnComputeJob(PROJECTION_LOCAL_INITIALIZE,
                    local_initialize_job_ids[index],
                    read, write, before, after,
                    default_part_params[index], true,
                    ph.map()["kProjRegY2W3Central"][index]);
  }
  MarkEndOfStage();

  // Projection loop.  Spawned with iteration counter 1 in its parameters;
  // the loop job re-spawns itself until convergence.
  read.clear();
  LoadLdoIdsInSet(&read, ph.map()["kRegW0Central"][0],
                  APP_PROJECTION_INTERIOR_N,
                  APP_PROJECTION_LOCAL_RESIDUAL,
                  APP_PROJECTION_GLOBAL_TOLERANCE,
                  APP_PROJECTION_DESIRED_ITERATIONS,
                  NULL);
  write.clear();
  nimbus::Parameter projection_loop_iteration_params;
  std::string projection_loop_iteration_str;
  SerializeParameter(
      frame, time, dt, kPNAInt,
      global_region, global_region,
      1, &projection_loop_iteration_str);
  projection_loop_iteration_params.set_ser_data(
      SerializedData(projection_loop_iteration_str));
  before.clear();
  StageJobAndLoadBeforeSet(&before, PROJECTION_LOOP_ITERATION,
                           projection_job_ids[4],
                           read, write,
                           true);
  SpawnComputeJob(PROJECTION_LOOP_ITERATION,
                  projection_job_ids[4],
                  read, write, before, after,
                  projection_loop_iteration_params, false,
                  ph.map()["kRegW3Central"][0]);
  MarkEndOfStage();

  EndTemplate("projection_main");

  // job_query.PrintTimeProfile();
  // if (time == 0) {
  //   dbg(APP_LOG, "Print job dependency figure.\n");
  //   job_query.GenerateDotFigure("projection_main.dot");
  // }
  {
    // Report elapsed wall-clock time.  Note: the local 'time' below shadows
    // the simulation-time parameter of the same name.
    struct timeval t;
    gettimeofday(&t, NULL);
    double time = (static_cast<double>(t.tv_sec - start_time.tv_sec)) +
        .000001 * (static_cast<double>(t.tv_usec - start_time.tv_usec));
    dbg(APP_LOG, "\nThe query time spent in job PROJECTION_LOOP_ITERATION_MAIN is %f seconds.\n",
        time);
  }
}

}  // namespace application
|
import React from 'react'
import Menu from '../components/Menu'
export default function BasicLayout(props) {
const { children, menuColor } = props;
return (
<>
<Menu menuColor={menuColor} />
{children}
</>
)
}
|
<gh_stars>10-100
/*
* Copyright 2014 akquinet engineering GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package de.akquinet.engineering.vaadinator.model;
/**
 * Describes how one bean property participates in a named map profile:
 * whether it is included/excluded, the target property it maps onto, and
 * mapping flags (read-only, deep, mandatory).
 */
public class MapPropertyProfileDescription {

	private PropertyDescription property;
	private String profileName;
	private boolean excluded = false;
	private boolean included = false;
	private String targetPropertyName;
	private String targetPropertyClassName = null;
	private boolean readonly = false;
	private boolean deep = false;
	private boolean mandatory = false;

	public MapPropertyProfileDescription() {
		super();
	}

	public MapPropertyProfileDescription(PropertyDescription property, String profileName, String targetPropertyName) {
		super();
		this.property = property;
		this.profileName = profileName;
		this.targetPropertyName = targetPropertyName;
	}

	public PropertyDescription getProperty() {
		return property;
	}

	public void setProperty(PropertyDescription property) {
		this.property = property;
	}

	public String getProfileName() {
		return profileName;
	}

	public void setProfileName(String profileName) {
		this.profileName = profileName;
	}

	/**
	 * Resolves the owning map profile via the property's bean.
	 *
	 * @return the profile, or {@code null} when the property/bean chain is
	 *         incomplete
	 */
	public MapProfileDescription getProfile() {
		if (getProperty() == null) {
			return null;
		}
		if (getProperty().getBean() == null) {
			return null;
		}
		return getProperty().getBean().getMapProfile(getProfileName());
	}

	public boolean isExcluded() {
		return excluded;
	}

	public void setExcluded(boolean excluded) {
		this.excluded = excluded;
	}

	public boolean isIncluded() {
		return included;
	}

	public void setIncluded(boolean included) {
		this.included = included;
	}

	public String getTargetPropertyName() {
		return targetPropertyName;
	}

	public void setTargetPropertyName(String targetPropertyName) {
		this.targetPropertyName = targetPropertyName;
	}

	/**
	 * @return JavaBean getter name ({@code getFoo}) for the target property,
	 *         or {@code null} when no target property name is set
	 */
	public String getTargetPropertyGetterName() {
		if (getTargetPropertyName() == null) {
			return null;
		}
		return "get" + capitalize(getTargetPropertyName());
	}

	/**
	 * @return JavaBean setter name ({@code setFoo}) for the target property,
	 *         or {@code null} when no target property name is set
	 */
	public String getTargetPropertySetterName() {
		if (getTargetPropertyName() == null) {
			return null;
		}
		return "set" + capitalize(getTargetPropertyName());
	}

	/**
	 * Upper-cases the first character (JavaBean accessor naming). Extracted
	 * so the getter/setter name builders share one implementation.
	 * Like the original inline code, an empty input throws
	 * {@link StringIndexOutOfBoundsException}.
	 */
	private static String capitalize(String name) {
		return name.substring(0, 1).toUpperCase() + name.substring(1);
	}

	public String getTargetPropertyClassName() {
		return targetPropertyClassName;
	}

	public void setTargetPropertyClassName(String targetPropertyClassName) {
		this.targetPropertyClassName = targetPropertyClassName;
	}

	public boolean isDeep() {
		return deep;
	}

	public void setDeep(boolean deep) {
		this.deep = deep;
	}

	public boolean isReadonly() {
		return readonly;
	}

	public void setReadonly(boolean readonly) {
		this.readonly = readonly;
	}

	public boolean isMandatory() {
		return mandatory;
	}

	public void setMandatory(boolean mandatory) {
		this.mandatory = mandatory;
	}

	@Override
	public String toString() {
		return "MapPropertyProfileDescription [profileName=" + profileName + ", excluded=" + excluded + ", included=" + included
				+ ", targetPropertyName=" + targetPropertyName + ", targetPropertyClassName=" + targetPropertyClassName + ", deep=" + deep
				+ ", readonly=" + readonly + ", mandatory=" + mandatory + ", getTargetPropertyGetterName()=" + getTargetPropertyGetterName() + "]";
	}
}
|
#!/usr/bin/env bash
set -euo pipefail

# Package the project's node_modules as a Lambda-layer zip: layer zips must
# contain a top-level "nodejs/" directory, so node_modules is temporarily
# moved under build/nodejs, zipped, then restored.

# Get the directory of this file (where the package.json file is located)
bin_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
cd "$bin_dir/../"

artifact_dir="${bin_dir}/../build/artifacts"
mkdir -p "${artifact_dir}"
artifact_path="${artifact_dir}/layer-node-modules.zip"

echo "Moving node_modules directory"
rm -rf build/nodejs
# -p also creates "build" itself on a fresh checkout (plain mkdir would fail)
mkdir -p build/nodejs
mv node_modules build/nodejs/

cd "build"
echo "Zipping node_modules directory to ${artifact_path}"
# Quote the path so a workspace directory containing spaces cannot split it
time zip -rqX "${artifact_path}" nodejs
# Restore node_modules to the project root
time mv nodejs/node_modules ../node_modules
|
<reponame>lukebigum/puppet-consul
require 'json'
require 'net/http'
require 'pp'
require 'uri'
require File.expand_path(File.join(File.dirname(__FILE__), "..", "..", "..", "puppet_x", "consul", "acl_base.rb"))
Puppet::Type.type(:consul_token).provide(
  :default
) do
  mk_resource_methods

  # Fetch the remote token list once and match each catalog resource to its
  # existing token (by accessor_id), pre-loading a provider with that state.
  def self.prefetch(resources)
    resources.each do |name, resource|
      tokens = list_tokens(resource[:acl_api_token], resource[:hostname], resource[:port], resource[:protocol], resource[:ca_file], resource[:api_tries])
      token = tokens.select{|token| token.accessor_id == resource[:accessor_id]}
      resource.provider = new({}, @client, token.any? ? token.first : nil, resource)
    end
  end

  # Lazily build the API client and memoise the full token list so the API
  # is hit only once per run.
  def self.list_tokens(acl_api_token, hostname, port, protocol, ca_file, tries)
    @token_collection ||= nil
    if @token_collection
      return @token_collection
    end
    @client ||= ConsulACLTokenClient.new(hostname, port, protocol, acl_api_token, ca_file)
    @token_collection = @client.get_token_list(tries)
    @token_collection
  end

  def initialize(messages, client = nil, existing_token = nil, resource = nil)
    super(messages)
    @property_flush = {}
    @client = client
    @existing_token = existing_token
    if resource
      @property_hash = {
        :secret_id => resource[:secret_id],
      }
    end
    if existing_token
      @property_hash[:accessor_id] = existing_token.accessor_id
      # Only report the policy lists as in-sync when the remote token's
      # links already match the catalog; otherwise Puppet flags drift.
      if existing_token.is_policy_list_equal(resource[:policies_by_id], resource[:policies_by_name])
        @property_hash[:policies_by_id] = resource[:policies_by_id]
        @property_hash[:policies_by_name] = resource[:policies_by_name]
      end
    end
  end

  def exists?
    @existing_token
  end

  def create
    @property_flush[:ensure] = :present
  end

  def destroy
    @property_flush[:ensure] = :absent
  end

  # Apply pending state: create a missing token, update one whose policy
  # links drifted, or delete one that should be absent.
  def flush
    if @resource[:ensure] != :absent && !@existing_token
      created_token = @client.create_token(@resource[:accessor_id], @resource[:name], @resource[:policies_by_name], @resource[:policies_by_id], @resource[:api_tries], @resource[:secret_id] ? @resource[:secret_id] : nil )
      @resource[:accessor_id] = created_token.accessor_id
      @resource[:secret_id] = created_token.secret_id
      Puppet.info("Created token #{created_token.description} with Accessor ID #{created_token.accessor_id}")
    elsif @resource[:ensure] != :absent && @existing_token && !@existing_token.is_policy_list_equal(@resource[:policies_by_id], @resource[:policies_by_name])
      new_policy_list = @client.update_token(@existing_token.accessor_id, @existing_token.description, @resource[:policies_by_name], @resource[:policies_by_id])
      @existing_token.policies = new_policy_list
      # Fixed: the log message was missing the closing parenthesis.
      Puppet.info("Updated token #{@existing_token.description} (Accessor ID: #{@existing_token.accessor_id})")
    elsif @resource[:ensure] == :absent && @existing_token
      @client.delete_token(@resource[:accessor_id])
      @resource[:accessor_id] = ''
      # Fixed: the log message was missing the closing parenthesis.
      Puppet.info("Deleted token #{@existing_token.description} (Accessor ID: #{@existing_token.accessor_id})")
    end
  end

  # Test hook: drop the cached client and token list between runs.
  def self.reset
    @client = nil
    @token_collection = nil
  end
end
# Value object describing one Consul ACL token and its attached policy links.
class ConsulToken
  attr_reader :accessor_id, :secret_id, :description
  attr_accessor :policies

  def initialize(accessor_id, secret_id, description, policies)
    @accessor_id = accessor_id
    @secret_id   = secret_id
    @description = description
    @policies    = policies
  end

  # True when this token's policy links correspond exactly to the given
  # lists of policy IDs and policy names: same total count, and every
  # requested ID/name present among the links.
  def is_policy_list_equal(policies_by_id, policies_by_name)
    return false unless @policies.length == policies_by_id.length + policies_by_name.length

    linked_ids   = @policies.map(&:policy_id)
    linked_names = @policies.map(&:policy_name)
    (policies_by_id - linked_ids).empty? && (policies_by_name - linked_names).empty?
  end
end
# Immutable (id, name) pair linking a token to one of its policies.
class ConsulTokenPolicyLink
  attr_reader :policy_id, :policy_name

  def initialize(policy_id, policy_name)
    @policy_id, @policy_name = policy_id, policy_name
  end
end
# HTTP client for the Consul ACL token endpoints, layered on the shared
# PuppetX::Consul::ACLBase::BaseClient (which supplies get/put/delete).
class ConsulACLTokenClient < PuppetX::Consul::ACLBase::BaseClient
  # Fetch all tokens as ConsulToken objects. API failures are downgraded to
  # a warning plus an empty list so an unreachable Consul does not abort
  # the whole catalog run.
  def get_token_list(tries)
    begin
      response = get('/tokens', tries)
    rescue StandardError => e
      Puppet.warning("Cannot retrieve ACL token list: #{e.message}")
      response = {}
    end
    collection = []
    response.each {|item|
      collection.push(ConsulToken.new(item['AccessorID'], item['SecretID'], item['Description'], parse_policies(item['Policies'])))
    }
    collection
  end

  # Create a token; returns the resulting ConsulToken, or nil on failure.
  # secret_id is only transmitted when explicitly provided (see encode_body).
  def create_token(accessor_id, description, policies_by_name, policies_by_id, tries, secret_id = nil)
    begin
      body = encode_body(accessor_id, description, policies_by_name, policies_by_id, secret_id)
      response = put('/token', body, tries)
    rescue StandardError => e
      Puppet.warning("Unable to create token #{description}: #{e.message}")
      return nil
    end
    ConsulToken.new(response['AccessorID'], response['SecretID'], description, parse_policies(response['Policies']))
  end

  # Replace the token's policy links; returns the updated policy link list,
  # or nil on failure.
  def update_token(accessor_id, description, policies_by_name, policies_by_id)
    begin
      body = encode_body(accessor_id, description, policies_by_name, policies_by_id, nil)
      response = put('/token/' + accessor_id, body)
    rescue StandardError => e
      Puppet.warning("Unable to update token #{description} (Accessor ID: #{accessor_id}): #{e.message}")
      return nil
    end
    parse_policies(response['Policies'])
  end

  # Delete the token by accessor id. A literal 'false' response body is
  # treated as a failed delete and promoted to an error (then warned).
  def delete_token(accessor_id)
    begin
      response = delete('/token/' + accessor_id)
      if response == 'false'
        raise 'Consul API returned false as response'
      end
    rescue StandardError => e
      Puppet.warning("Unable to delete token #{accessor_id}: #{e.message}")
      return nil
    end
  end

  # Convert the API's 'Policies' array (possibly nil) into link objects.
  def parse_policies(response)
    unless response
      return []
    end
    policy_links = []
    response.each {|policy|
      policy_links.push(ConsulTokenPolicyLink.new(policy['ID'], policy['Name']))
    }
    policy_links
  end

  # Build the request body. Policies are referenced by Name or by ID;
  # SecretID is included only when a non-blank secret was supplied, and
  # tokens are always created as non-local.
  def encode_body(accessor_id, description, policies_by_name, policies_by_id, secret_id = nil)
    policies = []
    policies_by_name.each {|name|
      policies.push({'Name' => name})
    }
    policies_by_id.each {|id|
      policies.push({'ID' => id})
    }
    body = {}
    body.store('AccessorID', accessor_id)
    body.store('Description', description)
    body.store('Local', false)
    body.store('Policies', policies)
    if !secret_id.nil? && !secret_id.to_s.strip.empty?
      body.store('SecretID', secret_id)
    end
    body
  end
end
|
/*
* Created Date: Sat, 28th Dec 2019, 16:01:17 pm
* Author: <NAME>
* Email: <EMAIL>
* Copyright (c) 2019 The Distance
*/
import Color from 'color';
/**
 * Lighten `inputColor` by the given fraction (default 0.5) using the
 * `color` library, returning the result as a hex string.
 */
function LightenColor(inputColor, value = 0.5) {
  const lightened = Color.rgb(inputColor).lighten(value);
  return lightened.hex();
}

export default LightenColor;
|
import sys
import os
import subprocess
def generate_outline(input_file, output_dir, expected_output_file=None):
    """Run freemind_outline.py on ``input_file``, write its stdout to
    ``output_dir/generated_output.otl`` and diff it against the expected file.

    Parameters
    ----------
    input_file : str
        Path to the FreeMind ``.mm`` input file.
    output_dir : str
        Existing directory to write ``generated_output.otl`` into.
    expected_output_file : str, optional
        Path to the expected ``.otl`` file. Defaults to ``sys.argv[2]`` for
        backward compatibility with the original implementation, which read
        the command line directly.
    """
    if not os.path.exists(input_file):
        print("Error: Input file does not exist.")
        return
    if not os.path.exists(output_dir):
        print("Error: Output directory does not exist.")
        return
    output_file = os.path.join(output_dir, "generated_output.otl")
    # Redirect stdout from Python instead of passing ">" in an argument list:
    # the original used a list with shell=True, which on POSIX runs only the
    # first element ("python") and never performs the redirection.
    with open(output_file, "w") as out:
        subprocess.run([sys.executable, "freemind_outline.py", input_file],
                       stdout=out)
    if expected_output_file is None:
        # Backward-compatible fallback to the CLI argument.
        expected_output_file = sys.argv[2]
    diff_process = subprocess.run(["diff", output_file, expected_output_file])
    if diff_process.returncode == 0:
        print("Output matches the expected output.")
    else:
        print("Output does not match the expected output.")
if __name__ == "__main__":
    # CLI entry point: arg 1 is the input mind map, arg 2 the expected output
    # file (whose directory doubles as the output directory).
    if len(sys.argv) == 3:
        generate_outline(sys.argv[1], os.path.dirname(sys.argv[2]))
    else:
        print("Usage: python generate_outline.py /path/to/input.mm /path/to/output_directory/expected_output.otl")
# Build configuration for a musl-based cross-toolchain build.
# ARCH will be auto-detected as the host if not specified
#ARCH=i486
# Install prefix for the cross-compiler toolchain.
CC_BASE_PREFIX=/opt/cross
# Parallel make jobs.
MAKEFLAGS=-j8
# NOTE(review): the LTO plugin is disabled for both the bootstrap and the
# final GCC -- presumably a musl-toolchain limitation; confirm before removing.
GCC_BOOTSTRAP_CONFFLAGS=--disable-lto-plugin
GCC_CONFFLAGS=--disable-lto-plugin
# Name prefix for the musl compiler wrappers (e.g. musl-gcc, musl-g++).
MUSL_CC_PREFIX="musl-"
# Disable these three lines when running build-gcc-deps.sh
CC="'"${MUSL_CC_PREFIX}gcc"' -Wl,-Bstatic -static-libgcc"
CXX="'"${MUSL_CC_PREFIX}g++"' -Wl,-Bstatic -static-libgcc"
export CC CXX
#pragma once
#include "AudioComponent.h"

// Drives sprite-sheet animations for an sf::Sprite: each named Animation
// advances a fixed-size frame rectangle left-to-right across a texture
// sheet on a timer. Only the inner Animation class is defined inline here;
// the outer class's Play/addAnimation methods are defined in the .cpp file.
class AnimationComponent
{
private:
	class Animation
	{
	public:
		bool done;             // true for the tick on which the animation wrapped to its first frame
		float animationTimer;  // accumulated-time threshold between frame advances
		float timer;           // time accumulated since the last frame advance
		int width;             // single frame width in pixels
		int height;            // single frame height in pixels
		sf::Sprite& sprite;         // sprite being animated (owned elsewhere)
		sf::Texture& textureSheet;  // sheet containing all frames (owned elsewhere)
		sf::IntRect startRect, endRect, currentRect;

		// Advance one frame when enough time has accumulated; returns a
		// reference to `done`, true only on the tick where the loop wrapped.
		// NOTE(review): `priority` is accepted but never read here --
		// priority handling presumably lives in AnimationComponent::Play;
		// confirm against the .cpp file.
		const bool& Play(const float& DeltaTime, bool priority = false)
		{
			//UpdateTimer
			this->done = false;
			this->timer += 100.0f * DeltaTime;
			if (this->timer >= animationTimer)
			{
				//Reset Timer
				this->timer = 0.0f;
				//Animate
				if (this->currentRect != this->endRect)
				{
					this->currentRect.left += this->width;
				}
				else //Reset
				{
					this->currentRect.left = this->startRect.left;
					done = true;
				}
				this->sprite.setTextureRect(this->currentRect);
			}
			return done;
		}

		// Same as Play above, but time is scaled by `speed_percent`,
		// clamped to a minimum of 0.5 so the animation never stalls.
		const bool& Play(const float& DeltaTime, float speed_percent, bool priority = false)
		{
			//UpdateTimer
			this->done = false;
			if (speed_percent < 0.5f)
				speed_percent = 0.5f;
			this->timer += speed_percent * 100.0f * DeltaTime;
			if (this->timer >= animationTimer)
			{
				//Reset Timer
				this->timer = 0.0f;
				//Animate
				if (this->currentRect != this->endRect)
				{
					this->currentRect.left += this->width;
				}
				else //Reset
				{
					this->currentRect.left = this->startRect.left;
					done = true;
				}
				this->sprite.setTextureRect(this->currentRect);
			}
			return done;
		}

		// Rewind to the first frame and clear the accumulated timer.
		void Reset()
		{
			this->timer = 0.0f;
			this->currentRect = this->startRect;
		}

		const bool& isDone() { return this->done; }

		// Frame indices are grid coordinates on the sheet; Dimensions is the
		// per-frame pixel size. Binds the sheet to the sprite immediately.
		// NOTE(review): the member-init-list order (sprite, textureSheet,
		// animationTimer) differs from the declaration order; harmless here
		// (no interdependent initialisation) but triggers -Wreorder.
		Animation(
			sf::Sprite& sprite,
			sf::Texture& textureSheet,
			float animationTimer,
			sf::Vector2i startFrameIndex, sf::Vector2i endFrameIndex,
			sf::Vector2i Dimensions,
			bool priority = false)
			: sprite(sprite), textureSheet(textureSheet), animationTimer(animationTimer)
		{
			//this->animationTimer = 0.0f;
			this->timer = 0.0f;
			this->done = false;
			this->width = Dimensions.x;
			this->height = Dimensions.y;
			this->startRect = sf::IntRect(startFrameIndex.x * this->width, startFrameIndex.y * this->height, this->width, this->height);
			this->endRect = sf::IntRect(endFrameIndex.x * this->width, endFrameIndex.y * this->height, this->width, this->height);
			this->currentRect = this->startRect;
			this->sprite.setTexture(this->textureSheet, true);
			this->sprite.setTextureRect(this->startRect);
		}

		~Animation()
		{
		}
	};

	sf::Sprite& sprite;           // sprite shared by every registered Animation
	sf::Texture& texture_sheet;   // texture sheet shared by every Animation
	std::map<std::string, Animation*> animations;  // animations keyed by name
	Animation* lastAnimated;      // NOTE(review): presumably the most recently played animation -- confirm in .cpp
	Animation* priorityAnimation; // NOTE(review): presumably the animation holding priority -- confirm in .cpp

public:
	AnimationComponent(sf::Sprite& sprite, sf::Texture& texture_sheet);
	~AnimationComponent();

	// NOTE(review): map::operator[] default-inserts a null Animation* for an
	// unknown key, so the dereference below is undefined behaviour for keys
	// never registered via addAnimation -- consider at()/find() in a fix.
	const bool& isDone(const std::string key) { return this->animations[key]->isDone(); }

	// Play the named animation; the modifier overload scales playback speed.
	void Play(const std::string key, const float& DeltaTime, const bool priority = false);
	void Play(const std::string key, const float& DeltaTime, float modifier,
		float modifier_max, const bool priority = false);

	// Register a new animation under `key` (frame grid coords + frame size).
	void addAnimation(const std::string key,
		float animationTimer,
		sf::Vector2i startFrameIndex, sf::Vector2i endFrameIndex,
		sf::Vector2i Dimensions);
};
|
def fitness(solution, cost):
    """
    Return the fitness evaluation based on the given solution and cost dictionary.

    Parameters
    ----------
    solution : list
        A list representing the tour, where each element is a city.
    cost : dict
        Maps (city_a, city_b) pairs to the cost of travelling between them.

    Returns
    -------
    int
        Total cost of the closed tour, including the edge from the last
        city back to the first.
    """
    # Start with the closing edge (last city back to the first), then add
    # every consecutive pair along the tour.
    total = cost[(solution[-1], solution[0])]
    for origin, destination in zip(solution, solution[1:]):
        total += cost[(origin, destination)]
    return total
<gh_stars>0
package io.github.marcelbraghetto.dailydeviations.features.collection.logic;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricGradleTestRunner;
import org.robolectric.annotation.Config;
import io.github.marcelbraghetto.dailydeviations.BuildConfig;
import io.github.marcelbraghetto.dailydeviations.testconfig.RobolectricProperties;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
/**
* Created by <NAME> on 12/06/16.
*/
@Config(constants = BuildConfig.class, sdk = RobolectricProperties.EMULATE_SDK)
@RunWith(RobolectricGradleTestRunner.class)
public class CollectionDisplayModeTest {
@Test
public void fromStringNullInput() {
// Input
String input = null;
// Run
CollectionDisplayMode mode = CollectionDisplayMode.fromString(input);
// Verify
assertThat(mode, is(CollectionDisplayMode.MultiColumn));
}
@Test
public void fromStringNoMatch() {
// Input
String input = "SomeRandomString";
// Run
CollectionDisplayMode mode = CollectionDisplayMode.fromString(input);
// Verify
assertThat(mode, is(CollectionDisplayMode.MultiColumn));
}
@Test
public void fromStringMultiColumn() {
// Input
String input = "MultiColumn";
// Run
CollectionDisplayMode mode = CollectionDisplayMode.fromString(input);
// Verify
assertThat(mode, is(CollectionDisplayMode.MultiColumn));
}
@Test
public void fromStringSingleColumn() {
// Input
String input = "SingleColumn";
// Run
CollectionDisplayMode mode = CollectionDisplayMode.fromString(input);
// Verify
assertThat(mode, is(CollectionDisplayMode.SingleColumn));
}
} |
// Modules required to handle the request, and parse the resultant XML.
const agent = require("superagent");
const request = require("request");
const parseString = require('xml2js').parseString;
// Function to get the image XML from the Gelbooru server.
exports.getImage = function (limit, tags, callback) {
var address = `http://gelbooru.com/index.php?page=dapi&s=post&q=index&limit=${limit}&tags=${tags}`;
agent.post(address).end(function (err, res) {
callback(res);
if (err) {
console.log(err);
callback(res);
}
});
};
// Function to search a user-defined number of images from Gelbooru, and then pick one randomly, and pass the URL on using a callback function.
exports.getRandomImage = function (limit, tags, callback) {
var address = `http://gelbooru.com/index.php?page=dapi&s=post&q=index&limit=${limit}&tags=${tags}`;
agent.post(address).end(function (err, res) {
let cleanedString = res.text.replace("\ufeff", "");
parseString(cleanedString, function (error, result) {
if (error)
console.log("Error", error);
else {
if (typeof(result.posts.post) != "undefined") {
let randomImagePicker = Math.floor(Math.random() * (result.posts.post.length - 1)) + 1;
var imageURL = result.posts.post[randomImagePicker].$.file_url;
callback(imageURL);
} else {
console.log("Error: Number of images is undefined!");
callback("727");
}
}
});
})
}
|
public class MyObject implements Comparable {
private String name;
private int number;
public MyObject(String name, int number) {
this.name = name;
this.number = number;
}
@Override
public int compareTo(MyObject obj) {
if (this.number == obj.number)
return this.name.compareTo(obj.name);
else
return this.number - obj.number;
}
@Override
public String toString() {
return name + ", " + number;
}
} |
<gh_stars>1-10
package com.grasea.grandroid.ble.annotations;
import com.grasea.grandroid.ble.Config;
import java.lang.reflect.Field;
import java.util.HashMap;
/**
* Created by <NAME> on 2016/5/18.
*/
/**
 * Singleton that scans an object's public fields for {@link AliasName}
 * annotations and records a mapping from each field's String value
 * (a UUID) to its annotated alias name.
 */
public class NameBinder implements Parsable {
    private static NameBinder ourInstance = new NameBinder();

    /** Map from UUID string (the field's value) to its alias name. */
    private HashMap<String, String> nameMap;

    private NameBinder() {
        nameMap = new HashMap<>();
    }

    public static NameBinder getInstance() {
        return ourInstance;
    }

    /**
     * Scan {@code object}'s annotated fields; any failure is logged via
     * {@code Config.loge} rather than propagated.
     */
    public static void bind(Object object) {
        if (ourInstance == null) {
            ourInstance = new NameBinder();
        }
        try {
            ourInstance.startBindClass(object);
        } catch (Exception e) {
            Config.loge(e);
        }
    }

    /** Placeholder kept from the original; intentionally does nothing. */
    private void doUnbind() {
    }

    /** Drop the singleton; a later {@link #bind(Object)} recreates it. */
    public static void unbind() {
        ourInstance = null;
    }

    @Override
    public void startBindClass(Object object) throws Exception {
        // Rebuild the map from scratch on every scan.
        if (!nameMap.isEmpty()) {
            nameMap.clear();
        }
        for (Field member : object.getClass().getFields()) {
            AliasName alias = member.getAnnotation(AliasName.class);
            if (alias == null) {
                continue;
            }
            String aliasName = alias.name();
            // Only String-typed fields can hold a UUID key.
            if (!member.getType().equals(String.class)) {
                throw new Exception("UUID's Name must to be 'String' type.");
            }
            member.setAccessible(true);
            nameMap.put((String) member.get(object), aliasName);
        }
    }

    public String getName(String uuid) {
        return nameMap.get(uuid);
    }
}
|
#!/bin/bash
#
# NOTE: This has been modified from the version in the normal repo.
# -Chad
#
# Absolute path this script is in, thus /home/user/bin
SCRIPTPATH="$(cd "$(dirname "$BASH_SOURCE")"; pwd)"
echo "This script path : $SCRIPTPATH"
BASEDIR="$(cd "$SCRIPTPATH"; pwd)"
echo "SOS base path ...: $BASEDIR"
export PROJECT_BASE="$(cd "$BASEDIR"; pwd)"
echo "Project base ....: $PROJECT_BASE"
export SOS_PYTHON="$PROJECT_BASE/python/bin/python"
echo "\$PROJECT_BASE ...: $PROJECT_BASE"
echo "\$SOS_PYTHON .....: $SOS_PYTHON"
echo " --> Use this to run SOS's Python modules! <--"
echo ""
# NOTE(review): $SOSSCRIPTS is never defined in this script -- confirm the
# caller exports it, otherwise this prepends an empty PATH component.
export PATH=$SOSSCRIPTS:$PATH
# For tracking the environment that SOS is built in:
export SOS_HOST_KNOWN_AS="\"(default)\""
export SOS_HOST_NODE_NAME="\"$(uname -n)\""
export SOS_HOST_DETAILED="\"$(uname -o) $(uname -r) $(uname -m)\""
export SOS_CMD_PORT=22500
export SOS_ROOT=$BASEDIR/sos_flow
export BUILDDIR=build-llnl
export SOS_BUILD_DIR=$SOS_ROOT/$BUILDDIR
# Resolved the leftover git merge conflict here by keeping the HEAD side
# (work in the current directory, define the discovery dir), which matches
# this repo's local modifications noted in the header.
export SOS_WORK=`pwd`
export SOS_EVPATH_MEETUP=$SOS_WORK
export SOS_DISCOVERY_DIR=$SOS_EVPATH_MEETUP
export CC=gcc
export CXX=g++
export MPICC=mpicc
export MPICXX=mpiCC
export SOS_ENV_SET=1
export SOS_IN_MEMORY_DATABASE=TRUE
export SOS_EXPORT_DB_AT_EXIT=VERBOSE
|
<filename>x-pack/legacy/plugins/siem/public/components/import_data_modal/index.tsx
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import {
EuiButton,
EuiButtonEmpty,
EuiCheckbox,
// @ts-ignore no-exported-member
EuiFilePicker,
EuiModal,
EuiModalBody,
EuiModalFooter,
EuiModalHeader,
EuiModalHeaderTitle,
EuiOverlayMask,
EuiSpacer,
EuiText,
} from '@elastic/eui';
import React, { useCallback, useState } from 'react';
import { ImportRulesResponse, ImportRulesProps } from '../../containers/detection_engine/rules';
import {
displayErrorToast,
displaySuccessToast,
useStateToaster,
errorToToaster,
} from '../toasters';
import * as i18n from './translations';
interface ImportDataModalProps {
  /** Label shown beside the overwrite checkbox. */
  checkBoxLabel: string;
  /** Invoked when the modal should be dismissed. */
  closeModal: () => void;
  /** Heading text rendered at the top of the modal body. */
  description: string;
  /** Toast title used when the import reports errors or throws. */
  errorMessage: string;
  /** Builds one detail line per failed rule for the error toast. */
  failedDetailed: (id: string, statusCode: number, message: string) => string;
  /** Called after an import attempt finishes (success or partial failure). */
  importComplete: () => void;
  /** Performs the actual upload; resolves with per-rule results. */
  importData: (arg: ImportRulesProps) => Promise<ImportRulesResponse>;
  /** Whether to render the overwrite checkbox at all. */
  showCheckBox: boolean;
  /** Controls modal visibility; nothing renders when false. */
  showModal: boolean;
  /** Label for the submit button. */
  submitBtnText: string;
  /** Prompt text shown inside the file picker. */
  subtitle: string;
  /** Builds the success-toast message from the imported-rule count. */
  successMessage: (totalCount: number) => string;
  /** Modal title. */
  title: string;
}
/**
 * Modal component for importing Rules from a json file.
 *
 * Fixes relative to the original:
 *  - The modal's onClose previously called `closeModal` directly, bypassing
 *    `handleCloseModal`, so a file picked before dismissing via the mask or
 *    close button silently persisted into the next open.
 *  - `importRulesCallback`'s dependency array listed only
 *    [selectedFiles, overwrite], so the callback could close over stale
 *    props/handlers after a re-render.
 */
export const ImportDataModalComponent = ({
  checkBoxLabel,
  closeModal,
  description,
  errorMessage,
  failedDetailed,
  importComplete,
  importData,
  showCheckBox = true,
  showModal,
  submitBtnText,
  subtitle,
  successMessage,
  title,
}: ImportDataModalProps) => {
  // File(s) chosen in the picker; null until the user selects something.
  const [selectedFiles, setSelectedFiles] = useState<FileList | null>(null);
  const [isImporting, setIsImporting] = useState(false);
  const [overwrite, setOverwrite] = useState(false);
  const [, dispatchToaster] = useStateToaster();

  // Reset transient state and close — used after a finished import attempt.
  const cleanupAndCloseModal = useCallback(() => {
    setIsImporting(false);
    setSelectedFiles(null);
    closeModal();
  }, [setIsImporting, setSelectedFiles, closeModal]);

  const importRulesCallback = useCallback(async () => {
    if (selectedFiles != null) {
      setIsImporting(true);
      const abortCtrl = new AbortController();
      try {
        const importResponse = await importData({
          fileToImport: selectedFiles[0],
          overwrite,
          signal: abortCtrl.signal,
        });
        // TODO: Improve error toast details for better debugging failed imports
        // e.g. When success == true && success_count === 0 that means no rules were overwritten, etc
        if (importResponse.success) {
          displaySuccessToast(successMessage(importResponse.success_count), dispatchToaster);
        }
        if (importResponse.errors.length > 0) {
          const formattedErrors = importResponse.errors.map(e =>
            failedDetailed(e.rule_id, e.error.status_code, e.error.message)
          );
          displayErrorToast(errorMessage, formattedErrors, dispatchToaster);
        }
        importComplete();
        cleanupAndCloseModal();
      } catch (error) {
        cleanupAndCloseModal();
        errorToToaster({ title: errorMessage, error, dispatchToaster });
      }
    }
    // List every captured value so the callback never closes over stale
    // props or handlers.
  }, [
    selectedFiles,
    overwrite,
    importData,
    successMessage,
    errorMessage,
    failedDetailed,
    importComplete,
    cleanupAndCloseModal,
    dispatchToaster,
  ]);

  // Discard any picked file when the user dismisses the modal.
  const handleCloseModal = useCallback(() => {
    setSelectedFiles(null);
    closeModal();
  }, [closeModal]);

  return (
    <>
      {showModal && (
        <EuiOverlayMask>
          {/* handleCloseModal (not closeModal) so closing via the mask or the
              modal's close button also clears the picked file. */}
          <EuiModal onClose={handleCloseModal} maxWidth={'750px'}>
            <EuiModalHeader>
              <EuiModalHeaderTitle>{title}</EuiModalHeaderTitle>
            </EuiModalHeader>
            <EuiModalBody>
              <EuiText size="s">
                <h4>{description}</h4>
              </EuiText>
              <EuiSpacer size="s" />
              <EuiFilePicker
                id="rule-file-picker"
                initialPromptText={subtitle}
                onChange={(files: FileList | null) => {
                  setSelectedFiles(files && files.length > 0 ? files : null);
                }}
                display={'large'}
                fullWidth={true}
                isLoading={isImporting}
              />
              <EuiSpacer size="s" />
              {showCheckBox && (
                <EuiCheckbox
                  id="import-data-modal-checkbox-label"
                  label={checkBoxLabel}
                  checked={overwrite}
                  onChange={() => setOverwrite(!overwrite)}
                />
              )}
            </EuiModalBody>
            <EuiModalFooter>
              <EuiButtonEmpty onClick={handleCloseModal}>{i18n.CANCEL_BUTTON}</EuiButtonEmpty>
              <EuiButton
                onClick={importRulesCallback}
                disabled={selectedFiles == null || isImporting}
                fill
              >
                {submitBtnText}
              </EuiButton>
            </EuiModalFooter>
          </EuiModal>
        </EuiOverlayMask>
      )}
    </>
  );
};

ImportDataModalComponent.displayName = 'ImportDataModalComponent';

export const ImportDataModal = React.memo(ImportDataModalComponent);

ImportDataModal.displayName = 'ImportDataModal';
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.