text stringlengths 1 1.05M |
|---|
#!/usr/bin/env python
u"""
polygon.py
Written by <NAME> (11/2021)
Reads polygons from GeoJSON, kml/kmz or ESRI shapefile files
INPUTS:
input polygon file
OUTPUT:
shapely multipolygon object of input file
OPTIONS:
EPSG: projection identifier for output coordinates
VARIABLES: reduce to a specific set of identifiers
PYTHON DEPENDENCIES:
numpy: Scientific Computing Tools For Python
https://numpy.org
https://numpy.org/doc/stable/user/numpy-for-matlab-users.html
fiona: Python wrapper for vector data access functions from the OGR library
https://fiona.readthedocs.io/en/latest/manual.html
geopandas: Python tools for geographic data
http://geopandas.readthedocs.io/
shapely: PostGIS-ish operations outside a database context for Python
http://toblerity.org/shapely/index.html
pyproj: Python interface to PROJ library
https://pypi.org/project/pyproj/
UPDATE HISTORY:
Updated 11/2021: add initial functions for clustering multipolygons
Updated 08/2021: add functions for convex hull and exterior coordinates
Written 10/2020
"""
from __future__ import print_function
import os
import io
import re
import copy
import fiona
import pyproj
import zipfile
import osgeo.gdal
import geopandas
import numpy as np
import sklearn.cluster
import shapely.geometry
# enable kml driver for geopandas
fiona.drvsupport.supported_drivers['LIBKML'] = 'rw'
class polygon(object):
    """
    Data class for reading polygon files

    Reads polygons from GeoJSON, kml/kmz or ESRI shapefile files
    into shapely multipolygon objects

    Attributes
    ----------
    filename: input polygon file
    feature: shapely multipolygon object read from file
    epsg: projection identifier for output coordinates
    shape: number of polygons within the feature
    """
    # ignore numpy invalid-value warnings (NaN comparisons)
    np.seterr(invalid='ignore')

    def __init__(self, epsg=4326):
        self.filename = None
        self.feature = None
        self.epsg = epsg
        self.shape = None

    def case_insensitive_filename(self, filename):
        """
        Searches a directory for a filename without case dependence

        Raises IOError if no matching file is found
        """
        self.filename = os.path.expanduser(filename)
        # check if file presently exists with input case
        if not os.access(self.filename, os.F_OK):
            # search for filename without case dependence
            basename = os.path.basename(filename)
            directory = os.path.dirname(os.path.expanduser(filename))
            f = [f for f in os.listdir(directory) if re.match(basename, f, re.I)]
            if not f:
                raise IOError('{0} not found in file system'.format(filename))
            self.filename = os.path.join(directory, f.pop())
        return self

    def _transformer(self, crs):
        """
        Build a pyproj transformer from an input coordinate reference
        system to the output EPSG

        generalization: from_user_input accepts pyproj CRS objects,
        dicts (e.g. {'init': 'epsg:4326'}) and strings, so this works
        with both older and newer geopandas/fiona crs representations
        """
        crs1 = pyproj.CRS.from_user_input(crs)
        crs2 = pyproj.CRS.from_string("epsg:{0:d}".format(self.epsg))
        return pyproj.Transformer.from_crs(crs1, crs2, always_xy=True)

    @staticmethod
    def _shapely_polygon(xi, yi):
        """
        Create a valid shapely polygon from projected coordinates
        """
        poly_obj = shapely.geometry.Polygon(np.c_[xi, yi])
        # cannot have overlapping exterior or interior rings
        if not poly_obj.is_valid:
            poly_obj = poly_obj.buffer(0)
        return poly_obj

    def from_geojson(self, filename, variables=None):
        """
        read GeoJSON (.json, .geojson) files

        variables: optional list of feature ids to keep
        """
        # set filename
        self.case_insensitive_filename(filename)
        # read the GeoJSON file
        gj = geopandas.read_file(self.filename)
        # converting x,y from polygon projection to output EPSG
        transformer = self._transformer(gj.crs)
        # list of polygons
        poly_list = []
        # find features of interest
        geometries = ('LineString', 'Polygon')
        f = [f for f in gj.iterfeatures() if f['geometry']['type'] in geometries]
        # reduce to variables of interest if specified
        f = [ft for ft in f if ft['id'] in variables] if variables else f
        # for each line string or polygon feature
        for feature in f:
            # extract coordinates for feature
            x, y = np.transpose(feature['geometry']['coordinates'])
            # convert points to EPSG
            xi, yi = transformer.transform(x, y)
            # create valid shapely polygon
            poly_list.append(self._shapely_polygon(xi, yi))
        # create shapely multipolygon object and return self
        self.feature = shapely.geometry.MultiPolygon(poly_list)
        self.shape = (len(poly_list),)
        return self

    def from_kml(self, filename, kmz=False, variables=None):
        """
        read keyhole markup language (.kml) files

        kmz: input file is compressed (.kmz)
        variables: optional list of feature ids to keep
        """
        # set filename
        self.case_insensitive_filename(filename)
        # if input file is compressed
        if kmz:
            # decompress and parse KMZ file
            z = zipfile.ZipFile(self.filename, 'r')
            kml_file, = [s for s in z.namelist() if re.search(r'\.(kml)$', s)]
            # need to use osgeo virtual file system to add suffix to mmap name
            mmap_name = "/vsimem/{0}".format(kml_file)
            osgeo.gdal.FileFromMemBuffer(mmap_name, z.read(kml_file))
            with fiona.Collection(mmap_name, driver='LIBKML') as f:
                kml = geopandas.GeoDataFrame.from_features(f, crs=f.crs)
        else:
            kml = geopandas.read_file(self.filename)
        # converting x,y from polygon projection to output EPSG
        transformer = self._transformer(kml.crs)
        # list of polygons
        poly_list = []
        # find features of interest
        geometries = ('LineString', 'Polygon')
        f = [f for f in kml.iterfeatures() if f['geometry']['type'] in geometries]
        # reduce to variables of interest if specified
        f = [ft for ft in f if ft['id'] in variables] if variables else f
        # for each line string or polygon feature
        for feature in f:
            # extract coordinates for feature
            coords = np.squeeze(feature['geometry']['coordinates'])
            # convert points to EPSG
            xi, yi = transformer.transform(coords[:, 0], coords[:, 1])
            # create valid shapely polygon
            poly_list.append(self._shapely_polygon(xi, yi))
        # create shapely multipolygon object and return self
        self.feature = shapely.geometry.MultiPolygon(poly_list)
        self.shape = (len(poly_list),)
        return self

    def from_shapefile(self, filename, zip=False, variables=None):
        """
        read ESRI shapefiles

        zip: input file is a zipped shapefile
        variables: optional list of entity ids to keep
        """
        # set filename
        self.case_insensitive_filename(filename)
        # read input zipfile containing shapefiles
        if zip:
            # read the compressed shapefile and extract entities
            shape = fiona.open('zip://{0}'.format(self.filename))
        else:
            # read the shapefile and extract entities
            shape = fiona.open(self.filename, 'r')
        # converting x,y from polygon projection to output EPSG
        transformer = self._transformer(shape.crs)
        # find features of interest
        geometries = ('LineString', 'Polygon')
        f = [f for f in shape.values() if f['geometry']['type'] in geometries]
        # reduce to variables of interest if specified
        f = [ft for ft in f if ft['id'] in variables] if variables else f
        # list of polygons
        poly_list = []
        # for each entity
        for i, ent in enumerate(f):
            # each entity may contain multiple coordinate rings
            for coords in ent['geometry']['coordinates']:
                # convert points to EPSG
                x, y = np.transpose(coords)
                xi, yi = transformer.transform(x, y)
                # create valid shapely polygon
                poly_list.append(self._shapely_polygon(xi, yi))
        # create shapely multipolygon object and return self
        self.feature = shapely.geometry.MultiPolygon(poly_list)
        self.shape = (len(poly_list),)
        return self

    def simplify(self, tolerance, preserve_topology=True):
        """
        Simplify representation of the geometric object
        """
        self.feature = self.feature.simplify(tolerance,
            preserve_topology=preserve_topology)
        self.shape = (len(self.feature),)
        return self

    def cluster(self, max_clusters=25):
        """
        Cluster polygons using k-means clustering

        Returns a new polygon object whose feature list holds the
        convex hull of each cluster of input polygons
        """
        # centroid coordinates of each polygon in the multipolygon
        centroids = np.squeeze([g.centroid.xy for g in self.feature])
        nmax = len(centroids)
        # k-means within-cluster sum of squares
        wcss = np.zeros((max_clusters - 1))
        AIC = np.zeros((max_clusters - 1))
        # for each test number of clusters
        for i in range(1, max_clusters):
            kmeans = sklearn.cluster.KMeans(n_clusters=i,
                init='k-means++', random_state=42)
            kmeans.fit(centroids)
            # within-cluster sum of squares
            wcss[i-1] = kmeans.inertia_
            # estimate Akaike information criterion
            log_lik = 0.5*(-nmax*(np.log(2.0 * np.pi) + 1.0 -
                np.log(nmax) + np.log(wcss[i-1])))
            AIC[i-1] = -2.0*log_lik + 2.0*np.float64(i + 1)
        # maximum number of clusters based on elbow method
        # cast to int: sklearn requires a python integer, not numpy
        n_clusters = int(np.max(np.nonzero(AIC[1:] < AIC[0:-1])))
        kmeans = sklearn.cluster.KMeans(n_clusters=n_clusters,
            init='k-means++', random_state=5, max_iter=400)
        # cluster for each centroid
        k = kmeans.fit_predict(centroids)
        # output polygon object
        temp = polygon(epsg=self.epsg)
        temp.feature = []
        temp.shape = (n_clusters,)
        for cluster in range(n_clusters):
            # polygons belonging to this cluster
            # (MultiPolygon does not support boolean-array indexing)
            members = [g for g, flag in zip(self.feature, k == cluster) if flag]
            mp = shapely.geometry.MultiPolygon(members)
            # bug fix: append rather than index into an empty list
            temp.feature.append(mp.convex_hull)
        # return cluster
        return temp

    def chunk(self, max_features=500, max_vertices=5000):
        """
        Reduce geometric object to lists with
        number of features and vertices

        NOTE(review): appears incomplete upstream -- it finds a
        simplification tolerance but never builds or returns the
        output chunks (offset/features are currently unused)
        """
        offset = 0
        features = [[]]*(self.shape[0]//max_features)
        # bug fix: built-in range does not accept float arguments
        for tol in np.arange(0.0, 1.0, 0.1):
            s = self.feature.simplify(tol)
            n_vertices = np.max([len(p.exterior.coords) for p in s])
            if (n_vertices < max_vertices):
                break

    def bounds(self):
        """
        Return the bounding box of the geometric object
        """
        return self.feature.bounds

    def convex_hull(self):
        """
        Calculate the convex hull of the geometric object
        """
        # orient the exterior ring counter-clockwise (sign=1)
        self.feature = shapely.geometry.polygon.orient(
            self.feature.convex_hull, sign=1)
        self.shape = np.shape(self.feature)
        return self

    def xy(self):
        """
        Return the exterior coordinates of the geometric object
        """
        return self.feature.exterior.xy
|
<filename>DesignMode/src/main/java/factory/IcCardFactory.java<gh_stars>0
package factory;
/**
 * Concrete factory that produces and registers {@link IcCard} products.
 */
public class IcCardFactory extends Factory {

    /** Create a new IC card carrying the given name. */
    @Override
    public Product createProduct(String name) {
        final Product card = new IcCard();
        card.setName(name);
        return card;
    }

    /** Mark the given product as registered. */
    @Override
    public void registerProduct(Product product) {
        product.setRegister(Boolean.TRUE);
    }
}
|
<reponame>rainmaple/duckdb
//===----------------------------------------------------------------------===//
// DuckDB
//
// duckdb/storage/buffer_manager.hpp
//
//
//===----------------------------------------------------------------------===//
#pragma once
#include "duckdb/storage/buffer/buffer_handle.hpp"
#include "duckdb/storage/buffer/buffer_list.hpp"
#include "duckdb/storage/buffer/managed_buffer.hpp"
#include "duckdb/storage/block_manager.hpp"
#include "duckdb/common/file_system.hpp"
#include "duckdb/common/unordered_map.hpp"
#include <mutex>
namespace duckdb {
//! The buffer manager is in charge of handling memory management for the database. It hands out memory buffers that can
//! be used by the database internally.
class BufferManager {
	//! BufferHandle unpins its block on destruction and therefore needs
	//! access to the private Unpin/AddReference machinery
	friend class BufferHandle;

public:
	//! Construct a buffer manager over the given file system and block
	//! manager, spilling to temp_directory and capped at maximum_memory bytes
	BufferManager(FileSystem &fs, BlockManager &manager, string temp_directory, idx_t maximum_memory);
	~BufferManager();

	//! Pin a block id, returning a block handle holding a pointer to the block
	unique_ptr<BufferHandle> Pin(block_id_t block, bool can_destroy = false);
	//! Allocate a buffer of arbitrary size, as long as it is >= BLOCK_SIZE. can_destroy signifies whether or not the
	//! buffer can be destroyed when unpinned, or whether or not it needs to be written to a temporary file so it can be
	//! reloaded.
	unique_ptr<BufferHandle> Allocate(idx_t alloc_size, bool can_destroy = false);
	//! Destroy the managed buffer with the specified buffer_id, freeing its memory
	void DestroyBuffer(block_id_t buffer_id, bool can_destroy = false);
	//! Set a new memory limit to the buffer manager, throws an exception if the new limit is too low and not enough
	//! blocks can be evicted
	void SetLimit(idx_t limit = (idx_t)-1);
	//! Fetch the buffer manager owned by the given client's database instance
	static BufferManager &GetBufferManager(ClientContext &context);

private:
	//! Pin a persistent block (backed by the block manager)
	unique_ptr<BufferHandle> PinBlock(block_id_t block_id);
	//! Pin a managed (temporary) buffer, reloading it from disk if needed
	unique_ptr<BufferHandle> PinBuffer(block_id_t block_id, bool can_destroy = false);
	//! Unpin a block id, decreasing its reference count and potentially allowing it to be freed.
	void Unpin(block_id_t block);
	//! Evict the least recently used block from the buffer manager, or throws an exception if there are no blocks
	//! available to evict
	unique_ptr<Block> EvictBlock();
	//! Add a reference to the refcount of a buffer entry
	void AddReference(BufferEntry *entry);
	//! Write a temporary buffer to disk
	void WriteTemporaryBuffer(ManagedBuffer &buffer);
	//! Read a temporary buffer from disk
	unique_ptr<BufferHandle> ReadTemporaryBuffer(block_id_t id);
	//! Get the path of the temporary buffer
	string GetTemporaryPath(block_id_t id);
	//! Remove the temporary spill file for the given buffer id
	void DeleteTemporaryFile(block_id_t id);

private:
	//! The file system used for temporary spill files
	FileSystem &fs;
	//! The block manager
	BlockManager &manager;
	//! The current amount of memory that is occupied by the buffer manager (in bytes)
	idx_t current_memory;
	//! The maximum amount of memory that the buffer manager can keep (in bytes)
	idx_t maximum_memory;
	//! The directory name where temporary files are stored
	string temp_directory;
	//! The lock for the set of blocks
	std::mutex block_lock;
	//! A mapping of block id -> BufferEntry
	unordered_map<block_id_t, BufferEntry *> blocks;
	//! A linked list of buffer entries that are in use
	BufferList used_list;
	//! LRU list of unused blocks
	BufferList lru;
	//! The temporary id used for managed buffers
	block_id_t temporary_id;
};
} // namespace duckdb
|
#
# Copyright (c) 2017 Juniper Networks, Inc. All rights reserved.
#
"""
VNC pod management for kubernetes
"""
import uuid
from vnc_api.vnc_api import *
from config_db import *
from kube_manager.common.kube_config_db import NamespaceKM
from kube_manager.common.kube_config_db import PodKM
from vnc_kubernetes_config import VncKubernetesConfig as vnc_kube_config
class VncPod(object):
    """
    VNC pod management for kubernetes.

    Translates kubernetes pod events into Contrail VNC objects: a
    VirtualMachine per pod, a VirtualMachineInterface on the namespace
    (or cluster) virtual network, an InstanceIp, plus optional floating
    ips and vlan sub-interfaces for isolated and nested deployments.
    """

    def __init__(self, service_mgr, network_policy_mgr):
        self._name = type(self).__name__
        self._vnc_lib = vnc_kube_config.vnc_lib()
        # cache indexing pod uuids by their labels
        self._label_cache = vnc_kube_config.label_cache()
        self._service_mgr = service_mgr
        self._network_policy_mgr = network_policy_mgr
        self._queue = vnc_kube_config.queue()
        # floating-ip pool of the cluster service network (may be None)
        self._service_fip_pool = vnc_kube_config.service_fip_pool()
        self._args = vnc_kube_config.args()
        self._logger = vnc_kube_config.logger()

    def _get_label_diff(self, new_labels, vm):
        """
        Return {'added':..,'removed':..,'changed':..} describing how the
        pod's labels differ from the cached ones, or None if unchanged.
        """
        old_labels = vm.pod_labels
        if old_labels == new_labels:
            return None
        diff = dict()
        added = {}
        removed = {}
        changed = {}
        keys = set(old_labels.keys()) | set(new_labels.keys())
        for k in keys:
            if k not in old_labels.keys():
                added[k] = new_labels[k]
                continue
            if k not in new_labels.keys():
                removed[k] = old_labels[k]
                continue
            if old_labels[k] == new_labels[k]:
                continue
            # value differs: 'changed' records the OLD value for the key
            changed[k] = old_labels[k]
        diff['added'] = added
        diff['removed'] = removed
        diff['changed'] = changed
        return diff

    def _set_label_to_pod_cache(self, new_labels, vm):
        # Index this pod's uuid under each of its labels.
        for label in new_labels.items():
            key = self._label_cache._get_key(label)
            self._label_cache._locate_label(key,
                self._label_cache.pod_label_cache, label, vm.uuid)
        vm.pod_labels = new_labels

    def _clear_label_to_pod_cache(self, vm):
        # Remove this pod's uuid from every label index entry.
        if not vm.pod_labels:
            return
        for label in vm.pod_labels.items() or []:
            key = self._label_cache._get_key(label)
            self._label_cache._remove_label(key,
                self._label_cache.pod_label_cache, label, vm.uuid)
        vm.pod_labels = None

    def _update_label_to_pod_cache(self, new_labels, vm):
        # Replace the cached label index entries for this pod.
        self._clear_label_to_pod_cache(vm)
        self._set_label_to_pod_cache(new_labels, vm)

    def _get_network(self, pod_id, pod_namespace):
        """
        Get network corresponding to this namespace.
        """
        vn_fq_name = None
        if self._is_pod_network_isolated(pod_namespace) == True:
            ns = self._get_namespace(pod_namespace)
            if ns:
                vn_fq_name = ns.get_network_fq_name()
        # If no network was found on the namespace, default to the cluster
        # pod network.
        if not vn_fq_name:
            vn_fq_name = ['default-domain', 'default', 'cluster-network']
        vn_obj = self._vnc_lib.virtual_network_read(fq_name=vn_fq_name)
        return vn_obj

    def _get_namespace(self, pod_namespace):
        return NamespaceKM.find_by_name_or_uuid(pod_namespace)

    def _is_pod_network_isolated(self, pod_namespace):
        return self._get_namespace(pod_namespace).is_isolated()

    def _is_pod_nested(self):
        # Pod is nested if we are configured to run in nested mode.
        return DBBaseKM.is_nested()

    def _get_host_ip(self, pod_name):
        # IP of the node (or underlay VM) hosting this pod, if known.
        pod = PodKM.find_by_name_or_uuid(pod_name)
        if pod:
            return pod.get_host_ip()
        return None

    def _create_iip(self, pod_name, vn_obj, vmi):
        """Allocate an instance-ip for the pod and attach it to the VMI."""
        # Instance-ip for pods are ALWAYS allocated from pod ipam on this
        # VN. Get the subnet uuid of the pod ipam on this VN, so we can
        # request an IP from it.
        vn = VirtualNetworkKM.find_by_name_or_uuid(vn_obj.get_uuid())
        pod_ipam_subnet_uuid = vn.get_ipam_subnet_uuid(
            vnc_kube_config.pod_ipam_fq_name())
        # Create instance-ip.
        iip_obj = InstanceIp(name=pod_name, subnet_uuid=pod_ipam_subnet_uuid)
        iip_obj.add_virtual_network(vn_obj)
        # Creation of iip requires the vmi vnc object.
        vmi_obj = self._vnc_lib.virtual_machine_interface_read(
            fq_name=vmi.fq_name)
        iip_obj.add_virtual_machine_interface(vmi_obj)
        try:
            self._vnc_lib.instance_ip_create(iip_obj)
        except RefsExistError:
            # already present: refresh the existing object instead
            self._vnc_lib.instance_ip_update(iip_obj)
        InstanceIpKM.locate(iip_obj.uuid)
        return iip_obj

    def _get_host_vmi(self, pod_name):
        """Find the VMI of the underlay VM hosting this pod (nested mode)."""
        host_ip = self._get_host_ip(pod_name)
        if host_ip:
            iip = InstanceIpKM.get_object(host_ip)
            if iip:
                for vmi_id in iip.virtual_machine_interfaces:
                    vm_vmi = VirtualMachineInterfaceKM.get(vmi_id)
                    # only a VMI bound to a host qualifies
                    if vm_vmi and vm_vmi.host_id:
                        return vm_vmi
        return None

    def _create_cluster_service_fip(self, pod_name, vmi_uuid):
        """
        Isolated Pods in the cluster will be allocated a floating ip
        from the cluster service network, so that the pods can talk
        to cluster services.
        """
        if not self._service_fip_pool:
            return
        # Construct parent ref.
        fip_pool_obj = FloatingIpPool()
        fip_pool_obj.uuid = self._service_fip_pool.uuid
        fip_pool_obj.fq_name = self._service_fip_pool.fq_name
        fip_pool_obj.name = self._service_fip_pool.name
        # Create Floating-Ip object.
        fip_obj = FloatingIp(name="cluster-svc-fip-%s"% (pod_name),
                             parent_obj=fip_pool_obj,
                             floating_ip_traffic_direction='egress')
        # Creation of fip requires the vmi vnc object.
        vmi_obj = self._vnc_lib.virtual_machine_interface_read(
            id=vmi_uuid)
        fip_obj.set_virtual_machine_interface(vmi_obj)
        try:
            fip_uuid = self._vnc_lib.floating_ip_create(fip_obj)
        except RefsExistError:
            fip_uuid = self._vnc_lib.floating_ip_update(fip_obj)
        # Cached service floating ip.
        FloatingIpKM.locate(fip_uuid)
        return

    def _associate_security_groups(self, vmi_obj, proj_obj, ns=None):
        # Always attach the project default SG; also attach the
        # per-namespace SG when a namespace is given.
        sg_obj = SecurityGroup("default", proj_obj)
        vmi_obj.add_security_group(sg_obj)
        if ns:
            ns_sg_name = "ns-" + ns
            sg_obj = SecurityGroup(ns_sg_name, proj_obj)
            vmi_obj.add_security_group(sg_obj)
        return

    def _create_vmi(self, pod_name, pod_namespace, vm_obj, vn_obj,
                    parent_vmi):
        """Create (or refresh) the pod's virtual-machine-interface."""
        proj_fq_name = ['default-domain', pod_namespace]
        proj_obj = self._vnc_lib.project_read(fq_name=proj_fq_name)
        vmi_prop = None
        if self._is_pod_nested() and parent_vmi:
            # Pod is nested.
            # Allocate a vlan-id for this pod from the vlan space managed
            # in the VMI of the underlay VM.
            parent_vmi = VirtualMachineInterfaceKM.get(parent_vmi.uuid)
            vlan_id = parent_vmi.alloc_vlan()
            vmi_prop = VirtualMachineInterfacePropertiesType(
                sub_interface_vlan_tag=vlan_id)
        obj_uuid = str(uuid.uuid1())
        name = 'pod' + '-' + pod_name
        vmi_obj = VirtualMachineInterface(name=name, parent_obj=proj_obj,
            virtual_machine_interface_properties=vmi_prop)
        vmi_obj.uuid = obj_uuid
        vmi_obj.set_virtual_network(vn_obj)
        vmi_obj.set_virtual_machine(vm_obj)
        self._associate_security_groups(vmi_obj, proj_obj, pod_namespace)
        try:
            vmi_uuid = self._vnc_lib.virtual_machine_interface_create(vmi_obj)
        except RefsExistError:
            vmi_uuid = self._vnc_lib.virtual_machine_interface_update(vmi_obj)
        VirtualMachineInterfaceKM.locate(vmi_uuid)
        return vmi_uuid

    def _create_vm(self, pod_id, pod_name, labels):
        """Create (or read back) the VirtualMachine object for a pod."""
        vm_obj = VirtualMachine(name=pod_name)
        vm_obj.uuid = pod_id
        annotations = {}
        annotations['device_owner'] = 'K8S:POD'
        for key in annotations:
            vm_obj.add_annotations(KeyValuePair(key=key, value=annotations[key]))
        # NOTE(review): relies on json coming in via the wildcard
        # vnc_api import -- confirm, it is not imported here directly
        vm_obj.add_annotations(KeyValuePair(key='labels', value=json.dumps(labels)))
        try:
            self._vnc_lib.virtual_machine_create(vm_obj)
        except RefsExistError:
            vm_obj = self._vnc_lib.virtual_machine_read(id=pod_id)
        vm = VirtualMachineKM.locate(vm_obj.uuid)
        return vm_obj

    def _link_vm_to_node(self, vm_obj, pod_node):
        # Attach the pod VM to the vrouter of its kubernetes node.
        vrouter_fq_name = ['default-global-system-config', pod_node]
        try:
            vrouter_obj = self._vnc_lib.virtual_router_read(fq_name=vrouter_fq_name)
        except Exception as e:
            # vrouter not present (yet): silently skip the link
            return
        self._vnc_lib.ref_update('virtual-router', vrouter_obj.uuid,
            'virtual-machine', vm_obj.uuid, None, 'ADD')
        vm = VirtualMachineKM.get(vm_obj.uuid)
        if vm:
            vm.virtual_router = vrouter_obj.uuid

    def _check_pod_uuid_change(self, pod_uuid, pod_name, pod_namespace):
        # If a VM exists under this pod name but with a different uuid,
        # the pod was recreated: delete the stale VM.
        # NOTE(review): uses LoadbalancerKM for a VM fq_name lookup --
        # looks suspicious, confirm against the intended KM class.
        vm_fq_name = [pod_name]
        vm_uuid = LoadbalancerKM.get_fq_name_to_uuid(vm_fq_name)
        if vm_uuid != pod_uuid:
            self.vnc_pod_delete(vm_uuid)

    def vnc_pod_add(self, pod_id, pod_name, pod_namespace, pod_node, labels,
                    vm_vmi):
        """Create all VNC objects backing a newly seen pod."""
        vm = VirtualMachineKM.get(pod_id)
        if vm:
            # already known: just refresh the label cache
            self._set_label_to_pod_cache(labels, vm)
            return
        if not vm:
            self._check_pod_uuid_change(pod_id, pod_name, pod_namespace)
        vn_obj = self._get_network(pod_id, pod_namespace)
        vm_obj = self._create_vm(pod_id, pod_name, labels)
        vmi_uuid = self._create_vmi(pod_name, pod_namespace, vm_obj, vn_obj,
                                    vm_vmi)
        vmi = VirtualMachineInterfaceKM.get(vmi_uuid)
        if self._is_pod_nested() and vm_vmi:
            # Pod is nested.
            # Link the pod VMI to the VMI of the underlay VM.
            self._vnc_lib.ref_update('virtual-machine-interface', vm_vmi.uuid,
                'virtual-machine-interface', vmi_uuid, None, 'ADD')
            self._vnc_lib.ref_update('virtual-machine-interface', vmi_uuid,
                'virtual-machine-interface', vm_vmi.uuid, None, 'ADD')
            # get host id for vm vmi
            vr_uuid = None
            for vr in VirtualRouterKM.values():
                if vr.name == vm_vmi.host_id:
                    vr_uuid = vr.uuid
                    break
            if not vr_uuid:
                self._logger.error("No virtual-router object found for host: "
                    + vm_vmi.host_id + ". Unable to add VM reference to a"
                    " valid virtual-router")
                return
            self._vnc_lib.ref_update('virtual-router', vr_uuid,
                'virtual-machine', vm_obj.uuid, None, 'ADD')
        self._create_iip(pod_name, vn_obj, vmi)
        if self._is_pod_network_isolated(pod_namespace):
            self._create_cluster_service_fip(pod_name, vmi_uuid)
        self._link_vm_to_node(vm_obj, pod_node)

    def vnc_pod_update(self, pod_id, pod_name, pod_namespace, pod_node, labels,
                       vm_vmi):
        """Update an existing pod; returns the label diff (or None)."""
        label_diff = None
        vm = VirtualMachineKM.get(pod_id)
        if not vm:
            # If the vm is not created yet, do so now.
            self.vnc_pod_add(pod_id, pod_name, pod_namespace,
                             pod_node, labels, vm_vmi)
            vm = VirtualMachineKM.get(pod_id)
        if vm:
            label_diff = self._get_label_diff(labels, vm)
            if not label_diff:
                return label_diff
            self._update_label_to_pod_cache(labels, vm)
        return label_diff

    def vnc_port_delete(self, vmi_id):
        """Delete a VMI together with its instance and floating ips."""
        vmi = VirtualMachineInterfaceKM.get(vmi_id)
        if not vmi:
            return
        for iip_id in list(vmi.instance_ips):
            try:
                self._vnc_lib.instance_ip_delete(id=iip_id)
            except NoIdError:
                pass
        # Cleanup floating ip's on this interface.
        for fip_id in list(vmi.floating_ips):
            try:
                self._vnc_lib.floating_ip_delete(id=fip_id)
            except NoIdError:
                pass
        try:
            self._vnc_lib.virtual_machine_interface_delete(id=vmi_id)
        except NoIdError:
            pass

    def vnc_pod_delete(self, pod_id):
        """Tear down all VNC objects backing a deleted pod."""
        vm = VirtualMachineKM.get(pod_id)
        if not vm:
            return
        self._clear_label_to_pod_cache(vm)
        if vm.virtual_router:
            # detach from the hosting vrouter first
            self._vnc_lib.ref_update('virtual-router', vm.virtual_router,
                'virtual-machine', vm.uuid, None, 'DELETE')
        for vmi_id in list(vm.virtual_machine_interfaces):
            self.vnc_port_delete(vmi_id)
        try:
            self._vnc_lib.virtual_machine_delete(id=pod_id)
        except NoIdError:
            pass

    def _create_pod_event(self, event_type, pod_id, vm_obj):
        # Synthesize a kubernetes-style event and queue it for processing.
        event = {}
        object = {}
        object['kind'] = 'Pod'
        object['metadata'] = {}
        object['metadata']['uid'] = pod_id
        object['metadata']['labels'] = vm_obj.pod_labels
        if event_type == 'delete':
            event['type'] = 'DELETED'
            event['object'] = object
            self._queue.put(event)
        return

    def _sync_pod_vm(self):
        # Queue DELETE events for pod VMs that no longer have a
        # corresponding kubernetes pod.
        vm_uuid_list = list(VirtualMachineKM.keys())
        pod_uuid_list = list(PodKM.keys())
        for uuid in vm_uuid_list:
            if uuid in pod_uuid_list:
                continue
            vm = VirtualMachineKM.get(uuid)
            if not vm:
                continue
            if not vm.annotations:
                continue
            # only act on VMs this manager created (annotated K8S:POD)
            for kvp in vm.annotations['key_value_pair'] or []:
                if kvp['key'] == 'device_owner' \
                   and kvp['value'] == 'K8S:POD':
                    self._create_pod_event('delete', uuid, vm)
                    break
        return

    def pod_timer(self):
        # Periodic reconciliation hook.
        self._sync_pod_vm()
        return

    def process(self, event):
        """Dispatch a kubernetes pod event to add/update/delete handlers."""
        event_type = event['type']
        kind = event['object'].get('kind')
        pod_id = event['object']['metadata'].get('uid')
        pod_name = event['object']['metadata'].get('name')
        pod_namespace = event['object']['metadata'].get('namespace')
        labels = event['object']['metadata'].get('labels', {})
        print("%s - Got %s %s %s:%s"
              %(self._name, event_type, kind, pod_namespace, pod_name))
        self._logger.debug("%s - Got %s %s %s:%s"
            %(self._name, event_type, kind, pod_namespace, pod_name))
        if event['type'] == 'ADDED' or event['type'] == 'MODIFIED':
            # Skip pods that run on the host network -- they get no VMI.
            pod_node = event['object']['spec'].get('nodeName')
            host_network = event['object']['spec'].get('hostNetwork')
            if host_network:
                return
            # If the pod is nested, proceed ONLY if host vmi is found.
            vm_vmi = None
            if self._is_pod_nested():
                vm_vmi = self._get_host_vmi(pod_name)
                if not vm_vmi:
                    return
            if event['type'] == 'ADDED':
                self.vnc_pod_add(pod_id, pod_name, pod_namespace,
                                 pod_node, labels, vm_vmi)
                self._network_policy_mgr.vnc_pod_add(event)
            else:
                label_diff = self.vnc_pod_update(pod_id, pod_name,
                    pod_namespace, pod_node, labels, vm_vmi)
                self._network_policy_mgr.vnc_pod_update(event, label_diff)
        elif event['type'] == 'DELETED':
            self.vnc_pod_delete(pod_id)
            self._network_policy_mgr.vnc_pod_delete(event)
|
# Boot the dummy Rails application in the test environment.
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('dummy/config/environment', __dir__)
require 'rspec/rails'
require 'selenium/webdriver'
require 'webdrivers/chromedriver'

# Run system specs against a quiet Puma server.
Capybara.server = :puma, { Silent: true }

# Load shared spec helpers.
Dir['spec/support/**/*.rb'].each { |f| require File.expand_path(f) }

RSpec.configure do |config|
  # Roll back database changes after each example.
  config.use_transactional_fixtures = true
  config.infer_spec_type_from_file_location!
  # Drive system specs through headless Chrome.
  config.before(:each, type: :system) do
    driven_by :selenium_chrome_headless
  end
end
|
package com.littlejenny.gulimall.rabbitmq;
import com.littlejenny.common.constant.RabbitmqConstants;
import com.littlejenny.gulimall.rabbitmq.entity.User;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
/**
 * Integration test that publishes a message through RabbitMQ.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class RabbitmqTest {

    // Sample payload bean injected from the application context.
    @Autowired
    User u1;

    @Autowired
    private RabbitTemplate rabbitTemplate;

    // Sends the user payload to the delay queue of the stock-handling exchange.
    @Test
    public void send() {
        rabbitTemplate.convertAndSend(RabbitmqConstants.HANDLESTOCK_EXCHANGE,RabbitmqConstants.HANDLESTOCK_DELAY_QUEUE_KEY,u1);
    }
}
<gh_stars>0
# Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Policy engine for keystone."""
import sys
from oslo_log import log
from oslo_policy import policy as common_policy
import keystone.conf
from keystone import exception
from keystone.policy.backends import base
import policy_enforcer as JiwanPolicy
import json
import traceback
import time
CONF = keystone.conf.CONF
LOG = log.getLogger(__name__)
_ENFORCER = None
def reset():
    """Drop the cached policy enforcer so init() rebuilds it on next use."""
    global _ENFORCER
    _ENFORCER = None
def init():
    """Lazily construct the shared oslo.policy enforcer."""
    global _ENFORCER
    if _ENFORCER:
        return
    _ENFORCER = common_policy.Enforcer(CONF)
def enforce(credentials, action, target, do_raise=True):
    """Verify that the action is valid on the target in this context.

    :param credentials: user credentials
    :param action: string naming the action to check; actions should be
        colon separated for clarity, e.g. ``identity:list_users``
    :param target: dictionary representing the object of the action; for
        object creation this is a dictionary with the location of the
        object, e.g. ``{'project_id': object.project_id}``
    :param do_raise: when True, failures raise instead of returning False
    :raises keystone.exception.Forbidden: if verification fails
    """
    init()
    # When raising is requested, tell the enforcer which exception to use.
    kwargs = {}
    if do_raise:
        kwargs = dict(exc=exception.ForbiddenAction, action=action,
                      do_raise=do_raise)
    return _ENFORCER.enforce(action, target, credentials, **kwargs)
#ADDITIONAL CODE FOR ENFORCEMENT
def authorize_aura(credentials, target):
    """Check an AURA attribute policy for a role grant/revoke request.

    Compares administrator attributes from ``credentials`` against
    user/target attributes in ``target`` using the JSON policy file on
    disk.  Debug output uses Python 2 ``print`` statements.

    NOTE(review): as written, the function raises Forbidden
    unconditionally after the policy scan -- the success ``return True``
    is commented out below.  Confirm this deny-everything behavior is
    intended before relying on it.
    """
    print "***********************"
    print "POLICY ENFORCE"
    print "***** TARGET:"
    print target
    print "***** Credentials:"
    print credentials
    print "***********************"
    admin_unit = ['target.admin_unit']
    # attributes of the administrator performing the action
    admin_admin_unit = credentials.get('admin_unit')
    admin_location = credentials.get('location')
    #print admin_location
    admin_admin_roles = credentials.get('admin_roles')
    # attributes of the user receiving (or losing) the role
    user_admin_unit = target.get('target.admin_unit')
    user_location = target.get('target.location')
    #print user_location
    user_user_clearance = target.get('target.user_clearance')
    target_role = target.get('target.role.name')
    # for i in range(11):
    #     try:
    #         admin_attribute+str(i) = credentials.get('attribute'+str(i))
    #         user_attribute+str(i) = credentials.get('target.attribute'+str(i))
    #     except:
    #         continue
    # with open('/opt/stack/keystone/keystone/policy/backends/policy_files/equal_attribute.json') as policy:
    #     policy = json.load(policy)
    #     for i in range(len(policy)):
    #         if True == (target_role in policy[i]["role"]):
    #             if True == (admin_admin_unit in policy[i]["admin"]["admin_unit"]) and True == (user_admin_unit in policy[i]["user"]["admin_unit"]):
    #                 print "ADMIN UNIT IS PRESENT!"
    #             # else:
    #             #     print "admin_unit mismatch"
    #         # else:
    #         #     print "target role not found"
    #     raise exception.Forbidden("Unathorised user role assignment attempt!!")
    with open('/opt/stack/keystone/keystone/policy/backends/attribute_policy.json') as policy:
        policy = json.load(policy)
        for i in range(len(policy)):
            print "ADMIN ROLES: "
            print policy[i]["role"]
            print "Target Role: " + target_role
            #for j in (policy[i]["role"]):
            if True == (target_role in policy[i]["role"]):
                print "target role is present: <<" + target_role + ">>"
                #for j in (policy[i]["role"]):
                #if j == target_role:
                #print "ADMIN_ADMIN_UNIT: <<" + admin_admin_unit + ">> in ROLES:"
                #print policy[i]["admin"]["admin_unit"]
                #print "USER_ADMIN_UNIT: <<" + user_admin_unit + ">> in ROLES:"
                #print policy[i]["user"]["admin_unit"]
                if (True == (admin_admin_unit in policy[i]["admin"]["admin_unit"]) and True == (user_admin_unit in policy[i]["user"]["admin_unit"])):
                    print "ADMIN UNIT IS PRESENT: <<" + admin_admin_unit + ">>"
                    #print "USER ADMIN UNIT IS PRESENT: <<"+ user_admin_unit + ">>"
                    #print "ADMIN LOCATION: <<" + admin_location + ">>, User location: <<" + user_location + ">>"
                    #print "Admin policy location:"
                    #print policy[i]["admin"]["location"]
                    #print "User location policy:"
                    #print policy[i]["user"]["location"]
                    if admin_location in policy[i]["admin"]["location"] and \
                            user_location in policy[i]["user"]["location"]:
                        print "LOCATION IS PRESENT"
                        if (True == (admin_admin_roles in policy[i]["admin"]["admin_roles"])):
                            print "ADMIN ROLES MATCH.."
                            print "**********************************"
                            print "USER CLEARANCE :" + user_user_clearance
                            print policy[i]["user"]["clearance"]
                            print "**********************************"
                            if (True == (user_user_clearance in policy[i]["user"]["clearance"])):
                                print "USER CLEARANCE MATCH"
                                print "getting there......."
                                # print(time.time())
                                # print("**********")
                                # print ("Authorized granted!")
                                # print("**********")
                                # return True
                                # compare each optional numbered attribute pair
                                for j in range(11):
                                    print "-------------------------------------"
                                    print "admin_attribute"+str(j)
                                    print 'user_attribute'+str(j)
                                    print policy[i]["admin"]
                                    print policy[i]["user"]
                                    if "attribute"+str(j) in policy[i]["admin"] and \
                                            "attribute"+str(j) in policy[i]["user"]:
                                        admin_attribute_i = credentials.get('attribute'+str(j))
                                        user_attribute_i = target.get('target.attribute'+str(j))
                                        if admin_attribute_i in policy[i]["admin"]["attribute"+str(j)] and \
                                                user_attribute_i in policy[i]["user"]["attribute"+str(j)]:
                                            print "USER + ADMIN ATTRIBUTE"+str(j)+" ARE VALID"
                                        else:
                                            print "USER + ADMIN ATTRIBUTE"+str(j)+" ARE **NOT** VALID"
                                            print admin_attribute_i
                                            print policy[i]["admin"]["attribute"+str(j)]
                                            print user_attribute_i
                                            print policy[i]["user"]["attribute"+str(j)]
    # NOTE(review): unconditional deny -- indentation reconstructed from a
    # flattened source; confirm the raise belongs at function level.
    raise exception.Forbidden("Unathorised user role assignment attempt!!")
class Policy(base.PolicyDriverBase):
    """Policy driver that runs the standard rule enforcement and adds an
    extra AURA authorization step for role grant/revoke operations.

    Only ``enforce`` is functional; the CRUD methods are unimplemented
    because policies are loaded from files, not stored by this driver.
    """

    def enforce(self, credentials, action, target):
        """Check *credentials* against *action* on *target*.

        :raises exception.Forbidden: when the AURA authorization step
            rejects a grant/revoke request.
        """
        LOG.debug('enforce %(action)s: %(credentials)s',
                  {'action': action, 'credentials': credentials})
        start_time = time.time()
        # Standard rule enforcement first; raises on its own failures.
        enforce(credentials, action, target)
        # AURA authorization only applies to role assignment changes.
        if action in ('identity:create_grant', 'identity:revoke_grant'):
            try:
                authorize_aura(credentials, target)
            # Narrowed from a bare ``except:`` so system-exiting
            # exceptions (KeyboardInterrupt, SystemExit) pass through.
            except Exception:
                # NOTE(review): timing prints look like debug
                # instrumentation; converted to print() calls so the code
                # is valid under both Python 2 and 3.
                print("---------------print time diff ----------------")
                print(time.time() - start_time)
                raise exception.Forbidden(
                    "Unathorised user role assignment attempt!!")
        print("---------------print time diff ----------------")
        print(time.time() - start_time)

    def create_policy(self, policy_id, policy):
        """Unsupported: policies are not stored by this driver."""
        raise exception.NotImplemented()

    def list_policies(self):
        """Unsupported: policies are not stored by this driver."""
        raise exception.NotImplemented()

    def get_policy(self, policy_id):
        """Unsupported: policies are not stored by this driver."""
        raise exception.NotImplemented()

    def update_policy(self, policy_id, policy):
        """Unsupported: policies are not stored by this driver."""
        raise exception.NotImplemented()

    def delete_policy(self, policy_id):
        """Unsupported: policies are not stored by this driver."""
        raise exception.NotImplemented()
|
package codecheck.github.transport
/** Callback interface for asynchronous HTTP transport requests. */
trait CompletionHandler {
  /** Invoked when the request completes and a response is available. */
  def onCompleted(res: Response): Unit
  /** Invoked when the request fails before producing a response. */
  def onThrowable(t: Throwable): Unit
}
|
/*!
* account.js - Multisig wallets' account
* Copyright (c) 2018, The Bcoin Developers (MIT License).
* https://github.com/bcoin-org/bcoin
*/
'use strict';
const assert = require('bsert');
const {Account} = require('bcoin').wallet;
/**
* Account object for multisig
* for formatting.
*/
/**
 * Thin wrapper around a watch-only bcoin multisig account, exposing
 * key/address helpers and JSON formatting.
 */

class MultisigAccount {
  /**
   * Wrap a bcoin account after validating it.
   * @param {bcoin#Account} account
   */

  constructor(account) {
    this.account = null;
    this.fromAccount(account);
  }

  /**
   * Validate and adopt the underlying bcoin account.
   * @param {bcoin#Account} account
   */

  fromAccount(account) {
    assert(account, 'MultisigAccount needs account.');
    assert(account.type === Account.types.MULTISIG,
      'Account needs to be multisig');
    assert(account.watchOnly === true, 'Account needs to be watchOnly');

    this.account = account;
  }

  /**
   * Get all cosigner public keys (account key first).
   * @returns {bcoin#HDPublicKey[]}
   */

  getPublicKeys() {
    return [this.account.accountKey].concat(this.account.keys);
  }

  /**
   * Get current receive address.
   * @returns {Address}
   */

  receiveAddress() {
    const key = this.account.receiveKey();
    return key ? key.getAddress() : null;
  }

  /**
   * Get current change address.
   * @returns {Address}
   */

  changeAddress() {
    const key = this.account.changeKey();
    return key ? key.getAddress() : null;
  }

  /**
   * Get current nested address.
   * @returns {Address}
   */

  nestedAddress() {
    const key = this.account.nestedKey();
    return key ? key.getAddress() : null;
  }

  /** Derive a fresh receive address (delegates to bcoin). */

  createReceive(b) {
    return this.account.createReceive(b);
  }

  /** Derive a fresh change address (delegates to bcoin). */

  createChange(b) {
    return this.account.createChange(b);
  }

  /** Derive a fresh nested address (delegates to bcoin). */

  createNested(b) {
    return this.account.createNested(b);
  }

  /**
   * Derive the key for a path.
   * @param {Path} path
   */

  derivePath(path) {
    return this.account.deriveKey(path.branch, path.index);
  }

  /**
   * Alternate constructor.
   * @param {bcoin#Account} account
   * @returns {MultisigAccount}
   */

  static fromAccount(account) {
    return new MultisigAccount(account);
  }

  /** Debug representation mirrors the JSON form. */

  inspect() {
    return this.toJSON();
  }

  /**
   * Serialize for API responses.
   * @param {Balance} [balance] - optional balance to embed.
   */

  toJSON(balance) {
    const acct = this.account;
    const net = acct.network;
    const recv = acct.receiveAddress();
    const change = acct.changeAddress();
    const nested = acct.nestedAddress();

    return {
      initialized: acct.initialized,
      watchOnly: acct.watchOnly,
      witness: acct.witness,
      receiveDepth: acct.receiveDepth,
      changeDepth: acct.changeDepth,
      nestedDepth: acct.nestedDepth,
      lookahead: acct.lookahead,
      receiveAddress: recv ? recv.toString(net) : null,
      changeAddress: change ? change.toString(net) : null,
      nestedAddress: nested ? nested.toString(net) : null,
      keys: this.getPublicKeys().map(key => key.toBase58(net)),
      balance: balance ? balance.toJSON(true) : null
    };
  }
}
module.exports = MultisigAccount;
|
<filename>server/src/instantiation/executor/commands-map.ts
import { CommandsMapItem } from './commands-map-item';
/**
 * Immutable collection of command-map items consumed by the
 * instantiation executor.
 */
export class CommandsMap {
    // `readonly` parameter property: items are fixed at construction.
    constructor(readonly items: CommandsMapItem[]) {}
}
|
/**
* Octopus module
* @module lib/app
*/
/**
* Required modules
*/
const got = require('got');
const { EOL } = require('os');
const async = require('async');
const { URL } = require('url');
const justify = require('justify');
const prettyMs = require('pretty-ms');
const prependHttp = require('prepend-http');
const cheerioLoad = require('cheerio')['load'];
const differenceBy = require('lodash.differenceby');
const windowWidth = require('term-size')()['columns'];
/**
* App defaults
*/
let config;
let baseUrl;
let baseHost;
let crawledLinks = [];
let inboundLinks = [];
let brokenLinks = [];
/**
* CLI colors
*/
const COLOR_GRAY = '\x1b[90m';
const COLOR_GREEN = '\x1b[32m';
const FORMAT_END = '\x1b[0m';
/**
* App timing
*/
const NS_PER_SEC = 1e9;
const MS_PER_NS = 1e-6;
const executionTime = process.hrtime();
/**
* Blacklisted protocols
*/
const ignoreProtocols = [
'[href^="javascript:"]',
'[href^="mailto:"]',
'[href^="telnet:"]',
'[href^="file:"]',
'[href^="news:"]',
'[href^="tel:"]',
'[href^="ftp:"]',
'[href^="#"]'
];
/**
* Output line length
*/
const maxLength = windowWidth - 20;
/**
* Console streaming
*/
require('draftlog').into(console);
console.stream = console.draft(EOL);
/**
* Magic function for the brokenLinks object
*/
/**
 * Proxy over `brokenLinks`: every broken link assigned through it is
 * recorded, reported on the terminal and (optionally) posted to Slack.
 */
const brokenLinksObserver = new Proxy(brokenLinks, {
  set: function(target, key, value) {
    // Extract variables
    const {requestUrl, referenceUrl, statusMessage, statusCode} = value;
    // Push to object
    target.push(requestUrl);
    // Terminal output
    console.log(
      '%s%s%s%s%s: %s%s%s: %s (%d)%s',
      justify('⚠️', null, 5),
      requestUrl.substr(0, maxLength),
      EOL,
      COLOR_GRAY,
      justify(null, 'APPEARS ON', 14),
      referenceUrl.substr(0, maxLength),
      EOL,
      justify(null,'STATUS MSG', 14),
      statusMessage,
      statusCode,
      FORMAT_END
    );
    // Slack notification
    config['slack-webhook'] && got( config['slack-webhook'], {
      method: 'POST',
      body: JSON.stringify({
        "attachments": [
          {
            "fallback": `Broken url: ${requestUrl}${EOL}Appears on: ${referenceUrl}${EOL}Status msg: ${statusMessage} (${statusCode})`,
            "fields": [
              {
                "title": "Broken url",
                "value": requestUrl,
              },
              {
                "title": "Appears on",
                "value": referenceUrl,
              },
              {
                "title": "Status code",
                "value": statusCode,
                "short": true
              },
              {
                "title": "Status message",
                "value": statusMessage,
                "short": true
              }
            ],
            "color": "danger"
          }
        ]
      })
    } );
    // BUG FIX: a Proxy `set` trap must return true to signal success;
    // returning undefined makes the assignment throw a TypeError when
    // performed from strict-mode code.
    return true;
  }
} );
/**
* Executes the URL request
* @param {String} requestUrl - URL of the requested link
* @param {String} referenceUrl - URL of the reference page
* @param {Function} requestCallback - Callback function
* @returns {Function} Callback function
*/
const request = async (requestUrl, referenceUrl, requestCallback) => {
  // Encode the URL, but skip if it already contains %xx escapes to
  // avoid double-encoding.
  const encodedUrl = requestUrl.match(/%[0-9a-f]{2}/i) ? requestUrl : encodeURI(requestUrl);
  try {
    // Start request
    const response = await got( encodedUrl, {
      timeout: config.timeout,
      headers: {
        'user-agent': 'Octopus'
      }
    } );
    // Extract response data
    const { statusCode, statusMessage, headers, timings, body } = response;
    // NOTE(review): if the server omits the content-type header this is
    // undefined and the startsWith() call below throws — confirm.
    const contentType = headers['content-type'];
    // Parse url
    const parsedUrl = new URL(requestUrl);
    // Links discovered on this page (filled only for internal HTML).
    let pageLinks = [];
    // Update the single-line progress stream unless silenced.
    if ( ! config.silent ) {
      console.stream(
        '%s%s %s(%d ms)%s',
        justify('🤖', null, 4),
        requestUrl.substr(0, maxLength),
        COLOR_GRAY,
        timings['phases'].total,
        FORMAT_END
      );
    }
    // Anything other than 200/204 counts as broken (once per URL).
    if ( ! [200, 204].includes(statusCode) ) {
      if ( ! brokenLinks.includes(requestUrl) ) {
        // Assigning through the proxy records + reports the link.
        brokenLinksObserver[brokenLinks.length] = {
          requestUrl,
          referenceUrl,
          statusCode,
          statusMessage
        };
      }
    // Extract links only from internal HTML pages
    } else if ( parsedUrl.host === baseHost && contentType.startsWith('text/html') ) {
      const $ = cheerioLoad(body);
      // Collect unique anchor hrefs, resolving relative URLs against baseUrl.
      $('a[href]').not( ignoreProtocols.join(',') ).each( (i, elem) => {
        if (elem.attribs.href) {
          const hrefUrl = new URL(elem.attribs.href, baseUrl).href;
          if ( ! pageLinks.includes(hrefUrl) ) {
            pageLinks.push(hrefUrl);
          }
        }
      });
      // Optionally collect image sources as well.
      if ( config['include-images'] ) {
        $('img[src]').each((i, elem) => {
          if (elem.attribs.src) {
            const srcUrl = new URL(elem.attribs.src, baseUrl).href;
            if (!pageLinks.includes(srcUrl)) {
              pageLinks.push(srcUrl);
            }
          }
        });
      }
    }
    // Execute callback
    return requestCallback(requestUrl, pageLinks);
  } catch ( error ) {
    // Add to broken links on request error
    if ( ! brokenLinks.includes(requestUrl) ) {
      const statusCode = error.statusCode || '';
      // NOTE(review): a non-HTTP error (e.g. a TypeError above) may have
      // neither .code nor .statusMessage, making toUpperCase() throw —
      // confirm this path only sees got errors.
      const statusMessage = ( error.code || error.statusMessage ).toUpperCase();
      brokenLinksObserver[brokenLinks.length] = {
        requestUrl,
        referenceUrl,
        statusCode,
        statusMessage
      };
    }
    // Execute callback with no discovered links.
    return requestCallback(requestUrl, []);
  }
};
/**
* Starts the page crawling
* @param {String} crawlUrl - URL of the crawled page
* @param {String} [referenceUrl] - URL of the reference page
* @returns {Promise} Promise object represents the crawling request
*/
const crawl = ( crawlUrl, referenceUrl = '' ) => {
  return request( crawlUrl, referenceUrl, (requestUrl, pageLinks) => {
    // Mark url as crawled
    crawledLinks.push( {
      'requestUrl': requestUrl
    } );
    // Walk the discovered links sequentially, queueing the new ones.
    async.eachSeries( pageLinks, (pageLink, crawlCallback) => {
      // Parse url
      const parsedLink = new URL(pageLink);
      // Queue the link unless it is external (when --ignore-external),
      // carries an ignored query parameter, or is already queued.
      if (
        ( ! config['ignore-external'] || ( config['ignore-external'] && parsedLink.host === baseHost ) ) &&
        ( ! parsedLink.searchParams || ( parsedLink.searchParams && ! config['ignore-query'].filter(query => parsedLink.searchParams.get(query)).length ) ) &&
        ( ! inboundLinks.filter(item => item.requestUrl === pageLink).length )
      ) {
        inboundLinks.push( {
          'referenceUrl': requestUrl,
          'requestUrl': pageLink
        } );
      }
      crawlCallback();
    }, () => {
      // Links queued but not yet crawled.
      const nextUrls = differenceBy( inboundLinks, crawledLinks, 'requestUrl' );
      // NOTE(review): for an array, getOwnPropertyNames includes the
      // 'length' property, so "> 1" means "at least one element" —
      // equivalent to nextUrls.length > 0.
      if ( Object.getOwnPropertyNames(nextUrls).length > 1 ) {
        // Recurse on the next queued link.
        return crawl( nextUrls[0].requestUrl, nextUrls[0].referenceUrl );
      // Nothing to check, log & exit
      } else {
        // Convert the hrtime delta to milliseconds for display.
        const diff = process.hrtime(executionTime);
        const ms = (diff[0] * NS_PER_SEC + diff[1]) * MS_PER_NS;
        console.log(
          '%s%s%s%d %s %s%s',
          EOL,
          COLOR_GREEN,
          justify('✅', null, 3),
          inboundLinks.length,
          'links checked in',
          prettyMs( ms, { compact: true } ),
          FORMAT_END
        );
        process.exit( 0 );
      }
    } );
  } );
};
/**
* Initializes the website crawling
* @param {Object} argv - CLI arguments provided from mri package
* @returns {Promise} Promise object represents the crawling loop
*/
module.exports = (argv) => {
  // Normalize CLI arguments into the module-level config object.
  config = {
    'timeout': Number(argv.timeout),
    'silent': Boolean(argv['silent']),
    // --ignore-query may be given once or repeatedly; always store an array.
    'ignore-query': (Array.isArray(argv['ignore-query']) ? argv['ignore-query'] : Array(argv['ignore-query'])),
    'ignore-external': Boolean(argv['ignore-external']),
    'include-images': Boolean(argv['include-images']),
    'slack-webhook': String(argv['slack-webhook']),
  };
  // Skip nofollow links by extending the ignored-selector list.
  if ( argv['ignore-nofollow'] ) {
    ignoreProtocols.push('[rel~="nofollow"]');
  }
  // First positional argument is the start URL; default to https.
  baseUrl = prependHttp(argv._[0], {https: true});
  baseHost = new URL(baseUrl).host;
  // Kick off the crawl from the base URL.
  return crawl(baseUrl);
};
|
<reponame>sudarshan-1708/UserManagementAPI
import { Test, TestingModule } from '@nestjs/testing';
import { UsersController } from './users.controller';
import { UsersService } from './users.service';
describe('UsersController', () => {
let controller: UsersController;
const mockUserService = {
createUser: jest.fn(dto => {
return {
id : Date.now(),
...dto
}
}),
updateInfoOfUser: jest.fn().mockImplementation((id,dto) => ({
id,
...dto
})),
deleteOneUser : jest.fn(id => {
return 'User deleted'
}),
getAll : jest.fn(()=>{
return 'All user details';
}),
getOneById : jest.fn(id =>{
return 'Single user details with respective id'
})
}
beforeEach(async () => {
const module: TestingModule = await Test.createTestingModule({
controllers: [UsersController],
providers : [UsersService],
}).overrideProvider(UsersService).useValue(mockUserService).compile();
controller = module.get<UsersController>(UsersController);
});
it('should be defined', () => {
expect(controller).toBeDefined();
})
it('should create a new user', () =>{
expect(controller.create({firstName : 'Sudarshan',
lastName : 'Shukla',
emailId : '<EMAIL>',
phoneNumber : 9889057397,
city : 'Pratapgarh',
state : 'UP',
country : 'India',
password : '<PASSWORD>'})).toEqual({
id : expect.any(Number),
firstName : 'Sudarshan',
lastName : 'Shukla',
emailId : '<EMAIL>',
phoneNumber : 9889057397,
city : 'Pratapgarh',
state : 'UP',
country : 'India',
password : '<PASSWORD>'
});
})
it('should update user information', () =>{
const dto = {firstName : 'Sudarshan',
lastName : 'Shukla',
emailId : '<EMAIL>',
phoneNumber : 9889057397,
city : 'Pratapgarh',
state : 'UP',
country : 'India',
password : <PASSWORD>'}
expect(controller.update(1,dto)).toEqual({
id:1,
...dto
})
});
it('should delete a user', () => {
expect(controller.delete(1)).toBe('User deleted')
})
it('should get all user information', () =>{
expect(controller.getUsers()).toEqual('All user details')
})
it('should fetch one user respective to user id', () =>{
expect(controller.getUserById(1)).toBe("Single user details with respective id")
})
});
|
<reponame>seichter/kfusion
#ifndef KFUSION_INTERFACE_OPENNI2
#define KFUSION_INTERFACE_OPENNI2
#include <openni2/OpenNI.h>
// #include <pthread.h>
#include "interface.h"
#include <thread>
// RGBD implementation backed by an OpenNI2 device (e.g. Kinect/Xtion).
// Captures depth and color streams on a background thread.
class OpenNIDevice : public RGBD {
    openni::Device device;                 // underlying OpenNI2 device handle
    openni::VideoStream depth_stream;      // depth frames
    openni::VideoStream color_stream;      // color frames
    bool gotDepth;                         // dirty flag: a new depth frame arrived
    std::thread capture_thread;            // background capture loop
    bool die;                              // set to request capture-loop shutdown
    // RGBD interface
public:
    OpenNIDevice();
    // Open the device and start the streams; returns a status code.
    int open();
    bool available() const { return true; }
    // Pump one capture iteration; returns a status code.
    int update();
    void close();
    // True once shutdown has been requested.
    bool stopped() const { return this->die; }
    // Mark/inspect whether an unconsumed depth frame is pending.
    void setDirty(bool isDirty) { gotDepth = isDirty; }
    bool dirty() const { return gotDepth; }
    void setDepthBuffer();
    openni::VideoStream& getDepthStream();
    virtual ~OpenNIDevice() {}
};
#endif
|
class QuantityRegistry:
    """Simple name -> units lookup table for physical quantities."""

    def __init__(self):
        self.registry = {}

    def registerQuantity(self, name, units):
        """Associate *units* with the quantity *name* (overwrites any prior entry)."""
        self.registry[name] = units

    def getUnits(self, name):
        """Return the units for *name*, or a not-found message."""
        return self.registry.get(name, "Quantity not found")

    def getAllQuantities(self):
        """Return all registered quantity names in insertion order."""
        return list(self.registry)
# Usage: populate a registry with common physical quantities, then query it.
registry = QuantityRegistry()
_QUANTITY_UNITS = [
    ("Voltage", "V"),
    ("Current", "A"),
    ("Momentum", "kg m/s"),
    ("Energy", "J"),
    ("Power", "J/s"),
    ("Mass", "kg"),
    ("Area", "m^2"),
    ("Volume", "m^3"),
    ("Displacement", "m^3/rev"),
    ("Density", "kg/m^3"),
]
for _name, _units in _QUANTITY_UNITS:
    registry.registerQuantity(_name, _units)
print(registry.getUnits("Energy"))  # Output: "J"
print(registry.getUnits("Power"))  # Output: "J/s"
print(registry.getUnits("Time"))  # Output: "Quantity not found"
print(registry.getAllQuantities())  # Output: all registered names, in order
<gh_stars>0
import cv2 as cv
import numpy as np

# Load the Haar cascade used for face detection.
haar_cascade =cv.CascadeClassifier('haar_face.xml')
# Class labels matching the indices produced during training.
people =['Indian','Non-Indian']
# Training artifacts (features/labels saved by the training script).
features =np.load('features.npy',allow_pickle=True)
labels =np.load('labels.npy')
# Restore the trained LBPH face recognizer.
face_recognizer=cv.face.LBPHFaceRecognizer_create()
face_recognizer.read('face_trained.yml')
# Test image to classify.
img =cv.imread(r'F:\Machine learning verzeo\Major Project\dataset\test\1-SS687216.jpg')
gray =cv.cvtColor(img,cv.COLOR_BGR2GRAY)
cv.imshow('Person',gray)
# Track the prediction with the highest confidence value seen so far.
m = -1.0
i = None
#Detect the face in the image
faces_rect =haar_cascade.detectMultiScale(gray,1.1,4)
for (x,y,w,h) in faces_rect:
    faces_roi=gray[y:y+h,x:x+w]
    label,confidence =face_recognizer.predict(faces_roi)
    if i is None:
        i=label
    # NOTE(review): LBPH predict() returns LOWER confidence values for
    # better matches; keeping the maximum here may select the worst
    # match when several faces are detected — confirm intent.
    if confidence>m:
        i=label;
        m=confidence
    print(f'label ={people[i]} with a confidence of {m}')
    cv.putText(img,str(people[i]),(20,20),cv.FONT_HERSHEY_COMPLEX,1.0,(0,255,0),2)
    cv.rectangle(img,(x,y),(x+w,y+h),(0,255,0),1)
cv.imshow('Detected face',img)
cv.waitKey(0)
<reponame>san00/mern_CRUD-app<filename>client/src/VacancyList.test.js<gh_stars>1-10
import React from "react";
import Enzyme, { mount } from "enzyme";
import EnzymeAdapter from "enzyme-adapter-react-16";
import { findByTestAttr } from "../tests/testUtils";
import VacancyList from "./components/VacancyList";
import hookActions from "./actions/hookActions";
Enzyme.configure({ adapter: new EnzymeAdapter() });
const mockGetItems = jest.fn();
/**
* Setup function for ReadAndDelete component
* @returns {ReactWrapper}
*/
const setup = () => {
  // Reset call history so each test sees a clean mock.
  mockGetItems.mockClear();
  // Replace the real DB fetch with the mock before mounting.
  hookActions.getDatabaseItems = mockGetItems;
  // use mount as useEffect is not called on `shallow`
  // https://github.com/enzymejs/enzyme/issues/2086
  return mount(<VacancyList />);
};
test("Component renders without error", () => {
  const wrapper = setup();
  // Located by the data-test attribute on the component root.
  const component = findByTestAttr(wrapper, "component-ReadAndDelete");
  expect(component.length).toBe(1);
});
describe("Database call", () => {
  test("setVacancy gets all db items & updates changes", () => {
    setup();
    // Mounting triggers the useEffect that fetches the items.
    expect(mockGetItems).toHaveBeenCalled();
  });
});
|
<filename>src/test/resources/db/schema.sql
-- Top-level container: one row per project.
CREATE TABLE project (
    project_id VARCHAR(255) NOT NULL PRIMARY KEY,
    description TEXT
);

-- A deployment environment within a project (e.g. dev/stage/prod).
-- Deleting a project cascades to its environments.
CREATE TABLE environment (
    project_id VARCHAR(255) NOT NULL REFERENCES project(project_id) ON DELETE CASCADE,
    environment_id VARCHAR(255) NOT NULL,
    description TEXT,
    -- ACL-related items
    policy_arn VARCHAR(255) NOT NULL,
    roles TEXT DEFAULT '',
    groups TEXT DEFAULT '',
    PRIMARY KEY(project_id, environment_id)
);

-- A named secret scoped to one environment; cascades with its environment.
CREATE TABLE secret (
    project_id VARCHAR(255) NOT NULL,
    environment_id VARCHAR(255) NOT NULL,
    secret_id VARCHAR(255) NOT NULL,
    description TEXT,
    PRIMARY KEY(project_id, environment_id, secret_id),
    FOREIGN KEY(project_id, environment_id) REFERENCES environment(project_id, environment_id) ON DELETE CASCADE
);
|
# Run a reciprocal-best-hit BLAST comparison between two Phytophthora CDS
# FASTA files, writing results one directory up.
cd /storage/home/users/pjt6/phy/orthofinder
# -o names the output after both input files; two BLAST threads.
python /storage/home/users/pjt6/misc_python/BLAST_output_parsing/Blast_RBH_two_fasta_file_evalue.py --threads 2 -o ../GCA_000365525.1_Phyt_para_P1976_V1_cds_from_genomi.fa_GCF_000247585.1_PP_INRA-310_V2_cds_from_genomi.fa GCA_000365525.1_Phyt_para_P1976_V1_cds_from_genomi.fa GCF_000247585.1_PP_INRA-310_V2_cds_from_genomi.fa
# Collect every divisor of `number` (all i in 1..number that divide it evenly).
number = 16
divisors = [candidate for candidate in range(1, number + 1) if number % candidate == 0]
print(f'The divisors of {number} are {divisors}.')
// Abstract
// Abstract base: a generic car with a default price and model name.
class Car {
  constructor() {
    this.price = 10000;
    this.model = 'Car';
  }

  /** @returns {number} the car's price */
  getPrice() {
    return this.price;
  }

  /** @returns {string} the car's model description */
  getDescription() {
    return this.model;
  }
}
// Concrete car: overrides the base price and model.
class Tesla extends Car {
  constructor() {
    super();
    Object.assign(this, { price: 25000, model: 'Tesla' });
  }
}
// Decorator
class Autopilot {
constructor(car) {
this.car = car;
}
getPrice() {
return this.car.getPrice() + 5000;
}
getDescription() {
return `${this.car.getDescription()} with autopilot`;
}
}
// Decorator
class Parktronic {
constructor(car) {
this.car = car;
}
getPrice() {
return this.car.getPrice() + 3000;
}
getDescription() {
return `${this.car.getDescription()} with parktronic`;
}
}
/* INIT: stack both decorators on a Tesla and show the combined result. */
let tesla = new Tesla();
tesla = new Autopilot(tesla);
tesla = new Parktronic(tesla);
// Expected: 33000 'Tesla with autopilot with parktronic'
console.log(tesla.getPrice(), tesla.getDescription());
|
<gh_stars>10-100
'use strict';
// Server method: show the moderator-notes page for a member.
module.exports = function (N, apiPath) {
  // Validate the incoming route parameter.
  N.validate(apiPath, {
    user_hid: { type: 'integer', minimum: 1, required: true }
  });

  // Check permissions
  //
  N.wire.before(apiPath, async function check_permissions(env) {
    if (!env.user_info.is_member) throw N.io.FORBIDDEN;

    // Expose the relevant ACL settings to the client as well.
    let settings = env.res.settings = await env.extras.settings.fetch([
      'can_add_mod_notes',
      'can_delete_mod_notes',
      'mod_notes_edit_max_time'
    ]);

    if (!settings.can_add_mod_notes) throw N.io.NOT_FOUND;
  });

  // Fetch member by 'user_hid'
  //
  N.wire.before(apiPath, function fetch_user_by_hid(env) {
    return N.wire.emit('internal:users.fetch_user_by_hid', env);
  });

  // Fill notes
  //
  N.wire.on(apiPath, async function fill_notes(env) {
    // Newest notes first.
    let notes = env.res.notes = await N.models.users.ModeratorNote.find()
      .where('to').equals(env.data.user._id)
      .sort({ ts: -1 })
      .lean(true);

    // Collect user ids (note authors + the subject) for later expansion.
    //
    env.data.users = env.data.users || [];
    env.data.users = env.data.users.concat(notes.map(x => x.from));
    env.data.users.push(env.data.user._id);

    env.res.user_id = env.data.user._id;
  });

  // Fill head meta
  //
  N.wire.after(apiPath, function fill_head(env) {
    let user = env.data.user;

    env.res.head = env.res.head || {};
    // NOTE(review): the is_member check here is always true — the
    // permissions hook above already rejects non-members.
    env.res.head.title = env.t('title_with_user', { user: env.user_info.is_member ? user.name : user.nick });
  });

  // Fill breadcrumbs
  //
  N.wire.after(apiPath, async function fill_breadcrumbs(env) {
    await N.wire.emit('internal:users.breadcrumbs.fill_root', env);

    let user = env.data.user;

    env.data.breadcrumbs = env.data.breadcrumbs || [];
    env.data.breadcrumbs.push({
      text : env.t('@users.mod_notes.breadcrumbs_title'),
      route : 'users.mod_notes',
      params: { user_hid: user.hid }
    });

    env.res.breadcrumbs = env.data.breadcrumbs;
  });
};
|
import java.util.Arrays;
class BubbleSort
{
void bubbleSort(int nums[])
{
int n = nums.length;
for (int i = 0; i < n-1; i++)
for (int j = 0; j < n-i-1; j++)
if (nums[j] > nums[j+1])
{
// swap temp and nums[i]
int temp = nums[j];
nums[j] = nums[j+1];
nums[j+1] = temp;
}
}
// Method to test above
public static void main(String args[])
{
BubbleSort ob = new BubbleSort();
int nums[] = {7, -5, 3, 2, 1, 0, 45};
System.out.println("Original Array:");
System.out.println(Arrays.toString(nums));
ob.bubbleSort(nums);
System.out.println("Sorted Array");
System.out.println(Arrays.toString(nums));
}
}
|
//
// RxRelay.h
// RxRelay
//
// Created by Wassim on 4/28/17.
// Copyright © 2017 Wassim. All rights reserved.
//
#import <UIKit/UIKit.h>
//! Project version number for RxRelay.
FOUNDATION_EXPORT double RxRelayVersionNumber;
//! Project version string for RxRelay.
FOUNDATION_EXPORT const unsigned char RxRelayVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <RxRelay/PublicHeader.h>
|
<reponame>signalwire/snippets-sentiment-analysis
import os
import requests
import pprint
import uuid
import json
from signalwire.voice_response import VoiceResponse, Say, Gather, Record
from flask import Flask,request
app = Flask(__name__)
# Your Microsoft Cognitive Services Key
subscription_key = os.environ['MICROSOFT_KEY']
def get_sentiment(input_text, input_language):
    """POST *input_text* to the Azure Text Analytics v2 sentiment endpoint
    and return the decoded JSON response.

    Uses the module-level ``subscription_key`` for authentication.
    """
    constructed_url = ('https://eastus2.api.cognitive.microsoft.com'
                       '/text/analytics/v2.0/sentiment')
    headers = {
        'Ocp-Apim-Subscription-Key': subscription_key,
        'Content-type': 'application/json',
        # Unique id so individual requests can be traced server-side.
        'X-ClientTraceId': str(uuid.uuid4())
    }
    # Single-document batch; the API accepts several documents per call.
    body = {
        'documents': [{
            'language': input_language,
            'id': '1',
            'text': input_text
        }]
    }
    response = requests.post(constructed_url, headers=headers, json=body)
    return response.json()
@app.route('/voice_entry', methods=['GET','POST'])
def voice_entry():
    """Entry point for inbound calls: prompt the caller to speak a phrase
    and hand the speech result to the /sentiment route."""
    response = VoiceResponse()
    # Prompt user; speech result is POSTed to /sentiment when done.
    gather = Gather(action='/sentiment', input='speech', speechTimeout="auto", timeout="10", method='GET')
    # Append say to gather to produce TTS
    gather.say("Please say a phrase or statement, and we will than analyze the verbiage and tell you the sentiment. ")
    # Append the gather
    response.append(gather)
    # Hangup the call
    response.hangup()
    return str(response)
@app.route('/sentiment', methods=['GET','POST'])
def sentiment():
    """Score the caller's speech-recognition result for sentiment and
    speak the numeric score plus a human-friendly label back."""
    input_text = request.values.get("SpeechResult")
    input_lang = "en"

    sentimentResponse = get_sentiment(input_text, input_lang)
    # Round the score to two digits for readability.
    score = round(sentimentResponse['documents'][0]['score'], 2)

    response = VoiceResponse()

    # Label boundaries, highest first; the first threshold the score
    # exceeds wins. Anything <= 0.2 falls through to the default.
    _LABELS = (
        (.9, "Extremely Happy"),
        (.8, "Very Happy"),
        (.7, "Happy"),
        (.6, "Slightly Happy"),
        (.5, "Middle of Road"),
        (.4, "Slightly Unhappy"),
        (.3, "Unhappy"),
        (.2, "Very Unhappy"),
    )
    sentimentText = "Extremely Unhappy"
    for cutoff, label in _LABELS:
        if score > cutoff:
            sentimentText = label
            break

    # Return result to user
    response.say("The sentiment score was " + str(score) + " which indicates user was " + sentimentText)
    return str(response)
# Default route: simple liveness check for the service.
@app.route('/')
def hello():
    """Return a static greeting confirming the app is up."""
    return "Hello World!"
# List on all interfaces
if __name__ == '__main__':
app.run(host="0.0.0.0")
|
// The beginning of everything: the famous `Hello World`
var saudacao = `Hello World`;
console.log(saudacao);
# Args: 1) assembly file  2) polished assembly  3) contig FASTA
assembly=$1
polished_assembly=$2
contig_fasta=$3
# Reference genome the chopped contigs are aligned against.
reference='/work/ryought/hic/celegans/reference/vc2010.draft-20180405.pilon.fasta'
# chop contig fasta by assembly
python /work/ryought/hi-c-assembly/hic_hiker/tools/chop_contigs.py $assembly $contig_fasta $assembly.chopped.fasta
# align to reference
bwa mem -t 32 $reference $assembly.chopped.fasta > $assembly.chopped.sam
# run benchmark for aseembly
python /work/ryought/hi-c-assembly/hic_hiker/tools/calc_accuracy_local.py $contig_fasta $assembly.chopped.sam $assembly $polished_assembly > $assembly.chopped.bench
|
import numpy as np
def evaluate_error(X, y, w):
    """Return the mean squared error of the linear model *w* on (X, y).

    A bias column of ones is prepended to X, so w[0] acts as the
    intercept and w[1:] are the feature weights.
    """
    n_samples = X.shape[0]
    design = np.hstack((np.ones((n_samples, 1)), X))
    residuals = design @ w - y
    return np.mean(residuals ** 2)
<reponame>yota4649/Grove_Sample
module.exports = {
henry: {
metrics: {
'query-resolution-time': 'PT0.002967S',
'snippet-resolution-time': 'PT0.005458S',
'total-time': 'PT0.010867S'
},
'page-length': 10,
qtext: 'henry',
facets: {
EyeColor: {
type: 'xs:string',
facetValues: [
{
name: 'brown',
count: 60,
value: 'brown'
},
{
name: 'blue',
count: 30,
value: 'blue'
}
]
}
},
results: [
{
confidence: 0.6000114,
fitness: 0.7944844,
format: 'json',
href: '/v1/documents?uri=%2Fsample%2Fpeople%2F%2Fdata-1482.json',
index: 1,
matches: [
{
'match-text': [
'Elma ',
{
highlight: 'Henry'
}
],
path: "fn:doc('/sample/people//data-1482.json')/text('name')"
},
{
'match-text': [
'Hello, Elma ',
{
highlight: 'Henry'
},
'! You have 4 unread messages.'
],
path: "fn:doc('/sample/people//data-1482.json')/text('greeting')"
}
],
mimetype: 'application/json',
path: "fn:doc('/sample/people//data-1482.json')",
score: 115456,
uri: '/sample/people//data-1482.json'
},
{
confidence: 0.6000114,
fitness: 0.7944844,
format: 'json',
href: '/v1/documents?uri=%2Fsample%2Fmanufacturing%2F%2Fdata-1482.json',
index: 2,
matches: [
{
'match-text': [
'Elma ',
{
highlight: 'Henry'
}
],
path: "fn:doc('/sample/manufacturing//data-1482.json')/text('name')"
},
{
'match-text': [
'Hello, Elma ',
{
highlight: 'Henry'
},
'! You have 4 unread messages.'
],
path:
"fn:doc('/sample/manufacturing//data-1482.json')/text('greeting')"
}
],
mimetype: 'application/json',
path: "fn:doc('/sample/manufacturing//data-1482.json')",
score: 115456,
uri: '/sample/manufacturing//data-1482.json'
}
],
'snippet-format': 'snippet',
start: 1,
total: 2
},
henryPageTwo: {
metrics: {
'query-resolution-time': 'PT0.002967S',
'snippet-resolution-time': 'PT0.005458S',
'total-time': 'PT0.010867S'
},
'page-length': 10,
qtext: 'henry',
facets: {
EyeColor: {
type: 'xs:string',
facetValues: [
{
name: 'brown',
count: 60,
value: 'brown'
},
{
name: 'blue',
count: 30,
value: 'blue'
}
]
}
},
results: [
{
confidence: 0.6000114,
fitness: 0.7944844,
format: 'json',
href: '/v1/documents?uri=%2Fsample%2Fpeople%2F%2Fdata-1482.json',
index: 1,
matches: [
{
'match-text': [
'Elma ',
{
highlight: 'Henry'
}
],
path: "fn:doc('/sample/people//data-1482.json')/text('name')"
},
{
'match-text': [
'Hello, Elma ',
{
highlight: 'Henry'
},
'! You have 4 unread messages.'
],
path: "fn:doc('/sample/people//data-1482.json')/text('greeting')"
}
],
mimetype: 'application/json',
path: "fn:doc('/sample/people//data-1482.json')",
score: 115456,
uri: '/sample/people//data-1482.json'
},
{
confidence: 0.6000114,
fitness: 0.7944844,
format: 'json',
href: '/v1/documents?uri=%2Fsample%2Fmanufacturing%2F%2Fdata-1482.json',
index: 2,
matches: [
{
'match-text': [
'Elma ',
{
highlight: 'Henry'
}
],
path: "fn:doc('/sample/manufacturing//data-1482.json')/text('name')"
},
{
'match-text': [
'Hello, Elma ',
{
highlight: 'Henry'
},
'! You have 4 unread messages.'
],
path:
"fn:doc('/sample/manufacturing//data-1482.json')/text('greeting')"
}
],
mimetype: 'application/json',
path: "fn:doc('/sample/manufacturing//data-1482.json')",
score: 115456,
uri: '/sample/manufacturing//data-1482.json'
}
],
'snippet-format': 'snippet',
start: 11,
total: 12
}
};
|
from django.conf import settings
from django.db import models
from django.contrib.auth import get_user_model
USER_MODEL = settings.AUTH_USER_MODEL
class CommonCms(AbstractPermalink):
    """CMS entry owned by a user and addressable via a unique permalink."""

    user = models.ForeignKey(USER_MODEL, on_delete=models.CASCADE)
    content = models.TextField()
    permalink = models.CharField(max_length=100, unique=True)

    def create_permalink(self, content):
        """Set *content*, derive a fresh unique permalink, and save."""
        self.content = content
        # BUG FIX: generate_unique_permalink is an instance method, not a
        # module-level function -- calling it unqualified raised NameError.
        self.permalink = self.generate_unique_permalink(content)
        self.save()

    def generate_unique_permalink(self, content):
        """Return a unique permalink derived from *content*.

        Not implemented yet; implementations should slugify the content
        and de-duplicate against existing rows before returning.
        """
        pass

    def update_permalink(self, new_content):
        """Replace the content, regenerate the permalink, and save."""
        self.content = new_content
        # BUG FIX: same unqualified call as in create_permalink.
        self.permalink = self.generate_unique_permalink(new_content)
        self.save()

    def delete_permalink(self):
        """Delete this row (and with it its permalink)."""
        self.delete()
(function () {
    'use strict';

    /**
     * Value object describing a BLE beacon advertisement.
     * Numeric fields are coerced; missing fields default to ''/0.
     * @param {Object} [beaconObject] raw fields (uuid, major, minor, rssi, txPower)
     */
    function Beacon(beaconObject) {
        beaconObject = beaconObject || {};
        this.uuid = beaconObject.uuid || '';
        this.major = Number(beaconObject.major) || 0;
        this.minor = Number(beaconObject.minor) || 0;
        this.rssi = Number(beaconObject.rssi || 0);
        this.txPower = Number(beaconObject.txPower || 0);
    }

    Beacon.prototype = Object.create(Object.prototype, {
        equals: {
            enumerable: false, writable: false, configurable: false,
            // Identity is (uuid, major, minor); rssi/txPower are ignored.
            value: function equals(beacon) {
                if (beacon === null || typeof beacon === 'undefined') return false;
                if (beacon === this) return true;
                return beacon.uuid == this.uuid && beacon.major == this.major && beacon.minor == this.minor;
            }
        },
        toJSON: {
            enumerable: false, writable: false, configurable: false,
            value: function toJSON() {
                // BUG FIX: JSON.stringify(this) invokes this very toJSON
                // method again, recursing until the stack overflows.
                // Serialize a plain copy of the data fields instead.
                return JSON.stringify({
                    uuid: this.uuid,
                    major: this.major,
                    minor: this.minor,
                    rssi: this.rssi,
                    txPower: this.txPower
                });
            }
        },
        toString: {
            enumerable: false, writable: false, configurable: false,
            value: function toString() {
                return 'Beacon(' + [ this.uuid, this.major, this.minor ].join(', ') + ')';
            }
        }
    });

    module.exports = Beacon;
})();
package com.magic.szh.cnf_168p2p.content.login;
import android.os.Bundle;
import android.os.SystemClock;
import android.support.annotation.Nullable;
import android.support.design.widget.TextInputEditText;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.telephony.PhoneNumberFormattingTextWatcher;
import android.telephony.PhoneNumberUtils;
import android.telephony.TelephonyManager;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.util.Patterns;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import com.magic.szh.Magic;
import com.magic.szh.cnf_168p2p.R;
import com.magic.szh.cnf_168p2p.api.response.ResponseLoginByPhone;
import com.magic.szh.cnf_168p2p.api.response.ResponseUserInfo;
import com.magic.szh.cnf_168p2p.api.url.Api;
import com.magic.szh.cnf_168p2p.base.BaseFragment;
import com.magic.szh.cnf_168p2p.content.HomeActivity;
import com.magic.szh.cnf_168p2p.shared_preference.Constant;
import com.magic.szh.net.RestClient;
import com.magic.szh.net.callback.ISuccess;
import com.magic.szh.util.storage.MagicPreference;
import butterknife.BindView;
import butterknife.OnClick;
/**
* project: CNF_168p2p
* package: com.magic.szh.cnf_168p2p.content.login
* file: LoginPhoneFragment
* author: admin
* date: 2018/3/5
* description: 手机登录 fragment
*/
public class LoginPhoneFragment extends BaseFragment {

    @BindView(R.id.text_login_phone)
    TextView mTextPhone;
    @BindView(R.id.edit_phone)
    TextInputEditText mEditPhone;
    @BindView(R.id.edit_password)
    TextInputEditText mEditPassword;
    @BindView(R.id.tool_bar)
    Toolbar mToolbar;

    /**
     * Entry-point identifier code, see {@link LoginActivity}.
     */
    private int mLoginEntrance;

    @Override
    public Object setLayout() {
        return R.layout.fragment_login_phone;
    }

    @Override
    public void onBindView(@Nullable Bundle savedInstanceState, View rootView) {
        initToolBar();
        initData();
        initLayout();
    }

    /**
     * Initializes the toolbar as the hosting activity's action bar with an
     * "up" (back) affordance.
     */
    private void initToolBar() {
        if (getActivity() != null) {
            AppCompatActivity activity = (AppCompatActivity) getActivity();
            activity.setSupportActionBar(mToolbar);
            // getSupportActionBar() is @Nullable; guard before configuring it.
            if (activity.getSupportActionBar() != null) {
                activity.getSupportActionBar().setDisplayHomeAsUpEnabled(true);
                activity.getSupportActionBar().setHomeButtonEnabled(true);
            }
            setHasOptionsMenu(true);
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                // getActivity() is @Nullable while detached; guard the finish().
                if (getActivity() != null) {
                    getActivity().finish();
                }
                return true;
            default:
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Reads the launch arguments. Falls back to entrance code 0 when the
     * fragment was instantiated without arguments — previously this called
     * {@code getArguments().getInt(...)} unconditionally and would NPE.
     */
    private void initData() {
        Bundle arguments = getArguments();
        mLoginEntrance = arguments == null
                ? 0
                : arguments.getInt(LoginActivity.KEY_LOGIN_ENTRANCE, 0);
    }

    /**
     * Initializes the layout: pre-fills the last used phone number (if any)
     * and mirrors the phone input into the formatted header text view.
     */
    private void initLayout() {
        String telephone = MagicPreference.getString(Constant.USER_USER_NAME, null);
        if (TextUtils.isEmpty(telephone)) {
            mTextPhone.setVisibility(View.GONE);
        } else {
            mTextPhone.setText(phoneNumberFormat(telephone));
            mEditPhone.setText(telephone);
            mTextPhone.setVisibility(View.VISIBLE);
        }
        mEditPhone.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                mTextPhone.setText(phoneNumberFormat(s.toString()));
            }

            @Override
            public void afterTextChanged(Editable s) {
                // Hide the header while the field is empty, show it otherwise.
                if (s.length() == 0) {
                    if (mTextPhone.getVisibility() == View.VISIBLE)
                        mTextPhone.setVisibility(View.GONE);
                } else {
                    if (mTextPhone.getVisibility() == View.GONE)
                        mTextPhone.setVisibility(View.VISIBLE);
                }
            }
        });
    }

    /**
     * Validates the login form locally.
     *
     * @return {@code true} when both phone number and password pass validation
     */
    private boolean checkForm() {
        String phone = mEditPhone.getText().toString().trim();
        String password = mEditPassword.getText().toString().trim();
        if (phone.isEmpty()) {
            Toast.makeText(getContext(), "手机号码不能为空", Toast.LENGTH_SHORT).show();
            return false;
        }
        if (!Patterns.PHONE.matcher(phone).matches()) {
            Toast.makeText(getContext(), "请输入正确的手机号码", Toast.LENGTH_SHORT).show();
            return false;
        }
        if (password.isEmpty()) {
            Toast.makeText(getContext(), "密码不能为空", Toast.LENGTH_SHORT).show();
            return false;
        }
        if (password.length() < 6 || password.length() > 16) {
            Toast.makeText(getContext(), "请输入6~16位字符密码", Toast.LENGTH_SHORT).show();
            return false;
        }
        return true;
    }

    /**
     * Sign-in button click handler.
     */
    @OnClick(R.id.button_sign_in)
    void clickSignIn() {
        if (checkForm()) {
            String phone = mEditPhone.getText().toString().trim();
            String password = mEditPassword.getText().toString().trim();
            startSignUp(phone, password);
        }
    }

    /**
     * Calls the phone-login endpoint and, on success, stores the session and
     * username before fetching the user profile.
     *
     * @param phone    phone number
     * @param password password
     */
    private void startSignUp(String phone, String password) {
        RestClient.builder()
                .url(Api.POST_LOGIN_BY_PHONE_NUMBER)
                .params("phone", phone)
                .params("password", password)
                .success(response -> {
                    ResponseLoginByPhone json = ResponseLoginByPhone.getInstance(response);
                    if (json.getCode() == 200) {
                        // TODO redirect on success
                        MagicPreference.putString(Constant.SESSION_ID, json.getSessionId());
                        MagicPreference.putString(Constant.USER_USER_NAME, phone);
                        LoginActivity.setLoginConfig();
                        // Fetch the user profile before redirecting.
                        getUserInfo();
                    } else {
                        Toast.makeText(getContext(), json.getMsg(), Toast.LENGTH_SHORT).show();
                    }
                })
                .build()
                .post();
    }

    /**
     * Requests the signed-in user's profile, then redirects to the page the
     * user originally came from.
     */
    private void getUserInfo() {
        RestClient.builder()
                .url(Api.GET_USER_INFORMATION)
                .success(response -> {
                    ResponseUserInfo json = ResponseUserInfo.getInstance(response);
                    redirect(mLoginEntrance);
                })
                .build()
                .get();
    }

    /**
     * Sign-up button click handler (not implemented yet).
     */
    @OnClick(R.id.button_sign_up)
    void clickSignUp() {
    }

    /**
     * "Forgot password" click handler (not implemented yet).
     */
    @OnClick(R.id.text_forget_button)
    void clickForgetPasswordView() {
    }

    /**
     * Switch-login-method click handler (not implemented yet).
     */
    @OnClick(R.id.text_show_sign_in_type)
    void clickChangeSignInType() {
    }

    /**
     * Formats a phone number for display as {@code xxx-xxxx-xxxx}.
     *
     * @param string the raw phone number
     * @return the dash-separated phone number
     */
    private String phoneNumberFormat(String string) {
        StringBuilder phoneBuilder = new StringBuilder();
        if (string.length() <= 3) {
            phoneBuilder.append(string);
        } else if (string.length() <= 7) {
            phoneBuilder
                    .append(string.subSequence(0, 3))
                    .append("-")
                    .append(string.substring(3));
        } else {
            phoneBuilder
                    .append(string.subSequence(0, 3))
                    .append("-")
                    .append(string.substring(3, 7))
                    .append("-")
                    .append(string.substring(7));
        }
        return phoneBuilder.toString();
    }

    /**
     * Redirects to the home activity with the tab matching the login entrance.
     */
    private void redirect(int entrance) {
        int homeModuleCurrentPage;
        switch (entrance) {
            case LoginActivity.TYPE_INVESTMENT:
                homeModuleCurrentPage = HomeActivity.TAB_INVESTMENT;
                break;
            case LoginActivity.TYPE_ACCOUNT:
                homeModuleCurrentPage = HomeActivity.TAB_ACCOUNT;
                break;
            case LoginActivity.TYPE_FORUM:
                homeModuleCurrentPage = HomeActivity.TAB_FORUM;
                break;
            case LoginActivity.TYPE_MORE:
                homeModuleCurrentPage = HomeActivity.TAB_MORE;
                break;
            case LoginActivity.TYPE_LAUNCHER:
            default:
                homeModuleCurrentPage = HomeActivity.TAB_HOME;
        }
        HomeActivity.startHomeActivity(getContext(), homeModuleCurrentPage);
    }
}
|
package org.bf2.cos.fleetshard.sync.client;
import java.util.List;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import org.bf2.cos.fleet.manager.model.ConnectorDeployment;
import org.bf2.cos.fleetshard.api.ManagedConnector;
import org.bf2.cos.fleetshard.api.ManagedConnectorCluster;
import org.bf2.cos.fleetshard.api.ManagedConnectorClusterBuilder;
import org.bf2.cos.fleetshard.api.ManagedConnectorClusterSpecBuilder;
import org.bf2.cos.fleetshard.api.ManagedConnectorOperator;
import org.bf2.cos.fleetshard.api.Operator;
import org.bf2.cos.fleetshard.support.resources.Clusters;
import org.bf2.cos.fleetshard.support.resources.Connectors;
import org.bf2.cos.fleetshard.support.resources.Resources;
import org.bf2.cos.fleetshard.support.resources.Secrets;
import org.bf2.cos.fleetshard.support.watch.Informers;
import org.bf2.cos.fleetshard.sync.FleetShardSyncConfig;
import io.fabric8.kubernetes.api.model.DeletionPropagation;
import io.fabric8.kubernetes.api.model.ObjectMetaBuilder;
import io.fabric8.kubernetes.api.model.Secret;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.informers.SharedIndexInformer;
@ApplicationScoped
public class FleetShardClient {

    @Inject
    KubernetesClient kubernetesClient;
    @Inject
    FleetShardSyncConfig config;

    // Watches ManagedConnector resources for this cluster; null until start().
    private volatile SharedIndexInformer<ManagedConnector> informer;

    /**
     * Starts the shared informer over this cluster's ManagedConnector
     * resources. Must be called before any informer-backed lookup.
     */
    public void start() {
        informer = kubernetesClient.resources(ManagedConnector.class)
            .inNamespace(getConnectorsNamespace())
            .withLabel(Resources.LABEL_CLUSTER_ID, getClusterId())
            .inform();
    }

    /** Stops the shared informer if it was started. */
    public void stop() {
        if (informer != null) {
            informer.stop();
        }
    }

    /**
     * Returns the informer, failing fast with a descriptive error when
     * {@link #start()} has not been called. Previously the lookup methods
     * duplicated this check (with a message that only made sense for event
     * handlers) and {@link #getMaxDeploymentResourceRevision()} skipped it
     * entirely, producing a bare NullPointerException.
     */
    private SharedIndexInformer<ManagedConnector> requireInformer() {
        if (informer == null) {
            throw new IllegalStateException("Informer must be started before the client can be used");
        }
        return informer;
    }

    public String getConnectorsNamespace() {
        return config.connectors().namespace();
    }

    public String getClusterId() {
        return config.cluster().id();
    }

    public KubernetesClient getKubernetesClient() {
        return kubernetesClient;
    }

    /**
     * Returns the highest deployment resource revision seen across all cached
     * connectors, or 0 when the cache is empty.
     */
    public long getMaxDeploymentResourceRevision() {
        return requireInformer().getIndexer().list().stream()
            .mapToLong(c -> c.getSpec().getDeployment().getDeploymentResourceVersion())
            .max()
            .orElse(0);
    }

    // *************************************
    //
    // Secrets
    //
    // *************************************

    public Optional<Secret> getSecret(ConnectorDeployment deployment) {
        return getSecretByDeploymentId(deployment.getId());
    }

    public Secret createSecret(Secret secret) {
        return this.kubernetesClient.secrets()
            .inNamespace(getConnectorsNamespace())
            .createOrReplace(secret);
    }

    public Optional<Secret> getSecretByDeploymentId(String deploymentId) {
        return Optional.ofNullable(
            kubernetesClient.secrets()
                .inNamespace(getConnectorsNamespace())
                .withName(Secrets.generateConnectorSecretId(deploymentId))
                .get());
    }

    // *************************************
    //
    // Connectors
    //
    // *************************************

    /**
     * Deletes the connector with foreground propagation so dependents are
     * removed before the owner.
     */
    public Boolean deleteConnector(ManagedConnector managedConnector) {
        return kubernetesClient.resources(ManagedConnector.class)
            .inNamespace(getConnectorsNamespace())
            .withName(managedConnector.getMetadata().getName())
            .withPropagationPolicy(DeletionPropagation.FOREGROUND)
            .delete();
    }

    /** Looks a connector up by name in the informer's local cache. */
    public Optional<ManagedConnector> getConnectorByName(String name) {
        final String key = getConnectorsNamespace() + "/" + name;
        final ManagedConnector val = requireInformer().getIndexer().getByKey(key);
        return Optional.ofNullable(val);
    }

    public Optional<ManagedConnector> getConnectorByDeploymentId(String deploymentId) {
        return getConnectorByName(Connectors.generateConnectorId(deploymentId));
    }

    public Optional<ManagedConnector> getConnector(ConnectorDeployment deployment) {
        return getConnectorByName(Connectors.generateConnectorId(deployment.getId()));
    }

    /** Returns all connectors currently in the informer's local cache. */
    public List<ManagedConnector> getAllConnectors() {
        return requireInformer().getIndexer().list();
    }

    /** Registers an event handler invoked for connector add/update/delete. */
    public void watchConnectors(Consumer<ManagedConnector> handler) {
        requireInformer().addEventHandler(Informers.wrap(handler));
    }

    public ManagedConnector createConnector(ManagedConnector connector) {
        return kubernetesClient.resources(ManagedConnector.class)
            .inNamespace(getConnectorsNamespace())
            .createOrReplace(connector);
    }

    /** Applies the editor to the named connector server-side. */
    public ManagedConnector editConnector(String name, Consumer<ManagedConnector> editor) {
        return kubernetesClient.resources(ManagedConnector.class)
            .inNamespace(getConnectorsNamespace())
            .withName(name)
            .accept(editor);
    }

    // *************************************
    //
    // Operators
    //
    // *************************************

    /** Lists the connector operators deployed in the connectors namespace. */
    public List<Operator> lookupOperators() {
        return kubernetesClient.resources(ManagedConnectorOperator.class)
            .inNamespace(this.getConnectorsNamespace())
            .list()
            .getItems()
            .stream()
            .map(mco -> new Operator(
                mco.getMetadata().getName(),
                mco.getSpec().getType(),
                mco.getSpec().getVersion()))
            .collect(Collectors.toList());
    }

    // *************************************
    //
    // Cluster
    //
    // *************************************

    public Optional<ManagedConnectorCluster> getConnectorCluster() {
        return Optional.ofNullable(
            kubernetesClient.resources(ManagedConnectorCluster.class)
                .inNamespace(getConnectorsNamespace())
                .withName(Clusters.CONNECTOR_CLUSTER_PREFIX + "-" + getClusterId())
                .get());
    }

    /**
     * Returns the ManagedConnectorCluster for this cluster id, creating it
     * when it does not exist yet.
     */
    public ManagedConnectorCluster getOrCreateManagedConnectorCluster() {
        return getConnectorCluster().orElseGet(() -> {
            var cluster = new ManagedConnectorClusterBuilder()
                .withMetadata(new ObjectMetaBuilder()
                    .withName(Clusters.CONNECTOR_CLUSTER_PREFIX + "-" + getClusterId())
                    .addToLabels(Resources.LABEL_CLUSTER_ID, getClusterId())
                    .build())
                .withSpec(new ManagedConnectorClusterSpecBuilder()
                    .withClusterId(getClusterId())
                    .build())
                .build();

            return kubernetesClient.resources(ManagedConnectorCluster.class)
                .inNamespace(getConnectorsNamespace())
                .withName(cluster.getMetadata().getName())
                .createOrReplace(cluster);
        });
    }
}
|
import copy
from direct.directnotify import DirectNotifyGlobal
from pirates.util.PythonUtilPOD import POD
from pirates.util.PythonUtil import makeTuple
from direct.task.Task import Task
from pirates.piratesbase import PLocalizer
from pirates.quest import QuestDB, QuestReward, QuestTaskDNA
from pirates.quest.QuestDNA import QuestDNA
from otp.otpbase import OTPGlobals
from pirates.piratesbase import Freebooter
class Quest(POD):
    """A single quest instance held by an avatar (client/AI shared).

    Wraps a QuestDNA (the static definition from QuestDB) together with the
    per-avatar task states, rewards and completion flags.

    NOTE(review): this module appears to be decompiled bytecode -- several
    oddities below (no-op expression statements, stray ``continue``
    statements, ``raise`` applied to a string) look like decompiler
    artifacts. They are documented but deliberately left untouched.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('Quest')
    # POD attribute schema: name -> default value.
    # NOTE(review): the [] default for 'taskStates' is a shared mutable;
    # presumably POD copies defaults per instance -- confirm in PythonUtilPOD.
    DataSet = {
        'questId': None,
        'giverId': None,
        'combineOp': None,
        'tasks': None,
        'rewards': None,
        'taskStates': [] }
    # Class-wide counter used to hand each Quest a unique serial number.
    SerialNum = 0

    def __init__(self, questId = None, giverId = None, initialTaskStates = None, rewards = None):
        self.questDNA = None
        # Grab-and-increment the class counter for a unique per-process id.
        self._serialNum = Quest.SerialNum
        Quest.SerialNum += 1
        POD.__init__(self)
        if questId is not None:
            self.setupQuest(questId, giverId, initialTaskStates, rewards)
        # Name-mangled flags (decompiled form of self.__finished etc.).
        self._Quest__finished = False
        self._Quest__finalized = False
        self._Quest__timedOut = False
        self._Quest__timeRemaining = 0

    def destroy(self):
        # Release task-state refcounts and drop references for GC.
        del self.questDNA
        del self.tasks
        del self.rewards
        for taskState in self.taskStates:
            taskState.release()
        del self.taskStates

    def setupQuest(self, questId, giverId, initialTaskStates, rewards):
        """Populate this quest from its id, giver, task states and rewards."""
        self.setQuestId(questId)
        self.setGiverId(giverId)
        self.setRewards(rewards)
        self.sendTaskStates(initialTaskStates)

    def setQuestId(self, questId):
        """Set the quest id and resolve its DNA from QuestDB (or None)."""
        self.questId = questId
        if questId not in (None, ''):
            self.questDNA = QuestDB.QuestDict.get(self.questId)
            if self.questDNA:
                self.questDNA.makeCopy()
                self.setCombineOp(self.questDNA.getCombineOp())
                self.setTasks(self.questDNA.getTasks())
                # NOTE(review): marks timed quests as already timed out at
                # setup time; presumably cleared elsewhere when the timer
                # starts -- confirm against the quest timer code.
                if self.questDNA.getTimeLimit():
                    self._Quest__timedOut = True
        else:
            self.questDNA = None

    def getQuestDNA(self):
        return self.questDNA

    def getQuestGoalUid(self):
        # Returns the goal uid of the FIRST task only (loop returns on the
        # first iteration).
        for (taskState, taskDNA) in zip(self.taskStates, self.questDNA.getTasks()):
            return taskDNA.getGoalUid(taskState)

    def getChangeEvent(self):
        """Messenger event name fired when this quest's state changes."""
        return 'Quest.questChange-%s' % self._serialNum

    def setTaskStates(self, taskStates):
        """Replace the task states, managing acquire/release refcounts."""
        oldTaskStates = getattr(self, 'taskStates', None)
        self.taskStates = taskStates
        if self.taskStates:
            for taskState in self.taskStates:
                taskState.acquire()
        if oldTaskStates:
            for taskState in oldTaskStates:
                taskState.release()
        # Notify listeners (quest journal UI, etc.) of the change.
        messenger.send(self.getChangeEvent())

    def sendTaskStates(self, taskStates):
        self.setTaskStates(taskStates)

    def setRewardStructs(self, rewardStructs):
        """Rebuild reward objects from network structs, tagging bonus rewards
        by matching them against the DNA's reward list."""
        rewards = []
        if self.questDNA != None:
            rewardDNAs = list(self.questDNA.rewards)
            for rewardStruct in rewardStructs:
                rewardObj = QuestReward.QuestReward.makeFromStruct(rewardStruct)
                for currRewardDNA in rewardDNAs:
                    if currRewardDNA.isSame(rewardObj):
                        rewardObj.setBonus(currRewardDNA.isBonus())
                        # Consume the DNA entry so it matches only once.
                        rewardDNAs.remove(currRewardDNA)
                        break
                    continue
                rewards.append(rewardObj)
        self.setRewards(rewards)

    def getRewardStructs(self):
        """Serialize the rewards back into network structs."""
        rewardStructs = []
        for reward in self.getRewards():
            rewardStructs.append(reward.getQuestRewardStruct())
        return rewardStructs

    def handleEvent(self, holder, questEvent):
        """Apply a quest event to every task whose location matches; resend
        task states if anything was modified."""
        modified = 0
        for (taskState, taskDNA) in zip(self.taskStates, self.questDNA.getTasks()):
            if taskDNA.locationMatches(questEvent):
                taskState.resetModified()
                if questEvent.applyTo(taskState, taskDNA):
                    # Velvet-roped (paid-access) quests block progress for
                    # non-full-access holders.
                    if holder.getAccess() != 2 and self.questDNA.getVelvetRoped():
                        holder.d_popupProgressBlocker(self.getQuestId())
                    else:
                        questEvent.complete(taskState, taskDNA)
                modified += taskState.isModified()
                continue
        if modified:
            self.sendTaskStates(self.taskStates)

    def isDroppable(self):
        # NOTE(review): decompiler artifact -- the None check has no effect,
        # so this raises AttributeError when questDNA is None.
        if self.questDNA != None:
            pass
        return self.questDNA.getDroppable()

    def isShareable(self):
        return True

    def completeRequiresVisit(self):
        return self.questDNA.getCompleteRequiresVisit()

    def playStinger(self):
        """Whether completing this quest should play a musical stinger."""
        if not self.questDNA:
            return False
        return self.questDNA.getPlayStinger()

    def getBranchParent(self, av):
        """Walk up the avatar's quest-status containers to the nearest
        ancestor that is a branch, or None."""
        def getBranchParentRecursive(container):
            if container and container.parent:
                if container.parent.isBranch():
                    return container.parent
                else:
                    return getBranchParentRecursive(container.parent)
            else:
                return None

        container = av.questStatus.getContainer(self.questId)
        return getBranchParentRecursive(container)

    def setFinished(self, finished):
        self._Quest__finished = finished

    def isFinished(self):
        return self._Quest__finished

    def isTimedOut(self):
        return self._Quest__timedOut

    def setTimedOut(self, timedOut):
        self._Quest__timedOut = timedOut

    def getTimeLimit(self):
        return self.questDNA.getTimeLimit()

    def getTimeRemaining(self):
        return self._Quest__timeRemaining

    def setTimeRemaining(self, time):
        self._Quest__timeRemaining = time

    def setFinalized(self, finalized):
        self._Quest__finalized = finalized

    def isFinalized(self):
        return self._Quest__finalized

    def isCompleteWithBonus(self, showComplete = False):
        # NOTE(review): the first isComplete() call is a decompiler artifact
        # (its result is discarded); only the bonus check is returned.
        if self.isComplete(showComplete = showComplete):
            pass
        return self.isComplete(showComplete = showComplete, bonus = True)

    def isComplete(self, showComplete = False, bonus = False):
        """True when the quest's tasks satisfy the combine op (AND/OR).

        With bonus=True, checks bonus-goal completion instead and does not
        latch the __finished flag.
        """
        if self._Quest__finished and not bonus:
            return True
        if hasattr(self, 'taskStates'):
            # No tasks at all counts as complete.
            if len(self.taskStates) == 0:
                return True
        else:
            return False
        if self.combineOp is QuestDNA.OR:
            # Any one complete task completes the quest.
            for task in self.taskStates:
                if task.isComplete(bonus):
                    if not bonus:
                        self._Quest__finished = True
                    return True
                continue
            return False
        elif self.combineOp is QuestDNA.AND:
            # Every task must be complete.
            for task in self.taskStates:
                if not task.isComplete(bonus):
                    return False
                continue
            if not bonus:
                self._Quest__finished = True
            return True
        else:
            # NOTE(review): raising a string is invalid in Python 3 (and
            # deprecated long before); left as-is from the decompiled source.
            raise 'unknown task combineOp: %s' % self.combineOp

    def percentComplete(self):
        """Fraction of tasks complete in [0.0, 1.0]; OR-quests report 0.0
        until fully complete."""
        if self._Quest__finished or self.isComplete() == True:
            return 1.0
        if hasattr(self, 'taskStates') and len(self.taskStates) == 0:
            return 1.0
        if self.combineOp is QuestDNA.OR:
            return 0.0
        elif self.combineOp is QuestDNA.AND:
            totalTasks = len(self.taskStates)
            completedTasks = 0
            for task in self.taskStates:
                if task.isComplete():
                    completedTasks += 1
                continue
            # NOTE(review): Python-2 integer division -- returns 0 for any
            # partially-complete quest unless task counts are floats.
            return completedTasks / totalTasks
        else:
            # See note in isComplete about string raise.
            raise 'unknown task combineOp: %s' % self.combineOp

    def canBeReturnedTo(self, giverId):
        """True when this quest may be turned in to the given NPC id."""
        noGiversSpecified = True
        returnGiverIds = self.questDNA.getReturnGiverIds()
        if returnGiverIds is not None:
            noGiversSpecified = False
            if giverId in returnGiverIds:
                return True
        for (task, taskState) in zip(self.getTasks(), self.getTaskStates()):
            if taskState.isComplete() or self.isTimedOut():
                returnGiverIds = task.getReturnGiverIds()
                if returnGiverIds is not None:
                    noGiversSpecified = False
                    if giverId in returnGiverIds:
                        return True
            # NOTE(review): no-op expression statement; decompiler artifact.
            returnGiverIds is not None
        # When no explicit return givers exist anywhere, fall back to the
        # original quest giver.
        if noGiversSpecified:
            if giverId == self.getGiverId():
                return True
        return False

    def getSCSummaryText(self, taskNum):
        """SpeedChat summary text for the given task index."""
        taskState = self.getTaskStates()[taskNum]
        return self.questDNA.getSCSummaryText(taskNum, taskState)

    def getSCWhereIsText(self, taskNum):
        return self.questDNA.getSCWhereIsText(taskNum)

    def getSCHowToText(self, taskNum):
        return self.questDNA.getSCHowToText(taskNum)

    def getDescriptionText(self, bonus = False):
        return self.questDNA.getDescriptionText(self.taskStates, bonus = bonus)

    def getRewardText(self):
        return QuestReward.QuestReward.getDescriptionText(self.getRewards())

    def getRestartText(self):
        # NOTE(review): only the guard survives decompilation -- returns ''
        # when there is no DNA and implicitly None otherwise.
        if self.questDNA == None:
            return ''

    def getReturnText(self):
        """Localized 'return to NPC' journal text, accounting for timed-out
        quests, choice branches, and single vs. multiple return givers."""
        if self.questDNA == None:
            return ''
        choice = False
        choiceComplete = False
        container = localAvatar.questStatus.getContainer(self.questId)
        if container and container.parent and container.parent.isChoice():
            choice = True
            if container.parent.isComplete(showComplete = True):
                choiceComplete = True
        timeLimit = self.questDNA.getTimeLimit()
        timeRemaining = self.getTimeRemaining()
        returnGiverIds = self.questDNA.getReturnGiverIds()
        if returnGiverIds:
            # NOTE(review): Python-2 semantics -- map/filter below are used
            # as list-returning builtins.
            npcNames = map(lambda id: PLocalizer.NPCNames.get(id, PLocalizer.DefaultTownfolkName), returnGiverIds)
            if len(returnGiverIds) == 1:
                if timeLimit and not timeRemaining:
                    return PLocalizer.QuestRestartReturnId % {
                        'npcName': npcNames[0] }
                elif choice and not choiceComplete:
                    return PLocalizer.SingleChoiceQuestReturnId % {
                        'npcName': npcNames[0] }
                elif filter(lambda x: isinstance(self.getTasks()[0], x), [
                    QuestTaskDNA.VisitTaskDNA,
                    QuestTaskDNA.DeliverItemTaskDNA]):
                    return PLocalizer.SingleQuestReturnIdCollect % {
                        'npcName': npcNames[0] }
                else:
                    return PLocalizer.SingleQuestReturnId % {
                        'npcName': npcNames[0] }
            elif choice and not choiceComplete:
                return PLocalizer.MultipleChoiceQuestReturnIds % {
                    'npcNames': npcNames }
            else:
                return PLocalizer.MultipleQuestReturnIds % {
                    'npcNames': npcNames }
        else:
            # No explicit return givers: prefer the first visit task's giver,
            # falling back to the quest giver.
            giverId = ''
            taskDNAs = self.questDNA.getTaskDNAs()
            for task in taskDNAs:
                if isinstance(task, QuestTaskDNA.VisitTaskDNA):
                    giverId = task.getReturnGiverIds()[0]
                    break
                continue
            if not giverId:
                giverId = self.getGiverId()
            npcName = PLocalizer.NPCNames.get(giverId, PLocalizer.DefaultTownfolkName)
            if timeLimit and not timeRemaining:
                return PLocalizer.QuestRestartReturnId % {
                    'npcName': npcName }
            if choice and not choiceComplete:
                return PLocalizer.SingleChoiceQuestReturnId % {
                    'npcName': npcName }
            else:
                return PLocalizer.SingleQuestReturnId % {
                    'npcName': npcName }

    def getTaskProgress(self):
        """List of (progress, goal) tuples, one per task state."""
        progressList = []
        taskStates = getattr(self, 'taskStates', None)
        if taskStates:
            for taskState in taskStates:
                goal = taskState.getGoal()
                progress = taskState.getProgress()
                progressList.append((progress, goal))
        return progressList

    def getStatusText(self):
        """Full localized journal status text for this quest."""
        if self.questDNA == None:
            return ''
        # Velvet-roped quests show a paid-access block for basic accounts,
        # except during an all-access holiday.
        if self.questDNA.getVelvetRoped() and base.localAvatar.getAccess() != OTPGlobals.AccessFull:
            if not Freebooter.AllAccessHoliday:
                return PLocalizer.VelvetRopeQuestBlock
        taskDNAs = self.questDNA.getTaskDNAs()
        taskStates = self.getTaskStates()

        def getTaskText(taskDNA, taskState, format, bonus = False):
            # Render one task line: description plus an x/y progress suffix
            # (or a 'complete' marker once the goal is reached).
            descText = self.getDescriptionText(bonus = bonus)
            if descText == None:
                return None
            if bonus:
                goal = taskState.getBonusGoal()
                progress = taskState.getBonusProgress()
            else:
                goal = taskState.getGoal()
                progress = taskState.getProgress()
            progressStr = ''
            if progress < goal:
                if goal > 1 and not isinstance(taskDNA, QuestTaskDNA.DowsingRodTaskDNA):
                    progressStr = PLocalizer.QuestTaskProgress % {
                        'prog': progress,
                        'goal': goal }
            else:
                progressStr = PLocalizer.QuestProgressComplete
            return format % {
                'task': descText,
                'prog': progressStr }

        # NOTE(review): 'str' shadows the builtin; kept from the original.
        if len(taskDNAs) == 1:
            str = PLocalizer.QuestStrOneTask % {
                'task': getTaskText(taskDNAs[0], taskStates[0], PLocalizer.QuestStatusTaskSingle) }
            bonusDescText = getTaskText(taskDNAs[0], taskStates[0], PLocalizer.QuestStatusTaskSingle, bonus = True)
            if bonusDescText:
                str += PLocalizer.QuestStatusTaskBonus + PLocalizer.QuestStrOneTask % {
                    'task': bonusDescText }
        else:
            headingStr = {
                QuestDNA.OR: PLocalizer.QuestMultiHeadingOr,
                QuestDNA.AND: PLocalizer.QuestMultiHeadingAnd }[self.getCombineOp()]
            tasksStr = ''
            for (taskDNA, taskState) in zip(taskDNAs, taskStates):
                tasksStr += getTaskText(taskDNA, taskState, PLocalizer.QuestStatusTaskMulti)
            str = PLocalizer.QuestStrMultiTask % {
                'heading': headingStr,
                'tasks': tasksStr }
        return str

    def __repr__(self):
        return '<Quest %s>' % self.getQuestId()

    def handleStart(self, avId):
        """Notify every task that the quest has started for the avatar."""
        for currTask in self.tasks:
            currTask.handleStart(avId)

    def getValidRewards(self):
        """All rewards when bonus goals are met, otherwise non-bonus only."""
        if self.isComplete(bonus = True):
            return self.getRewards()
        else:
            normalRewards = []
            for currReward in self.getRewards():
                if not currReward.isBonus():
                    normalRewards.append(currReward)
                continue
            return normalRewards
|
<reponame>mazgi/mt-to-hugo-article-converter
import re
from .single_line_attribute import SingleLineAttribute
class Tags(SingleLineAttribute):
    """Parse a Movable Type ``TAGS:`` header line into a sorted, de-duplicated
    list of tag strings."""

    regex = re.compile(r'^\s*TAGS:\s*(.*)\s*$')

    def value(self):
        raw = super(Tags, self).value()
        if not raw:
            return []
        # Strip double quotes, then break the line on commas and, within each
        # comma-separated piece, on single spaces.
        fragments = []
        for piece in raw.replace('"', '').split(","):
            fragments.extend(piece.split(" "))
        # Drop empty fragments, de-duplicate, and return in sorted order.
        return sorted(set(fragment for fragment in fragments if fragment))
if __name__ == '__main__':
    # Run this module's doctests when executed directly.
    import doctest
    doctest.testmod()
|
# Copyright 2015 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import imp
import importlib
import importlib._bootstrap
import os.path
import warnings
import types
import sys
from .compile import compile
from . import pyinternals
class Finder:
    """Path-entry finder that locates modules beneath one directory.

    Mirrors the classic (pre-importlib-machinery) file finder: packages are
    matched first via a case-checked ``__init__`` file, then plain modules by
    suffix, using the loaders registered in PYTHON_FILE_SUFFIXES and
    FILE_SUFFIXES.
    """

    def __init__(self, path):
        self.path = path

    def find_module(self, fullname):
        leaf = fullname.rpartition('.')[2]
        pkg_dir = os.path.join(self.path, leaf)
        if os.path.isdir(pkg_dir) and importlib._case_ok(self.path, leaf):
            # Candidate package directory: look for a recognised __init__.
            for suffix, loader in PYTHON_FILE_SUFFIXES:
                init_name = '__init__' + suffix
                init_path = os.path.join(pkg_dir, init_name)
                if os.path.isfile(init_path) and importlib._case_ok(pkg_dir, init_name):
                    return loader(fullname, init_path)
            else:
                # Directory without __init__: warn, then fall through to the
                # plain-module search below.
                warnings.warn(
                    "Not importing directory {}: missing __init__".format(pkg_dir),
                    ImportWarning)
        for suffix, loader in FILE_SUFFIXES:
            candidate = leaf + suffix
            candidate_path = os.path.join(self.path, candidate)
            if os.path.isfile(candidate_path) and importlib._case_ok(self.path, candidate):
                return loader(fullname, candidate_path)
        return None
def xloader(base):
    """Wrap a stdlib file loader class so loaded modules are native-compiled.

    Returns a subclass of *base* whose load path compiles the module's code
    object with the package-local :func:`compile` and executes the resulting
    entry point via :func:`pyinternals.cep_exec` instead of the interpreter.
    """
    class inner(base):
        @importlib._bootstrap.module_for_loader
        def _load_module(self, module, *, sourceless=False):
            name = module.__name__
            code_object = self.get_code(name)
            module.__file__ = self.get_filename(name)
            if not sourceless:
                module.__cached__ = imp.cache_from_source(module.__file__)
            else:
                module.__cached__ = module.__file__
            module.__package__ = name
            if self.is_package(name):
                # BUG FIX: the original referenced an undefined name
                # ``path_sep`` (copied from importlib._bootstrap, where it is
                # a module-level constant), which raised NameError for any
                # package import. Use os.path.sep instead.
                module.__path__ = [module.__file__.rsplit(os.path.sep, 1)[0]]
            else:
                module.__package__ = module.__package__.rpartition('.')[0]
            module.__loader__ = self
            ccode = compile(code_object)
            # Keep the CompiledCode object alive for the module's lifetime,
            # since the executed entry points live inside it.
            module.__nativecompile_compiled_code__ = ccode
            pyinternals.cep_exec(ccode.entry_points[0], module.__dict__)
            return module
    return inner
# Concrete loader classes: native-compiling wrappers around the stdlib
# source and sourceless loaders; C extensions use the stock loader unchanged.
SourceLoader = xloader(importlib._bootstrap._SourceFileLoader)
SourcelessLoader = xloader(importlib._bootstrap._SourcelessFileLoader)
ExtensionLoader = importlib._bootstrap._ExtensionFileLoader

# (suffix, loader) pairs for Python files only: .py sources first, then
# compiled (.pyc) files.
PYTHON_FILE_SUFFIXES = (
    [(suffix,SourceLoader) for suffix,mode,type in imp.get_suffixes()
        if type == imp.PY_SOURCE] +
    [(suffix,SourcelessLoader) for suffix,mode,type in imp.get_suffixes()
        if type == imp.PY_COMPILED])

# All importable suffixes: Python files plus C extension modules.
FILE_SUFFIXES = (
    PYTHON_FILE_SUFFIXES +
    [(suffix,ExtensionLoader) for suffix,mode,type in imp.get_suffixes()
        if type == imp.C_EXTENSION])
def path_hook(path):
    """sys.path_hooks callback: hand directory entries to a Finder.

    Raises ImportError for anything that is not a directory, which tells the
    import machinery to try the next hook.
    """
    if not os.path.isdir(path):
        raise ImportError("only directories are supported")
    return Finder(path)
def install_importer():
    # Register the native-compile path hook. Affects future imports only;
    # path entries already cached in sys.path_importer_cache keep their
    # existing finders.
    sys.path_hooks.append(path_hook)
def uninstall_importer():
    # Remove the hook registered by install_importer. Raises ValueError if it
    # was never installed; existing cached finders are not evicted.
    sys.path_hooks.remove(path_hook)
|
package org.mammon.scheme.brands.generic.coin;
import org.mammon.AssetType;
import org.mammon.math.FiniteField;
import org.mammon.math.Group;
import org.mammon.messaging.Identifiable;
import org.mammon.messaging.Message;
import org.mammon.messaging.MessageEmitter;
import org.mammon.messaging.Transitionable;
import org.mammon.scheme.brands.BrandsSchemeSetup;
import org.mammon.scheme.brands.PaymentHashFunction;
import org.mammon.scheme.brands.SignatureHashFunction;
import org.mammon.scheme.brands.accountholder.AccountHolderPrivate;
import org.mammon.scheme.brands.bank.Bank;
import org.mammon.scheme.brands.coin.CoinSignature;
import org.mammon.scheme.brands.messages.CoinHashRequest;
import org.mammon.scheme.brands.messages.CoinHashResponse;
import org.mammon.scheme.brands.messages.CoinTransferMessage;
import org.mammon.scheme.brands.messages.TransferToShopMessage;
import org.mammon.util.messaging.AbstractTransitionable;
/**
 * State object for a coin mid-transfer to a shop in the Brands e-cash scheme:
 * it emits a {@link CoinHashRequest} for the coin's payment challenge and,
 * given the {@link CoinHashResponse}, computes the two response values
 * (r1, r2) that prove ownership of the coin.
 *
 * @param <G>  the group the scheme operates in
 * @param <F>  the finite field of exponents
 * @param <T>  the time type used by the payment hash
 * @param <H>  the signature hash function
 * @param <H0> the payment hash function
 */
public abstract class AbstractTransferringCoinOne<G extends Group<G>, F extends FiniteField<F>, T, H extends SignatureHashFunction<G, F>, H0 extends PaymentHashFunction<G, F, T>>
		extends AbstractTransitionable implements Identifiable, Transitionable, MessageEmitter {

	// Immutable coin state captured at construction time.
	private final BrandsSchemeSetup<G, F, T, H, H0> setup;

	private final Bank<G, F, T, H, H0> bank;

	private final AccountHolderPrivate<G, F, T, H, H0> bearer;

	private final Group.Element<G> blindedIdentity;

	private final Group.Element<G> commitment;

	private final CoinSignature<G, F> coinSignature;

	private final AssetType assetType;

	private final Number faceValue;

	// Blinding factor and the two commitment openings used to answer the
	// shop's challenge in transition(CoinHashResponse).
	private final FiniteField.Element<F> s;

	private final FiniteField.Element<F> x1;

	private final FiniteField.Element<F> x2;

	private final String identity;

	private final String shop;

	public AbstractTransferringCoinOne(BrandsSchemeSetup<G, F, T, H, H0> setup, Bank<G, F, T, H, H0> bank,
			AccountHolderPrivate<G, F, T, H, H0> bearer, Group.Element<G> blindedIdentity,
			Group.Element<G> commitment, CoinSignature<G, F> coinSignature, AssetType assetType, Number faceValue,
			FiniteField.Element<F> s, FiniteField.Element<F> x1, FiniteField.Element<F> x2, String identity, String shop) {
		this.setup = setup;
		this.bank = bank;
		this.bearer = bearer;
		this.blindedIdentity = blindedIdentity;
		this.commitment = commitment;
		this.coinSignature = coinSignature;
		this.assetType = assetType;
		this.faceValue = faceValue;
		this.s = s;
		this.x1 = x1;
		this.x2 = x2;
		this.identity = identity;
		this.shop = shop;
	}

	@Override
	public String getIdentity() {
		return identity;
	}

	public BrandsSchemeSetup<G, F, T, H, H0> getSetup() {
		return setup;
	}

	public Bank<G, F, T, H, H0> getBank() {
		return bank;
	}

	public AccountHolderPrivate<G, F, T, H, H0> getBearer() {
		return bearer;
	}

	public Group.Element<G> getBlindedIdentity() {
		return blindedIdentity;
	}

	public Group.Element<G> getCommitment() {
		return commitment;
	}

	public CoinSignature<G, F> getCoinSignature() {
		return coinSignature;
	}

	public AssetType getAssetType() {
		return assetType;
	}

	public Number getFaceValue() {
		return faceValue;
	}

	public FiniteField.Element<F> getS() {
		return s;
	}

	public FiniteField.Element<F> getX1() {
		return x1;
	}

	public FiniteField.Element<F> getX2() {
		return x2;
	}

	public String getShop() {
		return shop;
	}

	/**
	 * Emits the payment challenge request carrying the coin's public data
	 * (blinded identity, commitment, signature, asset type, face value) to
	 * the shop.
	 */
	@Override
	public Message emitMessage() {
		return new CoinHashRequest<G, F, T, H, H0>(setup, bank, blindedIdentity, commitment, coinSignature,
				assetType, faceValue, shop);
	}

	/**
	 * Answers the shop's hash challenge: computes
	 * r1 = hash * privateKey * s + x1 and r2 = hash * s + x2
	 * over the field, and wraps them in a {@link CoinTransferMessage}.
	 */
	public CoinTransferMessage<F> transition(CoinHashResponse<F> response) {
		FiniteField.Element<F> r1 = response.getHash().multiply(getBearer().getPrivateKey()).multiply(getS()).add(
				getX1());
		FiniteField.Element<F> r2 = response.getHash().multiply(getS()).add(getX2());
		return new CoinTransferMessage<F>(r1, r2);
	}

	/**
	 * Accepts a transfer-to-shop message only when it is addressed to this
	 * coin and names this coin's shop; returns null (no transition) otherwise.
	 */
	public Object transition(TransferToShopMessage message) {
		if (getIdentity().equals(message.getDestination()) && getShop().equals(message.getShop())) {
			return this;
		}
		return null;
	}
}
|
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
// Schema for an uploaded document record: a display name, a document type,
// and the URI where the content lives. All three fields are mandatory
// strings.
const requiredString = () => ({ type: String, required: true });

const PostSchema = new Schema({
  name: requiredString(),
  type: requiredString(),
  uri: requiredString()
});

module.exports = mongoose.model('documents', PostSchema);
<gh_stars>0
import React, { useState } from "react"
import SEO from "../../components/layout/seo"
import { SectionContent } from "../../components/core/section"
import { Input, Textarea } from "../../components/core/input"
import { FaCheck, FaCubes, FaEnvelope, FaPen, FaSpinner, FaTimes, FaUser } from "react-icons/fa"
import { PrimaryDarkButton } from "../../components/core/button"
import { css } from "@emotion/react"
import { backgroundPrimaryColor, largeStart, primaryColor } from "../../components/core/variables"
import styled from "@emotion/styled"
import { ErrorAlert, SuccessAlert } from "../../components/core/alert"
import { Status } from "../../types/shared"
import { useCustomTranslation } from "../../i18n-hook"
import i18n from "i18next"
import contactFr from "../../locales/fr/about/contact.json"
import contactEn from "../../locales/en/about/contact.json"
import { facebook, instagram, twitter } from "../../utils"
import { PageProps } from "gatsby"
import { PrimaryBlogLayoutWithDrawer } from "../../components/layout/main-layout"
// i18n: register this page's local translation bundles under the
// "about/contact" namespace for both supported locales.
const namespace = "about/contact"
i18n.addResourceBundle("fr", namespace, contactFr)
i18n.addResourceBundle("en", namespace, contactEn)

// Emphasized inline text used for mail/social links below.
// NOTE(review): "Hightlight" is presumably a typo for "Highlight"; renaming
// would touch every usage in the component, so it is left as-is here.
const Hightlight = styled.span`
  color: ${primaryColor};
  font-weight: bold;
`
/**
 * Contact page under /about: intro copy, social-network links, and a contact
 * form whose fields are POSTed to a Firebase cloud function.
 */
const IndexPage: React.FunctionComponent<PageProps> = ({ location }) => {
  // Controlled form fields.
  const [name, setName] = useState("")
  const [mail, setMail] = useState("")
  const [title, setTitle] = useState("")
  const [message, setMessage] = useState("")
  // The "professional" checkbox is commented out below, so this stays false
  // but is still included in the submitted payload.
  const [isPro] = useState(false)
  // Submission lifecycle: INITIAL -> LOADING -> SUCCESS | ERROR.
  const [status, setStatus] = useState<Status>("INITIAL")
  const { t } = useCustomTranslation([namespace, "common"])
  const description = `${t("section1")} ${t("section2")}`
  // NOTE(review): this destructured `i18n` shadows the i18next import above;
  // only its `languageCode` is read here.
  const { i18n } = useCustomTranslation()
  return (
    <>
      <SEO title={t("common:link.contact")} location={location} socialNetworkDescription={description} />
      <PrimaryBlogLayoutWithDrawer page="who" noStickyHeader location={location}>
        <div
          className="tc mt4 mb3"
          css={css`
            margin-bottom: 1rem;
            @media (min-width: ${largeStart}) {
              margin-bottom: 2rem;
            }
          `}
        >
          <h4 className="tc">{t("headline")}</h4>
          <SectionContent>{t("section1")}</SectionContent>
          {/*<SectionContent>Un partenariat, un projet à nous proposer ?</SectionContent>*/}
          <SectionContent>{t("section2")}</SectionContent>
          <SectionContent>
            {i18n.languageCode === "fr"
              ? "Soit via le formulaire ci-dessus, soit via"
              : "Either via the form above, or via"}{" "}
            <Hightlight>
              <a href="mailto:<EMAIL>"><EMAIL></a>
            </Hightlight>
            .
          </SectionContent>
          <SectionContent>
            {t("section3.part1")}{" "}
            <Hightlight>
              <a href={`https://www.facebook.com/${facebook}`} target="_blank" rel="noopener noreferrer">
                Facebook
              </a>
            </Hightlight>
            ,{" "}
            <Hightlight>
              <a href={`https://twitter.com/${twitter}`} target="_blank" rel="noopener noreferrer">
                Twitter
              </a>
            </Hightlight>{" "}
            {t("section3.part2")}{" "}
            <Hightlight>
              <a href={`https://instagram.com/${instagram}`} target="_blank" rel="noopener noreferrer">
                Instagram
              </a>
            </Hightlight>
            .
          </SectionContent>
          {t("section4") && <SectionContent className="f6 i">{t("section4")}</SectionContent>}
          <section>
            <div
              css={css`
                @media (min-width: ${largeStart}) {
                  padding: 3rem;
                  background-color: ${backgroundPrimaryColor};
                  box-shadow: 0 6px 12px rgba(0, 0, 0, 0.5);
                  padding-top: 4px;
                }
              `}
            >
              <Input
                label={`${t("common:form.name")} *`}
                id="name"
                placeholder={t("common:form.name")}
                type="text"
                value={name}
                Icon={FaUser}
                onChange={(event: React.ChangeEvent<HTMLInputElement>) => setName(event.target.value)}
              />
              <Input
                label={`${t("common:form.email")} *`}
                id="email"
                placeholder={t("common:form.email")}
                type="text"
                value={mail}
                Icon={FaEnvelope}
                onChange={(event: React.ChangeEvent<HTMLInputElement>) => setMail(event.target.value)}
              />
              <Input
                label={`${t("form.object")} *`}
                id="objet"
                placeholder={t("form.object")}
                type="text"
                value={title}
                Icon={FaCubes}
                onChange={(event: React.ChangeEvent<HTMLInputElement>) => setTitle(event.target.value)}
              />
              <Textarea
                rows={10}
                label={`${t("form.message.label")} *`}
                id="message"
                placeholder={t("form.message.placeholder")}
                value={message}
                onChange={(event: React.ChangeEvent<HTMLInputElement>) => setMessage(event.target.value)}
                Icon={FaPen}
              />
              {/*<Checkbox label="Professionel" defaultChecked={isPro} onChange={() => setIsPro(!isPro)} id="pro" />*/}
              {/* Submit stays disabled until all mandatory fields are filled
                  and while a submission is in flight. */}
              <PrimaryDarkButton
                className="form-element"
                disabled={!name || !message || !mail || !title || status === "LOADING"}
                onClick={() => {
                  setStatus("LOADING")
                  fetch("https://us-central1-blog-3dd22.cloudfunctions.net/contact", {
                    method: "POST",
                    headers: {
                      "Content-Type": "application/json",
                    },
                    body: JSON.stringify({
                      name,
                      message,
                      mail,
                      title,
                      isPro,
                    }),
                  })
                    .then((res) => {
                      // fetch only rejects on network failure; turn HTTP
                      // errors into rejections so the catch below sees them.
                      if (!res.ok) {
                        throw new Error("Request failed: " + res.statusText)
                      }
                    })
                    .then(() => {
                      // Clear the form only after a confirmed success.
                      setMail("")
                      setName("")
                      setMessage("")
                      setTitle("")
                      // setIsPro(false)
                      setStatus("SUCCESS")
                    })
                    .catch(() => {
                      setStatus("ERROR")
                    })
                }}
              >
                {status === "INITIAL" ? (
                  <FaEnvelope />
                ) : status === "LOADING" ? (
                  <FaSpinner className="fa-spin" />
                ) : status === "SUCCESS" ? (
                  <FaCheck />
                ) : (
                  <FaTimes />
                )}
                {t("form.submit")}
              </PrimaryDarkButton>
              {status === "SUCCESS" ? (
                <SuccessAlert>{t("form.valid")}</SuccessAlert>
              ) : status === "ERROR" ? (
                <ErrorAlert>{t("form.invalid")}</ErrorAlert>
              ) : undefined}
            </div>
          </section>
        </div>
      </PrimaryBlogLayoutWithDrawer>
    </>
  )
}

export default IndexPage
|
import { Configuration } from './configuration';
import * as Messages from './protocol/messages';

/** A decoded reply to a previously sent packet. */
declare class PacketResponse {
    /** Id correlating this reply with the request it answers. */
    readonly id: string;
    readonly header: any;
    readonly body?: any;
}

import { Protocol } from './protocol/protocol';
import { Channel } from './interfaces/channel';

/**
 * Request/reply packet layer over a Channel. Presumably tracks in-flight
 * requests (`activeRequests`) and matches incoming replies back to their
 * callers — confirm against the implementation.
 */
declare class PacketInterface {
    private readonly config;
    private readonly activeRequests;
    private readonly channel;
    constructor(channel: Channel, config: Configuration);
    readonly isConnected: boolean;
    /** Handles a reply arriving from the channel. */
    processReply(reply: any): void;
    private storeRequest;
    shutdown(): void;
    sendInit(): Promise<Messages.InitReply>;
    sendClose(): void;
    /** Sends a packet and resolves with its reply. */
    sendWithReply(header: Protocol.Header, payload?: any): Promise<PacketResponse>;
    /** Fire-and-forget send; returns a string id (presumably the packet id). */
    send(header: Protocol.Header, payload?: any): string;
}
export { Protocol, Channel, PacketResponse, PacketInterface };
|
#!/bin/bash
# Publishes this package to the Okta internal npm registry on Artifactory,
# then uploads package metadata. Exits with the project's sentinel codes on
# each failure. All variable expansions are quoted so branch names, URLs and
# credentials containing unusual characters do not word-split.
source "${OKTA_HOME}/${REPO}/scripts/setup.sh"

REGISTRY="${ARTIFACTORY_URL}/api/npm/npm-okta"

export TEST_SUITE_TYPE="build"

# Install required dependencies
export PATH="${PATH}:$(yarn global bin)"
yarn global add @okta/ci-update-package
yarn global add @okta/ci-pkginfo

# Prefer the branch handed in by a bacon "action" task; fall back to the
# test-suite branch.
if [ -n "${action_branch}" ];
then
  echo "Publishing from bacon task using branch ${action_branch}"
  TARGET_BRANCH="${action_branch}"
else
  echo "Publishing from bacon testSuite using branch ${BRANCH}"
  TARGET_BRANCH="${BRANCH}"
fi

if ! ci-update-package --branch "${TARGET_BRANCH}"; then
  echo "ci-update-package failed! Exiting..."
  exit "${FAILED_SETUP}"
fi

### looks like ci-update-package is not compatible with `yarn publish`
### which expects new-version is passed via command line parameter.
### keep using npm for now
if ! npm publish --registry "${REGISTRY}"; then
  echo "npm publish failed! Exiting..."
  exit "${PUBLISH_ARTIFACTORY_FAILURE}"
fi

DATALOAD=$(ci-pkginfo -t dataload)
# ${DATALOAD} is left unquoted on purpose: it may expand to multiple
# curl arguments — confirm against ci-pkginfo's output format.
if ! artifactory_curl -X PUT -u "${ARTIFACTORY_CREDS}" ${DATALOAD} -v -f; then
  echo "artifactory_curl failed! Exiting..."
  exit "${PUBLISH_ARTIFACTORY_FAILURE}"
fi

exit "${SUCCESS}"
|
// 10093. 숫자
// 2019.09.06
// 입문용, 자료형
#include<iostream>
using namespace std;
// Reads two integers a and b, prints how many integers lie strictly between
// them, then prints those integers in increasing order, each followed by a
// space. Prints just "0" when a == b.
int main()
{
    unsigned long long a, b;
    cin >> a >> b;
    if (a == b)
    {
        printf("0\n");
        return 0;
    }
    // Normalize so lo < hi, then the open interval (lo, hi) holds the answer.
    unsigned long long lo = (a < b) ? a : b;
    unsigned long long hi = (a < b) ? b : a;
    printf("%llu\n", hi - lo - 1);
    for (unsigned long long v = lo + 1; v < hi; ++v)
        printf("%llu ", v);
    return 0;
}
|
// simple dp
#include<bits/stdc++.h>
using namespace std;
#define PI acos(-1)
#define fi first
#define se second
#define pb push_back
#define sz(a) (int)(a).size()
#define all(c) (c).begin(), (c).end()
#define TIMESTAMP fprintf(stderr, "Execution time: %.3lf s.\n", 1.0*clock()/CLOCKS_PER_SEC)
typedef long long ll;
typedef long double ld;
typedef vector<int> vi;
typedef vector<ll> vll;
typedef pair <int, int> pii;
typedef vector <vi> vvi;
typedef vector <pii> vpii;
typedef vector<string> vs;
const int INF = 1e9;
const int MAXN = 500 + 9;
const int MOD = 1e9 + 7;
bool used[MAXN];
int n, m;
ll dp[MAXN][MAXN];
// Fill the global table with lattice-path counts modulo MOD:
// dp[i][j] = number of monotone grid paths = C(i + j, i) (mod MOD).
void precalc() {
    for (int k = 1; k < MAXN; k++)
        dp[k][0] = dp[0][k] = 1;
    for (int r = 1; r < MAXN; r++)
        for (int c = 1; c < MAXN; c++)
            dp[r][c] = (dp[r - 1][c] + dp[r][c - 1]) % MOD;
}
// Answer one query: read the dimensions n and m and print the precomputed
// path count dp[n][m]. Assumes precalc() has already run.
void solve() {
    cin >> n >> m;
    cout << dp[n][m] << endl;
}
// Entry point: speed up stdio, optionally redirect to files in LOCAL builds,
// build the dp table once, then answer t independent queries.
int main() {
    // Untie the C++ streams from C stdio for faster cin/cout.
    ios_base::sync_with_stdio(0);
    cin.tie(0);
#ifdef LOCAL
    freopen("xxx.in", "r", stdin);
    freopen("xxx.out", "w", stdout);
#else
    //freopen("xxx.in", "r", stdin);
    //freopen("xxx.out", "w", stdout);
#endif
    precalc();
    int t;
    cin >> t;
    while(t--) {
        solve();
    }
    return 0;
}
|
// Decide whether `url` is enabled by the ordered pattern list `sites`.
// Patterns may contain `*` wildcards (matched as runs of non-space chars)
// and a leading `!` marks a block-list entry. The LAST matching pattern in
// `sites` wins; a `!` winner — or no match at all — yields false.
const check = (sites, url) => {
  // Fix: iterate a reversed COPY. Array.prototype.reverse reverses in
  // place, so the original code silently mutated the caller's array.
  const entry = [...sites].reverse().find((pattern) => {
    const regexp = `^${pattern}$`.replace(/\*/g, '[^ ]*').replace('!', '');
    return url.match(new RegExp(regexp));
  });
  return entry ? (entry.match(/^!.+/) !== null ? false : true) : false;
};
// Resolve the extension's state for a browser tab: whether the panel with
// `id` was explicitly hidden by the user, and whether the extension is
// enabled for the tab's URL according to the newline-separated site
// patterns kept in chrome.storage.
export const activeForTab = ({ id, url }) => {
  return new Promise((resolve) => {
    chrome.storage.local.get((data) => {
      const hiddenPanels = data['hidden-panels'] || [];
      // NOTE(review): assumes data.sites is always a string — a missing
      // value would make split() throw. Confirm the storage defaults.
      const sites = data.sites;
      // Trailing slash is stripped so "site.com" patterns match "site.com/".
      const enabledSite = check(sites.split('\n'), url.replace(/\/$/, ''));
      const hidden = hiddenPanels.indexOf(id) !== -1;
      return resolve({ hidden, disabled: !enabledSite });
    });
  });
};
|
#!/bin/bash -l
# SLURM batch script: runs the Vowpal Wabbit evaluation over the PubMed
# corpus on a single node/task. The #SBATCH lines below are parsed by the
# scheduler and must not be reformatted.
#SBATCH --job-name="VW Pubmed"
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --ntasks-per-node=1
#SBATCH --mem=4096
#SBATCH --time=1-15:00:00
#SBATCH --partition=batch
#SBATCH --mail-type=ALL
#SBATCH --mail-user=marc.zimmermann@epfl.ch
#SBATCH --output=/nfs4/bbp.epfl.ch/user/mazimmer/slurm-vw-pubmed-stdout.log
#SBATCH --error=/nfs4/bbp.epfl.ch/user/mazimmer/slurm-vw-pubmed-stderr.log
# In case there are per-group custom initialization files
#. /nfs4/bbp.epfl.ch/group/visualization/module/modules.bash
# Load your required module files here
#module load MODULE_TO_BE_LOADED
# To avoid Kerberos tickets becoming expired, run the following in
# the background to check every 30min and try to renew it
krenew -b -K 30
# Log where the job landed and the shell limits, for post-mortem debugging.
echo "On which node your job has been scheduled :"
echo $SLURM_JOB_NODELIST
echo "Print current shell limits :"
ulimit -a
# Run the actual evaluation: corpus directory and iteration/feature count.
./vw_eval.sh $HOME/private/corpora/pubmed 100
|
const SearchHistoryItem = ({item, onClick}) => {
return <li className='search-history-item' onClick={onClick}>{item}</li>
}
export default SearchHistoryItem;
|
<gh_stars>0
package com.github.danildorogoy.template;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
/**
 * The queen chess piece: combines the bishop's diagonal movement with the
 * rook's rank/file movement. The original SelectPiece repeated the same
 * square-scanning logic eight times; it is factored into two private
 * helpers with identical behavior.
 */
public class PieceQueen extends Piece {
    private Image image;

    /**
     * Creates a queen of the given colour at (xPos, yPos) and configures
     * its image view.
     *
     * @param type owner id (1 loads the white sprite, anything else black)
     * @param xPos board column
     * @param yPos board row
     */
    public PieceQueen(int type, int xPos, int yPos) {
        super(type, xPos, yPos);
        name = "Queen";
        if (type == 1) {
            image = new Image("file:src/main/resources/wQueen.png");
        } else {
            image = new Image("file:src/main/resources/bQueen.png");
        }
        imageView.setImage(image);
        imageView.fitHeightProperty();
        imageView.fitWidthProperty();
        imageView.setPreserveRatio(true);
        imageView.setSmooth(true);
        imageView.setCache(true);
    }

    /** Returns the JavaFX view showing this piece's sprite. */
    @Override
    public ImageView getImage() {
        return (imageView);
    }

    /**
     * Highlights this queen's square and every square it may move to.
     * Directions are suppressed when leaving them would expose the king
     * (the gameLogic *Protection checks), and during check only squares
     * that resolve the check are highlighted.
     */
    @Override
    public void SelectPiece(ChessBoard chessBoard) {
        chessBoard.colorSquare(this.xPos, this.yPos, true);
        // While the king is in check, only a "savior" piece may offer moves.
        if (chessBoard.checkState && !this.isASavior)
            return;
        // Bishop ability: the two diagonals.
        if (!gameLogic.slashDiagonalProtection(chessBoard, this.xPos, this.yPos, this.type) &&
                !gameLogic.verticalProtection(chessBoard, this.xPos, this.yPos, this.type) &&
                !gameLogic.horizontalProtection(chessBoard, this.xPos, this.yPos, this.type)) {
            scanDirection(chessBoard, 1, 1);
            scanDirection(chessBoard, -1, -1);
        }
        if (!gameLogic.backslashDiagonalProtection(chessBoard, this.xPos, this.yPos, this.type) &&
                !gameLogic.verticalProtection(chessBoard, this.xPos, this.yPos, this.type) &&
                !gameLogic.horizontalProtection(chessBoard, this.xPos, this.yPos, this.type)) {
            scanDirection(chessBoard, -1, 1);
            scanDirection(chessBoard, 1, -1);
        }
        // Rook ability: ranks and files (guard conditions kept exactly as
        // in the original code).
        if (!gameLogic.horizontalProtection(chessBoard, this.xPos, this.yPos, this.type) &&
                !gameLogic.slashDiagonalProtection(chessBoard, this.xPos, this.yPos, this.type) &&
                !gameLogic.backslashDiagonalProtection(chessBoard, this.xPos, this.yPos, this.type)) {
            scanDirection(chessBoard, 0, -1);
            scanDirection(chessBoard, 0, 1);
        }
        if (!gameLogic.verticalProtection(chessBoard, this.xPos, this.yPos, this.type) &&
                !gameLogic.slashDiagonalProtection(chessBoard, this.xPos, this.yPos, this.type) &&
                !gameLogic.backslashDiagonalProtection(chessBoard, this.xPos, this.yPos, this.type)) {
            scanDirection(chessBoard, -1, 0);
            scanDirection(chessBoard, 1, 0);
        }
    }

    /**
     * Walks outward from this queen's square one step of (dx, dy) at a time,
     * highlighting squares until the board edge or a blocking piece stops it.
     */
    private void scanDirection(ChessBoard chessBoard, int dx, int dy) {
        int x = this.xPos + dx;
        int y = this.yPos + dy;
        while (x >= 0 && x < chessBoard.getBoardWidth() &&
                y >= 0 && y < chessBoard.getBoardHeight()) {
            if (!scanSquare(chessBoard, x, y))
                break;
            x += dx;
            y += dy;
        }
    }

    /**
     * Highlights (x, y) when allowed (during check, only when moving there
     * resolves the check). Returns true when the scan may continue past this
     * square — i.e. the square is empty; any occupied square ends the ray.
     */
    private boolean scanSquare(ChessBoard chessBoard, int x, int y) {
        int occupant = chessBoard.getBoardPosition(x, y);
        if (occupant == this.type)
            return false; // own piece: stop without highlighting
        if (!chessBoard.checkState || gameLogic.isThisProtecting(chessBoard, x, y, this.type))
            chessBoard.colorSquare(x, y, false);
        return occupant == 0; // enemy piece: highlight (capture) then stop
    }
}
|
<reponame>IonThruster/ClockSim
// Doxygen-generated navigation data for CMakeCCompilerId.c: each entry is
// [symbol name, documentation anchor, subpage]. Regenerated by the docs
// build — do not edit by hand.
var CMakeCCompilerId_8c =
[
    [ "ARCHITECTURE_ID", "CMakeCCompilerId_8c.html#aba35d0d200deaeb06aee95ca297acb28", null ],
    [ "C_DIALECT", "CMakeCCompilerId_8c.html#a07f8e5783674099cd7f5110e22a78cdb", null ],
    [ "COMPILER_ID", "CMakeCCompilerId_8c.html#a81dee0709ded976b2e0319239f72d174", null ],
    [ "DEC", "CMakeCCompilerId_8c.html#ad1280362da42492bbc11aa78cbf776ad", null ],
    [ "HEX", "CMakeCCompilerId_8c.html#a46d5d95daa1bef867bd0179594310ed5", null ],
    [ "PLATFORM_ID", "CMakeCCompilerId_8c.html#adbc5372f40838899018fadbc89bd588b", null ],
    [ "STRINGIFY", "CMakeCCompilerId_8c.html#a43e1cad902b6477bec893cb6430bd6c8", null ],
    [ "STRINGIFY_HELPER", "CMakeCCompilerId_8c.html#a2ae9b72bb13abaabfcf2ee0ba7d3fa1d", null ],
    [ "main", "CMakeCCompilerId_8c.html#a0ddf1224851353fc92bfbff6f499fa97", null ],
    [ "info_arch", "CMakeCCompilerId_8c.html#a59647e99d304ed33b15cb284c27ed391", null ],
    [ "info_compiler", "CMakeCCompilerId_8c.html#a4b0efeb7a5d59313986b3a0390f050f6", null ],
    [ "info_language_dialect_default", "CMakeCCompilerId_8c.html#a1ce162bad2fe6966ac8b33cc19e120b8", null ],
    [ "info_platform", "CMakeCCompilerId_8c.html#a2321403dee54ee23f0c2fa849c60f7d4", null ]
]; |
<reponame>Shopify/github-authentication<filename>test/github_authentication/retriable_test.rb
# frozen_string_literal: true
require 'test_helper'
require "github_authentication/retriable"
module GithubAuthentication
  # Unit tests for the Retriable mixin's with_retries helper: eventual
  # raising after the attempt budget, returning the block's value, sleep /
  # exponential-backoff behaviour, block arguments, and immediate
  # propagation of exception classes not listed for retry.
  class RetriableTest < Minitest::Test
    include Retriable

    # ExpectedError is the class the tests retry on; UnexpectedError is not
    # listed in with_retries and must propagate immediately.
    ExpectedError = Class.new(StandardError)
    UnexpectedError = Class.new(StandardError)

    # After max_attempts consecutive failures the last error is re-raised.
    def test_with_retries_will_eventially_raise
      mock_object = mock
      mock_object.expects(:call).raises(ExpectedError.new('foo')).times(3)
      assert_raises(ExpectedError) do
        with_retries(ExpectedError, max_attempts: 3) do
          mock_object.call
        end
      end
    end

    # A success within the attempt budget returns the block's value.
    def test_with_retries_will_succeed_and_return_the_final_return_value
      mock_object = mock
      mock_object.stubs(:call).raises(ExpectedError.new('foo'))
        .then.raises(ExpectedError.new('bar'))
        .then.returns("baz")
      return_value = with_retries(ExpectedError, max_attempts: 3) do
        mock_object.call
      end
      assert_equal "baz", return_value
    end

    # sleep_between_attempts: 0 must never call Kernel.sleep.
    def test_with_retries_does_not_sleep_between_attempts_when_sleep_between_attempts_0
      mock_object = mock
      mock_object.stubs(:call)
        .raises(ExpectedError.new('foo'))
        .then.raises(ExpectedError.new('bar'))
        .then.raises(ExpectedError.new('bar'))
        .then.raises(ExpectedError.new('bar'))
        .then.returns("baz")
      Kernel.expects(:sleep).never
      return_value = with_retries(ExpectedError, max_attempts: 5,
        sleep_between_attempts: 0, exponential_backoff: false) do
        mock_object.call
      end
      assert_equal "baz", return_value
    end

    # With backoff disabled, each retry sleeps the same fixed duration.
    def test_with_retries_sleeps_without_exponential_backoff_and_returns_the_final_return_value
      mock_object = mock
      mock_object.stubs(:call)
        .raises(ExpectedError.new('foo'))
        .then.raises(ExpectedError.new('bar'))
        .then.raises(ExpectedError.new('bar'))
        .then.raises(ExpectedError.new('bar'))
        .then.returns("baz")
      Kernel.expects(:sleep).with(2.0).times(4)
      return_value = with_retries(ExpectedError, max_attempts: 5,
        sleep_between_attempts: 2, exponential_backoff: false) do
        mock_object.call
      end
      assert_equal "baz", return_value
    end

    # Default backoff: each retry's sleep is drawn from a jitter window
    # whose upper bound doubles every attempt (2, 4, 8, 16 seconds here).
    def test_with_retries_sleeps_with_exponential_backoff_default_and_returns_the_final_return_value
      mock_object = mock
      mock_object.stubs(:call)
        .raises(ExpectedError.new('foo'))
        .then.raises(ExpectedError.new('bar'))
        .then.raises(ExpectedError.new('bar'))
        .then.raises(ExpectedError.new('bar'))
        .then.returns("baz")
      sleep_sequence = sequence('sleep-sequence')
      Kernel.expects(:rand).with(2.0..2.0).returns(2.0).in_sequence(sleep_sequence)
      Kernel.expects(:sleep).with(2.0).in_sequence(sleep_sequence)
      Kernel.expects(:rand).with(2.0..4.0).returns(4.0).in_sequence(sleep_sequence)
      Kernel.expects(:sleep).with(4.0).in_sequence(sleep_sequence)
      Kernel.expects(:rand).with(2.0..8.0).returns(8.0).in_sequence(sleep_sequence)
      Kernel.expects(:sleep).with(8.0).in_sequence(sleep_sequence)
      Kernel.expects(:rand).with(2.0..16.0).returns(16.0).in_sequence(sleep_sequence)
      Kernel.expects(:sleep).with(16.0).in_sequence(sleep_sequence)
      return_value = with_retries(ExpectedError, max_attempts: 5, sleep_between_attempts: 2) do
        mock_object.call
      end
      assert_equal "baz", return_value
    end

    # The block receives the 1-based attempt number and the previous failure
    # (nil on the first attempt).
    def test_with_retries_block_arguments
      exception = ExpectedError.new('foo')
      mock_object = mock
      mock_object.expects(:call).with(1, nil).raises(exception)
      mock_object.expects(:call).with(2, exception)
      with_retries(ExpectedError, max_attempts: 3) do |attempt, previous_failure|
        mock_object.call(attempt, previous_failure)
      end
    end

    # Exception classes not passed to with_retries are never retried.
    def test_with_retries_raises_immediately_for_unexpected_exceptions
      exception = UnexpectedError.new('foo')
      mock_object = mock
      mock_object.expects(:call).raises(exception)
      assert_raises(UnexpectedError) do
        with_retries(ExpectedError, max_attempts: 3) do
          mock_object.call
        end
      end
    end
  end
end
|
#!/bin/bash
# Launches a local Shamir-secret-sharing MPC run via the shared run-common
# helpers. PLAYERS defaults to 3; an optional THRESHOLD is forwarded as -T.
# Fix: quote the dirname expansion (paths with spaces) and forward the
# script's arguments with "$@" so quoted arguments are preserved intact
# ($* re-splits them on whitespace).
HERE=$(cd "$(dirname "$0")"; pwd)
SPDZROOT=$HERE/..

export PLAYERS=${PLAYERS:-3}

if test "$THRESHOLD"; then
    t="-T $THRESHOLD"
fi

. "$HERE/run-common.sh"

# $t is intentionally unquoted: it expands to two words ("-T <n>") or nothing.
run_player shamir-party.x "$@" $t || exit 1
|
package io.openexchange.occ.scoring;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/** Exercises FileScoreFactory against the bundled test fixtures. */
public class TestFileScore {

    /** The small names fixture must score 3194 in total. */
    @Test
    void scoreFile() {
        var factory = new FileScoreFactory();
        var result = factory.load(TestFileScore.class.getResource("/testdata/names.txt"));
        Assertions.assertEquals(3194, result.total());
    }

    /** The large fixture guards against scoring regressions at scale. */
    @Test
    void scoreBigData() {
        var factory = new FileScoreFactory();
        var result = factory.load(TestFileScore.class.getResource("/testdata/bigdata.txt"));
        Assertions.assertEquals(871198282, result.total());
    }
}
|
import json
import logging
import pyofo
import requests
from django.core.cache import cache
from django.http import JsonResponse
from django.conf import settings
logger = logging.getLogger(__name__)
def bikes(request):
    """Return shared-bike availability around a coordinate.

    Queries the ofo and mobike APIs for bikes near the ``lat``/``lng``
    query-string parameters and returns both raw payloads as JSON.
    Successful lookups are cached for 15 minutes.

    Responds 400 when either coordinate is missing, and 500 when either
    upstream service reports an error.
    """
    lat = request.GET.get('lat')
    lng = request.GET.get('lng')
    # Fix: validate input explicitly. ``assert`` is stripped under
    # ``python -O`` and otherwise surfaces as a 500 instead of a client error.
    if not lat or not lng:
        return JsonResponse({'error': 'lat and lng are required'}, status=400)

    cache_key = "bikes!{0}!{1}".format(lat, lng)
    data = cache.get(cache_key)
    if data:
        logger.info('Cache hit for %s', cache_key)
    else:
        logger.info('Cache miss for %s', cache_key)
        # Retrieve ofo bikes
        pyofo.set_token(settings.OFO_TOKEN)
        ofo = pyofo.Ofo()
        ro = ofo.nearby_ofo_car(lat=lat, lng=lng)
        ofo_data = json.loads(ro.text)
        # Retrieve mobike bikes (their API expects a form-encoded body and a
        # mobile user agent).
        rm = requests.post('https://mwx.mobike.com/mobike-api/rent/nearbyBikesInfo.do',
                           data='latitude=%s&longitude=%s' % (lat, lng),
                           headers={
                               'content-type': 'application/x-www-form-urlencoded',
                               'user-agent': 'Mozilla/5.0 (Linux; Android 7.0; SM-G892A Build/NRD90M; wv) '
                                             'AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/60.0.3112.107 '
                                             'Mobile Safari/537.36'
                           })
        mobike_data = json.loads(rm.text)
        data = {
            'ofo': ofo_data,
            'mobike': mobike_data
        }
        # Fix: only cache successful lookups for 15 minutes. The original
        # cached unconditionally, so a transient upstream failure was served
        # for the full cache window.
        if data['ofo']['errorCode'] == 200 and data['mobike']['code'] == 0:
            cache.set(cache_key, data, timeout=900)

    # Check if an error has happened
    status = 500 if data['ofo']['errorCode'] != 200 or data['mobike']['code'] != 0 else 200
    return JsonResponse(data, status=status)
|
# Print every element of the nested sequence `arr`, one per line,
# walking it row by row in order.
for row in arr:
    for element in row:
        print(element)
<filename>jwx/src/main/java/weixin/liuliangbao/weigatedoor/controller/WeiDoorController.java<gh_stars>0
package weixin.liuliangbao.weigatedoor.controller;
import com.google.gson.Gson;
import net.sf.json.JSONObject;
import org.apache.log4j.Logger;
import org.codehaus.jackson.map.ObjectMapper;
import org.jeecgframework.core.common.controller.BaseController;
import org.jeecgframework.core.util.ResourceUtil;
import org.jeecgframework.web.system.service.SystemService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;
import weixin.guanjia.account.entity.WeixinAccountEntity;
import weixin.liuliangbao.jsonbean.FlowMainEntity;
import weixin.liuliangbao.weigatedoor.entity.WeidoorpptEntity;
import weixin.oauth2.AdvancedUtil;
import weixin.oauth2.SNSUserInfo;
import weixin.oauth2.WeixinOauth2Token;
import weixin.source.controller.WeixinSourceController;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
/**
* Created by aa on 2015/12/10.
*/
@Controller
@RequestMapping("/weiDoorController")
public class WeiDoorController extends BaseController {

    private static final Logger LOGGER = Logger.getLogger(WeiDoorController.class);

    @Autowired
    private SystemService systemService;

    // Simple bean-style holder exposed via the accessors below; not read by
    // any of the request handlers in this class.
    private String message;

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    /**
     * Developer shortcut that opens the portal view directly. The
     * hard-coded test-account locals are currently unused.
     */
    @RequestMapping(params = "testgoWeidoor")
    public ModelAndView testgoWeidoor(HttpServletRequest request){
        String accountid="402881e55185f10f015185f401680002";
        String nickname="许小乖ゾ";
        String openid="oUYCmwDu3Y48oPYeJsXvYXNrpbqs";
        String headimgUrl ="http://wx.qlogo.cn/mmopen/CttmTaYSYkTgX37xwUBp1olhzNIzOxoWX8khp930nw9R8ZlfEC1VYCFt4T38Tict0xwSRaE53Slmv61r3f9f7fw/0";
        return new ModelAndView("liuliangbao/weigatedoor/weidoor");
    }

    /**
     * Micro-portal page navigation. (Original author note: Xu Dan,
     * 2015-12-10.)
     *
     * @return the portal view populated with the account's carousel images
     */
    @RequestMapping(params = "goWeidoor")
    public ModelAndView goWeidoor(HttpServletRequest request) {
        ModelAndView mav=new ModelAndView();
        String linkType = "门户";
        // Look up the carousel images for this page location and official
        // account id; each record carries the image and its jump url.
        String accountid = (String) request.getSession().getAttribute("accountid");
        String nickname = (String) request.getSession().getAttribute("nickname");
        String headimgUrl = (String) request.getSession().getAttribute("headimgUrl");
        // Query the picture records by accountId and linkType.
        String hql = "from WeidoorpptEntity where accountid='" + accountid + "' and pageLocation='" + linkType + "'";
        List<WeidoorpptEntity> weidoorpptList = this.systemService.findHql(hql, null);
        // LOGGER.info(weidoorpptList);
        // Media url prefix that turns the stored relative paths into full urls.
        String prefixUrl = ResourceUtil.getMediaUrlPrefix();
        // LOGGER.info(prefixUrl);
        // TODO: collect the image urls from weidoorpptList into a list, each
        // one prefixed with prefixUrl.
        List<WeidoorpptEntity> weidoorpptListResult=new ArrayList<WeidoorpptEntity>();
        for (int i=0;i<weidoorpptList.size();i++){
            WeidoorpptEntity weidoor=new WeidoorpptEntity();
            weidoor.setId(weidoorpptList.get(i).getId());
            weidoor.setTitle(weidoorpptList.get(i).getTitle());
            weidoor.setPictureName(weidoorpptList.get(i).getPictureName());
            weidoor.setPictureUrl(prefixUrl + "/" +weidoorpptList.get(i).getPictureUrl());
            weidoor.setJumpType(weidoorpptList.get(i).getJumpType());
            weidoor.setJumpUrl(weidoorpptList.get(i).getJumpUrl());
            weidoor.setOperatetime(weidoorpptList.get(i).getOperatetime());
            weidoor.setAccountid(weidoorpptList.get(i).getAccountid());
            weidoor.setDescription(weidoorpptList.get(i).getDescription());
            weidoor.setPageLocation(weidoorpptList.get(i).getPageLocation());
            weidoorpptListResult.add(weidoor);
        }
        /* (dead code) Join prefix and picture name into a full image url:
        String imagePathName = prefixUrl + "/" + doorImgUrl;
        LOGGER.info(imagePathName);
        */
        mav.addObject("weidoorpptlist",weidoorpptListResult);
        mav.setViewName("liuliangbao/weigatedoor/weidoor");
        return mav;
        //return new ModelAndView("liuliangbao/weigatedoor/weidoor");
    }

    /**
     * AJAX endpoint: echoes the caller's head-image url back as a JSON
     * string.
     */
    @RequestMapping(params = "goFlowCenter")
    public void goFlowCenter(HttpServletRequest request,HttpServletResponse response)throws IOException {
        LOGGER.info("--------------------goFlowCenter---begin-----------------");
        String openid = request.getParameter("openId");
        String accountid = request.getParameter("accountid");
        String nickname = request.getParameter("nickname");
        String headimgUrl=request.getParameter("headimgUrl");
        LOGGER.info(openid);
        LOGGER.info(accountid+nickname+headimgUrl);
        ObjectMapper objectMapper = new ObjectMapper();
        // NOTE(review): gson and myJsonObject are created but never used.
        Gson gson = new Gson();
        JSONObject myJsonObject = new JSONObject();
        String json = objectMapper.writeValueAsString(headimgUrl);
        PrintWriter out = response.getWriter();
        out.write(json);
    }

    /**
     * OAuth entry point: builds the WeChat web-authorization url for the
     * activity identified by hdid and redirects the user's browser to it.
     *
     * @param request
     */
    @RequestMapping(params = "startWeidoor")
    public ModelAndView startWeidoor(HttpServletRequest request, HttpServletResponse response) {
        String hdid = request.getParameter("hdid");
        FlowMainEntity hdEntity = this.systemService.get(FlowMainEntity.class, hdid);
        if (hdEntity == null) {
            return new ModelAndView("liuliangbao/weigatedoor/weidoorNotExists");
        }
        // After the user grants access, WeChat redirects back to this handler.
        String rdUrl = "weiDoorController.do?goRedirectWeidoor";
        String accountid = hdEntity.getAccountid();
        request.setAttribute("accountid", accountid);
        WeixinAccountEntity account = this.systemService.get(WeixinAccountEntity.class, accountid);
        String requestUrl = "https://open.weixin.qq.com/connect/oauth2/authorize?appid=APPID&redirect_uri=REDIRECT_URI&response_type=code&scope=SCOPE&state=STATE#wechat_redirect";
        requestUrl = requestUrl.replace("APPID", account.getAccountappid());
        requestUrl = requestUrl.replace("SCOPE", "snsapi_userinfo");
        // The activity id rides along in the OAuth "state" parameter.
        requestUrl = requestUrl.replace("STATE", hdid);
        String path = request.getContextPath();
        String localhosturl = request.getScheme() + "://" + request.getServerName() + path + "/";
        String url = "";
        try {
            url = URLEncoder.encode(localhosturl + rdUrl, "utf-8");
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
        requestUrl = requestUrl.replace("REDIRECT_URI", url);
        LOGGER.info(requestUrl);
        return new ModelAndView("redirect:" + requestUrl);
    }

    /**
     * OAuth callback: exchanges the code for an access token, loads the
     * WeChat user's profile into the session and forwards to the portal.
     */
    @RequestMapping(params = "goRedirectWeidoor")
    public ModelAndView goRedirectWeidoor(HttpServletRequest request) {
        String hdid = request.getParameter("state");
        if (hdid == null || "".equals(hdid)) {
            return new ModelAndView("liuliangbao/weigatedoor/weidoorNotExists");
        }
        String hdNotUrl = "liuliangbao/weigatedoor/weidoorNotExists";
        String code = request.getParameter("code");
        if (!"authdeny".equals(code)) {
            FlowMainEntity hdEntity = this.systemService.get(FlowMainEntity.class, hdid);
            String accountid = hdEntity.getAccountid();
            WeixinAccountEntity account = this.systemService.get(WeixinAccountEntity.class, accountid);
            // Fetch the web-authorization access_token.
            WeixinOauth2Token weixinOauth2Token = AdvancedUtil.getOauth2AccessToken(account.getAccountappid(),
                    account.getAccountappsecret(), code);
            // Access credential for the web-authorization API.
            String accessToken = weixinOauth2Token.getAccessToken();
            // The user's identifier.
            String openId = weixinOauth2Token.getOpenId();
            // Load the user's profile.
            SNSUserInfo snsUserInfo = AdvancedUtil.getSnsUserInfo(accessToken, openId); // stash the values to pass on
            request.getSession().setAttribute("hdId", hdid);
            request.getSession().setAttribute("openId", openId);
            request.getSession().setAttribute("accountid", accountid);
            request.getSession().setAttribute("nickname", snsUserInfo.getNickName());
            LOGGER.info(snsUserInfo.getNickName());
            LOGGER.info(snsUserInfo.getHeadImgUrl());
            request.getSession().setAttribute("headimgUrl", snsUserInfo.getHeadImgUrl());
            return new ModelAndView("redirect:" + "weiDoorController.do?goWeidoor");
        } else {
            return new ModelAndView(hdNotUrl);
        }
    }
}
|
<filename>src/components/banner/__tests__/CdrBanner.spec.js
import { mount } from '@vue/test-utils';
import CdrBanner from 'componentdir/banner/CdrBanner';
import IconCheckFill from 'componentdir/icon/comps/check-fill';

// Snapshot test: mounting CdrBanner as an "info" banner with an icon plus
// text in its default slot must keep rendering the same markup.
describe('CdrBanner', () => {
  it('matches snapshot', () => {
    const wrapper = mount(CdrBanner, {
      propsData: {
        type: 'info',
      },
      slots: {
        default: [IconCheckFill, 'hey im a banner'],
      }
    });
    expect(wrapper.element).toMatchSnapshot();
  });
});
|
#!/bin/bash
#
# This script runs Go language unit tests for the repository. Arguments to this script
# are parsed as a list of packages to test until the first argument starting with '-' or '--' is
# found. That argument and all following arguments are interpreted as flags to be passed directly
# to `go test`. If no arguments are given, then "all" packages are tested.
#
# Coverage reports and jUnit XML reports can be generated by this script as well, but both cannot
# be generated at once.
#
# This script consumes the following parameters as environment variables:
# - DRY_RUN: prints all packages that would be tested with the args that would be used and exits
# - TIMEOUT: the timeout for any one unit test (default '60s')
# - DETECT_RACES: toggles the 'go test' race detector (defaults '-race')
# - COVERAGE_OUTPUT_DIR: locates the directory in which coverage output files will be placed
# - COVERAGE_SPEC: a set of flags for 'go test' that specify the coverage behavior (default '-cover -covermode=atomic')
# - GOTEST_FLAGS: any other flags to be sent to 'go test'
# - JUNIT_REPORT: toggles the creation of jUnit XML from the test output and changes this script's output behavior
# to use the 'junitreport' tool for summarizing the tests.
# - DLV_DEBUG toggles running tests using delve debugger
# EXIT-trap handler: generates the jUnit report, explains suspicious result
# combinations, and propagates the script's original exit code.
function cleanup() {
return_code=$?
os::test::junit::generate_report
# A report with zero failed tests but a non-zero exit code usually means
# the suite never ran (e.g. a compile error), so call that out explicitly.
if [[ "${JUNIT_REPORT_NUM_FAILED:-}" == "0 failed" ]]; then
if [[ "${return_code}" -ne "0" ]]; then
os::log::warning "While the jUnit report found no failed tests, the \`go test\` process failed."
os::log::warning "This usually means that the unit test suite failed to compile."
fi
fi
os::util::describe_return_code "${return_code}"
exit "${return_code}"
}
trap "cleanup" EXIT
source "$(dirname "${BASH_SOURCE}")/lib/init.sh"
os::build::setup_env
os::cleanup::tmpdir
# Internalize environment variables we consume and default if they're not set
dry_run="${DRY_RUN:-}"
test_timeout="${TIMEOUT:-120s}"
detect_races="${DETECT_RACES:-true}"
coverage_output_dir="${COVERAGE_OUTPUT_DIR:-}"
coverage_spec="${COVERAGE_SPEC:--cover -covermode atomic}"
gotest_flags="${GOTEST_FLAGS:-}"
junit_report="${JUNIT_REPORT:-}"
dlv_debug="${DLV_DEBUG:-}"
# jUnit XML and coverage reports are mutually exclusive (see header comment).
if [[ -n "${junit_report}" && -n "${coverage_output_dir}" ]]; then
echo "$0 cannot create jUnit XML reports and coverage reports at the same time."
exit 1
fi
# determine if user wanted verbosity
verbose=
if [[ "${gotest_flags}" =~ -v( |$) ]]; then
verbose=true
fi
# Build arguments for 'go test'
if [[ -z "${verbose}" && -n "${junit_report}" ]]; then
# verbosity can be set explicitly by the user or set implicitly by asking for the jUnit
# XML report, so we only want to add the flag if it hasn't been added by a user already
# and is being implicitly set by jUnit report generation
gotest_flags+=" -v"
fi
if [[ "${detect_races}" == "true" ]]; then
gotest_flags+=" -race"
fi
# check to see if user has not disabled coverage mode
if [[ -n "${coverage_spec}" ]]; then
# if we have a coverage spec set, we add it. '-race' implies '-cover -covermode atomic'
# but specifying both at the same time does not lead to an error so we can add both specs
gotest_flags+=" ${coverage_spec}"
fi
# check to see if user has not disabled test timeouts
if [[ -n "${test_timeout}" ]]; then
gotest_flags+=" -timeout ${test_timeout}"
fi
# Break up the positional arguments into packages that need to be tested and arguments that need to be passed to `go test`
package_args=
for arg in "$@"; do
if [[ "${arg}" =~ ^-.* ]]; then
# we found an arg that begins with a dash, so we stop interpreting arguments
# henceforth as packages and instead interpret them as flags to give to `go test`
break
fi
# an arg found before the first flag is a package
package_args+=" ${arg}"
shift
done
# whatever positional args remain after the shift loop are `go test` flags
gotest_flags+=" $*"
# Determine packages to test
godeps_package_prefix="vendor/"
test_packages=
if [[ -n "${package_args}" ]]; then
for package in ${package_args}; do
# If we're trying to recursively test a package under Godeps, strip the Godeps prefix so go test can find the packages correctly
if [[ "${package}" == "${godeps_package_prefix}"*"/..." ]]; then
test_packages="${test_packages} ${package:${#godeps_package_prefix}}"
else
test_packages="${test_packages} ${OS_GO_PACKAGE}/${package}"
fi
done
else
# If no packages are given to test, we need to generate a list of all packages with unit tests
test_packages="$(os::util::list_test_packages_under '*')"
fi
# In dry-run mode, print the flags and package list that would be used, then exit.
if [[ -n "${dry_run}" ]]; then
echo "The following base flags for \`go test\` will be used by $0:"
echo "go test ${gotest_flags}"
echo "The following packages will be tested by $0:"
for package in ${test_packages}; do
echo "${package}"
done
exit 0
fi
# Run 'go test' with the accumulated arguments and packages:
if [[ -n "${junit_report}" ]]; then
# we need to generate jUnit xml
test_error_file="${LOG_DIR}/test-go-err.log"
os::log::info "Running \`go test\`..."
# we don't care if the `go test` fails in this pipe, as we want to generate the report and summarize the output anyway
set +o pipefail
# Fix: the suite previously ran twice here (a bare `go test` invocation
# immediately preceded the piped one); it now runs exactly once.
go test ${gotest_flags} ${test_packages} 2>"${test_error_file}" | tee "${JUNIT_REPORT_OUTPUT}"
test_return_code="${PIPESTATUS[0]}"
set -o pipefail
if [[ -s "${test_error_file}" ]]; then
os::log::warning "\`go test\` had the following output to stderr:
$( cat "${test_error_file}") "
fi
# Surface any data races the race detector reported in the teed output.
if grep -q 'WARNING: DATA RACE' "${JUNIT_REPORT_OUTPUT}"; then
locations=( $( sed -n '/WARNING: DATA RACE/=' "${JUNIT_REPORT_OUTPUT}") )
if [[ "${#locations[@]}" -gt 1 ]]; then
os::log::warning "\`go test\` detected data races."
os::log::warning "Details can be found in the full output file at lines ${locations[*]}."
else
os::log::warning "\`go test\` detected a data race."
os::log::warning "Details can be found in the full output file at line ${locations[*]}."
fi
fi
exit "${test_return_code}"
elif [[ -n "${coverage_output_dir}" ]]; then
# we need to generate coverage reports
# Fix: removed a redundant whole-suite `go test` run that preceded this
# loop; each package is now tested once, with its own coverage profile.
for test_package in ${test_packages}; do
mkdir -p "${coverage_output_dir}/${test_package}"
local_gotest_flags="${gotest_flags} -coverprofile=${coverage_output_dir}/${test_package}/profile.out"
go test ${local_gotest_flags} ${test_package}
done
# assemble all profiles and generate a coverage report
echo 'mode: atomic' > "${coverage_output_dir}/profiles.out"
find "${coverage_output_dir}" -name profile.out | xargs sed '/^mode: atomic$/d' >> "${coverage_output_dir}/profiles.out"
go tool cover "-html=${coverage_output_dir}/profiles.out" -o "${coverage_output_dir}/coverage.html"
os::log::info "Coverage profile written to ${coverage_output_dir}/coverage.html"
# clean up all of the individual coverage reports as they have been subsumed into the report at ${coverage_output_dir}/coverage.html
# we can clean up all of the coverage reports at once as they all exist in subdirectories of ${coverage_output_dir}/${OS_GO_PACKAGE}
# and they are the only files found in those subdirectories
rm -rf "${coverage_output_dir:?}/${OS_GO_PACKAGE}"
elif [[ -n "${dlv_debug}" ]]; then
# run tests using delve debugger
dlv test ${test_packages}
else
# we need to generate neither jUnit XML nor coverage reports
# Fix: a duplicated `go test` line used to run the whole suite twice here.
go test ${gotest_flags} ${test_packages}
fi
|
#!/bin/bash
# Provision a Debian (stretch) VPS: Pritunl VPN + MongoDB, Squid proxy,
# nginx + PHP web server, vnstat traffic monitor, and UFW firewall rules.
# go to root
cd
# Install Command
apt-get -y install ufw
apt-get -y install sudo
# Install Pritunl
echo "deb http://repo.pritunl.com/stable/apt stretch main" >> /etc/apt/sources.list.d/pritunl.list
sudo apt-get -y install dirmngr
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com --recv 7568D9BB55FF9E5287D586017AE645C0CF8E292A
apt-get -y update
sudo apt-get --assume-yes install pritunl mongodb-server
sudo systemctl start mongodb pritunl
sudo systemctl enable mongodb pritunl
# Install Squid
apt-get -y install squid3
# Fix: the heredoc opener was '<< -END' while the terminating line is 'END',
# so the delimiter never matched and the heredoc swallowed the rest of the
# script. The delimiter is also quoted so the config is written literally.
cat > /etc/squid3/squid.conf << 'END'
acl manager proto cache_object
acl localhost src 127.0.0.1/32 ::1
acl to_localhost dst 127.0.0.0/8 0.0.0.0/32 ::1
acl SSL_ports port 443
acl Safe_ports port 80
acl Safe_ports port 21
acl Safe_ports port 443
acl Safe_ports port 70
acl Safe_ports port 210
acl Safe_ports port 1025-65535
acl Safe_ports port 280
acl Safe_ports port 488
acl Safe_ports port 591
acl Safe_ports port 777
acl CONNECT method CONNECT GET POST
acl SSH dst xxxxxxxxx-xxxxxxxxx/255.255.255.0
http_access allow SSH
http_access allow manager localhost
http_access deny manager
http_access allow localhost
http_access deny all
http_port 8000
http_port 8080
http_port 3128
http_port 191
coredump_dir /var/spool/squid3
refresh_pattern ^ftp: 1440 20% 10080
refresh_pattern ^gopher: 1440 0% 1440
refresh_pattern -i (/cgi-bin/|\?) 0 0% 0
refresh_pattern . 0 20% 4320
visible_hostname https://anywhere.truevisions.tv
END
# Substitute this host's public IP for the xxxxxxxxx placeholders above.
MYIP=`ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0' | grep -v '192.168'`;
sed -i s/xxxxxxxxx/$MYIP/g /etc/squid3/squid.conf;
service squid3 restart
# Enable Firewall
sudo ufw allow 22,80,81,82,83,109,110,143,191,222,443,1080,1194,1195,1196,1197,1198,1199,3128,3129,5002/tcp
sudo ufw allow 7300,8000,8080,8081,8082,8888,8989,9000,9700,10000,52000,60000/tcp
sudo ufw allow 22,80,81,82,83,109,110,143,191,222,443,1080,1194,1195,1196,1197,1198,1199,3128,3129,5002/udp
sudo ufw allow 7300,8000,8080,8081,8082,8888,8989,9000,9700,10000,52000,60000/udp
sudo yes | ufw enable
# Change to Time GMT+7
ln -fs /usr/share/zoneinfo/Asia/Bangkok /etc/localtime
# Install Web Server
apt-get -y install nginx php5-fpm php5-cli
cd
rm /etc/nginx/sites-enabled/default
rm /etc/nginx/sites-available/default
# Fix: same '<< -END' / 'END' delimiter mismatch as the Squid config above.
cat > /etc/nginx/nginx.conf << 'END'
user www-data;
worker_processes 1;
pid /var/run/nginx.pid;
events {
multi_accept on;
worker_connections 1024;
}
http {
gzip on;
gzip_vary on;
gzip_comp_level 5;
gzip_types text/plain application/x-javascript text/xml text/css;
autoindex on;
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
server_tokens off;
include /etc/nginx/mime.types;
default_type application/octet-stream;
access_log /var/log/nginx/access.log;
error_log /var/log/nginx/error.log;
client_max_body_size 32M;
client_header_buffer_size 8m;
large_client_header_buffers 8 8m;
fastcgi_buffer_size 8m;
fastcgi_buffers 8 8m;
fastcgi_read_timeout 600;
include /etc/nginx/conf.d/*.conf;
}
END
mkdir -p /home/vps/public_html
echo "<?php phpinfo(); ?>" > /home/vps/public_html/info.php
# Fix: delimiter mismatch as above; quoting 'END' is essential here so that
# $uri, $args and $document_root$fastcgi_script_name are written literally
# for nginx instead of being expanded (to empty) by the shell.
cat > /etc/nginx/conf.d/vps.conf << 'END'
server {
listen 81;
server_name 127.0.0.1 localhost;
access_log /var/log/nginx/vps-access.log;
error_log /var/log/nginx/vps-error.log error;
root /home/vps/public_html;
location / {
index index.html index.htm index.php;
try_files $uri $uri/ /index.php?$args;
}
location ~ \.php$ {
include /etc/nginx/fastcgi_params;
fastcgi_pass 127.0.0.1:9000;
fastcgi_index index.php;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
}
}
END
sed -i 's/listen = \/var\/run\/php5-fpm.sock/listen = 127.0.0.1:9000/g' /etc/php5/fpm/pool.d/www.conf
service php5-fpm restart
service nginx restart
# Install Vnstat
apt-get -y install vnstat
vnstat -u -i eth0
sudo chown -R vnstat:vnstat /var/lib/vnstat
service vnstat restart
# Install Vnstat GUI
cd /home/vps/public_html/
wget http://www.sqweek.com/sqweek/files/vnstat_php_frontend-1.5.1.tar.gz
tar xf vnstat_php_frontend-1.5.1.tar.gz
rm vnstat_php_frontend-1.5.1.tar.gz
mv vnstat_php_frontend-1.5.1 vnstat
cd vnstat
sed -i "s/\$iface_list = array('eth0', 'sixxs');/\$iface_list = array('eth0');/g" config.php
sed -i "s/\$language = 'nl';/\$language = 'en';/g" config.php
sed -i 's/Internal/Internet/g' config.php
sed -i '/SixXS IPv6/d' config.php
cd
# About
clear
echo "Script install Auto :-"
echo "-Pritunl"
echo "-MongoDB"
echo "-Vnstat"
echo "-Web Server"
echo "-Squid Proxy Port 3128,8000,8080"
echo "BY KUNPHIPHIT"
echo "TimeZone : Bangkok"
echo "Vnstat : http://$MYIP:81"
echo "Pritunl : https://$MYIP"
echo "Setup login by pritunl && password by pritunl"
echo "Setup copy code paste update Pritunl database"
pritunl setup-key
|
#!/bin/sh
# Regression tests for tlsh_pattern: build a pattern file from known sample
# files, then compare the tool's matching output against expected results.
export LC_ALL='C'
echoerr() { echo "$@" 1>&2; }
BASEDIR=$(dirname "$0")
cd "$BASEDIR"
if [ ! -f ../bin/tlsh ]
then
echoerr "error: (127), you must compile tlsh"
exit 127
fi
if [ ! -f ../bin/tlsh_pattern ]
then
echoerr "error: (127), you must compile ../bin/tlsh_pattern"
exit 127
fi
mkdir -p tmp
############################
# Test 1
# create a pattern file
############################
testnum=1
echo
echo "test_pattern $testnum"
echo
############
# create pattern file
# col 1: pattern number
# col 2: nitems in group
# col 3: TLSH
# col 4: radius
# col 5: pattern label
############
PATTERN_FILE=tmp/tenfile.pat
rm -f "$PATTERN_FILE"
patn=0
for fname in 021106_yossivassa.txt 0Alice.txt 11-17-06_Academy.txt 1english-only.txt 2005NISSE.txt \
2006-07_Resource_Brochure.txt 2006_2007PhysicalEducationConceptMap.txt 2007ShowcaseFilm_Package.txt \
22-ppa-3rd_e2snewsletter_jun06.txt 42nd_street.txt ; do
FILE=example_data/$fname
if [ ! -f "$FILE" ]
then
echoerr "error: (1), cannot find file $FILE"
exit 1
fi
echo "../bin/tlsh -f $FILE | cut -f 1"
tlsh=$(../bin/tlsh -f "$FILE" | cut -f 1)
echo "pat_$patn 1 $tlsh 30 $fname" >> "$PATTERN_FILE"
patn=$(expr $patn + 1)
done
EXPECTED_PATFILE=exp/tenfile.pat_EXP
if [ ! -f "$EXPECTED_PATFILE" ]
then
echoerr "error: ($testnum), Expected Pattern file $EXPECTED_PATFILE does not exist"
exit 1
fi
diff --ignore-all-space "$PATTERN_FILE" "$EXPECTED_PATFILE" > /dev/null 2>/dev/null
if [ $? -ne 0 ]; then
echoerr "error: ($testnum) diff $PATTERN_FILE $EXPECTED_PATFILE"
exit $testnum
fi
echo "passed"
############################
# END OF test 1
############################
############################
# Test 2
# use tlsh_pattern
############################
testnum=2
echo
echo "test_pattern $testnum"
echo
for dir in example_data example_data_variants ; do
RESFILE=tmp/$dir.results
echo "../bin/tlsh_pattern -force -r $dir -pat $PATTERN_FILE > $RESFILE"
../bin/tlsh_pattern -force -r "$dir" -pat "$PATTERN_FILE" > "$RESFILE"
EXPECTED_RESFILE=exp/$dir.results_EXP
if [ ! -f "$EXPECTED_RESFILE" ]
then
echoerr "error: ($testnum), Expected results file $EXPECTED_RESFILE does not exist"
exit 1
fi
diff --ignore-all-space "$RESFILE" "$EXPECTED_RESFILE" > /dev/null 2>/dev/null
if [ $? -ne 0 ]; then
echoerr "error: ($testnum) diff $RESFILE $EXPECTED_RESFILE"
exit $testnum
fi
echo "passed"
done
############################
# END OF test 2
############################
|
<gh_stars>0
import React from 'react';
import $ from 'jquery';
import 'fullcalendar';
import 'fullcalendar/dist/fullcalendar.css';
// Wraps jQuery FullCalendar in a React component. Events, the visible date
// and click handling flow in through props; the calendar DOM is driven
// imperatively through the string ref on the root div.
export default class BookingCalendar extends React.Component {
// Replace the calendar's current events with the given list.
updateCalendarEvents(events) {
const { calendar } = this.refs;
$(calendar)
.fullCalendar('removeEvents')
$(calendar)
.fullCalendar('addEventSource', events);
}
// Navigate to `date` only when it differs from the displayed date.
updateCalendarDate(date) {
const { calendar } = this.refs;
const currentCalendarDate = $(calendar)
.fullCalendar('getDate').format('YYYY-MM-DD');
if (date != currentCalendarDate) {
$(calendar).fullCalendar('gotoDate', date);
}
}
// Mark day cells that have at least one event with booked-day classes.
setClassesToBookedDayCell() {
const { calendar } = this.refs;
const events = $(calendar).fullCalendar('clientEvents');
// Add booked-day class to day with events
$(calendar).find('.fc-day').removeClass('booked-day');
$(calendar).find('.fc-day-number').removeClass('booked-day-number');
events.forEach(event => {
const strDate = event.date.format('YYYY-MM-DD');
$(calendar)
.find(`.fc-day-number[data-date="${strDate}"]`)
.addClass('booked-day-number');
$(calendar)
.find(`.fc-day[data-date="${strDate}"]`)
.addClass('booked-day');
});
}
// Initialize FullCalendar once the root div is in the DOM.
componentDidMount() {
const { calendar } = this.refs;
$(calendar).fullCalendar({
lang: 'it',
height:'auto',
events: this.props.events,
// Clicking a day forwards every event on that date to onEventClick.
dayClick: date => {
const events = this.props.events;
events.filter(event => {
return date.format() === event.date.format('YYYY-MM-DD');
}).forEach(event => this.props.onEventClick(event));
},
defaultDate: this.props.calendarDate,
eventClick: event => this.props.onEventClick(event),
viewRender: view => this.props.onCalendarChange(view.calendar.getDate()),
eventAfterAllRender: view => this.setClassesToBookedDayCell(),
eventRender : (event, element) => {
element.find(".fc-title").prepend("<i class='glyphicon glyphicon-tags'></i>");
}
});
}
// Tear the widget down so jQuery state doesn't outlive the component.
componentWillUnmount() {
const { calendar } = this.refs;
$(calendar).fullCalendar('destroy');
}
// Push prop changes into the imperative calendar instance.
componentWillReceiveProps(nextProps) {
if (nextProps.events != this.props.events) {
this.updateCalendarEvents(nextProps.events)
}
if (nextProps.calendarDate != this.props.calendarDate) {
this.updateCalendarDate(nextProps.calendarDate);
}
}
render() {
return <div ref="calendar"></div>;
}
}
|
#!/usr/bin/env bash
# Rebuild the sklearn text-input servable image and run it locally,
# replacing any previously running test container.
CONTAINER=model_text_server_test
IMAGE=model_text_server
echo "Stop previously running versions..."
docker stop "${CONTAINER}"
docker rm "${CONTAINER}"
echo "Build image..."
docker build -t "${IMAGE}" "${AMMS_HOME}"/examples/local_servables/sklearn_text_input/
echo "Run image on port 5000..."
docker run -d \
--name "${CONTAINER}" \
-p 5000:8090 \
--volume "${AMMS_HOME}"/examples/docker_compose_sklearn_retraining/shared_volume:/shared_volume \
"${IMAGE}":latest
|
package com.github.brymck.getlocalendpoint;
import java.net.URI;
import java.net.URISyntaxException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class allows retrieving service endpoints from environment variables.
*
* <p>For example, suppose the environment variable {@code FOO_ADDRESS} should contain the route to
* a service in the form {@code "host:port"}:
*
* <pre>{@code
* String fooEndpoint = getHttpEndpoint("FOO_ADDRESS");
* SomeApi api = new SomeApi(fooEndpoint);
* }</pre>
*
* @author <NAME>
*/
public class GetLocalEndpoint {
private static Logger logger = LoggerFactory.getLogger(GetLocalEndpoint.class);
/**
 * Retrieves an HTTP endpoint from an environment variable.
 *
 * @param name the environment variable's name
 * @return a string containing the path to an HTTP endpoint, or {@code null} if the environment
 *     variable has no value or errors were encountered constructing the URI
 */
public static @Nullable String getHttpEndpoint(@NotNull String name) {
String address = System.getenv(name);
if (address == null) {
return null;
}
// The value is expected in "host" or "host:port" form.
String[] parts = address.split(":");
String host = parts[0];
try {
URI uri =
parts.length > 1
? new URI("http", null, host, Integer.parseInt(parts[1]), null, null, null)
: new URI("http", host, null, null);
return uri.toString();
} catch (NumberFormatException e) {
logger.warn(
"Could not parse port from environment variable \"" + name + "\" with value \"" + address + "\"",
e);
return null;
} catch (URISyntaxException e) {
logger.warn(
"Could not build URI from environment variable \"" + name + "\" with value \"" + address + "\"",
e);
return null;
}
}
}
|
<reponame>RahulM98/Language-Dictionary
//The Dictionary Program
//Password required for some purposes
//The Password is "<PASSWORD>..."
#include<stdio.h>
#include<stdlib.h>
#include<string.h>
#include<ctype.h>
#include<conio.h>
/* Maximum stored length of a word and of a meaning, respectively. */
#define w_size 25
#define m_size 250
/* Comma-separated suggestion list shown to the user while typing;
 * filled by send_words_from_FILE. */
char word_list[100];
/* Current read offset into Dictionary_2.0.txt, shared by the file readers. */
int cursor_pos = 0;
/* Binary-search-tree node keyed by the word text. */
struct Tree
{
char arr[w_size];
char meaning[m_size];
struct Tree *left;
struct Tree *right;
};
/* Allocate and initialize a new tree node holding word and meaning.
 * Fix: malloc is now checked; on failure the program exits instead of
 * dereferencing NULL (no caller checks the return value). */
struct Tree *get_node(char word[],char meaning[])
{
struct Tree *new_node = (struct Tree *)malloc(sizeof(struct Tree));
if(new_node == NULL)
{
printf("Out of memory\n");
exit(1);
}
strcpy(new_node->arr,word);
strcpy(new_node->meaning,meaning);
new_node->left = new_node->right = NULL;
return new_node;
}
/* Insert (word, meaning) into the BST rooted at root and return the
 * (possibly new) root. Words comparing greater than the node go right;
 * everything else — including duplicates — goes left, preserving the
 * original ordering. */
struct Tree *insert(struct Tree *root,char word[],char meaning[])
{
if(root == NULL)
return get_node(word,meaning);
if(strcmp(root->arr,word) < 0)
root->right = insert(root->right,word,meaning);
else
root->left = insert(root->left,word,meaning);
return root;
}
/* Return the leftmost (lexicographically smallest) node of the subtree. */
struct Tree *smallest_ascii(struct Tree *root)
{
for( ; root->left != NULL; root = root->left)
;
return root;
}
/* Remove `word` from the BST and return the new subtree root.
 * Fixes two memory bugs in the original:
 *  - the one-child cases read root->right / root->left AFTER free(root)
 *    (use-after-free); the child pointer is now saved first;
 *  - the two-children case freed the in-order successor while it was still
 *    linked into the right subtree (dangling pointer); it is now unlinked
 *    by recursively deleting it from the right subtree. */
struct Tree *Delete(struct Tree *root,char word[])
{
struct Tree *store;
struct Tree *child;
if(strcmp(root->arr,word) < 0)
root->right = Delete(root->right,word);
else if(strcmp(root->arr,word) > 0)
root->left = Delete(root->left,word);
else
{
if(root->left == NULL && root->right == NULL)
{
free(root);
root = NULL;
}
else if(root->left == NULL)
{
child = root->right;
free(root);
root = child;
}
else if(root->right == NULL)
{
child = root->left;
free(root);
root = child;
}
else
{
/* Copy the in-order successor's data here, then remove the
 * successor node itself from the right subtree. */
store = smallest_ascii(root->right);
strcpy(root->arr,store->arr);
strcpy(root->meaning,store->meaning);
root->right = Delete(root->right,store->arr);
}
}
return root;
}
/* Return 1 if word is present in the BST, 0 otherwise (iterative lookup). */
int search(struct Tree *root,char word[])
{
while(root != NULL)
{
int cmp = strcmp(root->arr,word);
if(cmp == 0)
return 1;
root = (cmp < 0) ? root->right : root->left;
}
return 0;
}
/* Read one "word~meaning" record from Dictionary_2.0.txt starting at the
 * global cursor_pos: the word (up to '~') goes into array[], the meaning
 * (up to newline) into m_array[]. Leaves cursor_pos at the last character
 * consumed so the caller can resume scanning.
 * Fix: the stream character is now an int, not a char — on platforms where
 * char is unsigned, `s != EOF` was always true and the loops never
 * terminated at end of file. */
void FILE_input(char array[],char m_array[])
{
int s;
int i = 0;
FILE *fp;
fp = fopen("Dictionary_2.0.txt","r");
fseek(fp,cursor_pos,0);
s = fgetc(fp);
while((s != '~') && (s != EOF) && (i != w_size))
{
array[i] = s;
i++;
s = fgetc(fp);
}
if(i != w_size)
array[i] = '\0';
i = 0;
s = fgetc(fp);
while((s != '\n') && (s != EOF) && (i != m_size))
{
m_array[i] = s;
i++;
s = fgetc(fp);
}
if(i != m_size)
m_array[i] = '\0';
cursor_pos = ftell(fp) - 1;
fclose(fp);
}
/* Load every word~meaning record from Dictionary_2.0.txt into the BST and
 * return the new root. Each record is introduced by a newline; FILE_input
 * reads the record body and advances the shared cursor_pos, which this
 * loop then seeks to before reading the next character. */
struct Tree *load_BST(struct Tree *root)
{
FILE *fp;
char temp[w_size];
char m_temp[m_size];
int c;
fp = fopen("Dictionary_2.0.txt","r");
c = fgetc(fp);
cursor_pos = ftell(fp);
while(c != EOF)
{
if(c == '\n')
{
/* FILE_input reopens the file itself and reads from cursor_pos. */
FILE_input(temp,m_temp);
root = insert(root,temp,m_temp);
}
fseek(fp,cursor_pos,0);
c = fgetc(fp);
cursor_pos = ftell(fp);
}
fclose(fp);
return root;
}
/* In-order traversal: print every "word = meaning" pair in sorted order. */
void display(struct Tree *root)
{
if(root == NULL)
return;
display(root->left);
printf("%s = %s\n",root->arr,root->meaning);
display(root->right);
}
/* Append "word~meaning" as a new line to the dictionary file.
 * Fix: the word is written with fputs instead of being passed to fprintf
 * as the FORMAT string, which misbehaved (or crashed) whenever the word
 * contained a '%' character.
 * word_user is unused but kept for interface compatibility with callers. */
void file_insert(char word[],char word_user[],char meaning[])
{
FILE *fp;
fp = fopen("Dictionary_2.0.txt","a"); // Only adding string, so 'a'
fprintf(fp,"\n");
fputs(word,fp);
fputc('~',fp);
fputs(meaning,fp);
fclose(fp);
}
/* Look up word in the BST and print its meaning; prints nothing if the
 * word is absent. */
void find_meaning(struct Tree *root,char word[])
{
while(root != NULL)
{
int cmp = strcmp(root->arr,word);
if(cmp == 0)
{
printf("\nThe meaning is : %s",root->meaning);
return;
}
root = (cmp < 0) ? root->right : root->left;
}
}
/* Count record-introducing newlines in Dictionary_2.0.txt until the record
 * holding `word` is found, and return that count. del_from_file uses the
 * result to know which line to skip when rewriting the file.
 * NOTE(review): FILE_input reads via the global cursor_pos, which is not
 * reset here — this appears to rely on state left by earlier calls; verify
 * against the call sites. */
int line_number(char word[])
{
int c,line=0;
char words[w_size],m[m_size];
FILE *fp;
fp = fopen("Dictionary_2.0.txt","r");
while((c=fgetc(fp)) != EOF)
{
if(c == '\n')
{
line++;
FILE_input(words,m);
if((strcmp(word,words)) == 0)
break;
}
}
fclose(fp);
return line;
}
/* Rewrite the dictionary file without the record containing `word`:
 * copy every character except those on the matching line into a temporary
 * file, then replace the original file with it. */
void del_from_file(char word[])
{
FILE *fp1,*fp2;
int c,line,count=0;
line = line_number(word);
fp1 = fopen("Dictionary_2.0.txt","r");
fp2 = fopen("Temporary.txt","w");
while((c=fgetc(fp1)) != EOF)
{
if(c == '\n')
{
count++;
}
/* Copy everything except characters belonging to the target line. */
if(count != line)
{
fputc(c,fp2);
}
}
fclose(fp1);
fclose(fp2);
remove("Dictionary_2.0.txt");
rename("Temporary.txt","Dictionary_2.0.txt");
}
/* Helper: append each word of the subtree, in-order, as "\nword" to fp. */
static void _arrange_words(struct Tree *root,FILE *fp)
{
if(root == NULL)
return;
_arrange_words(root->left,fp);
fprintf(fp,"\n%s",root->arr);
_arrange_words(root->right,fp);
}
/* Dump all words of the BST into arrange.txt in sorted order.
 * Fix: the file is now opened once per call instead of being reopened and
 * closed for every single tree node as the original did. The public
 * signature is unchanged. */
void arrange_words_in_FILE(struct Tree *root)
{
FILE *fp;
fp = fopen("arrange.txt","a");
if(fp == NULL)
return;
_arrange_words(root,fp);
fclose(fp);
}
/* Build a comma-separated list (in the global word_list) of up to 5 words
 * from arrange.txt whose first n characters match `word`, for the typing
 * suggestions. Uppercases the caller's buffer in place via strupr
 * (non-standard, conio-era toolchain).
 * Fixes: the stream character is an int so EOF compares correctly; the
 * early return for n == 0 no longer leaks the open FILE; and an empty
 * result no longer writes to word_list[-1]. */
void send_words_from_FILE(int n,char word[])
{
int i,len,c,count = 0,max_word_count = 0;
int s;
char new_word[w_size];
FILE *fp;
fp = fopen("arrange.txt","r");
strupr(word);
word_list[0] = '\0';
s = fgetc(fp);
if(n == 0)
{
fclose(fp);
return;
}
while(s != EOF)
{
if(s == '\n')
{
i = 0;
/* NOTE(review): assumes every line is newline-terminated and
 * shorter than w_size; an unterminated final line could overrun
 * new_word — confirm how arrange.txt is written. */
while((s=fgetc(fp)) != '\n')
{
new_word[i] = s;
i++;
}
new_word[i] = '\0';
c = 0;
for(i=0;i<n;i++)
{
if(word[i] == new_word[i])
c++;
}
if(c == n)
{
max_word_count = max_word_count + 1;
len = strlen(new_word);
for(i=0;i<len;i++)
{
word_list[count] = new_word[i];
count++;
}
word_list[count] = ',';
count++;
}
}
new_word[n] = '\0';
/* Stop after 5 matches, or once the sorted file has passed `word`. */
if((max_word_count == 4) || (strcmp(word,new_word) < 0))
{
break;
}
}
if(count > 0)
word_list[count-1] = '\0';
fclose(fp);
}
/* Interactive word entry with live suggestions (conio getch loop).
 * `array` is the working buffer used for suggestion matching (it gets
 * uppercased in place by send_words_from_FILE), while `array_user`
 * preserves the word exactly as typed. arrange.txt is rebuilt from the
 * BST on every call and removed when input finishes. */
void user_input(struct Tree *root,char array[],int size,char array_user[])
{
int ch,i;
FILE *fp;
/* Truncate arrange.txt, then dump the tree into it in sorted order. */
fp = fopen("arrange.txt","w");
fclose(fp);
arrange_words_in_FILE(root);
for(i=0;i<size;i++)
{
ch = getch();
printf("\r ");
if(ch == 8) // For Backspace key
{
array[i-1] = '\0';
array_user[i-1] = '\0';
send_words_from_FILE(i-1,array);
printf("\rEnter word :%s %s",array_user,word_list);
i = i-2;
}
else if(ch == 13) // For Enter key
{
array[i] = '\0';
array_user[i] = '\0';
printf("\rEnter word :%s",array_user);
break;
}
else if(ch == 32) // For Space Bar key
{
/* Disallow a leading space or two spaces in a row. */
if(i==0 || isspace(array[i-1]))
i = i-1;
else
{
array[i] = ch;
array_user[i] = ch;
array[i+1] = '\0';
array_user[i+1] = '\0';
}
printf("\rEnter word :%s %s",array_user,word_list);
}
/* Punctuation ranges [\]^_` and {|}~ : stored, but only '_' (95)
 * triggers a suggestion refresh. */
else if((ch >= 91 && ch <= 96) || (ch >= 123 && ch <= 126))
{
array[i] = ch;
array_user[i] = ch;
array[i+1] = '\0';
array_user[i+1] = '\0';
if(ch == 95)
{
send_words_from_FILE(i+1,array);
}
printf("\rEnter word :%s %s",array_user,word_list);
}
else // For a valid character
{
array[i] = ch;
array_user[i] = ch;
array[i+1] = '\0';
array_user[i+1] = '\0';
send_words_from_FILE(i+1,array);
printf("\rEnter word :%s %s",array_user,word_list);
}
/* Backspacing past the start resets the prompt and the index. */
if(i < 0)
{
i = -1;
printf("\r ");
printf("\rEnter word :");
}
}
remove("arrange.txt");
}
/* Prompt for the master password, echoing '*' per character and handling
 * backspace. Reads at most 10 characters; returns 1 when the entry matches
 * the hard-coded password, 0 otherwise. */
int password()
{
int i,j,ch;
char password[12];
printf("Enter the password (maximum 10 characters) : ");
for(i=0;i<10;i++)
{
ch = getch();
if(ch == 8)
{
/* Backspace: drop the previous character and redraw the '*' row. */
i = i-2;
printf("\r ");
printf("\rEnter the password (maximum 10 characters) : ");
for(j=0;j<=i;j++)
printf("*");
}
else if(ch == 13)
break;
else
{
password[i] = ch;
printf("*");
}
/* Backspacing past the start resets the prompt and the index. */
if(i < 0)
{
printf("\r ");
printf("\rEnter the password (maximum 10 characters) : ");
i = -1;
}
}
password[i] = '\0';
if((strcmp(password,"<PASSWORD>...")) == 0)
return 1;
else
return 0;
}
/* Program entry: menu loop for searching, adding, deleting and editing
 * dictionary entries. Mutating operations are password-protected.
 * Fix: ch is initialized before the `while(ch != '5')` test, which
 * previously read an uninitialized variable on the first iteration (UB).
 * NOTE(review): gets() is unbounded and removed in C11, and fflush(stdin)
 * is undefined by the standard; both are kept because this targets a
 * conio-era toolchain that supports them — replace if the code is ported. */
main()
{
struct Tree *root=NULL;
int ch = 0;
int s;
char word[w_size],word_user[w_size];
word_list[0] = '\0';
root = load_BST(root); // First of all loading all the words from file in Binary Search Tree
printf("\n\t\t\t\tThe DICTIONARY\n\t\t\t\t\t--- By <NAME>\n\n");
printf("\t----------------------------------------------------------------\n");
printf("\t* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * \n");
printf("\t----------------------------------------------------------------\n\n");
printf("Welcome to the program \" The Dictionary \"");
printf("\nPlease let me know, what do you want to do with me by pressing the numbers from the MENU...\n");
while(ch != '5')
{
printf("\n\n\t-------- Program MENU --------\n\n");
printf("Press 1 To search a word's meaning\n\t2 To add a word\n\t3 To delete a word\n\t4 To edit a word's meaning\n\t5 To Exit\n\n\n");
printf("Enter your choice : ");
fflush(stdin);
ch = getchar();
switch(ch)
{
case '1':
/* Search; if absent, offer to add the word (password required). */
printf("Enter word :");
user_input(root,word,24,word_user);
if(search(root,word) == 0)
{
printf("\nEntered word is not listed in Dictionary.\n\t--Do you want to add this word ?");
s = 0;
do
{
printf("\n\t--Press Y for YES or N for NO : ");
while((getchar()) != '\n');
s = getchar();
if(s == 'Y' || s == 'y')
{
if(password() == 1)
{
printf("\n\tAccess granted...\n\n");
char meaning[m_size];
printf("\nEnter the meaning of %s : ",word_user);
while((getchar()) != '\n');
gets(meaning);
root = insert(root,word,meaning);
file_insert(word,word_user,meaning);
printf("\n \" %s \" is added to the Dictionary.",word_user);
}
else
printf("\n\tIncorrect password.\n\tAccess denied...");
break;
}
else if(s == 'N' || s == 'n')
printf("\n \" %s \" is not added in dictionary.\n\n",word_user);
}while(s != 'N' && s != 'n');
}
else
{
find_meaning(root,word);
}
break;
case '2':
/* Add a new word (password required). */
if(password() == 1)
{
printf("\n\tAccess granted...\n\n");
printf("Enter word :");
user_input(root,word,24,word_user);
if(search(root,word) == 0)
{
char meaning[m_size];
printf("\nEnter the meaning of %s : ",word_user);
while((getchar()) != '\n');
gets(meaning);
root = insert(root,word,meaning);
file_insert(word,word_user,meaning);
printf("\n \" %s \" is added to the Dictionary.",word_user);
}
else
printf("\n\"%s\" alrady exists in Dictionary.\n",word_user);
}
else
printf("\n\tIncorrect password.\n\tAccess denied...");
break;
case '3':
/* Delete a word after confirmation (password required). */
if(password() == 1)
{
printf("\n\tAccess granted...\n\n");
printf("Enter word :");
user_input(root,word,24,word_user);
if(search(root,word) == 0)
printf("Entered word is not listed in Dictionary.\n\n");
else
{
printf("\n\nThe word will be deleted permanently from the Dictionary.\n\n\tDo you want to delete this word ?");
s = -1;
while(s != 'N' && s != 'n')
{
printf("\n\t--Press Y for YES or N for NO : ");
fflush(stdin);
s = getchar();
if(s == 'Y' || s == 'y')
{
root = Delete(root,word);
del_from_file(word);
printf("\" %s \" is deleted from the Dictionary.\n",word_user);
break;
}
else if(s == 'N' || s == 'n')
printf("\" %s \" is not deleted from the Dictionary.\n",word_user);
}
}
}
else
printf("\n\tIncorrect password.\n\tAccess denied...");
break;
case '4':
/* Edit = delete the old record, then insert the new meaning. */
if(password() == 1)
{
printf("\n\tAccess granted...\n\n");
printf("Enter word :");
user_input(root,word,24,word_user);
if(search(root,word) == 0)
printf("Entered word is not listed in Dictionary.\n\n");
else
{
root = Delete(root,word);
del_from_file(word);
char meaning[m_size];
printf("\nEnter the meaning of %s : ",word_user);
while((getchar()) != '\n');
gets(meaning);
root = insert(root,word,meaning);
file_insert(word,word_user,meaning);
printf("The meaning of \" %s \" is edited.\n",word_user);
}
}
else
printf("\n\tIncorrect password.\n\tAccess denied...");
break;
case '5':
printf("\nThanks for visiting...\n\tPlease visit again...");
break;
default :
printf("Please press any of the numbers from the MENU\n\n\n");
}
printf("\n\n\n");
}
}
|
// Define the enumeration AlbumType
// Raw values mirror the external album-type identifier strings.
enum AlbumType: String {
case soundtrack = "SoundtrackAlbum"
case spokenWord = "SpokenWordAlbum"
case studio = "StudioAlbum"
}
// Classify an album title by keyword: "soundtrack" takes precedence over
// "spoken word" (matching is case-insensitive); anything else is a studio
// album.
func determineAlbumType(title: String) -> AlbumType {
    let lowered = title.lowercased()
    if lowered.contains("soundtrack") {
        return .soundtrack
    }
    if lowered.contains("spoken word") {
        return .spokenWord
    }
    return .studio
}
// Example usage
// Three titles exercising each branch of determineAlbumType.
let albumTitle1 = "The Lion King Soundtrack"
let albumTitle2 = "The Great Gatsby Spoken Word Album"
let albumTitle3 = "Thriller"
print(determineAlbumType(title: albumTitle1)) // Output: soundtrack
print(determineAlbumType(title: albumTitle2)) // Output: spokenWord
print(determineAlbumType(title: albumTitle3)) // Output: studio |
<reponame>tacrazymage/Data-Structures-and-Algorithms
#include <stdio.h>
#include <stdlib.h>
#include <limits.h>
/* Recursively print the optimal parenthesization of matrices i..j using the
 * split table s (s[i][j] = index of the last matrix of the left group).
 * Matrix indices are printed 1-based. `n` is accepted for symmetry with the
 * caller but is not used here. */
void print_param_of_matrix_chain(int (*s)[6],int i, int j, int n)
{
if(i == j) printf(" A[%d] ", i+1);
else{
printf("( ");
print_param_of_matrix_chain(s, i, s[i][j], n);
print_param_of_matrix_chain(s, s[i][j]+1, j, n);
printf(" )");
}
}
/* Compute the optimal parenthesization of a chain of n matrices by dynamic
 * programming (matrix-chain order). shape[i] = {rows, cols} of matrix i.
 * Prints the cost/split table and the optimal parenthesization.
 * Fix: the final print call was hard-coded to (0, 5, 6); it now uses n, so
 * the function is correct for any chain length, not just 6. */
void matrix_chain_multiplication(int (*shape)[2], int n)
{
int nb_mul[n][n],div[n][n],temp=0;
/* Cost 0 for single matrices; zero the split table. */
for(int i=0;i<n;i++)
{
for(int j=0;j<n;j++)
{
nb_mul[i][j] = 0;
div[i][j] = 0;
}
}
/* l is the distance between the first and last matrix of the subchain. */
for(int l=1;l<=n;l++)
{
for(int i=0;i<n-l;i++)
{
int j = i+l;
nb_mul[i][j] = INT_MAX;
for(int k=i;k<j;k++)
{
temp = nb_mul[i][k]+nb_mul[k+1][j]+shape[i][0]*shape[j][1]*shape[k][1];
if(temp<nb_mul[i][j])
{
nb_mul[i][j] = temp;
div[i][j] = k;
}
}
}
}
printf("Multiplication Matrix: [start][end][cut]nb_mul\n");
for(int i=0;i<n;i++)
{
for(int j=i;j<n;j++)
{
printf("[%d][%d][%d]%d\n", i+1, j+1, div[i][j]+1, nb_mul[i][j]);
}
}
printf("The optimal paranthesization is: ");
print_param_of_matrix_chain(div,0,n-1,n);
}
/* Classic six-matrix example (dimensions 30x35, 35x15, 15x5, 5x10,
 * 10x20, 20x25).
 * Fix: the call now passes the declared chain length n instead of
 * repeating the literal 6, and main has an explicit int return type. */
int main()
{
int n = 6;
int shapes[6][2] = {
{30,35},
{35,15},
{15,5},
{5,10},
{10,20},
{20,25}
};
matrix_chain_multiplication(shapes, n);
return 0;
}
# Install the Scheme implementation selected by $SCHEME for the CI run;
# unrecognized values fall through with no action.
case $SCHEME in
vicare)
./travis/install-vicare.sh
;;
petite)
./travis/install-petite.sh
;;
esac
|
package overview
import (
"fmt"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
"github.com/vladimirvivien/ktop/application"
"github.com/vladimirvivien/ktop/ui"
"github.com/vladimirvivien/ktop/views/model"
"k8s.io/apimachinery/pkg/api/resource"
)
// podPanel renders the pod overview table inside a bordered flex layout.
type podPanel struct {
app *application.Application
title string
root *tview.Flex
children []tview.Primitive
listCols []string
list *tview.Table
}
// NewPodPanel creates a pod panel with the given title and lays it out.
func NewPodPanel(app *application.Application, title string) ui.Panel {
p := &podPanel{app: app, title: title, list: tview.NewTable()}
p.Layout(nil)
return p
}
// GetTitle returns the panel's display title.
func (p *podPanel) GetTitle() string {
return p.title
}
// Layout builds the panel's widget tree: the pod table wrapped in a
// bordered, left-titled flex container. The argument is unused.
func (p *podPanel) Layout(_ interface{}) {
p.list.SetFixed(1, 0)
p.list.SetBorder(false)
p.list.SetBorders(false)
// Rows are selectable (with a highlight style) only while focused.
p.list.SetFocusFunc(func() {
p.list.SetSelectable(true,false)
p.list.SetSelectedStyle(tcell.StyleDefault.Background(tcell.ColorYellow).Foreground(tcell.ColorBlue))
})
p.list.SetBlurFunc(func() {
p.list.SetSelectable(false,false)
})
p.root = tview.NewFlex().SetDirection(tview.FlexRow).
AddItem(p.list, 0, 1, true)
p.root.SetBorder(true)
p.root.SetTitle(p.GetTitle())
p.root.SetTitleAlign(tview.AlignLeft)
}
// DrawHeader renders the column header row. data must be a []string of column
// titles; any other type is a programming error and panics.
func (p *podPanel) DrawHeader(data interface{}) {
	cols, ok := data.([]string)
	if !ok {
		// Fix: message previously said "DrawBody", misattributing the panic site.
		panic(fmt.Sprintf("podPanel.DrawHeader got unexpected data type %T", data))
	}
	p.listCols = cols // cached so Clear() can redraw the header
	for i, col := range p.listCols {
		p.list.SetCell(0, i,
			tview.NewTableCell(col).
				SetTextColor(tcell.ColorWhite).
				SetBackgroundColor(tcell.ColorDarkGreen).
				SetAlign(tview.AlignLeft).
				SetExpansion(100).
				SetSelectable(false),
		)
	}
	p.list.SetFixed(1, 0) // keep the header row pinned while scrolling
}
// DrawBody renders one table row per pod. data must be a []model.PodModel;
// any other type panics. When the metrics API is unavailable, requested
// resources (vs. live usage) are shown instead.
func (p *podPanel) DrawBody(data interface{}) {
	pods, ok := data.([]model.PodModel)
	if !ok {
		panic(fmt.Sprintf("PodPanel.DrawBody got unexpected type %T", data))
	}
	client := p.app.GetK8sClient()
	metricsDisabled := client.AssertMetricsAvailable() != nil
	// Bar-graph thresholds: green below 50%, yellow below 90%, red above.
	colorKeys := ui.ColorKeys{0: "green", 50: "yellow", 90: "red"}
	for i, pod := range pods {
		row := i + 1 // row 0 holds the header
		p.setRowCell(row, 0, pod.Namespace)
		p.setRowCell(row, 1, pod.Name)
		p.setRowCell(row, 2, fmt.Sprintf("%d/%d", pod.ReadyContainers, pod.TotalContainers))
		p.setRowCell(row, 3, pod.Status)
		p.setRowCell(row, 4, fmt.Sprintf("%d", pod.Restarts))
		p.setRowCell(row, 5, pod.TimeSince)
		// Volumes / volume mounts
		p.setRowCell(row, 6, fmt.Sprintf("%d/%d", pod.Volumes, pod.VolMounts))
		p.setRowCell(row, 7, pod.IP)
		p.setRowCell(row, 8, pod.Node)
		var cpuMetrics, memMetrics string
		if metricsDisabled {
			// Fallback: requested quantities against node allocatable.
			cpuRatio := ui.GetRatio(float64(pod.PodRequestedCpuQty.MilliValue()), float64(pod.NodeAllocatableCpuQty.MilliValue()))
			cpuGraph := ui.BarGraph(10, cpuRatio, colorKeys)
			cpuMetrics = fmt.Sprintf(
				"[white][%s[white]] %dm %02.1f%%",
				cpuGraph, pod.PodRequestedCpuQty.MilliValue(), cpuRatio*100,
			)
			memRatio := ui.GetRatio(float64(pod.PodRequestedMemQty.MilliValue()), float64(pod.NodeAllocatableMemQty.MilliValue()))
			memGraph := ui.BarGraph(10, memRatio, colorKeys)
			memMetrics = fmt.Sprintf(
				"[white][%s[white]] %dGi %02.1f%%", memGraph, pod.PodRequestedMemQty.ScaledValue(resource.Giga), memRatio*100,
			)
		} else {
			cpuRatio := ui.GetRatio(float64(pod.PodUsageCpuQty.MilliValue()), float64(pod.NodeAllocatableCpuQty.MilliValue()))
			cpuGraph := ui.BarGraph(10, cpuRatio, colorKeys)
			cpuMetrics = fmt.Sprintf("[white][%s[white]] %dm %02.1f%%", cpuGraph, pod.PodUsageCpuQty.MilliValue(), cpuRatio*100)
			// NOTE(review): CPU is measured against NodeAllocatable but memory
			// against NodeUsage — looks inconsistent; confirm before changing.
			memRatio := ui.GetRatio(float64(pod.PodUsageMemQty.MilliValue()), float64(pod.NodeUsageMemQty.MilliValue()))
			memGraph := ui.BarGraph(10, memRatio, colorKeys)
			memMetrics = fmt.Sprintf("[white][%s[white]] %dMi %02.1f%%", memGraph, pod.PodUsageMemQty.ScaledValue(resource.Mega), memRatio*100)
		}
		p.setRowCell(row, 9, cpuMetrics)
		p.setRowCell(row, 10, memMetrics)
	}
}

// setRowCell writes a left-aligned yellow text cell at (row, col); extracted
// to remove eleven near-identical SetCell literals from DrawBody.
func (p *podPanel) setRowCell(row, col int, text string) {
	p.list.SetCell(row, col, &tview.TableCell{
		Text:  text,
		Color: tcell.ColorYellow,
		Align: tview.AlignLeft,
	})
}
// DrawFooter is a no-op; the pod panel has no footer content.
func (p *podPanel) DrawFooter(data interface{}) {}
// Clear empties the table, rebuilds the layout, and redraws the cached header
// columns so the panel is ready for a fresh DrawBody.
func (p *podPanel) Clear() {
	p.list.Clear()
	p.Layout(nil)
	p.DrawHeader(p.listCols)
}
// GetRootView returns the panel's outer Flex for embedding in the app layout.
func (p *podPanel) GetRootView() tview.Primitive {
	return p.root
}
// GetChildrenViews returns the child primitives (may be nil; never populated
// by this panel itself).
func (p *podPanel) GetChildrenViews() []tview.Primitive {
	return p.children
}
|
import subDays from 'date-fns/subDays';
import format from 'date-fns/format';
import differenceInDays from 'date-fns/differenceInDays';
import { Op } from 'sequelize';
import { getDates } from '../utils/dates';
import Receipts from '../database/models/Receipts';
import Plantations from '../database/models/Plantations';
import User from '../database/models/Users';
/**
 * Dashboard summary endpoint: totals and per-day series of trees planted
 * (Plantations) and money received (Receipts) over a date window.
 * Window: last `?date_filter` days if given, else from the first plantation.
 * Responds 200 with {trees, capital, carbon, treeChartData, capitalChartData}
 * or 409 with {msg} on failure.
 */
const show = async (req, res) => {
  try {
    const user = await User.findByPk(req.userId);

    // Base filter: non-deleted rows, scoped to the user's company when set;
    // an explicit ?company_id= overrides it.
    const queryObject = {
      deleted_at: null,
    };
    if (user.company_id) {
      queryObject.company_id = user.company_id;
    }
    if (req.query.company_id) {
      queryObject.company_id = req.query.company_id;
    }

    const firstPlantation = await Plantations.findOne({ where: queryObject, order: [['date', 'ASC']] });
    // Fix: default to 0 so an empty database no longer yields
    // subDays(new Date(), NaN) (an Invalid Date in the query).
    let datePlantation = 0;
    if (firstPlantation) {
      datePlantation = differenceInDays(new Date(), firstPlantation.date || new Date()) + 1;
    }
    const windowDays = Number(req.query.date_filter || datePlantation);
    queryObject.date = {
      [Op.gte]: subDays(new Date(), windowDays),
    };

    const receipts = await Receipts.findAll({ where: queryObject, order: [['date', 'ASC']] });
    const plantations = await Plantations.findAll({ where: queryObject, order: [['date', 'ASC']] });

    const datesToMap = getDates(subDays(new Date(), windowDays), new Date());
    const sameDay = (a, b) => format(new Date(a), 'yyyy-MM-dd') === format(new Date(b), 'yyyy-MM-dd');

    // Per-day series, indexed by position in the window.
    const treeChartData = datesToMap.map((day, index) => ({
      x: index,
      y: plantations
        .filter((p) => sameDay(p.date, day))
        .reduce((sum, p) => sum + Number(p.trees), 0),
    }));
    const capitalChartData = datesToMap.map((day, index) => ({
      x: index,
      y: receipts
        .filter((r) => sameDay(r.date, day))
        .reduce((sum, r) => sum + Number(r.value), 0),
    }));

    // Totals over the whole window (reduce instead of map-with-side-effects).
    const plantationsSum = plantations.reduce((sum, p) => sum + Number(p.trees), 0);
    const receiptsSum = receipts.reduce((sum, r) => sum + Number(r.value), 0);

    return res.status(200).json({
      trees: plantationsSum,
      capital: receiptsSum,
      carbon: plantationsSum * 130, // presumably kg CO2 per tree — TODO confirm factor/unit
      treeChartData,
      capitalChartData,
    });
  } catch (err) {
    console.log(err);
    // Fix: err.errors only exists on Sequelize validation errors; fall back to
    // the plain message so the handler never throws a secondary TypeError.
    const msg = err.errors ? err.errors.map((e) => e.message) : err.message;
    return res.status(409).json({ msg });
  }
};
// Controller export: only the aggregated dashboard endpoint is public.
export default {
  show,
};
|
from collections import Counter


def build_palindrome(text):
    """Return a palindrome using every character of *text* (each occurrence
    exactly once), or ``None`` when impossible, i.e. when more than one
    character has an odd count.

    Deterministic (Counter preserves first-occurrence order), unlike the
    previous version which depended on arbitrary ``set`` iteration order.
    """
    counts = Counter(text)
    odd_chars = [ch for ch, cnt in counts.items() if cnt % 2 == 1]
    if len(odd_chars) > 1:
        return None
    # Left half: each character repeated half its count; the single odd
    # character (if any) sits in the middle.
    half = "".join(ch * (cnt // 2) for ch, cnt in counts.items())
    middle = odd_chars[0] if odd_chars else ""
    return half + middle + half[::-1]


if __name__ == "__main__":
    result = build_palindrome(input())
    print("NO SOLUTION" if result is None else result)
#!/bin/bash
clear
# banner: print the ASCII-art logo and credits (bold green art, yellow frame),
# leaving the terminal foreground red for the following prompt text.
banner () {
echo -e "$(tput setaf 2) \033[1m
██╗ ██████╗ ██████╗ ██╗███╗ ██╗██████╗ ██████╗ ██████╗
██║ ██╔═══██╗██╔════╝ ██║████╗ ██║██╔══██╗██╔═══██╗██╔══██╗
██║ ██║ ██║██║ ███╗██║██╔██╗ ██║██████╔╝██║ ██║██║ ██║
██║ ██║ ██║██║ ██║██║██║╚██╗██║██╔═══╝ ██║ ██║██║ ██║
███████╗╚██████╔╝╚██████╔╝██║██║ ╚████║██║ ╚██████╔╝██████╔╝
╚══════╝ ╚═════╝ ╚═════╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═════╝ ╚═════╝$(tput setaf 3)
/─────────────────────────────────────────────────────\
[=>] Protect your accounts with strong passwords [<=]
[=>] Created by: Sandesh (3xploitGuy) [<=]
\_____________________________________________________/
$(tput setaf 1)
"
}
# requirements: apt-install python and python3 if dpkg reports them missing,
# and create the hidden state directory on first run.
requirements () {
  if [ $(dpkg-query -W -f='${Status}' python 2>/dev/null | grep -c "ok installed") -eq 0 ];
  then
    echo -e "$(tput setaf 1)Installing requirements....$(tput setaf 7)"
    sudo apt-get install python;
  fi
  if [ $(dpkg-query -W -f='${Status}' python3 2>/dev/null | grep -c "ok installed") -eq 0 ];
  then
    echo -e "$(tput setaf 1)Installing requirements....$(tput setaf 7)"
    sudo apt-get install python3;
  fi
  # First-run state: account names, 1-based entry counter, password store
  # seeded with the literal LOGINPOD, and the serial-number index.
  if [ ! -d ".add_account" ]
  then
    mkdir .add_account
    touch .add_account/account
    echo 1 > .add_account/count
    echo LOGINPOD > .add_account/password
    touch .add_account/serial_no
  fi
}
# scan_key: prompt for the 5-digit numeric encryption key, echoing '*' per
# keystroke. Sets the global $key; recurses until input is valid.
scan_key () {
  unset key
  prompt_key="Enter Your 5'digit Encryption Key To Begin: "
  # Read one character at a time (-s silent) so each keystroke can be masked.
  while IFS= read -p "$prompt_key" -r -s -n 1 char
  do
    if [[ $char == $'\0' ]]
    then
      break
    fi
    prompt_key='*'
    key+="$char"
  done
  # Non-empty after stripping digits => contains non-digit characters.
  if [[ -n ${key//[0-9]/} ]]; then
    echo -e "$(tput setaf 7)\nerror: input Contains letters (integer expected)\n$(tput setaf 1)"
    scan_key
  fi
  # NOTE(review): after a recursive retry returns, this length check re-runs
  # against the (global) key set by the retry — appears to work but confirm.
  if [ ${#key} -gt 5 -o ${#key} -lt 5 ]; then
    echo -e "$(tput setaf 7)\nerror: length of key is 5 digit's\n$(tput setaf 1)"
    scan_key
  fi
}
# encrypter: read a password (masked with '*') and print its encrypted form
# via encryption.py, passing the 5 digits of the global $key as arguments.
encrypter () {
  clear
  banner
  echo -e "$(tput setaf 1) Enter your password and get encrypt Value$(tput setaf 7)\n\n"
  unset password
  prompt="Enter Password > "
  while IFS= read -p "$prompt" -r -s -n 1 char
  do
    if [[ $char == $'\0' ]]
    then
      break
    fi
    prompt='*'
    password+="$char"
  done
  if [ ${#password} -eq 0 ]; then
    echo -e "$(tput setaf 1)\n\nerror: wrong input exiting...."
    echo -e "$(tput setaf 3)"
    read -p "Press enter to go back..." update_enter
    menu_one
  else
    # NOTE(review): $password is unquoted — spaces/globs would be mangled.
    python3 encryption.py ${key:0:1} ${key:1:1} ${key:2:1} ${key:3:1} ${key:4:1} $password
    echo -e "$(tput setaf 3)"
    read -p "Press enter to go back..." encrypter_enter
    menu_one
  fi
}
# menu_one: top-level menu loop (implemented by mutual recursion, not a loop):
# 1 vault menu, 2 encrypter, 3 exit; anything else re-prompts.
menu_one () {
  clear
  banner
  echo -e "$(tput setaf 1) (M) (A) (I) (N) (M) (E) (N) (U)\n\n"
  echo -e " $(tput setaf 1)[$(tput setaf 4)1$(tput setaf 1)] $(tput setaf 2)Password Vault\n"
  echo -e " $(tput setaf 1)[$(tput setaf 4)2$(tput setaf 1)] $(tput setaf 2)Encrypter\n"
  echo -e " $(tput setaf 1)[$(tput setaf 4)3$(tput setaf 1)] $(tput setaf 2)Exit\n\n$(tput setaf 7)"
  read -p "Enter option: " menu_one_option
  case $menu_one_option in
  1) menu_two
  ;;
  2) encrypter
  ;;
  3) echo -e "$(tput setaf 1)\nAll safe....\n"
  exit
  ;;
  *) echo -e "$(tput setaf 1)\nPlease select correct input....\n$(tput setaf 7)"
  sleep 0.6
  banner
  menu_one
  ;;
  esac
}
# error_msg: shared bad-input handler for vault actions — report, pause,
# return to the vault menu. Note: does NOT stop the calling function's flow
# by itself; callers continue after it returns.
error_msg () {
  echo -e "$(tput setaf 1)\nerror: wrong input exiting...."
  echo -e "$(tput setaf 3)"
  read -p "Press enter to go back..." press_enter
  menu_two
}
# add_acc: append a new account name + password to the flat-file store under
# .add_account/ and bump the entry counter (serial_no gets "[N]").
add_acc () {
  clear
  banner
  echo -e "$(tput setaf 1) A_D_D A_C_C_O_U_N_T\n\n"
  read -p "$(tput setaf 1)Account name: $(tput setaf 7)" account
  echo ""
  if [ ${#account} -eq 0 ]; then
    error_msg
  fi
  read -p "$(tput setaf 1)Enter password: $(tput setaf 7)" text_pas
  if [ ${#text_pas} -eq 0 ]; then
    error_msg
  fi
  counter=`cat .add_account/count`
  echo [$counter] >> .add_account/serial_no
  echo $account >> .add_account/account
  # NOTE(review): passwords are stored as entered in a plain file; whether
  # they are pre-encrypted by the user is not visible here — confirm.
  echo $text_pas >> .add_account/password
  count=$((counter+1))
  echo $count > .add_account/count
  echo -e "\nAccount added successfully...."
  echo -e "$(tput setaf 3)"
  read -p "Press enter to go back..." add_acc_enter
  banner
  menu_two
}
# update_passwd: list accounts, ask for an entry number, validate it, then
# rewrite the password file via update_password/update.py through a temp file.
update_passwd () {
  clear
  banner
  echo -e "$(tput setaf 1) S_E_L_E_C_T A_C_C_O_U_N_T"
  echo -e "$(tput setaf 1) (Update Password)\n\n"
  echo -e "$(tput setaf 2)"
  echo -e "$(tput setaf 1)No. Account$(tput setaf 2)"
  paste .add_account/serial_no .add_account/account
  echo -e "$(tput setaf 7)\n"
  read -p "Enter account number: " update_number
  # Reject non-numeric input.
  if [[ -n ${update_number//[0-9]/} ]]; then
    echo -e "$(tput setaf 7)\nerror: input Contains letters (integer expected)\n$(tput setaf 1)"
    error_msg
  fi
  count_value1=`cat .add_account/count`
  if [ ${#update_number} -eq 0 ]; then
    error_msg
  fi
  # Valid entries are 1..count-1 (count is the next free slot).
  if [ $update_number -ge $count_value1 -o $update_number == 0 ]; then
    error_msg
  else
    echo -e "\n"
    read -p "Enter new password: " update_password
    if [ ${#update_password} -eq 0 ]; then
      error_msg
    fi
    # The helper prints the full rewritten password file to stdout.
    python update_password/update.py $update_number $update_password > temp2
    cat temp2 > .add_account/password
    rm temp2
    echo -e "\nPassword updated successfully...."
    echo -e "$(tput setaf 3)"
    read -p "Press enter to go back..." update_enter
    menu_two
  fi
}
# retrive_passwd: list accounts, fetch the stored (encrypted) password via
# retrive_password/retrive.py, then decode/display it with encryption.py and
# the 5-digit $key.
retrive_passwd () {
  clear
  banner
  echo -e "$(tput setaf 1) S_E_L_E_C_T A_C_C_O_U_N_T"
  echo -e "$(tput setaf 1) (Retrive Password)\n\n"
  echo -e "$(tput setaf 2)"
  echo -e "$(tput setaf 1)No. Account$(tput setaf 2)"
  paste .add_account/serial_no .add_account/account
  echo -e "$(tput setaf 7)\n"
  read -p "Enter account number: " retrive_number
  if [[ -n ${retrive_number//[0-9]/} ]]; then
    echo -e "$(tput setaf 7)\nerror: input Contains letters (integer expected)\n$(tput setaf 1)"
    error_msg
  fi
  if [ ${#retrive_number} -eq 0 ]; then
    error_msg
  fi
  count_value2=`cat .add_account/count`
  if [ $retrive_number -ge $count_value2 -o $retrive_number == 0 ]; then
    echo -e "\n$(tput setaf 1)error: wrong input exiting...."
  else
    python retrive_password/retrive.py $retrive_number > temp1
    retrived=`cat temp1`
    rm temp1
    # NOTE(review): $retrived is unquoted — values with spaces would split.
    python3 encryption.py ${key:0:1} ${key:1:1} ${key:2:1} ${key:3:1} ${key:4:1} $retrived
  fi
  echo -e "$(tput setaf 3)"
  read -p "Press enter to go back..." retrive_enter
  menu_two
}
# delete_account: remove the selected entry from account, password and serial
# files (each rewritten via a helper script + temp file), then decrement the
# entry counter.
delete_account () {
  clear
  banner
  echo -e "$(tput setaf 1) S_E_L_E_C_T A_C_C_O_U_N_T"
  echo -e "$(tput setaf 1) (Delete Account)\n\n"
  echo -e "$(tput setaf 2)"
  echo -e "$(tput setaf 1)No. Account$(tput setaf 2)"
  paste .add_account/serial_no .add_account/account
  echo -e "$(tput setaf 7)\n"
  read -p "Enter account number: " delete_number
  if [[ -n ${delete_number//[0-9]/} ]]; then
    echo -e "$(tput setaf 7)\nerror: input Contains letters (integer expected)\n$(tput setaf 1)"
    error_msg
  fi
  if [ ${#delete_number} -eq 0 ]; then
    error_msg
  fi
  count_value=`cat .add_account/count`
  if [ $delete_number -ge $count_value -o $delete_number == 0 ]; then
    echo -e "\n$(tput setaf 1)error: wrong input exiting...."
  else
    # Each helper prints the rewritten file to stdout; round-trip via temp files.
    python delete_account/delete_acc.py $delete_number > temp3
    cat temp3 > .add_account/account
    rm temp3
    python delete_account/delete_pass.py $delete_number > temp4
    cat temp4 > .add_account/password
    rm temp4
    python delete_account/delete_serial.py > temp5
    cat temp5 > .add_account/serial_no
    rm temp5
    counter1=`cat .add_account/count`
    count1=$((counter1-1))
    echo $count1 > .add_account/count
    echo -e "$(tput setaf 7)\nAccount deleted successfully...."
  fi
  echo -e "$(tput setaf 3)"
  read -p "Press enter to go back..." retrive_enter
  menu_two
}
# menu_two: vault sub-menu — add/update/retrieve/delete an account or return
# to the main menu; invalid input re-prompts.
menu_two () {
  clear
  banner
  echo -e "$(tput setaf 1) (V) (A) (U) (L) (T) (M) (E) (N) (U)\n\n"
  echo -e " $(tput setaf 1)[$(tput setaf 4)1$(tput setaf 1)] $(tput setaf 2)Add account and password\n"
  echo -e " $(tput setaf 1)[$(tput setaf 4)2$(tput setaf 1)] $(tput setaf 2)Update password for a account\n"
  echo -e " $(tput setaf 1)[$(tput setaf 4)3$(tput setaf 1)] $(tput setaf 2)Retrive password for a account\n"
  echo -e " $(tput setaf 1)[$(tput setaf 4)4$(tput setaf 1)] $(tput setaf 2)Delete a account\n"
  echo -e " $(tput setaf 1)[$(tput setaf 4)5$(tput setaf 1)] $(tput setaf 2)Main Menu\n\n$(tput setaf 7)"
  read -p "Enter option: " menu_two_option
  case $menu_two_option in
  1) add_acc
  ;;
  2) update_passwd
  ;;
  3) retrive_passwd
  ;;
  4)delete_account
  ;;
  5) banner
  menu_one
  ;;
  *) echo -e "$(tput setaf 1)\nPlease select correct input....\n$(tput setaf 7)"
  sleep 0.6
  menu_two
  ;;
  esac
}
# Entry point: ensure dependencies and state files, show the banner, collect
# the encryption key, then enter the main menu.
requirements
banner
scan_key
menu_one
|
<gh_stars>0
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var SortingDirection;
(function (SortingDirection) {
SortingDirection["Ascending"] = "ASC";
SortingDirection["Descending"] = "DESC";
SortingDirection["None"] = "NONE";
})(SortingDirection = exports.SortingDirection || (exports.SortingDirection = {}));
//# sourceMappingURL=enums.js.map |
#!/bin/bash
# Bootstrap script (user-data style): replace Apache with a Docker-hosted nginx.
service httpd stop # free port 80 — assumes httpd is installed; TODO confirm
yum install -y docker
service docker start
sleep 10 # crude wait for the Docker daemon to come up before pulling
docker pull nginx
#!/bin/sh
set -e
set -u
set -o pipefail
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Default to serial code signing unless the build opted in to parallel signing.
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# ($1 = framework path; resolved against BUILT_PRODUCTS_DIR first, then as-is)
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  # Follow symlinks so rsync copies the real framework, not the link.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  # Fall back to a bare binary when the .framework layout is absent.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# ($1 = dSYM path; result is staged in DERIVED_FILES_DIR, then moved to
# DWARF_DSYM_FOLDER_PATH only when stripping actually occurred)
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi
    # STRIP_BINARY_RETVAL==1 means strip_invalid_archs processed the binary.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
# ($1 = path to sign; no-op when signing is disallowed or not required)
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    # Background the job when parallel signing is enabled; the script tail waits.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# ($1 = binary; removes slices not in $ARCHS. Sets STRIP_BINARY_RETVAL:
#  1 = binary processed, 0 = no overlap with ARCHS, binary left untouched)
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/testcommon/testcommon.framework"
install_framework "${BUILT_PRODUCTS_DIR}/testlibB/testlibB.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/testcommon/testcommon.framework"
install_framework "${BUILT_PRODUCTS_DIR}/testlibB/testlibB.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
<reponame>jamesccoholan/discoastronautclub
import React from "react";
// nodejs library that concatenates classes
import classNames from "classnames";
// @material-ui/core components
import { makeStyles } from "@material-ui/core/styles";
// core components
import GridContainer from "components/Grid/GridContainer.js";
import GridItem from "components/Grid/GridItem.js";
// import Button from "components/CustomButtons/Button.js";
import Card from "components/Card/Card.js";
import CardBody from "components/Card/CardBody.js";
import CardFooter from "components/Card/CardFooter.js";
import styles from "assets/jss/material-kit-react/views/landingPageSections/teamStyle.js";
// Team roster rendered by TeamSection: display name, role line, and avatar URL
// (hosted on ImageKit).
const team = [
  {
    name: "James",
    job: "Creative Director and Virtual Producer",
    img: "https://ik.imagekit.io/5ierklngtbt/img/NewSuits/2_T0dR38pVf?ik-sdk-version=javascript-1.4.3&updatedAt=1643769195521",
  },
  {
    name: "Andy",
    job: "Partnerships, Promotion and Strategy",
    img: "https://ik.imagekit.io/5ierklngtbt/img/NewSuits/3_xvb2rDJVx?ik-sdk-version=javascript-1.4.3&updatedAt=1643769156579",
  },
  {
    name: "Ilan",
    job: "Smart Contracts",
    img: "https://ik.imagekit.io/5ierklngtbt/img/NewSuits/4_2QbWZ8_lE?ik-sdk-version=javascript-1.4.3&updatedAt=1643773553282",
  },
  {
    name: "Stephen",
    job: "Web Dev and Smart Contracts",
    img: "https://ik.imagekit.io/5ierklngtbt/img/NewSuits/5_ufGRMXH-h?ik-sdk-version=javascript-1.4.3&updatedAt=1643773516872",
  },
  {
    name: "Dave",
    job: "Content Strategist and Promotion",
    img: "https://ik.imagekit.io/5ierklngtbt/img/NewSuits/6_iaiuTA35f?ik-sdk-version=javascript-1.4.3&updatedAt=1643773577781",
  },
  {
    name: "Ben",
    job: "Music Production and Distribution",
    img: "https://ik.imagekit.io/5ierklngtbt/img/NewSuits/7_gsewDuGLM?ik-sdk-version=javascript-1.4.3&updatedAt=1643773486839",
  },
  {
    name: "Jamison",
    job: "Franchise Development",
    img: "https://ik.imagekit.io/5ierklngtbt/img/NewSuits/11_Sy5D6Z-YW?ik-sdk-version=javascript-1.4.3&updatedAt=1643773690863",
  },
  {
    name: "Dre",
    job: "Content Strategist and Promotion",
    img: "https://ik.imagekit.io/5ierklngtbt/img/NewSuits/9_1frbXBDJw?ik-sdk-version=javascript-1.4.3&updatedAt=1643773717216",
  },
  {
    name: "Alex",
    job: "Video Production",
    img: "https://ik.imagekit.io/5ierklngtbt/img/NewSuits/12_3G5yM3e2q?ik-sdk-version=javascript-1.4.3&updatedAt=1643773632536",
  },
];
// Hook factory for the material-kit teamStyle classes used below.
const useStyles = makeStyles(styles);
export default function TeamSection() {
const classes = useStyles();
const imageClasses = classNames(
classes.imgRaised,
classes.imgRoundedCircle,
classes.imgFluid
);
return (
<div className={classes.section}>
<h2 className={classes.title}>The Team</h2>
<div>
<GridContainer>
{team.map(function (member) {
return (
<GridItem key={member.name} xs={12} sm={12} md={4}>
<Card plain>
<GridItem xs={12} sm={12} md={6} className={classes.itemGrid}>
<img
src={member.img}
alt="Team Member"
className={imageClasses}
/>
</GridItem>
<h4 className={classes.cardTitle}>
{member.name}
<br />
<small className={classes.smallTitle}>{member.job}</small>
</h4>
<CardBody></CardBody>
<CardFooter className={classes.justifyCenter}></CardFooter>
</Card>
</GridItem>
);
})}
</GridContainer>
</div>
</div>
);
}
|
import os
import subprocess

videos = ['/path/to/video1.mp4', '/path/to/video2.mp4', '/path/to/video3.mp4']  # Example list of video file paths
models = ['/path/to/model1.config', '/path/to/model2.config', '/path/to/model3.config']  # Example list of model file paths


def build_command(video, output_dir, config):
    """Return the demo invocation as an argv list (safe for subprocess.run,
    no shell quoting/injection issues unlike the previous os.system string)."""
    return ["python", "demo/demo.py",
            "--video-input", video,
            "--output", output_dir,
            "--config-file", config]


def main():
    """Run the demo for every (model, video) pair, one results dir per model."""
    for model in models:
        for video in videos:
            print("for model: {}".format(model))
            print("for video: {}".format(video))
            print("......")
            stem = os.path.splitext(os.path.basename(model))[0]  # model name sans extension
            output_dir = "../data/results/{}".format(stem)
            os.makedirs(output_dir, exist_ok=True)  # create output directory if missing
            # check=False mirrors os.system: a failed run does not abort the batch.
            subprocess.run(build_command(video, output_dir, model), check=False)


if __name__ == "__main__":
    main()
<gh_stars>0
package DB.Domain.Magazine;
import java.time.LocalDate;
import javax.servlet.http.Part;
/**
* This class is a representation of a post at database
*
* @author jefemayoneso
*/
public class MagazinePost {

    private int id;              // primary key
    private String title;        // post title
    private String dateString;   // string rendering of date, kept for views
    private String magazine;     // owning magazine name
    private String pdfNamePath;  // server-side path/name of the stored PDF
    private LocalDate date;      // publication date
    private Part pdfPart;        // uploaded multipart file (upload flow only)

    /** No-arg constructor for frameworks/bean instantiation. */
    public MagazinePost() {
    }

    /**
     * Builds a post loaded from the database.
     *
     * @param id          primary key
     * @param title       post title
     * @param magazine    owning magazine name
     * @param date        publication date; must be non-null (rendered into dateString)
     * @param pdfNamePath stored PDF path
     */
    public MagazinePost(int id, String title, String magazine, LocalDate date, String pdfNamePath) {
        this.id = id;
        this.title = title;
        this.magazine = magazine;
        this.date = date;
        this.pdfNamePath = pdfNamePath;
        // casts
        this.dateString = date.toString();
    }

    /** Full constructor covering every field, including the upload Part. */
    public MagazinePost(int id, String title, String dateString, String magazine, String pdfNamePath, LocalDate date, Part pdfPart) {
        this.id = id;
        this.title = title;
        this.dateString = dateString;
        this.magazine = magazine;
        this.pdfNamePath = pdfNamePath;
        this.date = date;
        this.pdfPart = pdfPart;
    }

    /**
     * @return the id
     */
    public int getId() {
        return id;
    }

    /**
     * @param id the id to set
     */
    public void setId(int id) {
        this.id = id;
    }

    /**
     * @return the title
     */
    public String getTitle() {
        return title;
    }

    /**
     * @param title the title to set
     */
    public void setTitle(String title) {
        this.title = title;
    }

    /**
     * @return the dateString
     */
    public String getDateString() {
        return dateString;
    }

    /**
     * @param dateString the dateString to set
     */
    public void setDateString(String dateString) {
        this.dateString = dateString;
    }

    /**
     * @return the magazine
     */
    public String getMagazine() {
        return magazine;
    }

    /**
     * @param magazine the magazine to set
     */
    public void setMagazine(String magazine) {
        this.magazine = magazine;
    }

    /**
     * @return the date
     */
    public LocalDate getDate() {
        return date;
    }

    /**
     * @param date the date to set
     */
    public void setDate(LocalDate date) {
        this.date = date;
    }

    /**
     * @return the stored PDF path
     */
    public String getPdfNamePath() {
        return pdfNamePath;
    }

    /**
     * @param pdfNamePath the stored PDF path to set
     */
    public void setPdfNamePath(String pdfNamePath) {
        this.pdfNamePath = pdfNamePath;
    }

    @Override
    public String toString() {
        // Fix: a space was missing before "path=", fusing it with the date value.
        return "post id=" + this.getId() + " title=" + this.getTitle() + " magazine=" + this.getMagazine() + " date=" + this.getDateString() + " path=" + this.getPdfNamePath();
    }

    /**
     * @return the pdfPart
     */
    public Part getPdfPart() {
        return pdfPart;
    }

    /**
     * @param pdfPart the pdfPart to set
     */
    public void setPdfPart(Part pdfPart) {
        this.pdfPart = pdfPart;
    }
}
|
import store from '@/store';
/**
 * Returns true when the current user's store-held permission list contains
 * `permission`.
 */
export function hasPermission(permission) {
  // Fix: fall back to an empty list — previously an uninitialised store (no
  // getters.permissions) made indexOf throw a TypeError.
  const permissions = (store.getters && store.getters.permissions) || [];
  return permissions.indexOf(permission) >= 0;
}
|
<gh_stars>0
import { sign } from 'jsonwebtoken';
import IHashProvider from '../providers/HashProvider/models/IHashProvider';
import AppError from '@shared/errors/AppError';
import ICustomersRepository from '@modules/customer/Repositories/ICustomerRepository';
import Customer from '@modules/customer/infra/typeorm/models/Customer';
import authConfig from '@config/auth';
import { injectable, inject} from 'tsyringe';
// Credentials received in the sign-in request body.
interface Request {
  email: string;
  password: string;
}
// Successful authentication result: the customer record plus a signed JWT.
interface Response {
  customer: Customer;
  token: string;
}
@injectable()
class AuthenticateUserService {
  constructor(
    // Dependencies resolved by tsyringe via the string tokens registered
    // in the DI container.
    @inject('CustomerRepository')
    private customerRepository: ICustomersRepository,
    @inject('HashProvider')
    private hashProvider: IHashProvider,
  ) {}

  /**
   * Authenticates a customer by e-mail/password and issues a JWT whose
   * subject is the customer id.
   *
   * Throws AppError(401) for both unknown e-mail and wrong password; the
   * message is deliberately identical so accounts cannot be enumerated.
   */
  public async execute({ email, password }: Request): Promise<Response> {
    const customer = await this.customerRepository.findByEmail(email);
    if (!customer) {
      throw new AppError('Incorrect email/password combination.', 401);
    }
    const passwordMatched = await this.hashProvider.compareHash(password, customer.password_hash);
    if (!passwordMatched) {
      throw new AppError('Incorrect email/password combination.', 401);
    }
    const { secret, expiresIn } = authConfig.jwt;
    const token = sign({}, secret, {
      subject: customer.id,
      expiresIn,
    });
    return {
      customer,
      token,
    };
  }
}
export default AuthenticateUserService;
|
#!/usr/bin/env bash
# ----------------------------------------------------------------------------------
# Filename: pve_medialab_ct_sonarr_installer.sh
# Description: Installer script for Proxmox Sonarr Server
# ----------------------------------------------------------------------------------
#---- Bash command to run script ---------------------------------------------------
#---- Source Github
# bash -c "$(wget -qLO - https://raw.githubusercontent.com/ishanjain28/pve-medialab/master/pve_medialab_ct_sonarr_installer.sh)"
#---- Source local Git
# /mnt/pve/nas-01-git/ishanjain28/pve-medialab/pve_medialab_ct_sonarr_installer.sh
#---- Source -----------------------------------------------------------------------
#---- Dependencies -----------------------------------------------------------------
# Check for Internet connectivity
# (1s TCP probe to google.com:443; abort early so later downloads cannot fail midway)
if nc -zw1 google.com 443; then
  echo
else
  echo "Checking for internet connectivity..."
  echo -e "Internet connectivity status: \033[0;31mDown\033[0m\n\nCannot proceed without a internet connection.\nFix your PVE hosts internet connection and try again..."
  echo
  # NOTE(review): exits 0 on this failure path, so callers cannot detect the
  # abort from the exit status — confirm before changing to a non-zero code.
  exit 0
fi
# Installer cleanup
# Removes the temporary repo checkouts and tarballs; "file not found" errors
# are deliberately discarded via &> /dev/null.
function installer_cleanup () {
  rm -R ${REPO_TEMP}/common &> /dev/null
  rm -R ${REPO_TEMP}/${GIT_REPO} &> /dev/null
  rm ${REPO_TEMP}/common.tar.gz &> /dev/null
  rm ${REPO_TEMP}/${GIT_REPO}.tar.gz &> /dev/null
}
#---- Static Variables -------------------------------------------------------------
# (Consumed by pve_repo_loader.sh and the app script sourced below.)
# Git server
GIT_SERVER='https://github.com'
# Git user
GIT_USER='ishanjain28'
# Git repository
GIT_REPO='pve-medialab'
# Git branch
GIT_BRANCH='master'
# Git common
GIT_COMMON='0'
# Installer App script
GIT_APP_SCRIPT='pve_medialab_ct_sonarr.sh'
# Set Package Installer Temp Folder
REPO_TEMP='/tmp'
# All later relative operations happen inside the temp folder.
cd ${REPO_TEMP}
#---- Other Variables --------------------------------------------------------------
#---- Other Files ------------------------------------------------------------------
#---- Package loader
# Prefer a local NAS-mounted developer checkout of the repo loader; otherwise
# fetch it from GitHub into the temp folder and source it.
if [ -f /mnt/pve/nas-*[0-9]-git/${GIT_USER}/developer_settings.git ] && [ -f /mnt/pve/nas-*[0-9]-git/${GIT_USER}/common/bash/source/pve_repo_loader.sh ]; then
  # Developer Options loader
  source /mnt/pve/nas-*[0-9]-git/${GIT_USER}/common/bash/source/pve_repo_loader.sh
else
  # Download Github loader
  wget -qL - https://raw.githubusercontent.com/${GIT_USER}/common/master/bash/source/pve_repo_loader.sh -O ${REPO_TEMP}/pve_repo_loader.sh
  chmod +x ${REPO_TEMP}/pve_repo_loader.sh
  source ${REPO_TEMP}/pve_repo_loader.sh
fi
#---- Body -------------------------------------------------------------------------
#---- Run Installer
# Sourced (not executed) so the app script shares this shell's variables.
source ${REPO_TEMP}/${GIT_REPO}/scripts/${GIT_APP_SCRIPT}
#---- Finish Line ------------------------------------------------------------------
#---- Cleanup
installer_cleanup
<gh_stars>0
import React, { FC, useMemo } from 'react';
import { IconNode, HasDependences, useLabelText } from '@rediagram/cdk';
import { resolveAsset } from '../../assets';
import { useAssertProvider } from '../../hooks/assert-provider';
/** Closed set of IAM icon variants with a dedicated asset image. */
export type IAMType =
| 'Add-on'
| 'AWS STS Alternate'
| 'AWS STS'
| 'Data Encryption Key'
| 'Encrypted Data'
| 'Long term Security Credential'
| 'MFA Token'
| 'Permissions'
| 'Role'
| 'Temporary Security Credential';
/**
 * Props for the IAM diagram node: an optional icon variant, a display name,
 * plus the upstream/downstream dependency edges from HasDependences.
 */
export type IAMProps = {
type?: IAMType;
name: string;
} & HasDependences;
/**
 * Resolves the icon asset path for a given IAM resource type.
 * Falls back to the generic IAM service icon when no type is supplied.
 */
function resolveImage(type?: IAMType): string {
  const assetByType: { [K in IAMType]: string } = {
    'Add-on': 'security-identity-compliance/IAM/Add-on.png',
    'AWS STS Alternate': 'security-identity-compliance/IAM/AWS-STS-Alternate.png',
    'AWS STS': 'security-identity-compliance/IAM/AWS-STS.png',
    'Data Encryption Key': 'security-identity-compliance/IAM/Data-Encryption-Key.png',
    'Encrypted Data': 'security-identity-compliance/IAM/Encrypted-Data.png',
    'Long term Security Credential': 'security-identity-compliance/IAM/Long-term-Security-Credential.png',
    'MFA Token': 'security-identity-compliance/IAM/MFA-Token.png',
    'Permissions': 'security-identity-compliance/IAM/Permissions.png',
    'Role': 'security-identity-compliance/IAM/Role.png',
    'Temporary Security Credential': 'security-identity-compliance/IAM/Temporary-Security-Credential.png',
  };
  const relativePath = type !== undefined ? assetByType[type] : 'security-identity-compliance/IAM.png';
  return resolveAsset(relativePath);
}
/**
 * Memoized icon descriptor (asset path + render size) for an IAM type.
 * The generic (untyped) service icon renders larger (56) than the
 * type-specific resource icons (37).
 */
function useIcon(type?: IAMType): { path: string; size: number } {
  return useMemo(
    () => ({
      path: resolveImage(type),
      size: type === undefined ? 56 : 37,
    }),
    [type],
  );
}
/**
 * Diagram node for an AWS IAM resource. Must be rendered inside a provider
 * (enforced by useAssertProvider); the label falls back to the node name.
 */
export const IAM: FC<IAMProps> = (props) => {
  useAssertProvider();
  const { type, name, children, upstream, downstream } = props;
  const icon = useIcon(type);
  const labelText = useLabelText(children, { defaultValue: name, htmlLike: true });
  return <IconNode name={name} icon={icon} label={labelText} upstream={upstream} downstream={downstream} />;
};
IAM.displayName = 'IAM';
|
package facade.amazonaws.services
import scalajs._
import scalajs.js.annotation.JSImport
import scala.scalajs.js.|
import scala.concurrent.Future
import facade.amazonaws._
/** Scala.js facade helpers for Amazon EKS: type aliases mirroring the
  * service's JSON shapes, plus an extension class exposing Future-based
  * variants of the SDK's Request-returning methods.
  */
package object eks {
type AddonIssueList = js.Array[AddonIssue]
type AddonVersionInfoList = js.Array[AddonVersionInfo]
type Addons = js.Array[AddonInfo]
type AutoScalingGroupList = js.Array[AutoScalingGroup]
type BoxedBoolean = Boolean
type BoxedInteger = Int
type Capacity = Int
type ClusterName = String
type Compatibilities = js.Array[Compatibility]
type DescribeAddonVersionsRequestMaxResults = Int
type EncryptionConfigList = js.Array[EncryptionConfig]
type ErrorDetails = js.Array[ErrorDetail]
type FargateProfileLabel = js.Dictionary[String]
type FargateProfileSelectors = js.Array[FargateProfileSelector]
type FargateProfilesRequestMaxResults = Int
type IdentityProviderConfigs = js.Array[IdentityProviderConfig]
type IssueList = js.Array[Issue]
type ListAddonsRequestMaxResults = Int
type ListClustersRequestMaxResults = Int
type ListIdentityProviderConfigsRequestMaxResults = Int
type ListNodegroupsRequestMaxResults = Int
type ListUpdatesRequestMaxResults = Int
type LogSetups = js.Array[LogSetup]
type LogTypes = js.Array[LogType]
type RoleArn = String
type StringList = js.Array[String]
type TagKey = String
type TagKeyList = js.Array[TagKey]
type TagMap = js.Dictionary[TagValue]
type TagValue = String
type Timestamp = js.Date
type UpdateParams = js.Array[UpdateParam]
type labelKey = String
type labelValue = String
type labelsKeyList = js.Array[String]
type labelsMap = js.Dictionary[labelValue]
type requiredClaimsKey = String
type requiredClaimsMap = js.Dictionary[requiredClaimsValue]
type requiredClaimsValue = String
/** Extension methods converting each EKS Request into a scala.concurrent.Future
  * via the underlying JS promise (allocation-free thanks to AnyVal).
  */
implicit final class EKSOps(private val service: EKS) extends AnyVal {
@inline def associateEncryptionConfigFuture(params: AssociateEncryptionConfigRequest): Future[AssociateEncryptionConfigResponse] = service.associateEncryptionConfig(params).promise().toFuture
@inline def associateIdentityProviderConfigFuture(params: AssociateIdentityProviderConfigRequest): Future[AssociateIdentityProviderConfigResponse] = service.associateIdentityProviderConfig(params).promise().toFuture
@inline def createAddonFuture(params: CreateAddonRequest): Future[CreateAddonResponse] = service.createAddon(params).promise().toFuture
@inline def createClusterFuture(params: CreateClusterRequest): Future[CreateClusterResponse] = service.createCluster(params).promise().toFuture
@inline def createFargateProfileFuture(params: CreateFargateProfileRequest): Future[CreateFargateProfileResponse] = service.createFargateProfile(params).promise().toFuture
@inline def createNodegroupFuture(params: CreateNodegroupRequest): Future[CreateNodegroupResponse] = service.createNodegroup(params).promise().toFuture
@inline def deleteAddonFuture(params: DeleteAddonRequest): Future[DeleteAddonResponse] = service.deleteAddon(params).promise().toFuture
@inline def deleteClusterFuture(params: DeleteClusterRequest): Future[DeleteClusterResponse] = service.deleteCluster(params).promise().toFuture
@inline def deleteFargateProfileFuture(params: DeleteFargateProfileRequest): Future[DeleteFargateProfileResponse] = service.deleteFargateProfile(params).promise().toFuture
@inline def deleteNodegroupFuture(params: DeleteNodegroupRequest): Future[DeleteNodegroupResponse] = service.deleteNodegroup(params).promise().toFuture
@inline def describeAddonFuture(params: DescribeAddonRequest): Future[DescribeAddonResponse] = service.describeAddon(params).promise().toFuture
@inline def describeAddonVersionsFuture(params: DescribeAddonVersionsRequest): Future[DescribeAddonVersionsResponse] = service.describeAddonVersions(params).promise().toFuture
@inline def describeClusterFuture(params: DescribeClusterRequest): Future[DescribeClusterResponse] = service.describeCluster(params).promise().toFuture
@inline def describeFargateProfileFuture(params: DescribeFargateProfileRequest): Future[DescribeFargateProfileResponse] = service.describeFargateProfile(params).promise().toFuture
@inline def describeIdentityProviderConfigFuture(params: DescribeIdentityProviderConfigRequest): Future[DescribeIdentityProviderConfigResponse] = service.describeIdentityProviderConfig(params).promise().toFuture
@inline def describeNodegroupFuture(params: DescribeNodegroupRequest): Future[DescribeNodegroupResponse] = service.describeNodegroup(params).promise().toFuture
@inline def describeUpdateFuture(params: DescribeUpdateRequest): Future[DescribeUpdateResponse] = service.describeUpdate(params).promise().toFuture
@inline def disassociateIdentityProviderConfigFuture(params: DisassociateIdentityProviderConfigRequest): Future[DisassociateIdentityProviderConfigResponse] = service.disassociateIdentityProviderConfig(params).promise().toFuture
@inline def listAddonsFuture(params: ListAddonsRequest): Future[ListAddonsResponse] = service.listAddons(params).promise().toFuture
@inline def listClustersFuture(params: ListClustersRequest): Future[ListClustersResponse] = service.listClusters(params).promise().toFuture
@inline def listFargateProfilesFuture(params: ListFargateProfilesRequest): Future[ListFargateProfilesResponse] = service.listFargateProfiles(params).promise().toFuture
@inline def listIdentityProviderConfigsFuture(params: ListIdentityProviderConfigsRequest): Future[ListIdentityProviderConfigsResponse] = service.listIdentityProviderConfigs(params).promise().toFuture
@inline def listNodegroupsFuture(params: ListNodegroupsRequest): Future[ListNodegroupsResponse] = service.listNodegroups(params).promise().toFuture
@inline def listTagsForResourceFuture(params: ListTagsForResourceRequest): Future[ListTagsForResourceResponse] = service.listTagsForResource(params).promise().toFuture
@inline def listUpdatesFuture(params: ListUpdatesRequest): Future[ListUpdatesResponse] = service.listUpdates(params).promise().toFuture
@inline def tagResourceFuture(params: TagResourceRequest): Future[TagResourceResponse] = service.tagResource(params).promise().toFuture
@inline def untagResourceFuture(params: UntagResourceRequest): Future[UntagResourceResponse] = service.untagResource(params).promise().toFuture
@inline def updateAddonFuture(params: UpdateAddonRequest): Future[UpdateAddonResponse] = service.updateAddon(params).promise().toFuture
@inline def updateClusterConfigFuture(params: UpdateClusterConfigRequest): Future[UpdateClusterConfigResponse] = service.updateClusterConfig(params).promise().toFuture
@inline def updateClusterVersionFuture(params: UpdateClusterVersionRequest): Future[UpdateClusterVersionResponse] = service.updateClusterVersion(params).promise().toFuture
@inline def updateNodegroupConfigFuture(params: UpdateNodegroupConfigRequest): Future[UpdateNodegroupConfigResponse] = service.updateNodegroupConfig(params).promise().toFuture
@inline def updateNodegroupVersionFuture(params: UpdateNodegroupVersionRequest): Future[UpdateNodegroupVersionResponse] = service.updateNodegroupVersion(params).promise().toFuture
}
}
package eks {
/** Native facade over the aws-sdk EKS client ("AWS.EKS"). Each method maps
  * 1:1 to an SDK operation and returns a Request wrapper.
  */
@js.native
@JSImport("aws-sdk/clients/eks", JSImport.Namespace, "AWS.EKS")
class EKS() extends js.Object {
// Auxiliary constructor accepting a config; delegates to the native no-arg
// constructor (the config is consumed on the JS side, not stored here).
def this(config: AWSConfig) = this()
def associateEncryptionConfig(params: AssociateEncryptionConfigRequest): Request[AssociateEncryptionConfigResponse] = js.native
def associateIdentityProviderConfig(params: AssociateIdentityProviderConfigRequest): Request[AssociateIdentityProviderConfigResponse] = js.native
def createAddon(params: CreateAddonRequest): Request[CreateAddonResponse] = js.native
def createCluster(params: CreateClusterRequest): Request[CreateClusterResponse] = js.native
def createFargateProfile(params: CreateFargateProfileRequest): Request[CreateFargateProfileResponse] = js.native
def createNodegroup(params: CreateNodegroupRequest): Request[CreateNodegroupResponse] = js.native
def deleteAddon(params: DeleteAddonRequest): Request[DeleteAddonResponse] = js.native
def deleteCluster(params: DeleteClusterRequest): Request[DeleteClusterResponse] = js.native
def deleteFargateProfile(params: DeleteFargateProfileRequest): Request[DeleteFargateProfileResponse] = js.native
def deleteNodegroup(params: DeleteNodegroupRequest): Request[DeleteNodegroupResponse] = js.native
def describeAddon(params: DescribeAddonRequest): Request[DescribeAddonResponse] = js.native
def describeAddonVersions(params: DescribeAddonVersionsRequest): Request[DescribeAddonVersionsResponse] = js.native
def describeCluster(params: DescribeClusterRequest): Request[DescribeClusterResponse] = js.native
def describeFargateProfile(params: DescribeFargateProfileRequest): Request[DescribeFargateProfileResponse] = js.native
def describeIdentityProviderConfig(params: DescribeIdentityProviderConfigRequest): Request[DescribeIdentityProviderConfigResponse] = js.native
def describeNodegroup(params: DescribeNodegroupRequest): Request[DescribeNodegroupResponse] = js.native
def describeUpdate(params: DescribeUpdateRequest): Request[DescribeUpdateResponse] = js.native
def disassociateIdentityProviderConfig(params: DisassociateIdentityProviderConfigRequest): Request[DisassociateIdentityProviderConfigResponse] = js.native
def listAddons(params: ListAddonsRequest): Request[ListAddonsResponse] = js.native
def listClusters(params: ListClustersRequest): Request[ListClustersResponse] = js.native
def listFargateProfiles(params: ListFargateProfilesRequest): Request[ListFargateProfilesResponse] = js.native
def listIdentityProviderConfigs(params: ListIdentityProviderConfigsRequest): Request[ListIdentityProviderConfigsResponse] = js.native
def listNodegroups(params: ListNodegroupsRequest): Request[ListNodegroupsResponse] = js.native
def listTagsForResource(params: ListTagsForResourceRequest): Request[ListTagsForResourceResponse] = js.native
def listUpdates(params: ListUpdatesRequest): Request[ListUpdatesResponse] = js.native
def tagResource(params: TagResourceRequest): Request[TagResourceResponse] = js.native
def untagResource(params: UntagResourceRequest): Request[UntagResourceResponse] = js.native
def updateAddon(params: UpdateAddonRequest): Request[UpdateAddonResponse] = js.native
def updateClusterConfig(params: UpdateClusterConfigRequest): Request[UpdateClusterConfigResponse] = js.native
def updateClusterVersion(params: UpdateClusterVersionRequest): Request[UpdateClusterVersionResponse] = js.native
def updateNodegroupConfig(params: UpdateNodegroupConfigRequest): Request[UpdateNodegroupConfigResponse] = js.native
def updateNodegroupVersion(params: UpdateNodegroupVersionRequest): Request[UpdateNodegroupVersionResponse] = js.native
}
/** AMI types for managed node groups (string-backed JS enum). */
@js.native
sealed trait AMITypes extends js.Any
object AMITypes {
val AL2_x86_64 = "AL2_x86_64".asInstanceOf[AMITypes]
val AL2_x86_64_GPU = "AL2_x86_64_GPU".asInstanceOf[AMITypes]
val AL2_ARM_64 = "AL2_ARM_64".asInstanceOf[AMITypes]
@inline def values = js.Array(AL2_x86_64, AL2_x86_64_GPU, AL2_ARM_64)
}
/** An Amazon EKS add-on.
*/
@js.native
trait Addon extends js.Object {
var addonArn: js.UndefOr[String]
var addonName: js.UndefOr[String]
var addonVersion: js.UndefOr[String]
var clusterName: js.UndefOr[ClusterName]
var createdAt: js.UndefOr[Timestamp]
var health: js.UndefOr[AddonHealth]
var modifiedAt: js.UndefOr[Timestamp]
var serviceAccountRoleArn: js.UndefOr[String]
var status: js.UndefOr[AddonStatus]
var tags: js.UndefOr[TagMap]
}
object Addon {
/** Builds an Addon JS object, writing only the fields that are defined. */
@inline
def apply(
addonArn: js.UndefOr[String] = js.undefined,
addonName: js.UndefOr[String] = js.undefined,
addonVersion: js.UndefOr[String] = js.undefined,
clusterName: js.UndefOr[ClusterName] = js.undefined,
createdAt: js.UndefOr[Timestamp] = js.undefined,
health: js.UndefOr[AddonHealth] = js.undefined,
modifiedAt: js.UndefOr[Timestamp] = js.undefined,
serviceAccountRoleArn: js.UndefOr[String] = js.undefined,
status: js.UndefOr[AddonStatus] = js.undefined,
tags: js.UndefOr[TagMap] = js.undefined
): Addon = {
val __obj = js.Dynamic.literal()
addonArn.foreach(__v => __obj.updateDynamic("addonArn")(__v.asInstanceOf[js.Any]))
addonName.foreach(__v => __obj.updateDynamic("addonName")(__v.asInstanceOf[js.Any]))
addonVersion.foreach(__v => __obj.updateDynamic("addonVersion")(__v.asInstanceOf[js.Any]))
clusterName.foreach(__v => __obj.updateDynamic("clusterName")(__v.asInstanceOf[js.Any]))
createdAt.foreach(__v => __obj.updateDynamic("createdAt")(__v.asInstanceOf[js.Any]))
health.foreach(__v => __obj.updateDynamic("health")(__v.asInstanceOf[js.Any]))
modifiedAt.foreach(__v => __obj.updateDynamic("modifiedAt")(__v.asInstanceOf[js.Any]))
serviceAccountRoleArn.foreach(__v => __obj.updateDynamic("serviceAccountRoleArn")(__v.asInstanceOf[js.Any]))
status.foreach(__v => __obj.updateDynamic("status")(__v.asInstanceOf[js.Any]))
tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Addon]
}
}
/** The health of the add-on.
*/
@js.native
trait AddonHealth extends js.Object {
var issues: js.UndefOr[AddonIssueList]
}
object AddonHealth {
/** Builds an AddonHealth JS object, writing only the fields that are defined. */
@inline
def apply(
issues: js.UndefOr[AddonIssueList] = js.undefined
): AddonHealth = {
val __obj = js.Dynamic.literal()
issues.foreach(__v => __obj.updateDynamic("issues")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[AddonHealth]
}
}
/** Information about an add-on.
*/
@js.native
trait AddonInfo extends js.Object {
var addonName: js.UndefOr[String]
var addonVersions: js.UndefOr[AddonVersionInfoList]
var `type`: js.UndefOr[String]
}
object AddonInfo {
/** Builds an AddonInfo JS object, writing only the fields that are defined. */
@inline
def apply(
addonName: js.UndefOr[String] = js.undefined,
addonVersions: js.UndefOr[AddonVersionInfoList] = js.undefined,
`type`: js.UndefOr[String] = js.undefined
): AddonInfo = {
val __obj = js.Dynamic.literal()
addonName.foreach(__v => __obj.updateDynamic("addonName")(__v.asInstanceOf[js.Any]))
addonVersions.foreach(__v => __obj.updateDynamic("addonVersions")(__v.asInstanceOf[js.Any]))
`type`.foreach(__v => __obj.updateDynamic("type")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[AddonInfo]
}
}
/** An issue related to an add-on.
*/
@js.native
trait AddonIssue extends js.Object {
var code: js.UndefOr[AddonIssueCode]
var message: js.UndefOr[String]
var resourceIds: js.UndefOr[StringList]
}
object AddonIssue {
/** Builds an AddonIssue JS object, writing only the fields that are defined. */
@inline
def apply(
code: js.UndefOr[AddonIssueCode] = js.undefined,
message: js.UndefOr[String] = js.undefined,
resourceIds: js.UndefOr[StringList] = js.undefined
): AddonIssue = {
val __obj = js.Dynamic.literal()
code.foreach(__v => __obj.updateDynamic("code")(__v.asInstanceOf[js.Any]))
message.foreach(__v => __obj.updateDynamic("message")(__v.asInstanceOf[js.Any]))
resourceIds.foreach(__v => __obj.updateDynamic("resourceIds")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[AddonIssue]
}
}
/** Error codes reported in AddonIssue (string-backed JS enum). */
@js.native
sealed trait AddonIssueCode extends js.Any
object AddonIssueCode {
val AccessDenied = "AccessDenied".asInstanceOf[AddonIssueCode]
val InternalFailure = "InternalFailure".asInstanceOf[AddonIssueCode]
val ClusterUnreachable = "ClusterUnreachable".asInstanceOf[AddonIssueCode]
val InsufficientNumberOfReplicas = "InsufficientNumberOfReplicas".asInstanceOf[AddonIssueCode]
val ConfigurationConflict = "ConfigurationConflict".asInstanceOf[AddonIssueCode]
val AdmissionRequestDenied = "AdmissionRequestDenied".asInstanceOf[AddonIssueCode]
@inline def values = js.Array(AccessDenied, InternalFailure, ClusterUnreachable, InsufficientNumberOfReplicas, ConfigurationConflict, AdmissionRequestDenied)
}
/** Lifecycle states of an add-on (string-backed JS enum). */
@js.native
sealed trait AddonStatus extends js.Any
object AddonStatus {
val CREATING = "CREATING".asInstanceOf[AddonStatus]
val ACTIVE = "ACTIVE".asInstanceOf[AddonStatus]
val CREATE_FAILED = "CREATE_FAILED".asInstanceOf[AddonStatus]
val UPDATING = "UPDATING".asInstanceOf[AddonStatus]
val DELETING = "DELETING".asInstanceOf[AddonStatus]
val DELETE_FAILED = "DELETE_FAILED".asInstanceOf[AddonStatus]
val DEGRADED = "DEGRADED".asInstanceOf[AddonStatus]
@inline def values = js.Array(CREATING, ACTIVE, CREATE_FAILED, UPDATING, DELETING, DELETE_FAILED, DEGRADED)
}
/** Information about an add-on version.
*/
@js.native
trait AddonVersionInfo extends js.Object {
var addonVersion: js.UndefOr[String]
var architecture: js.UndefOr[StringList]
var compatibilities: js.UndefOr[Compatibilities]
}
object AddonVersionInfo {
/** Builds an AddonVersionInfo JS object, writing only defined fields. */
@inline
def apply(
addonVersion: js.UndefOr[String] = js.undefined,
architecture: js.UndefOr[StringList] = js.undefined,
compatibilities: js.UndefOr[Compatibilities] = js.undefined
): AddonVersionInfo = {
val __obj = js.Dynamic.literal()
addonVersion.foreach(__v => __obj.updateDynamic("addonVersion")(__v.asInstanceOf[js.Any]))
architecture.foreach(__v => __obj.updateDynamic("architecture")(__v.asInstanceOf[js.Any]))
compatibilities.foreach(__v => __obj.updateDynamic("compatibilities")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[AddonVersionInfo]
}
}
/** Request shape for EKS AssociateEncryptionConfig. */
@js.native
trait AssociateEncryptionConfigRequest extends js.Object {
var clusterName: String
var encryptionConfig: EncryptionConfigList
var clientRequestToken: js.UndefOr[String]
}
object AssociateEncryptionConfigRequest {
/** Builds the request: required fields in the literal, optional fields only when defined. */
@inline
def apply(
clusterName: String,
encryptionConfig: EncryptionConfigList,
clientRequestToken: js.UndefOr[String] = js.undefined
): AssociateEncryptionConfigRequest = {
val __obj = js.Dynamic.literal(
"clusterName" -> clusterName.asInstanceOf[js.Any],
"encryptionConfig" -> encryptionConfig.asInstanceOf[js.Any]
)
clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[AssociateEncryptionConfigRequest]
}
}
/** Response shape for EKS AssociateEncryptionConfig. */
@js.native
trait AssociateEncryptionConfigResponse extends js.Object {
var update: js.UndefOr[Update]
}
object AssociateEncryptionConfigResponse {
/** Builds the response JS object, writing only defined fields. */
@inline
def apply(
update: js.UndefOr[Update] = js.undefined
): AssociateEncryptionConfigResponse = {
val __obj = js.Dynamic.literal()
update.foreach(__v => __obj.updateDynamic("update")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[AssociateEncryptionConfigResponse]
}
}
/** Request shape for EKS AssociateIdentityProviderConfig. */
@js.native
trait AssociateIdentityProviderConfigRequest extends js.Object {
var clusterName: String
var oidc: OidcIdentityProviderConfigRequest
var clientRequestToken: js.UndefOr[String]
var tags: js.UndefOr[TagMap]
}
object AssociateIdentityProviderConfigRequest {
/** Builds the request: required fields in the literal, optional fields only when defined. */
@inline
def apply(
clusterName: String,
oidc: OidcIdentityProviderConfigRequest,
clientRequestToken: js.UndefOr[String] = js.undefined,
tags: js.UndefOr[TagMap] = js.undefined
): AssociateIdentityProviderConfigRequest = {
val __obj = js.Dynamic.literal(
"clusterName" -> clusterName.asInstanceOf[js.Any],
"oidc" -> oidc.asInstanceOf[js.Any]
)
clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[AssociateIdentityProviderConfigRequest]
}
}
/** Response shape for EKS AssociateIdentityProviderConfig. */
@js.native
trait AssociateIdentityProviderConfigResponse extends js.Object {
var tags: js.UndefOr[TagMap]
var update: js.UndefOr[Update]
}
object AssociateIdentityProviderConfigResponse {
/** Builds the response JS object, writing only defined fields. */
@inline
def apply(
tags: js.UndefOr[TagMap] = js.undefined,
update: js.UndefOr[Update] = js.undefined
): AssociateIdentityProviderConfigResponse = {
val __obj = js.Dynamic.literal()
tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
update.foreach(__v => __obj.updateDynamic("update")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[AssociateIdentityProviderConfigResponse]
}
}
/** An Auto Scaling group that is associated with an Amazon EKS managed node group.
*/
@js.native
trait AutoScalingGroup extends js.Object {
var name: js.UndefOr[String]
}
object AutoScalingGroup {
/** Builds an AutoScalingGroup JS object, writing only defined fields. */
@inline
def apply(
name: js.UndefOr[String] = js.undefined
): AutoScalingGroup = {
val __obj = js.Dynamic.literal()
name.foreach(__v => __obj.updateDynamic("name")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[AutoScalingGroup]
}
}
/** Node group capacity purchase options (string-backed JS enum). */
@js.native
sealed trait CapacityTypes extends js.Any
object CapacityTypes {
val ON_DEMAND = "ON_DEMAND".asInstanceOf[CapacityTypes]
val SPOT = "SPOT".asInstanceOf[CapacityTypes]
@inline def values = js.Array(ON_DEMAND, SPOT)
}
/** An object representing the <code>certificate-authority-data</code> for your cluster.
*/
@js.native
trait Certificate extends js.Object {
var data: js.UndefOr[String]
}
object Certificate {
/** Builds a Certificate JS object, writing only defined fields. */
@inline
def apply(
data: js.UndefOr[String] = js.undefined
): Certificate = {
val __obj = js.Dynamic.literal()
data.foreach(__v => __obj.updateDynamic("data")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Certificate]
}
}
/** An object representing an Amazon EKS cluster.
*/
@js.native
trait Cluster extends js.Object {
var arn: js.UndefOr[String]
var certificateAuthority: js.UndefOr[Certificate]
var clientRequestToken: js.UndefOr[String]
var createdAt: js.UndefOr[Timestamp]
var encryptionConfig: js.UndefOr[EncryptionConfigList]
var endpoint: js.UndefOr[String]
var identity: js.UndefOr[Identity]
var kubernetesNetworkConfig: js.UndefOr[KubernetesNetworkConfigResponse]
var logging: js.UndefOr[Logging]
var name: js.UndefOr[String]
var platformVersion: js.UndefOr[String]
var resourcesVpcConfig: js.UndefOr[VpcConfigResponse]
var roleArn: js.UndefOr[String]
var status: js.UndefOr[ClusterStatus]
var tags: js.UndefOr[TagMap]
var version: js.UndefOr[String]
}
object Cluster {
/** Builds a Cluster JS object, writing only the fields that are defined. */
@inline
def apply(
arn: js.UndefOr[String] = js.undefined,
certificateAuthority: js.UndefOr[Certificate] = js.undefined,
clientRequestToken: js.UndefOr[String] = js.undefined,
createdAt: js.UndefOr[Timestamp] = js.undefined,
encryptionConfig: js.UndefOr[EncryptionConfigList] = js.undefined,
endpoint: js.UndefOr[String] = js.undefined,
identity: js.UndefOr[Identity] = js.undefined,
kubernetesNetworkConfig: js.UndefOr[KubernetesNetworkConfigResponse] = js.undefined,
logging: js.UndefOr[Logging] = js.undefined,
name: js.UndefOr[String] = js.undefined,
platformVersion: js.UndefOr[String] = js.undefined,
resourcesVpcConfig: js.UndefOr[VpcConfigResponse] = js.undefined,
roleArn: js.UndefOr[String] = js.undefined,
status: js.UndefOr[ClusterStatus] = js.undefined,
tags: js.UndefOr[TagMap] = js.undefined,
version: js.UndefOr[String] = js.undefined
): Cluster = {
val __obj = js.Dynamic.literal()
arn.foreach(__v => __obj.updateDynamic("arn")(__v.asInstanceOf[js.Any]))
certificateAuthority.foreach(__v => __obj.updateDynamic("certificateAuthority")(__v.asInstanceOf[js.Any]))
clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
createdAt.foreach(__v => __obj.updateDynamic("createdAt")(__v.asInstanceOf[js.Any]))
encryptionConfig.foreach(__v => __obj.updateDynamic("encryptionConfig")(__v.asInstanceOf[js.Any]))
endpoint.foreach(__v => __obj.updateDynamic("endpoint")(__v.asInstanceOf[js.Any]))
identity.foreach(__v => __obj.updateDynamic("identity")(__v.asInstanceOf[js.Any]))
kubernetesNetworkConfig.foreach(__v => __obj.updateDynamic("kubernetesNetworkConfig")(__v.asInstanceOf[js.Any]))
logging.foreach(__v => __obj.updateDynamic("logging")(__v.asInstanceOf[js.Any]))
name.foreach(__v => __obj.updateDynamic("name")(__v.asInstanceOf[js.Any]))
platformVersion.foreach(__v => __obj.updateDynamic("platformVersion")(__v.asInstanceOf[js.Any]))
resourcesVpcConfig.foreach(__v => __obj.updateDynamic("resourcesVpcConfig")(__v.asInstanceOf[js.Any]))
roleArn.foreach(__v => __obj.updateDynamic("roleArn")(__v.asInstanceOf[js.Any]))
status.foreach(__v => __obj.updateDynamic("status")(__v.asInstanceOf[js.Any]))
tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
version.foreach(__v => __obj.updateDynamic("version")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Cluster]
}
}
/** Lifecycle states of an EKS cluster (string-backed JS enum). */
@js.native
sealed trait ClusterStatus extends js.Any
object ClusterStatus {
val CREATING = "CREATING".asInstanceOf[ClusterStatus]
val ACTIVE = "ACTIVE".asInstanceOf[ClusterStatus]
val DELETING = "DELETING".asInstanceOf[ClusterStatus]
val FAILED = "FAILED".asInstanceOf[ClusterStatus]
val UPDATING = "UPDATING".asInstanceOf[ClusterStatus]
@inline def values = js.Array(CREATING, ACTIVE, DELETING, FAILED, UPDATING)
}
/** Compatibility information.
*/
@js.native
trait Compatibility extends js.Object {
var clusterVersion: js.UndefOr[String]
var defaultVersion: js.UndefOr[Boolean]
var platformVersions: js.UndefOr[StringList]
}
object Compatibility {
/** Builds a Compatibility JS object, writing only defined fields. */
@inline
def apply(
clusterVersion: js.UndefOr[String] = js.undefined,
defaultVersion: js.UndefOr[Boolean] = js.undefined,
platformVersions: js.UndefOr[StringList] = js.undefined
): Compatibility = {
val __obj = js.Dynamic.literal()
clusterVersion.foreach(__v => __obj.updateDynamic("clusterVersion")(__v.asInstanceOf[js.Any]))
defaultVersion.foreach(__v => __obj.updateDynamic("defaultVersion")(__v.asInstanceOf[js.Any]))
platformVersions.foreach(__v => __obj.updateDynamic("platformVersions")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Compatibility]
}
}
/** Request shape for EKS CreateAddon. */
@js.native
trait CreateAddonRequest extends js.Object {
var addonName: String
var clusterName: ClusterName
var addonVersion: js.UndefOr[String]
var clientRequestToken: js.UndefOr[String]
var resolveConflicts: js.UndefOr[ResolveConflicts]
var serviceAccountRoleArn: js.UndefOr[RoleArn]
var tags: js.UndefOr[TagMap]
}
object CreateAddonRequest {
/** Builds the request: required fields in the literal, optional fields only when defined. */
@inline
def apply(
addonName: String,
clusterName: ClusterName,
addonVersion: js.UndefOr[String] = js.undefined,
clientRequestToken: js.UndefOr[String] = js.undefined,
resolveConflicts: js.UndefOr[ResolveConflicts] = js.undefined,
serviceAccountRoleArn: js.UndefOr[RoleArn] = js.undefined,
tags: js.UndefOr[TagMap] = js.undefined
): CreateAddonRequest = {
val __obj = js.Dynamic.literal(
"addonName" -> addonName.asInstanceOf[js.Any],
"clusterName" -> clusterName.asInstanceOf[js.Any]
)
addonVersion.foreach(__v => __obj.updateDynamic("addonVersion")(__v.asInstanceOf[js.Any]))
clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
resolveConflicts.foreach(__v => __obj.updateDynamic("resolveConflicts")(__v.asInstanceOf[js.Any]))
serviceAccountRoleArn.foreach(__v => __obj.updateDynamic("serviceAccountRoleArn")(__v.asInstanceOf[js.Any]))
tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateAddonRequest]
}
}
/** Response shape for EKS CreateAddon. */
@js.native
trait CreateAddonResponse extends js.Object {
var addon: js.UndefOr[Addon]
}
object CreateAddonResponse {
/** Builds the response JS object, writing only defined fields. */
@inline
def apply(
addon: js.UndefOr[Addon] = js.undefined
): CreateAddonResponse = {
val __obj = js.Dynamic.literal()
addon.foreach(__v => __obj.updateDynamic("addon")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateAddonResponse]
}
}
/** Request shape for EKS CreateCluster. */
@js.native
trait CreateClusterRequest extends js.Object {
var name: ClusterName
var resourcesVpcConfig: VpcConfigRequest
var roleArn: String
var clientRequestToken: js.UndefOr[String]
var encryptionConfig: js.UndefOr[EncryptionConfigList]
var kubernetesNetworkConfig: js.UndefOr[KubernetesNetworkConfigRequest]
var logging: js.UndefOr[Logging]
var tags: js.UndefOr[TagMap]
var version: js.UndefOr[String]
}
object CreateClusterRequest {
/** Builds the request: required fields in the literal, optional fields only when defined. */
@inline
def apply(
name: ClusterName,
resourcesVpcConfig: VpcConfigRequest,
roleArn: String,
clientRequestToken: js.UndefOr[String] = js.undefined,
encryptionConfig: js.UndefOr[EncryptionConfigList] = js.undefined,
kubernetesNetworkConfig: js.UndefOr[KubernetesNetworkConfigRequest] = js.undefined,
logging: js.UndefOr[Logging] = js.undefined,
tags: js.UndefOr[TagMap] = js.undefined,
version: js.UndefOr[String] = js.undefined
): CreateClusterRequest = {
val __obj = js.Dynamic.literal(
"name" -> name.asInstanceOf[js.Any],
"resourcesVpcConfig" -> resourcesVpcConfig.asInstanceOf[js.Any],
"roleArn" -> roleArn.asInstanceOf[js.Any]
)
clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
encryptionConfig.foreach(__v => __obj.updateDynamic("encryptionConfig")(__v.asInstanceOf[js.Any]))
kubernetesNetworkConfig.foreach(__v => __obj.updateDynamic("kubernetesNetworkConfig")(__v.asInstanceOf[js.Any]))
logging.foreach(__v => __obj.updateDynamic("logging")(__v.asInstanceOf[js.Any]))
tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
version.foreach(__v => __obj.updateDynamic("version")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateClusterRequest]
}
}
/** Response shape for EKS CreateCluster. */
@js.native
trait CreateClusterResponse extends js.Object {
var cluster: js.UndefOr[Cluster]
}
object CreateClusterResponse {
/** Builds the response JS object, writing only defined fields. */
@inline
def apply(
cluster: js.UndefOr[Cluster] = js.undefined
): CreateClusterResponse = {
val __obj = js.Dynamic.literal()
cluster.foreach(__v => __obj.updateDynamic("cluster")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateClusterResponse]
}
}
/** Request shape for EKS CreateFargateProfile. */
@js.native
trait CreateFargateProfileRequest extends js.Object {
var clusterName: String
var fargateProfileName: String
var podExecutionRoleArn: String
var clientRequestToken: js.UndefOr[String]
var selectors: js.UndefOr[FargateProfileSelectors]
var subnets: js.UndefOr[StringList]
var tags: js.UndefOr[TagMap]
}
object CreateFargateProfileRequest {
/** Builds the request: required fields in the literal, optional fields only when defined. */
@inline
def apply(
clusterName: String,
fargateProfileName: String,
podExecutionRoleArn: String,
clientRequestToken: js.UndefOr[String] = js.undefined,
selectors: js.UndefOr[FargateProfileSelectors] = js.undefined,
subnets: js.UndefOr[StringList] = js.undefined,
tags: js.UndefOr[TagMap] = js.undefined
): CreateFargateProfileRequest = {
val __obj = js.Dynamic.literal(
"clusterName" -> clusterName.asInstanceOf[js.Any],
"fargateProfileName" -> fargateProfileName.asInstanceOf[js.Any],
"podExecutionRoleArn" -> podExecutionRoleArn.asInstanceOf[js.Any]
)
clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
selectors.foreach(__v => __obj.updateDynamic("selectors")(__v.asInstanceOf[js.Any]))
subnets.foreach(__v => __obj.updateDynamic("subnets")(__v.asInstanceOf[js.Any]))
tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateFargateProfileRequest]
}
}
/** Response shape for EKS CreateFargateProfile. */
@js.native
trait CreateFargateProfileResponse extends js.Object {
var fargateProfile: js.UndefOr[FargateProfile]
}
object CreateFargateProfileResponse {
/** Builds the response JS object, writing only defined fields. */
@inline
def apply(
fargateProfile: js.UndefOr[FargateProfile] = js.undefined
): CreateFargateProfileResponse = {
val __obj = js.Dynamic.literal()
fargateProfile.foreach(__v => __obj.updateDynamic("fargateProfile")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateFargateProfileResponse]
}
}
  /** Request shape for the EKS `CreateNodegroup` operation. `clusterName`, `nodeRole`,
    * `nodegroupName` and `subnets` are required; the remaining members are optional.
    */
  @js.native
  trait CreateNodegroupRequest extends js.Object {
    var clusterName: String
    var nodeRole: String
    var nodegroupName: String
    var subnets: StringList
    var amiType: js.UndefOr[AMITypes]
    var capacityType: js.UndefOr[CapacityTypes]
    var clientRequestToken: js.UndefOr[String]
    var diskSize: js.UndefOr[BoxedInteger]
    var instanceTypes: js.UndefOr[StringList]
    var labels: js.UndefOr[labelsMap]
    var launchTemplate: js.UndefOr[LaunchTemplateSpecification]
    var releaseVersion: js.UndefOr[String]
    var remoteAccess: js.UndefOr[RemoteAccessConfig]
    var scalingConfig: js.UndefOr[NodegroupScalingConfig]
    var tags: js.UndefOr[TagMap]
    var version: js.UndefOr[String]
  }
  object CreateNodegroupRequest {
    @inline
    def apply(
        clusterName: String,
        nodeRole: String,
        nodegroupName: String,
        subnets: StringList,
        amiType: js.UndefOr[AMITypes] = js.undefined,
        capacityType: js.UndefOr[CapacityTypes] = js.undefined,
        clientRequestToken: js.UndefOr[String] = js.undefined,
        diskSize: js.UndefOr[BoxedInteger] = js.undefined,
        instanceTypes: js.UndefOr[StringList] = js.undefined,
        labels: js.UndefOr[labelsMap] = js.undefined,
        launchTemplate: js.UndefOr[LaunchTemplateSpecification] = js.undefined,
        releaseVersion: js.UndefOr[String] = js.undefined,
        remoteAccess: js.UndefOr[RemoteAccessConfig] = js.undefined,
        scalingConfig: js.UndefOr[NodegroupScalingConfig] = js.undefined,
        tags: js.UndefOr[TagMap] = js.undefined,
        version: js.UndefOr[String] = js.undefined
    ): CreateNodegroupRequest = {
      // Required members first; each optional member is added only when defined.
      val __obj = js.Dynamic.literal(
        "clusterName" -> clusterName.asInstanceOf[js.Any],
        "nodeRole" -> nodeRole.asInstanceOf[js.Any],
        "nodegroupName" -> nodegroupName.asInstanceOf[js.Any],
        "subnets" -> subnets.asInstanceOf[js.Any]
      )
      amiType.foreach(__v => __obj.updateDynamic("amiType")(__v.asInstanceOf[js.Any]))
      capacityType.foreach(__v => __obj.updateDynamic("capacityType")(__v.asInstanceOf[js.Any]))
      clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
      diskSize.foreach(__v => __obj.updateDynamic("diskSize")(__v.asInstanceOf[js.Any]))
      instanceTypes.foreach(__v => __obj.updateDynamic("instanceTypes")(__v.asInstanceOf[js.Any]))
      labels.foreach(__v => __obj.updateDynamic("labels")(__v.asInstanceOf[js.Any]))
      launchTemplate.foreach(__v => __obj.updateDynamic("launchTemplate")(__v.asInstanceOf[js.Any]))
      releaseVersion.foreach(__v => __obj.updateDynamic("releaseVersion")(__v.asInstanceOf[js.Any]))
      remoteAccess.foreach(__v => __obj.updateDynamic("remoteAccess")(__v.asInstanceOf[js.Any]))
      scalingConfig.foreach(__v => __obj.updateDynamic("scalingConfig")(__v.asInstanceOf[js.Any]))
      tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
      version.foreach(__v => __obj.updateDynamic("version")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[CreateNodegroupRequest]
    }
  }
  /** Response shape for the EKS `CreateNodegroup` operation.
    */
  @js.native
  trait CreateNodegroupResponse extends js.Object {
    var nodegroup: js.UndefOr[Nodegroup]
  }
  object CreateNodegroupResponse {
    @inline
    def apply(
        nodegroup: js.UndefOr[Nodegroup] = js.undefined
    ): CreateNodegroupResponse = {
      val __obj = js.Dynamic.literal()
      nodegroup.foreach(__v => __obj.updateDynamic("nodegroup")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[CreateNodegroupResponse]
    }
  }
  /** Request shape for the EKS `DeleteAddon` operation; both members are required.
    */
  @js.native
  trait DeleteAddonRequest extends js.Object {
    var addonName: String
    var clusterName: ClusterName
  }
  object DeleteAddonRequest {
    @inline
    def apply(
        addonName: String,
        clusterName: ClusterName
    ): DeleteAddonRequest = {
      val __obj = js.Dynamic.literal(
        "addonName" -> addonName.asInstanceOf[js.Any],
        "clusterName" -> clusterName.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[DeleteAddonRequest]
    }
  }
  /** Response shape for the EKS `DeleteAddon` operation.
    */
  @js.native
  trait DeleteAddonResponse extends js.Object {
    var addon: js.UndefOr[Addon]
  }
  object DeleteAddonResponse {
    @inline
    def apply(
        addon: js.UndefOr[Addon] = js.undefined
    ): DeleteAddonResponse = {
      val __obj = js.Dynamic.literal()
      addon.foreach(__v => __obj.updateDynamic("addon")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DeleteAddonResponse]
    }
  }
  /** Request shape for the EKS `DeleteCluster` operation; `name` is required.
    */
  @js.native
  trait DeleteClusterRequest extends js.Object {
    var name: String
  }
  object DeleteClusterRequest {
    @inline
    def apply(
        name: String
    ): DeleteClusterRequest = {
      val __obj = js.Dynamic.literal(
        "name" -> name.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[DeleteClusterRequest]
    }
  }
  /** Response shape for the EKS `DeleteCluster` operation.
    */
  @js.native
  trait DeleteClusterResponse extends js.Object {
    var cluster: js.UndefOr[Cluster]
  }
  object DeleteClusterResponse {
    @inline
    def apply(
        cluster: js.UndefOr[Cluster] = js.undefined
    ): DeleteClusterResponse = {
      val __obj = js.Dynamic.literal()
      cluster.foreach(__v => __obj.updateDynamic("cluster")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DeleteClusterResponse]
    }
  }
  /** Request shape for the EKS `DeleteFargateProfile` operation; both members are required.
    */
  @js.native
  trait DeleteFargateProfileRequest extends js.Object {
    var clusterName: String
    var fargateProfileName: String
  }
  object DeleteFargateProfileRequest {
    @inline
    def apply(
        clusterName: String,
        fargateProfileName: String
    ): DeleteFargateProfileRequest = {
      val __obj = js.Dynamic.literal(
        "clusterName" -> clusterName.asInstanceOf[js.Any],
        "fargateProfileName" -> fargateProfileName.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[DeleteFargateProfileRequest]
    }
  }
  /** Response shape for the EKS `DeleteFargateProfile` operation.
    */
  @js.native
  trait DeleteFargateProfileResponse extends js.Object {
    var fargateProfile: js.UndefOr[FargateProfile]
  }
  object DeleteFargateProfileResponse {
    @inline
    def apply(
        fargateProfile: js.UndefOr[FargateProfile] = js.undefined
    ): DeleteFargateProfileResponse = {
      val __obj = js.Dynamic.literal()
      fargateProfile.foreach(__v => __obj.updateDynamic("fargateProfile")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DeleteFargateProfileResponse]
    }
  }
  /** Request shape for the EKS `DeleteNodegroup` operation; both members are required.
    */
  @js.native
  trait DeleteNodegroupRequest extends js.Object {
    var clusterName: String
    var nodegroupName: String
  }
  object DeleteNodegroupRequest {
    @inline
    def apply(
        clusterName: String,
        nodegroupName: String
    ): DeleteNodegroupRequest = {
      val __obj = js.Dynamic.literal(
        "clusterName" -> clusterName.asInstanceOf[js.Any],
        "nodegroupName" -> nodegroupName.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[DeleteNodegroupRequest]
    }
  }
  /** Response shape for the EKS `DeleteNodegroup` operation.
    */
  @js.native
  trait DeleteNodegroupResponse extends js.Object {
    var nodegroup: js.UndefOr[Nodegroup]
  }
  object DeleteNodegroupResponse {
    @inline
    def apply(
        nodegroup: js.UndefOr[Nodegroup] = js.undefined
    ): DeleteNodegroupResponse = {
      val __obj = js.Dynamic.literal()
      nodegroup.foreach(__v => __obj.updateDynamic("nodegroup")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DeleteNodegroupResponse]
    }
  }
  /** Request shape for the EKS `DescribeAddon` operation; both members are required.
    */
  @js.native
  trait DescribeAddonRequest extends js.Object {
    var addonName: String
    var clusterName: ClusterName
  }
  object DescribeAddonRequest {
    @inline
    def apply(
        addonName: String,
        clusterName: ClusterName
    ): DescribeAddonRequest = {
      val __obj = js.Dynamic.literal(
        "addonName" -> addonName.asInstanceOf[js.Any],
        "clusterName" -> clusterName.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[DescribeAddonRequest]
    }
  }
  /** Response shape for the EKS `DescribeAddon` operation.
    */
  @js.native
  trait DescribeAddonResponse extends js.Object {
    var addon: js.UndefOr[Addon]
  }
  object DescribeAddonResponse {
    @inline
    def apply(
        addon: js.UndefOr[Addon] = js.undefined
    ): DescribeAddonResponse = {
      val __obj = js.Dynamic.literal()
      addon.foreach(__v => __obj.updateDynamic("addon")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DescribeAddonResponse]
    }
  }
  /** Request shape for the EKS `DescribeAddonVersions` operation; all members are optional.
    * `maxResults`/`nextToken` are the usual pagination controls.
    */
  @js.native
  trait DescribeAddonVersionsRequest extends js.Object {
    var addonName: js.UndefOr[String]
    var kubernetesVersion: js.UndefOr[String]
    var maxResults: js.UndefOr[DescribeAddonVersionsRequestMaxResults]
    var nextToken: js.UndefOr[String]
  }
  object DescribeAddonVersionsRequest {
    @inline
    def apply(
        addonName: js.UndefOr[String] = js.undefined,
        kubernetesVersion: js.UndefOr[String] = js.undefined,
        maxResults: js.UndefOr[DescribeAddonVersionsRequestMaxResults] = js.undefined,
        nextToken: js.UndefOr[String] = js.undefined
    ): DescribeAddonVersionsRequest = {
      // All members are optional: start from an empty literal and add only
      // the members that were supplied.
      val __obj = js.Dynamic.literal()
      addonName.foreach(__v => __obj.updateDynamic("addonName")(__v.asInstanceOf[js.Any]))
      kubernetesVersion.foreach(__v => __obj.updateDynamic("kubernetesVersion")(__v.asInstanceOf[js.Any]))
      maxResults.foreach(__v => __obj.updateDynamic("maxResults")(__v.asInstanceOf[js.Any]))
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DescribeAddonVersionsRequest]
    }
  }
  /** Response shape for the EKS `DescribeAddonVersions` operation; `nextToken` is set when
    * further pages are available.
    */
  @js.native
  trait DescribeAddonVersionsResponse extends js.Object {
    var addons: js.UndefOr[Addons]
    var nextToken: js.UndefOr[String]
  }
  object DescribeAddonVersionsResponse {
    @inline
    def apply(
        addons: js.UndefOr[Addons] = js.undefined,
        nextToken: js.UndefOr[String] = js.undefined
    ): DescribeAddonVersionsResponse = {
      val __obj = js.Dynamic.literal()
      addons.foreach(__v => __obj.updateDynamic("addons")(__v.asInstanceOf[js.Any]))
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DescribeAddonVersionsResponse]
    }
  }
  /** Request shape for the EKS `DescribeCluster` operation; `name` is required.
    */
  @js.native
  trait DescribeClusterRequest extends js.Object {
    var name: String
  }
  object DescribeClusterRequest {
    @inline
    def apply(
        name: String
    ): DescribeClusterRequest = {
      val __obj = js.Dynamic.literal(
        "name" -> name.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[DescribeClusterRequest]
    }
  }
  /** Response shape for the EKS `DescribeCluster` operation.
    */
  @js.native
  trait DescribeClusterResponse extends js.Object {
    var cluster: js.UndefOr[Cluster]
  }
  object DescribeClusterResponse {
    @inline
    def apply(
        cluster: js.UndefOr[Cluster] = js.undefined
    ): DescribeClusterResponse = {
      val __obj = js.Dynamic.literal()
      cluster.foreach(__v => __obj.updateDynamic("cluster")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DescribeClusterResponse]
    }
  }
  /** Request shape for the EKS `DescribeFargateProfile` operation; both members are required.
    */
  @js.native
  trait DescribeFargateProfileRequest extends js.Object {
    var clusterName: String
    var fargateProfileName: String
  }
  object DescribeFargateProfileRequest {
    @inline
    def apply(
        clusterName: String,
        fargateProfileName: String
    ): DescribeFargateProfileRequest = {
      val __obj = js.Dynamic.literal(
        "clusterName" -> clusterName.asInstanceOf[js.Any],
        "fargateProfileName" -> fargateProfileName.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[DescribeFargateProfileRequest]
    }
  }
  /** Response shape for the EKS `DescribeFargateProfile` operation.
    */
  @js.native
  trait DescribeFargateProfileResponse extends js.Object {
    var fargateProfile: js.UndefOr[FargateProfile]
  }
  object DescribeFargateProfileResponse {
    @inline
    def apply(
        fargateProfile: js.UndefOr[FargateProfile] = js.undefined
    ): DescribeFargateProfileResponse = {
      val __obj = js.Dynamic.literal()
      fargateProfile.foreach(__v => __obj.updateDynamic("fargateProfile")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DescribeFargateProfileResponse]
    }
  }
  /** Request shape for the EKS `DescribeIdentityProviderConfig` operation; both members are
    * required.
    */
  @js.native
  trait DescribeIdentityProviderConfigRequest extends js.Object {
    var clusterName: String
    var identityProviderConfig: IdentityProviderConfig
  }
  object DescribeIdentityProviderConfigRequest {
    @inline
    def apply(
        clusterName: String,
        identityProviderConfig: IdentityProviderConfig
    ): DescribeIdentityProviderConfigRequest = {
      val __obj = js.Dynamic.literal(
        "clusterName" -> clusterName.asInstanceOf[js.Any],
        "identityProviderConfig" -> identityProviderConfig.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[DescribeIdentityProviderConfigRequest]
    }
  }
  /** Response shape for the EKS `DescribeIdentityProviderConfig` operation.
    */
  @js.native
  trait DescribeIdentityProviderConfigResponse extends js.Object {
    var identityProviderConfig: js.UndefOr[IdentityProviderConfigResponse]
  }
  object DescribeIdentityProviderConfigResponse {
    @inline
    def apply(
        identityProviderConfig: js.UndefOr[IdentityProviderConfigResponse] = js.undefined
    ): DescribeIdentityProviderConfigResponse = {
      val __obj = js.Dynamic.literal()
      identityProviderConfig.foreach(__v => __obj.updateDynamic("identityProviderConfig")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DescribeIdentityProviderConfigResponse]
    }
  }
  /** Request shape for the EKS `DescribeNodegroup` operation; both members are required.
    */
  @js.native
  trait DescribeNodegroupRequest extends js.Object {
    var clusterName: String
    var nodegroupName: String
  }
  object DescribeNodegroupRequest {
    @inline
    def apply(
        clusterName: String,
        nodegroupName: String
    ): DescribeNodegroupRequest = {
      val __obj = js.Dynamic.literal(
        "clusterName" -> clusterName.asInstanceOf[js.Any],
        "nodegroupName" -> nodegroupName.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[DescribeNodegroupRequest]
    }
  }
  /** Response shape for the EKS `DescribeNodegroup` operation.
    */
  @js.native
  trait DescribeNodegroupResponse extends js.Object {
    var nodegroup: js.UndefOr[Nodegroup]
  }
  object DescribeNodegroupResponse {
    @inline
    def apply(
        nodegroup: js.UndefOr[Nodegroup] = js.undefined
    ): DescribeNodegroupResponse = {
      val __obj = js.Dynamic.literal()
      nodegroup.foreach(__v => __obj.updateDynamic("nodegroup")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DescribeNodegroupResponse]
    }
  }
  /** Request shape for the EKS `DescribeUpdate` operation. `name` and `updateId` are required;
    * `addonName` and `nodegroupName` are optional.
    */
  @js.native
  trait DescribeUpdateRequest extends js.Object {
    var name: String
    var updateId: String
    var addonName: js.UndefOr[String]
    var nodegroupName: js.UndefOr[String]
  }
  object DescribeUpdateRequest {
    @inline
    def apply(
        name: String,
        updateId: String,
        addonName: js.UndefOr[String] = js.undefined,
        nodegroupName: js.UndefOr[String] = js.undefined
    ): DescribeUpdateRequest = {
      val __obj = js.Dynamic.literal(
        "name" -> name.asInstanceOf[js.Any],
        "updateId" -> updateId.asInstanceOf[js.Any]
      )
      addonName.foreach(__v => __obj.updateDynamic("addonName")(__v.asInstanceOf[js.Any]))
      nodegroupName.foreach(__v => __obj.updateDynamic("nodegroupName")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DescribeUpdateRequest]
    }
  }
  /** Response shape for the EKS `DescribeUpdate` operation.
    */
  @js.native
  trait DescribeUpdateResponse extends js.Object {
    var update: js.UndefOr[Update]
  }
  object DescribeUpdateResponse {
    @inline
    def apply(
        update: js.UndefOr[Update] = js.undefined
    ): DescribeUpdateResponse = {
      val __obj = js.Dynamic.literal()
      update.foreach(__v => __obj.updateDynamic("update")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DescribeUpdateResponse]
    }
  }
  /** Request shape for the EKS `DisassociateIdentityProviderConfig` operation. `clusterName`
    * and `identityProviderConfig` are required; `clientRequestToken` is optional.
    */
  @js.native
  trait DisassociateIdentityProviderConfigRequest extends js.Object {
    var clusterName: String
    var identityProviderConfig: IdentityProviderConfig
    var clientRequestToken: js.UndefOr[String]
  }
  object DisassociateIdentityProviderConfigRequest {
    @inline
    def apply(
        clusterName: String,
        identityProviderConfig: IdentityProviderConfig,
        clientRequestToken: js.UndefOr[String] = js.undefined
    ): DisassociateIdentityProviderConfigRequest = {
      val __obj = js.Dynamic.literal(
        "clusterName" -> clusterName.asInstanceOf[js.Any],
        "identityProviderConfig" -> identityProviderConfig.asInstanceOf[js.Any]
      )
      clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DisassociateIdentityProviderConfigRequest]
    }
  }
  /** Response shape for the EKS `DisassociateIdentityProviderConfig` operation.
    */
  @js.native
  trait DisassociateIdentityProviderConfigResponse extends js.Object {
    var update: js.UndefOr[Update]
  }
  object DisassociateIdentityProviderConfigResponse {
    @inline
    def apply(
        update: js.UndefOr[Update] = js.undefined
    ): DisassociateIdentityProviderConfigResponse = {
      val __obj = js.Dynamic.literal()
      update.foreach(__v => __obj.updateDynamic("update")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[DisassociateIdentityProviderConfigResponse]
    }
  }
  /** The encryption configuration for the cluster.
    */
  @js.native
  trait EncryptionConfig extends js.Object {
    var provider: js.UndefOr[Provider]
    var resources: js.UndefOr[StringList]
  }
  object EncryptionConfig {
    @inline
    def apply(
        provider: js.UndefOr[Provider] = js.undefined,
        resources: js.UndefOr[StringList] = js.undefined
    ): EncryptionConfig = {
      val __obj = js.Dynamic.literal()
      provider.foreach(__v => __obj.updateDynamic("provider")(__v.asInstanceOf[js.Any]))
      resources.foreach(__v => __obj.updateDynamic("resources")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[EncryptionConfig]
    }
  }
  /** String-backed enumeration of EKS error codes. Each value is the raw wire string cast to
    * the opaque `ErrorCode` type; `values` lists every known member.
    */
  @js.native
  sealed trait ErrorCode extends js.Any
  object ErrorCode {
    val SubnetNotFound = "SubnetNotFound".asInstanceOf[ErrorCode]
    val SecurityGroupNotFound = "SecurityGroupNotFound".asInstanceOf[ErrorCode]
    val EniLimitReached = "EniLimitReached".asInstanceOf[ErrorCode]
    val IpNotAvailable = "IpNotAvailable".asInstanceOf[ErrorCode]
    val AccessDenied = "AccessDenied".asInstanceOf[ErrorCode]
    val OperationNotPermitted = "OperationNotPermitted".asInstanceOf[ErrorCode]
    val VpcIdNotFound = "VpcIdNotFound".asInstanceOf[ErrorCode]
    val Unknown = "Unknown".asInstanceOf[ErrorCode]
    val NodeCreationFailure = "NodeCreationFailure".asInstanceOf[ErrorCode]
    val PodEvictionFailure = "PodEvictionFailure".asInstanceOf[ErrorCode]
    val InsufficientFreeAddresses = "InsufficientFreeAddresses".asInstanceOf[ErrorCode]
    val ClusterUnreachable = "ClusterUnreachable".asInstanceOf[ErrorCode]
    val InsufficientNumberOfReplicas = "InsufficientNumberOfReplicas".asInstanceOf[ErrorCode]
    val ConfigurationConflict = "ConfigurationConflict".asInstanceOf[ErrorCode]
    val AdmissionRequestDenied = "AdmissionRequestDenied".asInstanceOf[ErrorCode]
    @inline def values = js.Array(
      SubnetNotFound,
      SecurityGroupNotFound,
      EniLimitReached,
      IpNotAvailable,
      AccessDenied,
      OperationNotPermitted,
      VpcIdNotFound,
      Unknown,
      NodeCreationFailure,
      PodEvictionFailure,
      InsufficientFreeAddresses,
      ClusterUnreachable,
      InsufficientNumberOfReplicas,
      ConfigurationConflict,
      AdmissionRequestDenied
    )
  }
  /** An object representing an error when an asynchronous operation fails.
    */
  @js.native
  trait ErrorDetail extends js.Object {
    var errorCode: js.UndefOr[ErrorCode]
    var errorMessage: js.UndefOr[String]
    var resourceIds: js.UndefOr[StringList]
  }
  object ErrorDetail {
    @inline
    def apply(
        errorCode: js.UndefOr[ErrorCode] = js.undefined,
        errorMessage: js.UndefOr[String] = js.undefined,
        resourceIds: js.UndefOr[StringList] = js.undefined
    ): ErrorDetail = {
      val __obj = js.Dynamic.literal()
      errorCode.foreach(__v => __obj.updateDynamic("errorCode")(__v.asInstanceOf[js.Any]))
      errorMessage.foreach(__v => __obj.updateDynamic("errorMessage")(__v.asInstanceOf[js.Any]))
      resourceIds.foreach(__v => __obj.updateDynamic("resourceIds")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ErrorDetail]
    }
  }
  /** An object representing an AWS Fargate profile.
    */
  @js.native
  trait FargateProfile extends js.Object {
    var clusterName: js.UndefOr[String]
    var createdAt: js.UndefOr[Timestamp]
    var fargateProfileArn: js.UndefOr[String]
    var fargateProfileName: js.UndefOr[String]
    var podExecutionRoleArn: js.UndefOr[String]
    var selectors: js.UndefOr[FargateProfileSelectors]
    var status: js.UndefOr[FargateProfileStatus]
    var subnets: js.UndefOr[StringList]
    var tags: js.UndefOr[TagMap]
  }
  object FargateProfile {
    @inline
    def apply(
        clusterName: js.UndefOr[String] = js.undefined,
        createdAt: js.UndefOr[Timestamp] = js.undefined,
        fargateProfileArn: js.UndefOr[String] = js.undefined,
        fargateProfileName: js.UndefOr[String] = js.undefined,
        podExecutionRoleArn: js.UndefOr[String] = js.undefined,
        selectors: js.UndefOr[FargateProfileSelectors] = js.undefined,
        status: js.UndefOr[FargateProfileStatus] = js.undefined,
        subnets: js.UndefOr[StringList] = js.undefined,
        tags: js.UndefOr[TagMap] = js.undefined
    ): FargateProfile = {
      // All members are optional; only supplied members are written.
      val __obj = js.Dynamic.literal()
      clusterName.foreach(__v => __obj.updateDynamic("clusterName")(__v.asInstanceOf[js.Any]))
      createdAt.foreach(__v => __obj.updateDynamic("createdAt")(__v.asInstanceOf[js.Any]))
      fargateProfileArn.foreach(__v => __obj.updateDynamic("fargateProfileArn")(__v.asInstanceOf[js.Any]))
      fargateProfileName.foreach(__v => __obj.updateDynamic("fargateProfileName")(__v.asInstanceOf[js.Any]))
      podExecutionRoleArn.foreach(__v => __obj.updateDynamic("podExecutionRoleArn")(__v.asInstanceOf[js.Any]))
      selectors.foreach(__v => __obj.updateDynamic("selectors")(__v.asInstanceOf[js.Any]))
      status.foreach(__v => __obj.updateDynamic("status")(__v.asInstanceOf[js.Any]))
      subnets.foreach(__v => __obj.updateDynamic("subnets")(__v.asInstanceOf[js.Any]))
      tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[FargateProfile]
    }
  }
  /** An object representing an AWS Fargate profile selector.
    */
  @js.native
  trait FargateProfileSelector extends js.Object {
    var labels: js.UndefOr[FargateProfileLabel]
    var namespace: js.UndefOr[String]
  }
  object FargateProfileSelector {
    @inline
    def apply(
        labels: js.UndefOr[FargateProfileLabel] = js.undefined,
        namespace: js.UndefOr[String] = js.undefined
    ): FargateProfileSelector = {
      val __obj = js.Dynamic.literal()
      labels.foreach(__v => __obj.updateDynamic("labels")(__v.asInstanceOf[js.Any]))
      namespace.foreach(__v => __obj.updateDynamic("namespace")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[FargateProfileSelector]
    }
  }
  /** String-backed enumeration of Fargate profile lifecycle states.
    */
  @js.native
  sealed trait FargateProfileStatus extends js.Any
  object FargateProfileStatus {
    val CREATING = "CREATING".asInstanceOf[FargateProfileStatus]
    val ACTIVE = "ACTIVE".asInstanceOf[FargateProfileStatus]
    val DELETING = "DELETING".asInstanceOf[FargateProfileStatus]
    val CREATE_FAILED = "CREATE_FAILED".asInstanceOf[FargateProfileStatus]
    val DELETE_FAILED = "DELETE_FAILED".asInstanceOf[FargateProfileStatus]
    @inline def values = js.Array(CREATING, ACTIVE, DELETING, CREATE_FAILED, DELETE_FAILED)
  }
  /** An object representing an identity provider.
    */
  @js.native
  trait Identity extends js.Object {
    var oidc: js.UndefOr[OIDC]
  }
  object Identity {
    @inline
    def apply(
        oidc: js.UndefOr[OIDC] = js.undefined
    ): Identity = {
      val __obj = js.Dynamic.literal()
      oidc.foreach(__v => __obj.updateDynamic("oidc")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[Identity]
    }
  }
  /** An object representing an identity provider configuration.
    */
  @js.native
  trait IdentityProviderConfig extends js.Object {
    var name: String
    var `type`: String
  }
  object IdentityProviderConfig {
    @inline
    def apply(
        name: String,
        `type`: String
    ): IdentityProviderConfig = {
      // `type` is a Scala keyword, hence the backticks; the JSON key is still "type".
      val __obj = js.Dynamic.literal(
        "name" -> name.asInstanceOf[js.Any],
        "type" -> `type`.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[IdentityProviderConfig]
    }
  }
  /** An object that represents an identity configuration.
    */
  @js.native
  trait IdentityProviderConfigResponse extends js.Object {
    var oidc: js.UndefOr[OidcIdentityProviderConfig]
  }
  /** Builder that constructs the shape as a raw JS object, writing only supplied members.
    */
  object IdentityProviderConfigResponse {
    @inline
    def apply(
        oidc: js.UndefOr[OidcIdentityProviderConfig] = js.undefined
    ): IdentityProviderConfigResponse = {
      val __obj = js.Dynamic.literal()
      oidc.foreach(__v => __obj.updateDynamic("oidc")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[IdentityProviderConfigResponse]
    }
  }
  /** An object representing an issue with an Amazon EKS resource.
    */
  @js.native
  trait Issue extends js.Object {
    var code: js.UndefOr[NodegroupIssueCode]
    var message: js.UndefOr[String]
    var resourceIds: js.UndefOr[StringList]
  }
  object Issue {
    @inline
    def apply(
        code: js.UndefOr[NodegroupIssueCode] = js.undefined,
        message: js.UndefOr[String] = js.undefined,
        resourceIds: js.UndefOr[StringList] = js.undefined
    ): Issue = {
      val __obj = js.Dynamic.literal()
      code.foreach(__v => __obj.updateDynamic("code")(__v.asInstanceOf[js.Any]))
      message.foreach(__v => __obj.updateDynamic("message")(__v.asInstanceOf[js.Any]))
      resourceIds.foreach(__v => __obj.updateDynamic("resourceIds")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[Issue]
    }
  }
  /** The Kubernetes network configuration for the cluster.
    */
  @js.native
  trait KubernetesNetworkConfigRequest extends js.Object {
    var serviceIpv4Cidr: js.UndefOr[String]
  }
  object KubernetesNetworkConfigRequest {
    @inline
    def apply(
        serviceIpv4Cidr: js.UndefOr[String] = js.undefined
    ): KubernetesNetworkConfigRequest = {
      val __obj = js.Dynamic.literal()
      serviceIpv4Cidr.foreach(__v => __obj.updateDynamic("serviceIpv4Cidr")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[KubernetesNetworkConfigRequest]
    }
  }
  /** The Kubernetes network configuration for the cluster.
    */
  @js.native
  trait KubernetesNetworkConfigResponse extends js.Object {
    var serviceIpv4Cidr: js.UndefOr[String]
  }
  object KubernetesNetworkConfigResponse {
    @inline
    def apply(
        serviceIpv4Cidr: js.UndefOr[String] = js.undefined
    ): KubernetesNetworkConfigResponse = {
      val __obj = js.Dynamic.literal()
      serviceIpv4Cidr.foreach(__v => __obj.updateDynamic("serviceIpv4Cidr")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[KubernetesNetworkConfigResponse]
    }
  }
  /** An object representing a node group launch template specification. The launch template cannot include <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_CreateNetworkInterface.html"> <code>SubnetId</code> </a>, <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_IamInstanceProfile.html"> <code>IamInstanceProfile</code> </a>, <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_RequestSpotInstances.html"> <code>RequestSpotInstances</code> </a>, <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_HibernationOptionsRequest.html"> <code>HibernationOptions</code> </a>, or <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_TerminateInstances.html"> <code>TerminateInstances</code> </a>, or the node group deployment or update will fail. For more information about launch templates, see <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_CreateLaunchTemplate.html">
    * <code>CreateLaunchTemplate</code> </a> in the Amazon EC2 API Reference. For more information about using launch templates with Amazon EKS, see [[https://docs.aws.amazon.com/eks/latest/userguide/launch-templates.html|Launch template support]] in the Amazon EKS User Guide. Specify either <code>name</code> or <code>id</code>, but not both.
    */
  @js.native
  trait LaunchTemplateSpecification extends js.Object {
    var id: js.UndefOr[String]
    var name: js.UndefOr[String]
    var version: js.UndefOr[String]
  }
  object LaunchTemplateSpecification {
    @inline
    def apply(
        id: js.UndefOr[String] = js.undefined,
        name: js.UndefOr[String] = js.undefined,
        version: js.UndefOr[String] = js.undefined
    ): LaunchTemplateSpecification = {
      val __obj = js.Dynamic.literal()
      id.foreach(__v => __obj.updateDynamic("id")(__v.asInstanceOf[js.Any]))
      name.foreach(__v => __obj.updateDynamic("name")(__v.asInstanceOf[js.Any]))
      version.foreach(__v => __obj.updateDynamic("version")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[LaunchTemplateSpecification]
    }
  }
  /** Request shape for the EKS `ListAddons` operation. `clusterName` is required;
    * `maxResults`/`nextToken` are the pagination controls.
    */
  @js.native
  trait ListAddonsRequest extends js.Object {
    var clusterName: ClusterName
    var maxResults: js.UndefOr[ListAddonsRequestMaxResults]
    var nextToken: js.UndefOr[String]
  }
  object ListAddonsRequest {
    @inline
    def apply(
        clusterName: ClusterName,
        maxResults: js.UndefOr[ListAddonsRequestMaxResults] = js.undefined,
        nextToken: js.UndefOr[String] = js.undefined
    ): ListAddonsRequest = {
      val __obj = js.Dynamic.literal(
        "clusterName" -> clusterName.asInstanceOf[js.Any]
      )
      maxResults.foreach(__v => __obj.updateDynamic("maxResults")(__v.asInstanceOf[js.Any]))
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListAddonsRequest]
    }
  }
  /** Response shape for the EKS `ListAddons` operation; `nextToken` is set when more pages
    * are available.
    */
  @js.native
  trait ListAddonsResponse extends js.Object {
    var addons: js.UndefOr[StringList]
    var nextToken: js.UndefOr[String]
  }
  object ListAddonsResponse {
    @inline
    def apply(
        addons: js.UndefOr[StringList] = js.undefined,
        nextToken: js.UndefOr[String] = js.undefined
    ): ListAddonsResponse = {
      val __obj = js.Dynamic.literal()
      addons.foreach(__v => __obj.updateDynamic("addons")(__v.asInstanceOf[js.Any]))
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListAddonsResponse]
    }
  }
  /** Request shape for the EKS `ListClusters` operation; both pagination members are optional.
    */
  @js.native
  trait ListClustersRequest extends js.Object {
    var maxResults: js.UndefOr[ListClustersRequestMaxResults]
    var nextToken: js.UndefOr[String]
  }
  object ListClustersRequest {
    @inline
    def apply(
        maxResults: js.UndefOr[ListClustersRequestMaxResults] = js.undefined,
        nextToken: js.UndefOr[String] = js.undefined
    ): ListClustersRequest = {
      val __obj = js.Dynamic.literal()
      maxResults.foreach(__v => __obj.updateDynamic("maxResults")(__v.asInstanceOf[js.Any]))
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListClustersRequest]
    }
  }
  /** Response shape for the EKS `ListClusters` operation.
    */
  @js.native
  trait ListClustersResponse extends js.Object {
    var clusters: js.UndefOr[StringList]
    var nextToken: js.UndefOr[String]
  }
  object ListClustersResponse {
    @inline
    def apply(
        clusters: js.UndefOr[StringList] = js.undefined,
        nextToken: js.UndefOr[String] = js.undefined
    ): ListClustersResponse = {
      val __obj = js.Dynamic.literal()
      clusters.foreach(__v => __obj.updateDynamic("clusters")(__v.asInstanceOf[js.Any]))
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListClustersResponse]
    }
  }
  /** Request shape for the EKS `ListFargateProfiles` operation; `clusterName` is required.
    */
  @js.native
  trait ListFargateProfilesRequest extends js.Object {
    var clusterName: String
    var maxResults: js.UndefOr[FargateProfilesRequestMaxResults]
    var nextToken: js.UndefOr[String]
  }
  object ListFargateProfilesRequest {
    @inline
    def apply(
        clusterName: String,
        maxResults: js.UndefOr[FargateProfilesRequestMaxResults] = js.undefined,
        nextToken: js.UndefOr[String] = js.undefined
    ): ListFargateProfilesRequest = {
      val __obj = js.Dynamic.literal(
        "clusterName" -> clusterName.asInstanceOf[js.Any]
      )
      maxResults.foreach(__v => __obj.updateDynamic("maxResults")(__v.asInstanceOf[js.Any]))
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListFargateProfilesRequest]
    }
  }
  /** Response shape for the EKS `ListFargateProfiles` operation.
    */
  @js.native
  trait ListFargateProfilesResponse extends js.Object {
    var fargateProfileNames: js.UndefOr[StringList]
    var nextToken: js.UndefOr[String]
  }
  object ListFargateProfilesResponse {
    @inline
    def apply(
        fargateProfileNames: js.UndefOr[StringList] = js.undefined,
        nextToken: js.UndefOr[String] = js.undefined
    ): ListFargateProfilesResponse = {
      val __obj = js.Dynamic.literal()
      fargateProfileNames.foreach(__v => __obj.updateDynamic("fargateProfileNames")(__v.asInstanceOf[js.Any]))
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListFargateProfilesResponse]
    }
  }
  /** Request shape for the EKS `ListIdentityProviderConfigs` operation; `clusterName` is
    * required.
    */
  @js.native
  trait ListIdentityProviderConfigsRequest extends js.Object {
    var clusterName: String
    var maxResults: js.UndefOr[ListIdentityProviderConfigsRequestMaxResults]
    var nextToken: js.UndefOr[String]
  }
  object ListIdentityProviderConfigsRequest {
    @inline
    def apply(
        clusterName: String,
        maxResults: js.UndefOr[ListIdentityProviderConfigsRequestMaxResults] = js.undefined,
        nextToken: js.UndefOr[String] = js.undefined
    ): ListIdentityProviderConfigsRequest = {
      val __obj = js.Dynamic.literal(
        "clusterName" -> clusterName.asInstanceOf[js.Any]
      )
      maxResults.foreach(__v => __obj.updateDynamic("maxResults")(__v.asInstanceOf[js.Any]))
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListIdentityProviderConfigsRequest]
    }
  }
  /** Response shape for the EKS `ListIdentityProviderConfigs` operation.
    */
  @js.native
  trait ListIdentityProviderConfigsResponse extends js.Object {
    var identityProviderConfigs: js.UndefOr[IdentityProviderConfigs]
    var nextToken: js.UndefOr[String]
  }
  object ListIdentityProviderConfigsResponse {
    @inline
    def apply(
        identityProviderConfigs: js.UndefOr[IdentityProviderConfigs] = js.undefined,
        nextToken: js.UndefOr[String] = js.undefined
    ): ListIdentityProviderConfigsResponse = {
      val __obj = js.Dynamic.literal()
      identityProviderConfigs.foreach(__v => __obj.updateDynamic("identityProviderConfigs")(__v.asInstanceOf[js.Any]))
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListIdentityProviderConfigsResponse]
    }
  }
  /** Request shape for the EKS `ListNodegroups` operation; `clusterName` is required.
    */
  @js.native
  trait ListNodegroupsRequest extends js.Object {
    var clusterName: String
    var maxResults: js.UndefOr[ListNodegroupsRequestMaxResults]
    var nextToken: js.UndefOr[String]
  }
  object ListNodegroupsRequest {
    @inline
    def apply(
        clusterName: String,
        maxResults: js.UndefOr[ListNodegroupsRequestMaxResults] = js.undefined,
        nextToken: js.UndefOr[String] = js.undefined
    ): ListNodegroupsRequest = {
      val __obj = js.Dynamic.literal(
        "clusterName" -> clusterName.asInstanceOf[js.Any]
      )
      maxResults.foreach(__v => __obj.updateDynamic("maxResults")(__v.asInstanceOf[js.Any]))
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListNodegroupsRequest]
    }
  }
  /** Response shape for the EKS `ListNodegroups` operation.
    */
  @js.native
  trait ListNodegroupsResponse extends js.Object {
    var nextToken: js.UndefOr[String]
    var nodegroups: js.UndefOr[StringList]
  }
  object ListNodegroupsResponse {
    @inline
    def apply(
        nextToken: js.UndefOr[String] = js.undefined,
        nodegroups: js.UndefOr[StringList] = js.undefined
    ): ListNodegroupsResponse = {
      val __obj = js.Dynamic.literal()
      nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
      nodegroups.foreach(__v => __obj.updateDynamic("nodegroups")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListNodegroupsResponse]
    }
  }
  /** Request shape for the EKS `ListTagsForResource` operation; `resourceArn` is required.
    */
  @js.native
  trait ListTagsForResourceRequest extends js.Object {
    var resourceArn: String
  }
  object ListTagsForResourceRequest {
    @inline
    def apply(
        resourceArn: String
    ): ListTagsForResourceRequest = {
      val __obj = js.Dynamic.literal(
        "resourceArn" -> resourceArn.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[ListTagsForResourceRequest]
    }
  }
  /** Response shape for the EKS `ListTagsForResource` operation.
    */
  @js.native
  trait ListTagsForResourceResponse extends js.Object {
    var tags: js.UndefOr[TagMap]
  }
  object ListTagsForResourceResponse {
    @inline
    def apply(
        tags: js.UndefOr[TagMap] = js.undefined
    ): ListTagsForResourceResponse = {
      val __obj = js.Dynamic.literal()
      tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[ListTagsForResourceResponse]
    }
  }
@js.native
trait ListUpdatesRequest extends js.Object {
var name: String
var addonName: js.UndefOr[String]
var maxResults: js.UndefOr[ListUpdatesRequestMaxResults]
var nextToken: js.UndefOr[String]
var nodegroupName: js.UndefOr[String]
}
object ListUpdatesRequest {
@inline
def apply(
name: String,
addonName: js.UndefOr[String] = js.undefined,
maxResults: js.UndefOr[ListUpdatesRequestMaxResults] = js.undefined,
nextToken: js.UndefOr[String] = js.undefined,
nodegroupName: js.UndefOr[String] = js.undefined
): ListUpdatesRequest = {
val __obj = js.Dynamic.literal(
"name" -> name.asInstanceOf[js.Any]
)
addonName.foreach(__v => __obj.updateDynamic("addonName")(__v.asInstanceOf[js.Any]))
maxResults.foreach(__v => __obj.updateDynamic("maxResults")(__v.asInstanceOf[js.Any]))
nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
nodegroupName.foreach(__v => __obj.updateDynamic("nodegroupName")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListUpdatesRequest]
}
}
@js.native
trait ListUpdatesResponse extends js.Object {
var nextToken: js.UndefOr[String]
var updateIds: js.UndefOr[StringList]
}
object ListUpdatesResponse {
@inline
def apply(
nextToken: js.UndefOr[String] = js.undefined,
updateIds: js.UndefOr[StringList] = js.undefined
): ListUpdatesResponse = {
val __obj = js.Dynamic.literal()
nextToken.foreach(__v => __obj.updateDynamic("nextToken")(__v.asInstanceOf[js.Any]))
updateIds.foreach(__v => __obj.updateDynamic("updateIds")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListUpdatesResponse]
}
}
/** An object representing the enabled or disabled Kubernetes control plane logs for your cluster.
*/
@js.native
trait LogSetup extends js.Object {
var enabled: js.UndefOr[BoxedBoolean]
var types: js.UndefOr[LogTypes]
}
object LogSetup {
@inline
def apply(
enabled: js.UndefOr[BoxedBoolean] = js.undefined,
types: js.UndefOr[LogTypes] = js.undefined
): LogSetup = {
val __obj = js.Dynamic.literal()
enabled.foreach(__v => __obj.updateDynamic("enabled")(__v.asInstanceOf[js.Any]))
types.foreach(__v => __obj.updateDynamic("types")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[LogSetup]
}
}
  // String-backed enum of Kubernetes control plane log types (used in LogSetup.types).
  @js.native
  sealed trait LogType extends js.Any
  object LogType {
    val api = "api".asInstanceOf[LogType]
    val audit = "audit".asInstanceOf[LogType]
    val authenticator = "authenticator".asInstanceOf[LogType]
    val controllerManager = "controllerManager".asInstanceOf[LogType]
    val scheduler = "scheduler".asInstanceOf[LogType]
    // All known values, in declaration order.
    @inline def values = js.Array(api, audit, authenticator, controllerManager, scheduler)
  }
/** An object representing the logging configuration for resources in your cluster.
*/
@js.native
trait Logging extends js.Object {
var clusterLogging: js.UndefOr[LogSetups]
}
object Logging {
@inline
def apply(
clusterLogging: js.UndefOr[LogSetups] = js.undefined
): Logging = {
val __obj = js.Dynamic.literal()
clusterLogging.foreach(__v => __obj.updateDynamic("clusterLogging")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Logging]
}
}
/** An object representing an Amazon EKS managed node group.
*/
@js.native
trait Nodegroup extends js.Object {
var amiType: js.UndefOr[AMITypes]
var capacityType: js.UndefOr[CapacityTypes]
var clusterName: js.UndefOr[String]
var createdAt: js.UndefOr[Timestamp]
var diskSize: js.UndefOr[BoxedInteger]
var health: js.UndefOr[NodegroupHealth]
var instanceTypes: js.UndefOr[StringList]
var labels: js.UndefOr[labelsMap]
var launchTemplate: js.UndefOr[LaunchTemplateSpecification]
var modifiedAt: js.UndefOr[Timestamp]
var nodeRole: js.UndefOr[String]
var nodegroupArn: js.UndefOr[String]
var nodegroupName: js.UndefOr[String]
var releaseVersion: js.UndefOr[String]
var remoteAccess: js.UndefOr[RemoteAccessConfig]
var resources: js.UndefOr[NodegroupResources]
var scalingConfig: js.UndefOr[NodegroupScalingConfig]
var status: js.UndefOr[NodegroupStatus]
var subnets: js.UndefOr[StringList]
var tags: js.UndefOr[TagMap]
var version: js.UndefOr[String]
}
object Nodegroup {
@inline
def apply(
amiType: js.UndefOr[AMITypes] = js.undefined,
capacityType: js.UndefOr[CapacityTypes] = js.undefined,
clusterName: js.UndefOr[String] = js.undefined,
createdAt: js.UndefOr[Timestamp] = js.undefined,
diskSize: js.UndefOr[BoxedInteger] = js.undefined,
health: js.UndefOr[NodegroupHealth] = js.undefined,
instanceTypes: js.UndefOr[StringList] = js.undefined,
labels: js.UndefOr[labelsMap] = js.undefined,
launchTemplate: js.UndefOr[LaunchTemplateSpecification] = js.undefined,
modifiedAt: js.UndefOr[Timestamp] = js.undefined,
nodeRole: js.UndefOr[String] = js.undefined,
nodegroupArn: js.UndefOr[String] = js.undefined,
nodegroupName: js.UndefOr[String] = js.undefined,
releaseVersion: js.UndefOr[String] = js.undefined,
remoteAccess: js.UndefOr[RemoteAccessConfig] = js.undefined,
resources: js.UndefOr[NodegroupResources] = js.undefined,
scalingConfig: js.UndefOr[NodegroupScalingConfig] = js.undefined,
status: js.UndefOr[NodegroupStatus] = js.undefined,
subnets: js.UndefOr[StringList] = js.undefined,
tags: js.UndefOr[TagMap] = js.undefined,
version: js.UndefOr[String] = js.undefined
): Nodegroup = {
val __obj = js.Dynamic.literal()
amiType.foreach(__v => __obj.updateDynamic("amiType")(__v.asInstanceOf[js.Any]))
capacityType.foreach(__v => __obj.updateDynamic("capacityType")(__v.asInstanceOf[js.Any]))
clusterName.foreach(__v => __obj.updateDynamic("clusterName")(__v.asInstanceOf[js.Any]))
createdAt.foreach(__v => __obj.updateDynamic("createdAt")(__v.asInstanceOf[js.Any]))
diskSize.foreach(__v => __obj.updateDynamic("diskSize")(__v.asInstanceOf[js.Any]))
health.foreach(__v => __obj.updateDynamic("health")(__v.asInstanceOf[js.Any]))
instanceTypes.foreach(__v => __obj.updateDynamic("instanceTypes")(__v.asInstanceOf[js.Any]))
labels.foreach(__v => __obj.updateDynamic("labels")(__v.asInstanceOf[js.Any]))
launchTemplate.foreach(__v => __obj.updateDynamic("launchTemplate")(__v.asInstanceOf[js.Any]))
modifiedAt.foreach(__v => __obj.updateDynamic("modifiedAt")(__v.asInstanceOf[js.Any]))
nodeRole.foreach(__v => __obj.updateDynamic("nodeRole")(__v.asInstanceOf[js.Any]))
nodegroupArn.foreach(__v => __obj.updateDynamic("nodegroupArn")(__v.asInstanceOf[js.Any]))
nodegroupName.foreach(__v => __obj.updateDynamic("nodegroupName")(__v.asInstanceOf[js.Any]))
releaseVersion.foreach(__v => __obj.updateDynamic("releaseVersion")(__v.asInstanceOf[js.Any]))
remoteAccess.foreach(__v => __obj.updateDynamic("remoteAccess")(__v.asInstanceOf[js.Any]))
resources.foreach(__v => __obj.updateDynamic("resources")(__v.asInstanceOf[js.Any]))
scalingConfig.foreach(__v => __obj.updateDynamic("scalingConfig")(__v.asInstanceOf[js.Any]))
status.foreach(__v => __obj.updateDynamic("status")(__v.asInstanceOf[js.Any]))
subnets.foreach(__v => __obj.updateDynamic("subnets")(__v.asInstanceOf[js.Any]))
tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
version.foreach(__v => __obj.updateDynamic("version")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Nodegroup]
}
}
/** An object representing the health status of the node group.
*/
@js.native
trait NodegroupHealth extends js.Object {
var issues: js.UndefOr[IssueList]
}
object NodegroupHealth {
@inline
def apply(
issues: js.UndefOr[IssueList] = js.undefined
): NodegroupHealth = {
val __obj = js.Dynamic.literal()
issues.foreach(__v => __obj.updateDynamic("issues")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NodegroupHealth]
}
}
  // String-backed enum of node group issue codes (used in NodegroupHealth issues).
  @js.native
  sealed trait NodegroupIssueCode extends js.Any
  object NodegroupIssueCode {
    val AutoScalingGroupNotFound = "AutoScalingGroupNotFound".asInstanceOf[NodegroupIssueCode]
    val AutoScalingGroupInvalidConfiguration = "AutoScalingGroupInvalidConfiguration".asInstanceOf[NodegroupIssueCode]
    val Ec2SecurityGroupNotFound = "Ec2SecurityGroupNotFound".asInstanceOf[NodegroupIssueCode]
    val Ec2SecurityGroupDeletionFailure = "Ec2SecurityGroupDeletionFailure".asInstanceOf[NodegroupIssueCode]
    val Ec2LaunchTemplateNotFound = "Ec2LaunchTemplateNotFound".asInstanceOf[NodegroupIssueCode]
    val Ec2LaunchTemplateVersionMismatch = "Ec2LaunchTemplateVersionMismatch".asInstanceOf[NodegroupIssueCode]
    val Ec2SubnetNotFound = "Ec2SubnetNotFound".asInstanceOf[NodegroupIssueCode]
    val Ec2SubnetInvalidConfiguration = "Ec2SubnetInvalidConfiguration".asInstanceOf[NodegroupIssueCode]
    val IamInstanceProfileNotFound = "IamInstanceProfileNotFound".asInstanceOf[NodegroupIssueCode]
    val IamLimitExceeded = "IamLimitExceeded".asInstanceOf[NodegroupIssueCode]
    val IamNodeRoleNotFound = "IamNodeRoleNotFound".asInstanceOf[NodegroupIssueCode]
    val NodeCreationFailure = "NodeCreationFailure".asInstanceOf[NodegroupIssueCode]
    val AsgInstanceLaunchFailures = "AsgInstanceLaunchFailures".asInstanceOf[NodegroupIssueCode]
    val InstanceLimitExceeded = "InstanceLimitExceeded".asInstanceOf[NodegroupIssueCode]
    val InsufficientFreeAddresses = "InsufficientFreeAddresses".asInstanceOf[NodegroupIssueCode]
    val AccessDenied = "AccessDenied".asInstanceOf[NodegroupIssueCode]
    val InternalFailure = "InternalFailure".asInstanceOf[NodegroupIssueCode]
    val ClusterUnreachable = "ClusterUnreachable".asInstanceOf[NodegroupIssueCode]
    // All known values, in declaration order.
    @inline def values = js.Array(
      AutoScalingGroupNotFound,
      AutoScalingGroupInvalidConfiguration,
      Ec2SecurityGroupNotFound,
      Ec2SecurityGroupDeletionFailure,
      Ec2LaunchTemplateNotFound,
      Ec2LaunchTemplateVersionMismatch,
      Ec2SubnetNotFound,
      Ec2SubnetInvalidConfiguration,
      IamInstanceProfileNotFound,
      IamLimitExceeded,
      IamNodeRoleNotFound,
      NodeCreationFailure,
      AsgInstanceLaunchFailures,
      InstanceLimitExceeded,
      InsufficientFreeAddresses,
      AccessDenied,
      InternalFailure,
      ClusterUnreachable
    )
  }
/** An object representing the resources associated with the node group, such as Auto Scaling groups and security groups for remote access.
*/
@js.native
trait NodegroupResources extends js.Object {
var autoScalingGroups: js.UndefOr[AutoScalingGroupList]
var remoteAccessSecurityGroup: js.UndefOr[String]
}
object NodegroupResources {
@inline
def apply(
autoScalingGroups: js.UndefOr[AutoScalingGroupList] = js.undefined,
remoteAccessSecurityGroup: js.UndefOr[String] = js.undefined
): NodegroupResources = {
val __obj = js.Dynamic.literal()
autoScalingGroups.foreach(__v => __obj.updateDynamic("autoScalingGroups")(__v.asInstanceOf[js.Any]))
remoteAccessSecurityGroup.foreach(__v => __obj.updateDynamic("remoteAccessSecurityGroup")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NodegroupResources]
}
}
/** An object representing the scaling configuration details for the Auto Scaling group that is associated with your node group. If you specify a value for any property, then you must specify values for all of the properties.
*/
@js.native
trait NodegroupScalingConfig extends js.Object {
var desiredSize: js.UndefOr[Capacity]
var maxSize: js.UndefOr[Capacity]
var minSize: js.UndefOr[Capacity]
}
object NodegroupScalingConfig {
@inline
def apply(
desiredSize: js.UndefOr[Capacity] = js.undefined,
maxSize: js.UndefOr[Capacity] = js.undefined,
minSize: js.UndefOr[Capacity] = js.undefined
): NodegroupScalingConfig = {
val __obj = js.Dynamic.literal()
desiredSize.foreach(__v => __obj.updateDynamic("desiredSize")(__v.asInstanceOf[js.Any]))
maxSize.foreach(__v => __obj.updateDynamic("maxSize")(__v.asInstanceOf[js.Any]))
minSize.foreach(__v => __obj.updateDynamic("minSize")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NodegroupScalingConfig]
}
}
  // String-backed enum of node group lifecycle states.
  @js.native
  sealed trait NodegroupStatus extends js.Any
  object NodegroupStatus {
    val CREATING = "CREATING".asInstanceOf[NodegroupStatus]
    val ACTIVE = "ACTIVE".asInstanceOf[NodegroupStatus]
    val UPDATING = "UPDATING".asInstanceOf[NodegroupStatus]
    val DELETING = "DELETING".asInstanceOf[NodegroupStatus]
    val CREATE_FAILED = "CREATE_FAILED".asInstanceOf[NodegroupStatus]
    val DELETE_FAILED = "DELETE_FAILED".asInstanceOf[NodegroupStatus]
    val DEGRADED = "DEGRADED".asInstanceOf[NodegroupStatus]
    // All known values, in declaration order.
    @inline def values = js.Array(CREATING, ACTIVE, UPDATING, DELETING, CREATE_FAILED, DELETE_FAILED, DEGRADED)
  }
/** An object representing the [[https://openid.net/connect/|OpenID Connect]] (OIDC) identity provider information for the cluster.
*/
@js.native
trait OIDC extends js.Object {
var issuer: js.UndefOr[String]
}
object OIDC {
@inline
def apply(
issuer: js.UndefOr[String] = js.undefined
): OIDC = {
val __obj = js.Dynamic.literal()
issuer.foreach(__v => __obj.updateDynamic("issuer")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[OIDC]
}
}
/** An object that represents the configuration for an OpenID Connect (OIDC) identity provider.
*/
@js.native
trait OidcIdentityProviderConfig extends js.Object {
var clientId: js.UndefOr[String]
var clusterName: js.UndefOr[String]
var groupsClaim: js.UndefOr[String]
var groupsPrefix: js.UndefOr[String]
var identityProviderConfigArn: js.UndefOr[String]
var identityProviderConfigName: js.UndefOr[String]
var issuerUrl: js.UndefOr[String]
var requiredClaims: js.UndefOr[requiredClaimsMap]
var status: js.UndefOr[configStatus]
var tags: js.UndefOr[TagMap]
var usernameClaim: js.UndefOr[String]
var usernamePrefix: js.UndefOr[String]
}
object OidcIdentityProviderConfig {
@inline
def apply(
clientId: js.UndefOr[String] = js.undefined,
clusterName: js.UndefOr[String] = js.undefined,
groupsClaim: js.UndefOr[String] = js.undefined,
groupsPrefix: js.UndefOr[String] = js.undefined,
identityProviderConfigArn: js.UndefOr[String] = js.undefined,
identityProviderConfigName: js.UndefOr[String] = js.undefined,
issuerUrl: js.UndefOr[String] = js.undefined,
requiredClaims: js.UndefOr[requiredClaimsMap] = js.undefined,
status: js.UndefOr[configStatus] = js.undefined,
tags: js.UndefOr[TagMap] = js.undefined,
usernameClaim: js.UndefOr[String] = js.undefined,
usernamePrefix: js.UndefOr[String] = js.undefined
): OidcIdentityProviderConfig = {
val __obj = js.Dynamic.literal()
clientId.foreach(__v => __obj.updateDynamic("clientId")(__v.asInstanceOf[js.Any]))
clusterName.foreach(__v => __obj.updateDynamic("clusterName")(__v.asInstanceOf[js.Any]))
groupsClaim.foreach(__v => __obj.updateDynamic("groupsClaim")(__v.asInstanceOf[js.Any]))
groupsPrefix.foreach(__v => __obj.updateDynamic("groupsPrefix")(__v.asInstanceOf[js.Any]))
identityProviderConfigArn.foreach(__v => __obj.updateDynamic("identityProviderConfigArn")(__v.asInstanceOf[js.Any]))
identityProviderConfigName.foreach(__v => __obj.updateDynamic("identityProviderConfigName")(__v.asInstanceOf[js.Any]))
issuerUrl.foreach(__v => __obj.updateDynamic("issuerUrl")(__v.asInstanceOf[js.Any]))
requiredClaims.foreach(__v => __obj.updateDynamic("requiredClaims")(__v.asInstanceOf[js.Any]))
status.foreach(__v => __obj.updateDynamic("status")(__v.asInstanceOf[js.Any]))
tags.foreach(__v => __obj.updateDynamic("tags")(__v.asInstanceOf[js.Any]))
usernameClaim.foreach(__v => __obj.updateDynamic("usernameClaim")(__v.asInstanceOf[js.Any]))
usernamePrefix.foreach(__v => __obj.updateDynamic("usernamePrefix")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[OidcIdentityProviderConfig]
}
}
/** An object representing an OpenID Connect (OIDC) configuration. Before associating an OIDC identity provider to your cluster, review the considerations in [[https://docs.aws.amazon.com/eks/latest/userguide/authenticate-oidc-identity-provider.html|Authenticating users for your cluster from an OpenID Connect identity provider]] in the <i>Amazon EKS User Guide</i>.
*/
@js.native
trait OidcIdentityProviderConfigRequest extends js.Object {
var clientId: String
var identityProviderConfigName: String
var issuerUrl: String
var groupsClaim: js.UndefOr[String]
var groupsPrefix: js.UndefOr[String]
var requiredClaims: js.UndefOr[requiredClaimsMap]
var usernameClaim: js.UndefOr[String]
var usernamePrefix: js.UndefOr[String]
}
object OidcIdentityProviderConfigRequest {
@inline
def apply(
clientId: String,
identityProviderConfigName: String,
issuerUrl: String,
groupsClaim: js.UndefOr[String] = js.undefined,
groupsPrefix: js.UndefOr[String] = js.undefined,
requiredClaims: js.UndefOr[requiredClaimsMap] = js.undefined,
usernameClaim: js.UndefOr[String] = js.undefined,
usernamePrefix: js.UndefOr[String] = js.undefined
): OidcIdentityProviderConfigRequest = {
val __obj = js.Dynamic.literal(
"clientId" -> clientId.asInstanceOf[js.Any],
"identityProviderConfigName" -> identityProviderConfigName.asInstanceOf[js.Any],
"issuerUrl" -> issuerUrl.asInstanceOf[js.Any]
)
groupsClaim.foreach(__v => __obj.updateDynamic("groupsClaim")(__v.asInstanceOf[js.Any]))
groupsPrefix.foreach(__v => __obj.updateDynamic("groupsPrefix")(__v.asInstanceOf[js.Any]))
requiredClaims.foreach(__v => __obj.updateDynamic("requiredClaims")(__v.asInstanceOf[js.Any]))
usernameClaim.foreach(__v => __obj.updateDynamic("usernameClaim")(__v.asInstanceOf[js.Any]))
usernamePrefix.foreach(__v => __obj.updateDynamic("usernamePrefix")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[OidcIdentityProviderConfigRequest]
}
}
/** Identifies the AWS Key Management Service (AWS KMS) customer master key (CMK) used to encrypt the secrets.
*/
@js.native
trait Provider extends js.Object {
var keyArn: js.UndefOr[String]
}
object Provider {
@inline
def apply(
keyArn: js.UndefOr[String] = js.undefined
): Provider = {
val __obj = js.Dynamic.literal()
keyArn.foreach(__v => __obj.updateDynamic("keyArn")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Provider]
}
}
/** An object representing the remote access configuration for the managed node group.
*/
@js.native
trait RemoteAccessConfig extends js.Object {
var ec2SshKey: js.UndefOr[String]
var sourceSecurityGroups: js.UndefOr[StringList]
}
object RemoteAccessConfig {
@inline
def apply(
ec2SshKey: js.UndefOr[String] = js.undefined,
sourceSecurityGroups: js.UndefOr[StringList] = js.undefined
): RemoteAccessConfig = {
val __obj = js.Dynamic.literal()
ec2SshKey.foreach(__v => __obj.updateDynamic("ec2SshKey")(__v.asInstanceOf[js.Any]))
sourceSecurityGroups.foreach(__v => __obj.updateDynamic("sourceSecurityGroups")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[RemoteAccessConfig]
}
}
  // String-backed enum for the add-on conflict-resolution strategy.
  @js.native
  sealed trait ResolveConflicts extends js.Any
  object ResolveConflicts {
    val OVERWRITE = "OVERWRITE".asInstanceOf[ResolveConflicts]
    val NONE = "NONE".asInstanceOf[ResolveConflicts]
    // All known values, in declaration order.
    @inline def values = js.Array(OVERWRITE, NONE)
  }
@js.native
trait TagResourceRequest extends js.Object {
var resourceArn: String
var tags: TagMap
}
object TagResourceRequest {
@inline
def apply(
resourceArn: String,
tags: TagMap
): TagResourceRequest = {
val __obj = js.Dynamic.literal(
"resourceArn" -> resourceArn.asInstanceOf[js.Any],
"tags" -> tags.asInstanceOf[js.Any]
)
__obj.asInstanceOf[TagResourceRequest]
}
}
@js.native
trait TagResourceResponse extends js.Object
object TagResourceResponse {
@inline
def apply(): TagResourceResponse = {
val __obj = js.Dynamic.literal()
__obj.asInstanceOf[TagResourceResponse]
}
}
@js.native
trait UntagResourceRequest extends js.Object {
var resourceArn: String
var tagKeys: TagKeyList
}
object UntagResourceRequest {
@inline
def apply(
resourceArn: String,
tagKeys: TagKeyList
): UntagResourceRequest = {
val __obj = js.Dynamic.literal(
"resourceArn" -> resourceArn.asInstanceOf[js.Any],
"tagKeys" -> tagKeys.asInstanceOf[js.Any]
)
__obj.asInstanceOf[UntagResourceRequest]
}
}
@js.native
trait UntagResourceResponse extends js.Object
object UntagResourceResponse {
@inline
def apply(): UntagResourceResponse = {
val __obj = js.Dynamic.literal()
__obj.asInstanceOf[UntagResourceResponse]
}
}
/** An object representing an asynchronous update.
*/
@js.native
trait Update extends js.Object {
var createdAt: js.UndefOr[Timestamp]
var errors: js.UndefOr[ErrorDetails]
var id: js.UndefOr[String]
var params: js.UndefOr[UpdateParams]
var status: js.UndefOr[UpdateStatus]
var `type`: js.UndefOr[UpdateType]
}
object Update {
@inline
def apply(
createdAt: js.UndefOr[Timestamp] = js.undefined,
errors: js.UndefOr[ErrorDetails] = js.undefined,
id: js.UndefOr[String] = js.undefined,
params: js.UndefOr[UpdateParams] = js.undefined,
status: js.UndefOr[UpdateStatus] = js.undefined,
`type`: js.UndefOr[UpdateType] = js.undefined
): Update = {
val __obj = js.Dynamic.literal()
createdAt.foreach(__v => __obj.updateDynamic("createdAt")(__v.asInstanceOf[js.Any]))
errors.foreach(__v => __obj.updateDynamic("errors")(__v.asInstanceOf[js.Any]))
id.foreach(__v => __obj.updateDynamic("id")(__v.asInstanceOf[js.Any]))
params.foreach(__v => __obj.updateDynamic("params")(__v.asInstanceOf[js.Any]))
status.foreach(__v => __obj.updateDynamic("status")(__v.asInstanceOf[js.Any]))
`type`.foreach(__v => __obj.updateDynamic("type")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Update]
}
}
@js.native
trait UpdateAddonRequest extends js.Object {
var addonName: String
var clusterName: ClusterName
var addonVersion: js.UndefOr[String]
var clientRequestToken: js.UndefOr[String]
var resolveConflicts: js.UndefOr[ResolveConflicts]
var serviceAccountRoleArn: js.UndefOr[RoleArn]
}
object UpdateAddonRequest {
@inline
def apply(
addonName: String,
clusterName: ClusterName,
addonVersion: js.UndefOr[String] = js.undefined,
clientRequestToken: js.UndefOr[String] = js.undefined,
resolveConflicts: js.UndefOr[ResolveConflicts] = js.undefined,
serviceAccountRoleArn: js.UndefOr[RoleArn] = js.undefined
): UpdateAddonRequest = {
val __obj = js.Dynamic.literal(
"addonName" -> addonName.asInstanceOf[js.Any],
"clusterName" -> clusterName.asInstanceOf[js.Any]
)
addonVersion.foreach(__v => __obj.updateDynamic("addonVersion")(__v.asInstanceOf[js.Any]))
clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
resolveConflicts.foreach(__v => __obj.updateDynamic("resolveConflicts")(__v.asInstanceOf[js.Any]))
serviceAccountRoleArn.foreach(__v => __obj.updateDynamic("serviceAccountRoleArn")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateAddonRequest]
}
}
@js.native
trait UpdateAddonResponse extends js.Object {
var update: js.UndefOr[Update]
}
object UpdateAddonResponse {
@inline
def apply(
update: js.UndefOr[Update] = js.undefined
): UpdateAddonResponse = {
val __obj = js.Dynamic.literal()
update.foreach(__v => __obj.updateDynamic("update")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateAddonResponse]
}
}
@js.native
trait UpdateClusterConfigRequest extends js.Object {
var name: String
var clientRequestToken: js.UndefOr[String]
var logging: js.UndefOr[Logging]
var resourcesVpcConfig: js.UndefOr[VpcConfigRequest]
}
object UpdateClusterConfigRequest {
@inline
def apply(
name: String,
clientRequestToken: js.UndefOr[String] = js.undefined,
logging: js.UndefOr[Logging] = js.undefined,
resourcesVpcConfig: js.UndefOr[VpcConfigRequest] = js.undefined
): UpdateClusterConfigRequest = {
val __obj = js.Dynamic.literal(
"name" -> name.asInstanceOf[js.Any]
)
clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
logging.foreach(__v => __obj.updateDynamic("logging")(__v.asInstanceOf[js.Any]))
resourcesVpcConfig.foreach(__v => __obj.updateDynamic("resourcesVpcConfig")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateClusterConfigRequest]
}
}
@js.native
trait UpdateClusterConfigResponse extends js.Object {
var update: js.UndefOr[Update]
}
object UpdateClusterConfigResponse {
@inline
def apply(
update: js.UndefOr[Update] = js.undefined
): UpdateClusterConfigResponse = {
val __obj = js.Dynamic.literal()
update.foreach(__v => __obj.updateDynamic("update")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateClusterConfigResponse]
}
}
@js.native
trait UpdateClusterVersionRequest extends js.Object {
var name: String
var version: String
var clientRequestToken: js.UndefOr[String]
}
object UpdateClusterVersionRequest {
@inline
def apply(
name: String,
version: String,
clientRequestToken: js.UndefOr[String] = js.undefined
): UpdateClusterVersionRequest = {
val __obj = js.Dynamic.literal(
"name" -> name.asInstanceOf[js.Any],
"version" -> version.asInstanceOf[js.Any]
)
clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateClusterVersionRequest]
}
}
@js.native
trait UpdateClusterVersionResponse extends js.Object {
var update: js.UndefOr[Update]
}
object UpdateClusterVersionResponse {
@inline
def apply(
update: js.UndefOr[Update] = js.undefined
): UpdateClusterVersionResponse = {
val __obj = js.Dynamic.literal()
update.foreach(__v => __obj.updateDynamic("update")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateClusterVersionResponse]
}
}
/** An object representing a Kubernetes label change for a managed node group.
*/
@js.native
trait UpdateLabelsPayload extends js.Object {
var addOrUpdateLabels: js.UndefOr[labelsMap]
var removeLabels: js.UndefOr[labelsKeyList]
}
object UpdateLabelsPayload {
@inline
def apply(
addOrUpdateLabels: js.UndefOr[labelsMap] = js.undefined,
removeLabels: js.UndefOr[labelsKeyList] = js.undefined
): UpdateLabelsPayload = {
val __obj = js.Dynamic.literal()
addOrUpdateLabels.foreach(__v => __obj.updateDynamic("addOrUpdateLabels")(__v.asInstanceOf[js.Any]))
removeLabels.foreach(__v => __obj.updateDynamic("removeLabels")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateLabelsPayload]
}
}
@js.native
trait UpdateNodegroupConfigRequest extends js.Object {
var clusterName: String
var nodegroupName: String
var clientRequestToken: js.UndefOr[String]
var labels: js.UndefOr[UpdateLabelsPayload]
var scalingConfig: js.UndefOr[NodegroupScalingConfig]
}
object UpdateNodegroupConfigRequest {
@inline
def apply(
clusterName: String,
nodegroupName: String,
clientRequestToken: js.UndefOr[String] = js.undefined,
labels: js.UndefOr[UpdateLabelsPayload] = js.undefined,
scalingConfig: js.UndefOr[NodegroupScalingConfig] = js.undefined
): UpdateNodegroupConfigRequest = {
val __obj = js.Dynamic.literal(
"clusterName" -> clusterName.asInstanceOf[js.Any],
"nodegroupName" -> nodegroupName.asInstanceOf[js.Any]
)
clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
labels.foreach(__v => __obj.updateDynamic("labels")(__v.asInstanceOf[js.Any]))
scalingConfig.foreach(__v => __obj.updateDynamic("scalingConfig")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateNodegroupConfigRequest]
}
}
@js.native
trait UpdateNodegroupConfigResponse extends js.Object {
var update: js.UndefOr[Update]
}
object UpdateNodegroupConfigResponse {
@inline
def apply(
update: js.UndefOr[Update] = js.undefined
): UpdateNodegroupConfigResponse = {
val __obj = js.Dynamic.literal()
update.foreach(__v => __obj.updateDynamic("update")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateNodegroupConfigResponse]
}
}
@js.native
trait UpdateNodegroupVersionRequest extends js.Object {
var clusterName: String
var nodegroupName: String
var clientRequestToken: js.UndefOr[String]
var force: js.UndefOr[Boolean]
var launchTemplate: js.UndefOr[LaunchTemplateSpecification]
var releaseVersion: js.UndefOr[String]
var version: js.UndefOr[String]
}
object UpdateNodegroupVersionRequest {
@inline
def apply(
clusterName: String,
nodegroupName: String,
clientRequestToken: js.UndefOr[String] = js.undefined,
force: js.UndefOr[Boolean] = js.undefined,
launchTemplate: js.UndefOr[LaunchTemplateSpecification] = js.undefined,
releaseVersion: js.UndefOr[String] = js.undefined,
version: js.UndefOr[String] = js.undefined
): UpdateNodegroupVersionRequest = {
val __obj = js.Dynamic.literal(
"clusterName" -> clusterName.asInstanceOf[js.Any],
"nodegroupName" -> nodegroupName.asInstanceOf[js.Any]
)
clientRequestToken.foreach(__v => __obj.updateDynamic("clientRequestToken")(__v.asInstanceOf[js.Any]))
force.foreach(__v => __obj.updateDynamic("force")(__v.asInstanceOf[js.Any]))
launchTemplate.foreach(__v => __obj.updateDynamic("launchTemplate")(__v.asInstanceOf[js.Any]))
releaseVersion.foreach(__v => __obj.updateDynamic("releaseVersion")(__v.asInstanceOf[js.Any]))
version.foreach(__v => __obj.updateDynamic("version")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateNodegroupVersionRequest]
}
}
@js.native
trait UpdateNodegroupVersionResponse extends js.Object {
var update: js.UndefOr[Update]
}
object UpdateNodegroupVersionResponse {
@inline
def apply(
update: js.UndefOr[Update] = js.undefined
): UpdateNodegroupVersionResponse = {
val __obj = js.Dynamic.literal()
update.foreach(__v => __obj.updateDynamic("update")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateNodegroupVersionResponse]
}
}
/** An object representing the details of an update request.
*/
@js.native
trait UpdateParam extends js.Object {
var `type`: js.UndefOr[UpdateParamType]
var value: js.UndefOr[String]
}
object UpdateParam {
@inline
def apply(
`type`: js.UndefOr[UpdateParamType] = js.undefined,
value: js.UndefOr[String] = js.undefined
): UpdateParam = {
val __obj = js.Dynamic.literal()
`type`.foreach(__v => __obj.updateDynamic("type")(__v.asInstanceOf[js.Any]))
value.foreach(__v => __obj.updateDynamic("value")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateParam]
}
}
  // String-backed enum of parameter kinds carried by an UpdateParam.
  @js.native
  sealed trait UpdateParamType extends js.Any
  object UpdateParamType {
    val Version = "Version".asInstanceOf[UpdateParamType]
    val PlatformVersion = "PlatformVersion".asInstanceOf[UpdateParamType]
    val EndpointPrivateAccess = "EndpointPrivateAccess".asInstanceOf[UpdateParamType]
    val EndpointPublicAccess = "EndpointPublicAccess".asInstanceOf[UpdateParamType]
    val ClusterLogging = "ClusterLogging".asInstanceOf[UpdateParamType]
    val DesiredSize = "DesiredSize".asInstanceOf[UpdateParamType]
    val LabelsToAdd = "LabelsToAdd".asInstanceOf[UpdateParamType]
    val LabelsToRemove = "LabelsToRemove".asInstanceOf[UpdateParamType]
    val MaxSize = "MaxSize".asInstanceOf[UpdateParamType]
    val MinSize = "MinSize".asInstanceOf[UpdateParamType]
    val ReleaseVersion = "ReleaseVersion".asInstanceOf[UpdateParamType]
    val PublicAccessCidrs = "PublicAccessCidrs".asInstanceOf[UpdateParamType]
    val IdentityProviderConfig = "IdentityProviderConfig".asInstanceOf[UpdateParamType]
    val EncryptionConfig = "EncryptionConfig".asInstanceOf[UpdateParamType]
    val AddonVersion = "AddonVersion".asInstanceOf[UpdateParamType]
    val ServiceAccountRoleArn = "ServiceAccountRoleArn".asInstanceOf[UpdateParamType]
    val ResolveConflicts = "ResolveConflicts".asInstanceOf[UpdateParamType]
    // All known values, in declaration order.
    @inline def values = js.Array(
      Version,
      PlatformVersion,
      EndpointPrivateAccess,
      EndpointPublicAccess,
      ClusterLogging,
      DesiredSize,
      LabelsToAdd,
      LabelsToRemove,
      MaxSize,
      MinSize,
      ReleaseVersion,
      PublicAccessCidrs,
      IdentityProviderConfig,
      EncryptionConfig,
      AddonVersion,
      ServiceAccountRoleArn,
      ResolveConflicts
    )
  }
  // String-backed enum of terminal and in-flight states for an Update.
  @js.native
  sealed trait UpdateStatus extends js.Any
  object UpdateStatus {
    val InProgress = "InProgress".asInstanceOf[UpdateStatus]
    val Failed = "Failed".asInstanceOf[UpdateStatus]
    val Cancelled = "Cancelled".asInstanceOf[UpdateStatus]
    val Successful = "Successful".asInstanceOf[UpdateStatus]
    // All known values, in declaration order.
    @inline def values = js.Array(InProgress, Failed, Cancelled, Successful)
  }
  // String-backed enum of update kinds reported in Update.`type`.
  @js.native
  sealed trait UpdateType extends js.Any
  object UpdateType {
    val VersionUpdate = "VersionUpdate".asInstanceOf[UpdateType]
    val EndpointAccessUpdate = "EndpointAccessUpdate".asInstanceOf[UpdateType]
    val LoggingUpdate = "LoggingUpdate".asInstanceOf[UpdateType]
    val ConfigUpdate = "ConfigUpdate".asInstanceOf[UpdateType]
    val AssociateIdentityProviderConfig = "AssociateIdentityProviderConfig".asInstanceOf[UpdateType]
    val DisassociateIdentityProviderConfig = "DisassociateIdentityProviderConfig".asInstanceOf[UpdateType]
    val AssociateEncryptionConfig = "AssociateEncryptionConfig".asInstanceOf[UpdateType]
    val AddonUpdate = "AddonUpdate".asInstanceOf[UpdateType]
    // All known values, in declaration order.
    @inline def values = js.Array(
      VersionUpdate,
      EndpointAccessUpdate,
      LoggingUpdate,
      ConfigUpdate,
      AssociateIdentityProviderConfig,
      DisassociateIdentityProviderConfig,
      AssociateEncryptionConfig,
      AddonUpdate
    )
  }
/** An object representing the VPC configuration to use for an Amazon EKS cluster.
  */
@js.native
trait VpcConfigRequest extends js.Object {
  var endpointPrivateAccess: js.UndefOr[BoxedBoolean]
  var endpointPublicAccess: js.UndefOr[BoxedBoolean]
  var publicAccessCidrs: js.UndefOr[StringList]
  var securityGroupIds: js.UndefOr[StringList]
  var subnetIds: js.UndefOr[StringList]
}

object VpcConfigRequest {
  /** Factory that copies only the fields that are actually defined onto a
    * fresh JS object literal, so absent options are omitted from the request.
    */
  @inline
  def apply(
      endpointPrivateAccess: js.UndefOr[BoxedBoolean] = js.undefined,
      endpointPublicAccess: js.UndefOr[BoxedBoolean] = js.undefined,
      publicAccessCidrs: js.UndefOr[StringList] = js.undefined,
      securityGroupIds: js.UndefOr[StringList] = js.undefined,
      subnetIds: js.UndefOr[StringList] = js.undefined
  ): VpcConfigRequest = {
    val __obj = js.Dynamic.literal()
    endpointPrivateAccess.foreach(__v => __obj.updateDynamic("endpointPrivateAccess")(__v.asInstanceOf[js.Any]))
    endpointPublicAccess.foreach(__v => __obj.updateDynamic("endpointPublicAccess")(__v.asInstanceOf[js.Any]))
    publicAccessCidrs.foreach(__v => __obj.updateDynamic("publicAccessCidrs")(__v.asInstanceOf[js.Any]))
    securityGroupIds.foreach(__v => __obj.updateDynamic("securityGroupIds")(__v.asInstanceOf[js.Any]))
    subnetIds.foreach(__v => __obj.updateDynamic("subnetIds")(__v.asInstanceOf[js.Any]))
    __obj.asInstanceOf[VpcConfigRequest]
  }
}
/** An object representing an Amazon EKS cluster VPC configuration response.
  */
@js.native
trait VpcConfigResponse extends js.Object {
  var clusterSecurityGroupId: js.UndefOr[String]
  var endpointPrivateAccess: js.UndefOr[Boolean]
  var endpointPublicAccess: js.UndefOr[Boolean]
  var publicAccessCidrs: js.UndefOr[StringList]
  var securityGroupIds: js.UndefOr[StringList]
  var subnetIds: js.UndefOr[StringList]
  var vpcId: js.UndefOr[String]
}

object VpcConfigResponse {
  /** Factory that copies only the fields that are actually defined onto a
    * fresh JS object literal, mirroring the service's optional response shape.
    */
  @inline
  def apply(
      clusterSecurityGroupId: js.UndefOr[String] = js.undefined,
      endpointPrivateAccess: js.UndefOr[Boolean] = js.undefined,
      endpointPublicAccess: js.UndefOr[Boolean] = js.undefined,
      publicAccessCidrs: js.UndefOr[StringList] = js.undefined,
      securityGroupIds: js.UndefOr[StringList] = js.undefined,
      subnetIds: js.UndefOr[StringList] = js.undefined,
      vpcId: js.UndefOr[String] = js.undefined
  ): VpcConfigResponse = {
    val __obj = js.Dynamic.literal()
    clusterSecurityGroupId.foreach(__v => __obj.updateDynamic("clusterSecurityGroupId")(__v.asInstanceOf[js.Any]))
    endpointPrivateAccess.foreach(__v => __obj.updateDynamic("endpointPrivateAccess")(__v.asInstanceOf[js.Any]))
    endpointPublicAccess.foreach(__v => __obj.updateDynamic("endpointPublicAccess")(__v.asInstanceOf[js.Any]))
    publicAccessCidrs.foreach(__v => __obj.updateDynamic("publicAccessCidrs")(__v.asInstanceOf[js.Any]))
    securityGroupIds.foreach(__v => __obj.updateDynamic("securityGroupIds")(__v.asInstanceOf[js.Any]))
    subnetIds.foreach(__v => __obj.updateDynamic("subnetIds")(__v.asInstanceOf[js.Any]))
    vpcId.foreach(__v => __obj.updateDynamic("vpcId")(__v.asInstanceOf[js.Any]))
    __obj.asInstanceOf[VpcConfigResponse]
  }
}
/** Lifecycle states (CREATING / DELETING / ACTIVE) of a configuration resource.
  * NOTE(review): which resource this applies to (likely the identity provider
  * config, given the surrounding types) is not visible here — confirm.
  */
@js.native
sealed trait configStatus extends js.Any

object configStatus {
  val CREATING = "CREATING".asInstanceOf[configStatus]
  val DELETING = "DELETING".asInstanceOf[configStatus]
  val ACTIVE = "ACTIVE".asInstanceOf[configStatus]

  /** All declared values, for iteration and validation. */
  @inline def values = js.Array(CREATING, DELETING, ACTIVE)
}
}
|
package workspace_th.day06.fileEx;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
public class CopyEx {
    /**
     * Copies the source file to the destination path using buffered byte streams.
     *
     * Fixes over the original:
     * - the original opened the four streams and immediately closed them in the
     *   finally block without ever transferring a single byte; a read/write
     *   loop now performs the actual copy;
     * - the original's finally block called close() on references that remain
     *   null whenever a stream constructor throws (e.g. source file missing),
     *   causing a NullPointerException; try-with-resources closes only the
     *   streams that were actually opened, in the right order.
     */
    public static void main(String[] args) throws IOException {
        File src = new File("C:\\알고리즘\\heoolo.txt");
        File dist = new File("C:\\Users\\mwe22\\OneDrive - 서울과학기술대학교\\2021-2학기");
        // Buffered streams avoid a syscall per byte on the underlying file streams.
        try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(src));
             BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(dist))) {
            byte[] buffer = new byte[8192];
            int length;
            while ((length = bis.read(buffer)) != -1) {
                bos.write(buffer, 0, length);
            }
        } catch (Exception e) {
            System.out.println("파일 복사 오류 발생!");
        }
    }
}
|
<reponame>Bedenke/dank-web
"use strict";
// Compiled (tsc) interop helper: wraps a plain CommonJS export so that
// `.default` access works uniformly for both ES modules and CJS modules.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const route_parser_1 = __importDefault(require("route-parser"));
const index_1 = require("../index");
// Subscribes to request events and dispatches each request to the first
// route whose path pattern matches, falling back to the not-found renderer.
function router(attributes) {
    return index_1.$subscribe(index_1.ContextEvents.Request, (context) => {
        const { routes, renderNotFound } = attributes;
        const pathname = context.browser.request.pathname;
        for (const route of routes) {
            // route-parser returns the extracted params object, or false.
            const params = new route_parser_1.default(route.path).match(pathname);
            if (params) {
                context.browser.request.params = params;
                return route.render(context);
            }
        }
        return renderNotFound(context);
    });
}
exports.router = router;
|
#!/usr/bin/env bash
# get_read_counts.sh
#
# Print CLI usage/help to stderr (heredoc body left untouched: it is output).
function usage() {
  cat <<EOF >&2
Usage: ./get_read_counts.sh -g human_genome_bwa -p phage_bwa -b bacteria_bwa virmap_output.final.fa virmap_input_sequence.fq
-g, --genome BWA_REF Path prefix to the human genome bwa index
-p, --phage BWA_REF Path prefix to the gbphage bwa index
-b, --bacteria BWA_REF Path prefix to the gbbact bwa index
-t, --threads T (Optional) use T threads
-k, --keep Don't delete the temporary directory used
EOF
}
#
# Description:
# Obtain counts of reads from virmap output in the original input sequence
###########################################################################
# Copyright 2019 University of New South Wales
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
# Abort on command errors, unset variables, and failures anywhere in a pipeline.
set -o errexit
set -o nounset
set -o pipefail
# Fail fast if the required external tools are not on PATH.
if ! type bwa >/dev/null; then
  echo >&2 "[!] bwa command not found"
  exit 1
fi
if ! type samtools >/dev/null; then
  echo >&2 "[!] samtools command not found"
  exit 1
fi
# Remove the working temp directory (called directly and from the INT trap).
function cleanup() {
  rm -rf "$TMP_DIR"
}
# Verify that the core BWA index files exist for a given prefix.
#   $1 = human-readable name used in error messages
#   $2 = bwa index path prefix
# NOTE(review): a complete bwa index also includes a .sa file — consider
# adding it to the brace expansion below.
function check_is_bwa_index() {
  local display_name="$1"
  local bwa_prefix="$2"
  for file in "$bwa_prefix".{amb,ann,bwt,pac}; do
    if [[ ! -f "$file" ]]; then
      echo >&2 "[!] $display_name BWA index doesn't look right"
      # 'find' on the missing path prints an error naming the absent file
      find "$file"
      exit 1
    fi
  done
}
# Align $2 against BWA index $3 and emit to stdout the reads that did NOT map
# ('-f 4' keeps alignments with the unmapped flag set). Also leaves a sorted
# BAM of those reads at $TMP_DIR/$1.align.unmapped.sort.bam for later counting.
# NOTE(review): callers redirect this output to *.fq.gz, but 'samtools fastq'
# writes plain FASTQ to stdout — the files are misnamed (bwa reads them anyway).
function get_unmapped() {
  local name=$1
  local input=$2
  local ref_db=$3
  local tmp_prefix="$TMP_DIR/$name.align"
  echo >&2 "[ ] Extracting reads which don't map to $name"
  bwa mem -t "$THREADS" "$ref_db" "$input" |
    samtools view -h -f 4 |
    samtools sort --threads "$THREADS" -o "$tmp_prefix.unmapped.sort.bam"
  samtools fastq "$tmp_prefix.unmapped.sort.bam"
}
# Align $2 against BWA index $3 and emit to stdout the reads that DID map
# ('-F 4' drops alignments with the unmapped flag set). Also leaves a sorted
# BAM at $TMP_DIR/$1.align.mapped.sort.bam for later counting.
# NOTE(review): as with get_unmapped, the stdout is plain FASTQ even though
# callers name the file *.fq.gz.
function get_mapped() {
  local name=$1
  local input=$2
  local ref_db=$3
  local tmp_prefix="$TMP_DIR/$name.align"
  echo >&2 "[ ] Extracting reads mapped to $name"
  bwa mem -t "$THREADS" "$ref_db" "$input" |
    samtools view -h -F 4 |
    samtools sort --threads "$THREADS" -o "$tmp_prefix.mapped.sort.bam"
  samtools fastq "$tmp_prefix.mapped.sort.bam"
}
###################
### Script body ###
###################
# Include 'k' / 'keep' so getopt accepts the -k/--keep flag that the option
# loop below already handles (the original spec rejected it as unknown).
OPTIONS=g:p:b:t:k
LONGOPTIONS=genome:,phage:,bacteria:,threads:,keep
# Guard the getopt call directly: under 'set -o errexit' a failing getopt
# aborts the script before the original's separate "$? -ne 0" test could run,
# so that check was dead code.
if ! PARSED=$(getopt --options=${OPTIONS} --longoptions=${LONGOPTIONS} --name "$0" -- "$@"); then
  usage
  exit 2
fi
eval set -- "${PARSED}"
# Only the '--' terminator left means no options and no positional arguments.
if [[ $# -eq 1 ]]; then
  usage
  exit 4
fi
# Consume the normalized options; getopt guarantees a terminating '--'.
while true; do
  case "$1" in
    -g | --genome)
      # NOTE(review): 'GENMOE' is a typo for GENOME but is used consistently
      # throughout the script, so it is left unchanged here.
      GENMOE_REF="$2"
      shift 2
      ;;
    -p | --phage)
      PHAGE_REF="$2"
      shift 2
      ;;
    -b | --bacteria)
      BACT_REF="$2"
      shift 2
      ;;
    -t | --threads)
      THREADS="$2"
      shift 2
      ;;
    -k | --keep)
      # When set, the temp directory is preserved for inspection.
      KEEP=1
      shift 1
      ;;
    --)
      shift
      break
      ;;
    *)
      if [ -z "$1" ]; then break; else
        echo >&2 "[!] '$1' is not a valid option"
        exit 3
      fi
      ;;
  esac
done
# Positional arguments: virmap output FASTA and the original input sequence.
INPUT="${1:-}"
ORIGINAL_SAMPLE="${2:-}"
if [[ $# -gt 2 ]]; then
  echo >&2 "[!] ${*:3} invalid option(s)"
  exit 4
fi
# All three BWA references and both positional arguments are mandatory.
if [[ -z "${GENMOE_REF:-}" ]]; then
  echo >&2 "[!] GENMOE_REF is missing, please specify with -g or --genome"
  exit 1
fi
if [[ -z "${PHAGE_REF:-}" ]]; then
  echo >&2 "[!] PHAGE_REF is missing, please specify with -p or --phage"
  exit 1
fi
if [[ -z "${BACT_REF:-}" ]]; then
  echo >&2 "[!] BACT_REF is missing, please specify with -b or --bacteria"
  exit 1
fi
if [[ -z "$INPUT" ]]; then
  echo >&2 "[!] Positional argument virmap_output is missing"
  exit 1
fi
if [[ -z "$ORIGINAL_SAMPLE" ]]; then
  echo >&2 "[!] Positional argument original_sample is missing"
  exit 1
fi
check_is_bwa_index "Human genome" "$GENMOE_REF"
check_is_bwa_index "Phage" "$PHAGE_REF"
check_is_bwa_index "Bacteria" "$BACT_REF"
# Thread count: explicit flag, else PBS/NCPUS environment, else all cores.
THREADS="${THREADS:-${PBS_NCPUS:-${NCPUS:-$(nproc)}}}"
TMP_DIR=$(mktemp -d)
echo "[ ] Using $(printf "%q\n" "$TMP_DIR") as temp dir"
# Only clean up on interrupt when the user did not ask to keep the temp dir.
# NOTE(review): trapping EXIT as well would also clean up on errexit failures.
if [[ "${KEEP:-}" != 1 ]]; then
  trap cleanup INT
fi
# samtools can only accept sequence headers up to 252(?) bytes long, so we'll
# just change them all to be short
perl -pe 'BEGIN{$A=1;} s/^>.*/">SEQUENCE_INDEX_" . $A++/ge' "$ORIGINAL_SAMPLE" | gzip -ck9 >"$TMP_DIR/sanitized_input.fq.gz"
# Successively strip host, phage, and bacterial reads from the input.
get_unmapped "human" "$TMP_DIR/sanitized_input.fq.gz" "$GENMOE_REF" >"$TMP_DIR/unmapped_1_human.fq.gz"
get_unmapped "phage" "$TMP_DIR/unmapped_1_human.fq.gz" "$PHAGE_REF" >"$TMP_DIR/unmapped_2_phage.fq.gz"
get_unmapped "bact" "$TMP_DIR/unmapped_2_phage.fq.gz" "$BACT_REF" >"$TMP_DIR/unmapped_3_bact.fq.gz"
ln -s "$TMP_DIR/unmapped_3_bact.fq.gz" "$TMP_DIR/unmapped_all.fq.gz"
ln -s "$TMP_DIR/bact.align.unmapped.sort.bam" "$TMP_DIR/unmapped_all.bam"
# Index the virmap output itself and map the filtered reads back onto it.
sample_ref="$TMP_DIR/virmap_final"
bwa index -p "$sample_ref" "$INPUT"
get_mapped "sample" "$TMP_DIR/unmapped_all.fq.gz" "$sample_ref" >"$TMP_DIR/mapped_sample.fq.gz"
# Keep only confidently, uniquely mapped reads: MAPQ >= 1, primary alignments,
# and no XA/SA tags (alternative or supplementary hits).
samtools view -h -q 1 -F 4 -F 256 "$TMP_DIR/sample.align.mapped.sort.bam" |
  grep -v -e 'XA:Z:' -e 'SA:Z:' |
  samtools view -b >"$TMP_DIR/unique_mapped.bam"
name_prefix=$(basename "$INPUT")
# extract the sizes reported by virmap
grep '^>' "$INPUT" |
  sed 's/^.*;taxId=\([0-9]*\);.*;size=\([0-9]*\);.*/\1 \2/' |
  sort -k1,1 >"counts.$name_prefix.virmap"
# count the reads after removing human/phage/bact reads
samtools view "$TMP_DIR/sample.align.mapped.sort.bam" | cut -f 3 | sort | uniq -c |
  sed 's/^ *\([0-9]*\).*;taxId=\([0-9]*\);.*;size=\([0-9]*\);.*/\2 \1/' |
  sort -k1,1 >"counts.$name_prefix.unmapped"
# count the reads after removing duplicates
samtools view "$TMP_DIR/unique_mapped.bam" | cut -f 3 | sort | uniq -c |
  sed 's/^ *\([0-9]*\).*;taxId=\([0-9]*\);.*;size=\([0-9]*\);.*/\2 \1/' |
  sort -k1,1 >"counts.$name_prefix.no_dupe"
# join the count files on taxid
{
  echo "taxid virmapSize unmappedCounts withoutSACounts"
  join -a 1 -j 1 -o 1.1,1.2,2.2 "counts.$name_prefix.virmap" "counts.$name_prefix.unmapped" |
    join -a 1 -j 1 -o 1.1,1.2,1.3,2.2 - "counts.$name_prefix.no_dupe" |
    sort -nr -k2,2
} | tr ' ' '\t' >"counts.$name_prefix.all"
if type column >/dev/null; then
  # If column is available, output the counts as a table
  column -t -s $'\t' "counts.$name_prefix.all"
else
  # Otherwise, just dump it out
  cat "counts.$name_prefix.all"
fi
# Explicit cleanup on the normal exit path (the trap only covers INT).
if [[ "${KEEP:-}" != 1 ]]; then
  cleanup
fi
|
require "set"
module Sherbet
  # Prints one or more framed blocks built from the info() lines,
  # wrapped above and below by a 40-character border of wrapper_emoji.
  class Generator
    # Print the framed info block +number_generated+ times.
    def run(number_generated = 1)
      number_generated.times do
        border = wrapper_emoji * 40
        body = info.join("\n\n")
        puts border
        puts "\n"
        puts body
        puts "\n"
        puts border
      end
    end

    # Lines to display; subclasses are expected to override this.
    def info
      []
    end

    # Character used to draw the border; subclasses may override.
    def wrapper_emoji
      "*"
    end
  end
end
|
# Assuming the code is part of a web framework handler class method
# Import necessary modules
from web_framework import Handler, Session, Redirect
class FacebookHandler(Handler):
    """Handles the post-OAuth step of Facebook authentication."""

    def handle_facebook_authentication(self):
        """Link or reject the current Facebook identity.

        If the Facebook username is already linked to an existing account,
        store an error message in the session and redirect to the login
        page; otherwise fall through to the regular authentication flow.
        """
        # Retrieve the current user's information from Facebook
        facebook_user = self.current_user
        # Check if the user's Facebook username is already linked to an existing account
        if self.social_link_service.by_handler("Graph:Facebook", facebook_user['username']):
            # If linked, set an error message and redirect the user to the login page
            errors = {'signup': "This email is already associated with a different account. Please log in and link your Facebook account."}
            self.session.set("errors", errors)
            return Redirect(self.component.conf['login_page'])
        # Not linked. The original left this branch holding only comments,
        # which is a SyntaxError ('else:' with no statements). Returning None
        # keeps the method valid until the regular sign-up/login flow is
        # implemented here.
        # TODO(review): implement account creation / login for new links.
        return None
import { Module } from '@nestjs/common';
import { UserEntity } from '../user/user.entity';
import { TypeOrmModule } from '@nestjs/typeorm';
import { JwtModule } from '@nestjs/jwt';
import { jwtConstants } from '../const/key.const';
import { JwtStrategy } from './jwt.strategy';
import { AuthController } from './auth.controller';
import { UserModule } from '../user/user.module';
import { AuthService } from './auth.service';
import { UserInfoModule } from '../userinfo/userinfo.module';
// Authentication module: exposes AuthController and provides AuthService plus
// the JWT validation strategy. Imports the user modules and registers the
// UserEntity repository and JwtModule with the shared secret.
// NOTE(review): 'secretOrPrivateKey' is the legacy option name in newer
// @nestjs/jwt versions ('secret' replaced it) — verify the installed version.
@Module({
  imports: [UserModule, UserInfoModule, TypeOrmModule.forFeature([UserEntity]),
    JwtModule.register({
      secretOrPrivateKey: jwtConstants.secret
    })],
  controllers: [AuthController],
  providers: [AuthService, JwtStrategy]
})
export class AuthModule {
}
|
# Publish the phone artifact via the Gradle wrapper.
echo "Publishing phone"
# Fresh checkouts can lose the exec bit on the wrapper; restore it.
chmod +x gradlew
# Abort immediately, propagating gradle's exit status, if publishing fails.
./gradlew :publish || exit
echo "Finished publishing phone"
|
def findSmallest(num1, num2, num3):
    """Return the smallest of the three given values.

    Replaces the original hand-rolled if/elif chain with the builtin
    ``min``; ties resolve to an equal value either way, so behavior
    is unchanged for all inputs.
    """
    return min(num1, num2, num3)
# Demo: report the smallest of three sample values.
num1 = 8
num2 = 12
num3 = 4
smallest = findSmallest(num1, num2, num3)
print("Smallest of:", num1, num2, num3, "is", smallest)
def binary_search(arr, target):
    """Return an index of ``target`` in sorted ``arr``, or -1 if absent.

    Classic iterative binary search over the inclusive range [lo, hi].
    """
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        value = arr[mid]
        if value == target:
            return mid
        if value < target:
            lo = mid + 1
        else:
            hi = mid - 1
    return -1
#!/bin/bash
# Packet sizes to benchmark, and one bandwidth-result array per size.
declare -a size_array=(64k 128k 256k 512k 1m)
declare -a array_64k=()
declare -a array_128k=()
declare -a array_256k=()
declare -a array_512k=()
declare -a array_1m=()
# Print the arithmetic mean of all arguments (floating point, via bc).
# Fix over the original: guard against zero arguments, which previously made
# bc divide by zero (e.g. when a size's result array stayed empty because a
# pod's logs yielded no 'receiver' line).
calculate_avg()
{
  arr=("$@")
  if [ "${#arr[@]}" -eq 0 ]; then
    echo "0"
    return
  fi
  sum=0
  for i in "${arr[@]}"
  do
    sum=$(echo $sum + $i | bc -l);
  done
  average=$(echo $sum / ${#arr[@]} | bc -l)
  echo "$average"
}
# IP of the running iperf server pod (trailing space in the grep pattern
# avoids matching similarly-prefixed pod names).
server_ip="$(kubectl get pods -o wide | grep 'iperf-dev-s ' |awk {'print $6'})"
# Optional first argument: number of benchmark iterations (default 1).
if [ "$1" != "" ]; then
  count=$1
else
  count=1
fi
echo "Start script to run iperf client pods"
for i in $(seq 1 $count)
do
  for size in "${size_array[@]}"
  do
    #if kubectl get pods | grep iperf-client$ > /dev/null 2>&1 ; then
    # kubectl delete -f iperf_client.yaml > /dev/null 2>&1
    # echo "Deleted client pod"
    #fi
    # Rewrite the manifest in place so the client targets the current server
    # IP with the current packet length.
    sed -i 's/args.*/args: \["-c",'$server_ip',"-l","'$size'"\]/' iperf_client_"$size".yaml
    echo "Creating client pod with packet length $size"
    kubectl apply -f iperf_client"_$size".yaml > /dev/null 2>&1
    sleep 10
    # First call streams the run to the terminal; the second re-fetches the
    # (now complete) log to extract the receiver bandwidth.
    # NOTE(review): this waits for the pod to finish twice — consider
    # capturing the log once and both printing and parsing it.
    kubectl logs -f iperf-client
    bandwidth=$(kubectl logs -f iperf-client | grep receiver | awk {'print $7'})
    if [ "$size" == "64k" ]; then
      array_64k+=("$bandwidth")
    elif [ "$size" == "128k" ]; then
      array_128k+=("$bandwidth")
    elif [ "$size" == "256k" ]; then
      array_256k+=("$bandwidth")
    elif [ "$size" == "512k" ]; then
      array_512k+=("$bandwidth")
    else
      array_1m+=("$bandwidth")
    fi
    echo "Delete client pod with packet length $size"
    kubectl delete -f iperf_client"_$size".yaml > /dev/null 2>&1
    echo "-------------------------------------------------------------------------------------------------------------------"
  done
done
echo "-------------------------------------------------------------------------------------------------------------------"
echo "iperf client results for 64k : ${array_64k[@]}"
echo "iperf client results for 128k : ${array_128k[@]}"
echo "iperf client results for 256k : ${array_256k[@]}"
echo "iperf client results for 512k : ${array_512k[@]}"
echo "iperf client results for 1m : ${array_1m[@]}"
# Averages are only meaningful across multiple iterations.
if [ $count -gt 1 ]; then
  echo -n "Average bandwidth for 64k packet size = "
  calculate_avg "${array_64k[@]}"
  echo ""
  echo -n "Average bandwidth for 128k packet size = "
  calculate_avg "${array_128k[@]}"
  echo ""
  echo -n "Average bandwidth for 256k packet size = "
  calculate_avg "${array_256k[@]}"
  echo ""
  echo -n "Average bandwidth for 512k packet size = "
  calculate_avg "${array_512k[@]}"
  echo ""
  echo -n "Average bandwidth for 1m packet size = "
  calculate_avg "${array_1m[@]}"
  echo ""
fi
echo "-------------------------------------------------------------------------------------------------------------------"
echo ""
|
#!/bin/bash
set -euo pipefail
# Author: Zheng Hao Tan <tanzhao@umich.edu>
# MIT License
# This command counts the number of lines of code in a Git repository
# Fixes over the original 'git ls-files | xargs wc -l':
# - NUL-delimited names (-z / -0) survive paths containing spaces or quotes;
# - with many files xargs splits into several wc invocations, printing
#   multiple partial 'total' lines; 'cat | wc -l' yields one grand total.
git ls-files -z | xargs -0 cat | wc -l
|
import JSZip from "jszip";
import Radium from "radium";
import React, { Component } from "react";
import { saveAs } from "./FileSaver";
import BarcodeInput from "./BarcodeInput";
import BarcodeRenderer from "./BarcodeRenderer";
// Radium enables the ':hover' pseudo-selector used in the styles object below.
@Radium
export default class App extends Component {
    constructor(props) {
        super(props);
        // barcodes: the list of barcode strings entered by the user.
        this.state = {
            barcodes: [],
        };
    }

    // Replace the current barcode list (passed down to BarcodeInput).
    setBarcodes = (barcodes) => {
        this.setState({ barcodes });
    };

    // Collect every rendered barcode <canvas> into a zip of PNGs and save it.
    download = (e) => {
        e.preventDefault();
        const zip = new JSZip();
        const images = zip.folder("barcodes");
        var canvases = document.querySelectorAll("canvas");
        for (let n = 0; n < canvases.length; n++) {
            const canvas = canvases[n];
            const barcode = canvas.getAttribute("data-barcode");
            const data = canvas.toDataURL("image/png");
            // Strip the fixed 22-character "data:image/png;base64," prefix
            // to get the raw base64 payload.
            const raw = data.slice(22);
            // Sanitize the barcode text into a filesystem-safe file name.
            const name = barcode.replace(/[^-_a-zA-Z0-9]/g, "_");
            images.file(`${name}.png`, raw, { base64: true });
        }
        zip.generateAsync({ type: "blob" })
            .then((content) => {
                saveAs(content, "barcodes.zip");
            });
    };

    render() {
        const { barcodes } = this.state;
        return (
            <div style={styles.container}>
                <div style={styles.left}>
                    <h3 style={styles.header}>Paste Barcodes</h3>
                    <div style={styles.content}>
                        <BarcodeInput style={styles.barcodeInput}
                            value={barcodes}
                            onChange={this.setBarcodes} />
                    </div>
                </div>
                <div style={styles.right}>
                    <h3 style={styles.header}>Output</h3>
                    <div style={styles.scroller}>
                        <BarcodeRenderer barcodes={barcodes} style={styles.renderer} />
                    </div>
                    <a href="#" onClick={this.download} style={styles.download}>Download All</a>
                </div>
            </div>
        );
    }
}
// Inline Radium style objects: a full-viewport container split into two
// fixed halves (input left, rendered output right). The ':hover' rule on
// 'download' requires the @Radium decorator on App.
const styles = {
    container: {
        position: "absolute",
        left: 0,
        top: 0,
        right: 0,
        bottom: 0,
        overflow: "hidden",
    },
    left: {
        position: "absolute",
        top: 0,
        left: 0,
        width: "50%",
        bottom: 0,
        display: "flex",
        flexDirection: "column",
        alignItems: "stretch",
        padding: 20,
    },
    right: {
        position: "absolute",
        top: 0,
        right: 0,
        width: "50%",
        bottom: 0,
        display: "flex",
        flexDirection: "column",
        alignItems: "stretch",
        padding: 20,
    },
    header: {
        color: "#596774",
        textTransform: "uppercase",
        letterSpacing: "2px",
        fontSize: "15px",
        fontWeight: "200",
        marginBottom: "13px",
    },
    content: {
        position: "relative",
        flex: 1,
    },
    barcodeInput: {
        position: "absolute",
        left: 0,
        right: 0,
        top: 0,
        bottom: 0,
    },
    scroller: {
        position: "relative",
        flex: 1,
        overflowY: "auto",
    },
    renderer: {
        width: "100%",
    },
    download: {
        color: "#596774",
        textTransform: "uppercase",
        letterSpacing: "2px",
        fontSize: "15px",
        fontWeight: "200",
        marginBottom: "13px",
        textDecoration: "none",
        ":hover": {
            textDecoration: "underline",
        },
    },
};
|
def calculate_targets(todayTimeStamp, dueDayOfMonth, temp, no_overdue, no_overdue_amt):
    """Populate target fields on ``temp`` when today is the due date, and return them.

    NOTE(review): ``target`` is not a parameter and is not defined here — it
    must come from an enclosing/global scope; confirm it exists before calling.
    NOTE(review): when ``todayTimeStamp`` does not match
    ``dueDayOfMonth['due_date_add_1']`` the keys are never written, so the
    final lookups raise KeyError unless ``temp`` was pre-populated by the caller.
    """
    if todayTimeStamp == dueDayOfMonth['due_date_add_1']:
        # 'target' is treated as a percentage applied to counts and amounts.
        temp['target'] = target['target']
        temp['target_acc'] = (no_overdue * int(temp['target'])) / 100
        temp['target_amt'] = (no_overdue_amt * int(temp['target'])) / 100
    return temp['target'], temp['target_acc'], temp['target_amt']
#!/bin/bash
# Run the CVE-2021-3329 Zephyr proof-of-concept, forwarding all arguments.
DIR="$(dirname "$(readlink -f "$0")")"
# Fixes over the original: quote the pushd path and use "$@" instead of the
# unquoted $@ so directories and arguments containing spaces survive intact.
pushd "$DIR/../../03-fuzzing-new-targets/zephyr-os/prebuilt_samples/CVE-2021-3329/POC"
./run.sh "$@"
popd
// polyfill ES6 support to older browsers
import "core-js/stable"
// polyfill the fetch API for older browsers
import "whatwg-fetch"
|
#include <iostream>
#include <string>
// Fluent configuration holder for tile-server request options.
class TileServerOptions {
private:
    // Name of the URL query parameter carrying the API key (e.g. "access_token").
    std::string apiKeyParameterName;

public:
    // Fluent setter. Takes the string by value and moves it into the member,
    // so callers passing temporaries pay one move instead of the copy the
    // original's plain assignment incurred.
    TileServerOptions& withApiKeyParameterName(std::string apiKeyParameterName) {
        this->apiKeyParameterName = std::move(apiKeyParameterName);
        return *this;
    }

    // Read accessor for the configured API-key parameter name.
    std::string getApiKeyParameterName() const {
        return apiKeyParameterName;
    }
};
// Demo: configure the API-key parameter name and echo it back.
int main() {
    TileServerOptions options;
    const std::string parameterName =
        options.withApiKeyParameterName("access_token").getApiKeyParameterName();
    std::cout << "Access token parameter name: " << parameterName << std::endl;
    return 0;
}
import { NextPage } from 'next';
import Header from '../components/Header';
// Static About page: renders the shared site <Header /> above a welcome heading.
export const About: NextPage = () => {
  return (
    <div>
      <Header />
      <h1>Welcome to the About page!</h1>
    </div>
  );
};

export default About;
|
<reponame>lutovna/klever
#
# Copyright (c) 2019 ISP RAS (http://www.ispras.ru)
# Ivannikov Institute for System Programming of the Russian Academy of Sciences
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand, CommandError
from rest_framework import serializers
from bridge.vars import USER_ROLES
from users.models import User
from users.serializers import ManageUserSerializer
class Command(BaseCommand):
    """Management command that creates a user, or updates an existing one."""

    help = 'Used to create a user. If user with specified username exists then it will be updated with provided data.'
    requires_migrations_checks = True

    def add_arguments(self, parser):
        """Register the command-line options accepted by this command."""
        roles = [entry[0] for entry in USER_ROLES]
        parser.add_argument('--username', dest='username', help='Specifies the username.')
        parser.add_argument('--password', dest='password', help='Specifies the password.')
        parser.add_argument(
            '--role', choices=roles, default=roles[0],
            help="The user role (0 - no access, 1 - producer, 2 - manager, 3 - expert, 4 - service)"
        )
        parser.add_argument('--staff', dest='is_staff', action='store_true', help='Is user a staff?')
        parser.add_argument('--superuser', dest='is_superuser', action='store_true', help='Is user a superuser?')

    def handle(self, *args, **options):
        """Create or update the user described by the parsed options."""
        # If user with specified username exists, then update its password and role
        existing = User.objects.filter(username=options['username']).first()
        serializer = ManageUserSerializer(instance=existing, data=options)
        try:
            serializer.is_valid(raise_exception=True)
        except serializers.ValidationError as e:
            raise CommandError(str(e))
        serializer.save()
        if options['verbosity'] >= 1:
            self.stdout.write("User created successfully.")
|
#!/bin/bash
# Docker entrypoint: template the pinpoint-web/hbase properties from
# environment variables, then hand off to Tomcat.
set -e
set -x
# Defaults for every configurable value, overridable via the environment.
CLUSTER_ENABLE=${CLUSTER_ENABLE:-false}
CLUSTER_ZOOKEEPER_ADDRESS=${CLUSTER_ZOOKEEPER_ADDRESS:-localhost}
ADMIN_PASSWORD=${ADMIN_PASSWORD:-admin}
HBASE_HOST=${HBASE_HOST:-localhost}
HBASE_PORT=${HBASE_PORT:-2181}
DISABLE_DEBUG=${DISABLE_DEBUG:-true}
# Start from the baked-in template files, then patch in the runtime values.
cp /assets/pinpoint-web.properties /usr/local/tomcat/webapps/ROOT/WEB-INF/classes/pinpoint-web.properties
cp /assets/hbase.properties /usr/local/tomcat/webapps/ROOT/WEB-INF/classes/hbase.properties
# NOTE(review): these sed patterns must match the template's literal defaults
# (e.g. 192.168.176.136); if the template changes, substitution silently stops.
sed -i "s/cluster.enable=true/cluster.enable=${CLUSTER_ENABLE}/g" /usr/local/tomcat/webapps/ROOT/WEB-INF/classes/pinpoint-web.properties
sed -i "s/cluster.zookeeper.address=192.168.176.136/cluster.zookeeper.address=${CLUSTER_ZOOKEEPER_ADDRESS}/g" /usr/local/tomcat/webapps/ROOT/WEB-INF/classes/pinpoint-web.properties
sed -i "s/admin.password=admin/admin.password=${ADMIN_PASSWORD}/g" /usr/local/tomcat/webapps/ROOT/WEB-INF/classes/pinpoint-web.properties
sed -i "s/hbase.client.host=192.168.176.136/hbase.client.host=${HBASE_HOST}/g" /usr/local/tomcat/webapps/ROOT/WEB-INF/classes/hbase.properties
sed -i "s/hbase.client.port=2181/hbase.client.port=${HBASE_PORT}/g" /usr/local/tomcat/webapps/ROOT/WEB-INF/classes/hbase.properties
if [ "$DISABLE_DEBUG" == "true" ]; then
  sed -i 's/level value="DEBUG"/level value="INFO"/' /usr/local/tomcat/webapps/ROOT/WEB-INF/classes/log4j.xml
fi
# exec so Tomcat becomes PID 1 and receives container signals directly.
exec /usr/local/tomcat/bin/catalina.sh run
|
<gh_stars>0
#include<stdio.h>
#include<time.h>
#include<wiringPi.h>
#define READY_FREQ 0
#define DIR_CAR 28
#define STEP_CAR 29
#define DIR_MI 27
#define STEP_MI 11
#define DIR_SI 5
#define STEP_SI 4
#define DIR_SOL 1
#define STEP_SOL 24
#define DIR_RE 23
#define STEP_RE 22
#define DIR_LA 21
#define STEP_LA 3
#define DIR_MIZ 2
#define STEP_MIZ 0
#define CW 1
#define CCW 0
#define SPR 200
int veq_aux_dir[6] = [DIR_MIZ, DIR_SI, DIR_SOL, DIR_RE, DIR_LA, DIR_MI];
int veq_aux_step[6] = [STEP_MIZ, STEP_SI, STEP_SOL, STEP_RE, STEP_LA, STEP_MI];
double atraso = 30;
/* Tighten all six strings: for each motor, set direction counter-clockwise
 * and emit 400 step pulses (each half-period held 'atraso' ms). */
void tensionar_corda(){
    int i,j, dir; /* NOTE(review): 'dir' is declared but never used */
    for(j = 0; j < 6;j++){
        for(i = 0; i < 400; i++){
            digitalWrite(veq_aux_dir[j], CCW);
            digitalWrite(veq_aux_step[j], HIGH);
            delay(atraso);
            digitalWrite(veq_aux_step[j], LOW);
            delay(atraso);
        }
    }
}
/* Loosen all six strings: same pulse pattern as tensionar_corda but with the
 * direction pins driven clockwise. */
void afrouxar_corda(){
    int i,j, dir; /* NOTE(review): 'dir' is declared but never used */
    for(j = 0; j < 6;j++){
        for(i = 0; i < 400; i++){
            digitalWrite(veq_aux_dir[j], CW);
            digitalWrite(veq_aux_step[j], HIGH);
            delay(atraso);
            digitalWrite(veq_aux_step[j], LOW);
            delay(atraso);
        }
    }
}
/* Loosen one specific string: pulse the given step pin 51 times.
 * NOTE(review): the 'dir' parameter is accepted but never used — callers are
 * expected to set the direction pin beforehand; confirm that is intended. */
void afrouxar_corda_esp(int dir, int step){
    int i;
    for(i = 0; i < 51; i++){
        digitalWrite(step, HIGH);
        delay(atraso);
        digitalWrite(step, LOW);
        delay(atraso);
    }
}
int main(void){
int i,j;
wiringPiSetup();
for(j = 0; j < 6; j++){
pinMode(veq_aux_dir[j], OUTPUT);
pinMode(veq_aux_step[j], OUTPUT);
}
int step_count = SPR;
double atraso_carro = 1;
int i = 0;
int sentido, qtde;
printf("sentido \n");
scanf("%d", &sentido);
printf("qtde\n");
scanf("%d", &qtde);
// digitalWrite(DIR, CCW);
digitalWrite(DIR, sentido);
//tensionar_corda();
afrouxar_corda(0, STEP_SI);
/*
for(i = 0; i < qtde; i++){
digitalWrite(STEP, HIGH);
delay(atraso);
digitalWrite(STEP, LOW);
delay(atraso);
}
*/
return 0;
}
|
<reponame>invaderjill/AngularExWebApp
// Financial-summary (FS) figures for a single episode.
// NOTE(review): field meanings are inferred from the names only — confirm
// each against the backing API before relying on these descriptions.
export interface IEpisodeFS {
  totalCharges: number;
  totalPayment: number;
  totalAdjustment: number;
  totalBadDebt: number;
  unbilled: number;
  lateCharge: number;
  unAllocated: number;
  exempt: number;
  claims: number;
  ubSelfP: number;
  selfP: number;
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.