text
stringlengths
1
1.05M
// Landing-page behaviour: registration-input focus styling, smooth scroll to
// the counter section, and a responsive swap of the header curve image.
// Fixes: strict (in)equality instead of `!=`, removed leftover console.log
// debug output, removed stray `;` after if-blocks.
var screenWidth = $(window).width(),
    imgHeader = $('.contHeader_img img');

$(document).ready(function () {
    var input = $('#registro .form-group input');

    // Highlight the wrapping .form-group while its input has focus, and keep
    // the highlight after blur when the input is non-empty (floating-label UX).
    input.focus(function () {
        $(this).parent().addClass('input-activo');
    });
    input.focusout(function () {
        $(this).parent().removeClass('input-activo');
        if ($(this).val() !== '') {
            $(this).parent().addClass('input-activo');
        }
    });

    // Smooth-scroll to the counter section. NOTE(review): the position is
    // captured once on ready, as in the original — if the layout reflows later
    // the target may be stale; confirm that is acceptable.
    var position = $('.cont_contador').position();
    $('.ir-abajo').click(function () {
        $('body, html').animate({ scrollTop: position.top + 'px' }, 300);
    });

    // Pick the header image for the current viewport; each narrower
    // breakpoint overrides the previous one, so the narrowest match wins.
    if (screenWidth <= 1680) { imgHeader.attr('src', 'images/curvaHeader.png'); }
    if (screenWidth <= 1200) { imgHeader.attr('src', 'images/curvaHeader-2.png'); }
    if (screenWidth <= 760) { imgHeader.attr('src', 'images/curvaHeader-3.png'); }
    if (screenWidth <= 400) { imgHeader.attr('src', 'images/curvaHeader-4.png'); }
});
/************************************************************ Copyright (c) 1993 by Silicon Graphics Computer Systems, Inc. Permission to use, copy, modify, and distribute this software and its documentation for any purpose and without fee is hereby granted, provided that the above copyright notice appear in all copies and that both that copyright notice and this permission notice appear in supporting documentation, and that the name of Silicon Graphics not be used in advertising or publicity pertaining to distribution of the software without specific prior written permission. Silicon Graphics makes no representation about the suitability of this software for any purpose. It is provided "as is" without any express or implied warranty. SILICON GRAPHICS DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. IN NO EVENT SHALL SILICON GRAPHICS BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
********************************************************/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <stdio.h>
#include "Xlibint.h"
#include "XKBlibint.h"
#include <X11/extensions/XKBproto.h>

/***====================================================================***/

/* Fill `buf` with `size` bytes read from the display connection.
 * Returns 1 on success, 0 on bad arguments or allocation failure.
 * The caller must release the buffer with _XkbFreeReadBuffer(). */
int
_XkbInitReadBuffer(Display *dpy,XkbReadBufferPtr buf,int size)
{
    if ((dpy!=NULL) && (buf!=NULL) && (size>0)) {
        buf->error= 0;
        buf->size= size;
        buf->start= buf->data= _XkbAlloc(size);
        if (buf->start) {
            _XRead(dpy, buf->start, size);
            return 1;
        }
    }
    return 0;
}

/* Number of unconsumed bytes remaining between the read cursor and the end. */
#define _XkbReadBufferDataLeft(b) (((b)->size)-((b)->data-(b)->start))

/* Advance the read cursor past `size` bytes without copying.
 * Returns 1 on success, 0 if the buffer is invalid or too short. */
int
_XkbSkipReadBufferData(XkbReadBufferPtr from,int size)
{
    if (size==0)
        return 1;
    if ((from==NULL)||(from->error)||(size<1)||
        (_XkbReadBufferDataLeft(from)<size))
        return 0;
    from->data+= size;
    return 1;
}

/* Copy `size` bytes from the buffer into `to`, advancing the read cursor.
 * Returns 1 on success, 0 if the buffer is invalid or too short. */
int
_XkbCopyFromReadBuffer(XkbReadBufferPtr from,char *to,int size)
{
    if (size==0)
        return 1;
    if ((from==NULL)||(from->error)||(to==NULL)||(size<1)||
        (_XkbReadBufferDataLeft(from)<size))
        return 0;
    memcpy(to,from->data,size);
    from->data+= size;
    return 1;
}

#ifdef XKB_FORCE_INT_KEYSYM
/* Widen 32-bit wire keysyms into (possibly larger) KeySym values. */
int
_XkbReadCopyKeySyms(int *wire,KeySym *to,int num_words)
{
    while (num_words-->0) {
        *to++= *wire++;
    }
    return 1;
}

/* Copy `num_words` 4-byte keysyms out of the buffer, advancing the cursor.
 * Returns 0 if fewer than num_words*4 bytes remain. */
int
_XkbReadBufferCopyKeySyms(XkbReadBufferPtr from,KeySym *to,int num_words)
{
    if ((unsigned)(num_words*4)>_XkbReadBufferDataLeft(from))
        return 0;
    _XkbReadCopyKeySyms((int *)from->data,to,num_words);
    from->data+= (4*num_words);
    return True;
}

/* Narrow KeySym values back to 32-bit CARD32 wire format. */
int
_XkbWriteCopyKeySyms (register KeySym *from,CARD32 *to,int len)
{
    while (len-->0) {
        *to++= (CARD32)*from++;
    }
    return True;
}
#endif

#ifdef LONG64
/* Widen 32-bit wire words into 64-bit longs (one word per long). */
int
_XkbReadCopyData32(int *wire,long *to,int num_words)
{
    while (num_words-->0) {
        *to++= *wire++;
    }
    return 1;
}
#endif

#ifdef WORD64
/* Unpack two 32-bit wire words from each 64-bit long, alternating the
 * 32-bit half that is extracted via the shifting mask. */
int
_XkbReadCopyData32(int *from,long *lp,int num_words)
{
    long *lpack;
    long mask32 = 0x00000000ffffffff;
    long maskw, i, bits;

    lpack = (long *)from;
    bits = 32;
    for (i=0;i<num_words;i++) {
        maskw = mask32 << bits;
        *lp++ = (*lpack & maskw) >> bits;
        bits = bits ^ 32; /* toggle between high and low half */
        if (bits)
            lpack++;
    }
    return 1;
}
#endif

#if defined(LONG64) || defined(WORD64)
/* Copy `num_words` 32-bit words from the buffer into longs, advancing
 * the cursor; returns 0 if the buffer is too short. */
int
_XkbReadBufferCopy32(XkbReadBufferPtr from,long *to,int num_words)
{
    if ((unsigned)(num_words*4)>_XkbReadBufferDataLeft(from))
        return 0;
    _XkbReadCopyData32((int *)from->data,to,num_words);
    from->data+= (4*num_words);
    return True;
}
#endif

#ifdef LONG64
/* Narrow 64-bit longs to 32-bit CARD32 wire words. */
int
_XkbWriteCopyData32 (register unsigned long *from,CARD32 *to,int len)
{
    while (len-->0) {
        *to++= (CARD32)*from++;
    }
    return True;
}
#endif /* LONG64 */

#ifdef WORD64
/* Deliberate compile error: the WORD64 writer was never implemented. */
_XkbWriteCopyData32 Not Implemented Yet for sizeof(int)==8
#endif

/* Return a pointer to the next `size` bytes WITHOUT advancing the cursor,
 * or NULL if the buffer is invalid or too short. */
char *
_XkbPeekAtReadBuffer(XkbReadBufferPtr from,int size)
{
    if ((from==NULL)||(from->error)||(size<1)||
        (_XkbReadBufferDataLeft(from)<size))
        return NULL;
    return from->data;
}

/* Return a pointer to the next `size` bytes and advance the cursor past
 * them, or NULL if the buffer is invalid or too short. */
char *
_XkbGetReadBufferPtr(XkbReadBufferPtr from,int size)
{
    char *ptr;
    if ((from==NULL)||(from->error)||(size<1)||
        (_XkbReadBufferDataLeft(from)<size))
        return NULL;
    ptr= from->data;
    from->data+= size;
    return ptr;
}

/* Free the buffer storage and return how many bytes were left unconsumed
 * (useful for detecting short or over-long replies). */
int
_XkbFreeReadBuffer(XkbReadBufferPtr buf)
{
    if ((buf!=NULL) && (buf->start!=NULL)) {
        int left;
        left= (int)_XkbReadBufferDataLeft(buf);
        if (buf->start!=NULL)
            Xfree(buf->start);
        buf->size= 0;
        buf->start= buf->data= NULL;
        return left;
    }
    return 0;
}

/* Read an XKB counted string (CARD16 length + bytes, padded to a 4-byte
 * boundary). On success stores a freshly-allocated NUL-terminated copy in
 * *rtrn (NULL for a zero-length string) and returns True; returns False
 * without consuming data if the buffer is invalid or too short. */
Bool
_XkbGetReadBufferCountedString(XkbReadBufferPtr buf,char **rtrn)
{
    CARD16 len,*pLen;
    int left;
    char * str = NULL;

    if ((buf==NULL)||(buf->error)||((left=(int)_XkbReadBufferDataLeft(buf))<4))
        return False;
    pLen= (CARD16 *)buf->data;
    len= *pLen;
    if (len>0) {
        if (XkbPaddedSize(len+2)>left)
            return False;
        str= _XkbAlloc(len+1);
        if (str) {
            memcpy(str,&buf->data[2],len);
            str[len]= '\0';
        }
    }
    buf->data+= XkbPaddedSize(len+2); /* skip length, bytes, and pad */
    *rtrn= str;
    return True;
}
package intercept.server;

import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import com.sun.net.httpserver.HttpServer;
import intercept.configuration.DefaultProxyConfig;
import intercept.configuration.InterceptConfiguration;
import intercept.configuration.ProxyConfig;
import intercept.framework.Command;
import intercept.framework.WebServer;
import intercept.logging.ApplicationLog;
import intercept.proxy.ProxyFactory;
import intercept.proxy.ProxyServer;
import intercept.server.components.ClasspathContentPresenter;
import intercept.server.components.HomePagePresenter;
import intercept.server.components.NewProxyCommand;
import intercept.server.components.NewProxyPresenter;
import intercept.utils.Block;
import intercept.utils.Utils;

import java.io.IOException;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;

import static intercept.server.UriMatchers.simpleMatcher;

/**
 * HTTP front-end for Intercept: serves the configuration UI on the configured
 * port and starts/stops the proxy servers described by an
 * {@link InterceptConfiguration}.
 *
 * Fixes: the shutdown request line previously read {@code "POST /stop HTTP1.1"}
 * (missing slash in the HTTP version token, which is not valid HTTP); the
 * startup wait loop busy-spun without pausing when the port was not yet open.
 */
public class DefaultInterceptServer implements HttpHandler, WebServer, InterceptServer {
    private HttpServer server;
    private InterceptConfiguration configuration;
    private final ApplicationLog applicationLog;
    /** Milliseconds to pause between connection attempts during startup. */
    private static final int WAIT_TIME = 200;

    public DefaultInterceptServer(ApplicationLog applicationLog) {
        this.applicationLog = applicationLog;
    }

    /** Starts one proxy per configured entry and mounts its admin context at /&lt;name&gt;. */
    Block<ProxyConfig> startProxies = new Block<ProxyConfig>() {
        public void yield(ProxyConfig item) {
            applicationLog.log("Starting proxy server \"" + item.getName() + "\" on port " + item.getPort());
            ProxyServer proxy = ProxyFactory.startProxy(item, applicationLog);
            applicationLog.trace("Created web context for /" + proxy.getName());
            server.createContext("/" + proxy.getName(), new ProxyConfigurationHttpHandler(proxy, applicationLog));
        }
    };

    @Override
    public void start(InterceptConfiguration configuration) {
        try {
            this.configuration = configuration;
            applicationLog.log("Starting Intercept server on port " + configuration.getConfigurationPort());
            server = HttpServer.create(new InetSocketAddress(configuration.getConfigurationPort()), 0);
            server.createContext("/", this);
            server.setExecutor(null); // default executor: handle requests on the calling thread
            server.start();
            startProxyServers(configuration);
            waitUntilServerAcceptingConnections();
        } catch (IOException e) {
            throw new RuntimeException("Failed to start intercept server ", e);
        }
    }

    /** Blocks until the configuration port accepts a TCP connection. */
    private void waitUntilServerAcceptingConnections() {
        while (true) {
            Socket socket = null;
            try {
                socket = new Socket("localhost", configuration.getConfigurationPort());
                if (socket.isConnected()) {
                    Utils.sleep(WAIT_TIME);
                    return;
                }
            } catch (IOException e) {
                // Server is not accepting yet: pause before retrying instead of
                // busy-spinning (the original looped with no delay here).
                Utils.sleep(WAIT_TIME);
            } finally {
                Utils.close(socket);
            }
        }
    }

    private void startProxyServers(InterceptConfiguration configuration) {
        configuration.eachProxy(startProxies);
    }

    private void stopProxyServers() {
        ProxyFactory.shutdown();
    }

    /** Routes GET/POST requests through the dispatcher; anything unrouted gets a 404. */
    public void handle(HttpExchange httpExchange) {
        try {
            String method = httpExchange.getRequestMethod();
            Dispatcher dispatcher = createDispatcher();
            if (method.equalsIgnoreCase("GET")) {
                dispatcher.dispatchGetRequest(new WebContext(this, httpExchange));
            }
            if (method.equalsIgnoreCase("POST")) {
                dispatcher.dispatchPostRequest(new WebContext(this, httpExchange));
            }
        } catch (NoRouteException nre) {
            send404(httpExchange);
        } catch (Exception e) {
            send404(httpExchange);
            System.err.println("Error processing request");
            e.printStackTrace();
        }
    }

    /** Builds the routing table for the configuration UI. */
    private Dispatcher createDispatcher() {
        Dispatcher dispatcher = new Dispatcher();
        dispatcher.register(simpleMatcher("/"), new HomePagePresenter(configuration));
        dispatcher.register(simpleMatcher("/proxy/new"), new NewProxyPresenter(), new NewProxyCommand());
        dispatcher.register(UriMatchers.classpathMatcher(), new ClasspathContentPresenter());
        dispatcher.register(simpleMatcher("/stop"), new Command() {
            public void executeCommand(WebContext context) {
                stopProxyServers();
                server.stop(0);
            }
        });
        return dispatcher;
    }

    private void send404(HttpExchange httpExchange) {
        try {
            // -1 response length: no body.
            httpExchange.sendResponseHeaders(404, -1);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public ProxyConfig getConfig() {
        return null; // the intercept server itself has no proxy config
    }

    @Override
    public List<ProxyConfig> getRunningProxies() {
        final List<ProxyConfig> proxies = new ArrayList<ProxyConfig>();
        configuration.eachProxy(new Block<ProxyConfig>() {
            public void yield(ProxyConfig item) {
                proxies.add(item);
            }
        });
        return proxies;
    }

    @Override
    public void startNewProxy(String name, int port) {
        ProxyConfig proxyConfig = new DefaultProxyConfig(name, port);
        configuration.add(proxyConfig);
        ProxyServer proxy = ProxyFactory.startProxy(proxyConfig, applicationLog);
        startEntrypointForProxy(proxyConfig, proxy);
    }

    private void startEntrypointForProxy(ProxyConfig proxyConfig, ProxyServer proxy) {
        applicationLog.trace("Created web context for /" + proxyConfig.getName());
        server.createContext("/" + proxyConfig.getName(), new ProxyConfigurationHttpHandler(proxy, applicationLog));
    }

    @Override
    public void stop(InterceptConfiguration configuration) {
        try {
            Socket socket = new Socket("localhost", configuration.getConfigurationPort());
            OutputStream outputStream = socket.getOutputStream();
            // Fixed: was "POST /stop HTTP1.1" — an invalid HTTP version token.
            String message = "POST /stop HTTP/1.1\r\n\r\n";
            outputStream.write(message.getBytes());
            outputStream.close();
        } catch (IOException e) {
            System.err.println("Failed to stop intercept server: " + e.getMessage());
        }
        server.stop(0);
        applicationLog.log("Intercept server stopped");
    }

    @Override
    public String uri(String path) {
        return "http://localhost:" + configuration.getConfigurationPort() + path;
    }
}
# pyfbx/utils/synchronized_func.py
import functools
import threading


def synchronized(func):
    """Decorator that serializes all calls to ``func`` behind one lock.

    A single :class:`threading.Lock` is created per decorated function and
    stored on the function object itself (``func.__lock__``), so every call
    through the wrapper is mutually exclusive.

    Fix: the wrapper now uses ``functools.wraps`` so the decorated function
    keeps its ``__name__``/``__doc__`` (the original wrapper reported as
    ``synced_func``).
    """
    func.__lock__ = threading.Lock()

    @functools.wraps(func)
    def synced_func(*args, **kws):
        with func.__lock__:
            return func(*args, **kws)

    return synced_func
// open-sphere-base/mantle/src/main/java/io/opensphere/mantle/data/geom/factory/impl/MapIconGeometryConverter.java
package io.opensphere.mantle.data.geom.factory.impl;

import java.awt.Color;
import java.net.MalformedURLException;
import java.net.URL;

import org.apache.log4j.Logger;

import io.opensphere.core.Toolbox;
import io.opensphere.core.geometry.AbstractRenderableGeometry;
import io.opensphere.core.geometry.ImageManager;
import io.opensphere.core.geometry.PointSpriteGeometry;
import io.opensphere.core.geometry.constraint.Constraints;
import io.opensphere.core.geometry.renderproperties.DefaultBaseAltitudeRenderProperties;
import io.opensphere.core.geometry.renderproperties.DefaultPointRenderProperties;
import io.opensphere.core.geometry.renderproperties.DefaultPointSizeRenderProperty;
import io.opensphere.core.geometry.renderproperties.PointRenderProperties;
import io.opensphere.core.geometry.renderproperties.PointScaleRenderProperty;
import io.opensphere.core.geometry.renderproperties.PointSizeRenderProperty;
import io.opensphere.core.geometry.renderproperties.ZOrderRenderProperties;
import io.opensphere.core.model.Altitude;
import io.opensphere.core.model.GeographicPosition;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.mantle.data.BasicVisualizationInfo;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.MapVisualizationInfo;
import io.opensphere.mantle.data.element.VisualizationState;
import io.opensphere.mantle.data.geom.MapGeometrySupport;
import io.opensphere.mantle.data.geom.MapIconGeometrySupport;
import io.opensphere.mantle.data.geom.factory.RenderPropertyPool;
import io.opensphere.mantle.icon.IconImageProvider;
import io.opensphere.mantle.icon.IconProvider;
import io.opensphere.mantle.icon.IconRecord;
import io.opensphere.mantle.icon.IconRegistry;
import io.opensphere.mantle.icon.impl.IconProviderFactory;
import io.opensphere.mantle.util.MantleConstants;
import io.opensphere.mantle.util.MantleToolboxUtils;

/** Factory class to create geometry from geometry support class. */
public final class MapIconGeometryConverter extends AbstractGeometryConverter
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(MapIconGeometryConverter.class);

    /**
     * Convert a {@link MapIconGeometrySupport} into a renderable point-sprite
     * geometry, resolving the icon image, position, color, size and optional
     * time constraints.
     *
     * @param tb the tb
     * @param iconReg the icon reg
     * @param geomSupport the geom support
     * @param id the id
     * @param dti the dti
     * @param visState - the {@link VisualizationState}
     * @param renderPropertyPool the render property pool
     * @return the point geometry
     */
    public static PointSpriteGeometry convert(Toolbox tb, IconRegistry iconReg, MapIconGeometrySupport geomSupport, long id,
            DataTypeInfo dti, VisualizationState visState, RenderPropertyPool renderPropertyPool)
    {
        MapVisualizationInfo mapVisInfo = dti.getMapVisualizationInfo();
        BasicVisualizationInfo basicVisInfo = dti.getBasicVisualizationInfo();
        PointSpriteGeometry.Builder<GeographicPosition> iconBuilder = new PointSpriteGeometry.Builder<>();
        // Selected features render in the global select color instead of their own.
        PointRenderProperties props = getIconSizeRenderPropertiesIfAvailable(visState, mapVisInfo, basicVisInfo,
                renderPropertyPool, geomSupport,
                visState.isSelected() ? MantleConstants.SELECT_COLOR : geomSupport.getColor());
        // Altitude is adjusted by the per-feature offset; terrain-following
        // features use the TERRAIN reference level.
        iconBuilder.setPosition(new GeographicPosition(
                LatLonAlt.createFromDegreesMeters(geomSupport.getLocation().getLatD(), geomSupport.getLocation().getLonD(),
                        geomSupport.getLocation().getAltM() + visState.getAltitudeAdjust(),
                        geomSupport.followTerrain() ? Altitude.ReferenceLevel.TERRAIN
                                : geomSupport.getLocation().getAltitudeReference())));
        iconBuilder.setDataModelId(id);
        IconImageProvider iip = determineIconImageProvider(iconReg, geomSupport);
        iconBuilder.setImageManager(new ImageManager(null, iip));
        // Add a time constraint if in time line mode.
        Constraints constraints = null;
        if (mapVisInfo != null && basicVisInfo.getLoadsTo().isTimelineEnabled() && !geomSupport.getTimeSpan().isTimeless())
        {
            constraints = createTimeConstraints(tb, dti, geomSupport.getTimeSpan());
        }
        PointSpriteGeometry geom = new PointSpriteGeometry(iconBuilder, props, constraints);
        return geom;
    }

    /**
     * Determine icon image provider. Falls back to the registry's default icon
     * when the support's icon URL is malformed, and registers the icon with
     * the registry when it has not been seen before.
     *
     * @param iconReg the icon reg
     * @param geomSupport the geom support
     * @return the icon image provider
     */
    private static IconImageProvider determineIconImageProvider(IconRegistry iconReg, MapIconGeometrySupport geomSupport)
    {
        URL iconURL = null;
        try
        {
            iconURL = new URL(geomSupport.getIconURL());
        }
        catch (MalformedURLException e)
        {
            LOGGER.warn("Using default icon, failed to load icon url: " + geomSupport.getIconURL(), e);
            iconURL = IconRegistry.DEFAULT_ICON_URL;
        }
        IconRecord record = iconReg.getIconRecord(iconURL);
        if (record == null)
        {
            // First time this URL is seen: register it so later lookups hit.
            IconProvider ip = IconProviderFactory.create(iconURL, null, MapIconGeometryConverter.class.getName());
            record = iconReg.addIcon(ip, MapIconGeometryConverter.class);
        }
        return iconReg.getLoadedIconPool().getIconImageProvider(record, geomSupport.getImageProcessor());
    }

    /**
     * Gets the size render properties if available, if not creates a new one
     * and adds it to the share with the provided size.
     *
     * @param visState the vis state
     * @param mapVisInfo Data type level info relevant for rendering.
     * @param basicVisInfo Basic information for the data type.
     * @param renderPropertyPool the render property pool
     * @param geomSupport the geom support
     * @param c the c
     * @return the point size render properties if available
     */
    private static PointRenderProperties getIconSizeRenderPropertiesIfAvailable(VisualizationState visState,
            MapVisualizationInfo mapVisInfo, BasicVisualizationInfo basicVisInfo, RenderPropertyPool renderPropertyPool,
            MapIconGeometrySupport geomSupport, Color c)
    {
        PointSizeRenderProperty rp = getIconSizeRenderPropertiesIfAvailable(visState, renderPropertyPool, geomSupport);
        // Selected features draw on top of everything; otherwise use the data
        // type's z-order (1000 when no map visualization info is present).
        int zOrder = visState.isSelected() ? ZOrderRenderProperties.TOP_Z : mapVisInfo == null ? 1000 : mapVisInfo.getZOrder();
        boolean pickable = basicVisInfo != null && basicVisInfo.getLoadsTo().isPickable();
        PointRenderProperties prop = new DefaultPointRenderProperties(
                new DefaultBaseAltitudeRenderProperties(zOrder, true, pickable, false), rp);
        prop.setColor(c);
        prop.setRenderingOrder(visState.isSelected() ? 1 : 0);
        // Share equivalent property instances via the pool.
        prop = renderPropertyPool.getPoolInstance(prop);
        return prop;
    }

    /**
     * Gets the size render properties if available, if not creates a new one
     * and adds it to the share with the provided size.
     *
     * @param visState the vis state
     * @param renderPropertyPool the render property pool
     * @param geomSupport the geom support
     * @return the point size render properties if available
     */
    private static PointSizeRenderProperty getIconSizeRenderPropertiesIfAvailable(VisualizationState visState,
            RenderPropertyPool renderPropertyPool, MapIconGeometrySupport geomSupport)
    {
        PointSizeRenderProperty sizeRP;
        // Sizes below 1 are treated as "unset" and replaced by the default.
        float iconSize = geomSupport.getIconSize() < 1f ? MapIconGeometrySupport.DEFAULT_ICON_SIZE : geomSupport.getIconSize();
        float iconHighlightSize = geomSupport.getIconHighlightSize() < 1f ? MapIconGeometrySupport.DEFAULT_ICON_SIZE
                : geomSupport.getIconHighlightSize();
        if (geomSupport.getImageProcessor() != null)
        {
            /* There is an image processor, so let the image processor determine
             * the size of the image. Core will just leave it as is (scale it by
             * a factor of 1). The icon may be scaled down here to account for
             * over-scaling due to a large highlight size. */
            PointScaleRenderProperty scaleProperty = new PointScaleRenderProperty();
            float iconScale = iconHighlightSize > iconSize ? iconSize / iconHighlightSize : 1f;
            scaleProperty.setSize(iconScale);
            scaleProperty.setHighlightSize(1f);
            scaleProperty.setScaleFunction(geomSupport.getScaleFunction());
            sizeRP = scaleProperty;
        }
        else
        {
            sizeRP = new DefaultPointSizeRenderProperty();
            sizeRP.setSize(iconSize);
            sizeRP.setHighlightSize(iconHighlightSize);
        }
        sizeRP = renderPropertyPool.getPoolInstance(sizeRP);
        return sizeRP;
    }

    /**
     * Instantiates a new map point geometry factory.
     *
     * @param tb the {@link Toolbox}
     */
    public MapIconGeometryConverter(Toolbox tb)
    {
        super(tb);
    }

    @Override
    public AbstractRenderableGeometry createGeometry(MapGeometrySupport geomSupport, long id, DataTypeInfo dti,
            VisualizationState visState, RenderPropertyPool renderPropertyPool)
    {
        IconRegistry iconReg = MantleToolboxUtils.getMantleToolbox(getToolbox()).getIconRegistry();
        if (getConvertedClassType().isAssignableFrom(geomSupport.getClass()))
        {
            MapIconGeometrySupport localSupport = (MapIconGeometrySupport)geomSupport;
            return MapIconGeometryConverter.convert(getToolbox(), iconReg, localSupport, id, dti, visState, renderPropertyPool);
        }
        throw new IllegalArgumentException("MapGeometrySupport \"" + geomSupport.getClass().getName()
                + "\" is not an instance of \"" + getConvertedClassType().getName() + "\"");
    }

    @Override
    public Class<?> getConvertedClassType()
    {
        return MapIconGeometrySupport.class;
    }
}
import Component from '@ember/component';
import layout from 'ember-medium-editor/templates/components/me-image-dragging';
import { not } from '@ember/object/computed';

/**
 * `me-image-dragging` — tagless component that toggles medium-editor's
 * image-dragging extension. `enabled` (default true) may be passed as the
 * first positional param: {{me-image-dragging false}}.
 */
export default Component.extend({
  layout,
  // Tagless: the component renders no wrapping DOM element of its own.
  tagName: '',
  enabled: true,
  // Computed inverse of `enabled`, consumed by the template/extension.
  disabled: not('enabled')
}).reopenClass({
  // Map the first positional argument onto the `enabled` property.
  positionalParams: ['enabled']
});
<reponame>seven-eXe/GDHACK<filename>GeometryDashHack/Entry.hpp #pragma once #include <iostream> #include <windows.h> #include <string> #include <TlHelp32.h> #include "cheatFuncs.hpp" struct cInfo { HANDLE hProcess; uintptr_t baseAddress; DWORD PlayerPTR; }; extern cInfo* INFO; #define GEOMETRY_TOUCH_OFFSET 0x20A23C #define GEOMETRY_MUSIC_HACK_OFFSET 0x3222E4 #define GEOMETRY_JUMP_OFFSET 0x1E9498 #define GEOMETRY_PRACTICE_MUSIC_OFFSET_1 0x20C925 #define GEOMETRY_PRACTICE_MUSIC_OFFSET_2 0x20D143 #define GEOMETRY_PRACTICE_MUSIC_OFFSET_3 0x20A563 #define GEOMETRY_PRACTICE_MUSIC_OFFSET_4 0x20A595 #define GEOMETRY_ICONS_OFFSET 0xC50A8 #define GEOMETRY_PLAYER_PTR_OFFSET INFO->baseAddress + 0x3222D0 #define GEOMETRY_PLAYER_NAME_LENGHT_OFFSET 0x1A8 #define GEOMETRY_PLAYER_NAME_OFFSET 0x198 #define GEOMETRY_UNLOCK_LEVELS_OFFSET 0x188CE1 #define GEOMETRY_SHOPHACK_1_OFFSET 0xF33BB #define GEOMETRY_SHOPHACK_2_OFFSET 0x14B339 #define GEOMETRY_SECRET_LVL_OFFSET 0x2214E0 // The anticheat system (lol) #define GEOMETRY_PATCH_SPEEDHACK_AC 0x202AAA #define GEOMETRY_PATCH_RESET_AC 0x20C4E6 #define GEOMETRY_PATCH_LOAD_LEVEL_AC 0x18B2B4 #define GEOMETRY_PATCH_LEVEL_KICK_AC 0x20D3B3 #define GEOMETRY_PATCH_LEVEL_KICK_AC_1 0x1FF7A2 #define GEOMETRY_PATCH_LEVEL_COMP_AC 0x1FD557 #define GEOMETRY_PATCH_LEVEL_COMP_AC_2 0x1FD742 #define GEOMETRY_PATCH_LEVEL_COMP_AC_3 0x1FD756 #define GEOMETRY_PATCH_LEVEL_COMP_AC_4 0x1FD79A #define GEOMETRY_PATCH_LEVEL_COMP_AC_5 0x1FD7AF #define GEOMETRY_PATCH_EDITOR_AC 0x15FC2E
#!/usr/bin/env bash
# Launch the application server via sbt, binding it to localhost.
sbt 'server/runMain app.server.Main localhost'
// Fixed: `fs` was used without being required, so the first readFileSync
// call threw a ReferenceError.
const fs = require('fs');

/**
 * Naively bundles a set of re-export ("barrel") statements into one string.
 * Each statement of the form `export { Name } from 'path';` maps a module
 * name to its file path; dependencies found inside each module's source are
 * resolved depth-first (each module is included exactly once).
 *
 * @param {string[]} importStatements - re-export statements to bundle
 * @returns {string} concatenated source of all reachable modules
 */
function bundleModules(importStatements) {
  const EXPORT_RE = /export { (.+) } from '(.+)';/;
  const modules = {};

  // Parse import statements and store module names and paths.
  importStatements.forEach((statement) => {
    const match = statement.match(EXPORT_RE);
    if (!match) return; // fixed: skip non-matching lines instead of crashing
    const [, moduleName, modulePath] = match;
    modules[moduleName] = modulePath;
  });

  let bundledCode = '';
  const visitedModules = new Set();

  // Depth-first resolution; visitedModules guards against cycles/duplicates.
  function resolveDependencies(moduleName) {
    if (visitedModules.has(moduleName)) return;
    visitedModules.add(moduleName);

    const modulePath = modules[moduleName];
    if (!modulePath) return; // fixed: unknown dependency name — nothing to read

    const moduleCode = fs.readFileSync(modulePath, 'utf8');

    // Recursively resolve re-exports found inside the module's own code.
    const dependencyStatements = moduleCode.match(/export { (.+) } from '(.+)';/g) || [];
    dependencyStatements.forEach((dependencyStatement) => {
      const depMatch = dependencyStatement.match(EXPORT_RE);
      if (depMatch) resolveDependencies(depMatch[1]);
    });

    bundledCode += moduleCode + '\n';
  }

  // Start resolving from every entry module.
  Object.keys(modules).forEach((moduleName) => resolveDependencies(moduleName));
  return bundledCode;
}

// Example usage
const importStatements = [
  "export { Credentials } from './Credentials';",
  "export { ScanInfo } from './ScanInfo';",
  "export { CustomValidators, REGEX_AUTH_TOKEN, REGEX_URL } from './CustomValidators';"
];
const bundledCode = bundleModules(importStatements);
console.log(bundledCode);
#
# extract_hourly.py
#
# Authors:
# <NAME>
# <NAME>
#
# This file extracts the features for each hourly time-series bucket
# by applying the following functions to the readings during that hour:
# [(' min',min), (' max',max), (' mean',np.mean),
#  (' std',np.std), (' skew',skew)]

import pandas as pd
import os
import numpy as np
import os  # NOTE(review): duplicate of the import above — harmless but redundant
from scipy.stats import skew
import directories

# Source episodes, per-patient feature output, and final time-series output dirs.
data_source_dir = directories.episode_data
data_target_dir = "c/"
data_ts_dir = directories.processed_data


def isfloat(value):
    # True when `value` parses as a float (used to drop non-numeric readings).
    try:
        float(value)
        return True
    except ValueError:
        return False


# (label, aggregator) pairs applied to each hour bucket of each column.
all_functions = [(' min',min), (' max',max), (' mean',np.mean),
                 (' std',np.std), (' skew',skew), (' not_null_len',len)]

functions_map = {
    "all": all_functions,
    "len": [len],
    "all_but_len": all_functions[:-1]
}

# Named sub-periods of an episode (mode flags + hour bounds / percentages).
periods_map = {
    "all": (0, 0, 1, 0),
    "first4days": (0, 0, 0, 4*24),
    "first8days": (0, 0, 0, 8*24),
    "last12hours": (1, -12, 1, 0),
    "first25percent": (2, 25),
    "first50percent": (2, 50)
}

patient_ids = os.listdir(data_source_dir)
len(patient_ids)  # NOTE(review): no-op expression (notebook leftover)

# Imputes time-series data for asthma patients (backward, then forward fill).
count_patient = 0
for i in range(0,len(patient_ids)):
    patient_id=patient_ids[i]
    patient_target_dir = data_target_dir + patient_id
    if not os.path.exists(patient_target_dir):
        os.makedirs(patient_target_dir)
    # Raw per-episode time series, excluding already-extracted outputs.
    episode_timeseries = [x for x in os.listdir(data_source_dir+patient_id) \
                          if ('timeseries' in x) and ('extracted_values') not in x]
    for episode in episode_timeseries:
        episode_pd = pd.read_csv(data_source_dir+patient_id+'/'+episode)
        event_hour_list = []
        # One feature dict per hour bucket over the first 48 hours.
        for hours in range(0,48):
            episode_hour_dict={}
            curr_event_pd = episode_pd[(episode_pd['Hours']>=hours) & (episode_pd['Hours']<hours+1)]
            for col in curr_event_pd.columns:
                if col!='Hours':
                    # Keep only numeric, non-null readings for this column/hour.
                    curr_array = curr_event_pd[curr_event_pd[col].notnull()][col].values
                    curr_array = [float(x) for x in curr_array if isfloat(x)]
                    for function in all_functions:
                        column = col+str(function[0])
                        if len(curr_array)!=0:
                            episode_hour_dict[column]=np.apply_along_axis(function[1], 0, curr_array)
                        else:
                            # No readings this hour: feature defaults to 0.
                            episode_hour_dict[column]=0
                else:
                    episode_hour_dict[col]=hours
            event_hour_list.append(episode_hour_dict)
        event_hour_df = pd.DataFrame(event_hour_list)
        # Impute every feature column except the not-null counts:
        # backward fill first, then forward fill, then 0 for anything left.
        for col in event_hour_df.columns:
            if 'not_null_len' not in col:
                event_hour_df[col] = event_hour_df[col].fillna(method = 'backfill')
                event_hour_df[col] = event_hour_df[col].fillna(method = 'ffill')
        event_hour_df=event_hour_df.fillna(0)
        event_hour_df.head()  # NOTE(review): no-op expression (notebook leftover)
        event_hour_df.to_csv(patient_target_dir+'/extracted_values_'+episode)
    count_patient = count_patient + 1
    if count_patient%10==0:
        print("Processed Patents {} out of {}".format(count_patient,len(patient_ids)))

# Patients for which no extracted output was produced.
not_extracted_ids = []
for i in range(0,len(patient_ids)):
    curr_files=os.listdir(data_target_dir+patient_ids[i])
    if all("extracted_values" not in file for file in curr_files) :
        not_extracted_ids.append((patient_ids[i],i))

# Patients missing an extracted first-stay episode.
not_first_stay = []
for i in range(0,len(patient_ids)):
    curr_files=os.listdir(data_source_dir+patient_ids[i])
    if all("extracted_valuesepisode1_timeseries" not in file for file in curr_files) :
        not_first_stay.append((patient_ids[i],i))

#os.listdir(data_source_dir+"1620")
#episode_pd = pd.read_csv(data_source_dir+"1620"+"/episode1_timeseries.csv")
#len(episode_pd.columns)

# Record the patient ids that were never written out.
patient_ids_not_written_f = open(directories.processed_csv+'patient_ids_not_written.txt', 'w')
for patient_id,_ in not_extracted_ids:
    patient_ids_not_written_f.write("%s\n" % patient_id)
patient_ids_not_written_f.close()

os.getcwd()  # NOTE(review): no-op expression (notebook leftover)

# NOTE(review): `file` shadows the builtin of the same name; also this handle
# is never closed.
file = open("processed_csv/patient_ids_not_written.txt", "r")
print(file.read())

# copy first occurrence to another directory
import shutil
curr_dir = data_target_dir
new_dir = data_ts_dir
for patient in patient_ids:
    ls = os.listdir(curr_dir+patient)
    extracted_ts = [x for x in ls if 'extracted_values_episode' in x ]
    if len(extracted_ts) == 0:
        print("no episode found for patiend_if = {}".format(patient))
    else:
        if not os.path.exists(new_dir+patient):
            os.makedirs(new_dir+patient)
        # First extracted episode becomes the canonical ts1.csv for the patient.
        shutil.copy(curr_dir+patient+"/"+extracted_ts[0],new_dir+patient+"/ts1.csv")
const express = require('../lib/express');
const path = require('path');
const html = require('../lib/html');
const app = express();
const fs = require('fs');

// 'views' is the root directory where templates are stored.
app.set('views', path.resolve('views'));
// Default template extension: appended when render() gets a name without one.
app.set('view engine', 'html');
// Register the engine for '.html' templates.
// Engine signature (like require('ejs').__express): render(filepath, options, callback)
app.engine('.html', html);

// Attach a res.render(name, options) helper to every request.
app.use(function (req, res, next) {
    res.render = function (name, options) {
        let ext = '.' + app.get('view engine');
        name = name.indexOf('.') !== -1 ? name : name + ext;
        let filepath = path.join(app.get('views'), name);
        let render = app.engines[ext];
        function done(err, html) {
            // Fixed: the error argument was previously ignored, so a failed
            // render responded 200 with the literal body "undefined".
            if (err) {
                res.statusCode = 500;
                res.end('Internal Server Error');
                return;
            }
            res.setHeader('Content-Type', 'text/html');
            res.end(html);
        }
        render(filepath, options, done);
    };
    next();
});

// GET / renders the index template with a small view model.
app.get('/', function (req, res, next) {
    // render(templateName, viewModel): path is resolved against 'views'.
    res.render('index', { title: 'hello', user: { name: 'zfpx' } });
});

app.listen(3000);
/*
 * Copyright (c) Open Source Strategies, Inc.
 *
 * Opentaps is free software: you can redistribute it and/or modify it
 * under the terms of the GNU Affero General Public License as published
 * by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Opentaps is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with Opentaps.  If not, see <http://www.gnu.org/licenses/>.
 */
package org.opentaps.tests;

import org.ofbiz.base.util.UtilMisc;

import java.util.Map;
import java.math.BigDecimal;

/**
 * Global test cases for opentaps, and meta tests.  Test things like
 * the unit test methods themselves here.
 */
public class OpentapsTests extends OpentapsTestCase {

    /**
     * Meta-test for the inherited assertMapDifferenceCorrect helper: verifies
     * it computes (final - initial) per key across mixed numeric types
     * (Double, Integer, BigDecimal, String, and a null treated as zero).
     */
    public void testAssertFieldDifference() {
        Map initialMap = UtilMisc.toMap("one", new Double(1.0), "two", new Double(1.1), "three", new BigDecimal("-0.1"));
        Map finalMap = UtilMisc.toMap("one", new Integer(5), "two", null, "three", new BigDecimal("10.00000"));
        // Expected differences are deliberately given in yet other types to
        // exercise the helper's numeric normalization.
        Map expectedMap = UtilMisc.toMap("one", new BigDecimal("4"), "two", "-1.1", "three", new Double(10.1));

        assertMapDifferenceCorrect(initialMap, finalMap, expectedMap);
    }
}
#!/bin/bash

# Resolve the XDG data directory, defaulting per the XDG Base Directory spec
# (idiomatic ${VAR:-default} replaces the explicit -z test; semantics are
# identical: unset or empty falls back to the default).
XDG_DATA_HOME="${XDG_DATA_HOME:-$HOME/.local/share}"

# Only proceed when the data directory actually exists.
if [ -d "$XDG_DATA_HOME" ]; then
    # Initialize the basalt package manager in this shell.
    # NOTE(review): 'zsh' is passed although this script runs under bash —
    # confirm that is intentional.
    eval "$(basalt global init zsh)"

    # Install the "pkg" package only if basalt's binary directory for it
    # already exists within the data directory.
    if [ -d "$XDG_DATA_HOME/basalt/source/pkg/bin" ]; then
        basalt install pkg
    fi
fi
import random

# Draw one uniformly random integer in the inclusive range [1, 10]
# and print it to stdout.
random_number = random.randint(1, 10)
print(random_number)
// mindustry/io/versions/LegacyTypeTable.java (repo: zonesgame/StendhalArcClient)
package mindustry.io.versions;

import arc.func.Prov;
import mindustry.entities.type.Bullet;
import mindustry.entities.effect.*;
import mindustry.entities.type.Player;
import mindustry.entities.type.base.*;

/*
Latest data: [build 81]

0 = Player
1 = Fire
2 = Puddle
3 = MinerDrone
4 = RepairDrone
5 = BuilderDrone
6 = GroundUnit
7 = GroundUnit
8 = GroundUnit
9 = GroundUnit
10 = GroundUnit
11 = FlyingUnit
12 = FlyingUnit
13 = Revenant

Before removal of lightining/bullet: [build 80]

0 = Player
1 = Fire
2 = Puddle
3 = Bullet
4 = Lightning
5 = MinerDrone
6 = RepairDrone
7 = BuilderDrone
8 = GroundUnit
9 = GroundUnit
10 = GroundUnit
11 = GroundUnit
12 = GroundUnit
13 = FlyingUnit
14 = FlyingUnit
15 = Revenant

Before addition of new units: [build 79 and below]

0 = Player
1 = Fire
2 = Puddle
3 = Bullet
4 = Lightning
5 = RepairDrone
6 = GroundUnit
7 = GroundUnit
8 = GroundUnit
9 = GroundUnit
10 = GroundUnit
11 = FlyingUnit
12 = FlyingUnit
13 = BuilderDrone
14 = Revenant
*/
/**
 * Maps legacy save-file entity type IDs to entity constructors, per game
 * build.  Array INDEX is the wire type ID, so the ORDER of each table is
 * load-bearing: changing it breaks deserialization of old saves.
 */
public class LegacyTypeTable{

    /*
    0 = Player
    1 = Fire
    2 = Puddle
    3 = Draug
    4 = Spirit
    5 = Phantom
    6 = Dagger
    7 = Crawler
    8 = Titan
    9 = Fortress
    10 = Eruptor
    11 = Wraith
    12 = Ghoul
    13 = Revenant
    */
    private static final Prov[] build81Table = {
        Player::new,
        Fire::new,
        Puddle::new,
        MinerDrone::new,
        RepairDrone::new,
        BuilderDrone::new,
        GroundUnit::new,
        GroundUnit::new,
        GroundUnit::new,
        GroundUnit::new,
        GroundUnit::new,
        FlyingUnit::new,
        FlyingUnit::new,
        HoverUnit::new
    };

    // Build 80: still contains serialized Bullet/Lightning entities.
    private static final Prov[] build80Table = {
        Player::new,
        Fire::new,
        Puddle::new,
        Bullet::new,
        Lightning::new,
        MinerDrone::new,
        RepairDrone::new,
        BuilderDrone::new,
        GroundUnit::new,
        GroundUnit::new,
        GroundUnit::new,
        GroundUnit::new,
        GroundUnit::new,
        FlyingUnit::new,
        FlyingUnit::new,
        HoverUnit::new
    };

    // Build 79 and below: pre-dates MinerDrone; BuilderDrone sits near the end.
    private static final Prov[] build79Table = {
        Player::new,
        Fire::new,
        Puddle::new,
        Bullet::new,
        Lightning::new,
        RepairDrone::new,
        GroundUnit::new,
        GroundUnit::new,
        GroundUnit::new,
        GroundUnit::new,
        GroundUnit::new,
        FlyingUnit::new,
        FlyingUnit::new,
        BuilderDrone::new,
        HoverUnit::new
    };

    /**
     * Selects the constructor table for a save-file build number.
     * -1 (unknown build) falls through to the most recent table.
     */
    public static Prov[] getTable(int build){
        if(build == -1 || build == 81){
            //return most recent one since that's probably it; not guaranteed
            return build81Table;
        }else if(build == 80){
            return build80Table;
        }else{
            return build79Table;
        }
    }
}
// Returns the default parameter object for a CaptainForm popup.
// NOTE(review): the key `pupup_body` looks like a typo for `popup_body`,
// but it is a data key that consumers may read — confirm before renaming.
function captainform_popup_default(){
    var popup_params={
        popup_url: '',
        popup_w: 500,
        popup_h: 250,
        popup_title: '',
        pupup_body: '',
        popup_buttons: [],
        action_name:'',
        action_params: []
    }
    return popup_params;
}

// Holds the popup iframe element so iFrameResize callbacks can reach it.
var element2;

// Builds and shows the form popup: injects the overlay/iframe markup,
// locks page scrolling, and wires up iFrameResize so the popup tracks the
// embedded form's height.
// NOTE(review): url, w, style_ios, htm, ppi, msg_pp, etc. are implicit
// globals (no var/let) — intentional sharing or an oversight? Verify.
function captainform_create_form_popup(msg) {
    // Remove any previous popup instance before building a new one.
    jQuery('#captainform_popup_form').remove();
    url = msg.url;
    w = msg.popup_w;
    document.documentElement.style.overflow = 'hidden'; // firefox, chrome
    document.body.scroll = "no"; // ie only
    style_ios = '';
    if (captainform_is_ios()) {
        // iOS needs momentum scrolling inside the popup body.
        style_ios = ' style="-webkit-overflow-scrolling: touch"';
    }
    // Assemble the overlay markup: background, loader, box, close button,
    // and the iframe that hosts the actual form.
    htm = '';
    htm += '<div id="captainform_popup_form" onclick="close_popup_fx()" class="captainform_popup_bg_form">';
    htm += '<div id="cfloader" class="captainform_loader_form"></div>';
    htm += '<div id="popup_box_fx" class="captainform_popup_box_form">';
    htm += '<div class="close_cnt"><div id="xclose" class="captainform_popup_close_form" onclick="close_popup_fx()"></div></div>';
    htm += '<div id="popup_body_fx" class="captainform_popup_body_form"' + style_ios + '><iframe id="ppiframefx" src="' + url + '" class="popup_iframe_form" scrolling="no"></iframe></div>';
    htm += '</div>';;
    htm += '</div>';
    // NOTE(review): this lookup runs before the markup is appended, so ppi
    // is always null and never used afterwards — likely dead code.
    ppi = document.getElementById('ppiframefx');
    jQuery('body').append(htm);
    jQuery('#popup_box_fx').width(w);
    jQuery('#popup_box_fx').height(0);
    jQuery('#captainform_popup_form').show();
    // Reveal and size the box only once the iframe has loaded.
    jQuery("#ppiframefx").on("load", function () {
        jQuery('#popup_box_fx').show();
        resize_popup_iframe_fx();
    });
    // Clicks inside the box must not bubble to the overlay's close handler.
    jQuery("#popup_box_fx").on("click", function (e) {
        e.preventDefault();
        return false;
    });
    element2 = document.getElementById('ppiframefx');
    var isOldIE = (navigator.userAgent.indexOf("MSIE") !== -1); // Detect IE10 and below
    iFrameResize({
        log: false,
        scrolling: false,
        enablePublicMethods: true,
        checkOrigin: false,
        heightCalculationMethod: isOldIE ? 'max' : 'documentElementOffset', // old way: 'max' is mandatory for IE8
        // Keep the popup box in sync with the embedded form's height and
        // drop the loading spinner once the first resize arrives.
        resizedCallback: function (messageData) {
            hh = messageData.height;
            hhf = parseInt(hh) - 0;
            iframe_height = hhf;
            jQuery('#popup_box_fx').height(hhf);
            resize_popup_iframe_fx();
            jQuery('#cfloader').remove();
        },
        scrollCallback: function () {
        },
        messageCallback: function (messageData) {
            // Callback fn when message is received
        }
    }, element2);
}

// Clamps the popup box to the viewport (50px margin on each axis).
function resize_popup_iframe_fx() {
    if (jQuery('#ppiframefx').length == 0) {
        return false;
    }
    max_h = jQuery(window).height() - 50;
    jQuery('#popup_box_fx').css('max-height', max_h + 'px');
    max_w = jQuery(window).width() - 50;
    jQuery('#popup_box_fx').css('max-width', max_w + 'px');
    jQuery('#popup_body_fx').css('max-height', jQuery('#popup_box_fx').height() + 'px');
    // NOTE(review): diff is computed but never used; the delay/queue below
    // only waits 500ms and does nothing — possibly leftover code.
    diff = jQuery('#ppiframefx').outerHeight() - jQuery('#popup_box_fx').outerHeight();
    jQuery('#popup_body_fx').delay(500).queue(function(next){
        next();
    });
}

// Detects iOS devices via navigator.platform string matching.
function captainform_is_ios() {
    var iDevices = [
        'iPad Simulator',
        'iPhone Simulator',
        'iPod Simulator',
        'iPad',
        'iPhone',
        'iPod'
    ];
    if (!!navigator.platform) {
        while (iDevices.length) {
            if (navigator.platform === iDevices.pop()){
                return true;
            }
        }
    }
    return false;
}

// Tears down the popup and restores page scrolling.
function close_popup_fx() {
    jQuery('#captainform_popup_form').remove();
    //jQuery("window").css("overflow", "auto");
    document.documentElement.style.overflow = 'auto'; // firefox, chrome
    document.body.scroll = "yes"; // ie only
}

// Placeholder handler for payment iframes.
function resize_payment_fx() {
    alert('Is payment!');
}

// Re-clamp the popup whenever the browser window is resized.
window.onresize = function(event) {
    resize_popup_iframe_fx();
};

// Handles postMessage traffic from the embedded form: width updates and
// requests to open a preview popup.
window.addEventListener('message', function(e){
    msg_pp = e.data;
    w=parseInt(msg_pp.fwidth);
    if ( jQuery('#popup_box_fx').length && msg_pp.hasOwnProperty('fwidth') ) {
        jQuery('#popup_box_fx').css('width',w+'px');
        jQuery('#popup_body_fx').off('scrollTo').scrollTo(jQuery('#ppiframefx'), 300);
    }
    if ( msg_pp.hasOwnProperty('msgpreviewpopup') ) {
        captainform_create_form_popup({url: 'https://app.captainform.com/form-'+ msg_pp.msgpreviewpopup +'/?style=preview_iframe:1', popup_w: 1000})
    }
});

// jQuery helper: animate this container's scrollTop so `elem` is in view.
jQuery.fn.scrollTo = function(elem, speed) {
    jQuery(this).animate({
        scrollTop: jQuery(this).scrollTop() - jQuery(this).offset().top + jQuery(elem).offset().top
    }, speed == undefined ? 500 : speed);
    return this;
};
/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "runtime/include/thread.h"
#include "runtime/include/method.h"

#include <gtest/gtest.h>

namespace panda {

// Asserts that `member` sits at the running `offset` inside packed storage
// `klass`, then advances `offset` by the storage's element alignment.
// Relies on a local `size_t offset` being in scope at the expansion site.
#define CHECK_OFFSET(klass, member)                      \
    do {                                                 \
        ASSERT_EQ(MEMBER_OFFSET(klass, member), offset); \
        offset += klass::ELEMENTS_ALIGN;                 \
    } while (0)

// Verifies the field layout of ManagedThread's packed storages: these
// offsets are baked into generated/assembly code, so any reordering of the
// members must fail here first.
TEST(Offsets, Thread)
{
    size_t offset = 0;
    // stor_32_ starts right after the vtable pointer (if any); the
    // multiplication yields 0 for non-polymorphic layouts.
    ASSERT_EQ(MEMBER_OFFSET(ManagedThread, stor_32_),
              std::is_polymorphic_v<ManagedThread> * sizeof(uint64_t));
    CHECK_OFFSET(ManagedThread::StoragePacked32, is_compiled_frame_);
    CHECK_OFFSET(ManagedThread::StoragePacked32, fts_);
    // All 32-bit slots must be accounted for.
    ASSERT_EQ(ManagedThread::StoragePacked32::ELEMENTS_NUM * ManagedThread::StoragePacked32::ELEMENTS_ALIGN, offset);

    offset = 0;
    // The pointer storage follows immediately after the 32-bit storage.
    ASSERT_EQ(MEMBER_OFFSET(ManagedThread, stor_ptr_),
              MEMBER_OFFSET(ManagedThread, stor_32_) + ManagedThread::StoragePacked32::GetSize());
    CHECK_OFFSET(ManagedThread::StoragePackedPtr, object_);
    CHECK_OFFSET(ManagedThread::StoragePackedPtr, frame_);
    CHECK_OFFSET(ManagedThread::StoragePackedPtr, exception_);
    CHECK_OFFSET(ManagedThread::StoragePackedPtr, native_pc_);
    CHECK_OFFSET(ManagedThread::StoragePackedPtr, tlab_);
    CHECK_OFFSET(ManagedThread::StoragePackedPtr, card_table_addr_);
    CHECK_OFFSET(ManagedThread::StoragePackedPtr, card_table_min_addr_);
    CHECK_OFFSET(ManagedThread::StoragePackedPtr, concurrent_marking_addr_);
    CHECK_OFFSET(ManagedThread::StoragePackedPtr, string_class_ptr_);
    ASSERT_EQ(ManagedThread::StoragePackedPtr::ELEMENTS_NUM * ManagedThread::StoragePackedPtr::ELEMENTS_ALIGN, offset);
}

// Same layout verification for Method's packed storages.
TEST(Offsets, Method)
{
    size_t offset = 0;
    ASSERT_EQ(MEMBER_OFFSET(Method, stor_32_), std::is_polymorphic_v<Method> * sizeof(uint64_t));
    CHECK_OFFSET(Method::StoragePacked32, access_flags_);
    CHECK_OFFSET(Method::StoragePacked32, vtable_index_);
    CHECK_OFFSET(Method::StoragePacked32, num_args_);
    CHECK_OFFSET(Method::StoragePacked32, hotness_counter_);
    ASSERT_EQ(Method::StoragePacked32::ELEMENTS_NUM * Method::StoragePacked32::ELEMENTS_ALIGN, offset);

    offset = 0;
    ASSERT_EQ(MEMBER_OFFSET(Method, stor_ptr_),
              MEMBER_OFFSET(Method, stor_32_) + Method::StoragePacked32::GetSize());
    CHECK_OFFSET(Method::StoragePackedPtr, class_);
    CHECK_OFFSET(Method::StoragePackedPtr, compiled_entry_point_);
    CHECK_OFFSET(Method::StoragePackedPtr, native_pointer_);
    ASSERT_EQ(Method::StoragePackedPtr::ELEMENTS_NUM * Method::StoragePackedPtr::ELEMENTS_ALIGN, offset);
}

}  // namespace panda
package component;

import java.awt.Color;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.awt.event.ActionListener;

import javax.swing.BorderFactory;
import javax.swing.ButtonGroup;
import javax.swing.Icon;
import javax.swing.JComponent;
import javax.swing.JToggleButton;
import javax.swing.border.TitledBorder;

/**
 * Pallet that is a grid of buttons associated with a single data.
 * Buttons are mutually exclusive (one ButtonGroup), laid out row by row;
 * icons[i] and actions[i] are paired and consumed in row-major order, so
 * both arrays must contain at least nbCols * nbRows entries.
 */
public class ButtonPallet extends JComponent {

	private static final long serialVersionUID = 1L;

	// Ensures at most one button in the pallet is selected at a time.
	private ButtonGroup group = new ButtonGroup();
	private final int nbCols;
	private final int nbRows;
	// Buttons stored column-first: tab[col][row].
	private JToggleButton[][] tab;

	/**
	 * @param icons list of icons displayed
	 * @param actions list of listeners associated to icons
	 * @param width of pallet in pixels
	 * @param height of pallet in pixels
	 * @param nbCols number of icons in a row
	 * @param nbRows number of icons in a column
	 * @param title of the pallet
	 */
	public ButtonPallet(Icon[] icons, ActionListener[] actions, int width, int height,
			int nbCols, int nbRows, String title){
		this.setLayout(new GridLayout(nbRows, nbCols));
		this.setPreferredSize(new Dimension(width, height));
		this.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
		this.nbCols = nbCols;
		this.nbRows = nbRows;
		this.tab = new JToggleButton[nbCols][nbRows];
		// i walks icons/actions in row-major order while buttons are stored
		// as tab[col][row].
		int i = 0;
		for (int r = 0 ; r < nbRows ; r++) {
			for (int c = 0 ; c < nbCols ; c++) {
				JToggleButton button = new JToggleButton(icons[i]);
				button.setPreferredSize(new Dimension(30, 30));
				button.addActionListener(actions[i]);
				tab[c][r] = button;
				group.add(button);
				add(button);
				i++;
			}
		}
		this.setBorder(BorderFactory.createTitledBorder(
				BorderFactory.createCompoundBorder(),
				title,
				TitledBorder.CENTER,
				TitledBorder.BOTTOM,
				null,
				Color.BLACK)
		);
	}

	/**
	 * Clear the selection
	 */
	public void clear() {
		group.clearSelection();
	}

	/**
	 * @param col of the button
	 * @param row of the button
	 * @return the button at the specified position
	 */
	public JToggleButton getButton(int col, int row){
		return tab[col][row];
	}

	/**
	 * @return the buttons, flattened in row-major order
	 */
	public JToggleButton[] getButtons(){
		JToggleButton[] buttons = new JToggleButton[group.getButtonCount()];
		int i = 0;
		for (int r = 0 ; r < nbRows ; r++) {
			for (int c = 0 ; c < nbCols ; c++) {
				buttons[i++] = tab[c][r];
			}
		}
		return buttons;
	}
}
def generateRadioButtonCSS(size, colorPalette, checked): css_code = f"width: {size}px;\nheight: {size}px;\nborder: 3px solid palette({colorPalette});\nborder-radius: 7px;\n}}\n\nQRadioButton::indicator:checked {{\n /* Indicator style for checked state */\n}}" return css_code
// Implementation of the shift_left function pub fn shift_left(&mut self, data: impl Source<u8> + Target<u8>) { self.curr_instr = "SLA ".to_string() + &data.to_string(); let (byte, overflow) = data.read(self).overflowing_shl(1); data.write(self, byte); } // Implementation of the Source<u8> trait trait Source<T> { fn read(&self, cpu: &CPU) -> T; } // Implementation of the Target<u8> trait trait Target<T> { fn write(&self, cpu: &CPU, value: T); } // Implementation of the memory structure struct Memory { // Define the memory structure and its associated methods } // Implement the Source<u8> trait for the Memory structure impl Source<u8> for Memory { fn read(&self, cpu: &CPU) -> u8 { // Implement the logic to read data from the memory } } // Implement the Target<u8> trait for the Memory structure impl Target<u8> for Memory { fn write(&self, cpu: &CPU, value: u8) { // Implement the logic to write data to the memory } }
<reponame>Logub/logub<filename>frontend/models/dto/FieldSearchDto.ts import {LogLevel} from "~/models/LogLevel"; export enum FieldTypeDto { Tag, FullText, Geo, Numeric } export interface FieldSearchDto { type: FieldTypeDto; name?: string; values: string[]; negation: boolean; }
<reponame>quventurets/WUKS<gh_stars>0 require 'test_helper' class CareerControllerTest < ActionDispatch::IntegrationTest test "should get top" do get career_top_url assert_response :success end end
#!/bin/sh # Copyright (c) 2014-2015 The Coinbit Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. set -e ROOTDIR=dist BUNDLE="${ROOTDIR}/Coinbit-Qt.app" CODESIGN=codesign TEMPDIR=sign.temp TEMPLIST=${TEMPDIR}/signatures.txt OUT=signature-osx.tar.gz OUTROOT=osx if [ ! -n "$1" ]; then echo "usage: $0 <codesign args>" echo "example: $0 -s MyIdentity" exit 1 fi rm -rf ${TEMPDIR} ${TEMPLIST} mkdir -p ${TEMPDIR} ${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}" grep -v CodeResources < "${TEMPLIST}" | while read i; do TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`" SIZE=`pagestuff "$i" -p | tail -2 | grep size | sed 's/[^0-9]*//g'` OFFSET=`pagestuff "$i" -p | tail -2 | grep offset | sed 's/[^0-9]*//g'` SIGNFILE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}.sign" DIRNAME="`dirname "${SIGNFILE}"`" mkdir -p "${DIRNAME}" echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}" dd if="$i" of="${SIGNFILE}" bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null done grep CodeResources < "${TEMPLIST}" | while read i; do TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`" RESOURCE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}" DIRNAME="`dirname "${RESOURCE}"`" mkdir -p "${DIRNAME}" echo "Adding resource for: "${TARGETFILE}"" cp "${i}" "${RESOURCE}" done rm ${TEMPLIST} tar -C "${TEMPDIR}" -czf "${OUT}" . rm -rf "${TEMPDIR}" echo "Created ${OUT}"
<reponame>RTM-Frank/hotspot-app import { MakerAntenna } from '../antennaMakerTypes' const CLOUDGATE = { name: 'CloudGate', gain: 1, } as MakerAntenna export default { CLOUDGATE }
. ./host.conf file=$(find . -iname "*.jar*") echo $file curl -X POST -H "Expect:" -F "jarfile=@$file" $host/jars/upload
<gh_stars>0 /* Letter Case Permutation Examples: Input: S = "a1b2" Output: ["a1b2", "a1B2", "A1b2", "A1B2"] Input: S = "3z4" Output: ["3z4", "3Z4"] Input: S = "12345" Output: ["12345"] */ #include <iostream> #include <string> #include <vector> #include <stdio.h> #include <ctype.h> using namespace std; void combinations (string S, int i, vector<string> &v) { string aux; char c = S[i]; c = (islower(c)) ? toupper(c) : tolower(c); S[i] = c; v.push_back(S); for (int j=0; j<v.size(); j++) { aux = v[j].substr(0,i); aux.insert(i+1, S.substr(i+1, S.length()-1)); } } int main() { string S = "aaa"; /* S = aaa; v = aaa 0 Aaa 1 aAa 2 AAa 3 aaA 4 AaA 5 aAA 6 AAA 7 */ //-------------------------------------------------------------------- std::vector<string> v; v.push_back(S); int i=0; while (S[i]) { if (isalpha(S[i])) combinations(S, i, v); i++; } //return v; //-------------------------------------------------------------------- for (int i=0; i<v.size(); i++) { cout << v[i] << endl; } return 0; }
package com.nepxion.discovery.console.desktop.toggle; /** * <p>Title: Nepxion Discovery</p> * <p>Description: Nepxion Discovery</p> * <p>Copyright: Copyright (c) 2017-2050</p> * <p>Company: Nepxion</p> * @author <NAME> * @version 1.0 */ import java.awt.BorderLayout; import javax.swing.JComponent; import javax.swing.JPanel; import com.nepxion.discovery.console.desktop.workspace.ServiceTopology; import com.nepxion.swing.element.IElementNode; public class ConsoleToggleSpace extends JPanel { private static final long serialVersionUID = 1L; private IElementNode listElementNode; private JPanel blankPane = new JPanel(); public ConsoleToggleSpace(IElementNode listElementNode) { this.listElementNode = listElementNode; setLayout(new BorderLayout()); add(createContentPane(), BorderLayout.CENTER); } private JComponent createContentPane() { JComponent contentPane = null; String name = listElementNode.getName(); if (name.equals(ConsoleToggleConstants.SERVICE_TOPOLOGY)) { contentPane = new ServiceTopology(); } else { contentPane = blankPane; } return contentPane; } }
class MiddlewareTest extends \PHPUnit\Framework\TestCase { public function testDoNotConfigureProxyIfNoEnvironmentVarsAreSet() { // Create a mock environment with no proxy settings $mockEnvironment = $this->createMock(Environment::class); $mockEnvironment->method('getEnv')->willReturn(null); // Assume getEnv returns null for unset variables // Create an instance of the middleware and set the mock environment $middleware = new YourMiddleware($mockEnvironment); // Run the middleware $middleware->runMiddleware(); // Assert that http_proxy environment variable is empty $this->assertEmpty( $mockEnvironment->getEnv('http_proxy'), 'Proxy information is not set if no outbound proxy is configured' ); } }
package org.apache.tapestry5.integration.app1.pages; import org.apache.tapestry5.annotations.Persist; import org.apache.tapestry5.annotations.Property; import org.apache.tapestry5.ioc.annotations.Inject; import org.apache.tapestry5.services.Request; public class OnActivateRedirect { @Persist("flash") @Property(write = false) private String message; @Inject private Request request; Object onActivate() { if (request.isXHR()) { message = "Redirected from XHR"; return this; } return null; } void onAction() { } }
using System; using System.Collections.Generic; using System.Linq; public class BankSorter { public Dictionary<string, Dictionary<string, decimal>> SortBanks(Dictionary<string, Dictionary<string, decimal>> unsortedBanks) { var sortedBanks = unsortedBanks .OrderByDescending(bank => bank.Value.Sum(account => account.Value)) .ThenByDescending(bank => bank.Value.Max(account => account.Value)) .ToDictionary(x => x.Key, x => x.Value); return sortedBanks; } } // Usage public class Program { public static void Main() { var unsortedBanks = new Dictionary<string, Dictionary<string, decimal>> { { "BankA", new Dictionary<string, decimal> { { "A1", 1000 }, { "A2", 1500 }, { "A3", 500 } } }, { "BankB", new Dictionary<string, decimal> { { "B1", 2000 }, { "B2", 3000 } } }, { "BankC", new Dictionary<string, decimal> { { "C1", 1200 }, { "C2", 800 }, { "C3", 1000 } } } }; var bankSorter = new BankSorter(); var sortedBanks = bankSorter.SortBanks(unsortedBanks); foreach (var bank in sortedBanks) { Console.WriteLine(bank.Key + ": " + string.Join(", ", bank.Value.Select(account => $"{account.Key} - {account.Value}"))); } } }
#!/usr/bin/env bash # Copyright (c) 2016-2019 The BitPal Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. export LC_ALL=C set -e INPUTFILE="Xcode_7.3.1.dmg" HFSFILENAME="5.hfs" SDKDIR="Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk" 7z x "${INPUTFILE}" "${HFSFILENAME}" SDKNAME="$(basename "${SDKDIR}")" SDKDIRINODE=$(ifind -n "${SDKDIR}" "${HFSFILENAME}") fls "${HFSFILENAME}" -rpF ${SDKDIRINODE} | while read type inode filename; do inode="${inode::-1}" if [ "${filename:0:14}" = "usr/share/man/" ]; then continue fi filename="${SDKNAME}/$filename" echo "Extracting $filename ..." mkdir -p "$(dirname "$filename")" if [ "$type" = "l/l" ]; then ln -s "$(icat "${HFSFILENAME}" $inode)" "$filename" else icat "${HFSFILENAME}" $inode >"$filename" fi done echo "Building ${SDKNAME}.tar.gz ..." MTIME="$(istat "${HFSFILENAME}" "${SDKDIRINODE}" | perl -nle 'm/Content Modified:\s+(.*?)\s\(/ && print $1')" find "${SDKNAME}" | sort | tar --no-recursion --mtime="${MTIME}" --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > "${SDKNAME}.tar.gz" echo 'All done!'
# Copyright 2019 Population Health Sciences and Image Analysis, German Center for Neurodegenerative Diseases(DZNE)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numpy as np
import nibabel as nib
import scipy.ndimage
import os


def calculated_new_ornt(iornt, base_ornt):
    """Return a copy of `iornt` whose flip directions match `base_ornt`.

    For each axis in `base_ornt`, the corresponding axis entry of `iornt`
    keeps +1 if its direction already agrees, otherwise it is set to -1 so
    that `as_reoriented` flips it.
    """
    new_iornt = iornt[:]

    for axno, direction in np.asarray(base_ornt):
        idx = np.where(iornt[:, 0] == axno)
        idirection = iornt[int(idx[0][0]), 1]
        if direction == idirection:
            new_iornt[int(idx[0][0]), 1] = 1.0
        else:
            new_iornt[int(idx[0][0]), 1] = -1.0

    return new_iornt


def check_orientation(img, base_ornt=np.array([[0, -1], [1, 1], [2, 1]])):
    """Reorient `img` to the base orientation (LAS by default) if needed."""
    iornt = nib.io_orientation(img.affine)
    if not np.array_equal(iornt, base_ornt):
        img = img.as_reoriented(calculated_new_ornt(iornt, base_ornt))
    return img


def resample(image, spacing, new_spacing=[1, 1, 1], order=1, prefilter=True):
    """Resample `image` from voxel `spacing` to (approximately) `new_spacing`.

    The target shape is rounded to whole voxels, so the achieved spacing is
    returned alongside the resampled array.

    Args:
        image: source array.
        spacing: current voxel spacing (array-like, one value per axis).
        new_spacing: desired voxel spacing.
        order: spline interpolation order (0=nearest, 1=linear, ...).
        prefilter: whether to spline-prefilter the input.

    Returns:
        (resampled image, actually-achieved spacing)
    """
    # Determine current pixel spacing
    resize_factor = spacing / new_spacing
    new_real_shape = image.shape * resize_factor
    new_shape = np.round(new_real_shape)
    real_resize_factor = new_shape / image.shape
    new_spacing = spacing / real_resize_factor

    # BUGFIX: scipy.ndimage.interpolation.zoom lives in a long-deprecated
    # namespace (removed in modern SciPy); use scipy.ndimage.zoom.
    image = scipy.ndimage.zoom(image, real_resize_factor, order=order, prefilter=prefilter)

    return image, new_spacing


def define_size(mov_dim, ref_dim):
    """Compute a canvas large enough to center a `mov_dim` volume against
    a `ref_dim` volume.

    Returns:
        (new canvas shape, per-axis [start, stop) borders where the moving
        volume sits inside the canvas)
    """
    # BUGFIX: np.int was removed in NumPy 1.20+; use the builtin int.
    new_dim = np.zeros(len(mov_dim), dtype=int)
    borders = np.zeros((len(mov_dim), 2), dtype=int)
    padd = [int(mov_dim[0] // 2), int(mov_dim[1] // 2), int(mov_dim[2] // 2)]

    for i in range(len(mov_dim)):
        # Canvas is twice the larger of the two extents per axis.
        new_dim[i] = int(max(2 * mov_dim[i], 2 * ref_dim[i]))
        borders[i, 0] = int(new_dim[i] // 2) - padd[i]
        borders[i, 1] = borders[i, 0] + mov_dim[i]
    return list(new_dim), borders


def map_size(arr, base_shape, axial):
    """Center-crop/pad `arr` to `base_shape` without resampling.

    NOTE: mutates `base_shape` in place when `axial` is True (axis 2 is kept
    from `arr`); callers pass shared config lists, so this side effect is
    preserved as-is.
    """
    if axial:
        base_shape[2] = arr.shape[2]
    print('Volume will be resize from %s to %s ' % (arr.shape, base_shape))
    new_shape, borders = define_size(np.array(arr.shape), np.array(base_shape))
    new_arr = np.zeros(new_shape)
    final_arr = np.zeros(base_shape)

    # Place the source volume at the center of the enlarged canvas...
    new_arr[borders[0, 0]:borders[0, 1], borders[1, 0]:borders[1, 1], borders[2, 0]:borders[2, 1]] = arr[:]

    # ...then crop the canvas symmetrically down to the requested shape.
    middle_point = [int(new_arr.shape[0] // 2), int(new_arr.shape[1] // 2), int(new_arr.shape[2] // 2)]
    padd = [int(base_shape[0] / 2), int(base_shape[1] / 2), int(base_shape[2] / 2)]
    low_border = np.array((np.array(middle_point) - np.array(padd)), dtype=int)
    high_border = np.array(np.array(low_border) + np.array(base_shape), dtype=int)

    final_arr[:, :, :] = new_arr[low_border[0]:high_border[0],
                                 low_border[1]:high_border[1],
                                 low_border[2]:high_border[2]]

    return final_arr


def map_image(img_arr, base_zoom, izoom, order, axial):
    """Resample `img_arr` from spacing `izoom` to `base_zoom`, clamping
    negative interpolation artifacts to zero.

    NOTE: mutates `base_zoom` in place when `axial` is True (axis-2 spacing
    kept from the input); see map_size.
    """
    if axial:
        base_zoom[2] = izoom[2]
    print('Volume will be sample from %s to %s ' % (izoom, base_zoom))
    resample_arr, izoom = resample(img_arr, spacing=np.array(izoom), new_spacing=np.array(base_zoom), order=order)
    # Spline interpolation can undershoot below zero; intensities can't.
    resample_arr[resample_arr < 0] = 0
    return resample_arr, izoom


def conform(img, flags, order, save_path, mod, axial=False):
    """
    Conform an image to a reference orientation, spacing and size.

    Args:
        img: nibabel img: Loaded source image
        flags: dict : Dictionary containing the image size ('imgSize'),
            spacing ('spacing') and orientation ('base_ornt')
        order: int : interpolation order (0=nearest,1=linear(default),2=quadratic,3=cubic)
        save_path: str : output root; modified volumes are written under
            <save_path>/MRI
        mod: str : modality tag; 'fat' writes FatImaging_F.nii.gz, anything
            else writes FatImaging_W.nii.gz
        axial: bool : when True, keep the input's axial (axis-2) size/spacing
    Returns:
        new_img: nibabel img : conformed nibabel image
    """
    save = False
    # check orientation LAS
    img = check_orientation(img, base_ornt=flags['base_ornt'])

    # BUGFIX: img.get_data() was deprecated and removed in nibabel 5;
    # np.asanyarray(img.dataobj) is the drop-in replacement that keeps the
    # on-disk dtype (get_fdata would force float64).
    img_arr = np.asanyarray(img.dataobj)
    img_header = img.header

    # check voxel size; only resample when spacing differs noticeably
    i_zoom = img.header.get_zooms()
    # check the spacing idx for interpolation
    if axial:
        idx = 2
    else:
        idx = 3
    if not np.allclose(np.array(i_zoom)[:idx], np.array(flags['spacing'])[:idx], rtol=0.3):
        img_arr, i_zoom = map_image(img_arr, flags['spacing'], i_zoom, order, axial)
        save = True

    ishape = img_arr.shape
    # check dimensions; crop/pad to the reference grid when they differ
    if int(ishape[0]) != int(flags['imgSize'][0]) or int(ishape[1]) != int(flags['imgSize'][1]) or int(ishape[2]) != int(flags['imgSize'][2]):
        img_arr = map_size(img_arr, flags['imgSize'], axial)
        save = True

    img_header.set_data_shape(img_arr.shape)
    img_header.set_zooms(i_zoom)

    # Shift the qform origin so the recentered volume stays registered.
    # NOTE(review): the +=/-=/- sign pattern presumably reflects the LAS
    # base orientation — confirm against the reference space definition.
    affine = img_header.get_qform()
    affine[0][3] += ((flags['imgSize'][0] - ishape[0]) / 2 * i_zoom[0])
    affine[1][3] -= ((flags['imgSize'][1] - ishape[1]) / 2 * i_zoom[1])
    affine[2][3] -= ((flags['imgSize'][2] - ishape[2]) / 2 * i_zoom[2])
    img_header.set_qform(affine)

    new_img = nib.Nifti1Image(img_arr, affine, img_header)

    # save images if modified
    if save:
        if not os.path.isdir(os.path.join(save_path, 'MRI')):
            os.mkdir(os.path.join(save_path, 'MRI'))
        mri_path = os.path.join(save_path, 'MRI')
        if mod == 'fat':
            new_img_path = os.path.join(mri_path, 'FatImaging_F.nii.gz')
        else:
            new_img_path = os.path.join(mri_path, 'FatImaging_W.nii.gz')
        nib.save(new_img, new_img_path)

    return new_img
import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import io.netty.handler.codec.http.FullHttpRequest; import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpVersion; import io.netty.handler.codec.http.DefaultFullHttpResponse; import io.netty.util.CharsetUtil; public class ApiNettyServerHandler extends SimpleChannelInboundHandler<FullHttpRequest> { @Override protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest request) { if (request.method().name().equals("GET")) { String jsonResponse = "{\"message\": \"Hello, World!\"}"; DefaultFullHttpResponse response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK, Unpooled.copiedBuffer(jsonResponse, CharsetUtil.UTF_8)); response.headers().set("Content-Type", "application/json"); ctx.writeAndFlush(response); } else { DefaultFullHttpResponse response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.METHOD_NOT_ALLOWED); ctx.writeAndFlush(response); } } @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { cause.printStackTrace(); ctx.close(); } }
#!/bin/sh for midi_file in ./downloads/*.mid; do fluidsynth -F "${midi_file}.wav" -i "./soundfont.sf2" "${midi_file}" done
import json

import pytest

from bray.geoclient import Geoclient, HTTPEndpoint, SearchDecoder
from . import getfile

# Canned search-endpoint JSON responses, one per result scenario.
SEARCH_FILES = {
    'ok_exact_match': getfile('search-ok-exact-match.json'),
    'ok_mixed_matches': getfile('search-ok-mixed-matches.json'),
    'ok_possible_matches': getfile('search-ok-possible-matches.json'),
    'rejected_no_rejects': getfile('search-rejected-no-rejects.json'),
    'rejected_with_rejects': getfile('search-rejected-with-rejects.json')
}


#
# The 'decoded_search' marker is registered in <project_root>/pytest.ini.
# This file must updated if this fixture function is renamed.
#
@pytest.fixture
def decoded_search(request):
    """Yields the result of deserializing a JSON response from the search
    endpoint.

    The fixture file is selected by the test's @pytest.mark.decoded_search
    marker argument (a SEARCH_FILES key); decoding goes through
    SearchDecoder so its transforms are applied.
    """
    marker_name = 'decoded_search'
    marker = request.node.get_closest_marker(marker_name)
    if marker is None:
        raise pytest.UsageError(f'Call to request.node.get_closest_marker("{marker_name}") returned None.')
    label = marker.args[0]
    with open(SEARCH_FILES[label], 'r') as f:
        yield json.load(f, cls=SearchDecoder)


class TestSearchDecoder:
    """Exercises SearchDecoder's status/summary enrichment per scenario."""

    @pytest.mark.decoded_search('ok_exact_match')
    def test_transform_ok_exact_match(self, decoded_search):
        assert decoded_search[SearchDecoder.NEW_RESULT_STATUS] == 'EXACT_MATCH'
        assert decoded_search[SearchDecoder.SUMMARY] == '1 EXACT_MATCH, 0 POSSIBLE_MATCH, 0 REJECTED'

    @pytest.mark.decoded_search('ok_possible_matches')
    def test_transform_ok_possible_matches(self, decoded_search):
        assert decoded_search[SearchDecoder.NEW_RESULT_STATUS] == 'POSSIBLE_MATCH'
        assert decoded_search[SearchDecoder.SUMMARY] == '0 EXACT_MATCH, 3 POSSIBLE_MATCH, 2 REJECTED'

    @pytest.mark.decoded_search('rejected_with_rejects')
    def test_transform_rejected_with_rejects(self, decoded_search):
        assert decoded_search[SearchDecoder.NEW_RESULT_STATUS] == 'REJECTED'
        assert decoded_search[SearchDecoder.SUMMARY] == '0 EXACT_MATCH, 0 POSSIBLE_MATCH, 1 REJECTED'

    @pytest.mark.decoded_search('rejected_no_rejects')
    def test_transform_rejected_no_rejects(self, decoded_search):
        # No rejects payload: decoder must leave the original status and add
        # neither the new status nor the summary key.
        assert decoded_search[SearchDecoder.RESULT_STATUS] == 'REJECTED'
        assert SearchDecoder.NEW_RESULT_STATUS not in decoded_search
        assert SearchDecoder.SUMMARY not in decoded_search

    #
    # This test does not use the search_response fixture because
    # the result_and_stats method is tested in isolation.
    #
    def test_result_and_stats(self):
        # Plain json.load (no decoder class) so the raw payload is passed.
        data = None
        with open(SEARCH_FILES['ok_mixed_matches'], 'r') as f:
            data = json.load(f)
        results_fixture = data[SearchDecoder.RESULTS]
        decoder = SearchDecoder()
        actual = decoder.result_and_stats(data)
        assert isinstance(actual, tuple)
        assert results_fixture[0] == actual[0]
        assert decoder.new_stats_map(exact=0, possible=2, rejected=5) == actual[1]


class MockHTTPEndpoint:
    """Stand-in for HTTPEndpoint: echoes the call arguments back as a dict
    so tests can assert what Geoclient passed down."""
    @staticmethod
    def get_json(uri, query, decoder=None):
        return {
            'uri_arg': uri,
            'query_arg': query,
            'decoder_arg': decoder
        }


@pytest.fixture
def mock_json_obj(monkeypatch):
    """Mocks bray.geoclient.HTTPEndpoint.get_json(...) method."""
    def mock_http_get_json(*args, **kwargs):
        # args[0] is the HTTPEndpoint instance; forward uri/query/decoder.
        return MockHTTPEndpoint().get_json(args[1], args[2], kwargs['decoder'])

    monkeypatch.setattr(HTTPEndpoint, "get_json", mock_http_get_json)


class MockDecoder:
    # Sentinel decoder class; identity is all that matters.
    pass


class EndpointArgs:
    """Bundle of canned endpoint arguments plus the query Geoclient should
    derive from them (runtime args merged over the static query)."""
    def __init__(self):
        self.uri = 'https://snafubar'
        self.query = {'app_id': 'foobar', 'app_key': 'xxxx'}
        self.runtime_args = {'houseNumber': '2860', 'street': 'broadway', 'borough': 'manhattan'}
        self.expected_query = {
            **self.runtime_args, **self.query
        }


@pytest.fixture
def endpoint_args():
    return EndpointArgs()


class TestGeoclient:
    """Verifies Geoclient forwards uri, merged query, and decoder to
    HTTPEndpoint.get_json."""

    def test_call_no_decoder(self, mock_json_obj, endpoint_args):
        ea = endpoint_args
        geoclient = Geoclient('no-decoder', ea.uri, ea.query)
        result = geoclient(ea.runtime_args)
        assert result['uri_arg'] == ea.uri
        assert result['query_arg'] == ea.expected_query
        assert result['decoder_arg'] is None

    def test_call_with_decoder(self, mock_json_obj, endpoint_args):
        ea = endpoint_args
        geoclient = Geoclient('with-decoder', ea.uri, ea.query, decoder=MockDecoder)
        result = geoclient(ea.runtime_args)
        assert result['uri_arg'] == ea.uri
        assert result['query_arg'] == ea.expected_query
        assert result['decoder_arg'] == MockDecoder
<filename>offer/src/main/java/com/java/study/algorithm/zuo/dadvanced/advanced_class_06/Code_07_RegularExpressionMatch.java<gh_stars>1-10 package com.java.study.algorithm.zuo.dadvanced.advanced_class_06; /** * 字符串匹配问题 * 【题目】 给定字符串str,其中绝对不含有字符'.'和'*'。再给定字符串exp,其中可以含有'.'或'*','*' 字符不能是exp的首字符,并且任意两个'*'字符不相邻。exp中的'.'代表任何一个字符, exp中的'*'表示'*'的前一个字符可以有0个或者多个。请写一个函数,判断str是否能被exp 匹配。 * 【举例】 * str="abc",exp="abc",返回true。 str="abc",exp="a.c",exp中单个'.'可以代表任意字符,所以返回true。 str="abcd",exp=".*"。exp中'*'的前一个字符是'.',所以可表示任意数量的'.'字符,当 exp是"...."时与"abcd"匹配,返回true。 str="",exp="..*"。exp中'*'的前一个字符是'.',可表示任意数量的'.'字符,但是".*"之前还 有一个'.'字符,该字符不受'*'的影响,所以str起码有一个字符才能被exp匹配。所以返回 false。 */ public class Code_07_RegularExpressionMatch{ }
<filename>pages/index.js import { useState, useEffect } from 'react' import Clock from '../components/clock' import Gif from '../components/gif' import Radar from '../components/radar' import Text from '../components/text' import Weather from '../components/weather' const cx = { main: 'ma0 center bg-black near-white sans-serif', smallBox: 'fl f1 fw3 w-100 h-50 pa2 inline-flex items-center justify-center tc', smallContainer: 'fl w-100 w-third-ns h-100', bigBox: 'fl w-100 w-two-thirds-ns h-100 inline-flex items-center justify-center' } const s = { main: { width: 800, height: 480 } } const Index = () => { const [index, setIndex] = useState(0) const boxes = [ <Gif key='box-gif' className={cx.bigBox} />, <Radar key='box-radar' className={cx.bigBox} />, <Text key='box-text' className={cx.bigBox} /> ] const handleContextMenu = e => { e.preventDefault() setIndex(index === boxes.length - 1 ? 0 : index + 1) } useEffect(() => { const interval = setInterval(() => { window.location = '/' }, 10800000) return () => clearInterval(interval) }, []) return ( <div className={cx.main} style={s.main} onContextMenu={handleContextMenu} > <div className={cx.smallContainer}> <Clock className={cx.smallBox} /> <Weather className={cx.smallBox} /> </div> {boxes[index]} <style global jsx> {` body { background-color: black; cursor: none; user-select: none; } `} </style> </div> ) } export default Index
<reponame>ispras/pu4spark package ru.ispras.pu4spark import org.apache.spark.sql.DataFrame /** * Performs positive unlabeled (PU) learning, i.e. training a binary classifier in a semi-supervised way * from only positive and unlabeled examples * * @author <NAME> (<EMAIL>) */ trait PositiveUnlabeledLearner { /** * Updates dataframe by applying positive-unlabeled learning (append column with result of classification). * * @param df dataframe containing, among others, column with labels and features to be used in PU-learning * @param labelColumnName name for column containing 1 - positives and 0 - unlabeled marks for each instance * @param featuresColumnName name for 1 column containing features array (e.g. after VectorAssembler) * @param finalLabel name for column containing labels of final classification (1 for positive and -1 for negatives) * @return dataframe with new column corresponding to final classification */ def weight(df: DataFrame, labelColumnName: String = "featuresCol", featuresColumnName: String = "labelCol", finalLabel: String = "finalLabel"): DataFrame } /** * Subclasses should be case classes in order to be easily serializable (e.g. to JSON) */ trait PositiveUnlabeledLearnerConfig { def build(): PositiveUnlabeledLearner } /** * Needed for serialization by json4s (should be passed to org.json4s.ShortTypeHints) */ object PositiveUnlabeledLearnerConfig { val subclasses = List(classOf[TraditionalPULearnerConfig], classOf[GradualReductionPULearnerConfig]) }
<gh_stars>0 require 'active_support/concern' require 'yaml' module ActiveMessenger module Core module ConfigParsable extend ActiveSupport::Concern included do cattr_accessor :config, :env end module ClassMethods def load_config!(config_path) self.config ||= parse_config!(config_path) end private def parse_config!(config_path) unless self.env raise ActiveMessenger::ConfigParseError.new("Adapter should be set 'env' to parse config!") end unless self.adapter raise ActiveMessenger::ConfigParseError.new("Adapter should be set 'adapter' to parse config!") end parsed_config = YAML.load_file(config_path)[self.env.to_s][self.adapter.to_s] if parsed_config.nil? raise ActiveMessenger::ConfigParseError.new("Config missing. Please check configure format!") end rescue SyntaxError => e raise ActiveMessenger::ConfigParseError.new(e.msessage) end end end end end
/* eslint-disable no-else-return */ import userErrors from './userErrors'; import depErrors from './depErrors'; import logger from './logger'; const transformer = {}; transformer.transformExpressValidationErrors = (errors) => { let msgs = ''; let usr1; let usr8; let usr6; let dep1; let usr2 = ''; if (!Array.isArray(errors)) return msgs; logger.error({ errors }); errors.forEach((item) => { if (item.param === 'email' || item.param === 'password') { usr1 = usr1 ? `${usr1} & ${item.param}` : item.param; } else if (item.param === 'shipping_region_id') { usr8 = 'shipping_region_id'; msgs = item.msg; } else if (item.param === 'day_phone' || item.param === 'eve_phone' || item.param === 'mob_phone') { usr6 = item.param; msgs = item.msg; } else if (item.param === 'department_id') { dep1 = item.param; } else if (!usr2.includes(item.param)) { usr2 += ` ${item.param} |`; msgs += ` ${item.msg} |`; } }); if (usr1) { return userErrors.usr1(usr1, 400); } else if (usr8) { return userErrors.usr8(usr8, 400, msgs); } else if (usr6) { return userErrors.usr6(usr6, 400, msgs); } else if (dep1) { return depErrors.dep1(); } return userErrors.usr2(usr2, 400, msgs); }; export default transformer;
#!/usr/bin/env bash vendor/bin/paratest --configuration platform/phpunit.xml.dist --no-coverage --testsuite=paratest --exclude-group=skip-paratest,needsWebserver --processes $(nproc) --runner WrapperRunner
<filename>src/app/shared/models/aggregation.ts import { AggregationBucket } from './aggregation-bucket'; export class Aggregation { key: string; aggregationType: AggregationType; label: string; order: number; taxonomy: boolean; buckets: AggregationBucket[]; } export enum AggregationType { Switch = 'switch', Checkbox = 'checkbox', DateRange = 'dateRange', }
// Barrel for the settings page: named export is the bare component,
// default export is the store-connected wrapper.
import SettingsPage from "./Page";
import WithStore from "./WithStore";

export { SettingsPage };
export default WithStore;
#!/usr/bin/env bash
# Unregister the Inventory Storage module from Okapi for the given tenant
# (defaults to demo_tenant) and clear the related Okapi environment
# variables when Python 3 is available.
tenant_id=${1:-demo_tenant}

echo "Unregistering Inventory Storage Module"

./unregister-managed.sh ${tenant_id}

if which python3
then
  # The clear-environment-variables helper needs the `requests` package.
  pip3 install requests
  echo "Removing Okapi environment variables"
  python3 ./okapi-setup/environment/clear-environment-variables.py
else
  echo "Install Python3 to remove environment variables from Okapi automatically"
fi
# Vulkan SDK (MoltenVK) environment setup.
export VULKAN_SDK="/opt/vulkan"
# Driver manifest for the MoltenVK installable client driver.
export VK_ICD_FILENAMES="$VULKAN_SDK/etc/vulkan/icd.d/MoltenVK_icd.json"
# Where the Vulkan loader looks for explicit layers.
export VK_LAYER_PATH="$VULKAN_SDK/etc/vulkan/explicit_layers.d"
export PATH="$VULKAN_SDK/bin:$PATH"
#!/usr/bin/bash
# Print a horizontal ASCII bar chart of the values in `given_values`,
# one row per name, in the order listed in `given_names`.

declare -A given_values
declare -a given_names

given_names=("Karbosky" "John" "Helen" "Valentine" "Andreea")
given_values=([Karbosky]="4" [John]="3" [Helen]="4" [Valentine]="13" [Andreea]="13")

printing_chart() {
  # FIX: the loop counter used to be a global that was never reset, so a
  # second call to printing_chart printed an empty chart; keep it local.
  local i=0
  local name

  printf "\n%40s\n" " " | tr " " "-"
  while ((i < ${#given_names[@]})); do
    name="${given_names[i]}"
    # Name, numeric value, then a bar of '#' whose width equals the value.
    printf "%-11s%-4s " "${name}: " "| ${given_values[${name}]}"
    printf "%s %s" "|" " ["
    printf "%${given_values[${name}]}s" " " | tr " " "#"
    printf "%s\n" "]"
    ((i++))
  done
  printf "%40s\n\n" " " | tr " " "-"
}

printing_chart
#!/bin/bash
# Cross-compile the Qt wallet for 32-bit Windows using the MXE
# i686-w64-mingw32.static toolchain.

MXE_INCLUDE_PATH=/home/pt/workspace/mxe/usr/i686-w64-mingw32.static/include
MXE_LIB_PATH=/home/pt/workspace/mxe/usr/i686-w64-mingw32.static/lib

# Point qmake at the MXE-built Boost/OpenSSL/BerkeleyDB/miniupnpc.
i686-w64-mingw32.static-qmake-qt5 \
    BOOST_LIB_SUFFIX=-mt \
    BOOST_THREAD_LIB_SUFFIX=_win32-mt \
    BOOST_INCLUDE_PATH=$MXE_INCLUDE_PATH/boost \
    BOOST_LIB_PATH=$MXE_LIB_PATH \
    OPENSSL_INCLUDE_PATH=$MXE_INCLUDE_PATH/openssl \
    OPENSSL_LIB_PATH=$MXE_LIB_PATH \
    BDB_INCLUDE_PATH=$MXE_INCLUDE_PATH \
    BDB_LIB_PATH=$MXE_LIB_PATH \
    MINIUPNPC_INCLUDE_PATH=$MXE_INCLUDE_PATH \
    MINIUPNPC_LIB_PATH=$MXE_LIB_PATH \
    QMAKE_LRELEASE=/home/pt/workspace/mxe/usr/i686-w64-mingw32.static/qt5/bin/lrelease CryptoCraze-qt.pro

# Build the release target with 6 parallel jobs.
make -j6 -f Makefile.Release
## INSTALL

# Backing up previous Bash Profile.
# FIX: ensure the backup directory exists before moving files into it,
# otherwise both `mv` calls fail on a fresh machine.
mkdir -p ~/.dotbackup
mv ~/.bash_profile ~/.dotbackup/.bash_profile && mv ~/.bashrc ~/.dotbackup/.bashrc_backup

# Clone repository to user's root folder
cd; curl --silent -#L "https://github.com/josantana/dotfiles/tarball/master" | tar -xzv --strip-components 1 --exclude={LICENSE,README.md,install.sh,sync.sh}

# Z directory jumping (https://github.com/rupa/z)
curl -O "https://raw.githubusercontent.com/rupa/z/master/z.sh" && mv z.sh ~/.dotfiles/z.sh

# Homebrew for the homeless
if [ -z "$(which brew)" ]; then
    ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi

# Run OSX scripts
source ~/.dotfiles/.brew
source ~/.dotfiles/.osx

# Remove scripts
rm -rf ~/.dotfiles/.brew
rm -rf ~/.dotfiles/.osx
package com.cgfy.pattern.proxy.dynamicproxy;

/**
 * Test class: demonstrates the JDK dynamic-proxy pattern by wrapping a
 * UserDao target in a runtime-generated proxy and invoking it.
 */
public class App {

    public static void main(String[] args) {
        // The target object.
        UserDao target = new UserDaoImpl();
        // [Original type, e.g. class cn.itcast.b_dynamic.UserDao]
        System.out.println("目标对象:"+target.getClass());

        // Create a proxy object wrapping the target.
        UserDao proxy = (UserDao) new ProxyFactory(target).getProxyInstance();
        // class $Proxy0 — a proxy class generated dynamically in memory.
        System.out.println("代理对象:"+proxy.getClass());

        // Invoke the method [through the proxy object].
        proxy.save();
    }
}
#!/bin/bash # Ask the user about the time zone, and output the resulting TZ value to stdout. # Interact with the user via stderr and stdin. PKGVERSION='(tzcode) ' TZVERSION=see_Makefile REPORT_BUGS_TO=tz@iana.org # Contributed by Paul Eggert. This file is in the public domain. # Porting notes: # # This script requires a Posix-like shell and prefers the extension of a # 'select' statement. The 'select' statement was introduced in the # Korn shell and is available in Bash and other shell implementations. # If your host lacks both Bash and the Korn shell, you can get their # source from one of these locations: # # Bash <https://www.gnu.org/software/bash/> # Korn Shell <http://www.kornshell.com/> # MirBSD Korn Shell <https://www.mirbsd.org/mksh.htm> # # For portability to Solaris 9 /bin/sh this script avoids some POSIX # features and common extensions, such as $(...) (which works sometimes # but not others), $((...)), and $10. # # This script also uses several features of modern awk programs. # If your host lacks awk, or has an old awk that does not conform to Posix, # you can use either of the following free programs instead: # # Gawk (GNU awk) <https://www.gnu.org/software/gawk/> # mawk <https://invisible-island.net/mawk/> # Specify default values for environment variables if they are unset. : ${AWK=awk} : ${TZDIR=`pwd`} # Output one argument as-is to standard output. # Safer than 'echo', which can mishandle '\' or leading '-'. say() { printf '%s\n' "$1" } # Check for awk Posix compliance. ($AWK -v x=y 'BEGIN { exit 123 }') </dev/null >/dev/null 2>&1 [ $? = 123 ] || { say >&2 "$0: Sorry, your '$AWK' program is not Posix compatible." exit 1 } coord= location_limit=10 zonetabtype=zone1970 usage="Usage: tzselect [--version] [--help] [-c COORD] [-n LIMIT] Select a timezone interactively. 
Options: -c COORD Instead of asking for continent and then country and then city, ask for selection from time zones whose largest cities are closest to the location with geographical coordinates COORD. COORD should use ISO 6709 notation, for example, '-c +4852+00220' for Paris (in degrees and minutes, North and East), or '-c -35-058' for Buenos Aires (in degrees, South and West). -n LIMIT Display at most LIMIT locations when -c is used (default $location_limit). --version Output version information. --help Output this help. Report bugs to $REPORT_BUGS_TO." # Ask the user to select from the function's arguments, # and assign the selected argument to the variable 'select_result'. # Exit on EOF or I/O error. Use the shell's 'select' builtin if available, # falling back on a less-nice but portable substitute otherwise. if case $BASH_VERSION in ?*) : ;; '') # '; exit' should be redundant, but Dash doesn't properly fail without it. (eval 'set --; select x; do break; done; exit') </dev/null 2>/dev/null esac then # Do this inside 'eval', as otherwise the shell might exit when parsing it # even though it is never executed. eval ' doselect() { select select_result do case $select_result in "") echo >&2 "Please enter a number in range." ;; ?*) break esac done || exit } # Work around a bug in bash 1.14.7 and earlier, where $PS3 is sent to stdout. case $BASH_VERSION in [01].*) case `echo 1 | (select x in x; do break; done) 2>/dev/null` in ?*) PS3= esac esac ' else doselect() { # Field width of the prompt numbers. select_width=`expr $# : '.*'` select_i= while : do case $select_i in '') select_i=0 for select_word do select_i=`expr $select_i + 1` printf >&2 "%${select_width}d) %s\\n" $select_i "$select_word" done ;; *[!0-9]*) echo >&2 'Please enter a number in range.' ;; *) if test 1 -le $select_i && test $select_i -le $#; then shift `expr $select_i - 1` select_result=$1 break fi echo >&2 'Please enter a number in range.' esac # Prompt and read input. printf >&2 %s "${PS3-#? 
}" read select_i || exit done } fi while getopts c:n:t:-: opt do case $opt$OPTARG in c*) coord=$OPTARG ;; n*) location_limit=$OPTARG ;; t*) # Undocumented option, used for developer testing. zonetabtype=$OPTARG ;; -help) exec echo "$usage" ;; -version) exec echo "tzselect $PKGVERSION$TZVERSION" ;; -*) say >&2 "$0: -$opt$OPTARG: unknown option; try '$0 --help'"; exit 1 ;; *) say >&2 "$0: try '$0 --help'"; exit 1 ;; esac done shift `expr $OPTIND - 1` case $# in 0) ;; *) say >&2 "$0: $1: unknown argument"; exit 1 ;; esac # Make sure the tables are readable. TZ_COUNTRY_TABLE=$TZDIR/iso3166.tab TZ_ZONE_TABLE=$TZDIR/$zonetabtype.tab for f in $TZ_COUNTRY_TABLE $TZ_ZONE_TABLE do <"$f" || { say >&2 "$0: time zone files are not set up correctly" exit 1 } done # If the current locale does not support UTF-8, convert data to current # locale's format if possible, as the shell aligns columns better that way. # Check the UTF-8 of U+12345 CUNEIFORM SIGN URU TIMES KI. ! $AWK 'BEGIN { u12345 = "\360\222\215\205"; exit length(u12345) != 1 }' && { tmp=`(mktemp -d) 2>/dev/null` || { tmp=${TMPDIR-/tmp}/tzselect.$$ && (umask 77 && mkdir -- "$tmp") };} && trap 'status=$?; rm -fr -- "$tmp"; exit $status' 0 HUP INT PIPE TERM && (iconv -f UTF-8 -t //TRANSLIT <"$TZ_COUNTRY_TABLE" >$tmp/iso3166.tab) \ 2>/dev/null && TZ_COUNTRY_TABLE=$tmp/iso3166.tab && iconv -f UTF-8 -t //TRANSLIT <"$TZ_ZONE_TABLE" >$tmp/$zonetabtype.tab && TZ_ZONE_TABLE=$tmp/$zonetabtype.tab newline=' ' IFS=$newline # Awk script to read a time zone table and output the same table, # with each column preceded by its distance from 'here'. output_distances=' BEGIN { FS = "\t" while (getline <TZ_COUNTRY_TABLE) if ($0 ~ /^[^#]/) country[$1] = $2 country["US"] = "US" # Otherwise the strings get too long. } function abs(x) { return x < 0 ? -x : x; } function min(x, y) { return x < y ? 
x : y; } function convert_coord(coord, deg, minute, ilen, sign, sec) { if (coord ~ /^[-+]?[0-9]?[0-9][0-9][0-9][0-9][0-9][0-9]([^0-9]|$)/) { degminsec = coord intdeg = degminsec < 0 ? -int(-degminsec / 10000) : int(degminsec / 10000) minsec = degminsec - intdeg * 10000 intmin = minsec < 0 ? -int(-minsec / 100) : int(minsec / 100) sec = minsec - intmin * 100 deg = (intdeg * 3600 + intmin * 60 + sec) / 3600 } else if (coord ~ /^[-+]?[0-9]?[0-9][0-9][0-9][0-9]([^0-9]|$)/) { degmin = coord intdeg = degmin < 0 ? -int(-degmin / 100) : int(degmin / 100) minute = degmin - intdeg * 100 deg = (intdeg * 60 + minute) / 60 } else deg = coord return deg * 0.017453292519943296 } function convert_latitude(coord) { match(coord, /..*[-+]/) return convert_coord(substr(coord, 1, RLENGTH - 1)) } function convert_longitude(coord) { match(coord, /..*[-+]/) return convert_coord(substr(coord, RLENGTH)) } # Great-circle distance between points with given latitude and longitude. # Inputs and output are in radians. This uses the great-circle special # case of the Vicenty formula for distances on ellipsoids. function gcdist(lat1, long1, lat2, long2, dlong, x, y, num, denom) { dlong = long2 - long1 x = cos(lat2) * sin(dlong) y = cos(lat1) * sin(lat2) - sin(lat1) * cos(lat2) * cos(dlong) num = sqrt(x * x + y * y) denom = sin(lat1) * sin(lat2) + cos(lat1) * cos(lat2) * cos(dlong) return atan2(num, denom) } # Parallel distance between points with given latitude and longitude. # This is the product of the longitude difference and the cosine # of the latitude of the point that is further from the equator. # I.e., it considers longitudes to be further apart if they are # nearer the equator. function pardist(lat1, long1, lat2, long2) { return abs(long1 - long2) * min(cos(lat1), cos(lat2)) } # The distance function is the sum of the great-circle distance and # the parallel distance. It could be weighted. 
function dist(lat1, long1, lat2, long2) { return gcdist(lat1, long1, lat2, long2) + pardist(lat1, long1, lat2, long2) } BEGIN { coord_lat = convert_latitude(coord) coord_long = convert_longitude(coord) } /^[^#]/ { here_lat = convert_latitude($2) here_long = convert_longitude($2) line = $1 "\t" $2 "\t" $3 sep = "\t" ncc = split($1, cc, /,/) for (i = 1; i <= ncc; i++) { line = line sep country[cc[i]] sep = ", " } if (NF == 4) line = line " - " $4 printf "%g\t%s\n", dist(coord_lat, coord_long, here_lat, here_long), line } ' # Begin the main loop. We come back here if the user wants to retry. while echo >&2 'Please identify a location' \ 'so that time zone rules can be set correctly.' continent= country= region= case $coord in ?*) continent=coord;; '') # Ask the user for continent or ocean. echo >&2 'Please select a continent, ocean, "coord", or "TZ".' quoted_continents=` $AWK ' BEGIN { FS = "\t" } /^[^#]/ { entry = substr($3, 1, index($3, "/") - 1) if (entry == "America") entry = entry "s" if (entry ~ /^(Arctic|Atlantic|Indian|Pacific)$/) entry = entry " Ocean" printf "'\''%s'\''\n", entry } ' <"$TZ_ZONE_TABLE" | sort -u | tr '\n' ' ' echo '' ` eval ' doselect '"$quoted_continents"' \ "coord - I want to use geographical coordinates." \ "TZ - I want to specify the timezone using the Posix TZ format." continent=$select_result case $continent in Americas) continent=America;; *" "*) continent=`expr "$continent" : '\''\([^ ]*\)'\''` esac ' esac case $continent in TZ) # Ask the user for a Posix TZ string. Check that it conforms. while echo >&2 'Please enter the desired value' \ 'of the TZ environment variable.' echo >&2 'For example, AEST-10 is abbreviated' \ 'AEST and is 10 hours' echo >&2 'ahead (east) of Greenwich,' \ 'with no daylight saving time.' read TZ $AWK -v TZ="$TZ" 'BEGIN { tzname = "(<[[:alnum:]+-]{3,}>|[[:alpha:]]{3,})" time = "(2[0-4]|[0-1]?[0-9])" \ "(:[0-5][0-9](:[0-5][0-9])?)?" offset = "[-+]?" 
time mdate = "M([1-9]|1[0-2])\\.[1-5]\\.[0-6]" jdate = "((J[1-9]|[0-9]|J?[1-9][0-9]" \ "|J?[1-2][0-9][0-9])|J?3[0-5][0-9]|J?36[0-5])" datetime = ",(" mdate "|" jdate ")(/" time ")?" tzpattern = "^(:.*|" tzname offset "(" tzname \ "(" offset ")?(" datetime datetime ")?)?)$" if (TZ ~ tzpattern) exit 1 exit 0 }' do say >&2 "'$TZ' is not a conforming Posix timezone string." done TZ_for_date=$TZ;; *) case $continent in coord) case $coord in '') echo >&2 'Please enter coordinates' \ 'in ISO 6709 notation.' echo >&2 'For example, +4042-07403 stands for' echo >&2 '40 degrees 42 minutes north,' \ '74 degrees 3 minutes west.' read coord;; esac distance_table=`$AWK \ -v coord="$coord" \ -v TZ_COUNTRY_TABLE="$TZ_COUNTRY_TABLE" \ "$output_distances" <"$TZ_ZONE_TABLE" | sort -n | sed "${location_limit}q" ` regions=`say "$distance_table" | $AWK ' BEGIN { FS = "\t" } { print $NF } '` echo >&2 'Please select one of the following timezones,' \ echo >&2 'listed roughly in increasing order' \ "of distance from $coord". doselect $regions region=$select_result TZ=`say "$distance_table" | $AWK -v region="$region" ' BEGIN { FS="\t" } $NF == region { print $4 } '` ;; *) # Get list of names of countries in the continent or ocean. countries=`$AWK \ -v continent="$continent" \ -v TZ_COUNTRY_TABLE="$TZ_COUNTRY_TABLE" \ ' BEGIN { FS = "\t" } /^#/ { next } $3 ~ ("^" continent "/") { ncc = split($1, cc, /,/) for (i = 1; i <= ncc; i++) if (!cc_seen[cc[i]]++) cc_list[++ccs] = cc[i] } END { while (getline <TZ_COUNTRY_TABLE) { if ($0 !~ /^#/) cc_name[$1] = $2 } for (i = 1; i <= ccs; i++) { country = cc_list[i] if (cc_name[country]) { country = cc_name[country] } print country } } ' <"$TZ_ZONE_TABLE" | sort -f` # If there's more than one country, ask the user which one. case $countries in *"$newline"*) echo >&2 'Please select a country' \ 'whose clocks agree with yours.' doselect $countries country=$select_result;; *) country=$countries esac # Get list of timezones in the country. 
regions=`$AWK \ -v country="$country" \ -v TZ_COUNTRY_TABLE="$TZ_COUNTRY_TABLE" \ ' BEGIN { FS = "\t" cc = country while (getline <TZ_COUNTRY_TABLE) { if ($0 !~ /^#/ && country == $2) { cc = $1 break } } } /^#/ { next } $1 ~ cc { print $4 } ' <"$TZ_ZONE_TABLE"` # If there's more than one region, ask the user which one. case $regions in *"$newline"*) echo >&2 'Please select one of the following timezones.' doselect $regions region=$select_result;; *) region=$regions esac # Determine TZ from country and region. TZ=`$AWK \ -v country="$country" \ -v region="$region" \ -v TZ_COUNTRY_TABLE="$TZ_COUNTRY_TABLE" \ ' BEGIN { FS = "\t" cc = country while (getline <TZ_COUNTRY_TABLE) { if ($0 !~ /^#/ && country == $2) { cc = $1 break } } } /^#/ { next } $1 ~ cc && $4 == region { print $3 } ' <"$TZ_ZONE_TABLE"` esac # Make sure the corresponding zoneinfo file exists. TZ_for_date=$TZDIR/$TZ <"$TZ_for_date" || { say >&2 "$0: time zone files are not set up correctly" exit 1 } esac # Use the proposed TZ to output the current date relative to UTC. # Loop until they agree in seconds. # Give up after 8 unsuccessful tries. extra_info= for i in 1 2 3 4 5 6 7 8 do TZdate=`LANG=C TZ="$TZ_for_date" date` UTdate=`LANG=C TZ=UTC0 date` TZsec=`expr "$TZdate" : '.*:\([0-5][0-9]\)'` UTsec=`expr "$UTdate" : '.*:\([0-5][0-9]\)'` case $TZsec in $UTsec) extra_info=" Selected time is now: $TZdate. Universal Time is now: $UTdate." break esac done # Output TZ info and ask the user to confirm. echo >&2 "" echo >&2 "The following information has been given:" echo >&2 "" case $country%$region%$coord in ?*%?*%) say >&2 " $country$newline $region";; ?*%%) say >&2 " $country";; %?*%?*) say >&2 " coord $coord$newline $region";; %%?*) say >&2 " coord $coord";; *) say >&2 " TZ='$TZ'" esac say >&2 "" say >&2 "Therefore TZ='$TZ' will be used.$extra_info" say >&2 "Is the above information OK?" 
doselect Yes No ok=$select_result case $ok in Yes) break esac do coord= done case $SHELL in *csh) file=.login line="setenv TZ '$TZ'";; *) file=.profile line="TZ='$TZ'; export TZ" esac test -t 1 && say >&2 " You can make this change permanent for yourself by appending the line $line to the file '$file' in your home directory; then log out and log in again. Here is that TZ value again, this time on standard output so that you can use the $0 command in shell scripts:" say "$TZ"
#!/bin/sh
# Meant to be executed from within XCode.
# See http://help.apple.com/xcode/mac/8.0/#/itcaec37c2a6 for a list of config variables.
#
# Copies the app-group identifier from the built app's Info.plist (key
# KIAppGroupKey) into Settings.bundle/Root.plist as
# ApplicationGroupContainerIdentifier, replacing any existing value.

if [ "X${BUILT_PRODUCTS_DIR}" = "X" ]; then
    echo "BUILT_PRODUCTS_DIR undefined; exiting."
    exit 1
fi

echo "Configuring Settings Bundle."

PLIST=/usr/libexec/PlistBuddy
PRODUCT_PATH="${BUILT_PRODUCTS_DIR}/${PRODUCT_NAME}.app"
INFOPLIST_PATH="${PRODUCT_PATH}/Info.plist"
SETTINGS_BUNDLE_PATH="${PRODUCT_PATH}/Settings.bundle"
SETTINGS_ROOT_PLIST_PATH="${SETTINGS_BUNDLE_PATH}/Root.plist"

# Check for presence of app group ID in Info.plist.
APP_GRP_ID=$(${PLIST} -c "Print KIAppGroupKey" ${INFOPLIST_PATH})
if [ "X${APP_GRP_ID}" = "X" ]; then
    echo "WARN: No plist value found for KIAppGroupKey, skipping patch of Settings.bundle."
    exit 0
fi

EXISTING_SETTINGS_APP_GRP_ID=$(${PLIST} -c "Print ApplicationGroupContainerIdentifier" ${SETTINGS_ROOT_PLIST_PATH})
if [ "X${EXISTING_SETTINGS_APP_GRP_ID}" != "X" ]; then
    # We have an app grp id in settings; update it.
    echo "Found existing Settings.bundle:ApplicationGroupContainerIdentifier == ${EXISTING_SETTINGS_APP_GRP_ID}; deleting it."
    RMCMD="Delete ApplicationGroupContainerIdentifier"
    echo "Executing '${PLIST} -c \"${RMCMD}\" ${SETTINGS_ROOT_PLIST_PATH}'"
    ${PLIST} -c "${RMCMD}" ${SETTINGS_ROOT_PLIST_PATH}
fi

SETCMD="Add ApplicationGroupContainerIdentifier string group.${APP_GRP_ID}"
echo "Will execute: '${PLIST} -c \"${SETCMD}\" ${SETTINGS_ROOT_PLIST_PATH}'"
${PLIST} -c "${SETCMD}" ${SETTINGS_ROOT_PLIST_PATH}
use actix_web::{HttpServer, App, middleware, web, HttpResponse, Result};
use actix_cors::Cors;

/// Handler for GET "/": responds 200 OK with a fixed greeting body.
async fn index() -> Result<HttpResponse> {
    Ok(HttpResponse::Ok().body("Hello, world!"))
}

/// Starts an HTTP server on 127.0.0.1:8080 with request logging and a
/// permissive CORS policy.
/// NOTE(review): `Cors::permissive()` allows any origin — fine for local
/// development, but should be tightened before production use.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let api_address = "127.0.0.1:8080";

    // App factory closure: invoked once per server worker.
    let app = move || {
        App::new()
            .wrap(middleware::Logger::default())
            .wrap(Cors::permissive())
            .service(web::resource("/").to(index))
    };

    HttpServer::new(app)
        .bind(api_address)?
        .run()
        .await
}
/* * Copyright (c) 2008, <NAME> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name "TwelveMonkeys" nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.twelvemonkeys.servlet.image; import com.twelvemonkeys.lang.StringUtil; import com.twelvemonkeys.servlet.ServletUtil; import javax.servlet.ServletRequest; import javax.servlet.http.HttpServletRequest; import java.awt.*; import java.awt.geom.Rectangle2D; /** * This servlet is capable of rendereing a text string and output it as an * image. 
The text can be rendered in any given font, size, * style or color, into an image, and output it as a GIF, JPEG or PNG image, * with optional caching of the rendered image files. * * <P><HR><P> * * <A name="parameters"></A><STRONG>Parameters:</STRONG><BR> * <DL> * <DT>{@code text}</DT> * <DD>string, the text string to render. * <DT>{@code width}</DT> * <DD>integer, the width of the image * <DT>{@code height}</DT> * <DD>integer, the height of the image * <DT>{@code fontFamily}</DT> * <DD>string, the name of the font family. * Default is {@code "Helvetica"}. * <DT>{@code fontSize}</DT> * <DD>integer, the size of the font. Default is {@code 12}. * <DT>{@code fontStyle}</DT> * <DD>string, the tyle of the font. Can be one of the constants * {@code plain} (default), {@code bold}, {@code italic} or * {@code bolditalic}. Any other will result in {@code plain}. * <DT>{@code fgcolor}</DT> * <DD>color (HTML form, {@code #RRGGBB}), or color constant from * {@link java.awt.Color}, default is {@code "black"}. * <DT>{@code bgcolor}</DT> * <DD>color (HTML form, {@code #RRGGBB}), or color constant from * {@link java.awt.Color}, default is {@code "transparent"}. * Note that the hash character ({@code "#"}) used in colors must be * escaped as {@code %23} in the query string. See * {@link StringUtil#toColor(String)}, <A href="#examples">examples</A>. * * <!-- inherited from ImageServlet below: --> * * <DT>{@code cache}</DT> * <DD>boolean, {@code true} if you want to cache the result * to disk (default). * * <DT>{@code compression}</DT> * <DD>float, the optional compression ratio for the output image. For JPEG * images, the quality is the inverse of the compression ratio. See * {@link #JPEG_DEFAULT_COMPRESSION_LEVEL}, * {@link #PNG_DEFAULT_COMPRESSION_LEVEL}. * <DD>Applies to JPEG and PNG images only. * * <DT>{@code dither}</DT> * <DD>enumerated, one of {@code NONE}, {@code DEFAULT} or * {@code FS}, if you want to dither the result ({@code DEFAULT} is * default). 
* {@code FS} will produce the best results, but it's slower. * <DD>Use in conjuction with {@code indexed}, {@code palette} * and {@code websafe}. * <DD>Applies to GIF and PNG images only. * * <DT>{@code fileName}</DT> * <DD>string, an optional filename. If not set, the path after the servlet * ({@link HttpServletRequest#getPathInfo}) will be used for the cache * filename. See {@link #getCacheFile(ServletRequest)}, * {@link #getCacheRoot}. * * <DT>{@code height}</DT> * <DD>integer, the height of the image. * * <DT>{@code width}</DT> * <DD>integer, the width of the image. * * <DT>{@code indexed}</DT> * <DD>integer, the number of colors in the resulting image, or -1 (default). * If the value is set and positive, the image will use an * {@code IndexColorModel} with * the number of colors specified. Otherwise the image will be true color. * <DD>Applies to GIF and PNG images only. * * <DT>{@code palette}</DT> * <DD>string, an optional filename. If set, the image will use IndexColorModel * with a palette read from the given file. * <DD>Applies to GIF and PNG images only. * * <DT>{@code websafe}</DT> * <DD>boolean, {@code true} if you want the result to use the 216 color * websafe palette (default is false). * <DD>Applies to GIF and PNG images only. 
* </DL> * * @example * &lt;IMG src="/text/test.gif?height=40&width=600 * &fontFamily=TimesRoman&fontSize=30&fontStyle=italic&fgcolor=%23990033 * &bgcolor=%23cccccc&text=the%20quick%20brown%20fox%20jumps%20over%20the * %20lazy%20dog&cache=false" /&gt; * * @example * &lt;IMG src="/text/test.jpg?height=40&width=600 * &fontFamily=TimesRoman&fontSize=30&fontStyle=italic&fgcolor=black * &bgcolor=%23cccccc&text=the%20quick%20brown%20fox%20jumps%20over%20the * %20lazy%20dog&compression=3&cache=false" /&gt; * * @example * &lt;IMG src="/text/test.png?height=40&width=600 * &fontFamily=TimesRoman&fontSize=30&fontStyle=italic&fgcolor=%23336699 * &bgcolor=%23cccccc&text=the%20quick%20brown%20fox%20jumps%20over%20the * %20lazy%20dog&cache=true" /&gt; * * @author <a href="mailto:<EMAIL>"><NAME></a> * @author last modified by $Author: haku $ * @version $Id: TextRenderer.java#2 $ */ class TextRenderer /*extends ImageServlet implements ImagePainterServlet*/ { // TODO: Create something usable out of this piece of old junk.. ;-) // It just needs a graphics object to write onto // Alternatively, defer, and compute the size needed // Or, make it a filter... 
/** {@code "italic"} */ public final static String FONT_STYLE_ITALIC = "italic"; /** {@code "plain"} */ public final static String FONT_STYLE_PLAIN = "plain"; /** {@code "bold"} */ public final static String FONT_STYLE_BOLD = "bold"; /** {@code text} */ public final static String PARAM_TEXT = "text"; /** {@code marginLeft} */ public final static String PARAM_MARGIN_LEFT = "marginLeft"; /** {@code marginTop} */ public final static String PARAM_MARGIN_TOP = "marginTop"; /** {@code fontFamily} */ public final static String PARAM_FONT_FAMILY = "fontFamily"; /** {@code fontSize} */ public final static String PARAM_FONT_SIZE = "fontSize"; /** {@code fontStyle} */ public final static String PARAM_FONT_STYLE = "fontStyle"; /** {@code textRotation} */ public final static String PARAM_TEXT_ROTATION = "textRotation"; /** {@code textRotation} */ public final static String PARAM_TEXT_ROTATION_UNITS = "textRotationUnits"; /** {@code bgcolor} */ public final static String PARAM_BGCOLOR = "bgcolor"; /** {@code fgcolor} */ public final static String PARAM_FGCOLOR = "fgcolor"; protected final static String ROTATION_DEGREES = "DEGREES"; protected final static String ROTATION_RADIANS = "RADIANS"; /** * Creates the TextRender servlet. */ public TextRenderer() { } /** * Renders the text string for this servlet request. */ private void paint(ServletRequest pReq, Graphics2D pRes, int pWidth, int pHeight) throws ImageServletException { // Get parameters String text = pReq.getParameter(PARAM_TEXT); String[] lines = StringUtil.toStringArray(text, "\n\r"); String fontFamily = pReq.getParameter(PARAM_FONT_FAMILY); String fontSize = pReq.getParameter(PARAM_FONT_SIZE); String fontStyle = pReq.getParameter(PARAM_FONT_STYLE); String bgcolor = pReq.getParameter(PARAM_BGCOLOR); String fgcolor = pReq.getParameter(PARAM_FGCOLOR); // TODO: Make them static.. 
pRes.addRenderingHints(new RenderingHints(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON)); pRes.addRenderingHints(new RenderingHints(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY)); pRes.addRenderingHints(new RenderingHints(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY)); // pRes.addRenderingHints(new RenderingHints(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON)); //System.out.println(pRes.getBackground()); // Clear area with bgcolor if (!StringUtil.isEmpty(bgcolor)) { pRes.setBackground(StringUtil.toColor(bgcolor)); pRes.clearRect(0, 0, pWidth, pHeight); //System.out.println(pRes.getBackground()); } // Create and set font Font font = new Font( fontFamily != null ? fontFamily : "Helvetica", getFontStyle(fontStyle), fontSize != null ? Integer.parseInt(fontSize) : 12 ); pRes.setFont(font); // Set rotation double angle = getAngle(pReq); pRes.rotate(angle, pWidth / 2.0, pHeight / 2.0); // Draw string in fgcolor pRes.setColor(fgcolor != null ? StringUtil.toColor(fgcolor) : Color.black); float x = ServletUtil.getFloatParameter(pReq, PARAM_MARGIN_LEFT, Float.MIN_VALUE); Rectangle2D[] bounds = new Rectangle2D[lines.length]; if (x <= Float.MIN_VALUE) { // Center float longest = 0f; for (int i = 0; i < lines.length; i++) { bounds[i] = font.getStringBounds(lines[i], pRes.getFontRenderContext()); if (bounds[i].getWidth() > longest) { longest = (float) bounds[i].getWidth(); } } //x = (float) ((pWidth - bounds.getWidth()) / 2f); x = (float) ((pWidth - longest) / 2f); //System.out.println("marginLeft: " + x); } //else { //System.out.println("marginLeft (from param): " + x); //} float y = ServletUtil.getFloatParameter(pReq, PARAM_MARGIN_TOP, Float.MIN_VALUE); float lineHeight = (float) (bounds[0] != null ? 
bounds[0].getHeight() : font.getStringBounds(lines[0], pRes.getFontRenderContext()).getHeight()); if (y <= Float.MIN_VALUE) { // Center y = (float) ((pHeight - lineHeight) / 2f) - (lineHeight * (lines.length - 2.5f) / 2f); //System.out.println("marginTop: " + y); } else { // Todo: Correct for font height? y += font.getSize2D(); //System.out.println("marginTop (from param):" + y); } //System.out.println("Font size: " + font.getSize2D()); //System.out.println("Line height: " + lineHeight); // Draw for (int i = 0; i < lines.length; i++) { pRes.drawString(lines[i], x, y + lineHeight * i); } } /** * Returns the font style constant. * * @param pStyle a string containing either the word {@code "plain"} or one * or more of {@code "bold"} and {@code italic}. * @return the font style constant as defined in {@link Font}. * * @see Font#PLAIN * @see Font#BOLD * @see Font#ITALIC */ private int getFontStyle(String pStyle) { if (pStyle == null || StringUtil.containsIgnoreCase(pStyle, FONT_STYLE_PLAIN)) { return Font.PLAIN; } // Try to find bold/italic int style = Font.PLAIN; if (StringUtil.containsIgnoreCase(pStyle, FONT_STYLE_BOLD)) { style |= Font.BOLD; } if (StringUtil.containsIgnoreCase(pStyle, FONT_STYLE_ITALIC)) { style |= Font.ITALIC; } return style; } /** * Gets the angle of rotation from the request. * * @param pRequest the servlet request to get parameters from * @return the angle in radians. */ private double getAngle(ServletRequest pRequest) { // Get angle double angle = ServletUtil.getDoubleParameter(pRequest, PARAM_TEXT_ROTATION, 0.0); // Convert to radians, if needed String units = pRequest.getParameter(PARAM_TEXT_ROTATION_UNITS); if (!StringUtil.isEmpty(units) && ROTATION_DEGREES.equalsIgnoreCase(units)) { angle = Math.toRadians(angle); } return angle; } }
<filename>database/schemas/GuildPremium.js const mongoose = require('mongoose'); const guildConfigSchema = mongoose.Schema({ code: {type: mongoose.SchemaTypes.String, default: null}, expiresAt: {type: mongoose.SchemaTypes.String, default: Date.now() + 2592000000}, plan: {type: mongoose.SchemaTypes.String, default: null}, redeemed: {type: Boolean, default: false} }); module.exports = mongoose.model('premium-guild', guildConfigSchema);
<gh_stars>10-100
// Styles for a file-attachment form field.
// NOTE(review): `Dimensions` is imported but unused here — confirm and drop.
import { StyleSheet, Dimensions } from 'react-native';

const styles = StyleSheet.create({
  // Outer wrapper of the whole field.
  topContainer: {
    width: '90%',
    marginHorizontal: 20,
  },
  // Centers the current value inside the row.
  valueContainer: {
    alignItems: 'center',
    justifyContent: 'center',
    flex: 1,
  },
  // Trailing icon, right-aligned.
  iconStyle: {
    fontFamily: 'roboto',
    alignSelf: 'flex-end',
    marginEnd:5,
  },
  // Bordered row containing the file entry.
  fileTopWrapper:{
    height: 40,
    marginBottom: 10,
    borderColor: '#E0E0E0',
    borderRadius: 5,
    borderWidth: 1,
  },
  // Horizontal layout filling the bordered row.
  fileInnerWrapper:{
    height: '100%',
    width: '100%',
    flexDirection: 'row',
    alignItems: 'center',
  },
  // Right-aligned icon area inside the row.
  fileIconWrapper:{
    width: '23%',
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'flex-end',
  }
});

export default styles;
/** * @author <NAME>. * @version 1.0 */ package ua.nure.fiedietsov.payments; import java.io.IOException; import java.io.StringWriter; import java.sql.Connection; import java.sql.SQLException; import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.sql.DataSource; import org.apache.log4j.Logger; /** * Support Servlet helps to check connection from DataSorce. * @author <NAME>. * @version 1.0 */ @WebServlet("/data_source_check") public class Login extends HttpServlet { private static final long serialVersionUID = 1L; private static final Logger LOGGER = Logger.getLogger(Login.class); protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { LOGGER.debug("(login.class)/data_source_check#doget started"); response.setContentType("text/html;charset=UTF-8"); try { Context initCtx = new InitialContext(); Context envCtx = (Context) initCtx.lookup("java:comp/env"); DataSource ds = (DataSource) envCtx.lookup("jdbc/paymentsdb"); Connection con = ds.getConnection(); System.out.println(con); LOGGER.warn(con); response.getWriter().println("con is " + con); con.close(); } catch (NamingException | SQLException ex) { LOGGER.error(ex); StringWriter sw = new StringWriter(); LOGGER.error("cannot open|close con", ex); response.getWriter().println(sw); } } }
<reponame>psy-repos-javascript/medusa import { MedusaError } from "medusa-core-utils" import { BaseService } from "medusa-interfaces" import { Brackets } from "typeorm" import { formatException } from "../utils/exception-formatter" /** * Provides layer to manipulate products. * @extends BaseService */ class ProductService extends BaseService { static IndexName = `products` static Events = { UPDATED: "product.updated", CREATED: "product.created", DELETED: "product.deleted", } constructor({ manager, productRepository, productVariantRepository, productOptionRepository, eventBusService, productVariantService, productCollectionService, productTypeRepository, productTagRepository, imageRepository, searchService, cartRepository, priceSelectionStrategy, }) { super() /** @private @const {EntityManager} */ this.manager_ = manager /** @private @const {ProductOption} */ this.productOptionRepository_ = productOptionRepository /** @private @const {Product} */ this.productRepository_ = productRepository /** @private @const {ProductVariant} */ this.productVariantRepository_ = productVariantRepository /** @private @const {EventBus} */ this.eventBus_ = eventBusService /** @private @const {ProductVariantService} */ this.productVariantService_ = productVariantService /** @private @const {ProductCollectionService} */ this.productCollectionService_ = productCollectionService /** @private @const {ProductCollectionService} */ this.productTypeRepository_ = productTypeRepository /** @private @const {ProductCollectionService} */ this.productTagRepository_ = productTagRepository /** @private @const {ImageRepository} */ this.imageRepository_ = imageRepository /** @private @const {SearchService} */ this.searchService_ = searchService /** @private @const {CartRepository} */ this.cartRepository_ = cartRepository /** @private @const {IPriceSelectionStrategy} */ this.priceSelectionStrategy_ = priceSelectionStrategy } withTransaction(transactionManager) { if (!transactionManager) { return this } 
const cloned = new ProductService({ manager: transactionManager, productRepository: this.productRepository_, productVariantRepository: this.productVariantRepository_, productOptionRepository: this.productOptionRepository_, eventBusService: this.eventBus_, productVariantService: this.productVariantService_, productCollectionService: this.productCollectionService_, productTagRepository: this.productTagRepository_, productTypeRepository: this.productTypeRepository_, imageRepository: this.imageRepository_, cartRepository: this.cartRepository_, priceSelectionStrategy: this.priceSelectionStrategy_, }) cloned.transactionManager_ = transactionManager return cloned } /** * Lists products based on the provided parameters. * @param {object} selector - an object that defines rules to filter products * by * @param {object} config - object that defines the scope for what should be * returned * @return {Promise<Product[]>} the result of the find operation */ async list( selector = {}, config = { relations: [], skip: 0, take: 20, include_discount_prices: false, } ) { const productRepo = this.manager_.getCustomRepository( this.productRepository_ ) const priceIndex = config.relations?.indexOf("variants.prices") ?? -1 if (priceIndex >= 0 && config.relations) { config.relations = [...config.relations] config.relations.splice(priceIndex, 1) } const { q, query, relations } = this.prepareListQuery_(selector, config) if (q) { const qb = this.getFreeTextQueryBuilder_(productRepo, query, q) const raw = await qb.getMany() return productRepo.findWithRelations( relations, raw.map((i) => i.id), query.withDeleted ?? false ) } const products = productRepo.findWithRelations(relations, query) return priceIndex > -1 ? await this.setAdditionalPrices( products, config.currency_code, config.region_id, config.cart_id, config.customer_id, config.include_discount_prices ) : products } /** * Lists products based on the provided parameters and includes the count of * products that match the query. 
* @param {object} selector - an object that defines rules to filter products * by * @param {object} config - object that defines the scope for what should be * returned * @return {Promise<[Product[], number]>} an array containing the products as * the first element and the total count of products that matches the query * as the second element. */ async listAndCount( selector = {}, config = { relations: [], skip: 0, take: 20, include_discount_prices: false, } ) { const productRepo = this.manager_.getCustomRepository( this.productRepository_ ) const priceIndex = config.relations?.indexOf("variants.prices") ?? -1 if (priceIndex >= 0 && config.relations) { config.relations = [...config.relations] config.relations.splice(priceIndex, 1) } const { q, query, relations } = this.prepareListQuery_(selector, config) if (q) { const qb = this.getFreeTextQueryBuilder_(productRepo, query, q) const [raw, count] = await qb.getManyAndCount() const products = await productRepo.findWithRelations( relations, raw.map((i) => i.id), query.withDeleted ?? false ) return [products, count] } const [products, count] = await productRepo.findWithRelationsAndCount( relations, query ) if (priceIndex > -1) { const productsWithAdditionalPrices = await this.setAdditionalPrices( products, config.currency_code, config.region_id, config.cart_id, config.customer_id, config.include_discount_prices ) return [productsWithAdditionalPrices, count] } else { return [products, count] } } /** * Return the total number of documents in database * @param {object} selector - the selector to choose products by * @return {Promise} the result of the count operation */ count(selector = {}) { const productRepo = this.manager_.getCustomRepository( this.productRepository_ ) const query = this.buildQuery_(selector) return productRepo.count(query) } /** * Gets a product by id. * Throws in case of DB Error and if product was not found. * @param {string} productId - id of the product to get. 
* @param {object} config - object that defines what should be included in the * query response * @return {Promise<Product>} the result of the find one operation. */ async retrieve(productId, config = { include_discount_prices: false }) { const productRepo = this.manager_.getCustomRepository( this.productRepository_ ) const validatedId = this.validateId_(productId) const priceIndex = config.relations?.indexOf("variants.prices") ?? -1 if (priceIndex >= 0 && config.relations) { config.relations = [...config.relations] config.relations.splice(priceIndex, 1) } const query = { where: { id: validatedId } } if (config.relations && config.relations.length > 0) { query.relations = config.relations } if (config.select && config.select.length > 0) { query.select = config.select } const rels = query.relations delete query.relations const product = await productRepo.findOneWithRelations(rels, query) if (!product) { throw new MedusaError( MedusaError.Types.NOT_FOUND, `Product with id: ${productId} was not found` ) } return priceIndex > -1 ? await this.setAdditionalPrices( product, config.currency_code, config.region_id, config.cart_id, config.customer_id, config.include_discount_prices ) : product } /** * Gets a product by handle. * Throws in case of DB Error and if product was not found. * @param {string} productHandle - handle of the product to get. * @param {object} config - details about what to get from the product * @return {Promise<Product>} the result of the find one operation. */ async retrieveByHandle(productHandle, config = {}) { const productRepo = this.manager_.getCustomRepository( this.productRepository_ ) const priceIndex = config.relations?.indexOf("variants.prices") ?? 
-1 if (priceIndex >= 0 && config.relations) { config.relations = [...config.relations] config.relations.splice(priceIndex, 1) } const query = { where: { handle: productHandle } } if (config.relations && config.relations.length > 0) { query.relations = config.relations } if (config.select && config.select.length > 0) { query.select = config.select } const rels = query.relations delete query.relations const product = await productRepo.findOneWithRelations(rels, query) if (!product) { throw new MedusaError( MedusaError.Types.NOT_FOUND, `Product with handle: ${productHandle} was not found` ) } return priceIndex > -1 ? await this.setAdditionalPrices( product, config.currency_code, config.region_id, config.cart_id, config.customer_id, config.include_discount_prices ) : product } /** * Gets a product by external id. * Throws in case of DB Error and if product was not found. * @param {string} externalId - handle of the product to get. * @param {object} config - details about what to get from the product * @return {Promise<Product>} the result of the find one operation. */ async retrieveByExternalId(externalId, config = {}) { const productRepo = this.manager_.getCustomRepository( this.productRepository_ ) const priceIndex = config.relations?.indexOf("variants.prices") ?? -1 if (priceIndex >= 0 && config.relations) { config.relations = [...config.relations] config.relations.splice(priceIndex, 1) } const query = { where: { external_id: externalId } } if (config.relations && config.relations.length > 0) { query.relations = config.relations } if (config.select && config.select.length > 0) { query.select = config.select } const rels = query.relations delete query.relations const product = await productRepo.findOneWithRelations(rels, query) if (!product) { throw new MedusaError( MedusaError.Types.NOT_FOUND, `Product with exteral_id: ${externalId} was not found` ) } return priceIndex > -1 ? 
await this.setAdditionalPrices( product, config.currency_code, config.region_id, config.cart_id, config.customer_id, config.include_discount_prices ) : product } /** * Gets all variants belonging to a product. * @param {string} productId - the id of the product to get variants from. * @return {Promise} an array of variants */ async retrieveVariants(productId) { const product = await this.retrieve(productId, { relations: ["variants"] }) return product.variants } async listTypes() { const productTypeRepository = this.manager_.getCustomRepository( this.productTypeRepository_ ) return await productTypeRepository.find({}) } async listTagsByUsage(count = 10) { const tags = await this.manager_.query( ` SELECT ID, O.USAGE_COUNT, PT.VALUE FROM PRODUCT_TAG PT LEFT JOIN (SELECT COUNT(*) AS USAGE_COUNT, PRODUCT_TAG_ID FROM PRODUCT_TAGS GROUP BY PRODUCT_TAG_ID) O ON O.PRODUCT_TAG_ID = PT.ID ORDER BY O.USAGE_COUNT DESC LIMIT $1`, [count] ) return tags } async upsertProductType_(type) { const productTypeRepository = this.manager_.getCustomRepository( this.productTypeRepository_ ) if (type === null) { return null } const existing = await productTypeRepository.findOne({ where: { value: type.value }, }) if (existing) { return existing.id } const created = productTypeRepository.create({ value: type.value, }) const result = await productTypeRepository.save(created) return result.id } async upsertProductTags_(tags) { const productTagRepository = this.manager_.getCustomRepository( this.productTagRepository_ ) const newTags = [] for (const tag of tags) { const existing = await productTagRepository.findOne({ where: { value: tag.value }, }) if (existing) { newTags.push(existing) } else { const created = productTagRepository.create(tag) const result = await productTagRepository.save(created) newTags.push(result) } } return newTags } /** * Creates a product. * @param {object} productObject - the product to create * @return {Promise} resolves to the creation result. 
*/ async create(productObject) { return this.atomicPhase_(async (manager) => { const productRepo = manager.getCustomRepository(this.productRepository_) const optionRepo = manager.getCustomRepository( this.productOptionRepository_ ) const { options, tags, type, images, ...rest } = productObject if (!rest.thumbnail && images && images.length) { rest.thumbnail = images[0] } // if product is a giftcard, we should disallow discounts if (rest.is_giftcard) { rest.discountable = false } try { let product = productRepo.create(rest) if (images) { product.images = await this.upsertImages_(images) } if (tags) { product.tags = await this.upsertProductTags_(tags) } if (typeof type !== `undefined`) { product.type_id = await this.upsertProductType_(type) } product = await productRepo.save(product) product.options = await Promise.all( options.map(async (o) => { const res = optionRepo.create({ ...o, product_id: product.id }) await optionRepo.save(res) return res }) ) const result = await this.retrieve(product.id, { relations: ["options"], }) await this.eventBus_ .withTransaction(manager) .emit(ProductService.Events.CREATED, { id: result.id, }) return result } catch (error) { throw formatException(error) } }) } async upsertImages_(images) { const imageRepository = this.manager_.getCustomRepository( this.imageRepository_ ) const productImages = [] for (const img of images) { const existing = await imageRepository.findOne({ where: { url: img }, }) if (existing) { productImages.push(existing) } else { const created = imageRepository.create({ url: img }) productImages.push(created) } } return productImages } /** * Updates a product. Product variant updates should use dedicated methods, * e.g. `addVariant`, etc. The function will throw errors if metadata or * product variant updates are attempted. * @param {string} productId - the id of the product. Must be a string that * can be casted to an ObjectId * @param {object} update - an object with the update values. 
* @return {Promise} resolves to the update result. */ async update(productId, update) { return this.atomicPhase_(async (manager) => { const productRepo = manager.getCustomRepository(this.productRepository_) const productVariantRepo = manager.getCustomRepository( this.productVariantRepository_ ) const product = await this.retrieve(productId, { relations: ["variants", "tags", "images"], }) const { variants, metadata, images, tags, type, ...rest } = update if (!product.thumbnail && !update.thumbnail && images?.length) { product.thumbnail = images[0] } if (images) { product.images = await this.upsertImages_(images) } if (metadata) { product.metadata = this.setMetadata_(product, metadata) } if (typeof type !== `undefined`) { product.type_id = await this.upsertProductType_(type) } if (tags) { product.tags = await this.upsertProductTags_(tags) } if (variants) { // Iterate product variants and update their properties accordingly for (const variant of product.variants) { const exists = variants.find((v) => v.id && variant.id === v.id) if (!exists) { await productVariantRepo.remove(variant) } } const newVariants = [] for (const [i, newVariant] of variants.entries()) { newVariant.variant_rank = i if (newVariant.id) { const variant = product.variants.find((v) => v.id === newVariant.id) if (!variant) { throw new MedusaError( MedusaError.Types.NOT_FOUND, `Variant with id: ${newVariant.id} is not associated with this product` ) } const saved = await this.productVariantService_ .withTransaction(manager) .update(variant, newVariant) newVariants.push(saved) } else { // If the provided variant does not have an id, we assume that it // should be created const created = await this.productVariantService_ .withTransaction(manager) .create(product.id, newVariant) newVariants.push(created) } } product.variants = newVariants } for (const [key, value] of Object.entries(rest)) { if (typeof value !== `undefined`) { product[key] = value } } const result = await productRepo.save(product) await 
this.eventBus_ .withTransaction(manager) .emit(ProductService.Events.UPDATED, { id: result.id, fields: Object.keys(update), }) return result }) } /** * Deletes a product from a given product id. The product's associated * variants will also be deleted. * @param {string} productId - the id of the product to delete. Must be * castable as an ObjectId * @return {Promise} empty promise */ async delete(productId) { return this.atomicPhase_(async (manager) => { const productRepo = manager.getCustomRepository(this.productRepository_) // Should not fail, if product does not exist, since delete is idempotent const product = await productRepo.findOne( { id: productId }, { relations: ["variants"] } ) if (!product) { return Promise.resolve() } await productRepo.softRemove(product) await this.eventBus_ .withTransaction(manager) .emit(ProductService.Events.DELETED, { id: productId, }) return Promise.resolve() }) } /** * Adds an option to a product. Options can, for example, be "Size", "Color", * etc. Will update all the products variants with a dummy value for the newly * created option. The same option cannot be added more than once. * @param {string} productId - the product to apply the new option to * @param {string} optionTitle - the display title of the option, e.g. 
"Size" * @return {Promise} the result of the model update operation */ async addOption(productId, optionTitle) { return this.atomicPhase_(async (manager) => { const productOptionRepo = manager.getCustomRepository( this.productOptionRepository_ ) const product = await this.retrieve(productId, { relations: ["options", "variants"], }) if (product.options.find((o) => o.title === optionTitle)) { throw new MedusaError( MedusaError.Types.DUPLICATE_ERROR, `An option with the title: ${optionTitle} already exists` ) } const option = await productOptionRepo.create({ title: optionTitle, product_id: productId, }) await productOptionRepo.save(option) for (const variant of product.variants) { this.productVariantService_ .withTransaction(manager) .addOptionValue(variant.id, option.id, "Default Value") } const result = await this.retrieve(productId) await this.eventBus_ .withTransaction(manager) .emit(ProductService.Events.UPDATED, result) return result }) } async reorderVariants(productId, variantOrder) { return this.atomicPhase_(async (manager) => { const productRepo = manager.getCustomRepository(this.productRepository_) const product = await this.retrieve(productId, { relations: ["variants"], }) if (product.variants.length !== variantOrder.length) { throw new MedusaError( MedusaError.Types.INVALID_DATA, `Product variants and new variant order differ in length.` ) } product.variants = variantOrder.map((vId) => { const variant = product.variants.find((v) => v.id === vId) if (!variant) { throw new MedusaError( MedusaError.Types.INVALID_DATA, `Product has no variant with id: ${vId}` ) } return variant }) const result = productRepo.save(product) await this.eventBus_ .withTransaction(manager) .emit(ProductService.Events.UPDATED, result) return result }) } /** * Changes the order of a product's options. Will throw if the length of * optionOrder and the length of the product's options are different. Will * throw optionOrder contains an id not associated with the product. 
* @param {string} productId - the product whose options we are reordering * @param {string[]} optionOrder - the ids of the product's options in the * new order * @return {Promise} the result of the update operation */ async reorderOptions(productId, optionOrder) { return this.atomicPhase_(async (manager) => { const productRepo = manager.getCustomRepository(this.productRepository_) const product = await this.retrieve(productId, { relations: ["options"] }) if (product.options.length !== optionOrder.length) { throw new MedusaError( MedusaError.Types.INVALID_DATA, `Product options and new options order differ in length.` ) } product.options = optionOrder.map((oId) => { const option = product.options.find((o) => o.id === oId) if (!option) { throw new MedusaError( MedusaError.Types.INVALID_DATA, `Product has no option with id: ${oId}` ) } return option }) const result = productRepo.save(product) await this.eventBus_ .withTransaction(manager) .emit(ProductService.Events.UPDATED, result) return result }) } /** * Updates a product's option. Throws if the call tries to update an option * not associated with the product. Throws if the updated title already exists. 
* @param {string} productId - the product whose option we are updating * @param {string} optionId - the id of the option we are updating * @param {object} data - the data to update the option with * @return {Promise} the updated product */ async updateOption(productId, optionId, data) { return this.atomicPhase_(async (manager) => { const productOptionRepo = manager.getCustomRepository( this.productOptionRepository_ ) const product = await this.retrieve(productId, { relations: ["options"] }) const { title, values } = data const optionExists = product.options.some( (o) => o.title.toUpperCase() === title.toUpperCase() && o.id !== optionId ) if (optionExists) { throw new MedusaError( MedusaError.Types.NOT_FOUND, `An option with title ${title} already exists` ) } const productOption = await productOptionRepo.findOne({ where: { id: optionId }, }) if (!productOption) { throw new MedusaError( MedusaError.Types.NOT_FOUND, `Option with id: ${optionId} does not exist` ) } productOption.title = title productOption.values = values await productOptionRepo.save(productOption) await this.eventBus_ .withTransaction(manager) .emit(ProductService.Events.UPDATED, product) return product }) } /** * Delete an option from a product. * @param {string} productId - the product to delete an option from * @param {string} optionId - the option to delete * @return {Promise} the updated product */ async deleteOption(productId, optionId) { return this.atomicPhase_(async (manager) => { const productOptionRepo = manager.getCustomRepository( this.productOptionRepository_ ) const product = await this.retrieve(productId, { relations: ["variants", "variants.options"], }) const productOption = await productOptionRepo.findOne({ where: { id: optionId, product_id: productId }, }) if (!productOption) { return Promise.resolve() } // For the option we want to delete, make sure that all variants have the // same option values. The reason for doing is, that we want to avoid // duplicate variants. 
For example, if we have a product with size and // color options, that has four variants: (black, 1), (black, 2), // (blue, 1), (blue, 2) and we delete the size option from the product, // we would end up with four variants: (black), (black), (blue), (blue). // We now have two duplicate variants. To ensure that this does not // happen, we will force the user to select which variants to keep. const firstVariant = product.variants[0] const valueToMatch = firstVariant.options.find( (o) => o.option_id === optionId ).value const equalsFirst = await Promise.all( product.variants.map(async (v) => { const option = v.options.find((o) => o.option_id === optionId) return option.value === valueToMatch }) ) if (!equalsFirst.every((v) => v)) { throw new MedusaError( MedusaError.Types.INVALID_DATA, `To delete an option, first delete all variants, such that when an option is deleted, no duplicate variants will exist.` ) } // If we reach this point, we can safely delete the product option await productOptionRepo.softRemove(productOption) await this.eventBus_ .withTransaction(manager) .emit(ProductService.Events.UPDATED, product) return product }) } /** * Decorates a product with product variants. * @param {string} productId - the productId to decorate. * @param {string[]} fields - the fields to include. * @param {string[]} expandFields - fields to expand. * @param {object} config - retrieve config for price calculation. * @return {Product} return the decorated product. */ async decorate(productId, fields = [], expandFields = [], config = {}) { const requiredFields = ["id", "metadata"] const priceIndex = expandFields.indexOf("variants.prices") ?? -1 if (priceIndex >= 0 && expandFields.length) { expandFields = [...expandFields] expandFields.splice(priceIndex, 1) } fields = fields.concat(requiredFields) const product = await this.retrieve(productId, { select: fields, relations: expandFields, }) return priceIndex > -1 ? 
await this.setAdditionalPrices( product, config.currency_code, config.region_id, config.cart_id, config.customer_id, config.include_discount_prices ) : product } /** * Creates a query object to be used for list queries. * @param {object} selector - the selector to create the query from * @param {object} config - the config to use for the query * @return {object} an object containing the query, relations and free-text * search param. */ prepareListQuery_(selector, config) { let q if ("q" in selector) { q = selector.q delete selector.q } const query = this.buildQuery_(selector, config) if (config.relations && config.relations.length > 0) { query.relations = config.relations } if (config.select && config.select.length > 0) { query.select = config.select } const rels = query.relations delete query.relations return { query, relations: rels, q, } } /** * Creates a QueryBuilder that can fetch products based on free text. * @param {ProductRepository} productRepo - an instance of a ProductRepositry * @param {FindOptions<Product>} query - the query to get products by * @param {string} q - the text to perform free text search from * @return {QueryBuilder<Product>} a query builder that can fetch products */ getFreeTextQueryBuilder_(productRepo, query, q) { const where = query.where delete where.description delete where.title let qb = productRepo .createQueryBuilder("product") .leftJoinAndSelect("product.variants", "variant") .leftJoinAndSelect("product.collection", "collection") .select(["product.id"]) .where(where) .andWhere( new Brackets((qb) => { qb.where(`product.description ILIKE :q`, { q: `%${q}%` }) .orWhere(`product.title ILIKE :q`, { q: `%${q}%` }) .orWhere(`variant.title ILIKE :q`, { q: `%${q}%` }) .orWhere(`variant.sku ILIKE :q`, { q: `%${q}%` }) .orWhere(`collection.title ILIKE :q`, { q: `%${q}%` }) }) ) .skip(query.skip) .take(query.take) if (query.withDeleted) { qb = qb.withDeleted() } return qb } /** * Set additional prices on a list of products. 
* @param {Product[] | Product} products list of products on which to set additional prices * @param {string} currency_code currency code to fetch prices for * @param {string} region_id region to fetch prices for * @param {string} cart_id string of cart to use as a basis for getting currency and region * @param {string} customer_id id of potentially logged in customer, used to get prices valid for their customer groups * @param {boolean} include_discount_prices indication wether or not to include sales prices in result * @return {Promise<Product[]>} A list of products with variants decorated with "additional_prices" */ async setAdditionalPrices( products, currency_code, region_id, cart_id, customer_id, include_discount_prices = false ) { return this.atomicPhase_(async (manager) => { const cartRepo = this.manager_.getCustomRepository(this.cartRepository_) let regionId = region_id let currencyCode = currency_code if (cart_id) { const cart = await cartRepo.findOne({ where: { id: cart_id }, relations: ["region"], }) regionId = cart.region.id currencyCode = cart.region.currency_code } const productArray = Array.isArray(products) ? products : [products] const priceSelectionStrategy = this.priceSelectionStrategy_.withTransaction(manager) const productsWithPrices = await Promise.all( productArray.map(async (p) => { if (p.variants?.length) { p.variants = await Promise.all( p.variants.map(async (v) => { const prices = await priceSelectionStrategy.calculateVariantPrice(v.id, { region_id: regionId, currency_code: currencyCode, cart_id: cart_id, customer_id: customer_id, include_discount_prices, }) return { ...v, prices: prices.prices, original_price: prices.originalPrice, calculated_price: prices.calculatedPrice, calculated_price_type: prices.calculatedPriceType, } }) ) } return p }) ) return Array.isArray(products) ? productsWithPrices : productsWithPrices[0] }) } } export default ProductService
# Inserts +item+ into the already-sorted +array+ in place, keeping it sorted,
# and returns the array.
#
# Implements the insertion the old trailing comment only claimed: a binary
# search locates the insertion point (O(log n)) and a single +insert+ places
# the element (O(n)), instead of appending and re-sorting the whole array
# (O(n log n)) on every call.
#
# NOTE(review): assumes +array+ is already sorted — the previous
# append-and-sort! version also repaired unsorted input; confirm callers
# always pass sorted arrays.
def add_items(array, item)
  index = array.bsearch_index { |element| element >= item } || array.length
  array.insert(index, item)
end
#!/usr/bin/env bash
# Drives a sequence of hashgardcli transactions against a local test chain
# and patches the swagger fixture with a real transaction hash.
# All commands are best-effort: failures are not checked, matching the
# original test-fixture behavior.

# Test credentials and addresses for the throwaway contract-test chain.
PASSWORD="1234567890"
ADDR="cosmos16xyempempp92x9hyzz9wrgf94r6j9h5f06pxxv"
RECEIVER="cosmos17gx5vwpm0y2k59tw0x00ccug234n56cgltx2w2"
VALIDATOR="gardvaloper16xyempempp92x9hyzz9wrgf94r6j9h5f2w4n2l"
AMOUNT="1000000ugard"
CHAIN="lcd"
PROPOSALID="2"
# NOTE(review): this clobbers the shell's $HOME for the rest of the script —
# presumably intentional so hashgardcli picks up the test keystore; confirm.
HOME="/tmp/contract_tests/.hashgardcli"
SWAGGER='/tmp/contract_tests/swagger.yaml'

# sleeping a whole second between each step is a conservative precaution
# check lcd_test/testdata/state.tar.gz -> .hashgardd/config/config.toml precommit_timeout = 500ms
sleep 1s
# Submit a text governance proposal (the password is piped to stdin).
echo ${PASSWORD} | ./build/hashgardcli tx gov submit-proposal --home ${HOME} --from ${ADDR} --chain-id ${CHAIN} --type text --title test --description test_description --deposit 10000stake --yes
sleep 1s
# Top up the proposal's deposit so it enters the voting period.
echo ${PASSWORD} | ./build/hashgardcli tx gov deposit --home ${HOME} --from ${ADDR} --chain-id ${CHAIN} ${PROPOSALID} 1000000000stake --yes
sleep 1s
# Cast a Yes vote on the proposal.
echo ${PASSWORD} | ./build/hashgardcli tx gov vote --home ${HOME} --from ${ADDR} --yes --chain-id ${CHAIN} ${PROPOSALID} Yes
sleep 1s
# Send tokens and capture the resulting txhash from the CLI output.
HASH=$(echo ${PASSWORD} | ./build/hashgardcli tx send --home ${HOME} ${ADDR} ${RECEIVER} ${AMOUNT} --yes --chain-id ${CHAIN} | awk '/txhash.*/{print $2}')
# Substitute the placeholder hash in the swagger fixture with the real one
# (-i.bak keeps a backup copy of the original file).
sed -i.bak -e "s/BCBE20E8D46758B96AE5883B792858296AC06E51435490FBDCAE25A72B3CC76B/${HASH}/g" "${SWAGGER}"
echo "Replaced dummy with actual transaction hash ${HASH}"
sleep 1s
# Finally, unbond a small delegation from the validator.
echo ${PASSWORD} | ./build/hashgardcli tx staking unbond --home ${HOME} --from ${ADDR} ${VALIDATOR} 100gard --yes --chain-id ${CHAIN}
<filename>lib/has_opengraph.rb require 'has_opengraph/has_opengraph'
import React, {useState} from "react"; import { connectHits } from 'react-instantsearch-dom'; import { Grid, Typography } from "@material-ui/core"; import { Link } from "gatsby"; import NumberCartel from "../molecules/cartels/NumberCartel"; import FilmCartel from "../molecules/cartels/FilmCartel"; import PersonCartel from "../molecules/cartels/PersonCartel"; import SongCartel from "../molecules/cartels/SongCartel"; import Cartel from "../molecules/cartels/Cartel"; const Hits = ({ hits }) => { return ( hits.map(hit => { switch (hit.modelType) { case 'number': return <Cartel key={hit.uuid}><NumberCartel data={hit} /></Cartel> case 'film': return <Cartel key={hit.uuid}><FilmCartel data={hit} /></Cartel> case 'person': return <Cartel key={hit.uuid}><PersonCartel data={hit} /></Cartel> case 'song': return <Cartel key={hit.uuid}><SongCartel data={hit} /></Cartel> default: console.log('Error, unrecognized modelType .'); } }) ); } const CustomHits = connectHits(Hits); export default CustomHits;
#!/bin/bash
# BUG FIX: the shebang must be the first line of the file; it previously
# appeared after the license header, so the kernel ignored it.
#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements.  See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership.  The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License.  You may obtain a copy of the License at
# *
# *     http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */

# CDH releases this script can build against, and the component versions
# shipped with each release.
declare -a supported_CDH_versions=("5.14.2")
declare -A cdh_parquet_format_version_m=( ["5.14.2"]="2.1.0")
declare -A cdh_parquet_mr_version_m=( ["5.14.2"]="1.5.0")
declare -A cdh_hive_version_m=( ["5.14.2"]="1.1.0")

# Upstream Cloudera repositories.
PARQUET_MR_REPO=https://github.com/cloudera/parquet-mr
PARQUET_FORMAT_REPO=https://github.com/cloudera/parquet-format
HIVE_REPO=https://github.com/cloudera/hive

# Clone a single branch ($1) of a repository ($2) into the current directory.
function clone_repo(){
	echo "Clone Branch $1 from Repo $2"
	git clone -b "$1" --single-branch "$2"
}

function usage(){
	printf "Usage: sh build_hive_jars.sh CDH_release_version [PATH/TO/QAT_Codec_SRC]\n (e.g. sh build_hive_jars.sh 5.14.2 /home/user/workspace/QATCodec)\n"
	exit 1
}

# Exit with an error unless $1 is one of the supported CDH versions.
function check_CDH_version(){
	valid_version=false
	# BUG FIX: "$supported_CDH_versions" expands to the first element only;
	# "${supported_CDH_versions[@]}" iterates the whole array.
	for v in "${supported_CDH_versions[@]}"
	do
		if [ "$v" = "$1" ]; then
			valid_version=true
			break
		fi
	done
	if ! $valid_version ; then
		printf "Unsupported CDH version $1, current supported versions include: ${supported_CDH_versions[@]} \n"
		exit 1
	fi
}

# Clone the CDH Hive branch and overlay the QAT wrapper sources onto it.
apply_patch_to_cdh_hive(){
	pushd "$TARGET_DIR"
	CDH_major_version=$(echo $CDH_release_version | cut -d '.' -f 1)
	HIVE_BRANCH="cdh$CDH_major_version-${cdh_hive_version_m[$CDH_release_version]}_$CDH_release_version"
	clone_repo $HIVE_BRANCH $HIVE_REPO
	echo yes | cp -rf "$HIVE_QAT_DIR/$CDH_release_version/hive" "$TARGET_DIR"/
	popd
}

# Clone the CDH parquet-format branch and overlay the QAT wrapper sources.
apply_patch_to_cdh_parquet_format(){
	pushd "$TARGET_DIR"
	CDH_major_version=$(echo $CDH_release_version | cut -d '.' -f 1)
	PARQUET_FORMAT_BRANCH="cdh$CDH_major_version-${cdh_parquet_format_version_m[$CDH_release_version]}_$CDH_release_version"
	clone_repo $PARQUET_FORMAT_BRANCH $PARQUET_FORMAT_REPO
	echo yes | cp -rf "$HIVE_QAT_DIR/$CDH_release_version/parquet-format" "$TARGET_DIR"/
	popd
}

# Clone the CDH parquet-mr branch and overlay the QAT wrapper sources.
apply_patch_to_cdh_parquet_mr(){
	pushd "$TARGET_DIR"
	CDH_major_version=$(echo $CDH_release_version | cut -d '.' -f 1)
	PARQUET_MR_BRANCH="cdh$CDH_major_version-${cdh_parquet_mr_version_m[$CDH_release_version]}_$CDH_release_version"
	clone_repo $PARQUET_MR_BRANCH $PARQUET_MR_REPO
	echo yes | cp -rf "$HIVE_QAT_DIR/$CDH_release_version/parquet-mr" "$TARGET_DIR"/
	popd
}

if [ "$#" -ne 2 ]; then
	usage
fi

CDH_release_version=$1
check_CDH_version "$CDH_release_version"
QATCodec_SRC_DIR=$2
HIVE_QAT_DIR=$QATCodec_SRC_DIR/columnar_format_qat_wrapper
TARGET_DIR=$HIVE_QAT_DIR/target
if [ -d "$TARGET_DIR" ]; then
	# BUG FIX: the script previously only warned about a dirty target
	# directory and kept patching into it; abort instead.
	echo "$TARGET_DIR is not clean"
	exit 1
else
	mkdir -p "$TARGET_DIR"
fi

apply_patch_to_cdh_parquet_format
apply_patch_to_cdh_parquet_mr
apply_patch_to_cdh_hive
def generate_s3_creation_command(bucket_name, region):
    """Build the AWS CLI command that creates an S3 bucket.

    Args:
        bucket_name: Name of the bucket to create.
        region: AWS region identifier (e.g. ``us-east-1``).

    Returns:
        The ``aws s3 mb`` shell command as a string.
    """
    return "aws s3 mb s3://{bucket} --region {region}".format(
        bucket=bucket_name, region=region
    )
import pygame as pg
from ris import Ris
import numpy as np

#cell_size = 24

class Riskrig:
    """Two-player wrapper around two `Ris` game instances: player A is driven
    by the caller's actions, player B samples random actions. Cleared lines on
    one board are queued as incoming garbage on the other.

    NOTE(review): `Ris` is a project-local class; board layout, `step` return
    shapes, and `incoming_garbage` semantics are assumed from usage here —
    confirm against ris.py.
    """

    def __init__(self, cell_size, width, height):
        # Two independent game instances with identical dimensions.
        self.ris_a = Ris(cell_size, height=height, width=width)#, piece_set='koktris')
        self.ris_b = Ris(cell_size, height=height, width=width)#, piece_set='koktris')
        # Board/action space exposed from player A for gym-like callers.
        self.board = self.ris_a.board
        self.action_space = self.ris_a.action_space
        # Render target; must be assigned by the caller before render().
        self.screen = None
        self.cell_size = cell_size

    def step(self, action):
        """Advance both games one step.

        `action` drives player A; player B plays a random sampled action.
        Lines cleared by each player become incoming garbage for the opponent.
        Returns (combined_state_for_A, reward_A, done_A or done_B, {}).
        """
        state_a, reward_a, done_a, info_a = self.ris_a.step(action)
        state_b, reward_b, done_b, info_b = self.ris_b.step(self.ris_b.action_space.sample())

        # Cross-feed garbage: your cleared lines land on the opponent's board.
        self.ris_b.incoming_garbage += info_a['lines_cleared']
        self.ris_a.incoming_garbage += info_b['lines_cleared']

        # Combined observation: own first 3 state planes stacked on top of the
        # opponent's full state (assumes state[0] is a stackable 2-D array —
        # TODO confirm shape against Ris).
        multi_state_a = np.array([np.vstack((state_a[0][:3], state_b[0]))])
        # NOTE(review): multi_state_b is computed but never returned.
        multi_state_b = np.array([np.vstack((state_b[0][:3], state_a[0]))])

        # Episode ends when either player's game ends.
        return multi_state_a, reward_a, done_a or done_b, {}

    def reset(self):
        """Reset both games; returns player A's combined observation."""
        state_a = self.ris_a.reset()
        state_b = self.ris_b.reset()
        multi_state_a = np.array([np.vstack((state_a[0][:3], state_b[0]))])
        # NOTE(review): multi_state_b is computed but unused here as well.
        multi_state_b = np.array([np.vstack((state_b[0][:3], state_a[0]))])
        return multi_state_a

    def render(self):
        """Draw both boards side by side plus pending-garbage indicator columns.

        Requires self.screen to have been assigned a pygame display surface.
        """
        cell_size = self.cell_size
        self.screen.fill((0,0,0))
        # Player A's board at the left edge.
        screen_a = pg.Surface((cell_size * len(self.ris_a.board[0]), cell_size * len(self.ris_a.board)))
        self.ris_a.draw(screen_a)
        self.screen.blit(screen_a, (0, 0))
        # Player B's board to the right, leaving a one-cell gutter for the
        # garbage indicator column.
        screen_b = pg.Surface((cell_size * len(self.ris_b.board[0]), cell_size * len(self.ris_b.board)))
        self.ris_b.draw(screen_b)
        self.screen.blit(screen_b, (cell_size * (1 + len(self.ris_b.board[0])), 0))

        # Pending garbage for A: red cells stacked from the bottom in the
        # gutter right of A's board.
        for i in range(self.ris_a.incoming_garbage):
            cell = pg.Rect(cell_size * len(self.ris_a.board[0]), cell_size * (len(self.ris_a.board) - 1 - i), cell_size, cell_size)
            pg.draw.rect(self.screen, (100, 0, 0), cell)
            pg.draw.rect(self.screen, (90, 0, 0), cell, 1)

        # Pending garbage for B: same indicator right of B's board.
        for i in range(self.ris_b.incoming_garbage):
            cell = pg.Rect(cell_size + 2 * cell_size * len(self.ris_a.board[0]), cell_size * (len(self.ris_a.board) - 1 - i), cell_size, cell_size)
            pg.draw.rect(self.screen, (100, 0, 0), cell)
            pg.draw.rect(self.screen, (90, 0, 0), cell, 1)

        pg.display.flip()

if __name__ == "__main__":
    # Demo loop: player A also plays random actions; window sized to fit both
    # boards plus the two garbage-indicator gutters.
    cell_size = 24
    rk = Riskrig(cell_size, 7, 14)
    env = rk.ris_a
    screen = pg.display.set_mode((2 * cell_size + 2 * cell_size * (len(env.board[0])), cell_size * len(env.board)))
    rk.screen = screen
    clock = pg.time.Clock()

    while True:
        _,_,done,_ = rk.step(rk.ris_a.action_space.sample())
        if done:
            rk.reset()
        rk.render()
        pg.display.flip()
        #clock.tick(30)
require 'rubygems'
require 'bundler/setup'
require "openactive/dataset_site"

# Example script: builds an OpenActive Dataset model describing Simpleweb's
# open-data feeds, then renders the standard dataset-site HTML for it.
dataset = OpenActive::Models::Dataset.new(
  id: "http://example.com/dataset/",
  description: "Near real-time availability and rich descriptions relating to the facilities and sessions available from Simpleweb",
  url: "http://example.com/dataset/",
  date_modified: "2019-12-09T15:36:15+00:00",
  keywords: ["Facilities", "Sessions", "Activities", "Sports", "Physical Activity", "OpenActive"],
  schema_version: "https://www.openactive.io/modelling-opportunity-data/2.0/",
  license: "https://creativecommons.org/licenses/by/4.0/",
  # Organisation publishing the dataset.
  publisher: OpenActive::Models::Organization.new(
    name: "Simpleweb",
    description: "Simpleweb is a purpose driven software company that specialises in new technologies, product development, and human interaction.",
    url: "https://www.simpleweb.co.uk/",
    # NOTE(review): `legalName` is camelCase while sibling keys are
    # snake_case — confirm the models gem accepts this spelling.
    legalName: "Simpleweb Ltd",
    logo: OpenActive::Models::ImageObject.new(
      url: "https://simpleweb.co.uk/wp-content/uploads/2015/07/facebook-default.png",
    ),
    email: "<EMAIL>",
  ),
  discussion_url: "https://github.com/simpleweb/sw-oa-php-test-site",
  date_published: "2019-11-05T00:00:00+00:00",
  in_language: ["en-GB"],
  # One RPDE feed entry per opportunity type exposed by the site.
  distribution: [OpenActive::Models::DataDownload.new(
    name: "FacilityUse",
    additional_type: "https://openactive.io/FacilityUse",
    encoding_format: "application/vnd.openactive.rpde+json; version=1",
    content_url: "http://example.com/feed/facility-uses",
  ), OpenActive::Models::DataDownload.new(
    name: "ScheduledSession",
    additional_type: "https://openactive.io/ScheduledSession",
    encoding_format: "application/vnd.openactive.rpde+json; version=1",
    content_url: "http://example.com/feed/scheduled-sessions",
  ), OpenActive::Models::DataDownload.new(
    name: "SessionSeries",
    additional_type: "https://openactive.io/SessionSeries",
    encoding_format: "application/vnd.openactive.rpde+json; version=1",
    content_url: "http://example.com/feed/session_series",
  ), OpenActive::Models::DataDownload.new(
    name: "Slot",
    additional_type: "https://openactive.io/Slot",
    encoding_format: "application/vnd.openactive.rpde+json; version=1",
    content_url: "http://example.com/feed/slots",
  )],
  background_image: OpenActive::Models::ImageObject.new(
    url: "https://simpleweb.co.uk/wp-content/uploads/2017/06/IMG_8994-500x500-c-default.jpg",
  ),
  documentation: "https://developer.openactive.io/",
  name: "Simpleweb Facilities and Sessions",
)

# Render the dataset-site template for the model and print the HTML to stdout.
renderer = OpenActive::DatasetSite::TemplateRenderer.new(dataset)

puts renderer.render
#!/usr/bin/env bash
# Vendor this repository into a local cf-for-k8s checkout via `vendir sync`,
# so cf-for-k8s uses the working copy instead of the published release.
set -ex

# Location of the cf-for-k8s checkout; overridable via the environment.
CF_FOR_K8s_DIR="${CF_FOR_K8s_DIR:-${HOME}/workspace/cf-for-k8s/}"

# Resolve the repository root relative to this script's own location.
this_script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
repo_root="${this_script_dir}/.."

pushd "${CF_FOR_K8s_DIR}"
# Point the vendored capi-k8s-release directory at the local working copy.
vendir sync -d config/_ytt_lib/github.com/cloudfoundry/capi-k8s-release="${repo_root}"
popd
//
//  TBAppDelegate.h
//  Testbed-ObjC
//
//  Created by <NAME> on 6/12/17.
//  Copyright © 2017 Branch. All rights reserved.
//

@import UIKit;

// Application delegate for the Testbed-ObjC sample app.
@interface TBAppDelegate : UIResponder <UIApplicationDelegate>

// The app's main window.
@property (strong, nonatomic) UIWindow *window;

@end

// Shared timestamp of the previous update, defined in the implementation file.
// NOTE(review): mutable global state — confirm it has a single writer.
extern NSDate *global_previous_update_time;
#Aqueduct - Compliance Remediation Content #Copyright (C) 2011,2012 Vincent C. Passaro (vincent.passaro@gmail.com) # #This program is free software; you can redistribute it and/or #modify it under the terms of the GNU General Public License #as published by the Free Software Foundation; either version 2 #of the License, or (at your option) any later version. # #This program is distributed in the hope that it will be useful, #but WITHOUT ANY WARRANTY; without even the implied warranty of #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #GNU General Public License for more details. # #You should have received a copy of the GNU General Public License #along with this program; if not, write to the Free Software #Foundation, Inc., 51 Franklin Street, Fifth Floor, #Boston, MA 02110-1301, USA. #!/bin/bash #####################Fotis Networks LLC############################### #By Tummy a.k.a Vincent C. Passaro # #Fotis Networks LLC # #Vincent[.]Passaro[@]fotisnetworks[.]com # #www.fotisnetworks.com # ######################Fotis Networks LLC############################## #_____________________________________________________________________ #| Version | Change Information | Author | Date | #|__________|_______________________|____________________|____________| #| 1.0 | Initial Script | Vincent C. Passaro | 20-oct-2011| #| | Creation | | | #|__________|_______________________|____________________|____________| # # # - Updated by Shannon Mitchell(shannon.mitchell@fusiontechnology-llc.com) # on 09-jan-2012. Moved from manual to prod and created check. #######################DISA INFORMATION############################### #Group ID (Vulid): V-789 #Group Title: NIS/NIS+/yp File Ownership #Rule ID: SV-27170r1_rule #Severity: CAT II #Rule Version (STIG-ID): GEN001320 #Rule Title: NIS/NIS+/yp files must be owned by root, sys, or bin. 
#
#Vulnerability Discussion: NIS/NIS+/yp files are part of the system's
#identification and authentication processes and are, therefore, critical
#to system security. Failure to give ownership of sensitive files or
#utilities to root or bin provides the designated owner and unauthorized
#users with the potential to access sensitive information or change the
#system configuration which could weaken the system's security posture.
#
#
#Responsibility: System Administrator
#IAControls: ECLP-1
#
#Check Content:
#Perform the following to check NIS file ownership:
# ls -la /var/yp/*;
#If the file ownership is not root, sys, or bin, this is a finding.
#
#Fix Text: Change the ownership of NIS/NIS+/yp files to root, sys,
#bin, or system. Consult vendor documentation to determine the
#location of the files.
#
#Procedure (example):
# chown root <filename>
#######################DISA INFORMATION###############################

#Global Variables#
PDI=GEN001320

#Start-Lockdown
# Ensure every file under /var/yp is owned by an approved account
# (root, bin, sys, or system); otherwise reassign ownership to root.
if [ -d '/var/yp' ]
then
  # Use -print0 / read -d '' so filenames containing whitespace are
  # handled correctly (the previous backtick loop split on spaces).
  find /var/yp -type f -print0 | while IFS= read -r -d '' CURFILE
  do
    CUROWN=$(stat -c %U "$CURFILE")
    # BUG FIX: the original test compared against "root" twice; the
    # approved-owner list per the STIG fix text is root, bin, sys, system.
    case "$CUROWN" in
      root|bin|sys|system)
        ;;
      *)
        chown root "$CURFILE"
        ;;
    esac
  done
fi
package src.model.DAO;

import java.sql.ResultSet;

import src.model.VO.QuestaoVO;

/**
 * DAO contract for question ("questao") entities, extending the generic base
 * DAO with question-specific queries.
 *
 * @param <VO> concrete question value-object type handled by the DAO
 */
public interface QuestaoInterDAO<VO extends QuestaoVO> extends BaseInterDAO<VO> {

    /**
     * Fetches questions matching the difficulty carried by the given value
     * object.
     *
     * @param vo value object whose difficulty is used as the search key
     *           (NOTE(review): inferred from the method name — confirm in the
     *           implementing class)
     * @return result set of matching rows; caller is responsible for closing it
     */
    public ResultSet buscarPorDificuldade(VO vo);
}
from pioneer.common.logging_manager import LoggingManager
from pioneer.das.api.samples.trace import Trace
from typing import Callable
import copy
import numpy as np

class FastTrace(Trace):
    """Derivation of Trace sample, for Pixell sensors. Two distinct sets of waveforms are contained
    in the raw data dictionnary, under the keys 'high' and 'low'.
    """

    def __init__(self, index, datasource, virtual_raw=None, virtual_ts=None):
        super(FastTrace, self).__init__(
            index, datasource, virtual_raw, virtual_ts)

    @property
    def raw(self):
        """Raw data dict with per-type calibration fields filled in.

        Lazily caches the parent-class raw payload, and for each of the 'low'
        and 'high' sub-dicts injects sensor calibration values when absent.
        NOTE(review): the three assignments are nested under the
        'time_base_delays' membership test — confirm against upstream; the
        original formatting was ambiguous.
        """
        if self._raw is None:
            # Deliberately skips Trace.raw (super(Trace, ...)) — TODO confirm
            # this targets Trace's own parent class implementation.
            self._raw = super(Trace, self).raw
            for fast_trace_type in ['low', 'high']:
                if 'time_base_delays' not in self._raw[fast_trace_type]:
                    self._raw[fast_trace_type]['time_base_delays'] = self.datasource.sensor.time_base_delays[fast_trace_type]
                    self._raw[fast_trace_type]['distance_scaling'] = self.datasource.sensor.distance_scaling
                    self._raw[fast_trace_type]['trace_smoothing_kernel'] = self.datasource.sensor.get_trace_smoothing_kernel()[fast_trace_type]
        return self._raw

    @property
    def raw_array(self):
        """Both waveform sets as one array of shape (2, v, h, trace_len).

        Index 0 holds the 'high' traces; index 1 holds 'low' traces
        zero-padded on the last axis up to the 'high' trace length.
        """
        specs = self.specs
        rawLow = self.raw['low']['data']
        rawMid = self.raw['high']['data']
        array = np.zeros((2, specs['v']*specs['h'], rawMid.shape[-1]))
        array[0] = rawMid
        array[1, :, :rawLow.shape[-1]] = rawLow
        return array.reshape((2, specs['v'], specs['h'], array.shape[-1]))

    def processed_array(self, trace_processing: Callable):
        """Same layout as raw_array, but built from processed traces."""
        specs = self.specs
        processedLow = self.processed(trace_processing)['low']['data']
        processedMid = self.processed(trace_processing)['high']['data']
        array = np.zeros((2, specs['v']*specs['h'], processedMid.shape[-1]))
        array[0] = processedMid
        array[1, :, :processedLow.shape[-1]] = processedLow
        return array.reshape((2, specs['v'], specs['h'], array.shape[-1]))

    def processed(self, trace_processing: Callable):
        """Apply trace_processing to a deep copy of each raw sub-dict.

        Deep-copying keeps the cached self._raw unmodified.
        """
        processed_traces = {}
        raw_copy = copy.deepcopy(self.raw)
        for fast_trace_type in ['low', 'high']:
            processed_traces[fast_trace_type] = trace_processing(
                raw_copy[fast_trace_type])
        return processed_traces

    @property
    def max_range(self):
        """Maximum measurable range derived from the 'high' trace length,
        its time base delay, and the sensor's distance scaling."""
        raw = self.raw
        trace_lenght = raw['high']['data'].shape[-1]
        return raw['high']['time_base_delays'] + trace_lenght*raw['high']['distance_scaling']

    @property
    def signal_to_noise(self):
        """Per-type SNR estimate: log10(mean power / std) + 1 of mean-centered
        traces, computed along axis 1, for both 'low' and 'high'."""
        snr = {}
        for fast_trace_type in ['low', 'high']:
            traces_zeroed = self.raw[fast_trace_type]['data'] - \
                np.mean(self.raw[fast_trace_type]['data'], axis=1)[:, None]
            snr[fast_trace_type] = np.log10(
                np.mean(traces_zeroed**2, axis=1)/np.std(traces_zeroed, axis=1))+1
        return snr
// // DataViewExtensions.ts // // Created by <NAME> on 16 Jun 2021. // Copyright 2021 Vircadia contributors. // // Distributed under the Apache License, Version 2.0. // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // /*@devdoc * The <code>DataView</code> namespace comprises methods added to the prototype of JavaScript's * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView|DataView} object, for * handling reading and writing large <code>bigint</vcde> values. These methods are added only if they aren't already present * in the browser's <code>DataView</code> implementation. * <p>C++: N/A</p> * @namespace DataView */ // WEBRTC TODO: May need to implement Uint64 methods for some browsers (e.g., Safari) if Babel doesn't handle this. // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView#64-bit_integer_values /* eslint-disable @typescript-eslint/no-magic-numbers, @typescript-eslint/no-invalid-this */ const MAX_U128_VALUE = 2n ** 128n - 1n; const SHIFT_64_BITS = 64n; const MASK_64_BITS = 0xffffffffffffffffn; /*@devdoc * Writes an unsigned 128-bit (16-byte) integer value to the DataView. * @function DataView.setBigUint128 * @param {number} byteOffset - The offset from the start of the DataView. * @param {bigint} value - The value to write. The maximum value is <code>2n ** 128n - 1n</code>. If larger than this value, * a value of <code>0n</code> is written. * @param {boolean} littleEndian=false - <code>true</code> to write the data in little-endian format, <code>false</codE> to * write in big-endian format. */ function setBigUint128(this: DataView, byteOffset: number, value: bigint, littleEndian = false) { const sanitizedValue = value > MAX_U128_VALUE ? 
0n : value; if (littleEndian) { this.setBigUint64(byteOffset + 8, sanitizedValue >> SHIFT_64_BITS, littleEndian); this.setBigUint64(byteOffset, sanitizedValue & MASK_64_BITS, littleEndian); } else { this.setBigUint64(byteOffset, sanitizedValue >> SHIFT_64_BITS, littleEndian); this.setBigUint64(byteOffset + 8, sanitizedValue & MASK_64_BITS, littleEndian); } } /*@devdoc * Reads an unsigned 128-bit (16-byte) integer value from the DataView. * @function DataView.getBigUint128 * @param {number} byteOffset - The offset from the start of the DataView. * @param {boolean} littleEndian=false - <code>true</code> to read the data in little-endian format, <code>false</codE> to read * read in big-endian format. * @returns {bigint} The value read. */ function getBigUint128(this: DataView, byteOffset: number, littleEndian = false): bigint { let result = 0n; if (littleEndian) { result = (this.getBigUint64(byteOffset + 8, littleEndian) << SHIFT_64_BITS) + this.getBigUint64(byteOffset, littleEndian); } else { result = (this.getBigUint64(byteOffset, littleEndian) << SHIFT_64_BITS) + this.getBigUint64(byteOffset + 8, littleEndian); } return result; } /* eslint-enable @typescript-eslint/no-magic-numbers, no-invalid-this */ /* eslint-disable no-extend-native */ export { }; // Provide a module context for the declaration. declare global { interface DataView { setBigUint128: (this: DataView, byteOffset: number, value: bigint, littleEndian: boolean) => void; getBigUint128: (this: DataView, byteOffset: number, littleEndian: boolean) => bigint; } } if (!Object.prototype.hasOwnProperty.call(DataView, "setBigUint128")) { DataView.prototype.setBigUint128 = setBigUint128; } if (!Object.prototype.hasOwnProperty.call(DataView, "getBigUint128")) { DataView.prototype.getBigUint128 = getBigUint128; } /* eslint-enable no-extend-native */
<reponame>kmartin91/onnenousditpastout<gh_stars>0 /* eslint-disable jsx-a11y/no-distracting-elements */ import React from "react" import { Link } from "gatsby" import Bio from "./bio" import Logo from "./logo.jpg" import "./reset.css" class Layout extends React.Component { render() { const { posts, children } = this.props const categories = posts && posts.length > 0 ? posts.map(({ node }) => node.frontmatter.category).sort() : [] /*const { location, title, children, posts } = this.props const rootPath = `${__PATH_PREFIX__}/` let header if (location.pathname === rootPath) { header = ( <h1 style={{ ...scale(1.5), marginBottom: rhythm(1.5), marginTop: 0, }} > <Link style={{ boxShadow: `none`, textDecoration: `none`, color: `inherit`, }} to={`/`} > {title} </Link> </h1> ) } else { header = ( <h3 style={{ fontFamily: `Montserrat, sans-serif`, marginTop: 0, }} > <Link style={{ boxShadow: `none`, textDecoration: `none`, color: `inherit`, }} to={`/`} > {title} </Link> </h3> ) } */ return ( <table style={{ backgroundColor: "#013220", fontFamily: "Comic Sans Ms", width: "100%", height: "100vh", color: "yellow", }} cellPadding="0" cellSpacing="0" > <tbody> <tr height="300"> <th style={{ padding: 0, textAlign: "left", width: "300px", }} > <Link to={`/`}> <img src={Logo} width="300px" alt="Oups je crois qu'on a un problème d'avatar, surement la CIA qui a bloqué notre avatar, de peur de perdre tous leurs secrets les plus ultimes. Ne soyez plus des moutons, libérez votre conscience !" title="Pourquoi mettez-vous la souris sur le logo ? Surement parce qu'on vous a appris à le faire, mais dans quel but vous a-t-on lavé le cerveau comme ceci? Posez-vous les bonnes questions!" /> </Link> </th> <th style={{ padding: "0 0 0 16px", verticalAlign: "middle" }}> <h1 style={{ fontFamily: "Comic Sans Ms", fontStyle: "italic" }}> On ne nous dit pas tout </h1> <marquee behavior="alternate"> Le seul blog qui traîte ouvertement des sujets qu'on nous cache. - La référence en terme de vérités. 
- Une équipe de spécialistes américains vérifient les sources. </marquee> <Bio /> </th> <th style={{ width: "300px", padding: 0 }}> </th> </tr> <tr> <td> <table style={{ width: "100%" }} cellPadding="0" cellSpacing="0"> <tbody> <tr> <td style={{ fontSize: "2rem", textAlign: "center", }} > <p>Catégories: </p> {categories && categories.length > 0 && categories.map(category => { if (!category) return null return ( <Link to={`/?cat=${category}`}>{category}</Link> ) })} </td> </tr> </tbody> </table> </td> <td>{children}</td> <td> <table cellPadding="0" cellSpacing="0"> <tbody> <tr> <td style={{ fontSize: "2rem", textAlign: "center", }} > <p>Nos sources:</p> </td> </tr> </tbody> </table> </td> </tr> </tbody> <tfoot> <tr> <td> © Nous sommes en{" "} {new Date().getFullYear() * Math.floor(Math.random() * Math.floor("2000"))} </td> </tr> </tfoot> </table> ) } } export default Layout
#!/bin/bash
# Print the path of every immediate subdirectory of the directory given as $1.
target=$1

for entry in "$target"/*; do
  # Only emit entries that are directories.
  [ -d "$entry" ] && echo "$entry"
done
package com.packtpub.designpatterns.structural;

/**
 * {@link Service} offering for registered Packtpub members: a fixed-price
 * bundle of additional member services.
 */
public class RegisteredMemberService implements Service {

    /** Flat cost of the registered-member service bundle. */
    private static final double REGISTERED_MEMBER_COST = 10;

    /** {@inheritDoc} */
    @Override
    public String getDescription() {
        return "Additional Services for Packtpub Registered Members";
    }

    /** {@inheritDoc} */
    @Override
    public double getCost() {
        return REGISTERED_MEMBER_COST;
    }
}
/*
* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
* THIS FILE MUST NOT BE EDITED BY HAND
*/

// NOTE(review): generated forwarding wrappers around testify's assert package;
// regenerate with _codegen instead of editing these functions directly.
package assert

import (
	http "net/http"
	url "net/url"
	time "time"

	assert "github.com/stretchr/testify/assert"
)

// Condition uses a Comparison to assert a complex condition.
func (a *Assertions) Condition(comp assert.Comparison, msgAndArgs ...interface{}) {
	Condition(a.t, comp, msgAndArgs...)
}

// Contains asserts that the specified string, list(array, slice...) or map contains the
// specified substring or element.
//
//    a.Contains("Hello World", "World")
//    a.Contains(["Hello", "World"], "World")
//    a.Contains({"Hello": "World"}, "Hello")
func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) {
	Contains(a.t, s, contains, msgAndArgs...)
}

// DirExists checks whether a directory exists in the given path. It also fails
// if the path is a file rather a directory or there is an error checking whether it exists.
func (a *Assertions) DirExists(path string, msgAndArgs ...interface{}) {
	DirExists(a.t, path, msgAndArgs...)
}

// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified
// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements,
// the number of appearances of each of them in both lists should match.
//
// a.ElementsMatch([1, 3, 2, 3], [1, 3, 3, 2])
func (a *Assertions) ElementsMatch(listA interface{}, listB interface{}, msgAndArgs ...interface{}) {
	ElementsMatch(a.t, listA, listB, msgAndArgs...)
}

// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either
// a slice or a channel with len == 0.
//
//    a.Empty(obj)
func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) {
	Empty(a.t, object, msgAndArgs...)
}

// Equal asserts that two objects are equal.
//
//    a.Equal(123, 123)
//
// Pointer variable equality is determined based on the equality of the
// referenced values (as opposed to the memory addresses). Function equality
// cannot be determined and will always fail.
func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
	Equal(a.t, expected, actual, msgAndArgs...)
}

// EqualError asserts that a function returned an error (i.e. not `nil`)
// and that it is equal to the provided error.
//
//   actualObj, err := SomeFunction()
//   a.EqualError(err, expectedErrorString)
func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) {
	EqualError(a.t, theError, errString, msgAndArgs...)
}

// EqualValues asserts that two objects are equal or convertable to the same types
// and equal.
//
//    a.EqualValues(uint32(123), int32(123))
func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
	EqualValues(a.t, expected, actual, msgAndArgs...)
}

// Error asserts that a function returned an error (i.e. not `nil`).
//
//   actualObj, err := SomeFunction()
//   if a.Error(err) {
//	   assert.Equal(t, expectedError, err)
//   }
func (a *Assertions) Error(err error, msgAndArgs ...interface{}) {
	Error(a.t, err, msgAndArgs...)
}

// Eventually asserts that given condition will be met in waitFor time,
// periodically checking target function each tick.
//
//    a.Eventually(func() bool { return true; }, time.Second, 10*time.Millisecond)
func (a *Assertions) Eventually(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) {
	Eventually(a.t, condition, waitFor, tick, msgAndArgs...)
}

// Exactly asserts that two objects are equal in value and type.
//
//    a.Exactly(int32(123), int64(123))
func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
	Exactly(a.t, expected, actual, msgAndArgs...)
}

// Fail reports a failure through
func (a *Assertions) Fail(failureMessage string, msgAndArgs ...interface{}) {
	Fail(a.t, failureMessage, msgAndArgs...)
}

// FailNow fails test
func (a *Assertions) FailNow(failureMessage string, msgAndArgs ...interface{}) {
	FailNow(a.t, failureMessage, msgAndArgs...)
}

// False asserts that the specified value is false.
//
//    a.False(myBool)
func (a *Assertions) False(value bool, msgAndArgs ...interface{}) {
	False(a.t, value, msgAndArgs...)
}

// FileExists checks whether a file exists in the given path. It also fails if
// the path points to a directory or there is an error when trying to check the file.
func (a *Assertions) FileExists(path string, msgAndArgs ...interface{}) {
	FileExists(a.t, path, msgAndArgs...)
}

// Greater asserts that the first element is greater than the second
//
//    a.Greater(2, 1)
//    a.Greater(float64(2), float64(1))
//    a.Greater("b", "a")
func (a *Assertions) Greater(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) {
	Greater(a.t, e1, e2, msgAndArgs...)
}

// GreaterOrEqual asserts that the first element is greater than or equal to the second
//
//    a.GreaterOrEqual(2, 1)
//    a.GreaterOrEqual(2, 2)
//    a.GreaterOrEqual("b", "a")
//    a.GreaterOrEqual("b", "b")
func (a *Assertions) GreaterOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) {
	GreaterOrEqual(a.t, e1, e2, msgAndArgs...)
}

// HTTPBodyContains asserts that a specified handler returns a
// body that contains a string.
//
//  a.HTTPBodyContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky")
//
// Returns whether the assertion was successful (true) or not (false).
func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) {
	HTTPBodyContains(a.t, handler, method, url, values, str, msgAndArgs...)
}

// HTTPBodyNotContains asserts that a specified handler returns a
// body that does not contain a string.
//
//  a.HTTPBodyNotContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky")
//
// Returns whether the assertion was successful (true) or not (false).
func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) {
	HTTPBodyNotContains(a.t, handler, method, url, values, str, msgAndArgs...)
}

// HTTPError asserts that a specified handler returns an error status code.
//
//  a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}}
//
// Returns whether the assertion was successful (true) or not (false).
func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) {
	HTTPError(a.t, handler, method, url, values, msgAndArgs...)
}

// HTTPRedirect asserts that a specified handler returns a redirect status code.
//
//  a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}}
//
// Returns whether the assertion was successful (true) or not (false).
func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) {
	HTTPRedirect(a.t, handler, method, url, values, msgAndArgs...)
}

// HTTPStatusCode asserts that a specified handler returns a specified status code.
//
//  a.HTTPStatusCode(myHandler, "GET", "/notImplemented", nil, 501)
//
// Returns whether the assertion was successful (true) or not (false).
func (a *Assertions) HTTPStatusCode(handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msgAndArgs ...interface{}) {
	HTTPStatusCode(a.t, handler, method, url, values, statuscode, msgAndArgs...)
}

// HTTPSuccess asserts that a specified handler returns a success status code.
//
//  a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil)
//
// Returns whether the assertion was successful (true) or not (false).
func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) {
	HTTPSuccess(a.t, handler, method, url, values, msgAndArgs...)
}

// Implements asserts that an object is implemented by the specified interface.
//
//    a.Implements((*MyInterface)(nil), new(MyObject))
func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) {
	Implements(a.t, interfaceObject, object, msgAndArgs...)
}

// InDelta asserts that the two numerals are within delta of each other.
//
// 	 a.InDelta(math.Pi, 22/7.0, 0.01)
func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) {
	InDelta(a.t, expected, actual, delta, msgAndArgs...)
}

// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys.
func (a *Assertions) InDeltaMapValues(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) {
	InDeltaMapValues(a.t, expected, actual, delta, msgAndArgs...)
}

// InDeltaSlice is the same as InDelta, except it compares two slices.
func (a *Assertions) InDeltaSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) {
	InDeltaSlice(a.t, expected, actual, delta, msgAndArgs...)
}

// InEpsilon asserts that expected and actual have a relative error less than epsilon
func (a *Assertions) InEpsilon(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) {
	InEpsilon(a.t, expected, actual, epsilon, msgAndArgs...)
}

// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices.
func (a *Assertions) InEpsilonSlice(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) {
	InEpsilonSlice(a.t, expected, actual, epsilon, msgAndArgs...)
}

// IsType asserts that the specified objects are of the same type.
func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) {
	IsType(a.t, expectedType, object, msgAndArgs...)
}

// JSONEq asserts that two JSON strings are equivalent.
//
//  a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`)
func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) {
	JSONEq(a.t, expected, actual, msgAndArgs...)
}

// Len asserts that the specified object has specific length.
// Len also fails if the object has a type that len() not accept.
//
//    a.Len(mySlice, 3)
func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) {
	Len(a.t, object, length, msgAndArgs...)
}

// Less asserts that the first element is less than the second
//
//    a.Less(1, 2)
//    a.Less(float64(1), float64(2))
//    a.Less("a", "b")
func (a *Assertions) Less(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) {
	Less(a.t, e1, e2, msgAndArgs...)
}

// LessOrEqual asserts that the first element is less than or equal to the second
//
//    a.LessOrEqual(1, 2)
//    a.LessOrEqual(2, 2)
//    a.LessOrEqual("a", "b")
//    a.LessOrEqual("b", "b")
func (a *Assertions) LessOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) {
	LessOrEqual(a.t, e1, e2, msgAndArgs...)
}

// Never asserts that the given condition doesn't satisfy in waitFor time,
// periodically checking the target function each tick.
//
//    a.Never(func() bool { return false; }, time.Second, 10*time.Millisecond)
func (a *Assertions) Never(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) {
	Never(a.t, condition, waitFor, tick, msgAndArgs...)
}

// Nil asserts that the specified object is nil.
//
//    a.Nil(err)
func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) {
	Nil(a.t, object, msgAndArgs...)
}

// NoDirExists checks whether a directory does not exist in the given path.
// It fails if the path points to an existing _directory_ only.
func (a *Assertions) NoDirExists(path string, msgAndArgs ...interface{}) {
	NoDirExists(a.t, path, msgAndArgs...)
}

// NoError asserts that a function returned no error (i.e. `nil`).
//
//   actualObj, err := SomeFunction()
//   if a.NoError(err) {
//	   assert.Equal(t, expectedObj, actualObj)
//   }
func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) {
	NoError(a.t, err, msgAndArgs...)
}

// NoFileExists checks whether a file does not exist in a given path. It fails
// if the path points to an existing _file_ only.
func (a *Assertions) NoFileExists(path string, msgAndArgs ...interface{}) {
	NoFileExists(a.t, path, msgAndArgs...)
}

// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the
// specified substring or element.
//
//    a.NotContains("Hello World", "Earth")
//    a.NotContains(["Hello", "World"], "Earth")
//    a.NotContains({"Hello": "World"}, "Earth")
func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) {
	NotContains(a.t, s, contains, msgAndArgs...)
}

// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either
// a slice or a channel with len == 0.
//
//  if a.NotEmpty(obj) {
//    assert.Equal(t, "two", obj[1])
//  }
func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) {
	NotEmpty(a.t, object, msgAndArgs...)
}

// NotEqual asserts that the specified values are NOT equal.
//
//    a.NotEqual(obj1, obj2)
//
// Pointer variable equality is determined based on the equality of the
// referenced values (as opposed to the memory addresses).
func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
	NotEqual(a.t, expected, actual, msgAndArgs...)
}

// NotEqualValues asserts that two objects are not equal even when converted to the same type
//
//    a.NotEqualValues(obj1, obj2)
func (a *Assertions) NotEqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
	NotEqualValues(a.t, expected, actual, msgAndArgs...)
}

// NotNil asserts that the specified object is not nil.
//
//    a.NotNil(err)
func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) {
	NotNil(a.t, object, msgAndArgs...)
}

// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic.
//
//   a.NotPanics(func(){ RemainCalm() })
func (a *Assertions) NotPanics(f assert.PanicTestFunc, msgAndArgs ...interface{}) {
	NotPanics(a.t, f, msgAndArgs...)
}

// NotRegexp asserts that a specified regexp does not match a string.
//
//  a.NotRegexp(regexp.MustCompile("starts"), "it's starting")
//  a.NotRegexp("^start", "it's not starting")
func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) {
	NotRegexp(a.t, rx, str, msgAndArgs...)
}

// NotSame asserts that two pointers do not reference the same object.
//
//    a.NotSame(ptr1, ptr2)
//
// Both arguments must be pointer variables. Pointer variable sameness is
// determined based on the equality of both type and value.
func (a *Assertions) NotSame(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
	NotSame(a.t, expected, actual, msgAndArgs...)
}

// NotSubset asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
//
//    a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) {
	NotSubset(a.t, list, subset, msgAndArgs...)
}

// NotZero asserts that i is not the zero value for its type.
func (a *Assertions) NotZero(i interface{}, msgAndArgs ...interface{}) {
	NotZero(a.t, i, msgAndArgs...)
}

// Panics asserts that the code inside the specified PanicTestFunc panics.
//
//   a.Panics(func(){ GoCrazy() })
func (a *Assertions) Panics(f assert.PanicTestFunc, msgAndArgs ...interface{}) {
	Panics(a.t, f, msgAndArgs...)
}

// PanicsWithError asserts that the code inside the specified PanicTestFunc
// panics, and that the recovered panic value is an error that satisfies the
// EqualError comparison.
//
//   a.PanicsWithError("crazy error", func(){ GoCrazy() })
func (a *Assertions) PanicsWithError(errString string, f assert.PanicTestFunc, msgAndArgs ...interface{}) {
	PanicsWithError(a.t, errString, f, msgAndArgs...)
}

// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that
// the recovered panic value equals the expected panic value.
//
//   a.PanicsWithValue("crazy error", func(){ GoCrazy() })
func (a *Assertions) PanicsWithValue(expected interface{}, f assert.PanicTestFunc, msgAndArgs ...interface{}) {
	PanicsWithValue(a.t, expected, f, msgAndArgs...)
}

// Regexp asserts that a specified regexp matches a string.
//
//  a.Regexp(regexp.MustCompile("start"), "it's starting")
//  a.Regexp("start...$", "it's not starting")
func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) {
	Regexp(a.t, rx, str, msgAndArgs...)
}

// Same asserts that two pointers reference the same object.
//
//    a.Same(ptr1, ptr2)
//
// Both arguments must be pointer variables. Pointer variable sameness is
// determined based on the equality of both type and value.
func (a *Assertions) Same(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
	Same(a.t, expected, actual, msgAndArgs...)
}

// Subset asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
//
//    a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) {
	Subset(a.t, list, subset, msgAndArgs...)
}

// True asserts that the specified value is true.
//
//    a.True(myBool)
func (a *Assertions) True(value bool, msgAndArgs ...interface{}) {
	True(a.t, value, msgAndArgs...)
}

// WithinDuration asserts that the two times are within duration delta of each other.
//
//   a.WithinDuration(time.Now(), time.Now(), 10*time.Second)
func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) {
	WithinDuration(a.t, expected, actual, delta, msgAndArgs...)
}

// YAMLEq asserts that two YAML strings are equivalent.
func (a *Assertions) YAMLEq(expected string, actual string, msgAndArgs ...interface{}) {
	YAMLEq(a.t, expected, actual, msgAndArgs...)
}

// Zero asserts that i is the zero value for its type.
func (a *Assertions) Zero(i interface{}, msgAndArgs ...interface{}) {
	Zero(a.t, i, msgAndArgs...)
}
# coding=utf-8
# Copyright 2019 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""J2J models."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import gin

from jax.experimental import stax


@gin.configurable()
def MLP(num_hidden_layers=2,
        hidden_size=512,
        activation_fn=stax.Relu,
        num_output_classes=10):
  """Builds a multi-layer perceptron as a stax serial network.

  The network flattens its input, applies `num_hidden_layers` pairs of
  (Dense(hidden_size), activation_fn), then a final Dense projection to
  `num_output_classes` followed by LogSoftmax.

  All arguments are gin-configurable.

  Args:
    num_hidden_layers: number of (Dense, activation) hidden blocks.
    hidden_size: width of each hidden Dense layer.
    activation_fn: stax activation layer used after each hidden Dense.
    num_output_classes: size of the final output layer.

  Returns:
    The (init_fn, apply_fn) pair produced by `stax.serial`.
  """
  layers = [stax.Flatten]
  # Each hidden block is a Dense layer followed by the activation.
  layers += [stax.Dense(hidden_size), activation_fn] * num_hidden_layers
  layers += [stax.Dense(num_output_classes), stax.LogSoftmax]
  return stax.serial(*layers)
import { Units } from './Units';
import { LayerType } from '../layers';
import { ActivationFunction } from '../activation-funcs';

// Describes one group of units sharing an activation function.
// NOTE(review): the `ActivationFunction` property is PascalCase unlike the
// other camelCase fields — presumably intentional to mirror the type name;
// confirm before relying on it.
export declare type SingleData = {
    // Number of units in this group.
    qty: number;
    // Optional per-unit names; length presumably matches `qty` — TODO confirm.
    names?: Array<string>;
    ActivationFunction: ActivationFunction;
};

// Full specification for creating a set of units for one layer.
export declare type Data = {
    name?: string;
    type: LayerType;
    unitsData: Array<SingleData>;
};

// Factory that builds a `Units` collection from its specification.
declare const create: (data: Data) => Units;
export default create;
#!/bin/bash
#
# Bootstraps the project: creates a Python virtualenv, installs the project
# into it, and installs the Node dependencies for the api/faers and
# openfda/spl subprojects.
#
# Python
#
rm -rf build/

# We emulate readlink with python, so we can work on OSX without greadlink.
# From: http://stackoverflow.com/questions/1055671/how-can-i-get-the-behavior-
# of-gnus-readlink-f-on-a-mac
# NOTE(review): `print os.path...` is Python 2 syntax; this script requires a
# python2 interpreter on PATH.
PYTHON_ENV=$(python -c 'import os,sys;print os.path.realpath(sys.argv[1])' ./_python-env)
PYTHON=$(which python)

# BUG FIX: the original tested `[[ -n MACHTYPE ]]` — a literal string, which
# is always non-empty, so the `--user` branch was dead code. Test the
# variable instead. NOTE(review): bash always sets MACHTYPE, so this branch
# selection is still effectively always-true; if the intent was "on macOS use
# a plain install", an $OSTYPE check may be what was meant — confirm.
if [[ -n "$MACHTYPE" ]]; then
  PIP="pip install"
else
  PIP="pip install --user"
fi

$PIP virtualenv
$PIP awscli

# Setup virtualenv if it doesn't exist.
test -e $PYTHON_ENV || virtualenv -p $PYTHON $PYTHON_ENV

# Install project sources and dependencies into the environment
$PYTHON_ENV/bin/pip uninstall -y openfda || true
$PYTHON_ENV/bin/python setup.py develop

#
# Node
#
pushd api/faers
npm install
popd

pushd openfda/spl
npm install
popd
package middleware

import (
	"net/http"
	"strconv"
	"time"

	"github.com/gin-gonic/gin"
	"github.com/prometheus/client_golang/prometheus"
)

// Prometheus collectors for per-request metrics, labeled by URL path and
// HTTP status (plus a path-less total labeled by client).
var (
	httpRequestCount = prometheus.NewCounterVec(
		prometheus.CounterOpts{Name: "app_request_totals", Help: "http request count"}, []string{"url", "status"})
	httpRequestDuration = prometheus.NewSummaryVec(
		prometheus.SummaryOpts{
			Name:       "app_request_duration_milliseconds",
			Help:       "http request duration",
			// Summary quantiles: median, p90, p99 with their error tolerances.
			Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
		}, []string{"url", "status"})
	httpSlowCount = prometheus.NewCounterVec(
		prometheus.CounterOpts{Name: "app_slow_request_totals", Help: "http slow request count"}, []string{"url"})
	httpRequestCountWithoutUrl = prometheus.NewCounterVec(
		prometheus.CounterOpts{Name: "app_request_all_totals", Help: "http request count without url"}, []string{"client"})
)

// init registers all collectors with the default Prometheus registry;
// MustRegister panics on duplicate registration.
func init() {
	prometheus.MustRegister(httpRequestCount)
	prometheus.MustRegister(httpRequestDuration)
	prometheus.MustRegister(httpSlowCount)
	prometheus.MustRegister(httpRequestCountWithoutUrl)
}

// Instrument is a gin middleware that records request count, duration, and
// slow-request metrics after the handler chain completes. Requests matched
// by FilterUrl (defined elsewhere in this package — presumably health
// checks/metrics endpoints; confirm) are excluded.
func Instrument(c *gin.Context) {
	defer func(begin time.Time) {
		r := c.Request
		url := r.RequestURI
		if FilterUrl(url) {
			return
		}
		w := c.Writer
		// Metrics are labeled by the route path and the response status code.
		values := []string{r.URL.Path, strconv.Itoa(w.Status())}
		httpRequestCount.WithLabelValues(values...).Inc()
		timeElapsed := float64(time.Since(begin)) / float64(time.Millisecond)
		// Record a slow request when it takes longer than 1s.
		if timeElapsed > 1000 {
			httpSlowCount.WithLabelValues(r.URL.Path).Inc()
		}
		httpRequestDuration.WithLabelValues(values...).Observe(timeElapsed)
		httpRequestCountWithoutUrl.WithLabelValues(GetMeshClient(r)).Inc()
	}(time.Now())
	c.Next()
}

// GetMeshClient returns the calling client's identifier from the "x-client"
// request header, or "/" when the header is absent.
func GetMeshClient(r *http.Request) string {
	mc := r.Header.Get("x-client")
	if mc == "" {
		mc = "/"
	}
	return mc
}
#!/bin/bash

# Backup Assets
#
# Backup local assets
#
# @author    nystudio107
# @copyright Copyright (c) 2017 nystudio107
# @link      https://nystudio107.com/
# @package   craft-scripts
# @since     1.1.0
# @license   MIT

# Get the directory of the currently executing script
DIR="$(dirname "${BASH_SOURCE[0]}")"

# Include files
INCLUDE_FILES=(
  "common/defaults.sh"
  ".env.sh"
  "common/common_env.sh"
)
for INCLUDE_FILE in "${INCLUDE_FILES[@]}"
do
  if [[ ! -f "${DIR}/${INCLUDE_FILE}" ]] ; then
    echo "File ${DIR}/${INCLUDE_FILE} is missing, aborting."
    exit 1
  fi
  source "${DIR}/${INCLUDE_FILE}"
done

# Set the backup directory paths
BACKUP_ASSETS_DIR_PATH="${LOCAL_BACKUPS_PATH}${LOCAL_DB_NAME}/${ASSETS_BACKUP_SUBDIR}/"
BACKUP_EE_DIR_PATH="${LOCAL_BACKUPS_PATH}${LOCAL_DB_NAME}/${CRAFT_BACKUP_SUBDIR}/"

# Make sure the asset backup directory exists
echo "Ensuring backup directory exists at '${BACKUP_ASSETS_DIR_PATH}'"
mkdir -p "${BACKUP_ASSETS_DIR_PATH}"

# Backup the asset dir files via rsync
# FIX: the loop variable was previously `DIR`, silently clobbering the
# script-directory `DIR` computed above — a latent bug if anything after
# these loops ever sources another include. Renamed to ASSETS_DIR/CRAFT_DIR.
for ASSETS_DIR in "${LOCAL_ASSETS_DIRS[@]}"
do
  rsync -F -L -a -z --progress "${LOCAL_ASSETS_PATH}${ASSETS_DIR}" "${BACKUP_ASSETS_DIR_PATH}"
  echo "*** Backed up assets from ${LOCAL_ASSETS_PATH}${ASSETS_DIR}"
done

# Make sure the Craft files backup directory exists
echo "Ensuring backup directory exists at '${BACKUP_EE_DIR_PATH}'"
mkdir -p "${BACKUP_EE_DIR_PATH}"

# Backup the Craft-specific dir files via rsync
for CRAFT_DIR in "${LOCAL_EE_FILE_DIRS[@]}"
do
  rsync -F -L -a -z --progress "${LOCAL_EE_FILES_PATH}${CRAFT_DIR}" "${BACKUP_EE_DIR_PATH}"
  echo "*** Backed up assets from ${LOCAL_EE_FILES_PATH}${CRAFT_DIR}"
done

# Normal exit
exit 0
/**
 * This file was generated by the JPA Modeler
 */
package com.example.demo.model;

import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.GenericGenerator;
import org.springframework.transaction.annotation.Transactional;

import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import java.io.Serializable;

/**
 * JPA entity describing a single line item of a product on an order,
 * holding MP/OTP price figures with and without tax and discount.
 *
 * NOTE(review): {@code @Transactional} on an entity class is unusual —
 * transaction demarcation normally belongs on services/repositories;
 * confirm whether it has any effect here.
 *
 * @author dzni0816
 */
@Entity
@Getter
@Setter
@Transactional
public class ProductItems implements Serializable {

    // Primary key: UUID string generated by Hibernate's UUIDGenerator.
    @Id
    @GeneratedValue(generator = "UUID")
    @GenericGenerator(
            name = "UUID",
            strategy = "org.hibernate.id.UUIDGenerator"
    )
    private String id;

    // MP price. NOTE(review): meaning of "mp"/"otp" abbreviations is not
    // evident from this file — document the domain terms at the source.
    @Basic
    private Float mp;

    // OTP price.
    @Basic
    private Float otp;

    // MP price including tax and discount. NOTE(review): field names mix
    // camelCase and PascalCase — kept as-is since renaming would change the
    // generated accessors and the mapped column names.
    @Basic
    private Float MPWithTaxandDiscont;

    // OTP price including tax and discount.
    @Basic
    private Float OTPWithTaxandDiscont;

    // Owning product; many items can reference one Soproduct.
    @ManyToOne(targetEntity = Soproduct.class)
    private Soproduct soproduct;

    // Parent order item; removal cascades from this side per CascadeType.REMOVE.
    @ManyToOne(targetEntity = OrdItem.class, cascade = CascadeType.REMOVE)
    private OrdItem ordItem;
}
<gh_stars>1-10 #include <cstring> #include <utility> #include "errno.h" #include "unistd.h" #include "core/runtime.h" #include "util.h" #include "spinet/server.h" namespace spinet { class TcpAcceptor : public BaseAcceptor { public: TcpAcceptor(int fd, const Address& address, Server::Settings* settings, std::function<void(std::shared_ptr<TcpSocket>)> accept_callback) : bind_address_ { address } , settings_ { settings } , accept_callback_ { std::move(accept_callback) } { fd_ = fd; } ~TcpAcceptor() { } protected: void do_accept() override { ::sockaddr_in socket_address {}; ::socklen_t address_size = sizeof(socket_address); auto runtime = runtime_.lock(); if (!runtime) { return; } while (true) { int socket_fd = ::accept(fd_, (::sockaddr*)&socket_address, &address_size); if (socket_fd == -1) { return; } set_nonblock(socket_fd); if (settings_->reuse_port) { set_reuse_port(socket_fd); } std::shared_ptr<TcpSocket> socket { new TcpSocket( socket_fd, std::get<0>(Address::parse(from_sockaddr_in(socket_address), ntohs(socket_address.sin_port)))) }; runtime->register_handle(socket); accept_callback_(socket); } } Address bind_address_; Server::Settings* settings_; std::function<void(std::shared_ptr<TcpSocket>)> accept_callback_; }; } using namespace spinet; std::optional<std::string> Server::Settings::validate() { if (workers < 1) { return "workers must be more than zero"; } return {}; } Server::Settings Server::Settings::default_settings() { return { .workers = 1, .reuse_port = false }; } Server::Server() : running_ { false } , settings_ {} { } Server::~Server() { stop(); while (running_) { } } std::optional<std::string> Server::with_settings(Settings settings) { std::unique_lock<std::mutex> lck { mtx_ }; if (running_) { return "server has been running"; } if (settings_) { return "settings has been set"; } if (auto err = settings.validate()) { return err; } settings_ = settings; for (std::size_t i = 0; i < settings_->workers; i++) { workers_.push_back(std::shared_ptr<Runtime> { 
new Runtime() }); } return {}; } bool Server::has_settings() { std::unique_lock<std::mutex> lck { mtx_ }; return settings_.has_value(); } std::optional<std::string> Server::listen_tcp_endpoint(Address& address, const std::function<void(std::shared_ptr<TcpSocket>)>& accept_callback) { std::unique_lock<std::mutex> lck { mtx_ }; if (running_) { return "server has been running"; } if (!settings_) { return "settings has been not set"; } auto res = to_sockaddr_in(address.address().c_str(), address.port()); if (res.index() == 1) { return std::get<1>(res); } ::sockaddr_in socket_address = std::get<0>(res); std::vector<int> listen_fds {}; for (std::size_t i = 0; i < workers_.size(); i++) { int listen_fd = ::socket(socket_address.sin_family, SOCK_STREAM, 0); if (listen_fd == -1) { std::string err = std::string { "socket cannot open, reason:" } + std::strerror(errno); for (std::size_t i = 0; i < listen_fds.size(); i++) { ::close(listen_fds[i]); } return err; } set_reuse_port(listen_fd); listen_fds.push_back(listen_fd); if (::bind(listen_fd, reinterpret_cast<::sockaddr*>(&socket_address), sizeof(socket_address)) != 0) { std::string err = std::string { "socket cannot bind with address " } + from_sockaddr_in(socket_address) + ":" + std::to_string(ntohs(socket_address.sin_port)) + ", reason:" + std::strerror(errno); for (std::size_t i = 0; i < listen_fds.size(); i++) { ::close(listen_fds[i]); } return err; } if (::listen(listen_fd, SOMAXCONN) == -1) { std::string err = std::string { "socket cannot be listened, reason:" } + std::strerror(errno); for (std::size_t i = 0; i < listen_fds.size(); i++) { ::close(listen_fds[i]); } return err; } set_nonblock(listen_fd); } for (std::size_t i = 0; i < listen_fds.size(); i++) { std::shared_ptr<TcpAcceptor> acceptor { new TcpAcceptor(listen_fds[i], address, &settings_.value(), accept_callback) }; workers_[i]->register_handle(acceptor); } return {}; } std::optional<std::string> Server::run() { bool expected = false; if 
(!running_.compare_exchange_weak(expected, true)) { return "server has been running"; } std::unique_lock<std::mutex> lck { mtx_ }; if (!settings_) { return "settings has been not set"; } for (auto& worker : workers_) { if (auto err = worker->run()) { return err; } } return {}; } void Server::stop() { std::unique_lock<std::mutex> lck { mtx_ }; if (!running_) { return; } for (auto& worker : workers_) { worker->stop(); } running_ = false; } bool Server::is_running() { return running_; }
#!/bin/sh
# Install the NLP toolchain: spaCy 3.2.1 (via conda-forge), language
# detection and country-code helpers, and the small English model.
conda install -c conda-forge spacy=3.2.1
pip install spacy_langdetect
pip install 'pycountry==22.1.10'
# Download spaCy's small English pipeline (en_core_web_sm).
python -m spacy download en_core_web_sm
import java.util.*;

/**
 * Console vending machine: the user inserts 1/5/10-rupee coins (0 to stop),
 * then selects a product (1 = biscuit @25, 2 = water @18, 3 = energy bar @45);
 * change is returned when the inserted amount exceeds the price.
 *
 * NOTE(review): repetedCode() calls insertCoin() again, and insertCoin()
 * eventually calls back into getProductList()/repetedCode(), so each extra
 * purchase deepens the call stack rather than looping — fine for small
 * sessions, but unbounded recursion in principle.
 */
public class VendingMachine {
    // Shared scanner over stdin for all prompts.
    static Scanner sc;
    static {
        sc = new Scanner(System.in);
    }

    // checkCoin(-) method
    // Returns true for the accepted denominations 1, 5, 10 — and also for 0,
    // which is the sentinel the caller uses to stop inserting coins.
    public static boolean checkCoin(int coin){
        if(coin == 1 || coin == 5 || coin == 10 || coin ==0)
            return true;
        else
            return false;
    }// end checkCoin(-) method

    // getProductList(-) method
    // Prompts for a product choice and dispatches on the inserted amount:
    // exact price -> dispense, more -> dispense with change via
    // purchesProduct, less -> refund and re-prompt via repetedCode.
    public static void getProductList(int amount) {
        String status; // NOTE(review): unused local.
        int productOption=0;
        System.out.println("Choose product to press 1 Biscutt 2 water & 3 Energy Bar");
        productOption = sc.nextInt();
        if(productOption == 1){
            // Biscuit costs 25.
            if(amount == 25){
                System.out.println("Please Take your Biscutt product Thank you for purchesing");
                VendingMachine.repetedCode();
            }
            else if(amount > 25){
                VendingMachine.purchesProduct(amount,25);
            }
            else {
                System.out.println("Insufficant Fund Please insert a coin");
                System.out.println("Please take your money of"+amount+" Rupes Thank You Visiting");
                VendingMachine.repetedCode();
            }
        }
        else if(productOption == 2){
            // Water costs 18.
            if(amount == 18){
                System.out.println("Please Take your Water product Thank you for purchesing");
                VendingMachine.repetedCode();
            }
            else if(amount > 18){
                VendingMachine.purchesProduct(amount,18);
            }
            else {
                System.out.println("Insufficant Fund Please insert a coin");
                System.out.println("Please take your money of "+amount+" Rupes Thank You Visiting");
                VendingMachine.repetedCode();
            }
        }
        else if(productOption == 3){
            // Energy bar costs 45.
            if(amount == 45){
                System.out.println("Please Take your Energy Bar product Thank you for purchesing");
                VendingMachine.repetedCode();
            }
            else if(amount > 45){
                VendingMachine.purchesProduct(amount,45);
            }
            else {
                System.out.println("Insufficant Fund Please insert a coin");
                System.out.println("Please take your money of "+amount+" Rupes Thank You Visiting");
                VendingMachine.repetedCode();
            }
        }
        else {
            // Invalid option: message only; no re-prompt (call commented out).
            System.out.println("Please select Option only i for Biscut, 2 for Water & 3 for Energy Bar");
            //VendingMachine.insertCoin();
        }
    }// end getProductList(-) method

    // purchesProduct(-,-) method
    // Dispenses the product and prints the change (amount - productPrice).
    public static void purchesProduct(int amount, int productPrice){
        String status; // NOTE(review): unused local.
        int returnAmmount = amount - productPrice;
        System.out.println("Please Take your product & your money return is :"+returnAmmount);
        System.out.println("Thank you for purchesing product");
        VendingMachine.repetedCode();
    }// end purchesProduct(-,-) method

    // repetedCode() method
    // Asks whether to continue; "yes" re-enters insertCoin() (recursively),
    // "no" exits the JVM; any other answer falls through and returns.
    public static void repetedCode(){
        String status;
        System.out.println();
        System.out.println();
        System.out.println("You want to more product [yes/no]");
        status = sc.next();
        if(status.equals("yes"))
            VendingMachine.insertCoin();
        else if(status.equals("no"))
            System.exit(0);
    }// end repetedCode() method

    // insertCoin() method
    // Reads up to arr.length+1 coins (NOTE(review): `i<=arr.length` gives 11
    // iterations for a 10-element array — harmless only because `arr` is
    // never indexed; both `arr` and the commented-out ArrayList are unused),
    // accumulating valid denominations until 0 is entered, then hands the
    // total to getProductList().
    public static void insertCoin(){
        Integer [] arr = new Integer[10];
        //ArrayList al = new ArrayList();
        int coin = 0;
        boolean flag;
        int amount = 0;
        System.out.println("Insert a coin to purchese product");
        for(int i = 0;i<=arr.length;i++){
            coin = sc.nextInt();
            flag = VendingMachine.checkCoin(coin);
            if(!flag){
                // Rejected denomination: warn, but the loop iteration is consumed.
                System.out.println("Please drop coin only 1,5,10 rupees only");
            }
            else if(coin == 0){
                break;
            }
            else {
                //arr = new Integer[i];
                //al.add(coin);
                amount = amount+coin;
                System.out.println("Your Total Amount is :"+amount+" or Stop insert coin press 0");
            }
        }
        VendingMachine.getProductList(amount);
    } // end insertCoin() method

    // main() method
    public static void main(String[] args) {
        // call insertCoin() method to insert coin
        VendingMachine.insertCoin();
    }//end main() method
} // end class
/* * Copyright 2019 Wultra s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.getlime.security.powerauth.app.tppengine.model.request; /** * @author <NAME>, <EMAIL> */ public class CreateTppAppRequest { /** * TPP app name. */ private String appName; /** * TPP app description. */ private String appDescription; /** * App Type. */ private String appType; /** * TPP app OAuth 2.0 redirect URIs. */ private String[] redirectUris; /** * TPP app OAuth 2.0 scopes. */ private String[] scopes; /** * TPP name (company name). */ private String tppName; /** * TPP license number. */ private String tppLicense; /** * TPP website. */ private String tppWebsite; /** * TPP address. 
*/ private String tppAddress; public String getAppName() { return appName; } public void setAppName(String appName) { this.appName = appName; } public String getAppDescription() { return appDescription; } public void setAppDescription(String appDescription) { this.appDescription = appDescription; } public String getAppType() { return appType; } public void setAppType(String appType) { this.appType = appType; } public String getTppName() { return tppName; } public void setTppName(String tppName) { this.tppName = tppName; } public String getTppLicense() { return tppLicense; } public void setTppLicense(String tppLicense) { this.tppLicense = tppLicense; } public String getTppWebsite() { return tppWebsite; } public void setTppWebsite(String tppWebsite) { this.tppWebsite = tppWebsite; } public String getTppAddress() { return tppAddress; } public void setTppAddress(String tppAddress) { this.tppAddress = tppAddress; } public String[] getRedirectUris() { return redirectUris; } public void setRedirectUris(String[] redirectUris) { this.redirectUris = redirectUris; } public String[] getScopes() { return scopes; } public void setScopes(String[] scopes) { this.scopes = scopes; } }
// In --watch mode Jest runs with --onlyChanged, attempting to
// only run files that have changed. However this only works with
// static dependencies, otherwise it will bail and run all tests on
// any change.
//
// This watch plugin is like a poor man's --onlyChanged option,
// re-running only changed (non-committed or staged) tests or files
// naively without the dependency tree. This assumes using the __tests__
// convention. For example the following tests will run when their
// associated library has changes:
//
//   routes/users.js -> routes/__tests__/users.js
//   routes/users.js -> routes/__tests__/users.test.js
//
// Note: using the --watch-all flag will disable this plugin
// entirely and run all tests as usual.
//
// Note: there seems to be a transitent bug when running a previously
// skipped test immediately again (within a few seconds) will skip
// again even if the file becomes change. It seems like it could be
// an issue with watchman, however it only happens when saving files
// very quickly again.

const path = require('path');

const { getChangedFilesForRoots } = require('jest-changed-files');
const { globsToMatcher } = require('jest-util');
const { memoize } = require('lodash');

const args = process.argv.slice(2);

// Thank so much jest for not exposing this CLI config.
const isWatchAll = args.some((arg) => {
  return arg === '--watch-all';
});

// True when the user passed a positional test-path filter; in that case
// Jest already narrows the run, so the plugin stays out of the way.
const hasFilters = args.some((arg) => {
  return !arg.startsWith('--');
});

/** Jest watch plugin that skips suites whose files have not changed. */
class ChangedFilesPlugin {
  /**
   * Jest watch-plugin entry point. Registers a shouldRunTestSuite hook
   * (unless --watch-all or a path filter is active) that runs a suite only
   * when the test file itself, or its associated library file, is changed.
   * @param {object} jestHooks - hook registry provided by Jest.
   */
  apply(jestHooks) {
    if (!isWatchAll && !hasFilters) {
      jestHooks.shouldRunTestSuite(async ({ config, testPath }) => {
        const matcher = getMatcher(config);
        const changedFiles = await this.getChangedFiles();
        if (!matcher(testPath)) {
          return false;
        }
        const associated = this.getAssociated(testPath);
        return changedFiles.some((file) => {
          if (file === testPath) {
            // If the test file itself has changed, then always run.
            return true;
          } else {
            // Otherwise check if it matches the test's expected lib,
            // for example __tests__/users.js -> users.js
            return file === associated;
          }
        });
      });
    }
  }

  /**
   * Lists changed (uncommitted or staged) files under the current root.
   * @returns {Promise<string[]>} absolute paths of changed files.
   */
  async getChangedFiles() {
    const { changedFiles } = await getChangedFilesForRoots(['.']);
    return Array.from(changedFiles);
  }

  /**
   * Maps a __tests__ file to the library it covers: strips the .js and
   * optional .test suffixes, then resolves two directories up
   * (out of __tests__), e.g. routes/__tests__/users.test.js -> routes/users.js.
   * @param {string} testPath - absolute path of the test file.
   * @returns {string} absolute path of the associated library file.
   */
  getAssociated(testPath) {
    let basename = path.basename(testPath, '.js');
    basename = path.basename(basename, '.test');
    return path.resolve(testPath, `../../${basename}.js`);
  }
}

// Compile config.testMatch globs into a matcher once per project root.
const getMatcher = memoize(
  (config) => {
    return globsToMatcher(config.testMatch);
  },
  (config) => {
    // Just in case allow monorepo usage by only caching per root
    return config.rootDir;
  }
);

module.exports = ChangedFilesPlugin;
# Parse MACHINE bash helper

# Normalizes an architecture string into one of the supported values
# (arm6, arm7, arm64, amd64, x86) and stores it in the global MACHINE.
# MACHINE is left empty when the value is not supported; the error goes
# to stderr.
function validate_arch() {
  machine=$1
  # lets reset to empty
  MACHINE=""
  # Map the various vendor spellings onto the canonical short names.
  if [ "$machine" = "armv6" ] || [ "$machine" = "ARMv6" ]; then
    machine="arm6"
  elif [ "$machine" = "armv7" ] || [ "$machine" = "ARMv7" ] || [ "$machine" = "ARMv8-AArch32" ]; then
    machine="arm7"
  elif [ "$machine" = "arm8" ] || [ "$machine" = "ARMv8" ] || [ "$machine" = "ARMv8-AArch64" ]; then
    machine="arm64"
  elif [ "$machine" = "x86-64" ] || [ "$machine" = "x86_64" ]; then
    machine="amd64"
  fi
  if [ "$machine" != "arm6" ] && \
     [ "$machine" != "arm7" ] && \
     [ "$machine" != "arm64" ] && \
     [ "$machine" != "amd64" ] && \
     [ "$machine" != "x86" ]; then
    echo "$COMMAND: ARCH '$machine' value not supported." >&2
    return
  fi
  MACHINE=$machine
}

# Resolves MACHINE: an already-set MACHINE wins; otherwise falls back to the
# API_PROJECT_DEVICE_ARCH field of the JSON file named by CONFIG (read with
# jq), if present.
function parse_machine() {
  if [[ -n ${MACHINE} ]]; then
    validate_arch ${MACHINE}
    return
  fi
  if [[ ! -z ${CONFIG} ]] && [[ -f ${CONFIG} ]]; then
    arch=$(jq -r .API_PROJECT_DEVICE_ARCH ${CONFIG} | tr -d '\n')
    # jq prints the literal string "null" for a missing key.
    if [ "$arch" != "null" ]; then
      validate_arch $arch
    fi
  fi
}
-- Names of everyone in `people` strictly older than 35.
SELECT first_name, last_name
FROM people
WHERE age > 35;
<filename>scheme.brands/src/main/java/org/mammon/scheme/brands/generic/bank/AbstractBank.java package org.mammon.scheme.brands.generic.bank; import org.mammon.math.FiniteField; import org.mammon.math.Group; import org.mammon.messaging.Identifiable; import org.mammon.messaging.Transactable; import org.mammon.scheme.brands.BrandsSchemeSetup; import org.mammon.scheme.brands.PaymentHashFunction; import org.mammon.scheme.brands.SignatureHashFunction; import org.mammon.scheme.brands.bank.Bank; import org.mammon.util.HashCodeUtil; import org.mammon.util.messaging.AbstractTransactable; public abstract class AbstractBank<G extends Group<G>, F extends FiniteField<F>, T, H extends SignatureHashFunction<G, F>, H0 extends PaymentHashFunction<G, F, T>> extends AbstractTransactable implements Bank<G, F, T, H, H0>, Identifiable, Transactable { private final BrandsSchemeSetup<G, F, T, H, H0> setup; private final Group.Element<G> publicKey; protected AbstractBank(BrandsSchemeSetup<G, F, T, H, H0> setup, Group.Element<G> publicKey) { this.setup = setup; this.publicKey = publicKey; } @Override public Group.Element<G> getPublicKey() { return publicKey; } @Override public BrandsSchemeSetup<G, F, T, H, H0> getSetup() { return setup; } @Override public boolean equals(Object obj) { if (obj == null || !(obj instanceof AbstractBank<?, ?, ?, ?, ?>)) { return false; } AbstractBank<?, ?, ?, ?, ?> other = (AbstractBank<?, ?, ?, ?, ?>) obj; return setup.equals(other.setup) && publicKey.equals(other.publicKey); } @Override public int hashCode() { int hashCode = HashCodeUtil.SEED; hashCode = HashCodeUtil.hash(hashCode, setup); hashCode = HashCodeUtil.hash(hashCode, publicKey); return publicKey.hashCode(); } @Override public String toString() { return "ExampleBank(" + setup.hashCode() + "," + publicKey.toString() + ")"; } }
#!/bin/bash # Color theming if [ -f ~/clouddrive/aspnet-learn/setup/theme.sh ] then . <(cat ~/clouddrive/aspnet-learn/setup/theme.sh) fi if [ -f ~/clouddrive/aspnet-learn/create-aks-exports.txt ] then eval $(cat ~/clouddrive/aspnet-learn/create-aks-exports.txt) fi if [ -f ~/clouddrive/aspnet-learn/create-idtag-exports.txt ] then eval $(cat ~/clouddrive/aspnet-learn/create-idtag-exports.txt) fi eshopRg=${ESHOP_RG} eshopLocation=${ESHOP_LOCATION} eshopIdTag=${ESHOP_IDTAG} while [ "$1" != "" ]; do case $1 in -g | --resource-group) shift eshopRg=$1 ;; -l | --location) shift eshopLocation=$1 ;; * ) echo "Invalid param: $1" exit 1 esac shift done if [ -z "$eshopRg" ] then echo "${newline}${errorStyle}ERROR: Resource group is mandatory. Use -g to set it${defaultTextStyle}${newline}" exit 1 fi rg=`az group show -g $eshopRg -o json` if [ -z "$rg" ] then if [ -z "$eshopLocation" ] then echo "${newline}${errorStyle}ERROR: If resource group has to be created, location is mandatory. Use -l to set it.${defaultTextStyle}${newline}" exit 1 fi echo "Creating resource group \"$eshopRg\" in location \"$eshopLocation\"..." az group create -n $eshopRg -l $eshopLocation if [ ! $? -eq 0 ] then echo "${newline}${errorStyle}ERROR: Can't create resource group${defaultTextStyle}${newline}" exit 1 fi echo "Created resource group \"$eshopRg\" in location \"$eshopLocation\"." else if [ -z "$eshopLocation" ] then eshopLocation=`az group show -g $eshopRg --query "location" -otsv` fi fi # ACR Creation eshopAcrName=${ESHOP_ACRNAME} if [ -z "$eshopAcrName" ] then if [ -z "$eshopIdTag" ] then dateString=$(date "+%Y%m%d%H%M%S") random=`head /dev/urandom | tr -dc 0-9 | head -c 3 ; echo ''` eshopIdTag="$dateString$random" fi echo echo "Creating Azure Container Registry \"eshoplearn$eshopIdTag\" in resource group \"$eshopRg\"..." 
acrCommand="az acr create --name eshoplearn$eshopIdTag -g $eshopRg -l $eshopLocation -o json --sku basic --admin-enabled --query \"name\" -otsv" echo "${newline} > ${azCliCommandStyle}$acrCommand${defaultTextStyle}${newline}" eshopAcrName=`$acrCommand` if [ ! $? -eq 0 ] then echo "${newline}${errorStyle}ERROR creating ACR!${defaultTextStyle}${newline}" exit 1 fi echo ACR instance created! echo fi eshopRegistry=`az acr show -n $eshopAcrName --query "loginServer" -otsv` if [ -z "$eshopRegistry" ] then echo "${newline}${errorStyle}ERROR! ACR server $eshopAcrName doesn't exist!${defaultTextStyle}${newline}" exit 1 fi eshopAcrCredentials=`az acr credential show -n $eshopAcrName --query "[username,passwords[0].value]" -otsv` eshopAcrUser=`echo "$eshopAcrCredentials" | head -1` eshopAcrPassword=`echo "$eshopAcrCredentials" | tail -1` # Grant permisions to AKS if created aksIdentityObjectId=$(az aks show -g $eshopRg -n $ESHOP_AKSNAME --query identityProfile.kubeletidentity.objectId -otsv) if [ ! 
-z "$aksIdentityObjectId" ] then acrResourceId=$(az acr show -n $eshopAcrName -g $eshopRg --query id -o tsv) az role assignment create \ --role AcrPull \ --assignee-object-id $aksIdentityObjectId \ --scope $acrResourceId \ --output none fi echo export ESHOP_RG=$eshopRg >> create-acr-exports.txt echo export ESHOP_LOCATION=$eshopLocation >> create-acr-exports.txt echo export ESHOP_AKSNAME=$ESHOP_AKSNAME >> create-acr-exports.txt echo export ESHOP_LBIP=$ESHOP_LBIP >> create-acr-exports.txt echo export ESHOP_ACRNAME=$eshopAcrName >> create-acr-exports.txt echo export ESHOP_REGISTRY=$eshopRegistry >> create-acr-exports.txt echo export ESHOP_ACRUSER=$eshopAcrUser >> create-acr-exports.txt echo export ESHOP_ACRPASSWORD=$eshopAcrPassword >> create-acr-exports.txt echo export ESHOP_IDTAG=$eshopIdTag >> create-acr-exports.txt echo export ESHOP_IDTAG=$eshopIdTag >> create-idtag-exports.txt echo echo "Created Azure Container Registry \"$eshopAcrName\" in resource group \"$eshopRg\" in location \"$eshopLocation\"." mv -f create-acr-exports.txt ~/clouddrive/aspnet-learn/ mv -f create-idtag-exports.txt ~/clouddrive/aspnet-learn/
package apps;

/**
 * Placeholder for the inventory domain class; no state or behavior yet.
 */
public class Inventory {

}
#!/bin/bash
# Submit a Slurm training job. Options:
#   -g GPU   GPU type for --gres (default v100)
#   -n N     number of GPUs (default 1)
#   -j NAME  job name (defaults to the first positional argument)
# Remaining positional arguments are collected into ARGS and forwarded to
# the training script.

# entire script fails if a single command fails
set -e

# script should be run from the project directory
export PROJECT_DIR="$PWD"

ARGS=""

default_gpu='v100'
number_of_gpus=1

echo "options :"
# Interleave getopts with manual shifting so options and positional
# arguments may appear in any order; each pass consumes the options before
# the next positional, which is appended to ARGS.
while [ $# -gt 0 ]
do
  unset OPTIND
  unset OPTARG
  while getopts :g:n:j: options
  do
    case $options in
      g) echo "-Selected GPU is: $OPTARG"
         default_gpu="$OPTARG"
         ;;
      n) echo "Number of GPUs=$OPTARG"
         number_of_gpus="$OPTARG"
         ;;
      j) echo "Job name is: $OPTARG"
         job_name=$OPTARG
         ;;
      \?) echo "Invalid option: -$OPTARG"
          exit 1
          ;;
      :) echo "Option -$OPTARG requires an argument."
         exit 1
         ;;
    esac
  done
  shift $((OPTIND-1))
  ARGS="${ARGS} $1"
  shift
done

# Default the job name to the first positional argument when -j was not given.
if test -z "$job_name"
then
  args_array=($ARGS)
  job_name=${args_array[0]}
else
  # NOTE(review): this message refers to a nonexistent $var — it merely
  # signals that job_name was already provided.
  echo "\$var is NOT empty"
fi

echo "ARGS :$ARGS"
echo "$@"
echo "--job-name $job_name --gres=gpu:$default_gpu:$number_of_gpus python run.py"

# Surface full Hydra stack traces in the job output.
export HYDRA_FULL_ERROR=1

# launch the training job
sbatch --job-name "$job_name" --gres "gpu:$default_gpu:$number_of_gpus" \
  "$PROJECT_DIR"/bin/train.sbatch "$PROJECT_DIR"/hello_world.py $ARGS
#!/bin/sh
# Flash an RV1108 board with Rockchip's upgrade_tool:
# load the USB boot blob, then write the firmware image at offset 0x40 and
# rtthread.bin at offset 0x1000.
# NOTE(review): exact semantics of `ul`/`wl`/`rd` subcommands come from the
# Rockchip upgrade_tool; `rd` presumably resets/reboots the device — confirm
# against the tool's documentation.
upgrade_tool ul Bin/RV1108_usb_boot_V1.26.bin
upgrade_tool wl 0x40 Bin/Firmware.img
upgrade_tool wl 0x1000 ./rtthread.bin
upgrade_tool rd
package com.codefinity.microcontinuum.common.event.sourcing;

/**
 * Thrown when appending events to the event store fails; a specialization of
 * {@link EventStoreException}.
 */
public class EventStoreAppendException extends EventStoreException {

    private static final long serialVersionUID = 1;

    /**
     * @param aMessage description of the append failure.
     * @param aCause underlying cause.
     */
    public EventStoreAppendException(String aMessage, Throwable aCause) {
        super(aMessage, aCause);
    }

    /**
     * @param aMessage description of the append failure.
     */
    public EventStoreAppendException(String aMessage) {
        super(aMessage);
    }
}
// Framebuffer pixel helpers. NOTE(review): these rely on `width`, `height`
// and `pixels` declared outside this excerpt (presumably members of an
// enclosing framebuffer class where pixels[x][y] is a 3-element RGB vector
// — confirm against the class definition).

// Writes an RGB value to pixel (x, y); out-of-bounds coordinates are
// reported to stdout and ignored.
void setPixel(int x, int y, int r, int g, int b) {
    // Check if the pixel coordinates are within the framebuffer bounds
    if (x >= 0 && x < width && y >= 0 && y < height) {
        pixels[x][y][0] = r; // Set the red component
        pixels[x][y][1] = g; // Set the green component
        pixels[x][y][2] = b; // Set the blue component
    } else {
        std::cout << "Error: Pixel coordinates out of bounds" << std::endl;
    }
}

// Resets every pixel to black (0, 0, 0).
void clear() {
    for (int x = 0; x < width; x++) {
        for (int y = 0; y < height; y++) {
            pixels[x][y][0] = 0; // Set the red component to 0
            pixels[x][y][1] = 0; // Set the green component to 0
            pixels[x][y][2] = 0; // Set the blue component to 0
        }
    }
}

// Returns the RGB triple at (x, y); out-of-bounds coordinates are reported
// to stdout and yield black.
std::vector<int> getPixel(int x, int y) {
    if (x >= 0 && x < width && y >= 0 && y < height) {
        return pixels[x][y]; // Return the RGB color values of the specified pixel
    } else {
        std::cout << "Error: Pixel coordinates out of bounds" << std::endl;
        return std::vector<int>{0, 0, 0}; // Return black color for out-of-bounds pixels
    }
}
// BankAccount class public class BankAccount { private String accountNumber; private double balance; public BankAccount(String accountNumber) { this.accountNumber = accountNumber; this.balance = 0.0; } public void deposit(double amount) { balance += amount; } public boolean withdraw(double amount) { if (balance >= amount) { balance -= amount; return true; } return false; // Insufficient funds } public double getBalance() { return balance; } public boolean transfer(BankAccount destination, double amount) { if (withdraw(amount)) { destination.deposit(amount); return true; } return false; // Transfer failed due to insufficient funds } } // Bank class import java.util.HashMap; import java.util.Map; public class Bank { private Map<String, BankAccount> accounts; public Bank() { this.accounts = new HashMap<>(); } public void createAccount(String accountNumber) { if (!accounts.containsKey(accountNumber)) { accounts.put(accountNumber, new BankAccount(accountNumber)); } } public BankAccount getAccount(String accountNumber) { return accounts.get(accountNumber); } public boolean transfer(String sourceAccountNumber, String destinationAccountNumber, double amount) { BankAccount sourceAccount = accounts.get(sourceAccountNumber); BankAccount destinationAccount = accounts.get(destinationAccountNumber); if (sourceAccount != null && destinationAccount != null) { return sourceAccount.transfer(destinationAccount, amount); } return false; // Transfer failed due to invalid account numbers } } // Usage example public class Main { public static void main(String[] args) { Bank bank = new Bank(); bank.createAccount("123456"); bank.createAccount("789012"); BankAccount account1 = bank.getAccount("123456"); BankAccount account2 = bank.getAccount("789012"); account1.deposit(1000.0); account1.transfer(account2, 500.0); System.out.println("Account 1 balance: " + account1.getBalance()); // Output: 500.0 System.out.println("Account 2 balance: " + account2.getBalance()); // Output: 500.0 } }
package http4s.extend.syntax

import cats.Eq

/**
 * Syntax enrichment: brings an implicit conversion into scope so that any
 * `Throwable` expression gains a typeclass-based `===` operator
 * (mix this trait in, or import it via the project's syntax object).
 */
private[syntax] trait EqSyntax {
  // By-name parameter (=>Throwable): the throwable expression is captured
  // unevaluated and only forced when the ops wrapper uses it.
  implicit def eqSyntax(t: =>Throwable): ThrowableEqOps = new ThrowableEqOps(t)
}

// Value class (extends AnyVal): the wrapper is elided at runtime, so the
// enrichment adds no allocation in the common case.
private[syntax] class ThrowableEqOps(val t: Throwable) extends AnyVal {
  /** Typeclass equality: delegates to the implicit `Eq[Throwable]` instance. */
  def ===(that: =>Throwable)(implicit te: Eq[Throwable]): Boolean = te.eqv(t, that)
}
<gh_stars>1-10
package net.community.apps.tools.srvident;

import net.community.chest.ui.helpers.table.EnumTableColumn;

import org.w3c.dom.Element;

/**
 * Table-column descriptor for the server-identification table: binds a
 * column to one of the {@link IdTableColumns} enum values.
 *
 * <P>Copyright 2007 as per GPLv2</P>
 *
 * @author <NAME>.
 * @since Oct 25, 2007 9:52:55 AM
 */
public class IdTableColInfo extends EnumTableColumn<IdTableColumns> {
	/**
	 *
	 */
	private static final long serialVersionUID = 6359588876563023075L;

	// Builds the descriptor for a specific enum column index.
	public IdTableColInfo (IdTableColumns colIndex)
	{
		super(IdTableColumns.class, colIndex);
	}

	// Restores the column configuration from an XML element
	// (delegated to the EnumTableColumn superclass — behavior defined there).
	public IdTableColInfo (Element elem) throws Exception
	{
		super(IdTableColumns.class, elem);
	}
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package userinterface;

/**
 * Startup/welcome panel for the "Adopt Assist" application: a title bar
 * (jPanel1), an image banner (jPanel3) and a welcome-text section (jPanel2).
 *
 * @author Joy
 */
public class StartupPage extends javax.swing.JPanel {

    /**
     * Creates new form StartupPage
     */
    public StartupPage() {
        initComponents();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    // NOTE(review): any edits below will be lost on regeneration — change the
    // form in the NetBeans GUI builder instead. In particular, the visible
    // string "Adopt Assit" looks like a typo for "Adopt Assist"; fix it via
    // the Form Editor, not here.
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jLabel1 = new javax.swing.JLabel();
        jLabel3 = new javax.swing.JLabel();
        jPanel1 = new javax.swing.JPanel();
        jLabel7 = new javax.swing.JLabel();
        jPanel2 = new javax.swing.JPanel();
        jLabel4 = new javax.swing.JLabel();
        jLabel5 = new javax.swing.JLabel();
        jLabel6 = new javax.swing.JLabel();
        jLabel8 = new javax.swing.JLabel();
        jLabel9 = new javax.swing.JLabel();
        jPanel3 = new javax.swing.JPanel();
        jLabel10 = new javax.swing.JLabel();

        jLabel3.setText("jLabel3");

        jPanel1.setBackground(new java.awt.Color(51, 122, 183));

        jLabel7.setBackground(new java.awt.Color(102, 102, 255));
        jLabel7.setFont(new java.awt.Font("Times New Roman", 2, 48)); // NOI18N
        jLabel7.setForeground(new java.awt.Color(255, 255, 255));
        jLabel7.setText(" ADOPT ASSIST");

        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jLabel7, javax.swing.GroupLayout.PREFERRED_SIZE, 907, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(448, Short.MAX_VALUE))
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addGap(29, 29, 29)
                .addComponent(jLabel7, javax.swing.GroupLayout.PREFERRED_SIZE, 65, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(72, Short.MAX_VALUE))
        );

        jPanel2.setBackground(new java.awt.Color(51, 122, 183));

        jLabel4.setBackground(new java.awt.Color(255, 255, 255));
        jLabel4.setFont(new java.awt.Font("Times New Roman", 2, 48)); // NOI18N
        jLabel4.setForeground(new java.awt.Color(255, 255, 255));
        jLabel4.setText(" Welcome");

        jLabel5.setFont(new java.awt.Font("Times New Roman", 2, 24)); // NOI18N
        jLabel5.setForeground(new java.awt.Color(255, 255, 255));
        jLabel5.setText("We are so glad you are here! We are eager to help support you on your journey to be adoptive parents. ");

        jLabel6.setFont(new java.awt.Font("Times New Roman", 2, 24)); // NOI18N
        jLabel6.setForeground(new java.awt.Color(255, 255, 255));
        jLabel6.setText(" You have many options for how you choose to proceed, and we look forward to helping you plan that journey. ");

        jLabel8.setFont(new java.awt.Font("Times New Roman", 2, 24)); // NOI18N
        jLabel8.setForeground(new java.awt.Color(255, 255, 255));
        jLabel8.setText("Adoption is a lifelong journey and you will always remain a part of the Adopt Assit family. ");

        jLabel9.setFont(new java.awt.Font("Times New Roman", 2, 24)); // NOI18N
        jLabel9.setForeground(new java.awt.Color(255, 255, 255));
        jLabel9.setText("We also have a program called “The Journey of Search” assisting young adult adoptees and birth parents to connect with one another ");

        javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
        jPanel2.setLayout(jPanel2Layout);
        jPanel2Layout.setHorizontalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel2Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(jPanel2Layout.createSequentialGroup()
                        .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jLabel9, javax.swing.GroupLayout.PREFERRED_SIZE, 2298, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jLabel8, javax.swing.GroupLayout.PREFERRED_SIZE, 2298, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addGap(0, 0, Short.MAX_VALUE))
                    .addGroup(jPanel2Layout.createSequentialGroup()
                        .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                            .addComponent(jLabel6, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                .addComponent(jLabel4, javax.swing.GroupLayout.PREFERRED_SIZE, 1135, javax.swing.GroupLayout.PREFERRED_SIZE)
                                .addComponent(jLabel5, javax.swing.GroupLayout.PREFERRED_SIZE, 2298, javax.swing.GroupLayout.PREFERRED_SIZE)))
                        .addContainerGap(25, Short.MAX_VALUE))))
        );
        jPanel2Layout.setVerticalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel2Layout.createSequentialGroup()
                .addGap(29, 29, 29)
                .addComponent(jLabel4, javax.swing.GroupLayout.PREFERRED_SIZE, 43, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addComponent(jLabel5, javax.swing.GroupLayout.PREFERRED_SIZE, 48, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(3, 3, 3)
                .addComponent(jLabel6, javax.swing.GroupLayout.PREFERRED_SIZE, 62, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jLabel8, javax.swing.GroupLayout.PREFERRED_SIZE, 47, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addComponent(jLabel9, javax.swing.GroupLayout.PREFERRED_SIZE, 65, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(173, Short.MAX_VALUE))
        );

        jPanel3.setBackground(new java.awt.Color(255, 255, 255));

        jLabel10.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/ChildFortPage1.gif"))); // NOI18N

        javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
        jPanel3.setLayout(jPanel3Layout);
        jPanel3Layout.setHorizontalGroup(
            jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel3Layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jLabel10, javax.swing.GroupLayout.DEFAULT_SIZE, 1341, Short.MAX_VALUE))
        );
        jPanel3Layout.setVerticalGroup(
            jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel3Layout.createSequentialGroup()
                .addComponent(jLabel10, javax.swing.GroupLayout.PREFERRED_SIZE, 376, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(0, 28, Short.MAX_VALUE))
        );

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(jLabel3))
                    .addGroup(layout.createSequentialGroup()
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addGap(0, 0, Short.MAX_VALUE)))
                .addGap(18, 18, 18)
                .addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 1139, javax.swing.GroupLayout.PREFERRED_SIZE))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(jLabel3))
                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                        .addGap(0, 72, Short.MAX_VALUE)
                        .addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 908, javax.swing.GroupLayout.PREFERRED_SIZE)))
                .addGap(327, 327, 327))
            .addGroup(layout.createSequentialGroup()
                .addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(18, 18, 18)
                .addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(18, 18, 18)
                .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );
    }// </editor-fold>//GEN-END:initComponents


    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel10;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel6;
    private javax.swing.JLabel jLabel7;
    private javax.swing.JLabel jLabel8;
    private javax.swing.JLabel jLabel9;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JPanel jPanel2;
    private javax.swing.JPanel jPanel3;
    // End of variables declaration//GEN-END:variables
}
/** * Layout component that queries for data * with Gatsby's useStaticQuery component * * See: https://www.gatsbyjs.com/docs/use-static-query/ */ import React from 'react' import Helmet from 'react-helmet' import 'bootstrap/dist/css/bootstrap.min.css' import Nav from './Nav' import Footer from './Footer' import '../style.css' import '../new-moon.css' export default function Layout({ children }) { return ( <> <Helmet> </Helmet> <Nav /> <main>{children}</main> <Footer /> </> ) }