text stringlengths 1 1.05M |
|---|
<filename>mimiron/core/config.py
# -*- coding: utf-8 -*-
import json
import os
from jsonschema import validate as validate_schema
from jsonschema.exceptions import ValidationError
from git import Repo
from git import InvalidGitRepositoryError as _InvalidGitRepositoryError
from git import NoSuchPathError as _NoSuchPathError
from mimiron.schemas.config import config_schema
from mimiron.exceptions.config import ConfigLoadError, MalformedConfig
from mimiron.exceptions.config import DeploymentRepositoriesNotSpecified
from mimiron.exceptions.vendor import InvalidGitRepository
from mimiron.exceptions.vendor import NoTFVarsFilesFound
from mimiron.vendor.terraform import TFVarsConfig
__all__ = ['Config']
class Config(object):
    """Loads, validates, and initialises the Mimiron JSON configuration.

    The configuration is a JSON document (default: ``~/.mimiron.json``)
    validated against ``config_schema`` and enriched in :meth:`process`
    with git/tfvars metadata for each deployment repository.
    """

    def __init__(self, config=None, config_path='~/.mimiron.json'):
        # `config` lets callers inject pre-loaded data (e.g. for tests);
        # otherwise `read()` populates `self.data` from `config_path`.
        self.data = config or {}
        self._config_path = os.path.expanduser(config_path)

    def init(self):
        """Reads, validates, then processes the configuration in one call."""
        self.read()
        self.validate()
        self.process()

    def read(self, force=False):
        """Reads base configuration file defined by `config_path`.

        Already-loaded data is kept unless `force` is truthy.

        :raises ConfigLoadError: when the file cannot be opened.
        """
        if self.data and not force:
            return None
        try:
            # NOTE: mode 'rU' was deprecated since Python 3.4 and removed in
            # Python 3.11; plain 'r' already uses universal newlines on py3.
            with open(self._config_path, 'r') as f:
                self.data = json.loads(f.read())
        except IOError:
            raise ConfigLoadError(self._config_path)
        return None

    def validate(self):
        """Validates the base configuration file is correct against the `config_schema`.

        :raises MalformedConfig: when the document violates the schema.
        """
        try:
            validate_schema(self.data, config_schema)
        except ValidationError as e:
            raise MalformedConfig(e.message)

    def _read_tfvars(self, repo):
        """Reads all tfvars (json file) in the given `repo['path']`/terraform/tfvars directory.

        Terraform deployment projects are expected to follow this structure:

        ├── README.md
        ├── scripts
        │   └── ...
        └── terraform
            ...
            ├── main.tf
            ├── tfvars
            │   ├── variables.production.json
            │   └── variables.staging.json
            └── variables.tf

        `tfvars/` follow a flat structure. Variables are free to be split between many JSON
        files and can be grouped based on `<group>` (<name>[.<group>].json).

        NOTE: tfvar files don't need a grouping. If no group is found, the tfvar is assumed
        to be applied on all groups.

        :raises NoTFVarsFilesFound: when the directory is missing or holds no JSON files.
        """
        repo_path = os.path.join(repo['path'], 'terraform/tfvars')
        if not os.path.isdir(repo_path):
            raise NoTFVarsFilesFound(repo_path)
        tfvars_paths = []
        for root, dirs, files in os.walk(repo_path):
            for f in files:
                if not f.endswith('.json'):  # skip non-json files.
                    continue
                tfvars_paths.append(os.path.join(os.path.abspath(root), f))
        if not tfvars_paths:
            raise NoTFVarsFilesFound(repo_path)
        return TFVarsConfig(repo, tfvars_paths)  # aggregates all tfvars files.

    def process(self):
        """Initialises Mimiron using the configuration found in `config_path`.

        Each entry in `terraformRepositories` is augmented in place with its
        expanded path, a git handle, and the aggregated tfvars configuration.

        :raises InvalidGitRepository: when a repo path is not a usable git repo.
        :raises DeploymentRepositoriesNotSpecified: when a repo path does not exist.
        """
        for i, repo in enumerate(self.data['terraformRepositories']):
            repo['path'] = os.path.expanduser(repo['path'])
            try:
                git_repo = Repo(repo['path'])
                # A bare repo has no working tree to read tfvars from.
                if git_repo.bare:
                    raise _InvalidGitRepositoryError
                repo['defaultEnvironment'] = repo.get('defaultEnvironment', None)
                repo['tagEnvironment'] = repo.get('tagEnvironment', None)
                repo['git'] = git_repo
                repo['tfvars'] = self._read_tfvars(repo)
            except _InvalidGitRepositoryError:
                raise InvalidGitRepository(repo['path'])
            except _NoSuchPathError:
                raise DeploymentRepositoriesNotSpecified

    def get(self, key):
        """Retrieves the `value` in `self.data` given `key` (None if missing)."""
        return self.data.get(key)

    def set(self, key, value):
        """Sets the `value` of item at `key` given `value`."""
        self.data[key] = value
|
'use strict';
const { LANGUAGE } = require('@data/config');
const i18next = require('i18next');
const SimplicityEmbed = require('../structures/discord/SimplicityEmbed');
const { EMOJIS: { STARBOARD } } = require('./Constants');
/**
 * Contains various starboard related utility methods.
 * @class StarboardUtil
 */
class StarboardUtil {
  /**
   * Creates an instance of StarboardUtil.
   * @throws {Error} Always — this is a static utility class.
   */
  constructor() {
    throw new Error(`The ${this.constructor.name} class may not be instantiated.`);
  }

  /**
   * Handles a starboard reaction: creates, updates, or deletes the starboard
   * embed in the guild's configured starboard channel.
   *
   * NOTE(review): the constructor forbids instantiation, so an instance method
   * here was unreachable; made `static` so `StarboardUtil.handle(...)` works
   * like the other utility classes.
   *
   * @param {Client} client The Client.
   * @param {Reaction} reaction The reaction that was added.
   * @param {User} user The user that reacted.
   * @returns {Promise<void>}
   */
  static async handle(client, reaction, user) {
    const { emoji, users } = reaction;
    const message = await reaction.message.fetch();
    // Ignore non-star emojis and self-stars.
    if (STARBOARD !== emoji.name || user.id === message.author.id) return;
    const guildData = client.database && await client.database.guilds.get(message.guild.id);
    const channelId = guildData && guildData.starboard;
    const channel = channelId && message.channel.guild.channels.cache.get(channelId);
    if (channel) {
      // Star count excluding the message author's own reaction.
      // NOTE(review): `users.filter` assumes a Collection; on discord.js v12+
      // the manager form would be `users.cache.filter` — confirm library version.
      const reactionsSize = users.filter((u) => u.id !== message.author.id).size;
      // Search embed exists — the starboard post stores the source message id in its footer.
      const messages = await channel.messages.fetch({ limit: 100 });
      const found = messages.find((msg) =>
        msg.author.id === client.user.id &&
        msg.embeds.length &&
        msg.embeds[0].footer &&
        msg.embeds[0].footer.text === message.id,
      );
      // Delete message if users reaction = 0
      if (reactionsSize < 3) {
        if (found && !found.deleted) return found.delete();
        else return;
      }
      const language = (guildData && guildData.lang) || LANGUAGE;
      const t = i18next.getFixedT(language);
      const image = message.attachments.size > 0 ? message.attachments.first().url : null;
      const embed = new SimplicityEmbed({ author: message.author, t })
        .setTitle(`${STARBOARD} ${reactionsSize}`)
        .addField('$$common:starboardJumpToMessage', `[ $$common:clickHere ](${message.url})`)
        .setFooter(message.id)
        .setColor('YELLOW');
      if (message.cleanContent.length) embed.addField('$$common:message', message.cleanContent);
      if (image) embed.setImage(image);
      if (found) found.edit({ embed });
      else channel.send(embed);
    }
  }
}
module.exports = StarboardUtil;
|
<gh_stars>1-10
import async = require("async");
import {HomeRoute} from "./HomeRoute";
import {RootState} from "../states/RootState";
import {ErrorUtil, ISkeletosCommand} from "../../../../core";
import {AbstractRouteAction, AbstractRouteState} from "../../../../web-router";
/**
 * Dummy route action that appends "<route> Started"/"<route> Ended" markers to
 * RootState.loadedSync, demonstrating a synchronous-route command sequence.
 */
export class DummyRouteSyncAction extends AbstractRouteAction<HomeRoute, HomeRoute, RootState> {

    /**
     * Returns the command sequence for this route action: a single
     * asynchronous invocation of changeSyncString.
     */
    protected getCommands(): ISkeletosCommand[] | object {
        return [
            this.callFunctionAsynchronously(this.changeSyncString)
        ];
    }

    /**
     * Appends a "Started" marker for the current route to RootState.loadedSync
     * (initializing the string on first use), then appends the matching
     * "Ended" marker on the next tick before signalling completion.
     *
     * @param callback async-style completion callback invoked once the
     *                 deferred "Ended" marker has been written.
     */
    private changeSyncString(callback: async.ErrorCallback<Error>): void {
        // Prefer the route being navigated to; fall back to the one being left.
        const route: AbstractRouteState = this.newRoute ? this.newRoute : this.oldRoute;
        const routeName: string = ErrorUtil.getDebugName(route);
        if (!this.getRootState(RootState).loadedSync) {
            this.getRootState(RootState).loadedSync = routeName + " Started<br>";
        } else {
            this.getRootState(RootState).loadedSync += routeName + " Started<br>";
        }
        // setTimeout(…, 0) defers the "Ended" marker (and the callback) to the
        // next tick so the "Started" state is observable first.
        setTimeout(() => {
            this.getRootState(RootState).loadedSync += routeName + " Ended<br>";
            callback();
        }, 0);
    }
}
/**
 * Capitalizes a string: first character upper-cased, remainder lower-cased.
 * @param {string} s The input string.
 * @returns {string} The capitalized string.
 */
export default (s) => s.charAt(0).toUpperCase() + s.slice(1).toLowerCase();
|
<gh_stars>0
package edu.neu.coe.csye6225.controller;
import com.amazonaws.services.sns.model.PublishRequest;
import com.amazonaws.services.sns.model.PublishResult;
import com.amazonaws.services.sns.model.SubscribeRequest;
import edu.neu.coe.csye6225.entity.Note;
import edu.neu.coe.csye6225.entity.User;
import edu.neu.coe.csye6225.service.*;
import edu.neu.coe.csye6225.util.QuickResponse;
import net.sf.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import static javax.servlet.http.HttpServletResponse.SC_BAD_REQUEST;
import static javax.servlet.http.HttpServletResponse.SC_OK;
import com.timgroup.statsd.StatsDClient;
import com.timgroup.statsd.NonBlockingStatsDClient;
@RestController
public class AccountController {

    private final AccountService accountService;
    private final AccountValidation accountValidation;
    private AmazonSNSClientService amazonSNSClientService;
    private final NoteService noteService;
    private final AttachmentService attachmentService;

    // Emits per-endpoint hit counters to a local StatsD agent.
    private static final StatsDClient statsd = new NonBlockingStatsDClient("my.prefix", "localhost", 8125);
    private static final Logger logger = LoggerFactory.getLogger(AccountController.class);

    /**
     * changed field dependency injection to constructor injection
     *
     * @param accountService         autowired account service
     * @param accountValidation      validates usernames/passwords and registration state
     * @param noteService            note persistence service
     * @param attachmentService      attachment persistence service
     * @param amazonSNSClientService publishes password-reset requests to an SNS topic
     */
    @Autowired
    public AccountController(AccountService accountService, AccountValidation accountValidation,
                             NoteService noteService, AttachmentService attachmentService, AmazonSNSClientService amazonSNSClientService) {
        this.accountService = accountService;
        this.accountValidation = accountValidation;
        this.noteService = noteService;
        this.attachmentService = attachmentService;
        this.amazonSNSClientService = amazonSNSClientService;
    }

    /**
     * controller for user register "/user/register"
     *
     * Validates the username and password, rejects duplicate registrations,
     * and persists the new user. Failure responses carry a JSON body with
     * "message" and "cause" fields.
     *
     * @param user                request body in json
     * @param httpServletResponse response
     * @return response body in json
     */
    @RequestMapping(method = RequestMethod.POST, value = "/user/register")
    public ResponseEntity<String> register(@RequestBody User user, HttpServletResponse httpServletResponse) {
        // Lazily ensure the backing tables exist before first use.
        accountService.createTable();
        noteService.createNew();
        attachmentService.createNew();
        statsd.incrementCounter("endpoint.userRegister.http.post");
        // validate username
        if (!accountValidation.nameValidation(user.getUsername())) {
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("message", "register failed");
            jsonObject.put("cause", "username not valid");
            httpServletResponse.setHeader("status", String.valueOf(SC_BAD_REQUEST));
            logger.warn("user register failed, " + " reason: [ username not valid ]");
            return ResponseEntity.badRequest()
                    .body(jsonObject.toString());
        }
        // validate password
        else if (!accountValidation.isPasswordStrong(user.getPassword())) {
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("message", "register failed");
            jsonObject.put("cause", "password not strong enough");
            httpServletResponse.setHeader("status", String.valueOf(SC_BAD_REQUEST));
            logger.warn("user register failed, " + " reason: [ password not strong ]");
            return ResponseEntity.badRequest()
                    .body(jsonObject.toString());
        }
        // validate if user already registered
        else if (accountValidation.isUserRegistered(user.getUsername())) {
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("message", "register failed");
            jsonObject.put("cause", "user already registered");
            httpServletResponse.setHeader("status", String.valueOf(SC_BAD_REQUEST));
            logger.warn("user register failed, " + " reason: [ user already exist ]");
            return ResponseEntity.badRequest()
                    .body(jsonObject.toString());
        }
        // sign up into database
        else if (accountService.signUp(user)) {
            JSONObject jsonObject = new JSONObject();
            httpServletResponse.setHeader("status", String.valueOf(HttpStatus.OK));
            jsonObject.put("message", "register success");
            logger.info("user register success, " + " [ welcome ]");
            return ResponseEntity.ok()
                    .body(jsonObject.toString());
        } else {
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("message", "register failed!");
            // Fixed typo: was "unkown" (now consistent with the log line below).
            jsonObject.put("cause", "unknown");
            logger.warn("user register failed, " + " reason: [ unknown ]");
            return ResponseEntity.badRequest()
                    .body(jsonObject.toString());
        }
    }

    /**
     * controller for mapping "/"
     *
     * @param httpServletRequest  verification info
     * @param httpServletResponse response message
     * @return current date if username and password is correct
     * @throws IOException by sendError()
     */
    @GetMapping("/")
    public ResponseEntity<String> getUser(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws IOException {
        accountService.createTable();
        statsd.incrementCounter("endpoint.homepage.http.get");
        // Credentials arrive via HTTP Basic auth in the Authorization header.
        String auth = httpServletRequest.getHeader("Authorization");
        User user = UserVerification.addVerification(auth);
        if (user == null) {
            return QuickResponse.userUnauthorized(httpServletResponse);
        }
        if (accountService.logIn(user)) {
            JSONObject jsonObject = new JSONObject();
            SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            httpServletResponse.setHeader("status", String.valueOf(SC_OK));
            String date = df.format(new Date());
            jsonObject.put("date", date);
            logger.info("user logged in, returned server time : [ " + date + " ]. ");
            return ResponseEntity.ok()
                    .body(jsonObject.toString());
        } else {
            return QuickResponse.userUnauthorized(httpServletResponse);
        }
    }

    /**
     * controller for password reset "/pwdreset"
     *
     * Publishes the reset request for a registered email address to SNS.
     * (Javadoc fixed: this endpoint resets passwords; it does not create notes.)
     *
     * @param resetEmail          request body map; must contain an "email" key
     * @param httpServletResponse response message
     * @return empty body on success, JSON error description otherwise
     * @throws IOException by the QuickResponse helpers
     */
    @RequestMapping(method = RequestMethod.POST, value = "/pwdreset")
    public ResponseEntity<String> resetPassword(@RequestBody Map<String, String> resetEmail,
                                                HttpServletResponse httpServletResponse) throws IOException {
        // User user = UserVerification.addVerification(httpServletRequest.getHeader("Authorization"));
        // NOTE(review): counter name says "note" — possibly a copy-paste from the
        // note endpoint; left unchanged to avoid breaking existing dashboards.
        statsd.incrementCounter("endpoint.note.http.post");
        if (resetEmail == null) {
            return QuickResponse.quickBadRequestConstruct(httpServletResponse, "Request body empty");
        }
        String email = resetEmail.get("email"); // could be null
        // if missing email
        if (email == null || email.equals(""))
            return QuickResponse.quickBadRequestConstruct(httpServletResponse, "email address missing");
        if (accountValidation.isUserRegistered(email)) {
            amazonSNSClientService.publishMessagetoTopic(email);
        } else {
            // Fixed typo: was "registed".
            return QuickResponse.quickBadRequestConstruct(httpServletResponse, "User not registered");
        }
        // NOTE(review): header claims CREATED (201) while the body returns 200 OK —
        // confirm which status the API contract expects.
        httpServletResponse.setHeader("status", String.valueOf(HttpStatus.CREATED));
        return ResponseEntity.ok()
                .body("");
    }
}
|
####################################
# L4D2 dedicated server bootstrap (CentOS/RHEL: yum, passwd --stdin).
# NOTE: this is a Morpheus/ERB template — the <%= ... %> expressions are
# expanded before execution; do not run this file verbatim.

# Create the service account; the password comes from the cypher secret store.
adduser l4d2
echo "<%=cypher.read('secret/L4D2')%>" | passwd --stdin l4d2

# screen + 32-bit runtime libraries required by steamcmd/srcds.
yum install screen glibc.i686 libstdc++.i686 -y

# -p creates missing parents and succeeds when the directories already exist
# (replaces the previous `mkdir ... 2>/dev/null` error-suppression idiom).
mkdir -p /L4D2/Morpheus
cd /L4D2/
####################################
# Install L4D2 (Steam app 222860) via steamcmd.
wget https://steamcdn-a.akamaihd.net/client/installer/steamcmd_linux.tar.gz
tar xf steamcmd_linux.tar.gz
rm -f steamcmd_linux.tar.gz
./steamcmd.sh +login anonymous +force_install_dir '/L4D2/Server' +app_update 222860 -validate +quit
####################################
# Install Metamod
cd /L4D2/Server/left4dead2/
wget https://mms.alliedmods.net/mmsdrop/1.10/mmsource-1.10.7-git971-linux.tar.gz
tar xf mmsource-1.10.7-git971-linux.tar.gz
rm -f mmsource-1.10.7-git971-linux.tar.gz
####################################
# Install Sourcemod
cd /L4D2/Server/left4dead2/
wget https://sm.alliedmods.net/smdrop/1.10/sourcemod-1.10.0-git6497-linux.tar.gz
tar xf sourcemod-1.10.0-git6497-linux.tar.gz
rm -f sourcemod-1.10.0-git6497-linux.tar.gz
####################################
# Create Symlinks — server config lives in /L4D2/Morpheus and is linked into
# the game install so updates never overwrite local configuration.
ln -sf /L4D2/Morpheus/server.cfg /L4D2/Server/left4dead2/cfg/server.cfg
ln -sf /L4D2/Morpheus/banned_user.cfg /L4D2/Server/left4dead2/cfg/banned_user.cfg
ln -sf /L4D2/Morpheus/banned_ip.cfg /L4D2/Server/left4dead2/cfg/banned_ip.cfg
ln -sf /L4D2/Morpheus/motd.txt /L4D2/Server/left4dead2/motd.txt
ln -sf /L4D2/Morpheus/metamod.vdf /L4D2/Server/left4dead2/addons/metamod.vdf
ln -sf /L4D2/Morpheus/host.txt /L4D2/Server/left4dead2/host.txt
ln -sf /L4D2/Morpheus/admins_simple.ini /L4D2/Server/left4dead2/addons/sourcemod/configs/admins_simple.ini
####################################
# Fix Ownership
sudo chown -R l4d2 /L4D2/
####################################
# Start Server (left disabled; run manually as the l4d2 user)
#su l4d2
#/L4D2/Server/srcds_run -game left4dead2 +exec server.cfg +port "<%=customOptions.steamPort%>" +maxplayers 8 -tickrate 100 -pingboost 2 +map c1m1_hotel > /dev/null 2>&1 &
<filename>KimberlyPraxel-FactoryPatternAssignment/src/edu/greenriver/it/driver/Driver.java<gh_stars>1-10
/*
* <NAME>
* 11/06/16
* Driver.java
*/
package edu.greenriver.it.driver;
import java.text.DecimalFormat;
import java.util.Scanner;
import edu.greenriver.it.abstractfactory.EUOrderFactory;
import edu.greenriver.it.abstractfactory.IOrderFactory;
import edu.greenriver.it.abstractfactory.USOrderFactory;
import edu.greenriver.it.shippingratecalculators.Rate;
/**
 * Drives Factory Pattern Assignment
 *
 * @author kimberlypraxel
 * @version 1.0
 */
public class Driver{

    /**
     * Main which starts program
     *
     * @param args unused command-line arguments
     */
    public static void main(String [] args){
        orderMaker();
    }

    /**
     * Logic behind running the program gets user input and sends through
     * factory pattern to determine cost of tax and shipping depending on user
     * input
     */
    public static void orderMaker(){
        Scanner console = new Scanner(System.in);
        int userRegionChoice = 0;
        System.out.println("Welcome to Order Maker");
        System.out.println("1: US");
        System.out.println("2: EU");
        System.out.println("Select shipping region: ");
        IOrderFactory order = null;
        userRegionChoice = console.nextInt();
        if(userRegionChoice == 1){
            System.out.println("Enter Zipcode: ");
            int zip = console.nextInt();
            console.nextLine(); // consume the trailing newline after nextInt()
            order = new USOrderFactory(zip);
        }else if(userRegionChoice == 2){
            System.out.println("Enter country: ");
            console.nextLine(); // consume the trailing newline after nextInt()
            String country = console.nextLine();
            country = country.toLowerCase();
            order = new EUOrderFactory(country);
        }else{
            // Previously an invalid choice left `order` null and caused a
            // NullPointerException below; fail fast with a clear message instead.
            System.out.println("Invalid region choice: " + userRegionChoice);
            console.close();
            return;
        }
        System.out.println("Enter order name: ");
        String orderName = console.nextLine();
        orderName = orderName.toUpperCase();
        System.out.println("Enter subtotal: ");
        double subtotal = console.nextDouble();
        System.out.println("Enter weight: ");
        double weight = console.nextDouble();
        System.out.println("Enter shipping type (US: priority or standard) EU(standard only): ");
        String shippingType = console.next();
        shippingType = shippingType.toLowerCase();
        // The region-specific factory supplies both rate and tax strategies.
        Rate shippingRate = order.getRateObject().getRate(shippingType, weight);
        double tax = order.getTaxObject().calculateTax(subtotal);
        double total = tax + subtotal + shippingRate.getRate();
        DecimalFormat usDollarFormat = new DecimalFormat("$###0.00");
        System.out.println("\n\nORDER NAME: " + orderName + " - WEIGHT: " + weight + "lbs - SHIPPING TYPE: "
                + shippingType.toUpperCase());
        System.out.println("\nOrder Subtotal: " + usDollarFormat.format(subtotal) + "\nShipping Cost: "
                + usDollarFormat.format(shippingRate.getRate()) + "\nOrder Tax: " + usDollarFormat.format(tax));
        System.out.println(" --------\nOrder Total: " + usDollarFormat.format(total));
        // Closing the scanner also closes System.in; fine since the program ends here.
        console.close();
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.fuseki.validation ;
import org.apache.jena.atlas.io.IndentedLineBuffer ;
import org.apache.jena.atlas.json.JsonBuilder ;
import org.apache.jena.atlas.json.JsonObject ;
import org.apache.jena.fuseki.servlets.ServletOps ;
import org.apache.jena.query.Query ;
import org.apache.jena.query.QueryFactory ;
import org.apache.jena.query.QueryParseException ;
import org.apache.jena.query.Syntax ;
import org.apache.jena.sparql.algebra.Algebra ;
import org.apache.jena.sparql.algebra.Op ;
import org.apache.jena.sparql.serializer.SerializationContext ;
/**
 * JSON validator service for SPARQL queries: parses the submitted query string
 * and returns the formatted query plus several algebra renderings, or
 * parse-error details when the query is invalid.
 */
public class QueryValidator extends ValidatorBaseJson {
    public QueryValidator() {}

    @Override
    protected String validatorName() {
        return "SPARQL Query" ;
    }

    // Request parameter names.
    static final String paramQuery = "query" ;
    static final String paramSyntax = "languageSyntax" ;
    // JSON keys used in the response object.
    // (jErrors, jParseError, jParseErrorLine, jParseErrorCol used below are
    // presumably inherited from ValidatorBaseJson — confirm in the base class.)
    static final String jInput = "input" ;
    static final String jFormatted = "formatted" ;
    static final String jAlgebra = "algebra" ;
    static final String jAlgebraQuads = "algebra-quads" ;
    static final String jAlgebraOpt = "algebra-opt" ;
    static final String jAlgebraOptQuads = "algebra-opt-quads" ;

    @Override
    protected JsonObject execute(ValidationAction action) {
        JsonBuilder obj = new JsonBuilder() ;
        obj.startObject() ;
        final String queryString = getArg(action, paramQuery) ;
        // Syntax defaults to "SPARQL" when the parameter is absent or empty.
        String querySyntax = getArgOrNull(action, paramSyntax) ;
        if ( querySyntax == null || querySyntax.equals("") )
            querySyntax = "SPARQL" ;
        Syntax language = Syntax.lookup(querySyntax) ;
        if ( language == null ) {
            ServletOps.errorBadRequest("Unknown syntax: " + querySyntax) ;
            return null ;
        }
        // All output sections are currently always enabled.
        boolean outputSPARQL = true ;
        boolean outputAlgebra = true ;
        boolean outputQuads = true ;
        boolean outputOptimized = true ;
        boolean outputOptimizedQuads = true ;
        obj.key(jInput).value(queryString) ;
        // Attempt to parse it.
        Query query = null ;
        try {
            query = QueryFactory.create(queryString, "http://example/base/", language) ;
        } catch (QueryParseException ex) {
            // On parse failure, return only the input plus an errors array
            // carrying message, line, and column of the first error.
            obj.key(jErrors) ;
            obj.startArray() ; // Errors array
            obj.startObject() ;
            obj.key(jParseError).value(ex.getMessage()) ;
            obj.key(jParseErrorLine).value(ex.getLine()) ;
            obj.key(jParseErrorCol).value(ex.getColumn()) ;
            obj.finishObject() ;
            obj.finishArray() ;
            obj.finishObject() ; // Outer object
            return obj.build().getAsObject() ;
        }
        if ( query != null ) {
            if ( outputSPARQL )
                formatted(obj, query) ;
            if ( outputAlgebra )
                algebra(obj, query) ;
            if ( outputQuads )
                algebraQuads(obj, query) ;
            if ( outputOptimized )
                algebraOpt(obj, query) ;
            if ( outputOptimizedQuads )
                algebraOptQuads(obj, query) ;
        }
        obj.finishObject() ;
        return obj.build().getAsObject() ;
    }

    /** Adds the pretty-printed (re-serialized) query under "formatted". */
    private void formatted(JsonBuilder obj, Query query) {
        IndentedLineBuffer out = new IndentedLineBuffer() ;
        query.serialize(out) ;
        obj.key(jFormatted).value(out.asString()) ;
    }

    /** Adds the raw algebra of the query under "algebra". */
    private void algebra(JsonBuilder obj, Query query) {
        Op op = Algebra.compile(query) ;
        obj.key(jAlgebra).value(string(query, op)) ;
    }

    /** Adds the quad-form algebra under "algebra-quads". */
    private void algebraQuads(JsonBuilder obj, Query query) {
        Op op = Algebra.compile(query) ;
        op = Algebra.toQuadForm(op) ;
        obj.key(jAlgebraQuads).value(string(query, op)) ;
    }

    /** Adds the optimized algebra under "algebra-opt". */
    private void algebraOpt(JsonBuilder obj, Query query) {
        Op op = Algebra.compile(query) ;
        op = Algebra.optimize(op) ;
        obj.key(jAlgebraOpt).value(string(query, op)) ;
    }

    /** Adds the optimized quad-form algebra under "algebra-opt-quads". */
    private void algebraOptQuads(JsonBuilder obj, Query query) {
        Op op = Algebra.compile(query) ;
        op = Algebra.toQuadForm(op) ;
        op = Algebra.optimize(op) ;
        obj.key(jAlgebraOptQuads).value(string(query, op)) ;
    }

    /** Serializes an algebra expression, using the query's prefixes for output. */
    private String string(Query query, Op op) {
        final SerializationContext sCxt = new SerializationContext(query) ;
        IndentedLineBuffer out = new IndentedLineBuffer() ;
        op.output(out, sCxt) ;
        return out.asString() ;
    }
}
|
<filename>src/modules/dpi/reassembly/tcp_reassembly.c<gh_stars>1-10
/*
* tcp_reassembly.c
*
* Created on: Jun 15, 2018
* by: <NAME>
*/
#include "tcp_reassembly.h"
#include <tcpip/mmt_tcpip.h>
#include "../../../lib/memory.h"
/* Context for TCP stream reassembly: pairs the user callback with its
 * opaque argument. Allocated by tcp_reassembly_alloc_init() and released
 * by tcp_reassembly_close(). */
struct tcp_reassembly_struct{
	tcp_session_payload_callback_t callback; /* invoked per reassembled TCP session payload */
	void *user_args;                         /* opaque pointer handed back to `callback` */
};
/* Allocates and initialises a TCP-reassembly context.
 *
 * Returns NULL (reassembly disabled) when `is_enable` is false or no callback
 * is given; otherwise enables TCP reassembly in the DPI engine and returns a
 * zero-initialised context holding `callback`/`user_args`.
 *
 * NOTE(review): `dpi_handler` is accepted but unused here — presumably kept
 * for API symmetry with other reassembly modules; confirm.
 */
tcp_reassembly_t* tcp_reassembly_alloc_init( bool is_enable, mmt_handler_t *dpi_handler, tcp_session_payload_callback_t callback, void *user_args ){
	if( !is_enable || callback == NULL)
		return NULL;
	//call DPI function to activate the reassembly on TCP
	update_protocol( PROTO_TCP, TCP_ENABLE_REASSEMBLE );
	tcp_reassembly_t *ret = mmt_alloc_and_init_zero( sizeof( tcp_reassembly_t ));
	ret->user_args = user_args;
	ret->callback = callback;
	return ret;
}
/* Releases a reassembly context previously returned by
 * tcp_reassembly_alloc_init(). Passing NULL is a harmless no-op. */
void tcp_reassembly_close( tcp_reassembly_t *context ){
	if( context != NULL )
		mmt_probe_free( context );
}
|
from __future__ import division
from __future__ import print_function
import tables.flavor
tables.flavor.restrict_flavors(keep=["numpy"])
import numpy
import tables as PT
import os.path
import sys
import progressbar
import tempfile
from optparse import OptionParser
from multicamselfcal.execute import MultiCamSelfCal
import warnings
import flydra_core.reconstruct
import flydra_analysis.analysis.result_utils as result_utils
def save_calibration_directory(
    IdMat=None,
    points=None,
    Res=None,
    calib_dir=None,
    cam_ids=None,
    square_pixels=True,
    num_cameras_fill=-1,
):
    """Deprecated shim: forwards its arguments to
    ``MultiCamSelfCal.create_calibration_directory``.

    Emits a DeprecationWarning pointing at the caller; use MultiCamSelfCal
    directly in new code.
    """
    warnings.warn(
        "DeprecationWarning: save_calibration_directory backward compatibility shim being used",
        DeprecationWarning,
        stacklevel=2,
    )
    MultiCamSelfCal(calib_dir).create_calibration_directory(
        cam_ids=cam_ids,
        IdMat=IdMat,
        points=points,
        Res=Res,
        square_pixels=square_pixels,
        num_cameras_fill=num_cameras_fill,
    )
def create_new_row(
    d2d, this_camns, this_camn_idxs, cam_ids, camn2cam_id, npoints_by_cam_id
):
    """Build one (IdMat_row, points_row) pair for the calibration input.

    For each camera id, searches the 2D data `d2d` for the observation made
    by a matching camera number; a hit contributes ``1`` to `IdMat_row` and
    ``[x, y, 1.0]`` to `points_row`, a miss contributes ``0`` and three NaNs.
    `npoints_by_cam_id` is incremented in place for every hit.
    """
    n_pts = 0
    IdMat_row = []
    points_row = []
    for cam_id in cam_ids:
        found = False
        for camn, pt_idx in zip(this_camns, this_camn_idxs):
            if camn2cam_id[camn] != cam_id:
                continue
            candidates = d2d[d2d["camn"] == camn]
            for this_row in candidates:  # XXX could be sped up
                if this_row["frame_pt_idx"] == pt_idx:
                    found = True
                    break
        if found:
            npoints_by_cam_id[cam_id] += 1
            n_pts += 1
            IdMat_row.append(1)
            points_row.extend([this_row["x"], this_row["y"], 1.0])
        else:
            IdMat_row.append(0)
            points_row.extend([numpy.nan, numpy.nan, numpy.nan])
    return IdMat_row, points_row
def do_it(
    filename,
    efilename,
    use_nth_observation=None,
    h5_2d_data_filename=None,
    use_kalman_data=True,
    start=None,
    stop=None,
    options=None,
):
    """Build a MultiCamSelfCal calibration directory from flydra .h5 data.

    Collects 2D observations — either from Kalman-tracked objects listed in
    the `efilename` "efile" (``use_kalman_data=True``) or from raw frames
    `start`..`stop` — writes the calibration inputs into ``<filename>.recal``,
    and optionally runs MCSC and exports an XML reconstructor.

    NOTE(review): `execfile` below is Python 2-only; despite the __future__
    imports this path appears unported — confirm the target interpreter
    before relying on ``use_kalman_data=True``.
    """
    if h5_2d_data_filename is None:
        h5_2d_data_filename = filename
    calib_dir = filename + ".recal"
    if not os.path.exists(calib_dir):
        os.makedirs(calib_dir)
    results = result_utils.get_results(filename, mode="r+")
    if use_kalman_data:
        # The "efile" is executed as Python and must define `long_ids` (and
        # optionally `bad`, whose ids are excluded from the set).
        mylocals = {}
        myglobals = {}
        execfile(efilename, myglobals, mylocals)
        use_obj_ids = mylocals["long_ids"]
        if "bad" in mylocals:
            use_obj_ids = set(use_obj_ids)
            bad = set(mylocals["bad"])
            use_obj_ids = list(use_obj_ids.difference(bad))
        kobs = results.root.ML_estimates
        kobs_2d = results.root.ML_estimates_2d_idxs
    h5_2d_data = result_utils.get_results(h5_2d_data_filename, mode="r+")
    camn2cam_id, cam_id2camns = result_utils.get_caminfo_dicts(h5_2d_data)
    cam_ids = list(cam_id2camns.keys())
    cam_ids.sort()
    data2d = h5_2d_data.root.data2d_distorted
    # use_idxs = numpy.arange(data2d.nrows)
    frames = data2d.cols.frame[:]
    qfi = result_utils.QuickFrameIndexer(frames)
    # Histograms: points contributed per camera-count and per camera id.
    npoints_by_ncams = {}
    npoints_by_cam_id = {}
    for cam_id in cam_ids:
        npoints_by_cam_id[cam_id] = 0
    IdMat = []
    points = []
    if use_kalman_data:
        row_keys = []
        if start is not None or stop is not None:
            print("start, stop", start, stop)
            print(
                "WARNING: currently ignoring start/stop because Kalman data is being used"
            )
        for obj_id_enum, obj_id in enumerate(use_obj_ids):
            # Remember where this object's rows begin in `points`.
            row_keys.append((len(points), obj_id))
            # print 'obj_id %d (%d of %d)'%(obj_id, obj_id_enum+1, len(use_obj_ids))
            this_obj_id = obj_id
            k_use_idxs = kobs.get_where_list("obj_id==this_obj_id")
            obs_2d_idxs = kobs.read_coordinates(k_use_idxs, field="obs_2d_idx")
            kframes = kobs.read_coordinates(k_use_idxs, field="frame")
            # Subsample observations to keep the calibration problem tractable.
            kframes_use = kframes[::use_nth_observation]
            obs_2d_idxs_use = obs_2d_idxs[::use_nth_observation]
            widgets = [
                "obj_id % 5d (% 3d of % 3d) "
                % (obj_id, obj_id_enum + 1, len(use_obj_ids)),
                progressbar.Percentage(),
                " ",
                progressbar.Bar(),
                " ",
                progressbar.ETA(),
            ]
            pbar = progressbar.ProgressBar(
                widgets=widgets, maxval=len(kframes_use)
            ).start()
            for n_kframe, (kframe, obs_2d_idx) in enumerate(
                zip(kframes_use, obs_2d_idxs_use)
            ):
                pbar.update(n_kframe)
                if 0:
                    k_use_idx = k_use_idxs[n_kframe * use_nth_observation]
                    print(kobs.read_coordinates(numpy.array([k_use_idx])))
                # Old PyTables (<= 1.3.3) requires plain Python ints for indexing.
                if PT.__version__ <= "1.3.3":
                    obs_2d_idx_find = int(obs_2d_idx)
                    kframe_find = int(kframe)
                else:
                    obs_2d_idx_find = obs_2d_idx
                    kframe_find = kframe
                obj_id_save = int(obj_id)  # convert from possible numpy scalar
                # sys.stdout.write(' reading frame data...')
                # sys.stdout.flush()
                obs_2d_idx_find_next = obs_2d_idx_find + numpy.uint64(1)
                kobs_2d_data = kobs_2d.read(
                    start=obs_2d_idx_find, stop=obs_2d_idx_find_next
                )
                # sys.stdout.write('done\n')
                # sys.stdout.flush()
                assert len(kobs_2d_data) == 1
                kobs_2d_data = kobs_2d_data[0]
                # Row interleaves (camn, point-index) pairs.
                this_camns = kobs_2d_data[0::2]
                this_camn_idxs = kobs_2d_data[1::2]
                # sys.stdout.write(' doing frame selections...')
                # sys.stdout.flush()
                if 1:
                    this_use_idxs = qfi.get_frame_idxs(kframe_find)
                elif 0:
                    this_use_idxs = numpy.nonzero(frames == kframe_find)[0]
                else:
                    this_use_idxs = data2d.get_where_list("frame==kframe_find")
                # sys.stdout.write('done\n')
                # sys.stdout.flush()
                if PT.__version__ <= "1.3.3":
                    this_use_idxs = [int(t) for t in this_use_idxs]
                d2d = data2d.read_coordinates(this_use_idxs)
                if len(this_camns) < options.min_num_points:
                    # not enough points to contribute to calibration
                    continue
                npoints_by_ncams[len(this_camns)] = (
                    npoints_by_ncams.get(len(this_camns), 0) + 1
                )
                IdMat_row, points_row = create_new_row(
                    d2d,
                    this_camns,
                    this_camn_idxs,
                    cam_ids,
                    camn2cam_id,
                    npoints_by_cam_id,
                )
                IdMat.append(IdMat_row)
                points.append(points_row)
                ## print 'running total of points','-'*20
                ## for cam_id in cam_ids:
                ##     print 'cam_id %s: %d points'%(cam_id,npoints_by_cam_id[cam_id])
                ## print
            pbar.finish()
    if start is None:
        start = 0
    if stop is None:
        stop = int(frames.max())
    if not use_kalman_data:
        # Raw-frame mode: walk every Nth frame in [start, stop].
        row_keys = None
        count = 0
        for frameno in range(start, stop + 1, use_nth_observation):
            this_use_idxs = qfi.get_frame_idxs(frameno)
            d2d = data2d.read_coordinates(this_use_idxs)
            d2d = d2d[~numpy.isnan(d2d["x"])]
            this_camns = d2d["camn"]
            unique_camns = numpy.unique(this_camns)
            if len(this_camns) != len(unique_camns):
                # ambiguity - a camera has > 1 point
                continue
            this_camn_idxs = numpy.array([0] * len(this_camns))
            if len(this_camns) < options.min_num_points:
                # not enough points to contribute to calibration
                continue
            npoints_by_ncams[len(this_camns)] = (
                npoints_by_ncams.get(len(this_camns), 0) + 1
            )
            count += 1
            IdMat_row, points_row = create_new_row(
                d2d, this_camns, this_camn_idxs, cam_ids, camn2cam_id, npoints_by_cam_id
            )
            IdMat.append(IdMat_row)
            points.append(points_row)
    print("%d points" % len(IdMat))
    print("by camera id:")
    for cam_id in cam_ids:
        print(" %s: %d" % (cam_id, npoints_by_cam_id[cam_id]))
    print("by n points:")
    max_npoints = 0
    for ncams in npoints_by_ncams:
        print(" %d: %d" % (ncams, npoints_by_ncams[ncams]))
        max_npoints = max(max_npoints, npoints_by_ncams[ncams])
    print()
    if max_npoints < 10:
        print("not enough points, aborting", file=sys.stderr)
        results.close()
        h5_2d_data.close()
        sys.exit(1)
    # Transpose so rows are cameras and columns are observations.
    IdMat = numpy.array(IdMat, dtype=numpy.uint8).T
    points = numpy.array(points, dtype=numpy.float32).T
    # resolution
    Res = []
    for cam_id in cam_ids:
        image_table = results.root.images
        arr = getattr(image_table, cam_id)
        imsize = arr.shape[1], arr.shape[0]
        Res.append(imsize)
    Res = numpy.array(Res)
    cam_centers = []
    cam_calibrations = []
    if options.camera_center_reconstructor:
        # Seed MCSC with known camera centers from an existing reconstructor.
        creconstructor = flydra_core.reconstruct.Reconstructor(
            cal_source=options.camera_center_reconstructor
        )
        cam_centers = numpy.asarray(
            [creconstructor.get_camera_center(cam_id)[:, 0] for cam_id in cam_ids]
        )
        flydra_core.reconstruct.save_ascii_matrix(
            cam_centers, os.path.join(calib_dir, "original_cam_centers.dat")
        )
    intrinsics_reconstructor = options.undistort_intrinsics_reconstructor
    if intrinsics_reconstructor and os.path.exists(intrinsics_reconstructor):
        # Export per-camera .rad intrinsics files for radial-distortion undo.
        tdir = tempfile.mkdtemp()
        reconstructor = flydra_core.reconstruct.Reconstructor(
            cal_source=intrinsics_reconstructor
        )
        for i, cam_id in enumerate(cam_ids):
            fname = os.path.join(tdir, "%s.rad" % cam_id)
            scc = reconstructor.get_SingleCameraCalibration(cam_id)
            scc.helper.save_to_rad_file(fname)
            cam_calibrations.append(fname)
    intrinsics_yaml = options.undistort_intrinsics_yaml
    if intrinsics_yaml and os.path.exists(intrinsics_yaml):
        for cam_id in cam_ids:
            fname = os.path.join(intrinsics_yaml, "%s.yaml" % cam_id)
            cam_calibrations.append(fname)
    # Only undo radial distortion when every camera has a calibration file.
    undo_radial_distortion = len(cam_calibrations) == len(cam_ids)
    mcsc = MultiCamSelfCal(calib_dir)
    mcsc.create_calibration_directory(
        cam_ids=cam_ids,
        IdMat=IdMat,
        points=points,
        Res=Res,
        cam_calibrations=cam_calibrations,
        cam_centers=cam_centers,
        radial_distortion=undo_radial_distortion,
        square_pixels=1,
        num_cameras_fill=options.num_cameras_fill,
    )
    results.close()
    h5_2d_data.close()
    if row_keys is not None:
        # Map of (points-row offset, obj_id), useful for post-hoc analysis.
        row_keys = numpy.array(row_keys)
        flydra_core.reconstruct.save_ascii_matrix(
            row_keys, os.path.join(calib_dir, "obj_ids_zero_indexed.dat"), isint=True
        )
    if options.run_mcsc:
        caldir = mcsc.execute(silent=False)
        print("\nfinished: result in ", caldir)
        if options.output_xml:
            fname = os.path.join(caldir, "reconstructor.xml")
            recon = flydra_core.reconstruct.Reconstructor(cal_source=caldir)
            recon.save_to_xml_filename(fname)
            print("\nfinished: new reconstructor in", fname)
def main():
    """Command-line entry point.

    Parses FILE (a flydra .h5 data file) plus an optional EFILE (a python
    snippet selecting which 3D object ids to use/exclude) and hands
    everything to do_it().

    Raises:
        ValueError: if no EFILE was given while Kalman objects are enabled.
    """
    usage = "%prog FILE EFILE [options]"
    usage += """
The basic idea is to watch some trajectories with::
kdviewer <DATAfilename.h5> --n-top-traces=10
Find the top traces, reject any bad ones, and put them in an "efile".
The form of the efile is::
# Lots of traces
long_ids = [1,2,3,4]
# Exclude from above
bad = [3]
Then run this program::
flydra_analysis_generate_recalibration <DATAfilename.h5> [EFILENAME] [options]
To ignore 3D trajectories and simply use all data::
export DATA2D=<DATAfilename.h5>
flydra_analysis_generate_recalibration --2d-data ${DATA2D} --disable-kalman-objs ${DATA2D}
"""
    parser = OptionParser(usage)
    parser.add_option(
        "--use-nth-observation", type="int", dest="use_nth_observation", default=1
    )
    parser.add_option(
        "--2d-data", type="string", dest="h5_2d_data_filename", default=None
    )
    # Stores False when given: EFILE-less runs must disable Kalman objects.
    parser.add_option(
        "--disable-kalman-objs",
        action="store_false",
        default=True,
        dest="use_kalman_data",
    )
    parser.add_option(
        "--start", dest="start", type="int", help="first frame", metavar="START"
    )
    parser.add_option(
        "--stop", dest="stop", type="int", help="last frame", metavar="STOP"
    )
    parser.add_option("--min-num-points", type="int", default=3)
    # NOTE: fixed a duplicated "should be should " in the original help text.
    parser.add_option(
        "--num-cameras-fill",
        type="int",
        help="when a point is missing from one camera, how many other cameras should "
        "be used to calculate the position of the missing point. In general, set this to 0 "
        "(disable) or -1 (use all cameras). Only choose other values if you know what you are doing.",
        default=-1,
    )
    parser.add_option(
        "--undistort-intrinsics-yaml",
        help="path to a directory containing .yaml files, as created by the OpenCV/ROS "
        "camera calibration tools. If yaml files corresponding to the ID of every camera "
        "are found then UNDO_RADIAL is set in the multicamselfcal config, and radial distortion "
        "is corrected before computing calibration",
    )
    # NOTE: added the missing spaces at the implicit string-concatenation
    # boundaries (the original rendered "areused" and "thecameras").
    parser.add_option(
        "--camera-center-reconstructor",
        help="path to a reconstructor dir/xml file. The camera centers are "
        "used to maximally align the new calibration to the centers of the "
        "cameras in this reconstructor",
    )
    parser.add_option(
        "--undistort-intrinsics-reconstructor",
        help="path to a reconstructor dir/xml file. If intrinsics for "
        "every camera are known then radial distortion "
        "is corrected before computing calibration",
    )
    parser.add_option(
        "--run-mcsc",
        action="store_true",
        default=False,
        help="run multicamselfcal on the exported data",
    )
    parser.add_option(
        "--output-xml",
        action="store_true",
        default=False,
        help="save the new reconstructor in xml format",
    )
    (options, args) = parser.parse_args()

    if len(args) > 2:
        print(
            "arguments interpreted as FILE and EFILE supplied more than once",
            file=sys.stderr,
        )
        parser.print_help()
        return
    if len(args) < 1:
        parser.print_help()
        return

    h5_filename = args[0]
    if len(args) == 2:
        efilename = args[1]
    else:
        efilename = None
        # Without an EFILE there is no way to select Kalman objects, so the
        # user must have passed --disable-kalman-objs.
        if options.use_kalman_data is not False:
            # NOTE: fixed the unbalanced parenthesis in this message.
            raise ValueError(
                "Kalman objects have not been disabled, but you did not specify "
                "an EFILE (hint: specify an EFILE or use --disable-kalman-objs)"
            )
    do_it(
        h5_filename,
        efilename,
        use_nth_observation=options.use_nth_observation,
        h5_2d_data_filename=options.h5_2d_data_filename,
        use_kalman_data=options.use_kalman_data,
        start=options.start,
        stop=options.stop,
        options=options,
    )
|
/*
Paste the contents of the clipboard into the workspace.
Uses the C standard library and Windows console routines.
Remarks:
Refer to util/lib/obj/src/cli_init_roll.c and util/bin/obj/src/ty.c
*/
# define CBR
# define CLI_W32
# include <conio.h>
# include <stdio.h>
# include <stdlib.h>
# include "../../../incl/config.h"
/*
 * Paste the clipboard contents into the workspace at the cursor, then
 * redraw. Returns 0x01 on success, 0x00 on any failure (each failure
 * path prints a diagnostic first).
 *
 * NOTE(review): R(), ADD(), OR() and the CLI_* offsets are macros from
 * ../../../incl/config.h; the structural comments below describe the
 * apparent intent and should be confirmed against that header.
 */
signed(__cdecl cli_pasting_beta(CLI_W32_STAT(*argp))) {
    /* **** DATA, BSS and STACK */
    auto signed char CR = ('\r');
    auto signed char LF = ('\n');
    auto CLI_COORD coord[0x02];
    auto cli_page_t *page;
    auto void *g;
    auto signed short *w;
    auto signed char *cur,*base,*buff,*p,*b;
    auto signed i,r;
    auto signed offset;
    auto signed pages;
    auto signed kept;
    auto signed short cr;
    auto signed short flag;
    auto signed short y;
    /* NOTE(review): CR, LF, coord, g, w, cr and y appear unused here. */
    /* **** CODE/TEXT */
    if(!argp) return(0x00);
    /* Clipboard buffer; nothing to paste when it is empty. */
    b = (signed char(*)) (*(CLI_B+(R(base,R(clipboard,R(ty,*argp))))));
    if(!b) {
        printf("%s \n","<< Empty at *(CLI_B+(R(base,R(clipboard,R(ty,*argp..");
        return(0x00);
    }
    /* Current spool page, cursor position and roll base. */
    page = (*(CLI_INDEX+(R(page,R(spool,R(ty,*argp))))));
    cur = (*(CLI_INDEX+(R(cur,R(ty,*argp)))));
    base = (*(CLI_BASE+(R(base,R(roll,R(ty,*argp))))));
    /* Remember the cursor offset relative to the roll base so the cursor
     * can be restored after the page structure is rebuilt. */
    r = compare(cur,base);
    offset = (r);
    // to append
    /* Drop any previously kept appendant text. */
    p = (*(CLI_OFFSET+(R(append,R(ty,*argp)))));
    if(p) {
        embed(0x00,p);
        free(p);
        p = (0x00);
    }
    /* Keep whatever text follows the cursor so it can be re-appended
     * after the pasted pages are inserted. */
    r = ct(cur);
    if(!r) p = (0x00);
    else {
        kept = keep(&p,cur);
        if(!kept) {
            printf("%s \n","<< Error at fn. keep()");
            return(0x00);
        }}
    /* NOTE(review): when ct(cur)==0, `kept` stays uninitialized; that is
     * tolerated because the release() below is guarded by `if(p)`. */
    *(CLI_OFFSET+(R(append,R(ty,*argp)))) = (p);
    // to merge pages
    /* Truncate the line at the cursor and commit the page (no history). */
    *cur = (0x00);
    // r = cli_book(&(R(ty,*argp)));
    r = cli_book_no_history(&(R(ty,*argp)));
    if(!r) {
        printf("%s \n","<< Error at fn. cli_book_no_history()");
        return(0x00);
    }
    /* Split the clipboard buffer into spool pages; r = page count. */
    r = cli_copy_to_pages(CLI_PBR&(R(flag,*page)),&(R(spool,R(clipboard,R(ty,*argp)))),b);
    if(!r) {
        printf("%s \n","<< Error at fn. cli_copy_to_pages()");
        return(0x00);
    }
    pages = (r);
    /* Splice the clipboard pages into the workspace spool and merge. */
    r = cli_insert_pages(&(R(spool,R(ty,*argp))),&(R(spool,R(clipboard,R(ty,*argp)))));
    if(!r) {
        printf("%s \n","<< Error at fn. cli_insert_pages()");
        return(0x00);
    }
    r = cli_merge_pages(&(R(spool,R(ty,*argp))));
    if(!r) {
        printf("%s \n","<< Error at fn. cli_merge_pages()");
        return(0x00);
    }
    r = cli_connect_with_workspace(page,&(R(ty,*argp)));
    if(!r) {
        printf("%s \n","<< Error at fn. cli_connect_with_workspace()");
        return(0x00);
    }
    /* Restore the saved cursor offset and re-fetch the cursor pointer. */
    R(offset,R(ty,*argp)) = (offset);
    ADD(*(CLI_INDEX+(R(cur,R(ty,*argp)))),R(offset,R(ty,*argp)));
    cur = (*(CLI_INDEX+(R(cur,R(ty,*argp)))));
    /* Redraw: force a full clear when more than one page was pasted. */
    flag = (CG_EMUL);
    i = (pages);
    if(0x01<(i)) {
        OR(R(flag,R(ty,*argp)),CLI_FORCED);
        OR(flag,CG_CLEAR);
    }
    r = cli_gram_beta(flag,cur,argp);
    if(!r) {
        /* empty or..
        printf("%s \n","<< Error at fn. cli_gram_beta()");
        return(0x00);
        //*/
    }
    /* Walk the remaining pasted pages and render each one. */
    while(--i) {
        page = R(d,*page);
        if(!page) {
            printf("%s \n","<< Could not find a page..");
            return(0x00);
        }
        if(0x01<(i)) flag = (CG_CLEAR|CG_EMUL);
        else flag = (CG_EMUL);
        r = cli_coord_page_beta(flag,page,argp);
        if(!r) {
            /* empty or..
            printf("%s \n","<< Error at fn. cli_coord_page_beta()");
            return(0x00);
            //*/
        }}
    // depart.
    /* Leave the caret at column 0 of the last pasted page's row. */
    R(y,*(CLI_INDEX+(R(coord,R(ty,*argp))))) = (R(y,*(CLI_BASE+(R(coord,*page)))));
    R(x,*(CLI_INDEX+(R(coord,R(ty,*argp))))) = (0x00);
    *(CLI_INDEX+(R(page,R(spool,R(ty,*argp))))) = (page);
    // add appendant p.
    /* Rebuild the page base as (page text + kept appendant):
     * size = ct(base) + ct(p) + 1 terminator. */
    r = ct(*(CLI_BASE+(R(base,*page))));
    offset = (r);
    R(offset,R(ty,*argp)) = (offset);
    i = (0x00);
    ADD(i,R(offset,R(ty,*argp)));
    r = ct(p);
    ADD(i,r);
    i++;
    i = (i*(sizeof(*buff)));
    buff = (signed char(*)) malloc(i);
    if(!buff) {
        printf("%s \n","<< Error at fn. malloc()");
        return(0x00);
    }
    r = concat_b(buff,*(CLI_BASE+(R(base,*page))),p,(void*) 0x00);
    if(!r) {
        printf("%s \n","<< Error at fn. concat_b()");
        return(0x00);
    }
    /* Swap the freshly concatenated buffer in for the old page base. */
    r = embed(0x00,*(CLI_BASE+(R(base,*page))));
    free(*(CLI_BASE+(R(base,*page))));
    *(CLI_BASE+(R(base,*page))) = (buff);
    buff = (0x00);
    // release an appendant
    if(p) {
        r = release(kept,&p);
        if(kept^(r)) {
            printf("%s \n","<< Error at fn. release()");
            return(0x00);
        }
        *(CLI_OFFSET+(R(append,R(ty,*argp)))) = (p);
    }
    /* Preserve the CLI_FORCED bit across the final reconnect. */
    i = (0x00);
    if(CLI_FORCED&(R(flag,R(ty,*argp)))) i++;
    r = cli_connect_with_workspace(page,&(R(ty,*argp)));
    if(!r) {
        printf("%s \n","<< Error at fn. cli_connect_with_workspace()");
        return(0x00);
    }
    R(offset,R(ty,*argp)) = (offset);
    ADD(*(CLI_INDEX+(R(cur,R(ty,*argp)))),R(offset,R(ty,*argp)));
    if(i) OR(R(flag,R(ty,*argp)),CLI_FORCED);
    OR(R(flag,R(ty,*argp)),CLI_REFRESH);
    return(0x01);
}
|
import { ModuleCategory, ModuleCategories } from './../types/index';
import { Tools, ChecklistItem, Module } from '../types';
// Collect every checklist item of a module into one flat array.
// Structure: module.checkLists -> checklist -> question -> item[].
const getAllChecklistItems = (m: Module) => {
  const perQuestion = Object.values(m.checkLists).flatMap(checklist =>
    Object.values(checklist),
  );
  return perQuestion.flat();
};
// Names of every tool the user answered affirmatively, across all categories.
export const getSelectedTools = (tools: Tools) => {
  const selected: string[] = [];
  for (const category of Object.keys(tools)) {
    for (const tool of Object.keys(tools[category])) {
      if (tools[category][tool].response) {
        selected.push(tool);
      }
    }
  }
  return selected;
};
// Intersection of a checklist item's applicable tools with the user's
// selected tools; empty when the item declares no tools at all.
export const getSupportedTools = (
  checkListItem: ChecklistItem,
  selectedTools: string[],
): string[] => {
  const itemTools = checkListItem.tools;
  if (!itemTools) {
    return [];
  }
  const selected = new Set(selectedTools);
  return itemTools.filter(tool => selected.has(tool));
};
export const getNumberOfAnsweredQuestions = (m: Module, tools: Tools) => {
if (!m.checkLists) {
return 0;
}
const selectedTools = getSelectedTools(tools);
const allChecklistItems = getAllChecklistItems(m);
return allChecklistItems.map(checklistItem => {
return getSupportedTools(checklistItem, selectedTools).length > 0;
})
.filter(isAnswered => isAnswered).length;
};
// Total number of checklist items in a module; 0 when it has no checklists.
export const getNumberOfCheckListItems = (m: Module) =>
  m.checkLists ? getAllChecklistItems(m).length : 0;
// A category's display name is stored on each of its modules; read it from
// the first one.
export const getCategoryName = (categoryData: ModuleCategory) => {
  const [firstModule] = Object.values(categoryData);
  return firstModule.category;
};
// Render a module as markdown: the assessment question followed by optional
// "Guidance" and "Resources" sections. Empty string for a missing module.
export const getModuleDescription = (m: Module) => {
  if (!m) {
    return "";
  }
  const lines = [m.assessmentQuestion];
  if (m.guidance) {
    lines.push('', '#### Guidance:', '', m.guidance);
  }
  if (m.resources) {
    lines.push('', '#### Resources:', '');
    m.resources.forEach(resource => lines.push(`+ ${resource}`));
  }
  return lines.join('\n');
};
export const getModule = (categories: ModuleCategories, categoryName: string, moduleName: string) => {
if(!categories || !Object.entries(categories).length) return undefined;
const moduleCategory = categories[categoryName];
return moduleCategory ? moduleCategory[moduleName] : undefined;
} |
/// <summary>
/// A pattern to abstractify transactions from storage APIs.
/// Implementations own the lifetime of the underlying storage resources,
/// hence the synchronous and asynchronous disposal contracts.
/// </summary>
public interface IUnitOfWork : IDisposable, IAsyncDisposable
{
    /// <summary>
    /// Gets the repository for a type. Will by default create an <see cref="EntityRepository{T}"/>.
    /// </summary>
    /// <typeparam name="T">The entity type the repository manages.</typeparam>
    /// <returns>A repository scoped to this unit of work.</returns>
    IRepository<T> GetRepository<T>() where T : class;
}
/// <summary>
/// Represents a repository for a specific type.
/// </summary>
/// <typeparam name="T">The entity type stored in the repository.</typeparam>
public interface IRepository<T> where T : class
{
    // Define repository methods here, e.g., Add, Update, Delete, GetById, etc.
}
/// <summary>
/// Represents a concrete implementation of the IUnitOfWork interface.
/// </summary>
public class UnitOfWork : IUnitOfWork
{
    /// <summary>
    /// Creates a new <see cref="EntityRepository{T}"/> for the requested entity type.
    /// </summary>
    public IRepository<T> GetRepository<T>() where T : class
    {
        return new EntityRepository<T>();
    }

    /// <summary>
    /// Releases resources held by this unit of work.
    /// IUnitOfWork extends IDisposable, so this member is required for the
    /// class to compile; no storage resources are held yet, so it is a no-op.
    /// </summary>
    public void Dispose()
    {
        // Nothing to release yet; dispose the storage connection here later.
    }

    /// <summary>
    /// Asynchronous counterpart of <see cref="Dispose"/>, required by
    /// IAsyncDisposable. Completes synchronously for now.
    /// </summary>
    public System.Threading.Tasks.ValueTask DisposeAsync()
    {
        Dispose();
        return default;
    }
}
/// <summary>
/// Represents a generic repository for entities of type T.
/// Default repository returned by <see cref="UnitOfWork.GetRepository{T}"/>.
/// </summary>
/// <typeparam name="T">The entity type stored in the repository.</typeparam>
public class EntityRepository<T> : IRepository<T> where T : class
{
    // Implement the repository methods for the specified type
}
/*
* Copyright (c) 2016 <NAME> <<EMAIL>>.
* Released under the MIT License
* http://www.opensource.org/licenses/mit-license.php
*/
/**
* Projections content module
*
* @param {type} ko
* @param {type} $
* @returns {ProjectionsViewModel}
*/
define(['knockout',
    'jquery',
    'model/Constants'],
    function (ko, $, constants) {

        /**
         * The view model for the Projections panel.
         * @param {Globe} globe The globe that provides the supported projections
         * @constructor
         */
        function ProjectionsViewModel(globe) {
            var self = this;

            // Projections the UI offers, in display order.
            self.projections = ko.observableArray([
                constants.PROJECTION_NAME_3D,
                constants.PROJECTION_NAME_EQ_RECT,
                constants.PROJECTION_NAME_MERCATOR,
                constants.PROJECTION_NAME_NORTH_POLAR,
                constants.PROJECTION_NAME_SOUTH_POLAR,
                constants.PROJECTION_NAME_NORTH_UPS,
                constants.PROJECTION_NAME_SOUTH_UPS,
                constants.PROJECTION_NAME_NORTH_GNOMONIC,
                constants.PROJECTION_NAME_SOUTH_GNOMONIC
            ]);

            // Track the current projection. Use the shared constant instead
            // of the former hard-coded '3D' literal so the default always
            // matches the first entry of the list above.
            self.currentProjection = ko.observable(constants.PROJECTION_NAME_3D);

            /**
             * Projection click handler: remembers the selection and applies
             * it to the globe.
             * @param {String} projectionName One of the names listed above.
             */
            self.changeProjection = function (projectionName) {
                // Capture the selection
                self.currentProjection(projectionName);
                // Change the projection
                globe.setProjection(projectionName);
            };
        }

        return ProjectionsViewModel;
    }
);
|
# Sum the elements of a small sequence.
# The original shadowed the builtins `list` and `sum`, which made the
# subsequent `list(...)` call a TypeError ('list' object is not callable);
# the identity `map(lambda x: x, ...)` was also redundant.
numbers = [1, 2, 3, 4, 5]
total = sum(numbers)
#!/bin/bash
# Benchmark syscall overhead: run the Go benchmarks, then a pure-C
# reference binary the same number of times, and clean up afterwards.

# Build the C reference benchmark.
gcc -O2 c/syscall_bench.c -o syscall_pure_c

# Run the Go benchmarks (5 repetitions, verbose, generous timeout).
go test -bench=. -count=5 -timeout 20m -v

# Run the C benchmark 5 times for comparison.
for i in $(seq 1 5); do
    ./syscall_pure_c
done

# Remove the build artifact.
rm syscall_pure_c
<filename>jwx/src/main/java/weixin/liuliangbao/jsonbean/Update.java
package weixin.liuliangbao.jsonbean;
import java.util.HashMap;
import java.util.Map;
/**
 * Bean describing the result of an update request (code/message/detail),
 * used as a JSON mapping target per this package's conventions.
 *
 * Created by aa on 2015/11/27.
 */
public class Update {

    // Result code returned by the remote service.
    private String code;
    // Human-readable result message.
    private String message;
    // Optional detail text expanding on the message.
    private String detail;
    // Additional, schema-less fields of the response.
    private Map<String, Object> attributes = new HashMap<String, Object>();

    public Update() {
    }

    public Update(String code, String message) {
        // Delegate to the full constructor; detail stays null.
        this(code, message, null);
    }

    public Update(String code, String message, String detail) {
        this.code = code;
        this.message = message;
        this.detail = detail;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public String getDetail() {
        return detail;
    }

    public void setDetail(String detail) {
        this.detail = detail;
    }

    public Map<String, Object> getAttributes() {
        return attributes;
    }

    public void setAttributes(Map<String, Object> attributes) {
        this.attributes = attributes;
    }
}
|
import pytest
from alexandria.tools.skip_graph import validate_graph
from alexandria.skip_graph import SGNode, LocalGraph, NotFound, LEFT, RIGHT
@pytest.mark.trio
async def test_insert_far_right():
    """Insert a key greater than every existing key (to the anchor's right)."""
    anchor = SGNode(0)
    graph = LocalGraph(anchor)
    node = await graph.insert(1)
    assert node.key == 1
    # Level 0: linked back to the anchor, nothing further to the right.
    assert node.get_neighbor(0, LEFT) == anchor.key
    assert node.get_neighbor(0, RIGHT) is None
    # At the node's top level it has no neighbors at all.
    assert node.get_neighbor(node.max_level, LEFT) is None
    assert node.get_neighbor(node.max_level, RIGHT) is None
    validate_graph(graph)
@pytest.mark.trio
async def test_insert_sequential_to_the_correct_in_order():
    """Insert ascending keys and verify the level-0 doubly-linked ordering."""
    anchor = SGNode(0)
    graph = LocalGraph(anchor)
    node_1, node_2, node_3 = tuple([
        await graph.insert(key) for key in (1, 2, 3)
    ])
    validate_graph(graph)
    # The level-0 chain should read 0 <-> 1 <-> 2 <-> 3.
    assert anchor.get_neighbor(0, LEFT) is None
    assert anchor.get_neighbor(0, RIGHT) == 1
    assert node_1.get_neighbor(0, LEFT) == 0
    assert node_1.get_neighbor(0, RIGHT) == 2
    assert node_2.get_neighbor(0, LEFT) == 1
    assert node_2.get_neighbor(0, RIGHT) == 3
    assert node_3.get_neighbor(0, LEFT) == 2
    assert node_3.get_neighbor(0, RIGHT) is None
@pytest.mark.trio
async def test_insert_sequential_to_the_correct_mixed_order():
    """The level-0 ordering must be the same regardless of insertion order."""
    anchor = SGNode(0)
    graph = LocalGraph(anchor)
    node_3, node_1, node_2 = tuple([
        await graph.insert(key) for key in (3, 1, 2)
    ])
    validate_graph(graph)
    # Same expected chain as the in-order test: 0 <-> 1 <-> 2 <-> 3.
    assert anchor.get_neighbor(0, LEFT) is None
    assert anchor.get_neighbor(0, RIGHT) == 1
    assert node_1.get_neighbor(0, LEFT) == 0
    assert node_1.get_neighbor(0, RIGHT) == 2
    assert node_2.get_neighbor(0, LEFT) == 1
    assert node_2.get_neighbor(0, RIGHT) == 3
    assert node_3.get_neighbor(0, LEFT) == 2
    assert node_3.get_neighbor(0, RIGHT) is None
@pytest.mark.trio
async def test_insert_far_left():
    """Insert a key smaller than every existing key (to the anchor's left)."""
    anchor = SGNode(1)
    graph = LocalGraph(anchor)
    node = await graph.insert(0)
    assert node.key == 0
    # Level 0: anchor to the right, nothing further to the left.
    assert node.get_neighbor(0, RIGHT) == anchor.key
    assert node.get_neighbor(0, LEFT) is None
    validate_graph(graph)
#
# Search
#
@pytest.mark.trio
async def test_search():
    """Search finds present keys and raises NotFound for absent ones,
    independently of where the graph cursor currently points."""
    anchor = SGNode(0)
    graph = LocalGraph(anchor)
    for key in range(5, 100, 5):
        result = await graph.insert(key)
        assert result.key == key
    validate_graph(graph)
    assert (await graph.search(0)).key == 0
    node_5 = await graph.search(5)
    assert node_5.key == 5
    with pytest.raises(NotFound):
        await graph.search(6)
    # Move the search cursor off the anchor; misses on either side of the
    # cursor must still raise.
    graph.cursor = node_5
    with pytest.raises(NotFound):
        await graph.search(6)
    with pytest.raises(NotFound):
        await graph.search(4)
    node_80 = await graph.search(80)
    assert node_80.key == 80
#
# Delete
#
@pytest.mark.parametrize(
    'key_order',
    (
        # Deletion orders strictly right of, strictly left of, and
        # straddling the anchor key (4), plus a shuffled mixture.
        (7, 6, 5),
        (5, 6, 7),
        (3, 2, 1),
        (1, 2, 3),
        (3, 5, 7, 1, 2, 6),
    ),
)
@pytest.mark.trio
async def test_delete(key_order):
    """Delete keys one at a time: each must disappear from search, and the
    graph must stay structurally valid after every deletion."""
    anchor = SGNode(4)
    graph = LocalGraph(anchor)
    for key in sorted(key_order):
        await graph.insert(key)
    validate_graph(graph)
    # Every inserted key should be present in the backing store.
    assert all(key in graph.db._db for key in key_order)
    for key in key_order:
        await graph.search(key)
        await graph.delete(key)
        with pytest.raises(NotFound):
            await graph.search(key)
        validate_graph(graph)
@pytest.mark.trio
async def test_graph_forward_iteration():
    """iter_keys() walks keys ascending: `start` is inclusive (rounded up to
    the next present key) and `end` is exclusive."""
    graph = LocalGraph(SGNode(1))
    for key in (3, 5):
        await graph.insert(key)
    assert tuple([key async for key in graph.iter_keys()]) == (1, 3, 5)
    assert tuple([key async for key in graph.iter_keys(start=1)]) == (1, 3, 5)
    assert tuple([key async for key in graph.iter_keys(start=2)]) == (3, 5)
    assert tuple([key async for key in graph.iter_keys(start=3)]) == (3, 5)
    assert tuple([key async for key in graph.iter_keys(start=4)]) == (5,)
    assert tuple([key async for key in graph.iter_keys(start=5)]) == (5,)
    assert tuple([key async for key in graph.iter_keys(start=6)]) == ()
    assert tuple([key async for key in graph.iter_keys(end=10)]) == (1, 3, 5)
    assert tuple([key async for key in graph.iter_keys(start=1, end=10)]) == (1, 3, 5)
    assert tuple([key async for key in graph.iter_keys(start=1, end=5)]) == (1, 3)
    assert tuple([key async for key in graph.iter_keys(start=1, end=4)]) == (1, 3)
    assert tuple([key async for key in graph.iter_keys(start=1, end=3)]) == (1,)
    assert tuple([key async for key in graph.iter_keys(start=2, end=3)]) == ()
@pytest.mark.trio
async def test_graph_reverse_iteration():
    """When start > end, iter_keys() walks keys descending; `start` remains
    inclusive and `end` remains exclusive."""
    graph = LocalGraph(SGNode(1))
    for key in (3, 5):
        await graph.insert(key)
    assert tuple([key async for key in graph.iter_keys(start=10, end=5)]) == ()
    assert tuple([key async for key in graph.iter_keys(start=10, end=4)]) == (5,)
    assert tuple([key async for key in graph.iter_keys(start=10, end=3)]) == (5,)
    assert tuple([key async for key in graph.iter_keys(start=10, end=2)]) == (5, 3)
    assert tuple([key async for key in graph.iter_keys(start=5, end=3)]) == (5,)
    assert tuple([key async for key in graph.iter_keys(start=5, end=2)]) == (5, 3)
    assert tuple([key async for key in graph.iter_keys(start=5, end=1)]) == (5, 3)
    assert tuple([key async for key in graph.iter_keys(start=5, end=0)]) == (5, 3, 1)
    assert tuple([key async for key in graph.iter_keys(start=4, end=0)]) == (3, 1)
    assert tuple([key async for key in graph.iter_keys(start=3, end=0)]) == (3, 1)
    assert tuple([key async for key in graph.iter_keys(start=2, end=0)]) == (1,)
    assert tuple([key async for key in graph.iter_keys(start=1, end=0)]) == (1,)
|
#!/bin/bash
# Build and start the mock backend containers used for testing.

# Guard the pushd: running docker-compose in the wrong directory would be
# worse than failing fast.
pushd ci/mock-backends || exit 1

# Probe the Docker daemon first so we can fail with a helpful message.
# Test the command directly instead of inspecting $?, and silence the
# probe's stderr as well as its stdout.
if docker ps > /dev/null 2>&1; then
    echo "Building and starting Docker containers for testing"
else
    echo
    echo "Cannot run docker-compose commands. "
    echo "Please install docker-compose or give this user access to run it"
    popd
    # 'exit -1' is not a portable exit status; use 1 for generic failure.
    exit 1
fi

docker-compose build
docker-compose up -d
docker ps

popd
<gh_stars>0
/*
* Copyright [2020-2030] [https://www.stylefeng.cn]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Guns采用APACHE LICENSE 2.0开源协议,您在使用过程中,需要注意以下几点:
*
* 1.请不要删除和修改根目录下的LICENSE文件。
* 2.请不要删除和修改Guns源码头部的版权声明。
* 3.请保留源码和相关描述文件的项目出处,作者声明等。
* 4.分发源码时候,请注明软件出处 https://gitee.com/stylefeng/guns
* 5.在修改包名,模块名称,项目代码等时,请注明软件出处 https://gitee.com/stylefeng/guns
* 6.若您的项目无法满足以上几点,可申请商业授权
*/
package cn.stylefeng.roses.kernel.db.api.expander;
import cn.hutool.core.util.RandomUtil;
import cn.stylefeng.roses.kernel.config.api.context.ConfigContext;
import cn.stylefeng.roses.kernel.db.api.constants.DbConstants;
import lombok.extern.slf4j.Slf4j;
/**
 * Configuration accessors for the Druid data source and its monitoring
 * console. Values come from the system config table, with library defaults
 * as fallbacks.
 *
 * @author fengshuonan
 * @date 2021/1/10 11:32
 */
@Slf4j
public class DruidConfigExpander {

    /**
     * Reads a value from the system config table, falling back to the given
     * default when the key is not configured.
     */
    private static String sysConfig(String configName, String defaultValue) {
        return ConfigContext.me().getSysConfigValueWithDefault(configName, String.class, defaultValue);
    }

    /** URL mapping of the Druid monitoring console. */
    public static String getDruidUrlMappings() {
        return sysConfig("SYS_DRUID_URL_MAPPINGS", DbConstants.DEFAULT_DRUID_URL_MAPPINGS);
    }

    /** Account name for the Druid console. */
    public static String getDruidAdminAccount() {
        return sysConfig("SYS_DRUID_ACCOUNT", DbConstants.DEFAULT_DRUID_ADMIN_ACCOUNT);
    }

    /**
     * Password for the Druid console. When none is configured, a random
     * 20-character password is generated and written to the log.
     */
    public static String getDruidAdminPassword() {
        String sysDruidPassword = ConfigContext.me().getConfigValueNullable("SYS_DRUID_PASSWORD", String.class);
        if (sysDruidPassword != null) {
            return sysDruidPassword;
        }
        // Not configured: generate a random password and log it.
        String randomString = RandomUtil.randomString(20);
        log.info("Druid密码未在系统配置表设置,Druid密码为:{}", randomString);
        return randomString;
    }

    /** Whether the Druid console statistics may be reset to zero ("true"/"false"). */
    public static String getDruidAdminResetFlag() {
        return sysConfig("SYS_DRUID_RESET_ENABLE", DbConstants.DEFAULT_DRUID_ADMIN_RESET_ENABLE);
    }

    /** URL pattern intercepted by the Druid web-stat filter. */
    public static String getDruidAdminWebStatFilterUrlPattern() {
        return sysConfig("SYS_DRUID_WEB_STAT_FILTER_URL_PATTERN", DbConstants.DRUID_WEB_STAT_FILTER_URL_PATTERN);
    }

    /** Exclusion expressions for the Druid web-stat filter. */
    public static String getDruidAdminWebStatFilterExclusions() {
        return sysConfig("SYS_DRUID_WEB_STAT_FILTER_EXCLUSIONS", DbConstants.DRUID_WEB_STAT_FILTER_EXCLUSIONS);
    }

    /** Switch for session statistics in the Druid web-stat filter. */
    public static String getDruidAdminWebStatFilterSessionStatEnable() {
        return sysConfig("SYS_DRUID_WEB_STAT_FILTER_SESSION_STAT_ENABLE", DbConstants.DRUID_WEB_STAT_FILTER_SESSION_STAT_ENABLE);
    }

    /** Principal session attribute name used by the Druid web-stat filter. */
    public static String getDruidAdminWebStatFilterSessionName() {
        return sysConfig("SYS_DRUID_WEB_STAT_FILTER_PRINCIPAL_SESSION_NAME", DbConstants.DRUID_WEB_STAT_FILTER_PRINCIPAL_SESSION_NAME);
    }

    /** Maximum number of sessions monitored by the Druid web-stat filter. */
    public static String getDruidAdminWebStatFilterSessionStatMaxCount() {
        return sysConfig("SYS_DRUID_WEB_STAT_FILTER_SESSION_STAT_MAX_COUNT", DbConstants.DRUID_WEB_STAT_FILTER_SESSION_STAT_MAX_COUNT);
    }

    /** Principal cookie name used by the Druid web-stat filter. */
    public static String getDruidAdminWebStatFilterPrincipalCookieName() {
        return sysConfig("SYS_DRUID_WEB_STAT_FILTER_PRINCIPAL_COOKIE_NAME", DbConstants.DRUID_WEB_STAT_FILTER_PRINCIPAL_COOKIE_NAME);
    }

    /** Whether to record the per-URL SQL list (profiling) in the web-stat filter. */
    public static String getDruidAdminWebStatFilterProfileEnable() {
        return sysConfig("SYS_DRUID_WEB_STAT_FILTER_PROFILE_ENABLE", DbConstants.DRUID_WEB_STAT_FILTER_PROFILE_ENABLE);
    }
}
|
#!/bin/bash
# Build the liveness demo image, run it, and probe its HTTP endpoints.

image="mario21ic/liveness:java-v1"

echo "## build ##"
docker build -t "$image" .

echo "## run ##"
docker run -d -p 8080:8080 "$image"

echo "## curl ##"
curl localhost:8080/

echo "## liveness ##"
curl localhost:8080/liveness

echo "## readiness ##"
curl localhost:8080/readiness
|
#!/bin/sh
# Rewrite a file in place with ed (which also ensures it ends in a newline).

if [ $# -ne 1 ]; then
    echo "Usage: $0 <file>"
    exit 1
fi

# The original used the bash-only here-string `<<< $'w'`, which fails under
# a strictly POSIX /bin/sh. Feed the 'w' (write) command to ed portably,
# and quote the file name so paths containing spaces work.
printf 'w\n' | ed -s "$1"
|
<reponame>dan-seol/C<filename>filesortingdave.c<gh_stars>0
/* FILE: sorting_excersize_starter_dave.c
*
* Please fill this in with some code to sort. You can use the
* linear sort algorithm suggested on the Lecture 6 slides
* or something you learned in a previous course.
* Good luck!
*
* Author: <NAME>
* Date: Sept 19, 2018
*/
#include <stdio.h>
int main(){
int array[4] = {3,1,4,2};
// Write your code here, try to sort the array
for(int pos=0; pos<3; pos++){
int val_here = array[pos];
int min_over_rest = 9999999;
int min_pos = -1;
for(int next_pos = pos+1; next_pos <4; next_pos++){
if(array[next_pos] < min_over_rest){
min_over_rest = array[next_pos];
min_pos = next_pos;
}
}
if(min_over_rest < val_here){
array[min_pos] = val_here;
array[pos] = min_over_rest;
}
}
printf( "The array holds: " );
for( int i=0; i<4; i++ )
printf( "%d ", array[i] );
printf("\n");
printf( "If you coded it correctly, you should see 1 2 3 4\n");
return 0;
}
|
<gh_stars>1-10
package com.vxml.tag;
import org.w3c.dom.Node;
/**
 * Tag handler for a VXML item element.
 *
 * NOTE(review): the strings extracted in execute() are never used — this
 * looks like scaffolding; confirm the intended behavior before relying on it.
 */
public class ItemTag extends AbstractTag {

    public ItemTag(Node node) {
        super(node);
    }

    @Override
    public void execute() {
        try {
            // Text of the first child node — presumably the item's input text.
            String input = getNode().getFirstChild().getTextContent();
            // Text of child node at index 1 — presumably what the input maps to.
            String inputMapping = getNode().getChildNodes().item(1).getTextContent();
        } catch (Exception e) {
            // TODO Auto-generated catch block
            // Broad catch: a missing child throws (e.g. NullPointerException)
            // and is only printed, never propagated.
            e.printStackTrace();
        }
    }
}
|
#!/bin/bash
# Copyright 2016 Crunchy Data Solutions, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Each manual is rendered twice: to HTML via asciidoc (bootstrap backend,
# shared demo.conf) and to PDF via asciidoctor-pdf. The six per-document
# command pairs differed only in the file name (plus an extra "footer"
# attribute for "standalone"), so the repetition is factored into a helper.
#
# Usage: build_doc NAME [extra asciidoc options...]
build_doc() {
    name="$1"
    shift
    asciidoc \
        -b bootstrap \
        -f ./demo.conf \
        -o "./htmldoc/${name}.html" \
        -a toc2 \
        "$@" \
        -a toc-placement=right \
        "./${name}.asciidoc"
    asciidoctor-pdf "./${name}.asciidoc" --out-file "./pdf/${name}.pdf"
}

build_doc standalone -a footer
build_doc openshift
build_doc install
build_doc metrics
build_doc containers
build_doc kube
|
/**
* Copyright 2015 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Viewer} from '../../src/viewer';
import {platform} from '../../src/platform';
// Unit tests for src/viewer.js. The Viewer is constructed against a minimal
// fake window object; startup parameters arrive via window.name (prefixed
// with '__AMP__') and the location hash, and runtime updates arrive through
// receiveMessage(). Several tests poke the private messageQueue_ field to
// observe outbound events before a message deliverer is attached.
describe('Viewer', () => {

  let sandbox;
  let windowMock;
  let viewer;
  let windowApi;

  beforeEach(() => {
    sandbox = sinon.sandbox.create();
    // Minimal fake window: the Viewer only needs setTimeout and location.
    const WindowApi = function() {};
    WindowApi.prototype.setTimeout = function(callback, delay) {};
    windowApi = new WindowApi();
    windowApi.location = {hash: '', href: '/test/viewer'};
    windowMock = sandbox.mock(windowApi);
    viewer = new Viewer(windowApi);
  });

  afterEach(() => {
    viewer = null;
    // verify() asserts any expectations set on the window mock.
    windowMock.verify();
    windowMock = null;
    sandbox.restore();
    sandbox = null;
  });

  it('should configure as natural viewport by default', () => {
    expect(viewer.getViewportType()).to.equal('natural');
    expect(viewer.getViewportWidth()).to.equal(0);
    expect(viewer.getViewportHeight()).to.equal(0);
    expect(viewer.getScrollTop()).to.equal(0);
    expect(viewer.getPaddingTop()).to.equal(0);
  });

  it('should configure correctly based on window name and hash', () => {
    // Hash params override window-name params (width: 111 beats 222).
    windowApi.name = '__AMP__viewportType=virtual&width=222&height=333' +
        '&scrollTop=15';
    windowApi.location.hash = '#width=111&paddingTop=17&other=something';
    windowApi.document = {body: {style: {}}};
    const viewer = new Viewer(windowApi);
    expect(viewer.getViewportType()).to.equal('virtual');
    expect(viewer.getViewportWidth()).to.equal(111);
    expect(viewer.getViewportHeight()).to.equal(333);
    expect(viewer.getScrollTop()).to.equal(15);
    expect(viewer.getPaddingTop()).to.equal(17);
    // All of the startup params are also available via getParam.
    expect(viewer.getParam('paddingTop')).to.equal('17');
    expect(viewer.getParam('width')).to.equal('111');
    expect(viewer.getParam('other')).to.equal('something');
  });

  it('should configure visibilityState visible by default', () => {
    expect(viewer.getVisibilityState()).to.equal('visible');
    expect(viewer.isVisible()).to.equal(true);
    expect(viewer.getPrerenderSize()).to.equal(1);
  });

  it('should configure visibilityState and prerender', () => {
    windowApi.location.hash = '#visibilityState=hidden&prerenderSize=3';
    const viewer = new Viewer(windowApi);
    expect(viewer.getVisibilityState()).to.equal('hidden');
    expect(viewer.isVisible()).to.equal(false);
    expect(viewer.getPrerenderSize()).to.equal(3);
  });

  it('should configure correctly for iOS embedding', () => {
    // Embedded: parent differs from the window itself.
    windowApi.name = '__AMP__viewportType=natural';
    windowApi.parent = {};
    const body = {style: {}};
    const documentElement = {style: {}};
    windowApi.document = {body: body, documentElement: documentElement};
    sandbox.mock(platform).expects('isIos').returns(true).once();
    const viewer = new Viewer(windowApi);
    expect(viewer.getViewportType()).to.equal('natural-ios-embed');
  });

  it('should NOT configure for iOS embedding if not embedded', () => {
    // Not embedded: parent === window, or no parent at all.
    windowApi.name = '__AMP__viewportType=natural';
    windowApi.parent = windowApi;
    const body = {style: {}};
    const documentElement = {style: {}};
    windowApi.document = {body: body, documentElement: documentElement};
    sandbox.mock(platform).expects('isIos').returns(true).once();
    expect(new Viewer(windowApi).getViewportType()).to.equal('natural');
    windowApi.parent = null;
    expect(new Viewer(windowApi).getViewportType()).to.equal('natural');
  });

  it('should receive viewport event', () => {
    let viewportEvent = null;
    viewer.onViewportEvent(event => {
      viewportEvent = event;
    });
    viewer.receiveMessage('viewport', {
      scrollTop: 11,
      scrollLeft: 12,
      width: 13,
      height: 14,
      paddingTop: 19
    });
    expect(viewportEvent).to.not.equal(null);
    expect(viewer.getScrollTop()).to.equal(11);
    expect(viewer.getViewportWidth()).to.equal(13);
    expect(viewer.getViewportHeight()).to.equal(14);
    expect(viewer.getPaddingTop()).to.equal(19);
  });

  it('should receive visibilitychange event', () => {
    let visEvent = null;
    viewer.onVisibilityChanged(event => {
      visEvent = event;
    });
    viewer.receiveMessage('visibilitychange', {
      state: 'other',
      prerenderSize: 4
    });
    expect(visEvent).to.not.equal(null);
    // Any state other than 'visible' reports isVisible() === false.
    expect(viewer.getVisibilityState()).to.equal('other');
    expect(viewer.isVisible()).to.equal(false);
    expect(viewer.getPrerenderSize()).to.equal(4);
  });

  it('should post documentLoaded event', () => {
    viewer.postDocumentReady(11, 12);
    const m = viewer.messageQueue_[0];
    expect(m.eventType).to.equal('documentLoaded');
    expect(m.data.width).to.equal(11);
    expect(m.data.height).to.equal(12);
  });

  it('should post documentResized event', () => {
    viewer.postDocumentResized(13, 14);
    const m = viewer.messageQueue_[0];
    expect(m.eventType).to.equal('documentResized');
    expect(m.data.width).to.equal(13);
    expect(m.data.height).to.equal(14);
  });

  it('should post request/cancelFullOverlay event', () => {
    viewer.requestFullOverlay();
    viewer.cancelFullOverlay();
    expect(viewer.messageQueue_[0].eventType).to.equal('requestFullOverlay');
    expect(viewer.messageQueue_[1].eventType).to.equal('cancelFullOverlay');
  });

  it('should queue non-dupe events', () => {
    // A repeated eventType replaces the queued entry instead of appending.
    viewer.postDocumentReady(11, 12);
    viewer.postDocumentResized(13, 14);
    viewer.postDocumentResized(15, 16);
    expect(viewer.messageQueue_.length).to.equal(2);
    expect(viewer.messageQueue_[0].eventType).to.equal('documentLoaded');
    const m = viewer.messageQueue_[1];
    expect(m.eventType).to.equal('documentResized');
    expect(m.data.width).to.equal(15);
    expect(m.data.height).to.equal(16);
  });

  it('should dequeue events when deliverer set', () => {
    // Queued messages are flushed as soon as a deliverer is attached.
    viewer.postDocumentReady(11, 12);
    viewer.postDocumentResized(13, 14);
    expect(viewer.messageQueue_.length).to.equal(2);
    const delivered = [];
    viewer.setMessageDeliverer((eventType, data) => {
      delivered.push({eventType: eventType, data: data});
    });
    expect(viewer.messageQueue_.length).to.equal(0);
    expect(delivered.length).to.equal(2);
    expect(delivered[0].eventType).to.equal('documentLoaded');
    expect(delivered[0].data.width).to.equal(11);
    expect(delivered[1].eventType).to.equal('documentResized');
    expect(delivered[1].data.width).to.equal(13);
  });
});
|
#!/bin/bash
# Run dieharder statistical test number 2 (-d 2) against built-in
# generator 0 (-g 0) with a fixed seed (-S) so the run is reproducible.
dieharder -d 2 -g 0 -S 564897870
|
// app/routes/tweet.js
module.exports = function(app,tweetRoutes) {
// used to create, sign, and verify tokens
var jwt = require('jsonwebtoken'); //https://npmjs.org/package/node-jsonwebtoken
var expressJwt = require('express-jwt'); //https://npmjs.org/package/express-jwt
var async = require('async');
var Tweet = require('../models/tweet'); // get our mongoose model
var User = require('../models/user');
var Word = require('../models/word');
tweetRoutes.get('/dual', function(req, res) {
Tweet.find(function(err, tweets) {
if (err)
res.send(err);
console.log(tweets);
res.json(tweets);
});
});
// http://localhost:8080/api/users/:username/tweets
tweetRoutes.route('/users/:username/tweets')
// get all the tweets
.get(function(req, res) {
Tweet.find({user_id: req.user._id}, function(err, tweets) {
console.log(req.user._id);
if (err)
res.send(err);
res.json(tweets);
});
})
// create a tweet
.post(function(req, res) {
var content = req.body.content;
var scs = true;
var msg = 'Tweet created!';
var words = content.split(' '); // separate by space
words = words.filter(function(value){return value!='';}); // remove extra spaces
console.log(words);
size = words.length;
// Array to hold async tasks
var asyncTasks = [];
// Loop through words
words.forEach(function(word){
// We don't actually execute the async action here
// We add a function containing it to an array of "tasks"
asyncTasks.push(function(callback){
// Call an async function, often a save() to DB
Word.findOne({text: word}, function(err, w) {
if (err)
res.send(err);
// if word is found
console.log(w);
if (w == null){
//console.log(msg);
scs = false;
msg = "Inexistent word(s) in tweet";
}
callback();
});
});
});
// At this point, nothing has been executed.
// We just pushed all the async tasks into an array.
// Then, whe add another task after iterations
asyncTasks.push(function(callback){
// Set a timeout for 3 seconds
if (scs){
var tweet = new Tweet(); // creating tweet
tweet.user_id = req.user._id;
tweet.content = words.join(' ');
tweet.date = Date.now();
tweet.save(function(err) {
if (err)
res.send(err);
});
}
callback();
});
// Now we have an array of functions doing async tasks
// Execute all async tasks in the asyncTasks array
async.series(asyncTasks, function(){
// All tasks are done now
res.json({ success: scs, message: msg });
});
});
// on routes that end in /users/:username/tweets/:tweet_id
// ----------------------------------------------------
tweetRoutes.route('/users/:username/tweets/:tweet_id')
// get the tweet with that id (accessed at GET http://localhost:8080/api/users/:username/tweets/:tweet_id)
.get(function(req, res) {
Tweet.findById(req.params.tweet_id, function(err, tweet) {
if (err)
res.send(err);
res.json(tweet);
});
})
//*
// update the tweet with this id (accessed at PUT http://localhost:8080/api/users/:username/tweets/:tweet_id)
.put(function(req, res) {
var content = req.body.content;
var scs = true;
var msg = 'Tweet updated!';
var words = content.split(' '); // separate by space
words = words.filter(function(value){return value!='';}); // remove extra spaces
console.log(words);
size = words.length;
// Array to hold async tasks
var asyncTasks = [];
// Loop through words
words.forEach(function(word){
// We don't actually execute the async action here
// We add a function containing it to an array of "tasks"
asyncTasks.push(function(callback){
// Call an async function, often a save() to DB
Word.findOne({text: word}, function(err, w) {
if (err)
res.send(err);
// if word is found
console.log(w);
if (w == null){
//console.log(msg);
scs = false;
msg = "Inexistent word(s) in tweet";
}
callback();
});
});
});
// At this point, nothing has been executed.
// We just pushed all the async tasks into an array.
// Then, whe add another task after iterations
asyncTasks.push(function(callback){
if (scs){
// finding tweet
Tweet.findById(req.params.tweet_id, function(err, tweet) {
if (err)
res.send(err);
tweet.content = words.join(' ');
// save the tweet
tweet.save(function(err) {
if (err)
res.send(err);
});
});
}
callback();
});
// Now we have an array of functions doing async tasks
// Execute all async tasks in the asyncTasks array
async.series(asyncTasks, function(){
// All tasks are done now
res.json({ success: scs, message: msg });
});
})
//*/
// delete the tweet with this id (accessed at DELETE http://localhost:8080/api/users/:username/tweets/:tweet_id)
.delete(function(req, res) {
Tweet.remove({
_id: req.params.tweet_id
}, function(err, tweet) {
if (err)
res.send(err);
res.json({ message: 'Tweet successfully deleted' });
});
});
// We are going to protect /api/words routes with JWT
app.use('/api/dual', expressJwt({secret: app.get('superSecret')}));
//app.use('/api/users/:username/tweets', expressJwt({secret: app.get('superSecret')}));
} |
#!/usr/bin/env bash
set -ex
# Synthesize blinky.v for a Xilinx 7-series target with yosys (abc9 flow)
# and write the netlist as JSON for nextpnr.
yosys -p "synth_xilinx -flatten -abc9 -arch xc7 -top top; write_json blinky.json" blinky.v
# Place and route with nextpnr-xilinx against the xc7z020 chip database and
# the board's XDC constraints; emit a routed netlist and FASM output.
../../../nextpnr-xilinx --chipdb ../../xc7z020.bin --xdc artyz7.xdc --json blinky.json --write blinky_routed.json --fasm blinky.fasm
# Load the Project X-Ray environment (provides XRAY_UTILS_DIR / XRAY_TOOLS_DIR).
source "${XRAY_DIR}/utils/environment.sh"
# Convert FASM to configuration frames, then frames to a .bit bitstream
# for the xc7z020clg400-1 (Zynq-7020) part.
${XRAY_UTILS_DIR}/fasm2frames.py --part xc7z020clg400-1 --db-root ${XRAY_UTILS_DIR}/../database/zynq7 blinky.fasm > blinky.frames
${XRAY_TOOLS_DIR}/xc7frames2bit --part_file ${XRAY_UTILS_DIR}/../database/zynq7/xc7z020clg400-1/part.yaml --part_name xc7z020clg400-1 --frm_file blinky.frames --output_file blinky.bit
|
#!/usr/bin/env bash
#
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Traffic-shaping script: caps outbound TCP traffic on port 2390 to ${LIMIT}
# using an HTB qdisc. iptables/ip6tables mangle rules mark matching packets,
# and tc filters steer marked packets into the rate-limited class.
# Traffic to/from the local networks is exempt. Must run as root.
export LC_ALL=C
#network interface on which to limit traffic
IF="eth0"
#limit of the network interface in question
LINKCEIL="1gbit"
#limit outbound Ubiquitium protocol traffic to this rate
LIMIT="160kbit"
#defines the IPv4 address space for which you wish to disable rate limiting
LOCALNET_V4="192.168.0.0/16"
#defines the IPv6 address space for which you wish to disable rate limiting
LOCALNET_V6="fe80::/10"
#delete existing rules (fails harmlessly when no qdisc is installed yet)
tc qdisc del dev ${IF} root
#add root class
tc qdisc add dev ${IF} root handle 1: htb default 10
#add parent class
tc class add dev ${IF} parent 1: classid 1:1 htb rate ${LINKCEIL} ceil ${LINKCEIL}
#add our two classes. one unlimited (1:10, the default), another limited (1:11)
tc class add dev ${IF} parent 1:1 classid 1:10 htb rate ${LINKCEIL} ceil ${LINKCEIL} prio 0
tc class add dev ${IF} parent 1:1 classid 1:11 htb rate ${LIMIT} ceil ${LIMIT} prio 1
#add handles to our classes so packets marked with <x> go into the class with "... handle <x> fw ..."
tc filter add dev ${IF} parent 1: protocol ip prio 1 handle 1 fw classid 1:10
tc filter add dev ${IF} parent 1: protocol ip prio 2 handle 2 fw classid 1:11
if [ ! -z "${LOCALNET_V6}" ] ; then
    # v6 cannot have the same priority value as v4
    tc filter add dev ${IF} parent 1: protocol ipv6 prio 3 handle 1 fw classid 1:10
    tc filter add dev ${IF} parent 1: protocol ipv6 prio 4 handle 2 fw classid 1:11
fi
#delete any existing rules
#disable for now
#ret=0
#while [ $ret -eq 0 ]; do
#    iptables -t mangle -D OUTPUT 1
#    ret=$?
#done
#limit outgoing traffic to and from port 2390. but not when dealing with a host on the local network
#   (defined by $LOCALNET_V4 and $LOCALNET_V6)
#   --set-mark marks packages matching these criteria with the number "2" (v4)
#   --set-mark marks packages matching these criteria with the number "4" (v6)
#   these packets are filtered by the tc filter with "handle 2"
#   this filter sends the packages into the 1:11 class, and this class is limited to ${LIMIT}
iptables -t mangle -A OUTPUT -p tcp -m tcp --dport 2390 ! -d ${LOCALNET_V4} -j MARK --set-mark 0x2
iptables -t mangle -A OUTPUT -p tcp -m tcp --sport 2390 ! -d ${LOCALNET_V4} -j MARK --set-mark 0x2
if [ ! -z "${LOCALNET_V6}" ] ; then
    ip6tables -t mangle -A OUTPUT -p tcp -m tcp --dport 2390 ! -d ${LOCALNET_V6} -j MARK --set-mark 0x4
    ip6tables -t mangle -A OUTPUT -p tcp -m tcp --sport 2390 ! -d ${LOCALNET_V6} -j MARK --set-mark 0x4
fi
|
#!/bin/sh
# Start the levitas test server using the local "settings" module/file (-s).
# Path is relative: run from this script's directory.
../../levitas-testserver -s settings
package net.dean.jraw.models.meta;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * <p>Used for testing purposes. Signifies that a method retrieves data from a JSON node. For a given JsonModel, each
 * method annotated with this class will be invoked via reflection. If the method throws an exception of any kind
 * or the return value was null and {@link #nullable()} returns false, then the test will fail. Methods annotated with
 * this class must take zero parameters.
 */
@Retention(RetentionPolicy.RUNTIME) // must survive to runtime so tests can discover it reflectively
@Target(ElementType.METHOD)         // applicable to methods only
@Documented
public @interface JsonProperty {
    /**
     * Whether the return value of this method is allowed to be null.
     * Defaults to false, i.e. a null return fails the test.
     * @return If this method is allowed to return null
     */
    boolean nullable() default false;
}
|
import open3d as o3d
import copy
import numpy as np
# Helper visualization function
def draw_registration_result(source, target, transformation):
    """Visualize a pair of point clouds under a candidate alignment.

    Renders deep copies so the caller's clouds are left untouched: the
    source copy is painted yellow, the target copy cyan, and the source
    copy is moved by ``transformation`` (a 4x4 matrix) before display.
    """
    src_view, tgt_view = copy.deepcopy(source), copy.deepcopy(target)
    src_view.paint_uniform_color([1, 0.706, 0])
    tgt_view.paint_uniform_color([0, 0.651, 0.929])
    src_view.transform(transformation)
    o3d.visualization.draw_geometries([src_view, tgt_view])
# input: load the two demo point-cloud fragments to align
source = o3d.io.read_point_cloud("../test_data/icp/cloud_bin_0.pcd")
target = o3d.io.read_point_cloud("../test_data/icp/cloud_bin_1.pcd")
# Rough initial guess for the source->target transform (4x4 homogeneous matrix).
trans_init = np.asarray([[0.862, 0.011, -0.507, 0.5],
                         [-0.139, 0.967, -0.215, 0.7],
                         [0.487, 0.255, 0.835, -1.4], [0.0, 0.0, 0.0, 1.0]])
draw_registration_result(source, target, trans_init)
# init: report the fitness/RMSE of the initial alignment before running ICP
print("Initial alignment")
threshold = 0.02  # maximum correspondence-pair distance for ICP
evaluation = o3d.pipelines.registration.evaluate_registration(
    source, target, threshold, trans_init)
print(evaluation)
# point-to-point ICP with default convergence criteria
print("Apply point-to-point ICP")
reg_p2p = o3d.pipelines.registration.registration_icp(
    source, target, threshold, trans_init,
    o3d.pipelines.registration.TransformationEstimationPointToPoint())
print(reg_p2p)
print("Transformation is:")
print(reg_p2p.transformation)
draw_registration_result(source, target, reg_p2p.transformation)
# point-to-point ICP again, allowing up to 2000 iterations via max_iteration
reg_p2p = o3d.pipelines.registration.registration_icp(
    source, target, threshold, trans_init,
    o3d.pipelines.registration.TransformationEstimationPointToPoint(),
    o3d.pipelines.registration.ICPConvergenceCriteria(max_iteration=2000))
print(reg_p2p)
print("Transformation is:")
print(reg_p2p.transformation)
draw_registration_result(source, target, reg_p2p.transformation)
# point-to-plane ICP (presumably the input .pcd files carry normals — verify)
print("Apply point-to-plane ICP")
reg_p2l = o3d.pipelines.registration.registration_icp(
    source, target, threshold, trans_init,
    o3d.pipelines.registration.TransformationEstimationPointToPlane())
print(reg_p2l)
print("Transformation is:")
print(reg_p2l.transformation)
draw_registration_result(source, target, reg_p2l.transformation)
<reponame>bonitasoft-labs/bpmn-js<filename>src/component/mxgraph/style/identifiers.ts<gh_stars>1-10
/**
* Copyright 2021 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Define BPMN specific keys used in mxGraph styles. Use constants defined in this class instead of hard coded string values.
 * @category BPMN Theme
 * @experimental You may use this to customize the BPMN theme as proposed in the examples. But be aware that the way we store and allow to change the defaults is subject to change.
 */
export class BpmnStyleIdentifier {
  // edge: keys identifying edge styles and their marker fill colors
  static readonly EDGE = 'bpmn.edge';
  static readonly EDGE_START_FILL_COLOR = 'bpmn.edge.startFillColor';
  static readonly EDGE_END_FILL_COLOR = 'bpmn.edge.endFillColor';
  // kind: keys carrying the BPMN element kind of a styled cell
  static readonly EVENT_BASED_GATEWAY_KIND = 'bpmn.gatewayKind';
  static readonly EVENT_DEFINITION_KIND = 'bpmn.eventDefinitionKind';
  static readonly GLOBAL_TASK_KIND = 'bpmn.globalTaskKind';
  static readonly SUB_PROCESS_KIND = 'bpmn.subProcessKind';
  // state: boolean-valued flags describing the BPMN element state
  static readonly IS_INITIATING = 'bpmn.isInitiating';
  static readonly IS_INSTANTIATING = 'bpmn.isInstantiating';
  static readonly IS_INTERRUPTING = 'bpmn.isInterrupting';
  // other identifiers
  static readonly EXTRA_CSS_CLASSES = 'bpmn.extra.css.classes';
  static readonly MARKERS = 'bpmn.markers';
  static readonly MESSAGE_FLOW_ICON = 'bpmn.messageFlowIcon';
}
/**
 * Keys identifying custom edge markers registered with mxGraph.
 * @category BPMN Theme
 * @experimental You may use this to customize the BPMN theme as proposed in the examples. But be aware that the way we store and allow to change the defaults is subject to change.
 */
export class MarkerIdentifier {
  // dashed-arrow marker used on BPMN edges
  static readonly ARROW_DASH = 'bpmn.dash';
}
|
#! /bin/sh
# Run the project test suite; -e aborts on the first failure, -x echoes commands.
set -ex
./scripts/tests.sh
|
const sqlite3 = require('sqlite3');
module.exports = (req, res) => {
const { keywords } = req.query;
const db = new sqlite3.Database('db/links.db', sqlite3.OPEN_READONLY, (err) => {
if (err) return console.error(err.message);
console.log('\nConnected');
});
const sql =
`SELECT
*
FROM
links_saved
WHERE
url LIKE '%${keywords}%' OR
title LIKE '%${keywords}%' OR
description LIKE '%${keywords}%'
ORDER BY id DESC`;
db.all(sql, [], (err, rows) => {
if (err) return console.log(err);
return res.render('search', { rows, keywords });
});
db.close((err) => {
if (err) return console.error(err.message);
console.log('Operation finished\n');
});
} |
<reponame>nokia/jspy<filename>src/spyGui/CommandComboBox.java<gh_stars>10-100
package spyGui;
import common.Utilities;
import javax.swing.*;
import java.util.ArrayList;
/**
 * Combo box presenting the user's command history. The most recently used
 * command is kept at the top of the list, and every change is persisted
 * through {@code Utilities.writeCommandHistory}.
 */
public class CommandComboBox extends JComboBox {
    private ArrayList<String> commands;

    public CommandComboBox() {
        setRenderer(new ComboToolTipRenderer());
        commands = Utilities.getCommandHistory();
        setItemsInCombo(commands);
    }

    /**
     * Record a command: de-duplicate it, move it to the front,
     * refresh the visible items and persist the updated history.
     */
    public void addCommand(String cmd) {
        // remove(Object) drops the first occurrence and is a no-op when absent,
        // equivalent to the indexOf/remove pair
        commands.remove(cmd);
        commands.add(0, cmd);
        setItemsInCombo(commands);
        Utilities.writeCommandHistory(commands.toArray());
    }

    /** Replace the combo's items with a blank entry followed by the history. */
    private void setItemsInCombo(ArrayList<String> cmds) {
        removeAllItems();
        addItem("");
        setSelectedIndex(0);
        for (String entry : cmds) {
            addItem(entry);
        }
    }
}
|
<reponame>MattPlays/MinecraftAPI<gh_stars>0
/**
 * Immutable-style value object pairing a player name with its id,
 * as returned by the UUID lookup endpoint.
 */
class UUIDResponse {
    /**
     * @param {string} name - the account name
     * @param {string} id - the matching identifier
     */
    constructor(name, id) {
        this.name = name;
        this.id = id;
    }
}

module.exports = UUIDResponse;
<reponame>tlaanemaa/tinioc-example
/**
* This is the interface for our Employee objects that we receive from the API in the employees client.
* Ideally this type should be together with incoming response validation schema,
* since that's what gives the response it's shape but that's out of scope for this example
* so it's just here.
*/
export interface IEmployee {
  /** Unique employee identifier. */
  id: number;
  /** Full display name. */
  employee_name: string;
  /** Salary figure as returned by the API (units not specified here). */
  employee_salary: number;
  /** Age in years. */
  employee_age: number;
  /** URL or path of the profile picture (may be empty). */
  profile_image: string;
}
|
<filename>stage0/stdlib/Lean/Meta/Match/CaseArraySizes.c
// Lean compiler output
// Module: Lean.Meta.Match.CaseArraySizes
// Imports: Init Lean.Meta.Tactic.Assert Lean.Meta.Match.CaseValues
#include <lean/lean.h>
#if defined(__clang__)
#pragma clang diagnostic ignored "-Wunused-parameter"
#pragma clang diagnostic ignored "-Wunused-label"
#elif defined(__GNUC__) && !defined(__CLANG__)
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wunused-label"
#pragma GCC diagnostic ignored "-Wunused-but-set-variable"
#endif
#ifdef __cplusplus
extern "C" {
#endif
lean_object* l_Lean_Meta_CaseArraySizesSubgoal_subst___default;
size_t l_USize_add(size_t, size_t);
extern lean_object* l_Array_term_____x5b___x3a___x5d___closed__2;
lean_object* l_Lean_Expr_mvarId_x21(lean_object*);
lean_object* l_Lean_Meta_withMVarContext___at_Lean_Meta_admit___spec__1___rarg(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
extern lean_object* l_Lean_Syntax_mkAntiquotNode___closed__3;
lean_object* l_Lean_stringToMessageData(lean_object*);
lean_object* lean_mk_empty_array_with_capacity(lean_object*);
lean_object* l_Lean_Meta_mkForallFVars(lean_object*, lean_object*, uint8_t, uint8_t, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_throwError___at_Lean_Meta_whnf___spec__1(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* lean_name_mk_string(lean_object*, lean_object*);
lean_object* lean_array_uget(lean_object*, size_t);
lean_object* l_Lean_Meta_mkAppM(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___lambda__1___boxed(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_CaseArraySizesSubgoal_diseqs___default;
lean_object* lean_array_uset(lean_object*, size_t, lean_object*);
extern lean_object* l_Array_empty___closed__1;
lean_object* l_Lean_Meta_withLocalDecl___at_Lean_Meta_substCore___spec__2___rarg(lean_object*, uint8_t, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_caseArraySizes___lambda__1___boxed(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_getMVarTag(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_caseArraySizes___lambda__1___closed__2;
lean_object* l_Lean_Meta_CaseArraySizesSubgoal_elems___default;
extern lean_object* l_Lean_Literal_type___closed__3;
lean_object* lean_array_push(lean_object*, lean_object*);
lean_object* lean_array_get_size(lean_object*);
lean_object* l_ReaderT_bind___at_Lean_Meta_instMonadLCtxMetaM___spec__2___rarg(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Array_mapIdxM_map___at_Lean_Meta_caseArraySizes___spec__3___lambda__1(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__2;
lean_object* l_Lean_Meta_caseArraySizes___lambda__1___closed__1;
lean_object* l_Lean_Expr_appArg_x21(lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__1;
uint8_t l_USize_decLt(size_t, size_t);
lean_object* l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__2(lean_object*, size_t, size_t, lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* lean_nat_add(lean_object*, lean_object*);
lean_object* l_Lean_mkAppN(lean_object*, lean_object*);
lean_object* l_Array_mapIdxM_map___at_Lean_Meta_caseArraySizes___spec__3(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, size_t, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_intro1Core(lean_object*, uint8_t, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* lean_array_fget(lean_object*, lean_object*);
lean_object* l_Lean_Meta_substCore(lean_object*, lean_object*, uint8_t, lean_object*, uint8_t, uint8_t, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
uint8_t lean_nat_dec_eq(lean_object*, lean_object*);
lean_object* l_Lean_Meta_getMVarType(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__3;
lean_object* l_Lean_Meta_caseArraySizes_match__1___rarg(lean_object*, lean_object*);
lean_object* lean_nat_sub(lean_object*, lean_object*);
lean_object* l_Lean_Meta_getArrayArgType(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_mkEqSymm(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* lean_array_get(lean_object*, lean_object*, lean_object*);
extern lean_object* l_Lean_Meta_caseValue___closed__2;
lean_object* l_Lean_Meta_getArrayArgType___closed__1;
lean_object* l_Lean_Expr_fvarId_x21(lean_object*);
lean_object* l_Lean_Meta_clear(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* lean_name_append_index_after(lean_object*, lean_object*);
lean_object* l_Lean_Meta_getArrayArgType___closed__2;
lean_object* l_Lean_Meta_mkArrayLit(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_instInhabitedCaseArraySizesSubgoal;
lean_object* l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__1(size_t, size_t, lean_object*);
lean_object* lean_array_to_list(lean_object*, lean_object*);
lean_object* l_Lean_Meta_mkDecideProof(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
uint8_t l_Lean_Expr_isAppOfArity(lean_object*, lean_object*, lean_object*);
extern lean_object* l_Lean_KernelException_toMessageData___closed__15;
lean_object* l_Lean_Meta_introNCore(lean_object*, lean_object*, lean_object*, uint8_t, uint8_t, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_caseArraySizes_match__2___rarg(lean_object*, lean_object*);
lean_object* l_Lean_mkFVar(lean_object*);
size_t lean_usize_of_nat(lean_object*);
extern lean_object* l_Lean_Syntax_mkAntiquotNode___closed__9;
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_match__1(lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___lambda__1(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_FVarSubst_get(lean_object*, lean_object*);
lean_object* l_Lean_Meta_getArrayArgType___lambda__1___boxed(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_assignExprMVar(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__2;
lean_object* l_Lean_Meta_assertExt(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_instInhabitedCaseArraySizesSubgoal___closed__1;
lean_object* l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__2___boxed(lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_caseArraySizes_match__2(lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__1;
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__4;
lean_object* l_Lean_Meta_caseArraySizes_match__3(lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_caseValues(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
extern lean_object* l_Lean_instInhabitedName;
lean_object* l_Lean_Meta_inferType(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_caseArraySizes___lambda__1___closed__3;
lean_object* l_Lean_Meta_caseArraySizes___lambda__1(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_getArrayArgType___lambda__1(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_caseArraySizes_match__1(lean_object*);
lean_object* l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__1___boxed(lean_object*, lean_object*, lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___lambda__2(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_whnfD(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
extern lean_object* l_Lean_mkOptionalNode___closed__2;
extern lean_object* l_Array_myMacro____x40_Init_Data_Array_Subarray___hyg_969____closed__10;
lean_object* l_Lean_mkNatLit(lean_object*);
lean_object* l_Lean_Meta_mkFreshExprSyntheticOpaqueMVar(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_unsafeCast(lean_object*, lean_object*, lean_object*);
lean_object* l_Array_mapIdxM_map___at_Lean_Meta_caseArraySizes___spec__3___boxed(lean_object**);
lean_object* l_Lean_Meta_mkEq(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_indentExpr(lean_object*);
lean_object* l_Lean_Meta_caseArraySizes(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
lean_object* l_Lean_Meta_caseArraySizes_match__3___rarg(lean_object*, lean_object*);
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_match__1___rarg(lean_object*, lean_object*);
lean_object* l_Lean_Meta_mkLt(lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*, lean_object*);
uint8_t lean_nat_dec_lt(lean_object*, lean_object*);
// Lean compiler output (see file header) — do not edit by hand.
// Default for CaseArraySizesSubgoal.elems: the shared empty-array constant.
static lean_object* _init_l_Lean_Meta_CaseArraySizesSubgoal_elems___default() {
_start:
{
lean_object* x_1;
x_1 = l_Array_empty___closed__1;
return x_1;
}
}
// Generated: default for CaseArraySizesSubgoal.diseqs — the shared empty array.
static lean_object* _init_l_Lean_Meta_CaseArraySizesSubgoal_diseqs___default() {
_start:
{
lean_object* x_1;
x_1 = l_Array_empty___closed__1;
return x_1;
}
}
// Generated: default for CaseArraySizesSubgoal.subst — the boxed scalar 0.
static lean_object* _init_l_Lean_Meta_CaseArraySizesSubgoal_subst___default() {
_start:
{
lean_object* x_1;
x_1 = lean_box(0);
return x_1;
}
}
// Generated: cached Inhabited witness for CaseArraySizesSubgoal.
// Allocates a 4-field constructor (tag 0) holding box(0), two shared empty
// arrays, and box(0).
static lean_object* _init_l_Lean_Meta_instInhabitedCaseArraySizesSubgoal___closed__1() {
_start:
{
lean_object* x_1; lean_object* x_2; lean_object* x_3; lean_object* x_4;
x_1 = lean_box(0);
x_2 = lean_box(0);
x_3 = l_Array_empty___closed__1;
x_4 = lean_alloc_ctor(0, 4, 0);
lean_ctor_set(x_4, 0, x_2);
lean_ctor_set(x_4, 1, x_3);
lean_ctor_set(x_4, 2, x_3);
lean_ctor_set(x_4, 3, x_1);
return x_4;
}
}
// Generated: returns the cached Inhabited instance built in ___closed__1.
static lean_object* _init_l_Lean_Meta_instInhabitedCaseArraySizesSubgoal() {
_start:
{
lean_object* x_1;
x_1 = l_Lean_Meta_instInhabitedCaseArraySizesSubgoal___closed__1;
return x_1;
}
}
// Generated lambda used by getArrayArgType: extracts the argument of the
// application expression x_1 and returns it paired with x_7 in a 2-field
// constructor (presumably a monadic (value, state) result — see caller).
lean_object* l_Lean_Meta_getArrayArgType___lambda__1(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7) {
_start:
{
lean_object* x_8; lean_object* x_9;
x_8 = l_Lean_Expr_appArg_x21(x_1);
x_9 = lean_alloc_ctor(0, 2, 0);
lean_ctor_set(x_9, 0, x_8);
lean_ctor_set(x_9, 1, x_7);
return x_9;
}
}
// Generated: cached string literal for getArrayArgType's error message.
static lean_object* _init_l_Lean_Meta_getArrayArgType___closed__1() {
_start:
{
lean_object* x_1;
x_1 = lean_mk_string("array expected");
return x_1;
}
}
static lean_object* _init_l_Lean_Meta_getArrayArgType___closed__2() {
/* Cached MessageData wrapper around the "array expected" string. */
return l_Lean_stringToMessageData(l_Lean_Meta_getArrayArgType___closed__1);
}
/* Lean-compiler-generated C for `Lean.Meta.getArrayArgType`.
 * Infers the type of expression x_1, reduces it with whnfD, and checks that
 * the result is an application of `Array` with arity 1. On success it hands
 * the reduced type to ___lambda__1, which returns the element type; otherwise
 * it throws an "array expected" error mentioning x_1.
 * x_2..x_5 are the MetaM context/state objects; x_6 is the threaded state
 * token. Result convention: ctor tag 0 = ok(value, state), tag 1 = error.
 * NOTE: generated code — the lean_inc/lean_dec pattern is exact reference
 * counting; do not hand-edit or reorder. */
lean_object* l_Lean_Meta_getArrayArgType(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6) {
_start:
{
lean_object* x_7;
/* retain context/state and x_1 across the inferType call */
lean_inc(x_5);
lean_inc(x_4);
lean_inc(x_3);
lean_inc(x_2);
lean_inc(x_1);
x_7 = l_Lean_Meta_inferType(x_1, x_2, x_3, x_4, x_5, x_6);
if (lean_obj_tag(x_7) == 0)
{
/* inferType succeeded: x_8 = inferred type, x_9 = new state */
lean_object* x_8; lean_object* x_9; lean_object* x_10;
x_8 = lean_ctor_get(x_7, 0);
lean_inc(x_8);
x_9 = lean_ctor_get(x_7, 1);
lean_inc(x_9);
lean_dec(x_7);
lean_inc(x_5);
lean_inc(x_4);
lean_inc(x_3);
lean_inc(x_2);
/* reduce the type to weak head normal form (unfolding definitions) */
x_10 = l_Lean_Meta_whnfD(x_8, x_2, x_3, x_4, x_5, x_9);
if (lean_obj_tag(x_10) == 0)
{
lean_object* x_11; lean_object* x_12; lean_object* x_13; lean_object* x_14; uint8_t x_15;
x_11 = lean_ctor_get(x_10, 0);
lean_inc(x_11);
x_12 = lean_ctor_get(x_10, 1);
lean_inc(x_12);
lean_dec(x_10);
/* x_13 is the interned `Array` name constant; check for `Array _` */
x_13 = l_Array_term_____x5b___x3a___x5d___closed__2;
x_14 = lean_unsigned_to_nat(1u);
x_15 = l_Lean_Expr_isAppOfArity(x_11, x_13, x_14);
if (x_15 == 0)
{
/* not an Array: build "array expected" ++ indented x_1 and throw */
lean_object* x_16; lean_object* x_17; lean_object* x_18; lean_object* x_19; lean_object* x_20; lean_object* x_21; uint8_t x_22;
lean_dec(x_11);
x_16 = l_Lean_indentExpr(x_1);
x_17 = l_Lean_Meta_getArrayArgType___closed__2;
x_18 = lean_alloc_ctor(10, 2, 0);
lean_ctor_set(x_18, 0, x_17);
lean_ctor_set(x_18, 1, x_16);
x_19 = l_Lean_KernelException_toMessageData___closed__15;
x_20 = lean_alloc_ctor(10, 2, 0);
lean_ctor_set(x_20, 0, x_18);
lean_ctor_set(x_20, 1, x_19);
x_21 = l_Lean_throwError___at_Lean_Meta_whnf___spec__1(x_20, x_2, x_3, x_4, x_5, x_12);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
x_22 = !lean_is_exclusive(x_21);
if (x_22 == 0)
{
return x_21;
}
else
{
/* shared result cell: copy into a fresh error ctor */
lean_object* x_23; lean_object* x_24; lean_object* x_25;
x_23 = lean_ctor_get(x_21, 0);
x_24 = lean_ctor_get(x_21, 1);
lean_inc(x_24);
lean_inc(x_23);
lean_dec(x_21);
x_25 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_25, 0, x_23);
lean_ctor_set(x_25, 1, x_24);
return x_25;
}
}
else
{
/* it is `Array α`: delegate to the lambda that extracts α */
lean_object* x_26; lean_object* x_27;
lean_dec(x_1);
x_26 = lean_box(0);
x_27 = l_Lean_Meta_getArrayArgType___lambda__1(x_11, x_26, x_2, x_3, x_4, x_5, x_12);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
lean_dec(x_11);
return x_27;
}
}
else
{
/* propagate error from whnfD */
uint8_t x_28;
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
lean_dec(x_1);
x_28 = !lean_is_exclusive(x_10);
if (x_28 == 0)
{
return x_10;
}
else
{
lean_object* x_29; lean_object* x_30; lean_object* x_31;
x_29 = lean_ctor_get(x_10, 0);
x_30 = lean_ctor_get(x_10, 1);
lean_inc(x_30);
lean_inc(x_29);
lean_dec(x_10);
x_31 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_31, 0, x_29);
lean_ctor_set(x_31, 1, x_30);
return x_31;
}
}
}
else
{
/* propagate error from inferType */
uint8_t x_32;
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
lean_dec(x_1);
x_32 = !lean_is_exclusive(x_7);
if (x_32 == 0)
{
return x_7;
}
else
{
lean_object* x_33; lean_object* x_34; lean_object* x_35;
x_33 = lean_ctor_get(x_7, 0);
x_34 = lean_ctor_get(x_7, 1);
lean_inc(x_34);
lean_inc(x_33);
lean_dec(x_7);
x_35 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_35, 0, x_33);
lean_ctor_set(x_35, 1, x_34);
return x_35;
}
}
}
}
lean_object* l_Lean_Meta_getArrayArgType___lambda__1___boxed(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7) {
/* Boxed wrapper: the unboxed lambda borrows x_1..x_6, so this owning wrapper
   must release them after the call. */
lean_object* result = l_Lean_Meta_getArrayArgType___lambda__1(x_1, x_2, x_3, x_4, x_5, x_6, x_7);
lean_dec(x_6);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
lean_dec(x_1);
return result;
}
static lean_object* _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__1() {
/* Cached string component of the name `Array.getLit`. */
return lean_mk_string("getLit");
}
static lean_object* _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__2() {
/* Cached fully-qualified name `Array.getLit`: append the "getLit" string
   component to the interned `Array` name constant. */
lean_object* array_name = l_Array_term_____x5b___x3a___x5d___closed__2;
lean_object* suffix = l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__1;
return lean_name_mk_string(array_name, suffix);
}
/* Generated C for private def `Lean.Meta.mkArrayGetLit`.
 * Builds the term `Array.getLit x_1 i _ h` via mkAppM, where i = mkNatLit x_2,
 * and h is a proof of `i < n` (n = mkNatLit x_3) produced by mkLt +
 * mkDecideProof. x_4 is pushed as the third explicit argument — presumably a
 * size/type-related argument of getLit; confirm against the Lean source.
 * x_5..x_8 are the MetaM context/state; x_9 is the state token.
 * Error results (tag 1) from mkLt/mkDecideProof are propagated after
 * releasing all owned references. */
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7, lean_object* x_8, lean_object* x_9) {
_start:
{
lean_object* x_10; lean_object* x_11; lean_object* x_12;
/* x_10 = literal index, x_11 = literal size bound */
x_10 = l_Lean_mkNatLit(x_2);
x_11 = l_Lean_mkNatLit(x_3);
lean_inc(x_8);
lean_inc(x_7);
lean_inc(x_6);
lean_inc(x_5);
lean_inc(x_10);
/* build the proposition `index < size` */
x_12 = l_Lean_Meta_mkLt(x_10, x_11, x_5, x_6, x_7, x_8, x_9);
if (lean_obj_tag(x_12) == 0)
{
lean_object* x_13; lean_object* x_14; lean_object* x_15;
x_13 = lean_ctor_get(x_12, 0);
lean_inc(x_13);
x_14 = lean_ctor_get(x_12, 1);
lean_inc(x_14);
lean_dec(x_12);
lean_inc(x_8);
lean_inc(x_7);
lean_inc(x_6);
lean_inc(x_5);
/* discharge the proposition by `decide` */
x_15 = l_Lean_Meta_mkDecideProof(x_13, x_5, x_6, x_7, x_8, x_14);
if (lean_obj_tag(x_15) == 0)
{
lean_object* x_16; lean_object* x_17; lean_object* x_18; lean_object* x_19; lean_object* x_20; lean_object* x_21; lean_object* x_22; lean_object* x_23; lean_object* x_24;
x_16 = lean_ctor_get(x_15, 0);
lean_inc(x_16);
x_17 = lean_ctor_get(x_15, 1);
lean_inc(x_17);
lean_dec(x_15);
/* assemble #[x_1, index, x_4, proof] and apply `Array.getLit` */
x_18 = l_Lean_Syntax_mkAntiquotNode___closed__3;
x_19 = lean_array_push(x_18, x_1);
x_20 = lean_array_push(x_19, x_10);
x_21 = lean_array_push(x_20, x_4);
x_22 = lean_array_push(x_21, x_16);
x_23 = l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__2;
x_24 = l_Lean_Meta_mkAppM(x_23, x_22, x_5, x_6, x_7, x_8, x_17);
return x_24;
}
else
{
/* propagate error from mkDecideProof */
uint8_t x_25;
lean_dec(x_10);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_6);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_1);
x_25 = !lean_is_exclusive(x_15);
if (x_25 == 0)
{
return x_15;
}
else
{
lean_object* x_26; lean_object* x_27; lean_object* x_28;
x_26 = lean_ctor_get(x_15, 0);
x_27 = lean_ctor_get(x_15, 1);
lean_inc(x_27);
lean_inc(x_26);
lean_dec(x_15);
x_28 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_28, 0, x_26);
lean_ctor_set(x_28, 1, x_27);
return x_28;
}
}
}
else
{
/* propagate error from mkLt */
uint8_t x_29;
lean_dec(x_10);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_6);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_1);
x_29 = !lean_is_exclusive(x_12);
if (x_29 == 0)
{
return x_12;
}
else
{
lean_object* x_30; lean_object* x_31; lean_object* x_32;
x_30 = lean_ctor_get(x_12, 0);
x_31 = lean_ctor_get(x_12, 1);
lean_inc(x_31);
lean_inc(x_30);
lean_dec(x_12);
x_32 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_32, 0, x_30);
lean_ctor_set(x_32, 1, x_31);
return x_32;
}
}
}
}
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_match__1___rarg(lean_object* x_1, lean_object* x_2) {
/* Pair eliminator: destructure x_1 into its two fields (retaining each),
   release the pair, and apply the continuation x_2 to both components. */
lean_object* fst = lean_ctor_get(x_1, 0);
lean_inc(fst);
lean_object* snd = lean_ctor_get(x_1, 1);
lean_inc(snd);
lean_dec(x_1);
return lean_apply_2(x_2, fst, snd);
}
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_match__1(lean_object* x_1) {
/* x_1 (the erased motive) is unused at runtime; just allocate the
   two-argument eliminator closure. */
return lean_alloc_closure((void*)(l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_match__1___rarg), 2, 0);
}
/* Generated C for the terminal continuation of `introArrayLit.loop`.
 * Runs inside withLocalDecl with x_5 = the freshly declared local fvar.
 * Fetches the type of goal mvar x_1, abstracts it over the accumulated
 * locals x_2 plus x_5 (mkForallFVars), and returns the pair
 * (generalized type, x_3 ++ [x_4]) — presumably (new goal type, instantiation
 * arguments); confirm against the Lean source.
 * x_6..x_9 are the MetaM context/state; x_10 is the state token. */
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___lambda__1(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7, lean_object* x_8, lean_object* x_9, lean_object* x_10) {
_start:
{
lean_object* x_11;
x_11 = l_Lean_Meta_getMVarType(x_1, x_6, x_7, x_8, x_9, x_10);
if (lean_obj_tag(x_11) == 0)
{
lean_object* x_12; lean_object* x_13; lean_object* x_14; uint8_t x_15; uint8_t x_16; lean_object* x_17;
x_12 = lean_ctor_get(x_11, 0);
lean_inc(x_12);
x_13 = lean_ctor_get(x_11, 1);
lean_inc(x_13);
lean_dec(x_11);
/* abstract the goal type over the accumulated fvars plus the new one */
x_14 = lean_array_push(x_2, x_5);
x_15 = 0;
x_16 = 1;
x_17 = l_Lean_Meta_mkForallFVars(x_14, x_12, x_15, x_16, x_6, x_7, x_8, x_9, x_13);
if (lean_obj_tag(x_17) == 0)
{
uint8_t x_18;
x_18 = !lean_is_exclusive(x_17);
if (x_18 == 0)
{
/* exclusive cell: reuse it for the (type, args) pair */
lean_object* x_19; lean_object* x_20; lean_object* x_21;
x_19 = lean_ctor_get(x_17, 0);
x_20 = lean_array_push(x_3, x_4);
x_21 = lean_alloc_ctor(0, 2, 0);
lean_ctor_set(x_21, 0, x_19);
lean_ctor_set(x_21, 1, x_20);
lean_ctor_set(x_17, 0, x_21);
return x_17;
}
else
{
/* shared cell: build fresh ok((type, args), state) */
lean_object* x_22; lean_object* x_23; lean_object* x_24; lean_object* x_25; lean_object* x_26;
x_22 = lean_ctor_get(x_17, 0);
x_23 = lean_ctor_get(x_17, 1);
lean_inc(x_23);
lean_inc(x_22);
lean_dec(x_17);
x_24 = lean_array_push(x_3, x_4);
x_25 = lean_alloc_ctor(0, 2, 0);
lean_ctor_set(x_25, 0, x_22);
lean_ctor_set(x_25, 1, x_24);
x_26 = lean_alloc_ctor(0, 2, 0);
lean_ctor_set(x_26, 0, x_25);
lean_ctor_set(x_26, 1, x_23);
return x_26;
}
}
else
{
/* propagate error from mkForallFVars */
uint8_t x_27;
lean_dec(x_4);
lean_dec(x_3);
x_27 = !lean_is_exclusive(x_17);
if (x_27 == 0)
{
return x_17;
}
else
{
lean_object* x_28; lean_object* x_29; lean_object* x_30;
x_28 = lean_ctor_get(x_17, 0);
x_29 = lean_ctor_get(x_17, 1);
lean_inc(x_29);
lean_inc(x_28);
lean_dec(x_17);
x_30 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_30, 0, x_28);
lean_ctor_set(x_30, 1, x_29);
return x_30;
}
}
}
else
{
/* propagate error from getMVarType */
uint8_t x_31;
lean_dec(x_6);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
x_31 = !lean_is_exclusive(x_11);
if (x_31 == 0)
{
return x_11;
}
else
{
lean_object* x_32; lean_object* x_33; lean_object* x_34;
x_32 = lean_ctor_get(x_11, 0);
x_33 = lean_ctor_get(x_11, 1);
lean_inc(x_33);
lean_inc(x_32);
lean_dec(x_11);
x_34 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_34, 0, x_32);
lean_ctor_set(x_34, 1, x_33);
return x_34;
}
}
}
}
/* Generated C for the per-element continuation of `introArrayLit.loop`.
 * Runs inside withLocalDecl with x_11 = the fresh local fvar for the current
 * array element. Appends x_11 to the fvar accumulator x_1, builds the
 * `Array.getLit` projection for the current index via mkArrayGetLit, appends
 * it to the element accumulator x_6, and tail-calls introArrayLit_loop for
 * the remaining indices. x_12..x_15 are the MetaM context/state; x_16 is the
 * state token. Errors from mkArrayGetLit are propagated after releasing all
 * owned references. */
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___lambda__2(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7, lean_object* x_8, lean_object* x_9, lean_object* x_10, lean_object* x_11, lean_object* x_12, lean_object* x_13, lean_object* x_14, lean_object* x_15, lean_object* x_16) {
_start:
{
lean_object* x_17; lean_object* x_18;
x_17 = lean_array_push(x_1, x_11);
lean_inc(x_15);
lean_inc(x_14);
lean_inc(x_13);
lean_inc(x_12);
lean_inc(x_5);
lean_inc(x_4);
lean_inc(x_2);
/* build `Array.getLit x_2 x_3 x_4 x_5` (array, index, bound, extra arg) */
x_18 = l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit(x_2, x_3, x_4, x_5, x_12, x_13, x_14, x_15, x_16);
if (lean_obj_tag(x_18) == 0)
{
lean_object* x_19; lean_object* x_20; lean_object* x_21; lean_object* x_22;
x_19 = lean_ctor_get(x_18, 0);
lean_inc(x_19);
x_20 = lean_ctor_get(x_18, 1);
lean_inc(x_20);
lean_dec(x_18);
/* accumulate the projection and recurse on the remaining indices */
x_21 = lean_array_push(x_6, x_19);
x_22 = l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop(x_7, x_2, x_4, x_8, x_5, x_9, x_10, x_17, x_21, x_12, x_13, x_14, x_15, x_20);
return x_22;
}
else
{
/* propagate error from mkArrayGetLit */
uint8_t x_23;
lean_dec(x_17);
lean_dec(x_15);
lean_dec(x_14);
lean_dec(x_13);
lean_dec(x_12);
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_6);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_2);
x_23 = !lean_is_exclusive(x_18);
if (x_23 == 0)
{
return x_18;
}
else
{
lean_object* x_24; lean_object* x_25; lean_object* x_26;
x_24 = lean_ctor_get(x_18, 0);
x_25 = lean_ctor_get(x_18, 1);
lean_inc(x_25);
lean_inc(x_24);
lean_dec(x_18);
x_26 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_26, 0, x_24);
lean_ctor_set(x_26, 1, x_25);
return x_26;
}
}
}
}
static lean_object* _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__1() {
/* Cached string component of the name `Array.toArrayLitEq`. */
return lean_mk_string("toArrayLitEq");
}
static lean_object* _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__2() {
/* Cached fully-qualified name `Array.toArrayLitEq`. */
lean_object* array_name = l_Array_term_____x5b___x3a___x5d___closed__2;
lean_object* suffix = l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__1;
return lean_name_mk_string(array_name, suffix);
}
static lean_object* _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__3() {
/* Cached string for the hypothesis user name "hEqALit". */
return lean_mk_string("hEqALit");
}
static lean_object* _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__4() {
/* Cached simple Name `hEqALit` (anonymous prefix + string component). */
lean_object* anonymous = lean_box(0);
lean_object* component = l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__3;
return lean_name_mk_string(anonymous, component);
}
/* Generated C for private def `Lean.Meta.introArrayLit.loop`.
 * Recursion over indices x_7 = 0..x_3-1 of an array expression x_2.
 * While x_7 < x_3, it appends an index suffix to base name x_4, and calls
 * withLocalDecl with a ___lambda__2 continuation that accumulates one local
 * per element (x_8 = fvar accumulator, x_9 = getLit-projection accumulator).
 * When x_7 reaches x_3, it:
 *   - builds the array literal from the accumulated element fvars (mkArrayLit),
 *   - forms `x_2 = <literal>` (mkEq),
 *   - builds `Array.toArrayLitEq x_2 x_3 x_5` via mkAppM,
 *   - introduces a local `hEqALit : x_2 = <literal>` with withLocalDecl and
 *     runs the ___lambda__1 continuation that generalizes the goal.
 * x_10..x_13 are the MetaM context/state; x_14 is the state token.
 * All tag-1 branches are error propagation with exact reference releasing. */
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7, lean_object* x_8, lean_object* x_9, lean_object* x_10, lean_object* x_11, lean_object* x_12, lean_object* x_13, lean_object* x_14) {
_start:
{
uint8_t x_15;
x_15 = lean_nat_dec_lt(x_7, x_3);
if (x_15 == 0)
{
/* base case: all element locals introduced */
lean_object* x_16; lean_object* x_17;
lean_dec(x_7);
lean_dec(x_4);
lean_inc(x_8);
x_16 = lean_array_to_list(lean_box(0), x_8);
lean_inc(x_13);
lean_inc(x_12);
lean_inc(x_11);
lean_inc(x_10);
x_17 = l_Lean_Meta_mkArrayLit(x_6, x_16, x_10, x_11, x_12, x_13, x_14);
if (lean_obj_tag(x_17) == 0)
{
lean_object* x_18; lean_object* x_19; lean_object* x_20;
x_18 = lean_ctor_get(x_17, 0);
lean_inc(x_18);
x_19 = lean_ctor_get(x_17, 1);
lean_inc(x_19);
lean_dec(x_17);
lean_inc(x_13);
lean_inc(x_12);
lean_inc(x_11);
lean_inc(x_10);
lean_inc(x_2);
/* state the equation `x_2 = <array literal>` */
x_20 = l_Lean_Meta_mkEq(x_2, x_18, x_10, x_11, x_12, x_13, x_19);
if (lean_obj_tag(x_20) == 0)
{
lean_object* x_21; lean_object* x_22; lean_object* x_23; lean_object* x_24; lean_object* x_25; lean_object* x_26; lean_object* x_27; lean_object* x_28; lean_object* x_29;
x_21 = lean_ctor_get(x_20, 0);
lean_inc(x_21);
x_22 = lean_ctor_get(x_20, 1);
lean_inc(x_22);
lean_dec(x_20);
/* build `Array.toArrayLitEq x_2 <size> x_5` as the proof term */
x_23 = l_Lean_mkNatLit(x_3);
x_24 = l_Lean_Syntax_mkAntiquotNode___closed__9;
x_25 = lean_array_push(x_24, x_2);
x_26 = lean_array_push(x_25, x_23);
x_27 = lean_array_push(x_26, x_5);
x_28 = l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__2;
lean_inc(x_13);
lean_inc(x_12);
lean_inc(x_11);
lean_inc(x_10);
x_29 = l_Lean_Meta_mkAppM(x_28, x_27, x_10, x_11, x_12, x_13, x_22);
if (lean_obj_tag(x_29) == 0)
{
lean_object* x_30; lean_object* x_31; lean_object* x_32; lean_object* x_33; uint8_t x_34; lean_object* x_35;
x_30 = lean_ctor_get(x_29, 0);
lean_inc(x_30);
x_31 = lean_ctor_get(x_29, 1);
lean_inc(x_31);
lean_dec(x_29);
/* run ___lambda__1 under a local `hEqALit : x_2 = <literal>` */
x_32 = lean_alloc_closure((void*)(l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___lambda__1___boxed), 10, 4);
lean_closure_set(x_32, 0, x_1);
lean_closure_set(x_32, 1, x_8);
lean_closure_set(x_32, 2, x_9);
lean_closure_set(x_32, 3, x_30);
x_33 = l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__4;
x_34 = 0;
x_35 = l_Lean_Meta_withLocalDecl___at_Lean_Meta_substCore___spec__2___rarg(x_33, x_34, x_21, x_32, x_10, x_11, x_12, x_13, x_31);
return x_35;
}
else
{
/* propagate error from mkAppM */
uint8_t x_36;
lean_dec(x_21);
lean_dec(x_13);
lean_dec(x_12);
lean_dec(x_11);
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_1);
x_36 = !lean_is_exclusive(x_29);
if (x_36 == 0)
{
return x_29;
}
else
{
lean_object* x_37; lean_object* x_38; lean_object* x_39;
x_37 = lean_ctor_get(x_29, 0);
x_38 = lean_ctor_get(x_29, 1);
lean_inc(x_38);
lean_inc(x_37);
lean_dec(x_29);
x_39 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_39, 0, x_37);
lean_ctor_set(x_39, 1, x_38);
return x_39;
}
}
}
else
{
/* propagate error from mkEq */
uint8_t x_40;
lean_dec(x_13);
lean_dec(x_12);
lean_dec(x_11);
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_5);
lean_dec(x_3);
lean_dec(x_2);
lean_dec(x_1);
x_40 = !lean_is_exclusive(x_20);
if (x_40 == 0)
{
return x_20;
}
else
{
lean_object* x_41; lean_object* x_42; lean_object* x_43;
x_41 = lean_ctor_get(x_20, 0);
x_42 = lean_ctor_get(x_20, 1);
lean_inc(x_42);
lean_inc(x_41);
lean_dec(x_20);
x_43 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_43, 0, x_41);
lean_ctor_set(x_43, 1, x_42);
return x_43;
}
}
}
else
{
/* propagate error from mkArrayLit */
uint8_t x_44;
lean_dec(x_13);
lean_dec(x_12);
lean_dec(x_11);
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_5);
lean_dec(x_3);
lean_dec(x_2);
lean_dec(x_1);
x_44 = !lean_is_exclusive(x_17);
if (x_44 == 0)
{
return x_17;
}
else
{
lean_object* x_45; lean_object* x_46; lean_object* x_47;
x_45 = lean_ctor_get(x_17, 0);
x_46 = lean_ctor_get(x_17, 1);
lean_inc(x_46);
lean_inc(x_45);
lean_dec(x_17);
x_47 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_47, 0, x_45);
lean_ctor_set(x_47, 1, x_46);
return x_47;
}
}
}
else
{
/* recursive case: introduce a local for element x_7 and continue in
   ___lambda__2 with index x_7 + 1 */
lean_object* x_48; lean_object* x_49; lean_object* x_50; lean_object* x_51; uint8_t x_52; lean_object* x_53;
x_48 = lean_unsigned_to_nat(1u);
x_49 = lean_nat_add(x_7, x_48);
lean_inc(x_49);
lean_inc(x_4);
x_50 = lean_name_append_index_after(x_4, x_49);
lean_inc(x_6);
x_51 = lean_alloc_closure((void*)(l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___lambda__2), 16, 10);
lean_closure_set(x_51, 0, x_8);
lean_closure_set(x_51, 1, x_2);
lean_closure_set(x_51, 2, x_7);
lean_closure_set(x_51, 3, x_3);
lean_closure_set(x_51, 4, x_5);
lean_closure_set(x_51, 5, x_9);
lean_closure_set(x_51, 6, x_1);
lean_closure_set(x_51, 7, x_4);
lean_closure_set(x_51, 8, x_6);
lean_closure_set(x_51, 9, x_49);
x_52 = 0;
x_53 = l_Lean_Meta_withLocalDecl___at_Lean_Meta_substCore___spec__2___rarg(x_50, x_52, x_6, x_51, x_10, x_11, x_12, x_13, x_14);
return x_53;
}
}
}
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___lambda__1___boxed(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7, lean_object* x_8, lean_object* x_9, lean_object* x_10) {
/* Boxed wrapper: the unboxed lambda borrows x_7..x_9, so release them here
   after the call (the remaining arguments are consumed by the lambda). */
lean_object* result = l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___lambda__1(x_1, x_2, x_3, x_4, x_5, x_6, x_7, x_8, x_9, x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_7);
return result;
}
/* Generated C for private def `Lean.Meta.introArrayLit`.
 * Top-level driver: for goal mvar x_1 and array expression x_2,
 *   1. get the array's element type (getArrayArgType),
 *   2. run introArrayLit_loop starting at index 0 with empty accumulators,
 *      producing (generalized goal type x_19, instantiation args x_20),
 *   3. create a fresh synthetic-opaque mvar with the original goal's tag,
 *   4. assign x_1 := (new mvar applied to the accumulated args),
 *   5. return the new goal's mvarId.
 * x_6..x_9 are the MetaM context/state; x_10 is the state token.
 * Tag-1 branches propagate errors with exact reference releasing. */
lean_object* l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7, lean_object* x_8, lean_object* x_9, lean_object* x_10) {
_start:
{
lean_object* x_11;
lean_inc(x_9);
lean_inc(x_8);
lean_inc(x_7);
lean_inc(x_6);
lean_inc(x_2);
x_11 = l_Lean_Meta_getArrayArgType(x_2, x_6, x_7, x_8, x_9, x_10);
if (lean_obj_tag(x_11) == 0)
{
lean_object* x_12; lean_object* x_13; lean_object* x_14; lean_object* x_15; lean_object* x_16;
x_12 = lean_ctor_get(x_11, 0);
lean_inc(x_12);
x_13 = lean_ctor_get(x_11, 1);
lean_inc(x_13);
lean_dec(x_11);
/* start the loop at index 0 with empty fvar/element accumulators */
x_14 = lean_unsigned_to_nat(0u);
x_15 = l_Array_empty___closed__1;
lean_inc(x_9);
lean_inc(x_8);
lean_inc(x_7);
lean_inc(x_6);
lean_inc(x_1);
x_16 = l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop(x_1, x_2, x_3, x_4, x_5, x_12, x_14, x_15, x_15, x_6, x_7, x_8, x_9, x_13);
if (lean_obj_tag(x_16) == 0)
{
lean_object* x_17; lean_object* x_18; lean_object* x_19; lean_object* x_20; lean_object* x_21;
x_17 = lean_ctor_get(x_16, 0);
lean_inc(x_17);
x_18 = lean_ctor_get(x_16, 1);
lean_inc(x_18);
lean_dec(x_16);
/* x_19 = generalized goal type, x_20 = instantiation arguments */
x_19 = lean_ctor_get(x_17, 0);
lean_inc(x_19);
x_20 = lean_ctor_get(x_17, 1);
lean_inc(x_20);
lean_dec(x_17);
lean_inc(x_1);
x_21 = l_Lean_Meta_getMVarTag(x_1, x_6, x_7, x_8, x_9, x_18);
if (lean_obj_tag(x_21) == 0)
{
lean_object* x_22; lean_object* x_23; lean_object* x_24; lean_object* x_25; lean_object* x_26; lean_object* x_27; lean_object* x_28; uint8_t x_29;
x_22 = lean_ctor_get(x_21, 0);
lean_inc(x_22);
x_23 = lean_ctor_get(x_21, 1);
lean_inc(x_23);
lean_dec(x_21);
lean_inc(x_6);
/* new goal mvar carrying the original goal's user-facing tag */
x_24 = l_Lean_Meta_mkFreshExprSyntheticOpaqueMVar(x_19, x_22, x_6, x_7, x_8, x_9, x_23);
x_25 = lean_ctor_get(x_24, 0);
lean_inc(x_25);
x_26 = lean_ctor_get(x_24, 1);
lean_inc(x_26);
lean_dec(x_24);
lean_inc(x_25);
/* close the original goal: x_1 := newMVar applied to the args */
x_27 = l_Lean_mkAppN(x_25, x_20);
lean_dec(x_20);
x_28 = l_Lean_Meta_assignExprMVar(x_1, x_27, x_6, x_7, x_8, x_9, x_26);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_6);
x_29 = !lean_is_exclusive(x_28);
if (x_29 == 0)
{
/* exclusive cell: reuse it, replacing the unit payload with the mvarId */
lean_object* x_30; lean_object* x_31;
x_30 = lean_ctor_get(x_28, 0);
lean_dec(x_30);
x_31 = l_Lean_Expr_mvarId_x21(x_25);
lean_dec(x_25);
lean_ctor_set(x_28, 0, x_31);
return x_28;
}
else
{
lean_object* x_32; lean_object* x_33; lean_object* x_34;
x_32 = lean_ctor_get(x_28, 1);
lean_inc(x_32);
lean_dec(x_28);
x_33 = l_Lean_Expr_mvarId_x21(x_25);
lean_dec(x_25);
x_34 = lean_alloc_ctor(0, 2, 0);
lean_ctor_set(x_34, 0, x_33);
lean_ctor_set(x_34, 1, x_32);
return x_34;
}
}
else
{
/* propagate error from getMVarTag */
uint8_t x_35;
lean_dec(x_20);
lean_dec(x_19);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_6);
lean_dec(x_1);
x_35 = !lean_is_exclusive(x_21);
if (x_35 == 0)
{
return x_21;
}
else
{
lean_object* x_36; lean_object* x_37; lean_object* x_38;
x_36 = lean_ctor_get(x_21, 0);
x_37 = lean_ctor_get(x_21, 1);
lean_inc(x_37);
lean_inc(x_36);
lean_dec(x_21);
x_38 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_38, 0, x_36);
lean_ctor_set(x_38, 1, x_37);
return x_38;
}
}
}
else
{
/* propagate error from introArrayLit_loop */
uint8_t x_39;
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_6);
lean_dec(x_1);
x_39 = !lean_is_exclusive(x_16);
if (x_39 == 0)
{
return x_16;
}
else
{
lean_object* x_40; lean_object* x_41; lean_object* x_42;
x_40 = lean_ctor_get(x_16, 0);
x_41 = lean_ctor_get(x_16, 1);
lean_inc(x_41);
lean_inc(x_40);
lean_dec(x_16);
x_42 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_42, 0, x_40);
lean_ctor_set(x_42, 1, x_41);
return x_42;
}
}
}
else
{
/* propagate error from getArrayArgType */
uint8_t x_43;
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_6);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
lean_dec(x_1);
x_43 = !lean_is_exclusive(x_11);
if (x_43 == 0)
{
return x_11;
}
else
{
lean_object* x_44; lean_object* x_45; lean_object* x_46;
x_44 = lean_ctor_get(x_11, 0);
x_45 = lean_ctor_get(x_11, 1);
lean_inc(x_45);
lean_inc(x_44);
lean_dec(x_11);
x_46 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_46, 0, x_44);
lean_ctor_set(x_46, 1, x_45);
return x_46;
}
}
}
}
lean_object* l_Lean_Meta_caseArraySizes_match__1___rarg(lean_object* x_1, lean_object* x_2) {
/* Pair eliminator: retain both fields of x_1, drop the pair, and pass the
   components to the continuation x_2. */
lean_object* fst = lean_ctor_get(x_1, 0);
lean_inc(fst);
lean_object* snd = lean_ctor_get(x_1, 1);
lean_inc(snd);
lean_dec(x_1);
return lean_apply_2(x_2, fst, snd);
}
lean_object* l_Lean_Meta_caseArraySizes_match__1(lean_object* x_1) {
/* x_1 (the erased motive) is unused at runtime. */
return lean_alloc_closure((void*)(l_Lean_Meta_caseArraySizes_match__1___rarg), 2, 0);
}
lean_object* l_Lean_Meta_caseArraySizes_match__2___rarg(lean_object* x_1, lean_object* x_2) {
/* Pair eliminator, identical in shape to match__1___rarg. */
lean_object* fst = lean_ctor_get(x_1, 0);
lean_inc(fst);
lean_object* snd = lean_ctor_get(x_1, 1);
lean_inc(snd);
lean_dec(x_1);
return lean_apply_2(x_2, fst, snd);
}
lean_object* l_Lean_Meta_caseArraySizes_match__2(lean_object* x_1) {
/* x_1 (the erased motive) is unused at runtime. */
return lean_alloc_closure((void*)(l_Lean_Meta_caseArraySizes_match__2___rarg), 2, 0);
}
lean_object* l_Lean_Meta_caseArraySizes_match__3___rarg(lean_object* x_1, lean_object* x_2) {
/* Pair eliminator, identical in shape to match__1___rarg. */
lean_object* fst = lean_ctor_get(x_1, 0);
lean_inc(fst);
lean_object* snd = lean_ctor_get(x_1, 1);
lean_inc(snd);
lean_dec(x_1);
return lean_apply_2(x_2, fst, snd);
}
lean_object* l_Lean_Meta_caseArraySizes_match__3(lean_object* x_1) {
/* x_1 (the erased motive) is unused at runtime. */
return lean_alloc_closure((void*)(l_Lean_Meta_caseArraySizes_match__3___rarg), 2, 0);
}
lean_object* l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__1(size_t x_1, size_t x_2, lean_object* x_3) {
/* In-place map of `Lean.mkNatLit` over array x_3, from index x_2 up to x_1
   (exclusive). The original goto-based tail recursion is expressed as a
   while loop; the operation order per element is unchanged. */
while (x_2 < x_1) {
    lean_object* cur = lean_array_uget(x_3, x_2);
    /* park a scalar in the slot so the array holds no stale reference while
       `cur` is consumed by mkNatLit */
    x_3 = lean_array_uset(x_3, x_2, lean_unsigned_to_nat(0u));
    x_3 = lean_array_uset(x_3, x_2, l_Lean_mkNatLit(cur));
    x_2 = x_2 + 1;
}
return x_3;
}
lean_object* l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__2(lean_object* x_1, size_t x_2, size_t x_3, lean_object* x_4) {
/* In-place map over array x_4 from index x_3 up to x_2 (exclusive): apply
   substitution x_1 to each fvar and keep the resulting expression's fvarId.
   The original goto-based tail recursion is expressed as a while loop; the
   per-element operation order (uget, scalar uset, FVarSubst_get, fvarId,
   dec, uset) is unchanged. */
while (x_3 < x_2) {
    lean_object* fv = lean_array_uget(x_4, x_3);
    x_4 = lean_array_uset(x_4, x_3, lean_unsigned_to_nat(0u));
    lean_object* mapped = l_Lean_Meta_FVarSubst_get(x_1, fv);
    lean_object* fvar_id = l_Lean_Expr_fvarId_x21(mapped);
    lean_dec(mapped);
    x_4 = lean_array_uset(x_4, x_3, fvar_id);
    x_3 = x_3 + 1;
}
return x_4;
}
/* Generated C for the per-subgoal continuation of the caseArraySizes
 * mapIdxM specialization. For one case subgoal it:
 *   1. introduces the array-literal equation (introArrayLit),
 *   2. re-introduces x_3 hypotheses with introNCore (x_21 = their fvarIds,
 *      x_22 = the remaining goal),
 *   3. introduces one more hypothesis with intro1Core (x_26 = its fvarId),
 *   4. clears hypothesis x_5 from the goal,
 *   5. substitutes using x_26 via substCore,
 *   6. packages the result as a 4-field ctor — presumably a
 *      CaseArraySizesSubgoal (mvarId/elems/diseqs/subst); confirm against
 *      the Lean source — with an empty `diseqs` array.
 * x_8..x_11 are the MetaM context/state; x_12 is the state token.
 * All tag-1 branches are error propagation with exact reference releasing. */
lean_object* l_Array_mapIdxM_map___at_Lean_Meta_caseArraySizes___spec__3___lambda__1(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7, lean_object* x_8, lean_object* x_9, lean_object* x_10, lean_object* x_11, lean_object* x_12) {
_start:
{
lean_object* x_13;
lean_inc(x_11);
lean_inc(x_10);
lean_inc(x_9);
lean_inc(x_8);
lean_inc(x_3);
x_13 = l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit(x_1, x_2, x_3, x_4, x_7, x_8, x_9, x_10, x_11, x_12);
if (lean_obj_tag(x_13) == 0)
{
lean_object* x_14; lean_object* x_15; lean_object* x_16; uint8_t x_17; lean_object* x_18;
x_14 = lean_ctor_get(x_13, 0);
lean_inc(x_14);
x_15 = lean_ctor_get(x_13, 1);
lean_inc(x_15);
lean_dec(x_13);
x_16 = lean_box(0);
x_17 = 0;
lean_inc(x_11);
lean_inc(x_10);
lean_inc(x_9);
lean_inc(x_8);
/* intro x_3 hypotheses into the new goal */
x_18 = l_Lean_Meta_introNCore(x_14, x_3, x_16, x_17, x_17, x_8, x_9, x_10, x_11, x_15);
if (lean_obj_tag(x_18) == 0)
{
lean_object* x_19; lean_object* x_20; lean_object* x_21; lean_object* x_22; lean_object* x_23;
x_19 = lean_ctor_get(x_18, 0);
lean_inc(x_19);
x_20 = lean_ctor_get(x_18, 1);
lean_inc(x_20);
lean_dec(x_18);
/* x_21 = introduced fvarIds, x_22 = goal after introNCore */
x_21 = lean_ctor_get(x_19, 0);
lean_inc(x_21);
x_22 = lean_ctor_get(x_19, 1);
lean_inc(x_22);
lean_dec(x_19);
lean_inc(x_11);
lean_inc(x_10);
lean_inc(x_9);
lean_inc(x_8);
/* intro the equation hypothesis itself */
x_23 = l_Lean_Meta_intro1Core(x_22, x_17, x_8, x_9, x_10, x_11, x_20);
if (lean_obj_tag(x_23) == 0)
{
lean_object* x_24; lean_object* x_25; lean_object* x_26; lean_object* x_27; lean_object* x_28;
x_24 = lean_ctor_get(x_23, 0);
lean_inc(x_24);
x_25 = lean_ctor_get(x_23, 1);
lean_inc(x_25);
lean_dec(x_23);
x_26 = lean_ctor_get(x_24, 0);
lean_inc(x_26);
x_27 = lean_ctor_get(x_24, 1);
lean_inc(x_27);
lean_dec(x_24);
lean_inc(x_11);
lean_inc(x_10);
lean_inc(x_9);
lean_inc(x_8);
/* remove the original hypothesis x_5 */
x_28 = l_Lean_Meta_clear(x_27, x_5, x_8, x_9, x_10, x_11, x_25);
if (lean_obj_tag(x_28) == 0)
{
lean_object* x_29; lean_object* x_30; uint8_t x_31; lean_object* x_32;
x_29 = lean_ctor_get(x_28, 0);
lean_inc(x_29);
x_30 = lean_ctor_get(x_28, 1);
lean_inc(x_30);
lean_dec(x_28);
x_31 = 1;
/* rewrite the goal with the introduced equation */
x_32 = l_Lean_Meta_substCore(x_29, x_26, x_17, x_6, x_31, x_17, x_8, x_9, x_10, x_11, x_30);
if (lean_obj_tag(x_32) == 0)
{
uint8_t x_33;
x_33 = !lean_is_exclusive(x_32);
if (x_33 == 0)
{
/* exclusive cell: reuse it for the packaged subgoal */
lean_object* x_34; lean_object* x_35; lean_object* x_36; lean_object* x_37; lean_object* x_38;
x_34 = lean_ctor_get(x_32, 0);
x_35 = lean_ctor_get(x_34, 0);
lean_inc(x_35);
x_36 = lean_ctor_get(x_34, 1);
lean_inc(x_36);
lean_dec(x_34);
x_37 = l_Array_empty___closed__1;
x_38 = lean_alloc_ctor(0, 4, 0);
lean_ctor_set(x_38, 0, x_36);
lean_ctor_set(x_38, 1, x_21);
lean_ctor_set(x_38, 2, x_37);
lean_ctor_set(x_38, 3, x_35);
lean_ctor_set(x_32, 0, x_38);
return x_32;
}
else
{
lean_object* x_39; lean_object* x_40; lean_object* x_41; lean_object* x_42; lean_object* x_43; lean_object* x_44; lean_object* x_45;
x_39 = lean_ctor_get(x_32, 0);
x_40 = lean_ctor_get(x_32, 1);
lean_inc(x_40);
lean_inc(x_39);
lean_dec(x_32);
x_41 = lean_ctor_get(x_39, 0);
lean_inc(x_41);
x_42 = lean_ctor_get(x_39, 1);
lean_inc(x_42);
lean_dec(x_39);
x_43 = l_Array_empty___closed__1;
x_44 = lean_alloc_ctor(0, 4, 0);
lean_ctor_set(x_44, 0, x_42);
lean_ctor_set(x_44, 1, x_21);
lean_ctor_set(x_44, 2, x_43);
lean_ctor_set(x_44, 3, x_41);
x_45 = lean_alloc_ctor(0, 2, 0);
lean_ctor_set(x_45, 0, x_44);
lean_ctor_set(x_45, 1, x_40);
return x_45;
}
}
else
{
/* propagate error from substCore */
uint8_t x_46;
lean_dec(x_21);
x_46 = !lean_is_exclusive(x_32);
if (x_46 == 0)
{
return x_32;
}
else
{
lean_object* x_47; lean_object* x_48; lean_object* x_49;
x_47 = lean_ctor_get(x_32, 0);
x_48 = lean_ctor_get(x_32, 1);
lean_inc(x_48);
lean_inc(x_47);
lean_dec(x_32);
x_49 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_49, 0, x_47);
lean_ctor_set(x_49, 1, x_48);
return x_49;
}
}
}
else
{
/* propagate error from clear */
uint8_t x_50;
lean_dec(x_26);
lean_dec(x_21);
lean_dec(x_11);
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_6);
x_50 = !lean_is_exclusive(x_28);
if (x_50 == 0)
{
return x_28;
}
else
{
lean_object* x_51; lean_object* x_52; lean_object* x_53;
x_51 = lean_ctor_get(x_28, 0);
x_52 = lean_ctor_get(x_28, 1);
lean_inc(x_52);
lean_inc(x_51);
lean_dec(x_28);
x_53 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_53, 0, x_51);
lean_ctor_set(x_53, 1, x_52);
return x_53;
}
}
}
else
{
/* propagate error from intro1Core */
uint8_t x_54;
lean_dec(x_21);
lean_dec(x_11);
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_6);
lean_dec(x_5);
x_54 = !lean_is_exclusive(x_23);
if (x_54 == 0)
{
return x_23;
}
else
{
lean_object* x_55; lean_object* x_56; lean_object* x_57;
x_55 = lean_ctor_get(x_23, 0);
x_56 = lean_ctor_get(x_23, 1);
lean_inc(x_56);
lean_inc(x_55);
lean_dec(x_23);
x_57 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_57, 0, x_55);
lean_ctor_set(x_57, 1, x_56);
return x_57;
}
}
}
else
{
/* propagate error from introNCore */
uint8_t x_58;
lean_dec(x_11);
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_6);
lean_dec(x_5);
x_58 = !lean_is_exclusive(x_18);
if (x_58 == 0)
{
return x_18;
}
else
{
lean_object* x_59; lean_object* x_60; lean_object* x_61;
x_59 = lean_ctor_get(x_18, 0);
x_60 = lean_ctor_get(x_18, 1);
lean_inc(x_60);
lean_inc(x_59);
lean_dec(x_18);
x_61 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_61, 0, x_59);
lean_ctor_set(x_61, 1, x_60);
return x_61;
}
}
}
else
{
/* propagate error from introArrayLit */
uint8_t x_62;
lean_dec(x_11);
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_6);
lean_dec(x_5);
lean_dec(x_3);
x_62 = !lean_is_exclusive(x_13);
if (x_62 == 0)
{
return x_13;
}
else
{
lean_object* x_63; lean_object* x_64; lean_object* x_65;
x_63 = lean_ctor_get(x_13, 0);
x_64 = lean_ctor_get(x_13, 1);
lean_inc(x_64);
lean_inc(x_63);
lean_dec(x_13);
x_65 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_65, 0, x_63);
lean_ctor_set(x_65, 1, x_64);
return x_65;
}
}
}
}
/* Auto-generated C emitted by the Lean 4 compiler: the Array.mapIdxM.map loop
 * specialized for Lean.Meta.caseArraySizes. Iterates indices over the subgoal
 * array (x_9), fuel counter x_10, current index x_11, accumulator x_13, and
 * the usual 5 MetaM state arguments (x_14..x_18). Do not edit by hand: the
 * lean_inc/lean_dec calls implement exact reference counting and any change
 * to their placement leaks or double-frees. */
lean_object* l_Array_mapIdxM_map___at_Lean_Meta_caseArraySizes___spec__3(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, size_t x_7, lean_object* x_8, lean_object* x_9, lean_object* x_10, lean_object* x_11, lean_object* x_12, lean_object* x_13, lean_object* x_14, lean_object* x_15, lean_object* x_16, lean_object* x_17, lean_object* x_18) {
_start:
{
lean_object* x_19; uint8_t x_20;
x_19 = lean_unsigned_to_nat(0u);
/* Loop guard: x_10 is the remaining-iterations fuel; 0 means done. */
x_20 = lean_nat_dec_eq(x_10, x_19);
if (x_20 == 0)
{
lean_object* x_21; lean_object* x_22; lean_object* x_23; lean_object* x_24; lean_object* x_25; lean_object* x_26; lean_object* x_27; uint8_t x_28;
x_21 = lean_unsigned_to_nat(1u);
x_22 = lean_nat_sub(x_10, x_21);
lean_dec(x_10);
x_23 = lean_array_fget(x_9, x_11);
x_24 = lean_ctor_get(x_23, 2);
lean_inc(x_24);
x_25 = lean_ctor_get(x_23, 0);
lean_inc(x_25);
lean_inc(x_5);
x_26 = l_Lean_Meta_FVarSubst_get(x_24, x_5);
x_27 = l_Lean_Expr_fvarId_x21(x_26);
lean_dec(x_26);
/* Branch on whether the current index is still below the bound x_6. */
x_28 = lean_nat_dec_lt(x_11, x_6);
if (x_28 == 0)
{
uint8_t x_29; uint8_t x_30; lean_object* x_31;
lean_dec(x_27);
x_29 = 0;
x_30 = 1;
lean_inc(x_17);
lean_inc(x_16);
lean_inc(x_15);
lean_inc(x_14);
lean_inc(x_5);
x_31 = l_Lean_Meta_substCore(x_25, x_5, x_29, x_24, x_30, x_29, x_14, x_15, x_16, x_17, x_18);
if (lean_obj_tag(x_31) == 0)
{
/* substCore succeeded: build a subgoal record and loop (tail-call via goto). */
lean_object* x_32; lean_object* x_33; lean_object* x_34; lean_object* x_35; lean_object* x_36; lean_object* x_37; size_t x_38; lean_object* x_39; lean_object* x_40; lean_object* x_41; lean_object* x_42; lean_object* x_43; lean_object* x_44; lean_object* x_45;
x_32 = lean_ctor_get(x_31, 0);
lean_inc(x_32);
x_33 = lean_ctor_get(x_31, 1);
lean_inc(x_33);
lean_dec(x_31);
x_34 = lean_ctor_get(x_32, 0);
lean_inc(x_34);
x_35 = lean_ctor_get(x_32, 1);
lean_inc(x_35);
lean_dec(x_32);
x_36 = lean_ctor_get(x_23, 1);
lean_inc(x_36);
lean_dec(x_23);
x_37 = lean_array_get_size(x_36);
x_38 = lean_usize_of_nat(x_37);
lean_dec(x_37);
x_39 = x_36;
x_40 = l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__2(x_34, x_38, x_7, x_39);
x_41 = x_40;
x_42 = l_Array_empty___closed__1;
x_43 = lean_alloc_ctor(0, 4, 0);
lean_ctor_set(x_43, 0, x_35);
lean_ctor_set(x_43, 1, x_42);
lean_ctor_set(x_43, 2, x_41);
lean_ctor_set(x_43, 3, x_34);
x_44 = lean_nat_add(x_11, x_21);
lean_dec(x_11);
x_45 = lean_array_push(x_13, x_43);
x_10 = x_22;
x_11 = x_44;
x_12 = lean_box(0);
x_13 = x_45;
x_18 = x_33;
goto _start;
}
else
{
/* substCore failed: release everything still owned and propagate the error. */
uint8_t x_47;
lean_dec(x_23);
lean_dec(x_22);
lean_dec(x_17);
lean_dec(x_16);
lean_dec(x_15);
lean_dec(x_14);
lean_dec(x_13);
lean_dec(x_11);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
x_47 = !lean_is_exclusive(x_31);
if (x_47 == 0)
{
return x_31;
}
else
{
lean_object* x_48; lean_object* x_49; lean_object* x_50;
x_48 = lean_ctor_get(x_31, 0);
x_49 = lean_ctor_get(x_31, 1);
lean_inc(x_49);
lean_inc(x_48);
lean_dec(x_31);
x_50 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_50, 0, x_48);
lean_ctor_set(x_50, 1, x_49);
return x_50;
}
}
}
else
{
/* Index < bound: clear two hypotheses, then continue inside the MVar context. */
lean_object* x_51; lean_object* x_52; lean_object* x_53; lean_object* x_54; lean_object* x_55;
x_51 = lean_array_fget(x_1, x_11);
x_52 = lean_ctor_get(x_23, 1);
lean_inc(x_52);
lean_dec(x_23);
x_53 = l_Lean_instInhabitedName;
x_54 = lean_array_get(x_53, x_52, x_19);
lean_dec(x_52);
lean_inc(x_17);
lean_inc(x_16);
lean_inc(x_15);
lean_inc(x_14);
x_55 = l_Lean_Meta_clear(x_25, x_54, x_14, x_15, x_16, x_17, x_18);
if (lean_obj_tag(x_55) == 0)
{
lean_object* x_56; lean_object* x_57; lean_object* x_58; lean_object* x_59; lean_object* x_60;
x_56 = lean_ctor_get(x_55, 0);
lean_inc(x_56);
x_57 = lean_ctor_get(x_55, 1);
lean_inc(x_57);
lean_dec(x_55);
lean_inc(x_4);
x_58 = l_Lean_Meta_FVarSubst_get(x_24, x_4);
x_59 = l_Lean_Expr_fvarId_x21(x_58);
lean_dec(x_58);
lean_inc(x_17);
lean_inc(x_16);
lean_inc(x_15);
lean_inc(x_14);
x_60 = l_Lean_Meta_clear(x_56, x_59, x_14, x_15, x_16, x_17, x_57);
if (lean_obj_tag(x_60) == 0)
{
/* Both clears succeeded: chain mkEqSymm into lambda__1 via ReaderT bind. */
lean_object* x_61; lean_object* x_62; lean_object* x_63; lean_object* x_64; lean_object* x_65; lean_object* x_66; lean_object* x_67;
x_61 = lean_ctor_get(x_60, 0);
lean_inc(x_61);
x_62 = lean_ctor_get(x_60, 1);
lean_inc(x_62);
lean_dec(x_60);
lean_inc(x_27);
x_63 = l_Lean_mkFVar(x_27);
x_64 = lean_alloc_closure((void*)(l_Lean_Meta_mkEqSymm), 6, 1);
lean_closure_set(x_64, 0, x_63);
lean_inc(x_2);
lean_inc(x_3);
lean_inc(x_61);
x_65 = lean_alloc_closure((void*)(l_Array_mapIdxM_map___at_Lean_Meta_caseArraySizes___spec__3___lambda__1), 12, 6);
lean_closure_set(x_65, 0, x_61);
lean_closure_set(x_65, 1, x_3);
lean_closure_set(x_65, 2, x_51);
lean_closure_set(x_65, 3, x_2);
lean_closure_set(x_65, 4, x_27);
lean_closure_set(x_65, 5, x_24);
x_66 = lean_alloc_closure((void*)(l_ReaderT_bind___at_Lean_Meta_instMonadLCtxMetaM___spec__2___rarg), 7, 2);
lean_closure_set(x_66, 0, x_64);
lean_closure_set(x_66, 1, x_65);
lean_inc(x_17);
lean_inc(x_16);
lean_inc(x_15);
lean_inc(x_14);
x_67 = l_Lean_Meta_withMVarContext___at_Lean_Meta_admit___spec__1___rarg(x_61, x_66, x_14, x_15, x_16, x_17, x_62);
if (lean_obj_tag(x_67) == 0)
{
lean_object* x_68; lean_object* x_69; lean_object* x_70; lean_object* x_71;
x_68 = lean_ctor_get(x_67, 0);
lean_inc(x_68);
x_69 = lean_ctor_get(x_67, 1);
lean_inc(x_69);
lean_dec(x_67);
x_70 = lean_nat_add(x_11, x_21);
lean_dec(x_11);
x_71 = lean_array_push(x_13, x_68);
x_10 = x_22;
x_11 = x_70;
x_12 = lean_box(0);
x_13 = x_71;
x_18 = x_69;
goto _start;
}
else
{
uint8_t x_73;
lean_dec(x_22);
lean_dec(x_17);
lean_dec(x_16);
lean_dec(x_15);
lean_dec(x_14);
lean_dec(x_13);
lean_dec(x_11);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
x_73 = !lean_is_exclusive(x_67);
if (x_73 == 0)
{
return x_67;
}
else
{
lean_object* x_74; lean_object* x_75; lean_object* x_76;
x_74 = lean_ctor_get(x_67, 0);
x_75 = lean_ctor_get(x_67, 1);
lean_inc(x_75);
lean_inc(x_74);
lean_dec(x_67);
x_76 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_76, 0, x_74);
lean_ctor_set(x_76, 1, x_75);
return x_76;
}
}
}
else
{
uint8_t x_77;
lean_dec(x_51);
lean_dec(x_27);
lean_dec(x_24);
lean_dec(x_22);
lean_dec(x_17);
lean_dec(x_16);
lean_dec(x_15);
lean_dec(x_14);
lean_dec(x_13);
lean_dec(x_11);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
x_77 = !lean_is_exclusive(x_60);
if (x_77 == 0)
{
return x_60;
}
else
{
lean_object* x_78; lean_object* x_79; lean_object* x_80;
x_78 = lean_ctor_get(x_60, 0);
x_79 = lean_ctor_get(x_60, 1);
lean_inc(x_79);
lean_inc(x_78);
lean_dec(x_60);
x_80 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_80, 0, x_78);
lean_ctor_set(x_80, 1, x_79);
return x_80;
}
}
}
else
{
uint8_t x_81;
lean_dec(x_51);
lean_dec(x_27);
lean_dec(x_24);
lean_dec(x_22);
lean_dec(x_17);
lean_dec(x_16);
lean_dec(x_15);
lean_dec(x_14);
lean_dec(x_13);
lean_dec(x_11);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
x_81 = !lean_is_exclusive(x_55);
if (x_81 == 0)
{
return x_55;
}
else
{
lean_object* x_82; lean_object* x_83; lean_object* x_84;
x_82 = lean_ctor_get(x_55, 0);
x_83 = lean_ctor_get(x_55, 1);
lean_inc(x_83);
lean_inc(x_82);
lean_dec(x_55);
x_84 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_84, 0, x_82);
lean_ctor_set(x_84, 1, x_83);
return x_84;
}
}
}
}
else
{
/* Fuel exhausted: return the accumulator x_13 in a success (tag 0) pair. */
lean_object* x_85;
lean_dec(x_17);
lean_dec(x_16);
lean_dec(x_15);
lean_dec(x_14);
lean_dec(x_11);
lean_dec(x_10);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
x_85 = lean_alloc_ctor(0, 2, 0);
lean_ctor_set(x_85, 0, x_13);
lean_ctor_set(x_85, 1, x_18);
return x_85;
}
}
}
/* Cached constant: the Name built from two precomputed name parts; computed
 * once at module initialization and marked persistent. */
static lean_object* _init_l_Lean_Meta_caseArraySizes___lambda__1___closed__1() {
_start:
{
lean_object* x_1; lean_object* x_2; lean_object* x_3;
x_1 = l_Array_term_____x5b___x3a___x5d___closed__2;
x_2 = l_Array_myMacro____x40_Init_Data_Array_Subarray___hyg_969____closed__10;
x_3 = lean_name_mk_string(x_1, x_2);
return x_3;
}
}
/* Cached constant: the literal string "aSize" (hypothesis base name). */
static lean_object* _init_l_Lean_Meta_caseArraySizes___lambda__1___closed__2() {
_start:
{
lean_object* x_1;
x_1 = lean_mk_string("aSize");
return x_1;
}
}
/* Cached constant: the Name `aSize` (anonymous prefix + "aSize" component). */
static lean_object* _init_l_Lean_Meta_caseArraySizes___lambda__1___closed__3() {
_start:
{
lean_object* x_1; lean_object* x_2; lean_object* x_3;
x_1 = lean_box(0);
x_2 = l_Lean_Meta_caseArraySizes___lambda__1___closed__2;
x_3 = lean_name_mk_string(x_1, x_2);
return x_3;
}
}
/* Auto-generated C for the continuation inside Lean.Meta.caseArraySizes:
 * builds `Array.size a`, asserts `aSize : Nat := Array.size a`, introduces
 * the two new hypotheses, runs caseValues on the size, then maps spec__3
 * over the resulting subgoals. x_7..x_10 are the MetaM state, x_11 the world.
 * Do not edit by hand; lean_inc/lean_dec placement is exact refcounting. */
lean_object* l_Lean_Meta_caseArraySizes___lambda__1(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7, lean_object* x_8, lean_object* x_9, lean_object* x_10, lean_object* x_11) {
_start:
{
lean_object* x_12; lean_object* x_13; lean_object* x_14; lean_object* x_15;
x_12 = l_Lean_mkOptionalNode___closed__2;
lean_inc(x_1);
x_13 = lean_array_push(x_12, x_1);
x_14 = l_Lean_Meta_caseArraySizes___lambda__1___closed__1;
lean_inc(x_10);
lean_inc(x_9);
lean_inc(x_8);
lean_inc(x_7);
/* mkAppM: apply the cached head constant to the singleton argument array. */
x_15 = l_Lean_Meta_mkAppM(x_14, x_13, x_7, x_8, x_9, x_10, x_11);
if (lean_obj_tag(x_15) == 0)
{
lean_object* x_16; lean_object* x_17; lean_object* x_18; lean_object* x_19; lean_object* x_20; lean_object* x_21;
x_16 = lean_ctor_get(x_15, 0);
lean_inc(x_16);
x_17 = lean_ctor_get(x_15, 1);
lean_inc(x_17);
lean_dec(x_15);
x_18 = l_Lean_Meta_caseArraySizes___lambda__1___closed__3;
x_19 = l_Lean_Literal_type___closed__3;
x_20 = l_Lean_Meta_caseValue___closed__2;
lean_inc(x_10);
lean_inc(x_9);
lean_inc(x_8);
lean_inc(x_7);
x_21 = l_Lean_Meta_assertExt(x_2, x_18, x_19, x_16, x_20, x_7, x_8, x_9, x_10, x_17);
if (lean_obj_tag(x_21) == 0)
{
lean_object* x_22; lean_object* x_23; uint8_t x_24; lean_object* x_25;
x_22 = lean_ctor_get(x_21, 0);
lean_inc(x_22);
x_23 = lean_ctor_get(x_21, 1);
lean_inc(x_23);
lean_dec(x_21);
x_24 = 0;
lean_inc(x_10);
lean_inc(x_9);
lean_inc(x_8);
lean_inc(x_7);
/* First intro1Core: pull in the `aSize` hypothesis. */
x_25 = l_Lean_Meta_intro1Core(x_22, x_24, x_7, x_8, x_9, x_10, x_23);
if (lean_obj_tag(x_25) == 0)
{
lean_object* x_26; lean_object* x_27; lean_object* x_28; lean_object* x_29; lean_object* x_30;
x_26 = lean_ctor_get(x_25, 0);
lean_inc(x_26);
x_27 = lean_ctor_get(x_25, 1);
lean_inc(x_27);
lean_dec(x_25);
x_28 = lean_ctor_get(x_26, 0);
lean_inc(x_28);
x_29 = lean_ctor_get(x_26, 1);
lean_inc(x_29);
lean_dec(x_26);
lean_inc(x_10);
lean_inc(x_9);
lean_inc(x_8);
lean_inc(x_7);
/* Second intro1Core: pull in the defining equation hypothesis. */
x_30 = l_Lean_Meta_intro1Core(x_29, x_24, x_7, x_8, x_9, x_10, x_27);
if (lean_obj_tag(x_30) == 0)
{
lean_object* x_31; lean_object* x_32; lean_object* x_33; lean_object* x_34; lean_object* x_35; size_t x_36; size_t x_37; lean_object* x_38; lean_object* x_39; lean_object* x_40; lean_object* x_41;
x_31 = lean_ctor_get(x_30, 0);
lean_inc(x_31);
x_32 = lean_ctor_get(x_30, 1);
lean_inc(x_32);
lean_dec(x_30);
x_33 = lean_ctor_get(x_31, 0);
lean_inc(x_33);
x_34 = lean_ctor_get(x_31, 1);
lean_inc(x_34);
lean_dec(x_31);
x_35 = lean_array_get_size(x_3);
x_36 = lean_usize_of_nat(x_35);
x_37 = 0;
lean_inc(x_3);
x_38 = x_3;
x_39 = l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__1(x_36, x_37, x_38);
x_40 = x_39;
lean_inc(x_10);
lean_inc(x_9);
lean_inc(x_8);
lean_inc(x_7);
lean_inc(x_28);
/* Case split on the concrete size values, then post-process each subgoal. */
x_41 = l_Lean_Meta_caseValues(x_34, x_28, x_40, x_4, x_7, x_8, x_9, x_10, x_32);
if (lean_obj_tag(x_41) == 0)
{
lean_object* x_42; lean_object* x_43; lean_object* x_44; lean_object* x_45; lean_object* x_46; lean_object* x_47;
x_42 = lean_ctor_get(x_41, 0);
lean_inc(x_42);
x_43 = lean_ctor_get(x_41, 1);
lean_inc(x_43);
lean_dec(x_41);
x_44 = lean_array_get_size(x_42);
x_45 = lean_mk_empty_array_with_capacity(x_44);
x_46 = lean_unsigned_to_nat(0u);
x_47 = l_Array_mapIdxM_map___at_Lean_Meta_caseArraySizes___spec__3(x_3, x_5, x_1, x_28, x_33, x_35, x_37, x_42, x_42, x_44, x_46, lean_box(0), x_45, x_7, x_8, x_9, x_10, x_43);
lean_dec(x_42);
lean_dec(x_35);
lean_dec(x_3);
return x_47;
}
else
{
uint8_t x_48;
lean_dec(x_35);
lean_dec(x_33);
lean_dec(x_28);
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_5);
lean_dec(x_3);
lean_dec(x_1);
x_48 = !lean_is_exclusive(x_41);
if (x_48 == 0)
{
return x_41;
}
else
{
lean_object* x_49; lean_object* x_50; lean_object* x_51;
x_49 = lean_ctor_get(x_41, 0);
x_50 = lean_ctor_get(x_41, 1);
lean_inc(x_50);
lean_inc(x_49);
lean_dec(x_41);
x_51 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_51, 0, x_49);
lean_ctor_set(x_51, 1, x_50);
return x_51;
}
}
}
else
{
uint8_t x_52;
lean_dec(x_28);
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_1);
x_52 = !lean_is_exclusive(x_30);
if (x_52 == 0)
{
return x_30;
}
else
{
lean_object* x_53; lean_object* x_54; lean_object* x_55;
x_53 = lean_ctor_get(x_30, 0);
x_54 = lean_ctor_get(x_30, 1);
lean_inc(x_54);
lean_inc(x_53);
lean_dec(x_30);
x_55 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_55, 0, x_53);
lean_ctor_set(x_55, 1, x_54);
return x_55;
}
}
}
else
{
uint8_t x_56;
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_1);
x_56 = !lean_is_exclusive(x_25);
if (x_56 == 0)
{
return x_25;
}
else
{
lean_object* x_57; lean_object* x_58; lean_object* x_59;
x_57 = lean_ctor_get(x_25, 0);
x_58 = lean_ctor_get(x_25, 1);
lean_inc(x_58);
lean_inc(x_57);
lean_dec(x_25);
x_59 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_59, 0, x_57);
lean_ctor_set(x_59, 1, x_58);
return x_59;
}
}
}
else
{
uint8_t x_60;
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_1);
x_60 = !lean_is_exclusive(x_21);
if (x_60 == 0)
{
return x_21;
}
else
{
lean_object* x_61; lean_object* x_62; lean_object* x_63;
x_61 = lean_ctor_get(x_21, 0);
x_62 = lean_ctor_get(x_21, 1);
lean_inc(x_62);
lean_inc(x_61);
lean_dec(x_21);
x_63 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_63, 0, x_61);
lean_ctor_set(x_63, 1, x_62);
return x_63;
}
}
}
else
{
uint8_t x_64;
lean_dec(x_10);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_7);
lean_dec(x_5);
lean_dec(x_4);
lean_dec(x_3);
lean_dec(x_2);
lean_dec(x_1);
x_64 = !lean_is_exclusive(x_15);
if (x_64 == 0)
{
return x_15;
}
else
{
lean_object* x_65; lean_object* x_66; lean_object* x_67;
x_65 = lean_ctor_get(x_15, 0);
x_66 = lean_ctor_get(x_15, 1);
lean_inc(x_66);
lean_inc(x_65);
lean_dec(x_15);
x_67 = lean_alloc_ctor(1, 2, 0);
lean_ctor_set(x_67, 0, x_65);
lean_ctor_set(x_67, 1, x_66);
return x_67;
}
}
}
}
/* Entry point for Lean.Meta.caseArraySizes: inside the metavariable context of
 * goal x_1, binds getArrayArgType of the fvar x_2 into lambda__1 via ReaderT
 * bind. Auto-generated; do not edit by hand. */
lean_object* l_Lean_Meta_caseArraySizes(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7, lean_object* x_8, lean_object* x_9, lean_object* x_10) {
_start:
{
lean_object* x_11; lean_object* x_12; lean_object* x_13; lean_object* x_14; lean_object* x_15;
x_11 = l_Lean_mkFVar(x_2);
lean_inc(x_11);
x_12 = lean_alloc_closure((void*)(l_Lean_Meta_getArrayArgType), 6, 1);
lean_closure_set(x_12, 0, x_11);
lean_inc(x_1);
/* Partially apply lambda__1 with 5 of its 11 arguments. */
x_13 = lean_alloc_closure((void*)(l_Lean_Meta_caseArraySizes___lambda__1___boxed), 11, 5);
lean_closure_set(x_13, 0, x_11);
lean_closure_set(x_13, 1, x_1);
lean_closure_set(x_13, 2, x_3);
lean_closure_set(x_13, 3, x_5);
lean_closure_set(x_13, 4, x_4);
x_14 = lean_alloc_closure((void*)(l_ReaderT_bind___at_Lean_Meta_instMonadLCtxMetaM___spec__2___rarg), 7, 2);
lean_closure_set(x_14, 0, x_12);
lean_closure_set(x_14, 1, x_13);
x_15 = l_Lean_Meta_withMVarContext___at_Lean_Meta_admit___spec__1___rarg(x_1, x_14, x_6, x_7, x_8, x_9, x_10);
return x_15;
}
}
/* Boxed wrapper: unboxes the two usize arguments before delegating to the
 * unboxed implementation of spec__1. */
lean_object* l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__1___boxed(lean_object* x_1, lean_object* x_2, lean_object* x_3) {
_start:
{
size_t x_4; size_t x_5; lean_object* x_6;
x_4 = lean_unbox_usize(x_1);
lean_dec(x_1);
x_5 = lean_unbox_usize(x_2);
lean_dec(x_2);
x_6 = l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__1(x_4, x_5, x_3);
return x_6;
}
}
/* Boxed wrapper: unboxes the two usize arguments and releases the borrowed
 * first argument after delegating to spec__2. */
lean_object* l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__2___boxed(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4) {
_start:
{
size_t x_5; size_t x_6; lean_object* x_7;
x_5 = lean_unbox_usize(x_2);
lean_dec(x_2);
x_6 = lean_unbox_usize(x_3);
lean_dec(x_3);
x_7 = l_Array_mapMUnsafe_map___at_Lean_Meta_caseArraySizes___spec__2(x_1, x_5, x_6, x_4);
lean_dec(x_1);
return x_7;
}
}
/* Boxed wrapper for spec__3: takes all 18 arguments as an array (the closure
 * ABI caps positional args), unboxes the usize at position 7, delegates, then
 * releases the arguments the callee borrows rather than consumes. */
lean_object* l_Array_mapIdxM_map___at_Lean_Meta_caseArraySizes___spec__3___boxed(lean_object** _args) {
lean_object* x_1 = _args[0];
lean_object* x_2 = _args[1];
lean_object* x_3 = _args[2];
lean_object* x_4 = _args[3];
lean_object* x_5 = _args[4];
lean_object* x_6 = _args[5];
lean_object* x_7 = _args[6];
lean_object* x_8 = _args[7];
lean_object* x_9 = _args[8];
lean_object* x_10 = _args[9];
lean_object* x_11 = _args[10];
lean_object* x_12 = _args[11];
lean_object* x_13 = _args[12];
lean_object* x_14 = _args[13];
lean_object* x_15 = _args[14];
lean_object* x_16 = _args[15];
lean_object* x_17 = _args[16];
lean_object* x_18 = _args[17];
_start:
{
size_t x_19; lean_object* x_20;
x_19 = lean_unbox_usize(x_7);
lean_dec(x_7);
x_20 = l_Array_mapIdxM_map___at_Lean_Meta_caseArraySizes___spec__3(x_1, x_2, x_3, x_4, x_5, x_6, x_19, x_8, x_9, x_10, x_11, x_12, x_13, x_14, x_15, x_16, x_17, x_18);
lean_dec(x_9);
lean_dec(x_8);
lean_dec(x_6);
lean_dec(x_1);
return x_20;
}
}
/* Boxed wrapper for lambda__1: delegates directly and releases the borrowed
 * sixth argument afterwards. */
lean_object* l_Lean_Meta_caseArraySizes___lambda__1___boxed(lean_object* x_1, lean_object* x_2, lean_object* x_3, lean_object* x_4, lean_object* x_5, lean_object* x_6, lean_object* x_7, lean_object* x_8, lean_object* x_9, lean_object* x_10, lean_object* x_11) {
_start:
{
lean_object* x_12;
x_12 = l_Lean_Meta_caseArraySizes___lambda__1(x_1, x_2, x_3, x_4, x_5, x_6, x_7, x_8, x_9, x_10, x_11);
lean_dec(x_6);
return x_12;
}
}
lean_object* initialize_Init(lean_object*);
lean_object* initialize_Lean_Meta_Tactic_Assert(lean_object*);
lean_object* initialize_Lean_Meta_Match_CaseValues(lean_object*);
static bool _G_initialized = false;
/* Module initializer: idempotent (guarded by _G_initialized). Initializes the
 * imported modules in dependency order, then computes every cached constant
 * of this module and marks it persistent so the GC never frees it. Returns an
 * IO result; propagates the first initialization error unchanged. */
lean_object* initialize_Lean_Meta_Match_CaseArraySizes(lean_object* w) {
lean_object * res;
if (_G_initialized) return lean_io_result_mk_ok(lean_box(0));
_G_initialized = true;
res = initialize_Init(lean_io_mk_world());
if (lean_io_result_is_error(res)) return res;
lean_dec_ref(res);
res = initialize_Lean_Meta_Tactic_Assert(lean_io_mk_world());
if (lean_io_result_is_error(res)) return res;
lean_dec_ref(res);
res = initialize_Lean_Meta_Match_CaseValues(lean_io_mk_world());
if (lean_io_result_is_error(res)) return res;
lean_dec_ref(res);
l_Lean_Meta_CaseArraySizesSubgoal_elems___default = _init_l_Lean_Meta_CaseArraySizesSubgoal_elems___default();
lean_mark_persistent(l_Lean_Meta_CaseArraySizesSubgoal_elems___default);
l_Lean_Meta_CaseArraySizesSubgoal_diseqs___default = _init_l_Lean_Meta_CaseArraySizesSubgoal_diseqs___default();
lean_mark_persistent(l_Lean_Meta_CaseArraySizesSubgoal_diseqs___default);
l_Lean_Meta_CaseArraySizesSubgoal_subst___default = _init_l_Lean_Meta_CaseArraySizesSubgoal_subst___default();
lean_mark_persistent(l_Lean_Meta_CaseArraySizesSubgoal_subst___default);
l_Lean_Meta_instInhabitedCaseArraySizesSubgoal___closed__1 = _init_l_Lean_Meta_instInhabitedCaseArraySizesSubgoal___closed__1();
lean_mark_persistent(l_Lean_Meta_instInhabitedCaseArraySizesSubgoal___closed__1);
l_Lean_Meta_instInhabitedCaseArraySizesSubgoal = _init_l_Lean_Meta_instInhabitedCaseArraySizesSubgoal();
lean_mark_persistent(l_Lean_Meta_instInhabitedCaseArraySizesSubgoal);
l_Lean_Meta_getArrayArgType___closed__1 = _init_l_Lean_Meta_getArrayArgType___closed__1();
lean_mark_persistent(l_Lean_Meta_getArrayArgType___closed__1);
l_Lean_Meta_getArrayArgType___closed__2 = _init_l_Lean_Meta_getArrayArgType___closed__2();
lean_mark_persistent(l_Lean_Meta_getArrayArgType___closed__2);
l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__1 = _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__1();
lean_mark_persistent(l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__1);
l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__2 = _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__2();
lean_mark_persistent(l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_mkArrayGetLit___closed__2);
l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__1 = _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__1();
lean_mark_persistent(l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__1);
l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__2 = _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__2();
lean_mark_persistent(l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__2);
l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__3 = _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__3();
lean_mark_persistent(l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__3);
l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__4 = _init_l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__4();
lean_mark_persistent(l___private_Lean_Meta_Match_CaseArraySizes_0__Lean_Meta_introArrayLit_loop___closed__4);
l_Lean_Meta_caseArraySizes___lambda__1___closed__1 = _init_l_Lean_Meta_caseArraySizes___lambda__1___closed__1();
lean_mark_persistent(l_Lean_Meta_caseArraySizes___lambda__1___closed__1);
l_Lean_Meta_caseArraySizes___lambda__1___closed__2 = _init_l_Lean_Meta_caseArraySizes___lambda__1___closed__2();
lean_mark_persistent(l_Lean_Meta_caseArraySizes___lambda__1___closed__2);
l_Lean_Meta_caseArraySizes___lambda__1___closed__3 = _init_l_Lean_Meta_caseArraySizes___lambda__1___closed__3();
lean_mark_persistent(l_Lean_Meta_caseArraySizes___lambda__1___closed__3);
return lean_io_result_mk_ok(lean_box(0));
}
#ifdef __cplusplus
}
#endif
|
def resource_allocation_algorithm(resources):
    """Return a parallel list of booleans marking which resources are needed.

    Args:
        resources: Sequence of status strings; an entry equal to "needed"
            is marked for allocation.

    Returns:
        list[bool]: True at each position whose resource is "needed",
        False elsewhere. Empty input yields an empty list.
    """
    # One comprehension replaces the init-to-False loop plus the
    # set-to-True loop: each slot is simply the equality test itself.
    return [resource == "needed" for resource in resources]
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.atlas.json ;
import java.math.BigDecimal ;
import java.util.ArrayDeque ;
import java.util.Deque ;
import java.util.Objects;
import org.apache.jena.atlas.logging.Log ;
/* Builder pattern for JSON.
 * The JsonValue built can be an array or object at the outermost level, but not an atomic value.
 */
/**
 * Incremental builder for a {@code JsonValue} whose outermost value is an
 * object or an array. Call {@code startObject()/finishObject()} and
 * {@code startArray()/finishArray()} in matched pairs, {@code key(...)} before
 * each value inside an object, and {@code build()} once the outermost
 * container has been finished.
 */
public class JsonBuilder {
    // Completed outermost value; null until the outer container is finished.
    private JsonValue builtValue = null ;

    // Marker used by the no-argument start/finish variants.
    private static final String NoMarker = "" ;
    // One marker per open object; popped and checked on finishObject.
    private Deque<String> markers = new ArrayDeque<>() ;
    private Deque<JsonArray> arrays = new ArrayDeque<>() ;
    private Deque<JsonObject> objects = new ArrayDeque<>() ;

    private static enum State {
        ARRAY, OBJECT
    }

    // Parallel stack recording whether each open container is an array or object.
    private Deque<State> stack = new ArrayDeque<>() ;
    // The depth of this stack is the object depth. key: { key: ... }
    private Deque<String> keys = new ArrayDeque<>() ;

    static JsonBuilder create() { return new JsonBuilder() ; }

    public JsonBuilder() {
    }

    /**
     * Return the finished value.
     * @throws JsonException if nothing was started, or the outermost
     *         container is still open.
     */
    public JsonValue build() {
        if ( builtValue == null ) {
            if ( objects.isEmpty() && arrays.isEmpty() )
                throw new JsonException("Alignment error: no object or array started") ;
            throw new JsonException("Alignment error: unfinished outer object or array") ;
        }
        return builtValue ;
    }

    /** Discard all state so the builder can be reused from scratch. */
    public void reset() {
        builtValue = null ;
        stack.clear() ;
        objects.clear() ;
        keys.clear();
        arrays.clear();
        // BUG FIX: markers were previously left behind on reset, so a reused
        // builder could fail finishObject() marker alignment with stale markers.
        markers.clear();
    }

    public JsonBuilder startObject() { return startObject(NoMarker) ; }

    /**
     * Open a new JSON object; {@code startMarker} must be repeated by the
     * matching {@code finishObject(String)} call.
     */
    public JsonBuilder startObject(String startMarker) {
        markers.push(startMarker);
        objects.push(new JsonObject()) ;
        stack.push(State.OBJECT) ;
        return this ;
    }

    public JsonBuilder finishObject() { return finishObject(NoMarker) ; }

    /**
     * Close the innermost open object and attach it to its parent container
     * (or record it as the built value when it is outermost).
     * @throws JsonException on nesting or marker mismatch.
     */
    public JsonBuilder finishObject(String finishMarker) {
        if ( stack.isEmpty() )
            throw new JsonException("Alignment error : already built outer most object or array") ;
        State state = stack.pop() ;
        if ( state != State.OBJECT )
            throw new JsonException("JSON build error : not in an object") ;
        JsonValue value = objects.pop() ;
        maybeObjectOrArray(value) ;
        if ( stack.isEmpty() )
            builtValue = value ;
        String startMarker = markers.pop();
        if ( ! Objects.equals(startMarker, finishMarker) )
            throw new JsonException("JSON build error : start/finish alignment error: start="+startMarker+" finish="+finishMarker) ;
        return this ;
    }

    /** Open a new JSON array. */
    public JsonBuilder startArray() {
        arrays.push(new JsonArray()) ;
        stack.push(State.ARRAY) ;
        return this ;
    }

    /**
     * Close the innermost open array and attach it to its parent container
     * (or record it as the built value when it is outermost).
     */
    public JsonBuilder finishArray() {
        if ( stack.isEmpty() )
            throw new JsonException("Alignment error : already built outer most object or array") ;
        State state = stack.pop() ;
        if ( state != State.ARRAY )
            throw new JsonException("JSON build error : not in an array") ;
        JsonValue value = arrays.pop() ;
        maybeObjectOrArray(value) ;
        if ( stack.isEmpty() )
            builtValue = value ;
        return this ;
    }

    /** Record the key for the next value added to the current object. */
    public JsonBuilder key(String key) {
        State state = stack.peek() ;
        if ( state != State.OBJECT )
            throw new JsonException("JSON build error : not in an object") ;
        keys.push(key) ;
        return this ;
    }

    // Attach a value to the enclosing container, if any: under the pending key
    // when inside an object, appended when inside an array.
    private void maybeObjectOrArray(JsonValue value) {
        if ( stack.size() == 0 )
            // Outermost value: nothing to attach to; callers record it instead.
            return ;
        switch (stack.peek()) {
            case OBJECT : {
                String k = keys.pop() ;
                JsonObject obj = objects.peek() ;
                if ( obj.hasKey(k) )
                    Log.warn(this, "Duplicate key '" + k + "' for object") ;
                obj.put(k, value) ;
                return ;
            }
            case ARRAY : {
                arrays.peek().add(value) ;
                return ;
            }
        }
    }

    public JsonBuilder value(JsonValue v) {
        maybeObjectOrArray(v) ;
        return this ;
    }

    public JsonBuilder value(boolean b) {
        JsonValue value = new JsonBoolean(b) ;
        maybeObjectOrArray(value) ;
        return this ;
    }

    public JsonBuilder value(BigDecimal decimal) {
        JsonValue value = JsonNumber.value(decimal) ;
        maybeObjectOrArray(value) ;
        return this ;
    }

    public JsonBuilder value(double d) {
        JsonValue value = JsonNumber.value(d) ;
        maybeObjectOrArray(value) ;
        return this ;
    }

    public JsonBuilder value(long val) {
        JsonValue value = JsonNumber.value(val) ;
        maybeObjectOrArray(value) ;
        return this ;
    }

    public JsonBuilder valueNull() {
        JsonValue value = JsonNull.instance ;
        maybeObjectOrArray(value) ;
        return this ;
    }

    public JsonBuilder value(String string) {
        JsonValue value = new JsonString(string) ;
        maybeObjectOrArray(value) ;
        return this ;
    }
}
|
import pybrake
from ..config import ENV, AIRBRAKE_PROJECT_KEY, AIRBRAKE_PROJECT_ID
class ExceptionTracker:
    """Best-effort exception reporter backed by Airbrake (pybrake).

    When Airbrake credentials are not configured, ``notify`` is a no-op, so
    callers never need to guard their calls.
    """

    # Class-level default; replaced per-instance when credentials exist.
    airbrake_notifier = None

    def __init__(self):
        # Only create a notifier when both credentials are configured.
        if AIRBRAKE_PROJECT_KEY and AIRBRAKE_PROJECT_ID:
            self.airbrake_notifier = pybrake.Notifier(
                project_id=AIRBRAKE_PROJECT_ID,
                project_key=AIRBRAKE_PROJECT_KEY,
                environment=ENV,
            )

    def notify(self, *args):
        """Report each argument to Airbrake; never raises.

        Previously the notifier-null check was re-evaluated inside the
        ``try`` on every iteration; the guard is hoisted so the loop body
        only wraps the call that can actually fail.
        """
        if not self.airbrake_notifier:
            return
        for error in args:
            try:
                self.airbrake_notifier.notify(error)
            except Exception as e:  # reporting must never crash the caller
                print(e)
|
import React from 'react';
import ReactDOM from 'react-dom';
import axios from 'axios';
class App extends React.Component {
constructor() {
super();
this.state = {
tweets: []
};
}
componentDidMount() {
axios.get('/api/tweets')
.then(response => {
this.setState({
tweets: response.data
});
})
.catch(error => {
console.log(error);
});
}
handleSearch(e) {
let searchTerm = e.target.value;
axios.get('/api/tweets?search=' + searchTerm)
.then(response => {
this.setState({
tweets: response.data
});
});
}
render() {
return (
<div>
<input type="text" onChange={this.handleSearch.bind(this)} />
<div>
{this.state.tweets.map(tweet => (
<div key={tweet.id}>{tweet.text}</div>
))}
</div>
</div>
);
}
}
ReactDOM.render(<App />, document.getElementById('root')); |
<gh_stars>1000+
/**
* @license Copyright (c) 2003-2021, CKSource - <NAME>. All rights reserved.
* For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
/* globals console:false, document, window */
import ClassicEditor from '../../src/classiceditor';
import Enter from '@ckeditor/ckeditor5-enter/src/enter';
import Typing from '@ckeditor/ckeditor5-typing/src/typing';
import Heading from '@ckeditor/ckeditor5-heading/src/heading';
import Paragraph from '@ckeditor/ckeditor5-paragraph/src/paragraph';
import Undo from '@ckeditor/ckeditor5-undo/src/undo';
import Bold from '@ckeditor/ckeditor5-basic-styles/src/bold';
import Italic from '@ckeditor/ckeditor5-basic-styles/src/italic';
window.editors = [];
let counter = 1;
const container = document.querySelector( '.container' );
// Create one more ClassicEditor instance with inline data (numbered by the
// shared counter), register it in window.editors, and append its UI to the
// page container. Errors are logged, not rethrown.
function initEditor() {
ClassicEditor
.create( `<h2>Hello world! #${ counter }</h2><p>This is an editor instance.</p>`, {
plugins: [ Enter, Typing, Paragraph, Undo, Heading, Bold, Italic ],
toolbar: [ 'heading', '|', 'bold', 'italic', 'undo', 'redo' ]
} )
.then( editor => {
// Only bump the counter once creation actually succeeded.
counter += 1;
window.editors.push( editor );
container.appendChild( editor.ui.element );
} )
.catch( err => {
console.error( err.stack );
} );
}
// Destroy every tracked editor instance and remove its DOM element once
// destruction completes, then reset the registry and the counter.
// NOTE(review): the registry is cleared immediately while destroy() is still
// pending — presumably intentional so new editors can be created right away.
function destroyEditors() {
window.editors.forEach( editor => {
editor.destroy()
.then( () => {
editor.ui.element.remove();
} );
} );
window.editors = [];
counter = 1;
}
document.getElementById( 'initEditor' ).addEventListener( 'click', initEditor );
document.getElementById( 'destroyEditors' ).addEventListener( 'click', destroyEditors );
|
<reponame>madmax983/aura
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @description Simple Value Provider. Holds generic map of Key/Value Pairs
* @constructor
*/
function ObjectValueProvider(values) {
// Default to an empty map so get()/merge() never see undefined.
this.values = values || {};
}
/**
 * Returns the backing key/value map (a live reference, not a copy).
 */
ObjectValueProvider.prototype.getValues = function() {
return this.values;
};
/**
 * Merges all entries of the given map into the existing values in place,
 * overwriting existing keys (deep merge via $A.util.applyNotFromPrototype).
 *
 * @param values map of key/value pairs to merge in.
 */
ObjectValueProvider.prototype.merge = function(values) {
$A.util.applyNotFromPrototype(this.values, values, true, true);
};
/**
 * Resolves an expression against the stored values: a direct key hit wins,
 * otherwise the expression service resolves dotted paths into the map.
 *
 * @param {String} expression key or dotted expression to resolve.
 * @param {Function} callback optional; invoked with the resolved value.
 * @return {Object} The value referenced by the expression.
 */
ObjectValueProvider.prototype.get = function(expression, callback) {
var value = this.values[expression]||$A.expressionService.resolve(expression,this.values);
if( $A.util.isFunction(callback) ) {
callback(value);
}
return value;
};
Aura.Provider.ObjectValueProvider = ObjectValueProvider; |
<reponame>databook1/license-checker-action
const core = require('@actions/core');
const fs = require('fs');
const DEFAULT_SOURCE = 'package.json';
const SUPPORTED_SOURCES = new Set(['package.json', 'bower.json']);
const LICENSE_FILE = './.licenses.json';
// Entry point of the license-checker action: compares the direct dependencies
// declared in each configured manifest (package.json / bower.json) against the
// snapshot in ./.licenses.json and fails the workflow when packages were
// added, updated, or removed without updating the license file.
async function run() {
try {
// Comma-separated list of manifests to check; defaults to package.json.
const sources = (core.getInput('dependencies-sources') || DEFAULT_SOURCE).split(',');
const invalidSources = sources.filter(manager => !SUPPORTED_SOURCES.has(manager));
if (invalidSources.length) {
throw new Error(`Configuration contains invalid sources: ${invalidSources.join(', ')}`)
}
core.info(`Verifying licenses for the following sources: ${sources.join(', ')}`);
let licenses;
try {
fs.statSync(LICENSE_FILE);
core.info('Found license file');
// NOTE(review): a JSON parse failure here is also reported as
// "License file not found" — confirm whether that wording is intended.
licenses = JSON.parse(fs.readFileSync(LICENSE_FILE).toString('utf-8'));
} catch (err) {
throw new Error(`License file not found. Create a file with the licenses - ${LICENSE_FILE}`)
}
const added = [];
const updated = [];
const removed = [];
// Diff each manifest's dependencies against the license-file snapshot.
sources.forEach(source => {
const dependencies = getDirectDependencies(source);
const licenseFileDependencies = getLicenseFileDependencies(source, licenses);
Object.keys(dependencies).forEach(dependency => {
const version = dependencies[dependency];
const config = licenseFileDependencies[dependency];
if (!config) {
added.push(`${source}:${dependency}@${version}`);
} else if (config.version !== version) {
updated.push(`${source}:${dependency}@${version}`);
}
});
// Anything in the license file but no longer in the manifest was removed.
Object.keys(licenseFileDependencies).forEach(dependency => {
if (!dependencies[dependency]) {
removed.push(`${source}:${dependency}`);
}
})
});
if (added.length || updated.length || removed.length) {
let message = `For all the packages listed below, specify a correct license in the ${LICENSE_FILE} using the format:
{
[source]: {
dependencies: {
[package-name]: {
version: 'string',
license: 'string'
}
}
}
}\n\n`;
if (added.length) {
message += `The following packages were added: ${added.join(', ')}. Add them to the file.\n`
}
if (updated.length) {
message += `The following packages were updated: ${updated.join(', ')}. Update their version (and license if it changed) in the file.\n`
}
if (removed.length) {
message += `The following packages were deleted: ${removed.join(', ')}. Remove them from the file.`
}
throw new Error(message)
}
core.info('All dependencies look good!');
} catch (error) {
// Any error marks the action as failed with the error text.
core.setFailed(error.message);
}
}
// Parses the given manifest file (package.json / bower.json) and returns its
// direct `dependencies` map.
// FIX: returns an empty object when the manifest declares no `dependencies`
// key — previously this returned `undefined` and the caller's
// `Object.keys(dependencies)` threw a TypeError.
function getDirectDependencies(file) {
    const dependenciesJsonFile = JSON.parse(fs.readFileSync(file).toString('utf-8'))
    return dependenciesJsonFile.dependencies || {};
}
function getLicenseFileDependencies(packageManager, file) {
return file[packageManager] ? file[packageManager].dependencies : {}
}
run();
|
import java.util.ArrayList;
import java.util.List;
/**
 * Treasure-hunt demo: locates the start cell "X" on a walled grid, walks a
 * north/east/south probing pattern from it, and prints + marks the reachable
 * "treasure" cells.
 */
public class Main {
    public static void main(String[] args) {
        // Construct the map: "#" = wall, "." = open floor, "X" = start.
        String[][] map = {
                {"#", "#", "#", "#", "#", "#", "#", "#"},
                {"#", ".", ".", ".", ".", ".", ".", "#"},
                {"#", ".", "#", "#", "#", ".", ".", "#"},
                {"#", ".", ".", ".", "#", ".", "#", "#"},
                {"#", "X", "#", ".", ".", ".", ".", "#"},
                {"#", "#", "#", "#", "#", "#", "#", "#"}
        };
        // FIX: sized 2 to hold the (row, col) pair. The original allocated
        // int[1]; reading start[1] would have thrown if no "X" existed.
        int[] start = new int[2];
        // Locate the starting point.
        for (int x = 0; x < map.length; x++) {
            for (int y = 0; y < map[0].length; y++) {
                // FIX: compare string content with equals(), not "==" — the
                // reference check only worked by accident of literal interning.
                if ("X".equals(map[x][y])) {
                    start = new int[]{x, y};
                }
            }
        }
        List<int[]> treasure = new ArrayList<>();
        int colLength = map[0].length;
        int rowLength = map.length;
        int startRow = start[0];
        int startColumn = start[1];
        //holds previous coordinate for north, east, and south
        int prevNorthRow = 0, prevNorthCol = 0, prevEastRow = 0, prevEastCol = 0, prevSouthRow = 0, prevSouthCol = 0;
        //lets iterate from 1 to starting row
        for (int row = 1; row <= startRow; row++) {
            int currRow = startRow;
            int currCol = startColumn;
            //go north first
            boolean validNorth = false;
            int north = 0;
            while (north < rowLength - row) {
                north++;
                currRow = startRow - north;
                currCol = startColumn;
                validNorth = isValidLocation(map, currRow, currCol);
                if (validNorth && currRow != prevNorthRow && currCol != prevNorthCol) { //north location is valid and its not yet check
                    prevNorthRow = currRow;
                    prevNorthCol = currCol;
                    break; //break here, check east
                }
            }
            if (!validNorth) {
                continue;
            }
            //go east
            int east = 0;
            boolean validEast = false;
            while (east < colLength - currCol) {
                east++;
                currRow = startRow - north;
                currCol = startColumn + east;
                validEast = isValidLocation(map, currRow, currCol);
                if (validEast && currRow != prevEastRow && currCol != prevEastCol) { //east location is valid and its not yet check
                    prevEastRow = currRow;
                    prevEastCol = currCol;
                    break; //break here, check south
                }
            }
            if (!validEast) {
                continue;
            }
            //go south
            int south = 0;
            boolean validSouth = false;
            while (south < row) {
                south++;
                currRow += south;
                validSouth = isValidLocation(map, currRow, currCol);
                if (validSouth && currRow != prevSouthRow && currCol != prevSouthCol) { //south location is valid and its not yet check
                    prevSouthRow = currRow;
                    prevSouthCol = currCol;
                    break;
                }
            }
            if (validSouth) {
                treasure.add(new int[]{currRow, currCol});
            }
        }
        printMap(map, treasure);
    }

    /**
     * A location is valid when it lies inside the grid and is not a wall "#".
     */
    private static boolean isValidLocation(String[][] map, int row, int col) {
        if (row < 0 || row >= map.length || col < 0 || col >= map[0].length
                || map[row][col].equals("#")) {
            return false;
        }
        return true;
    }

    /**
     * Prints each treasure coordinate, marks it "$" on the map, then prints
     * the whole grid.
     */
    private static void printMap(String[][] map, List<int[]> treasure) {
        System.out.println("Treasure Location:");
        for (int[] coordinate : treasure) {
            System.out.printf("(%d, %d)\n", coordinate[0], coordinate[1]);
            map[coordinate[0]][coordinate[1]] = "$";
        }
        for (int x = 0; x < map.length; x++) {
            for (int y = 0; y < map[0].length; y++) {
                // FIX: the original re-checked treasure.contains(new int[]{x, y})
                // here, but List.contains on arrays compares by reference and
                // never matched; the cells were already marked "$" above, so
                // the dead check is removed and output is unchanged.
                System.out.print(map[x][y]);
            }
            System.out.println();
        }
    }
}
|
<gh_stars>0
import { Status } from './jwt/jwt.strategy';
// Request payload for the sign-up endpoint.
export interface SignUpDto {
  username: string;
  password: string;
  email: string;
  // Account status assigned at registration (declared in jwt.strategy).
  status: Status
}
// Request payload for the login endpoint.
export interface LoginDto {
  username: string
  password: string
}
|
import React from "react";
import SvgIcon from "@material-ui/core/SvgIcon";
export default function (props) {
return (
<SvgIcon {...props} viewBox="0 0 96 90">
<path d="M2.4 25.4H69C70.1 25.4 70.9 24.5 70.9 23.5V2.20005C70.9 1.10005 70 0.300049 69 0.300049H2.4C1.3 0.300049 0.5 1.20005 0.5 2.20005V23.4C0.5 24.5 1.4 25.4 2.4 25.4ZM4.4 4.20005H21.7V21.5H4.4V4.20005Z" />
<path d="M10.4001 49.1001C10.8001 49.5001 11.3001 49.7001 11.8001 49.7001C12.3001 49.7001 12.9001 49.5001 13.2001 49.1001L18.3001 43.6001C19.0001 42.8001 19.0001 41.6001 18.2001 40.9001C17.4001 40.2001 16.2001 40.2001 15.5001 41.0001L11.8001 45.0001L10.5001 43.6001C9.80014 42.8001 8.50014 42.8001 7.80014 43.5001C7.00014 44.2001 7.00014 45.5001 7.70014 46.2001L10.4001 49.1001Z" />
<path d="M93.6 32.5H2.4C1.3 32.5 0.5 33.4 0.5 34.4V55.6C0.5 56.7 1.4 57.5 2.4 57.5H93.5C94.6 57.5 95.4 56.6 95.4 55.6V34.4C95.5 33.3 94.6 32.5 93.6 32.5ZM21.7 53.7H4.4V36.3H21.7V53.7V53.7Z" />
<path d="M53.7 64.6001H2.4C1.3 64.6001 0.5 65.5001 0.5 66.5001V87.7001C0.5 88.8001 1.4 89.6001 2.4 89.6001H53.7C54.8 89.6001 55.6 88.7001 55.6 87.7001V66.6001C55.6 65.5001 54.8 64.6001 53.7 64.6001ZM21.7 85.8001H4.4V68.5001H21.7V85.8001V85.8001Z" />
</SvgIcon>
);
}
|
<reponame>lananh265/social-network
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.hangouts = void 0;
var hangouts = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill": "#000000",
"d": "M7.997 0c-3.816 0-6.909 3.094-6.909 6.909 0 3.616 3.294 6.547 6.909 6.547v2.544c4.197-2.128 6.916-5.556 6.916-9.091 0-3.816-3.1-6.909-6.916-6.909zM7 8c0 0.828-0.447 1.5-1 1.5v-1.5h-2v-3h3v3zM12 8c0 0.828-0.447 1.5-1 1.5v-1.5h-2v-3h3v3z"
}
}]
};
exports.hangouts = hangouts; |
<reponame>CN-3211/vt-cesium2.0
/*
* @Date: 2022-04-08 09:44:21
* @LastEditors: huangzh873
* @LastEditTime: 2022-04-08 09:47:12
* @FilePath: /vt-cesium2.0/src/components/jt-toolbar/config/contents/effect/groups/other/index.ts
*/
import {
Group,
ClickHandlerOption,
OnMountedOption,
ActiveOption,
} from '../../../Types'
import { OtherActionTypes } from '@/store/modules/jt-cesium-vue/modules/toolbar/modules/other/action-types'
import { OtherMutationTypes } from '@/store/modules/jt-cesium-vue/modules/toolbar/modules/other/mutation-types'
// Toolbar group "其他" ("Other"): miscellaneous scene toggles.
const view: Group = {
  name: '其他',
  items: [
    {
      // "深度检测" = depth test: toggles Cesium's depthTestAgainstTerrain.
      name: '深度检测',
      icon: '360',
      // Dispatch the Vuex action that flips the depth-test flag.
      clickHandler: (option: ClickHandlerOption): void => {
        option.store.dispatch(
          `jtCesiumVue/toolbar/other/${OtherActionTypes.SWITCH_DEPTH_TEST_AGAINST_TERRAIN}`,
          option
        )
      },
      // Button highlights while the flag is enabled in the store.
      active: (option: ActiveOption) =>
        option.store.state.jtCesiumVue.toolbar.other.depthTestAgainstTerrain,
      // On mount, seed the store from the live viewer's current setting so
      // the UI reflects reality rather than a default.
      onMounted: (option: OnMountedOption): void => {
        const viewer = option?.viewer
        if (viewer) {
          option.store.commit(
            `jtCesiumVue/toolbar/other/${OtherMutationTypes.SET_DEPTH_TEST_AGAINST_TERRAIN}`,
            viewer.scene.globe.depthTestAgainstTerrain
          )
        }
      },
    },
  ],
}
|
#!/usr/bin/env sh
# Install reflector, then rewrite the mirrorlist with the fastest US mirrors
# before pulling in the development tooling.
pacman -S --noconfirm reflector
reflector --threads 0 -c "United States" --sort rate --save /etc/pacman.d/mirrorlist
# Force-refresh the package databases against the new mirrorlist.
pacman -Syy
pacman -S --noconfirm yarn git htop
# apk update
# apk add yarn git htop bash
# Global JS build tooling.
yarn global add parcel-bundler gulp-cli
|
<gh_stars>0
import warning from 'warning'
import {deriveValuesFromFields} from './utils'
// Merge `incomingFields` into a copy of `fields`, re-running each touched
// field's validation rule (from `fieldsMeta`) against the evolving field set.
// Returns the new fields map; the inputs are never mutated.
export function updateFields(fieldsMeta, fields, incomingFields) {
  const result = {...fields}
  for (const [name, incoming] of Object.entries(incomingFields)) {
    warning(
      fieldsMeta[name],
      `The field \`${name}\` was assigned before being registered`,
    )
    // Placeholder entry so value derivation below sees the field.
    if (!result[name]) {
      result[name] = {}
    }
    const merged = {...fields[name], ...incoming}
    const {rule} = fieldsMeta[name]
    // Values are derived unconditionally (before the rule check) to keep the
    // call order identical for rule-less fields.
    const info = {
      value: merged.value,
      values: deriveValuesFromFields(result),
    }
    const outcome = rule ? rule(info) : null
    // The incoming patch is applied last so an explicit `error` wins over
    // the freshly computed validation result.
    Object.assign(merged, {error: outcome}, incoming)
    result[name] = merged
  }
  return result
}
export function resetFields() {}
|
// C++ pgm displays symmetric matrix given any elements in an array.
#include <iostream>
using namespace std;
// Space-efficient symmetric matrix: stores only the lower triangle
// (n*(n+1)/2 ints) and mirrors accesses across the diagonal.
// NOTE(review): raw owning pointer without copy ctor/assignment — copying a
// Symmetric would double-free; consider the Rule of Three.
class Symmetric
{
private:
    int *A, n, x;  // A: packed lower-triangle storage; n: dimension; x: input scratch cell
public:
    Symmetric(int n);               // allocates storage for an n x n matrix
    void create(int n);             // reads n*n elements from stdin
    void set(int i, int j, int x);  // 1-based store, mirrored across the diagonal
    int get(int i, int j);          // 1-based read, mirrored across the diagonal
    void display();                 // prints the full n x n matrix
    ~Symmetric();                   // releases the packed array
};
// Allocate packed lower-triangular storage for an n x n symmetric matrix.
Symmetric::Symmetric(int n) : A(new int[n * (n + 1) / 2]), n(n)
{
}
// Read n*n values from stdin, storing each via set(), which maps the
// (row, col) pair onto the packed triangle.
void Symmetric::create(int n)
{
    this->n = n; // redundant after the constructor, kept for identical behavior
    cout << "Enter the elements for " << n << "x" << n << " matrix: " << endl;
    for (int row = 1; row <= n; row++)
    {
        for (int col = 1; col <= n; col++)
        {
            cin >> x;
            set(row, col, x);
        }
    }
}
// Store x at (i, j) (1-based). Only one triangle is kept, so an upper-
// triangle coordinate (i < j) is redirected to its mirror (j, i).
void Symmetric::set(int i, int j, int x)
{
    if (i < j)
    {
        // upper triangle: write to the mirrored lower-triangle slot
        A[j * (j - 1) / 2 + i - 1] = x;
    }
    else
    {
        // lower triangle or diagonal: stored directly, row-major
        A[i * (i - 1) / 2 + j - 1] = x;
    }
}
// Fetch element (i, j) (1-based); mirrors (i, j) to (j, i) when i < j.
int Symmetric::get(int i, int j)
{
    return (i >= j) ? A[i * (i - 1) / 2 + j - 1]
                    : A[j * (j - 1) / 2 + i - 1];
}
// display the matrix elements in proper manner.
void Symmetric::display()
{
for (int i = 1; i <= n; i++)
{
for (int j = 1; j <= n; j++)
{
if (i >= j)
{
cout << A[(i * (i - 1)) / 2 + (j - 1)] << " ";
}
else
{
cout << A[j * (j - 1) / 2 + i - 1] << " ";
}
}
cout << endl;
}
}
// Destructor: release the packed array allocated in the constructor.
Symmetric::~Symmetric()
{
    delete[] A;
}
// Demo driver: read the dimension, fill the matrix from stdin, print it,
// then show a couple of sample lookups.
int main()
{
    int n;
    cout << "Enter the dimension for matrix: ";
    cin >> n;
    Symmetric S1(n);
    S1.create(n);
    S1.display();
    // Sample element lookups, guarded so we never index past the allocated
    // triangle: the original unconditionally read (3, 2) and (2, 1), which
    // is out of bounds for n < 3 (resp. n < 2).
    if (n >= 3)
    {
        cout << "The element at row 3 & column 2 is: " << S1.get(3, 2) << endl;
    }
    if (n >= 2)
    {
        cout << "The element at row 2 & column 1 is: " << S1.get(2, 1) << endl;
    }
    return 0;
}
<reponame>fengjinqi/website-react-webapp<filename>src/pages/my/others/OthersInfo.js
import React,{Fragment} from 'react'
import {ActivityIndicator, Icon, NavBar} from "antd-mobile";
import {connect} from 'react-redux'
import {getOhtersInfo} from "../../../api/user";
// Read-only profile page for another user. Fetches the user's public info by
// the id in the route params and renders it beneath a back-navigation bar.
class OthersInfo extends React.Component{
    constructor(props){
        super(props)
        this.state={
            type:true,  // loading flag: true until the profile request resolves
            info:null   // fetched profile payload (null while loading)
        }
    }
    componentDidMount() {
        // The user id arrives via the route params.
        this.getInfo(this.props.match.params.id)
    }
    // Fetch the profile, then flip the loading flag.
    // NOTE(review): no error handler — a failed request leaves the spinner up.
    getInfo(id){
        getOhtersInfo(id).then(res=>{
            this.setState({
                type:false,
                info:res.data
            })
        })
    }
    render() {
        let {info} = this.state
        // Builds the profile rows; returns undefined (renders nothing) while
        // info is still null.
        // NOTE(review): the file input's onChange calls this.ImagePicker,
        // which is not defined in this class — confirm it exists elsewhere or
        // remove the input.
        const row = ()=>{
            if (info){
                return (
                    <Fragment>
                        <li style={{position:'relative'}}>头像 {this.state.type?<input type="file" onChange={()=>this.ImagePicker(info[0])} ref={img=>this.lv=img} accept='image/*'/>:''}<img ref={img=>this.img=img} src={info.user_imag?info.user_imag:info.user_image?info.user_image:'https://www.fengjinqi.com/static/img/pc-icon.png'} alt=""/></li>
                        <li>昵称
                            <span>{info.username}</span>
                        </li>
                        <li>职位<span>{info.position}</span></li>
                        <li>简介 <span>{info.info}</span></li>
                    </Fragment>
                )
            }
        }
        return(
            <Fragment>
                <ActivityIndicator toast text="正在加载"animating={this.state.type} />
                <NavBar
                    mode="dark"
                    icon={<Icon type="left" />}
                    onLeftClick={() => this.props.history.goBack()}
                >个人信息</NavBar>
                <ul className='container info'>
                    {row()}
                </ul>
            </Fragment>
        )
    }
}
// No store state or dispatchers are mapped yet; connect() is kept so the
// component can grow store bindings without changing its export shape.
const mapState = (state)=>({
})
const mapDispatch =(dispatch)=>({
})
export default connect(mapState,mapDispatch)(OthersInfo)
<filename>.vn/tests/vn/api/auth.server.test.ts
/**
* We use an hackish solution to run Next.js API endpoints, however
* if this fall short, we might move to running the actual dev server + an inmemory mongo
*
* @see https://github.com/vercel/next.js/discussions/15166
* @see
*/
import { connectToDb } from "~/lib/api/mongoose/connection";
import { apiRoutes } from "~/lib/api/apiRoutes";
import { MongoMemoryServer } from "mongodb-memory-server"; // @see https://github.com/nodkz/mongodb-memory-server
import mongoose from "mongoose";
import request from "supertest";
import { spawn } from "child_process";
// Shared handles for the in-memory Mongo instance backing these tests.
let mongod;
let mongoUri;
let serverUrl = "http://localhost:3000";
beforeAll(async () => {
  // Spin up a dummy mongo server
  mongod = await MongoMemoryServer.create();
  mongoUri = mongod.getUri();
  // const port = await mongod.getPort();
  // const dbPath = await mongod.getDbPath();
  // const dbName = await mongod.getDbName();
  // Connect mongoose client
  //await mongoose.connect(mongoUri);
  await connectToDb(mongoUri);
  // TODO: spin up the Next server as well USING THE LOCAL MONGO_URI
});
afterAll(async () => {
  // remove the collection
  // disconnect the client
  await mongoose.disconnect();
  // stop mongo server
  await mongod.stop();
});
// Skipped until a Next.js dev server can be spun up from the test run
// (see the discussion links in the header comment).
test.skip("signup", async () => {
  const user = {
    email: "<EMAIL>",
    password: "<PASSWORD>",
  };
  //TODO: this tests expects the Next server to already run
  // we are not yet able to spin a server elegantly
  // @see https://github.com/vercel/next.js/discussions/28173
  const res = await request(serverUrl)
    .post(apiRoutes.account.signup.href)
    .send(user)
    .expect(200);
  expect(res.body).toEqual({ done: true });
});
test.skip("login", () => {
  // TODO
});
test.skip("change password while being logged in", () => {
  // TODO
});
|
#
# Defines general aliases and functions.
#
# Authors:
# Robby Russell <robby@planetargon.com>
# Suraj N. Kurapati <sunaku@gmail.com>
# Sorin Ionescu <sorin.ionescu@gmail.com>
#
# Load dependencies.
pmodload 'helper' 'spectrum'
# Correct commands.
setopt CORRECT
#
# Aliases
#
# Disable correction.
alias ack='nocorrect ack'
alias cd='nocorrect cd'
alias cp='nocorrect cp'
alias ebuild='nocorrect ebuild'
alias gcc='nocorrect gcc'
alias gist='nocorrect gist'
alias grep='nocorrect grep'
alias heroku='nocorrect heroku'
alias ln='nocorrect ln'
alias man='nocorrect man'
alias mkdir='nocorrect mkdir'
alias mv='nocorrect mv'
alias mysql='nocorrect mysql'
alias rm='nocorrect rm'
# Disable globbing.
alias bower='noglob bower'
alias fc='noglob fc'
alias find='noglob find'
alias ftp='noglob ftp'
alias history='noglob history'
alias locate='noglob locate'
alias rake='noglob rake'
alias rsync='noglob rsync'
alias scp='noglob scp'
alias sftp='noglob sftp'
# Define general aliases.
alias _='sudo'
alias b='${(z)BROWSER}'
alias cp="${aliases[cp]:-cp} -i"
alias e='${(z)VISUAL:-${(z)EDITOR}}'
alias ln="${aliases[ln]:-ln} -i"
alias mkdir="${aliases[mkdir]:-mkdir} -p"
alias mv="${aliases[mv]:-mv} -i"
alias p='${(z)PAGER}'
alias po='popd'
alias pu='pushd'
# alias rm="${aliases[rm]:-rm} -i"
alias rm="${aliases[rm]:-rm} -I"
alias type='type -a'
# ls
if is-callable 'dircolors'; then
# GNU Core Utilities
alias ls='ls --group-directories-first'
if zstyle -t ':prezto:module:utility:ls' color; then
if [[ -s "$HOME/.dir_colors" ]]; then
eval "$(dircolors --sh "$HOME/.dir_colors")"
else
eval "$(dircolors --sh)"
fi
alias ls="${aliases[ls]:-ls} --color=auto"
else
alias ls="${aliases[ls]:-ls} -F"
fi
else
# BSD Core Utilities
if zstyle -t ':prezto:module:utility:ls' color; then
# Define colors for BSD ls.
export LSCOLORS='exfxcxdxbxGxDxabagacad'
# Define colors for the completion system.
export LS_COLORS='di=34:ln=35:so=32:pi=33:ex=31:bd=36;01:cd=33;01:su=31;40;07:sg=36;40;07:tw=32;40;07:ow=33;40;07:'
alias ls="${aliases[ls]:-ls} -G"
else
alias ls="${aliases[ls]:-ls} -F"
fi
fi
alias l='ls -1A' # Lists in one column, hidden files.
alias ll='ls -lh' # Lists human readable sizes.
alias lr='ll -R' # Lists human readable sizes, recursively.
alias la='ll -A' # Lists human readable sizes, hidden files.
alias lm='la | "$PAGER"' # Lists human readable sizes, hidden files through pager.
alias lx='ll -XB' # Lists sorted by extension (GNU only).
alias lk='ll -Sr' # Lists sorted by size, largest last.
alias lt='ll -tr' # Lists sorted by date, most recent last.
alias lc='lt -c' # Lists sorted by date, most recent last, shows change time.
alias lu='lt -u' # Lists sorted by date, most recent last, shows access time.
alias sl='ls' # I often screw this up.
# Grep
if zstyle -t ':prezto:module:utility:grep' color; then
export GREP_COLOR='37;45' # BSD.
export GREP_COLORS="mt=$GREP_COLOR" # GNU.
alias grep="${aliases[grep]:-grep} --color=auto"
fi
# Mac OS X Everywhere
if [[ "$OSTYPE" == darwin* ]]; then
alias o='open'
elif [[ "$OSTYPE" == cygwin* ]]; then
alias o='cygstart'
alias pbcopy='tee > /dev/clipboard'
alias pbpaste='cat /dev/clipboard'
else
alias o='xdg-open'
if (( $+commands[xclip] )); then
alias pbcopy='xclip -selection clipboard -in'
alias pbpaste='xclip -selection clipboard -out'
elif (( $+commands[xsel] )); then
alias pbcopy='xsel --clipboard --input'
alias pbpaste='xsel --clipboard --output'
fi
fi
alias pbc='pbcopy'
alias pbp='pbpaste'
# File Download
if (( $+commands[curl] )); then
alias get='curl --continue-at - --location --progress-bar --remote-name --remote-time'
elif (( $+commands[wget] )); then
alias get='wget --continue --progress=bar --timestamping'
fi
# Resource Usage
alias df='df -kh'
alias du='du -kh'
if (( $+commands[htop] )); then
alias top=htop
else
if [[ "$OSTYPE" == (darwin*|*bsd*) ]]; then
alias topc='top -o cpu'
alias topm='top -o vsize'
else
alias topc='top -o %CPU'
alias topm='top -o %MEM'
fi
fi
# Miscellaneous
# Serves a directory via HTTP.
alias http-serve='python -m SimpleHTTPServer'
#
# Functions
#
# Makes a directory (and any missing parents) and changes to it.
function mkdcd {
  [[ -n "$1" ]] && mkdir -p "$1" && builtin cd "$1"
}
# Changes to a directory (last argument) and lists its contents; any earlier
# arguments are passed through to ls.
function cdls {
  builtin cd "$argv[-1]" && ls "${(@)argv[1,-2]}"
}
# Pushes an entry onto the directory stack and lists its contents.
function pushdls {
  builtin pushd "$argv[-1]" && ls "${(@)argv[1,-2]}"
}
# Pops an entry off the directory stack and lists its contents.
function popdls {
  builtin popd "$argv[-1]" && ls "${(@)argv[1,-2]}"
}
# Prints columns 1 2 3 ... n (e.g. `cmd | slit 1 3` prints columns 1 and 3).
function slit {
  awk "{ print ${(j:,:):-\$${^@}} }"
}
# Finds files whose name contains $1 (case-insensitive) and executes
# command $2 (default: file) on each.
function find-exec {
  find . -type f -iname "*${1:-}*" -exec "${2:-file}" '{}' \;
}
# Displays user owned processes status (defaults to the current user).
function psu {
  ps -U "${1:-$LOGNAME}" -o 'pid,%cpu,%mem,command' "${(@)argv[2,-1]}"
}
|
def refine_decl(decl):
    """Fill in ``decl.type`` for a source declaration when it is missing.

    Only untyped function declarations that carry a source location are
    looked up; the type is copied from ``symbol_info`` when available.
    """
    if not decl.location:
        return
    if decl.what != 'function' or decl.type:
        return
    info = symbol_info(decl.location.filename, decl.module.name, decl.name, None, no_ghci=no_ghci)
    if info:
        decl.type = info.type
import random
def random_number(low=1, high=10):
    """Return a uniformly distributed random integer in [low, high].

    The defaults preserve the original behavior (1..10 inclusive) while
    letting callers choose a different range.
    """
    return random.randint(low, high)
import { useUser } from "@auth0/nextjs-auth0";
import { Heading } from "@chakra-ui/layout";
import { Button, Spinner, Text, VStack } from "@chakra-ui/react";
import axios from "axios";
import { useRouter } from "next/dist/client/router";
import { useState } from "react";
import useSWR, { mutate } from "swr";
import Reward from "react-rewards";
import fetcher from "../../../common/utils/fetcher";
import withCustomAuth from "../../../components/hoc/with-custom-auth";
import Default from "../../../components/layouts/Default/Default";
import { useRef } from "react";
// Praise page: lets the signed-in user praise the owner of a habit once.
// Fetches the habit (with owner info) and any existing praise record, and
// fires confetti when a new praise succeeds.
const Praise = () => {
  // True while the POST is in flight; drives the button's loading state.
  const [isPraising, setIsPraising] = useState(false);
  const router = useRouter();
  const { user } = useUser();
  // Handle to the confetti reward component.
  const rewardsRef = useRef<any>(null);
  const { data: habit, error } = useSWR(
    `/api/habits/${router.query.id}/praise`,
    fetcher
  );
  // Dependent fetch: the key function throws while `habit` is undefined,
  // which SWR treats as "not ready" (standard SWR conditional-fetch idiom).
  const { data: praise } = useSWR(() =>
    user ? `/api/praises/${habit.id}` : null
  );
  if (error) return <div>oops... {error.message}</div>;
  // Post the praise, revalidate the praise record, then fire the reward.
  const handleClick = async () => {
    setIsPraising(true);
    await axios.post(`/api/habits/${router.query.id}/praise`);
    await mutate(`/api/praises/${habit.id}`);
    setIsPraising(false);
    rewardsRef.current!.rewardMe();
  };
  return (
    <Default>
      <VStack>
        <Heading>Praise</Heading>
        {!praise ? (
          <>
            <Spinner />
            <Text>Preparing fresh data</Text>
          </>
        ) : (
          <>
            <Heading as="h2" size="lg">
              {habit.owner_name}
            </Heading>
            <Text>for completing</Text>
            <Heading as="h3" size="md">
              {habit.name}
            </Heading>
            <Reward ref={rewardsRef} type="confetti">
              <Button
                isLoading={isPraising}
                loadingText="Praising"
                onClick={handleClick}
                disabled={praise.length > 0}
              >
                {praise.length > 0 ? "Praised" : "Praise!"}
              </Button>
            </Reward>
          </>
        )}
      </VStack>
    </Default>
  );
};
export default withCustomAuth(Praise);
|
#!/bin/sh
# SPDX-License-Identifier: BSD-3-Clause
# Copyright(c) 2018 Intel Corporation. All rights reserved.
# Runs a given script in the docker container you can generate from the
# docker_build directory.
# Example:
# To build sof for baytrail:
# ./scripts/docker-run.sh ./scripts/xtensa-build-all.sh byt
# To build topology:
# ./scripts/docker-run.sh ./scripts/build-tools.sh
# FIX: quote "$@" so script arguments containing spaces survive word
# splitting (the unquoted $@ broke such arguments), and use $(...) instead
# of backticks for command substitution.
docker run -i -v "$(pwd)":/home/sof/work/sof.git \
	--user "$(id -u)" sof "$@"
|
# Written by HakaseShounen
# August 01, 2016
# Feel free to contact me at: hakaseshounen@gmail.com
# 0.1v
# Fetch today's Hijri (Islamic) and Gregorian (Masihi) dates from the
# mfrapps hijrah API and print both.
hakaseshounen_hijra () {
	# FIX: was hs_tarikh='date +%Y%m%d' — that stored the literal string,
	# never running date. Use command substitution for today's YYYYMMDD.
	hs_tarikh=$(date +%Y%m%d)
	# FIX: the query previously interpolated the unset $tarikh variable.
	hs_url="http://hijrah.mfrapps.com/api/hijrah-api.php?tarikh=$hs_tarikh"
	hs_xml=$(curl -k --silent "$hs_url")
	# Extract the individual date fields from the XML response.
	hs_hijra_day=$(printf '%s\n' "$hs_xml" | xmlstarlet sel -t -v "date/hijrah/day")
	hs_hijra_month=$(printf '%s\n' "$hs_xml" | xmlstarlet sel -t -v "date/hijrah/month")
	hs_hijra_year=$(printf '%s\n' "$hs_xml" | xmlstarlet sel -t -v "date/hijrah/year")
	hs_masihi_day=$(printf '%s\n' "$hs_xml" | xmlstarlet sel -t -v "date/masihi/day")
	hs_masihi_month=$(printf '%s\n' "$hs_xml" | xmlstarlet sel -t -v "date/masihi/month")
	hs_masihi_month_name=$(printf '%s\n' "$hs_xml" | xmlstarlet sel -t -v "date/masihi/month_name")
	hs_masihi_year=$(printf '%s\n' "$hs_xml" | xmlstarlet sel -t -v "date/masihi/year")
	echo "Today in hijra: $hs_hijra_day $hs_hijra_month $hs_hijra_year"
	echo "Today in masihi: $hs_masihi_day $hs_masihi_month_name $hs_masihi_year"
}
hakaseshounen_hijra
|
#! /usr/bin/env python3
from nexus import settings,job,run_project,obj
from nexus import generate_physical_system
from nexus import generate_pyscf
from nexus import generate_convert4qmc
from nexus import generate_qmcpack
settings(
pseudo_dir = '../../pseudopotentials',
results = '',
sleep = 3,
machine = 'ws16',
)
system = generate_physical_system(
units = 'A',
axes = '''1.785 1.785 0.000
0.000 1.785 1.785
1.785 0.000 1.785''',
elem_pos = '''
C 0.0000 0.0000 0.0000
C 0.8925 0.8925 0.8925
''',
tiling = (2,1,1),
kgrid = (1,1,1),
kshift = (0,0,0),
C = 4,
)
scf = generate_pyscf(
identifier = 'scf', # log output goes to scf.out
path = 'diamond/scf', # directory to run in
job = job(serial=True,threads=16),# pyscf must run w/o mpi
template = './scf_template.py', # pyscf template file
system = system,
cell = obj( # used to make Cell() inputs
basis = 'bfd-vdz',
ecp = 'bfd',
drop_exponent = 0.1,
verbose = 5,
),
save_qmc = True, # save wfn data for qmcpack
)
c4q = generate_convert4qmc(
identifier = 'c4q',
path = 'diamond/scf',
job = job(cores=1),
no_jastrow = True,
hdf5 = True, # use hdf5 format
dependencies = (scf,'orbitals'),
)
opt = generate_qmcpack(
block = True,
identifier = 'opt',
path = 'diamond/optJ2',
job = job(cores=16,threads=4,app='qmcpack'),
input_type = 'basic',
system = system,
pseudos = ['C.BFD.xml'],
corrections = [],
J2 = True,
qmc = 'opt',
minmethod = 'oneshiftonly', # adjust for oneshift
init_cycles = 3,
init_minwalkers = 0.1,
cycles = 3,
samples = 25600,
dependencies = (c4q,'orbitals'),
)
qmc = generate_qmcpack(
block = True,
identifier = 'vmc',
path = 'diamond/vmc',
job = job(cores=16,threads=4,app='qmcpack'),
input_type = 'basic',
system = system,
pseudos = ['C.BFD.xml'],
corrections = [],
qmc = 'vmc',
dependencies = [(c4q,'orbitals'),
(opt,'jastrow')],
)
qmc = generate_qmcpack(
block = True,
identifier = 'dmc',
path = 'diamond/dmc',
job = job(cores=16,threads=4,app='qmcpack'),
input_type = 'basic',
system = system,
pseudos = ['C.BFD.xml'],
corrections = [],
qmc = 'dmc',
vmc_samples = 800,
eq_dmc = True,
dependencies = [(c4q,'orbitals'),
(opt,'jastrow')],
)
run_project()
|
#!/bin/bash
# SLURM directives: single core, 6 GB RAM, just under 24 h wall time.
#SBATCH -J Act_tanh_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1      # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00     # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args: activation (tanh), then training hyperparameters, input
# embedding file, and output directory — confirm against the script's CLI.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/G2P-my_LSTM-act1_save_new_odd.py tanh 50 Adamax 1 0.32873413360732373 0.002314007172161447 orth 1.0 efile.norm.1_7 odd_G2P_1_7/
|
#!/bin/bash
# FIX: this script uses bash-only features — `set -o pipefail`, the
# `function` keyword, and the ERR trap — so the shebang must be bash;
# under a POSIX sh (e.g. dash) these lines fail.
set -e
set -u
set -o pipefail
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
elif [ -L "${binary}" ]; then
echo "Destination binary is symlinked..."
dirname="$(dirname "${binary}")"
binary="${dirname}/$(readlink "${binary}")"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Copies the bcsymbolmap files of a vendored framework into the build
# products directory (rsync with the shared temp-file protection filter).
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity.
# No-op unless an expanded code-sign identity is set and signing is both
# required and allowed by the build settings. When
# COCOAPODS_PARALLEL_CODE_SIGN is "true" the codesign call is backgrounded.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures.
# Removes (via lipo) every architecture slice in the binary $1 that is not in
# $ARCHS. Communicates through the global STRIP_BINARY_RETVAL: 1 when the
# binary was processed, 0 when none of its slices match the build archs.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/GoogleUtilities/GoogleUtilities.framework"
install_framework "${BUILT_PRODUCTS_DIR}/nanopb/nanopb.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/GoogleUtilities/GoogleUtilities.framework"
install_framework "${BUILT_PRODUCTS_DIR}/nanopb/nanopb.framework"
fi
if [[ "$CONFIGURATION" == "Profile" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/GoogleUtilities/GoogleUtilities.framework"
install_framework "${BUILT_PRODUCTS_DIR}/nanopb/nanopb.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
#
# basic load test
#
# Preconditions: the target URL and an API token must arrive via the
# environment.
if [ -z "$DEPOSITAUTH_URL" ]; then
   echo "ERROR: DEPOSITAUTH_URL is not defined"
   exit 1
fi
if [ -z "$API_TOKEN" ]; then
   echo "ERROR: API_TOKEN is not defined"
   exit 1
fi
# bombardier is the HTTP load generator driving the test.
LT=../../bin/bombardier
if [ ! -f "$LT" ]; then
   echo "ERROR: Bombardier is not available"
   exit 1
fi
# set the test parameters
endpoint=$DEPOSITAUTH_URL
concurrent=10    # concurrent connections
count=1000       # total requests
url=inbound?after=0\&auth=$API_TOKEN
# -l adds latency statistics to the report.
CMD="$LT -c $concurrent -n $count -l $endpoint/$url"
echo "Host = $endpoint, count = $count, concurrency = $concurrent"
echo $CMD
$CMD
exit $?
#
# end of file
#
|
// Port the local development server listens on.
export const LOCAL_PORT = 5000;
// Name of the JSON file used as the on-disk data store.
export const DATA_FILE = 'data.json';
// File extension used when building/recognising JSON file names.
export const JSON_EXT = '.json';
|
<filename>node_modules/react-icons-kit/md/ic_rule_folder_twotone.js
// Auto-generated icon definition for react-icons-kit: the Material Design
// "rule_folder" two-tone icon. The object mirrors the SVG DOM for a 24x24
// viewBox; each child describes an element ("g"/"rect"/"path") with its
// attributes. Do not edit by hand — regenerate from the upstream SVG.
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_rule_folder_twotone = void 0;
var ic_rule_folder_twotone = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "rect",
      "attribs": {
        "fill": "none",
        "height": "24",
        "width": "24"
      },
      "children": [{
        "name": "rect",
        "attribs": {
          "fill": "none",
          "height": "24",
          "width": "24"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M11.17,8l-2-2H4v12l16,0V8H11.17z M7.83,16L5,13.17l1.41-1.41l1.41,1.41l3.54-3.54l1.41,1.41L7.83,16z M19,14.59L17.59,16L16,14.41L14.41,16L13,14.59L14.59,13L13,11.41L14.41,10L16,11.59L17.59,10L19,11.41L17.41,13L19,14.59z",
        "opacity": ".3"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M11.17,8l-2-2H4v12l16,0V8H11.17z M7.83,16L5,13.17l1.41-1.41l1.41,1.41l3.54-3.54l1.41,1.41L7.83,16z M19,14.59L17.59,16L16,14.41L14.41,16L13,14.59L14.59,13L13,11.41L14.41,10L16,11.59L17.59,10L19,11.41L17.41,13L19,14.59z",
          "opacity": ".3"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M7.83,16L5,13.17l1.41-1.41l1.41,1.41l3.54-3.54l1.41,1.41L7.83,16z M17.41,13L19,14.59L17.59,16L16,14.41L14.41,16 L13,14.59L14.59,13L13,11.41L14.41,10L16,11.59L17.59,10L19,11.41L17.41,13z M20,6h-8l-2-2H4C2.9,4,2.01,4.9,2.01,6L2,18 c0,1.1,0.9,2,2,2h16c1.1,0,2-0.9,2-2V8C22,6.9,21.1,6,20,6z M20,18L4,18V6h5.17l2,2H20V18z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M7.83,16L5,13.17l1.41-1.41l1.41,1.41l3.54-3.54l1.41,1.41L7.83,16z M17.41,13L19,14.59L17.59,16L16,14.41L14.41,16 L13,14.59L14.59,13L13,11.41L14.41,10L16,11.59L17.59,10L19,11.41L17.41,13z M20,6h-8l-2-2H4C2.9,4,2.01,4.9,2.01,6L2,18 c0,1.1,0.9,2,2,2h16c1.1,0,2-0.9,2-2V8C22,6.9,21.1,6,20,6z M20,18L4,18V6h5.17l2,2H20V18z"
        },
        "children": []
      }]
    }]
  }]
};
exports.ic_rule_folder_twotone = ic_rule_folder_twotone;
<reponame>nicklinyi/AxiSEM-3D
//
// ClaytonFluid3D.cpp
// AxiSEM3D
//
// Created by <NAME> on 7/30/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
// Clayton-Enquist ABC for fluid points in 3D
#include "ClaytonFluid3D.hpp"
#include "FluidPoint.hpp"
#include "fft.hpp"
// Verify that this absorbing boundary matches the fluid point it is attached
// to, and make sure the FFT workspace is large enough for apply().
void ClaytonFluid3D::checkCompatibility() {
    // check size: mAreaOverRhoVp must hold one value per azimuthal sample (nr)
    int nr = mFluidPoint->getNr();
    if (nr != mAreaOverRhoVp.rows()) {
        throw std::runtime_error("ClaytonFluid3D::checkCompatibility ||"
                                 "Incompatible sizes.");
    }
    // workspace: grow the scratch vectors when this point is larger than any
    // seen so far (sVecR/sVecC appear to be shared scratch buffers sized to
    // the largest nr — TODO confirm their declaration in the header)
    if (sVecR.rows() < nr) {
        sVecR.resize(nr);
        sVecC.resize(nr / 2 + 1);
    }
    // report request to FFT so the size-nr 1D plan is created up front
    fft::gFFT_1.addNR(nr);
}
// Apply the Clayton-Enquist absorbing boundary: subtract
// (area / (rho * vp)) * velocity from the stiffness vector. The product is
// evaluated in cardinal (physical) space because mAreaOverRhoVp varies
// azimuthally in 3D, then transformed back to the Fourier domain.
void ClaytonFluid3D::apply() const {
    // get fields (Fourier-domain velocity and stiffness of this point)
    const eigen::CColX &veloc = mFluidPoint->getFields().mVeloc;
    eigen::CColX &stiff = mFluidPoint->getFields().mStiff;
    // constants: nr cardinal samples correspond to nu_1 Fourier coefficients
    int nr = mFluidPoint->getNr();
    int nu_1 = nr / 2 + 1;
    // FFT: Fourier => cardinal
    fft::gFFT_1.computeC2R(veloc, sVecR, nr);
    // multiply by area / (rho * vp) pointwise in cardinal space
    sVecR.topRows(nr).array() *= mAreaOverRhoVp.array();
    // FFT: cardinal => Fourier
    fft::gFFT_1.computeR2C(sVecR, sVecC, nr);
    // subtract the damping term from the stiffness (only nu_1 coefficients)
    stiff -= sVecC.topRows(nu_1);
}
|
<gh_stars>0
package com.luban.ioc.import1;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Component;
/**
 * Spring-managed {@link Dao2} implementation, registered under the explicit
 * bean name "Dao2Impl".
 */
@Component("Dao2Impl")
public class Dao2Impl implements Dao2, InitializingBean {
    /** Demo query that only prints a marker message to stdout. */
    @Override
    public void query() {
        System.out.println("hahahahahhaha");
    }
    /** No-op; present only to satisfy the {@link InitializingBean} contract. */
    @Override
    public void afterPropertiesSet() throws Exception {
    }
}
|
<reponame>BryceStandley/TNAH
#pragma once
#include "TNAH/Core/Window.h"
#include "TNAH/Events/ApplicationEvent.h"
#include "TNAH/Events/MouseEvent.h"
#include "TNAH/Events/KeyEvent.h"
#include "Platform/OpenGL/OpenGLGraphicsContext.h"
#include <GLFW/glfw3.h>
namespace tnah {
/**
* @class WinWindow
*
* @brief Form for viewing the window.
*
* @author <NAME>
* @date 7/09/2021
*/
class WinWindow : public Window
{
public:
/**
* @fn WinWindow::WinWindow(const WindowProps& props);
*
* @brief Constructor
*
* @author <NAME>
* @date 7/09/2021
*
* @param props The properties.
*/
WinWindow(const WindowProps& props);
/**
* @fn virtual WinWindow::~WinWindow();
*
* @brief Destructor
*
* @author <NAME>
* @date 7/09/2021
*/
virtual ~WinWindow();
/**
* @fn void WinWindow::OnUpdate() override;
*
* @brief Executes the 'update' action
*
* @author <NAME>
* @date 7/09/2021
*/
void OnUpdate() override;
/**
* @fn inline unsigned int WinWindow::GetWidth() const override
*
* @brief Gets the width of window
*
* @author <NAME>
* @date 7/09/2021
*
* @returns The width.
*/
inline unsigned int GetWidth() const override { return m_Data.Width; }
/**
* @fn inline unsigned int WinWindow::GetHeight() const override
*
* @brief Gets the height of window
*
* @author <NAME>
* @date 7/09/2021
*
* @returns The height.
*/
inline unsigned int GetHeight() const override { return m_Data.Height; }
/**
* @fn inline void WinWindow::SetEventCallback(const EventCallbackFn& callback) override
*
* @brief Sets the event callback
*
* @author <NAME>
* @date 7/09/2021
*
* @param callback The callback.
*/
inline void SetEventCallback(const EventCallbackFn& callback) override { m_Data.EventCallback = callback; }
/**
* @fn void WinWindow::SetVSync(bool enabled) override;
*
* @brief Sets v synchronize
*
* @author <NAME>
* @date 7/09/2021
*
* @param enabled True to enable, false to disable.
*/
void SetVSync(bool enabled) override;
/**
* @fn bool WinWindow::IsVSync() const override;
*
* @brief Query if this object is v synchronize
*
* @author <NAME>
* @date 7/09/2021
*
* @returns True if v synchronize, false if not.
*/
bool IsVSync() const override;
/**
* @fn inline void* WinWindow::GetNativeWindow() const override
*
* @brief Gets native window
*
* @author <NAME>
* @date 7/09/2021
*
* @returns Null if it fails, else the native window.
*/
inline void* GetNativeWindow() const override { return m_Window; }
/**
* @fn inline virtual void WinWindow::SetCursorDisabled(bool disable) override
*
* @brief Sets cursor status
*
* @author <NAME>
* @date 7/09/2021
*
* @param disable True to disable, false to enable.
*/
inline virtual void SetCursorDisabled(bool disable) override
{
if(disable)
glfwSetInputMode(m_Window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);
else
glfwSetInputMode(m_Window, GLFW_CURSOR, GLFW_CURSOR_NORMAL);
}
/**
* @fn virtual void WinWindow::ToggleFullScreen(const bool& enabled) override;
*
* @brief Toggle full screen
*
* @author <NAME>
* @date 7/09/2021
*
* @param enabled True to enable, false to disable.
*/
virtual void ToggleFullScreen(const bool& enabled) override;
/**
* @fn inline bool WinWindow::IsFullscreen() const override
*
* @brief Query if this object is fullscreen
*
* @author <NAME>
* @date 7/09/2021
*
* @returns True if fullscreen, false if not.
*/
inline bool IsFullscreen() const override {return m_Data.Fullscreen;}
/**
* @fn virtual void WinWindow::SetScreenResolution(const uint32_t& width, const uint32_t& height) override;
*
* @brief Sets screen resolution
*
* @author <NAME>
* @date 7/09/2021
*
* @param width The width.
* @param height The height.
*/
virtual void SetScreenResolution(const uint32_t& width, const uint32_t& height) override;
private:
/**
* @fn virtual void WinWindow::Init(const WindowProps& props);
*
* @brief Initializes this object
*
* @author <NAME>
* @date 7/09/2021
*
* @param props The properties.
*/
virtual void Init(const WindowProps& props);
/**
* @fn virtual void WinWindow::Shutdown();
*
* @brief Shuts down this object and frees any resources it is using
*
* @author <NAME>
* @date 7/09/2021
*/
virtual void Shutdown();
private:
/** @brief The window */
GLFWwindow* m_Window;
/** @brief The context */
GraphicsContext* m_Context;
/**
* @struct WindowData
*
* @brief A struct containing Window data.
*
* @author <NAME>
* @date 7/09/2021
*/
struct WindowData
{
/** @brief The title */
std::string Title;
		/** @brief The window width and height */
unsigned int Width, Height;
/** @brief True to synchronize */
bool VSync;
/** @brief True to fullscreen */
bool Fullscreen;
/** @brief The event callback */
EventCallbackFn EventCallback;
};
/** @brief The data */
WindowData m_Data;
};
}
|
import {
createBan,
getBanById,
getBansByCommunityId,
} from '../../services/banServices';
import Ban from '../../reports/Ban';
import { ErrorREST, Errors } from '../../utilities/ErrorREST';
import mongoose from 'mongoose';
import { mongoURI } from '../../config/db';
// Integration tests for the ban services against a real MongoDB instance.
// Collections are wiped around every test to keep cases independent.
describe('banServices', (): void => {
  beforeAll(
    async (): Promise<void> => {
      mongoose.connect(mongoURI, { useNewUrlParser: true });
      await Ban.deleteMany({}).exec();
    },
  );
  beforeEach(
    async (): Promise<void> => {
      await Ban.deleteMany({});
    },
  );
  afterEach(
    async (): Promise<void> => {
      await Ban.deleteMany({}).exec();
    },
  );
  afterAll(
    async (): Promise<void> => {
      await Ban.deleteMany({}).exec();
      await mongoose.disconnect();
    },
  );
  // Shared fixture ids/values used by the cases below.
  const userId = mongoose.Types.ObjectId().toString();
  const reason = 'It breaks a rule';
  const communityId = mongoose.Types.ObjectId().toString();
  const ruleId = mongoose.Types.ObjectId().toString();
  describe('createBan', (): void => {
    it(`should create new ban`, async (): Promise<void> => {
      await createBan(userId, userId, communityId, reason, ruleId);
      expect(ruleId).toBeTruthy();
      // TODO(review): `ruleId` is an unrelated random ObjectId, so this lookup
      // always misses and the assertion below is silently skipped. It should
      // look up the ban created above (e.g. by its returned id or bannedUser).
      const ban = await Ban.findById(ruleId);
      if (!ban) {
        return;
      }
      expect(ban.reason).toMatch(reason);
    });
  });
  describe('getBanById', (): void => {
    it(`should return a ban`, async (): Promise<void> => {
      const newBan = new Ban({
        reason,
        bannedUser: userId,
        community: communityId,
        user: userId,
      });
      await newBan.save();
      const { _id } = newBan;
      const ban = await getBanById(_id);
      if (!ban) {
        return;
      }
      expect(ban.reason).toMatch(reason);
    });
    // Name fixed: the assertion expects a NotFound rejection, but the test
    // used to be called "shouldn't throw an error if rule isn't found".
    it(`should throw an error if the ban isn't found`, async (): Promise<
      void
    > => {
      const id = mongoose.Types.ObjectId().toString();
      const { status, message } = Errors.NotFound;
      const error = new ErrorREST(status, message, null);
      await expect(getBanById(id)).rejects.toThrow(error);
    });
  });
  describe('getBansByCommunityId', (): void => {
    it(`should return a list of bans`, async (): Promise<void> => {
      const secondCommunityId = mongoose.Types.ObjectId().toString();
      const bansArr = [
        {
          reason,
          bannedUser: userId,
          community: communityId,
          user: userId,
        },
        {
          reason,
          bannedUser: userId,
          community: communityId,
          user: userId,
        },
        {
          reason,
          bannedUser: userId,
          community: secondCommunityId,
          user: userId,
        },
      ];
      await Ban.insertMany(bansArr);
      const communityRules = await getBansByCommunityId(communityId);
      const secondCommunityRules = await getBansByCommunityId(
        secondCommunityId,
      );
      expect(communityRules).toHaveLength(2);
      expect(secondCommunityRules).toHaveLength(1);
    });
    // Name fixed: was "shouldn't throw an error if no bans are found found",
    // which contradicted the rejects.toThrow assertion and duplicated "found".
    it(`should throw an error if no bans are found`, async (): Promise<
      void
    > => {
      const { status, message } = Errors.NotFound;
      const error = new ErrorREST(status, message, null);
      await expect(getBansByCommunityId(communityId)).rejects.toThrow(error);
    });
  });
});
|
'use strict';
/*global require*/
var defaultValue = require('terriajs-cesium/Source/Core/defaultValue');
var defined = require('terriajs-cesium/Source/Core/defined');
var DeveloperError = require('terriajs-cesium/Source/Core/DeveloperError');
var knockout = require('terriajs-cesium/Source/ThirdParty/knockout');

/**
 * View model for one tab on the explorer panel.
 *
 * @param {String} [name='Unknown'] The display name of the tab.
 */
var ExplorerTabViewModel = function(name) {
    this.panel = undefined;
    this.name = defaultValue(name, 'Unknown');
    this.badgeText = undefined;
    this.badgeIsPopped = false;
    this.isVisible = true;
    this.isActive = false;
    this._popTimeoutID = undefined;

    // Make the public display state observable.
    knockout.track(this, ['name', 'badgeText', 'badgeIsPopped', 'isVisible', 'isActive']);
};

/**
 * Makes this tab the active one on its parent panel.
 * @throws {DeveloperError} If the tab has not been added to a panel yet.
 */
ExplorerTabViewModel.prototype.activate = function() {
    if (defined(this.panel)) {
        this.panel.activateTab(this);
        return;
    }
    throw new DeveloperError('This tab must be added to the explorer panel before it can be activated.');
};

/**
 * Triggers the badge "pop" animation after a short delay.
 */
ExplorerTabViewModel.prototype.popBadge = function() {
    // Reset the popped state. It might still be true if the pop was previously aborted.
    this.badgeIsPopped = false;

    if (defined(this._popTimeoutID)) {
        return; // a delayed pop is already pending
    }

    // Delay the pop slightly, in case the badge just appeared.
    var self = this;
    this._popTimeoutID = setTimeout(function() {
        self._popTimeoutID = undefined;
        self.badgeIsPopped = true;
    }, 50);
};

/**
 * Cancels any pending pop and clears the popped state.
 */
ExplorerTabViewModel.prototype.unpopBadge = function() {
    var pending = this._popTimeoutID;
    if (defined(pending)) {
        clearTimeout(pending);
        this._popTimeoutID = undefined;
    }
    this.badgeIsPopped = false;
};

module.exports = ExplorerTabViewModel;
|
# Locate the `crystal` compiler on PATH; prints its path and exits
# non-zero if Crystal is not installed.
which crystal
|
#!/bin/bash
# Set up a MariaDB master/slave replication pair in Docker, with the data
# directories and my.cnf files mounted from the current working directory.
WORKDIR="$(pwd)"
echo "$WORKDIR"
MASTER_DIR="$WORKDIR/mysql/master/"
SLAVE_DIR="$WORKDIR/mysql/slave/"
## First we could rm the existed container
docker rm -f mysql_master
docker rm -f mysql_slave
## Recreate the data directories from scratch.
# BUGFIX: the old if/else only ever did ONE of rm/mkdir — an existing
# directory was removed but never recreated, and a fresh one was never
# cleaned. Remove then recreate unconditionally so each run starts empty.
rm -rf "$MASTER_DIR" "$SLAVE_DIR"
mkdir -p "$MASTER_DIR" "$SLAVE_DIR"
## Start instance
echo 'Create master container'
docker run --name mysql_master -v "$WORKDIR/conf/mysql/master/my.cnf":/etc/mysql/my.cnf -v "$MASTER_DIR":/var/lib/mysql -e MYSQL_ROOT_PASSWORD=123456 -d mariadb
echo 'Create slave container'
docker run --name mysql_slave -v "$WORKDIR/conf/mysql/slave/my.cnf":/etc/mysql/my.cnf -v "$SLAVE_DIR":/var/lib/mysql -e MYSQL_ROOT_PASSWORD=123456 --link mysql_master:mysql_master -d mariadb
## Restart so the mounted configuration is picked up cleanly.
docker stop mysql_master mysql_slave
docker start mysql_master mysql_slave
echo 'waiting.'
sleep 1
echo 'waiting..'
sleep 1
echo 'waiting...'
sleep 1
# create replication user and grant privilege
echo 'Create replication user and grant privilege'
docker exec -it mysql_master mysql -e "CREATE USER 'repl'@'%' IDENTIFIED BY 'repl';GRANT REPLICATION SLAVE ON *.* TO 'repl'@'%';"
echo 'Obtaining the Replication Master Binary Log Coordinates'
## Obtaining the Replication Master Binary Log Coordinates
master_status="$(docker exec -it mysql_master mysql -e "show master status\G")"
master_log_file="$(echo "$master_status" | awk 'NR==2{print substr($2,1,length($2)-1)}')"
master_log_pos="$(echo "$master_status" | awk 'NR==3{print $2}')"
echo "$master_log_pos"
# Wrap the file name in single quotes for the CHANGE MASTER statement.
master_log_file="'""$master_log_file""'"
echo "$master_log_file"
echo 'Setting Up Replication Slaves'
## Setting Up Replication Slaves
docker exec -it mysql_slave mysql -e "CHANGE MASTER TO MASTER_HOST='mysql_master',MASTER_PORT=3306,MASTER_USER='repl',MASTER_PASSWORD='repl',MASTER_LOG_FILE=$master_log_file,MASTER_LOG_POS=$master_log_pos;"
docker exec -it mysql_slave mysql -e "start slave;"
docker exec -it mysql_slave mysql -e "show slave status\G"
## Creates shortcuts
# grep "alias master" /etc/profile
# if [ $? -eq 1 ];then
# echo 'alias mysql="docker exec -it master mysql"' >> /etc/profile
# echo 'alias master="docker exec -it master mysql -h 127.0.0.1 -P3306"' >> /etc/profile
# echo 'alias slave="docker exec -it master mysql -h 127.0.0.1 -P3307"' >> /etc/profile
# source /etc/profile
# fi
#!/usr/bin/env bash
# Copyright 2020 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Drop the test database and role on an RDS PostgreSQL instance.
# Required environment: RDS_MASTERUSER, RDS_MASTERPASSWORD, RDS_ENDPOINT.
if [[ -z "$RDS_MASTERUSER" ]]; then
    echo '$RDS_MASTERUSER not set !'
    exit 1
else
    # BUGFIX: was `SUPERUSER= $RDS_MASTERUSER` — the space after `=` leaves
    # SUPERUSER empty and tries to execute the value as a command.
    SUPERUSER=$RDS_MASTERUSER #<master username>
fi
if [[ -z "$RDS_MASTERPASSWORD" ]]; then
    # BUGFIX: message said $RDS_MASTEPASSWORD (missing R).
    echo '$RDS_MASTERPASSWORD not set !'
    exit 1
else
    SU_PASSWORD=$RDS_MASTERPASSWORD #<master password>
fi
# BUGFIX: the check used "$RDS_ENDPOINT " with a trailing space inside the
# quotes, so -z could never be true and unset endpoints slipped through.
if [[ -z "$RDS_ENDPOINT" ]]; then
    echo '$RDS_ENDPOINT not set !'
    exit 1
else
    DB_SERVER=$RDS_ENDPOINT # <DB_instance_endpoint:port>
fi
DATABASE=controller
USER=tester
USER_PASSWORD=tester
# drop database, then revoke privileges and drop the test role
psql "postgresql://${SUPERUSER}:${SU_PASSWORD}@${DB_SERVER}/postgres" -c "DROP DATABASE IF EXISTS ${DATABASE};"
psql "postgresql://${SUPERUSER}:${SU_PASSWORD}@${DB_SERVER}/postgres" -c "REVOKE ALL PRIVILEGES ON SCHEMA public FROM ${USER};DROP ROLE ${USER};"
|
-- Return every product priced above 50 that has more than 5 units in stock.
SELECT *
FROM Products
WHERE Price > 50 AND Quantity > 5;
<reponame>textmagus/textmagus-3rdparty-cdk
/* $Id: calendar_ex.c,v 1.16 2012/03/23 13:54:44 tom Exp $ */
#include <cdk_test.h>
#ifdef HAVE_XCURSES
char *XCursesProgramName = "calendar_ex";
#endif
static BINDFN_PROTO (createCalendarMarkCB);
static BINDFN_PROTO (removeCalendarMarkCB);
/*
* This program demonstrates the Cdk calendar widget.
*/
int main (int argc, char **argv)
{
/* *INDENT-EQLS* */
CDKSCREEN *cdkscreen = 0;
CDKCALENDAR *calendar = 0;
WINDOW *cursesWin = 0;
const char *mesg[5];
char temp[256];
struct tm *dateInfo;
time_t clck, retVal;
CDK_PARAMS params;
char *title;
int day;
int month;
int year;
/*
* Get the current dates and set the default values for
* the day/month/year values for the calendar.
*/
time (&clck);
dateInfo = gmtime (&clck);
/* *INDENT-EQLS* */
CDKparseParams (argc, argv, ¶ms, "d:m:y:t:w:" CDK_MIN_PARAMS);
day = CDKparamNumber2 (¶ms, 'd', dateInfo->tm_mday);
month = CDKparamNumber2 (¶ms, 'm', dateInfo->tm_mon + 1);
year = CDKparamNumber2 (¶ms, 'y', dateInfo->tm_year + 1900);
title = CDKparamString2 (¶ms, 't', "<C></U>CDK Calendar Widget\n<C>Demo");
/* Set up CDK. */
cursesWin = initscr ();
cdkscreen = initCDKScreen (cursesWin);
/* Start CDK Colors. */
initCDKColor ();
/* Create the calendar widget. */
calendar = newCDKCalendar (cdkscreen,
CDKparamValue (¶ms, 'X', CENTER),
CDKparamValue (¶ms, 'Y', CENTER),
title, day, month, year,
COLOR_PAIR (16) | A_BOLD,
COLOR_PAIR (24) | A_BOLD,
COLOR_PAIR (32) | A_BOLD,
COLOR_PAIR (40) | A_REVERSE,
CDKparamValue (¶ms, 'N', TRUE),
CDKparamValue (¶ms, 'S', FALSE));
/* Is the widget null? */
if (calendar == 0)
{
/* Clean up the memory. */
destroyCDKScreen (cdkscreen);
/* End curses... */
endCDK ();
printf ("Cannot create the calendar. Is the window too small?\n");
ExitProgram (EXIT_FAILURE);
}
/* Create a key binding to mark days on the calendar. */
bindCDKObject (vCALENDAR, calendar, 'm', createCalendarMarkCB, calendar);
bindCDKObject (vCALENDAR, calendar, 'M', createCalendarMarkCB, calendar);
bindCDKObject (vCALENDAR, calendar, 'r', removeCalendarMarkCB, calendar);
bindCDKObject (vCALENDAR, calendar, 'R', removeCalendarMarkCB, calendar);
calendar->weekBase = CDKparamNumber (¶ms, 'w');
/* Draw the calendar widget. */
drawCDKCalendar (calendar, ObjOf (calendar)->box);
/* Let the user play with the widget. */
retVal = activateCDKCalendar (calendar, 0);
/* Check which day they selected. */
if (calendar->exitType == vESCAPE_HIT)
{
mesg[0] = "<C>You hit escape. No date selected.";
mesg[1] = "";
mesg[2] = "<C>Press any key to continue.";
popupLabel (cdkscreen, (CDK_CSTRING2) mesg, 3);
}
else if (calendar->exitType == vNORMAL)
{
mesg[0] = "You selected the following date";
sprintf (temp, "<C></B/16>%02d/%02d/%d (dd/mm/yyyy)",
calendar->day,
calendar->month,
calendar->year);
mesg[1] = temp;
mesg[2] = "<C>Press any key to continue.";
popupLabel (cdkscreen, (CDK_CSTRING2) mesg, 3);
}
/* Clean up and exit. */
destroyCDKCalendar (calendar);
destroyCDKScreen (cdkscreen);
endCDK ();
fflush (stdout);
printf ("Selected Time: %s\n", ctime (&retVal));
ExitProgram (EXIT_SUCCESS);
}
/*
 * Key-binding callback (bound to 'm'/'M'): adds a highlighted marker on the
 * day currently selected in the calendar, then redraws the widget so the
 * marker is visible immediately. Always returns FALSE so the key is not
 * otherwise consumed by the widget.
 */
static int createCalendarMarkCB (EObjectType objectType GCC_UNUSED,
                                 void *object,
                                 void *clientData GCC_UNUSED,
                                 chtype key GCC_UNUSED)
{
   /* The bound object is the calendar widget itself. */
   CDKCALENDAR *calendar = (CDKCALENDAR *)object;
   setCDKCalendarMarker (calendar,
                         calendar->day,
                         calendar->month,
                         calendar->year,
                         COLOR_PAIR (5) | A_REVERSE);
   drawCDKCalendar (calendar, ObjOf (calendar)->box);
   return (FALSE);
}
/*
 * Key-binding callback (bound to 'r'/'R'): removes the marker from the day
 * currently selected in the calendar and redraws the widget. Always returns
 * FALSE so the key is not otherwise consumed by the widget.
 */
static int removeCalendarMarkCB (EObjectType objectType GCC_UNUSED,
                                 void *object,
                                 void *clientData GCC_UNUSED,
                                 chtype key GCC_UNUSED)
{
   /* The bound object is the calendar widget itself. */
   CDKCALENDAR *calendar = (CDKCALENDAR *)object;
   removeCDKCalendarMarker (calendar,
                            calendar->day,
                            calendar->month,
                            calendar->year);
   drawCDKCalendar (calendar, ObjOf (calendar)->box);
   return (FALSE);
}
|
package index
import (
"squirreldb/types"
"sync"
"time"
"github.com/pilosa/pilosa/v2/roaring"
"github.com/prometheus/prometheus/pkg/labels"
)
// labelsLookupCache provides a cache for metricID to labels.
// Its eviction policy is random (Golang map order).
type labelsLookupCache struct {
	cache map[types.MetricID]labelsEntry
	l sync.Mutex
}
// postingsCache provides a cache for postings queries by label=value.
// It is only used for query and not metric creation, mostly because creation only
// occurs once, thus a cache is not useful.
// Invalidation occurs from TTL and on metric creation/deletion: a message is sent to all
// SquirrelDB instances, and postings that match the created metric are invalidated.
type postingsCache struct {
	// cache map shard => label name => label value => postingEntry
	cache map[postingsCacheKey]postingEntry
	l sync.Mutex
}
// postingsCacheKey identifies one cached posting: a (shard, name, value) triple.
type postingsCacheKey struct {
	Name string
	Value string
	Shard int32
}
// postingEntry is one cached bitmap with its TTL expiry and the bitmap's
// cardinality at store time (used as a mutation sanity check on read).
type postingEntry struct {
	expire time.Time
	value *roaring.Bitmap
	count uint64
}
// labelsEntry is one cached labels value; it expires together with the
// corresponding Cassandra row.
type labelsEntry struct {
	cassandraExpire time.Time
	value labels.Labels
}
const (
	// Maximum entries per cache before random eviction kicks in.
	labelCacheMaxSize = 10000
	postingsCacheMaxSize = 10000
	// TTL applied to every postings cache entry.
	postingsCacheTTL = 15 * time.Minute
)
// Get returns the non-expired cache entry for id, or a nil labels list.
func (c *labelsLookupCache) Get(now time.Time, id types.MetricID) labels.Labels {
	c.l.Lock()
	defer c.l.Unlock()
	return c.get(now, id)
}
// MGet returns the non-expired cache entries (or nil) for multiple IDs,
// preserving input order; result[i] corresponds to ids[i].
func (c *labelsLookupCache) MGet(now time.Time, ids []types.MetricID) []labels.Labels {
	c.l.Lock()
	defer c.l.Unlock()
	result := make([]labels.Labels, len(ids))
	for i, id := range ids {
		result[i] = c.get(now, id)
	}
	return result
}
// Set adds an entry to the cache and returns the current cache size.
// When the cache is over labelCacheMaxSize it first drops expired entries,
// then randomly evicts down to half capacity.
func (c *labelsLookupCache) Set(
	now time.Time,
	id types.MetricID,
	value labels.Labels,
	cassandraExpiration time.Time,
) int {
	c.l.Lock()
	defer c.l.Unlock()
	if len(c.cache) > labelCacheMaxSize {
		// First drop expired entries (cheap, and keeps live entries cached).
		for k, v := range c.cache {
			if v.cassandraExpire.Before(now) {
				delete(c.cache, k)
			}
		}
		// we want to evict at least 50%, to avoid doing set/evict one/set/evict one/...
		// Eviction order is whatever Go map iteration yields, i.e. random.
		if len(c.cache) > labelCacheMaxSize/2 {
			toDelete := len(c.cache) - labelCacheMaxSize/2
			for k := range c.cache {
				delete(c.cache, k)
				toDelete--
				if toDelete <= 0 {
					break
				}
			}
		}
	}
	c.cache[id] = labelsEntry{
		value: value,
		cassandraExpire: cassandraExpiration,
	}
	return len(c.cache)
}
// Drop deletes the given metric IDs from the cache and returns the cache size.
// IDs are received as raw uint64 (e.g. from an invalidation message) and
// converted to types.MetricID for the lookup.
func (c *labelsLookupCache) Drop(ids []uint64) int {
	c.l.Lock()
	defer c.l.Unlock()
	for _, id := range ids {
		delete(c.cache, types.MetricID(id))
	}
	return len(c.cache)
}
// get returns the cached labels for id, or nil. Entries whose Cassandra row
// has expired are deleted on access. The caller must hold c.l.
func (c *labelsLookupCache) get(now time.Time, id types.MetricID) labels.Labels {
	entry := c.cache[id]
	if entry.cassandraExpire.IsZero() {
		// zero value: id is not present in the cache
		return nil
	}
	if entry.cassandraExpire.Before(now) {
		delete(c.cache, id)
		return nil
	}
	return entry.value
}
// Get returns the non-expired cached bitmap for (shard, name, value), or nil.
func (c *postingsCache) Get(shard int32, name string, value string) *roaring.Bitmap {
	return c.get(time.Now(), shard, name, value)
}
// Invalidate drops the entries impacted by the given keys (e.g. after metric
// creation/deletion) and returns the cache size.
func (c *postingsCache) Invalidate(entries []postingsCacheKey) int {
	c.l.Lock()
	defer c.l.Unlock()
	for _, k := range entries {
		delete(c.cache, k)
	}
	return len(c.cache)
}
// Set adds an entry with the default TTL and returns the cache size.
func (c *postingsCache) Set(shard int32, name string, value string, bitmap *roaring.Bitmap) int {
	now := time.Now()
	return c.set(now, shard, name, value, bitmap)
}
// set stores bitmap under (shard, name, value) with a TTL of postingsCacheTTL
// and returns the cache size. When the cache is over postingsCacheMaxSize it
// first drops expired entries, then randomly evicts down to half capacity.
func (c *postingsCache) set(now time.Time, shard int32, name string, value string, bitmap *roaring.Bitmap) int {
	c.l.Lock()
	defer c.l.Unlock()
	if len(c.cache) > postingsCacheMaxSize {
		// First drop expired entries (cheap, and keeps live entries cached).
		for k, v := range c.cache {
			if v.expire.Before(now) {
				delete(c.cache, k)
			}
		}
		// we want to evict at least 50%, to avoid doing set/evict one/set/evict one/...
		// Eviction order is whatever Go map iteration yields, i.e. random.
		if len(c.cache) > postingsCacheMaxSize/2 {
			toDelete := len(c.cache) - postingsCacheMaxSize/2
			for k := range c.cache {
				delete(c.cache, k)
				toDelete--
				if toDelete <= 0 {
					break
				}
			}
		}
	}
	key := postingsCacheKey{
		Shard: shard,
		Name: name,
		Value: value,
	}
	c.cache[key] = postingEntry{
		expire: now.Add(postingsCacheTTL),
		value: bitmap,
		// record the cardinality so reads can detect external mutation
		count: bitmap.Count(),
	}
	return len(c.cache)
}
// get returns the non-expired cached bitmap for (shard, name, value), or nil.
// Expired entries are deleted on access.
func (c *postingsCache) get(now time.Time, shard int32, name string, value string) *roaring.Bitmap {
	c.l.Lock()
	defer c.l.Unlock()
	key := postingsCacheKey{
		Shard: shard,
		Name: name,
		Value: value,
	}
	entry := c.cache[key]
	if entry.expire.IsZero() {
		// zero value: the key is not present in the cache
		return nil
	}
	// Sanity check: the bitmap's cardinality must match what Set() recorded.
	// A mismatch means a caller mutated a shared cached bitmap, which would
	// silently corrupt query results.
	// BUGFIX: the panic message was the leftover debug string "fuck...";
	// replaced with a descriptive one (behavior on the happy path unchanged).
	if entry.value.Count() != entry.count {
		panic("postingsCache: cached bitmap was mutated after being stored (count mismatch)")
	}
	if entry.expire.Before(now) {
		delete(c.cache, key)
		return nil
	}
	return entry.value
}
|
package com.atjl.dbservice.api;
import java.util.Map;
/**
 * Strategy interface that decides whether a target record must be updated
 * from a raw (source) record.
 */
public interface TgtDataNeedUpdateChecker {
    /**
     * @param raw the source-record field map
     * @param tgt the current target-record field map
     * @return true if the target is stale and should be updated from raw
     */
    boolean needUpdate(Map raw,Map tgt);
}
|
<html>
<!-- Template for rendering a user profile. The {{...}} placeholders
     (user.name, user.age, user.hobbies[0..2]) are substituted by the
     templating engine; exactly three hobbies are rendered. -->
<body>
  <div>
    <h2>User Details</h2>
    <ul>
      <li>Name: {{user.name}}</li>
      <li>Age: {{user.age}}</li>
      <li>Hobbies:
        <ul>
          <li>{{user.hobbies[0]}}</li>
          <li>{{user.hobbies[1]}}</li>
          <li>{{user.hobbies[2]}}</li>
        </ul>
      </li>
    </ul>
  </div>
</body>
</html>
<html>
<head>
<title>Calculator</title>
<script type="text/javascript">
function calculate(val1, operator, val2) {
switch (operator) {
case '+':
return val1 + val2;
case '-':
return val1 - val2;
case '*':
return val1 * val2;
case '/':
return val1 / val2;
default:
return 0;
}
}
function result() {
var val1 = document.getElementById('val1').value;
var operator = document.getElementById('operator').value;
var val2 = document.getElementById('val2').value;
document.getElementById('result').value = calculate(Number(val1), operator, Number(val2));
}
</script>
</head>
<body>
<input type="text" id="val1" /><br><br>
<select id="operator">
<option>+</option>
<option>-</option>
<option>*</option>
<option>/</option>
</select><br><br>
<input type="text" id="val2" /><br><br>
<button type="button" onclick="result()">Calculate</button><br><br>
<input type="text" id="result" />
</body>
</html> |
<reponame>davidyu62/egovframe-runtime<filename>Foundation/org.egovframe.rte.fdl.logging/src/test/java/org/egovframe/rte/fdl/logging/Log4j2FilterTest.java
package org.egovframe.rte.fdl.logging;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import org.egovframe.rte.fdl.logging.sample.MarkerFilterTestSample;
import org.egovframe.rte.fdl.logging.util.LogFileUtil;
import javax.annotation.Resource;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.ThreadContext;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath*:META-INF/spring/context-common.xml" })
public class Log4j2FilterTest {
	/**
	 * Tests the ThresholdFilter added in Log4j2.
	 * A ThresholdFilter re-applies a threshold to the log level.
	 * logger: level=DEBUG, appender=Console, File, fileName=./logs/file/filter/ThresholdFilter
	 * ThresholdFilter: level=ERROR
	 * */
	@Test
	public void testThresholdFilter() throws Exception {
		Logger logger = LogManager.getLogger("thresholdFilterLogger");
		// Emit logs.
		// The Console appender receives everything at DEBUG level and above.
		// The File appender has a ThresholdFilter at ERROR, so only ERROR+ is written to it.
		logger.debug("ThresholdFilter Test Start");
		try {
			@SuppressWarnings("unused")
			int value = 5 / 0;
		} catch (ArithmeticException ae) {
			logger.error("An ArithmeticException have been thrown");
			logger.catching(ae);
		}
		logger.debug("ThresholdFilter Test End");
		// Check the Console output below.
		// Only error() and catching() reach the file (ref. catching() logs at ERROR level).
		String logFileDir = "./logs/file/filter/ThresholdFilter.log";
		File logFile = new File(logFileDir);
		// Verify the log file.
		// Output pattern: %d{HH:mm:ss.SSS} %-5level %class{36} %L %M - %m%n
		if(logFile != null) {
			assertTrue(!LogFileUtil.contains(logFileDir, "DEBUG"));
			assertEquals(2, LogFileUtil.countWords(logFileDir, "ERROR"));
			String[] tailLines = LogFileUtil.getTailLines(logFile, 35);
			assertTrue(tailLines[0].endsWith("- An ArithmeticException have been thrown"));
			// Entry-style methods log their method name as the message (ex. entry() - entry, exit() - exit ...)
			assertTrue(tailLines[1].endsWith("- Catching"));
		}
	}
	/**
	 * Tests the DynamicThresholdFilter added in Log4j2.
	 * A DynamicThresholdFilter filters on the key/value pairs of the ThreadContext map,
	 * so a different log level can be applied per value (admin1, admin2, admin3).
	 * logger: level=DEBUG, appender=File, fileName=./logs/file/filter/DynamicThresholdFilter
	 * DynamicThresholdFilter: key=loginId, defaultThreshold=ERROR
	 * */
	@Test
	public void testDynamicThresholdFilter() throws Exception {
		Logger logger = LogManager.getLogger("dynamicThresholdFilterLogger");
		// key:value = loginId : admin1
		// logs at DEBUG level and above are written
		ThreadContext.put("loginId", "admin1");
		logger.debug("DEBUG - loginId: admin1");
		logger.info("INFO - loginId: admin1");
		logger.warn("WARN - loginId: admin1");
		logger.error("ERROR - loginId: admin1");
		logger.fatal("FATAL - loginId: admin1");
		// key:value = loginId:admin2
		// logs at WARN level and above are written
		ThreadContext.put("loginId", "admin2");
		logger.debug("DEBUG - loginId: admin2");
		logger.info("INFO - loginId: admin2");
		logger.warn("WARN - loginId: admin2");
		logger.error("ERROR - loginId: admin2");
		logger.fatal("FATAL - loginId: admin2");
		// key:value = loginId:admin3
		// falls back to defaultThreshold=ERROR, so only ERROR level and above is written
		ThreadContext.put("loginId", "admin3");
		logger.debug("DEBUG - loginId: admin3");
		logger.info("INFO - loginId: admin3");
		logger.warn("WARN - loginId: admin3");
		logger.error("ERROR - loginId: admin3");
		logger.fatal("FATAL - loginId: admin3");
		String logFileDir = "./logs/file/filter/DynamicThresholdFilter.log";
		File logFile = new File(logFileDir);
		// Verify the log file.
		if(logFile != null) {
			int numLines = LogFileUtil.countLines(logFileDir);
			assertEquals(10,numLines);
			String[] tailLines = LogFileUtil.getTailLines(logFile, 10);
			assertTrue(tailLines[0].endsWith("DEBUG - loginId: admin1"));
			assertTrue(tailLines[1].endsWith("INFO - loginId: admin1"));
			assertTrue(tailLines[2].endsWith("WARN - loginId: admin1"));
			assertTrue(tailLines[3].endsWith("ERROR - loginId: admin1"));
			assertTrue(tailLines[4].endsWith("FATAL - loginId: admin1"));
			assertTrue(tailLines[5].endsWith("WARN - loginId: admin2"));
			assertTrue(tailLines[6].endsWith("ERROR - loginId: admin2"));
			assertTrue(tailLines[7].endsWith("FATAL - loginId: admin2"));
			assertTrue(tailLines[8].endsWith("ERROR - loginId: admin3"));
			assertTrue(tailLines[9].endsWith("FATAL - loginId: admin3"));
		}
	}
	/**
	 * Tests the MarkerFilter added in Log4j2.
	 * A MarkerFilter decides whether to log based on the Marker attached in code.
	 * MarkerFilter: marker=INSERT
	 * fileName=./logs/file/filter/MarkerFilter.log
	 * */
	@Resource(name="markerFilterTestService")
	MarkerFilterTestSample markerFilterTestSample;
	@Test
	public void testMarkerFilter() throws Exception {
		String userId = "egov";
		// Emit logs.
		// Because the MarkerFilter's marker attribute is set to "INSERT",
		// only log events carrying the "INSERT" Marker are written.
		// Method that logs with the "SELECT" Marker.
		markerFilterTestSample.doSelectUser(userId);
		// Method that logs with the "INSERT" Marker;
		// only the INSERT-marked log inside this method is written.
		markerFilterTestSample.doInsertUser(userId);
		// Method that logs with the "UPDATE" Marker.
		markerFilterTestSample.doUpdateUser(userId);
		// Method that logs with the "DELETE" Marker.
		markerFilterTestSample.doDeleteUser(userId);
		// Verify the log file.
		String logFileDir = "./logs/file/filter/MarkerFilter.log";
		File logFile = new File(logFileDir);
		if(logFile != null) {
			// PatternLayout's %marker pattern renders as MarkerName[ ParentMarkerName ]
			Boolean printSELECT= LogFileUtil.contains(logFileDir, "SELECT[ SQL ]");
			Boolean printINSERT= LogFileUtil.contains(logFileDir, "INSERT[ SQL ]");
			Boolean printUPDATE= LogFileUtil.contains(logFileDir, "UPDATE[ SQL ]");
			Boolean printDELETE= LogFileUtil.contains(logFileDir, "DELETE[ SQL ]");
			assertEquals(false, printSELECT);
			assertEquals(true, printINSERT);
			assertEquals(false, printUPDATE);
			assertEquals(false, printDELETE);
		}
	}
/**
 * Tests the Log4j2 RegexFilter, which accepts or rejects log statements by
 * matching the message against a regular expression.
 * logger: level=DEBUG, appender=Console, fileName=./logs/file/filter/RegexFilter.log
 * RegexFilter: regex=".* Test .*"
 */
@Test
public void testRegexFilter() throws Exception {
    Logger logger = LogManager.getLogger("regexFilterLogger");
    logger.debug("RegexFilterTest Start");

    // One message per level. Only messages containing " Test " (with the
    // surrounding spaces) match the configured regex; the WARN message
    // below says "RegexFilterTest" without spaces, so it is rejected.
    logger.debug("DEBUG - RegexFilter Test !!");
    logger.info("INFO - RegexFilter Test !!");
    logger.warn("WARN - RegexFilterTest !!");
    logger.error("ERROR - RegexFilter Test !!");
    logger.fatal("FATAL - RegexFilter Test !!");
    logger.debug("RegexFilterTest End");

    // Verify: the tail of the log file is exactly the four accepted
    // messages, in emission order.
    File logFile = new File("./logs/file/filter/RegexFilter.log");
    String[] expectedSuffixes = {
        "DEBUG - RegexFilter Test !!",
        "INFO - RegexFilter Test !!",
        "ERROR - RegexFilter Test !!",
        "FATAL - RegexFilter Test !!"
    };
    String[] lastLines = LogFileUtil.getTailLines(logFile, expectedSuffixes.length);
    for (int i = 0; i < expectedSuffixes.length; i++) {
        assertTrue(lastLines[i].endsWith(expectedSuffixes[i]));
    }
}
} |
/**
 * Returns a copy of `strings` in which the element at each index i is
 * replaced by numbers[i] when such a number exists; positions beyond the
 * end of `numbers` keep their original string value.
 *
 * e.g. replaceStrings(['A', 'B', 'C', 'D'], [1, 2, 3]) -> [1, 2, 3, 'D']
 *
 * @param {string[]} strings - values to (partially) replace
 * @param {number[]} numbers - positional replacements; may be shorter
 * @returns {Array<string|number>} new array, input arrays are not mutated
 */
function replaceStrings(strings, numbers) {
  // BUG FIX: the original tested `numbers.indexOf(i) !== -1`, i.e. whether
  // the loop *index* happened to occur as a value inside `numbers`. That
  // replaced the wrong positions and could push `undefined` (numbers[i]
  // out of range). The documented sample output is a positional
  // replacement, implemented here.
  return strings.map((str, i) => (i < numbers.length ? numbers[i] : str));
}
// Sample usage. BUG FIX: `strings` and `numbers` were never defined, so the
// original call threw a ReferenceError; concrete sample inputs added.
const strings = ['A', 'B', 'C', 'D'];
const numbers = [1, 2, 3];
console.log(replaceStrings(strings, numbers));
// Output: [1, 2, 3, 'D']
@Path("pubyear")
// JAX-RS resource: GET /pubyear?title=... returns the publication year of the
// first book matching the given title, as plain text.
public class PubYearResource {
    @GET
    @Produces({ MediaType.TEXT_PLAIN })
    // Returns the publication year for `title`, or a human-readable
    // "not found" message. NOTE(review): a year of 0 is used as the
    // "no match" sentinel — confirm the lookup cannot legitimately return 0.
    public String getPubYear(@QueryParam("title") String title) {
        // search book title in db
        // NOTE(review): "..." is a placeholder, not valid Java — the actual
        // database lookup still needs to be implemented here.
        int pubYear = ...
        if (pubYear == 0) {
            return "No matching book found";
        } else {
            return Integer.toString(pubYear);
        }
    }
}
<reponame>yr0/indago
require 'terminal-table'
module Indago
  # Renders search results and field listings to stdout, formatting each
  # result row as a terminal table.
  class Output
    attr_accessor :collection, :result

    def initialize(collection, result = [])
      @collection = collection
      @result = result
    end

    # Prints the searchable fields for the current collection, one per line.
    def print_listed_fields(fields)
      output "Search fields for #{collection}:"
      output fields.join("\n")
    end

    # Prints one table per result item, or a single placeholder table when
    # there are no results.
    def table_print
      return output(Terminal::Table.new(rows: [['No results found']])) if result.empty?

      result.each do |item|
        heading = "#{collection.singularize.capitalize} ##{item[Indago::PRIMARY_FIELD_NAME]}"
        output Terminal::Table.new(title: heading, rows: wrap_values(item))
      end
    end

    private

    # Writes +value+ to stdout; isolated so it can be stubbed in tests.
    def output(value)
      puts value
    end

    # Wraps any over-wide values of +item+ in place and returns the hash
    # (Hash#each returns its receiver), ready to be used as table rows.
    def wrap_values(item)
      item.each do |field, raw|
        item[field] = wrap(raw) if raw.to_s.size > OUTPUT_TABLE_MAX_WIDTH
      end
    end

    # Inserts a newline after every OUTPUT_TABLE_MAX_WIDTH characters so the
    # value fits within the table column.
    def wrap(value)
      text = value.to_s
      break_at = OUTPUT_TABLE_MAX_WIDTH
      while text.size > break_at
        text.insert(break_at, "\n")
        break_at += OUTPUT_TABLE_MAX_WIDTH + 1
      end
      text
    end
  end
end
|
<filename>offer/src/main/java/com/java/study/offer/chapter5/FindIndex.java
package com.java.study.offer.chapter5;
/**
 * Finds the minimum element of a rotated, originally non-decreasing array
 * (e.g. {3,4,5,1,2} is {1,2,3,4,5} rotated; its minimum is 1) using a
 * modified binary search.
 */
public class FindIndex {

    public static void main(String[] args) {
        int[] arr = new int[]{3, 3, 3, 1, 2};
        // Renamed from the misleading "targetArr": this is the minimum VALUE.
        int min = findMinIndex(arr);
        System.out.println(min);
    }

    /**
     * Returns the minimum value (despite the legacy name, a value, not an
     * index) of a rotated sorted array. O(log n) normally; degrades to a
     * linear scan only when arr[start] == arr[mid] == arr[end], where the
     * half containing the minimum cannot be determined.
     *
     * @param arr a rotation of a non-decreasing array; must be non-empty
     * @return the smallest element of {@code arr}
     * @throws IllegalArgumentException if {@code arr} is null or empty
     */
    public static int findMinIndex(int[] arr) {
        if (arr == null || arr.length == 0) {
            // Previously this crashed with ArrayIndexOutOfBoundsException.
            throw new IllegalArgumentException("array must be non-empty");
        }
        int startIndex = 0;
        int endIndex = arr.length - 1;
        // If the array is not actually rotated, the loop never runs and the
        // minimum is the first element — hence this initial value.
        int midIndex = startIndex;
        while (arr[startIndex] >= arr[endIndex]) {
            // The two indices have converged on adjacent elements; the
            // right one is the minimum (start of the original array).
            if (endIndex - startIndex == 1) {
                midIndex = endIndex;
                break;
            }
            midIndex = (startIndex + endIndex) / 2;
            // All three probes equal (e.g. {1,0,1,1,1}): binary search can't
            // decide which half holds the minimum — fall back to linear scan.
            if (arr[midIndex] == arr[startIndex] && arr[midIndex] == arr[endIndex]) {
                return linearMin(arr, startIndex, endIndex);
            }
            if (arr[midIndex] >= arr[startIndex]) {
                startIndex = midIndex;   // minimum lies in the right half
            } else {
                endIndex = midIndex;     // minimum lies in the left half
            }
        }
        return arr[midIndex];
    }

    /** Linear-scan fallback: minimum of arr[startIndex..endIndex], inclusive. */
    private static int linearMin(int[] arr, int startIndex, int endIndex) {
        int result = arr[startIndex];
        for (int i = startIndex + 1; i <= endIndex; i++) {
            result = Math.min(result, arr[i]);
        }
        return result;
    }
}
|
// Demo: add two numbers and print the sum.
var num1 = 5;
var num2 = 6;
var sum = num1 + num2;
console.log("The result is " + sum);
<gh_stars>10-100
# Creates the join table for the many-to-many relation between Qwester
# questionnaires and presentations.
class CreateQwesterPresentationQuestionnaires < ActiveRecord::Migration
  def change
    create_table :qwester_presentation_questionnaires do |t|
      t.integer :questionnaire_id # presumably FK to qwester questionnaires — verify against the models
      t.integer :presentation_id  # presumably FK to presentations — verify against the models
      t.timestamps
    end
  end
end
|
/*******************************************************************************
* Copyright (c) 2016 Maxim Integrated Products, Inc., All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL MAXIM INTEGRATED BE LIABLE FOR ANY CLAIM, DAMAGES
* OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* Except as contained in this notice, the name of Maxim Integrated
* Products, Inc. shall not be used except as stated in the Maxim Integrated
* Products, Inc. Branding Policy.
*
* The mere transfer of this software does not imply any licenses
* of trade secrets, proprietary technology, copyrights, patents,
* trademarks, maskwork rights, or any other form of intellectual
* property whatsoever. Maxim Integrated Products, Inc. retains all
* ownership rights.
*******************************************************************************
*/
#include "device.h"
#include "PeripheralPins.h"
#include "ioman_regs.h"
#include "ioman.h"
#include "adc.h"
/*
* To select a peripheral function on Maxim microcontrollers, multiple
* configurations must be made. The mbed PinMap structure only includes one
* data member to hold this information. To extend the configuration storage,
* the "function" data member is used as a pointer to a pin_function_t
* structure. This structure is defined in objects.h. The definitions below
* include the creation of the pin_function_t structures and the assignment of
* the pointers to the "function" data members.
*/
#ifdef TOOLCHAIN_ARM_STD
#pragma diag_suppress 1296
#endif
/************I2C***************/
/* SDA pin options per I2C master instance. Per the note above, the third
 * member points at a pin_function_t holding the IOMAN request/ack registers
 * and their mapping bit masks. */
const PinMap PinMap_I2C_SDA[] = {
    { P1_6, I2C_0, (int)&((pin_function_t){&MXC_IOMAN->i2cm0_req, &MXC_IOMAN->i2cm0_ack, MXC_F_IOMAN_I2CM0_REQ_MAPPING_REQ, MXC_F_IOMAN_I2CM0_ACK_MAPPING_ACK}) },
    { P3_4, I2C_1, (int)&((pin_function_t){&MXC_IOMAN->i2cm1_req, &MXC_IOMAN->i2cm1_ack, MXC_F_IOMAN_I2CM1_REQ_MAPPING_REQ, MXC_F_IOMAN_I2CM1_ACK_MAPPING_ACK}) },
    { NC, NC, 0 }
};
/* SCL pin options per I2C master instance. */
const PinMap PinMap_I2C_SCL[] = {
    { P1_7, I2C_0, (int)&((pin_function_t){&MXC_IOMAN->i2cm0_req, &MXC_IOMAN->i2cm0_ack, MXC_F_IOMAN_I2CM0_REQ_MAPPING_REQ, MXC_F_IOMAN_I2CM0_ACK_MAPPING_ACK}) },
    { P3_5, I2C_1, (int)&((pin_function_t){&MXC_IOMAN->i2cm1_req, &MXC_IOMAN->i2cm1_ack, MXC_F_IOMAN_I2CM1_REQ_MAPPING_REQ, MXC_F_IOMAN_I2CM1_ACK_MAPPING_ACK}) },
    { NC, NC, 0 }
};
/************UART***************/
/* UART TX pin options. Each UART instance offers two mappings (IOMAN_MAP_A
 * and IOMAN_MAP_B); the map selector is OR'd into the request mask. */
const PinMap PinMap_UART_TX[] = {
    { P0_1, UART_0, (int)&((pin_function_t){&MXC_IOMAN->uart0_req, &MXC_IOMAN->uart0_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART0_REQ_IO_REQ), (MXC_F_IOMAN_UART0_ACK_IO_MAP | MXC_F_IOMAN_UART0_ACK_IO_ACK)}) },
    { P2_1, UART_1, (int)&((pin_function_t){&MXC_IOMAN->uart1_req, &MXC_IOMAN->uart1_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART1_REQ_IO_REQ), (MXC_F_IOMAN_UART1_ACK_IO_MAP | MXC_F_IOMAN_UART1_ACK_IO_ACK)}) },
    { P3_1, UART_2, (int)&((pin_function_t){&MXC_IOMAN->uart2_req, &MXC_IOMAN->uart2_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART2_REQ_IO_REQ), (MXC_F_IOMAN_UART2_ACK_IO_MAP | MXC_F_IOMAN_UART2_ACK_IO_ACK)}) },
    { P0_0, UART_0, (int)&((pin_function_t){&MXC_IOMAN->uart0_req, &MXC_IOMAN->uart0_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART0_REQ_IO_REQ), (MXC_F_IOMAN_UART0_ACK_IO_MAP | MXC_F_IOMAN_UART0_ACK_IO_ACK)}) },
    { P2_0, UART_1, (int)&((pin_function_t){&MXC_IOMAN->uart1_req, &MXC_IOMAN->uart1_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART1_REQ_IO_REQ), (MXC_F_IOMAN_UART1_ACK_IO_MAP | MXC_F_IOMAN_UART1_ACK_IO_ACK)}) },
    { P3_0, UART_2, (int)&((pin_function_t){&MXC_IOMAN->uart2_req, &MXC_IOMAN->uart2_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART2_REQ_IO_REQ), (MXC_F_IOMAN_UART2_ACK_IO_MAP | MXC_F_IOMAN_UART2_ACK_IO_ACK)}) },
    { NC, NC, 0 }
};
/* UART RX pin options (TX/RX swap pins between map A and map B). */
const PinMap PinMap_UART_RX[] = {
    { P0_0, UART_0, (int)&((pin_function_t){&MXC_IOMAN->uart0_req, &MXC_IOMAN->uart0_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART0_REQ_IO_REQ), (MXC_F_IOMAN_UART0_ACK_IO_MAP | MXC_F_IOMAN_UART0_ACK_IO_ACK)}) },
    { P2_0, UART_1, (int)&((pin_function_t){&MXC_IOMAN->uart1_req, &MXC_IOMAN->uart1_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART1_REQ_IO_REQ), (MXC_F_IOMAN_UART1_ACK_IO_MAP | MXC_F_IOMAN_UART1_ACK_IO_ACK)}) },
    { P3_0, UART_2, (int)&((pin_function_t){&MXC_IOMAN->uart2_req, &MXC_IOMAN->uart2_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART2_REQ_IO_REQ), (MXC_F_IOMAN_UART2_ACK_IO_MAP | MXC_F_IOMAN_UART2_ACK_IO_ACK)}) },
    { P0_1, UART_0, (int)&((pin_function_t){&MXC_IOMAN->uart0_req, &MXC_IOMAN->uart0_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART0_REQ_IO_REQ), (MXC_F_IOMAN_UART0_ACK_IO_MAP | MXC_F_IOMAN_UART0_ACK_IO_ACK)}) },
    { P2_1, UART_1, (int)&((pin_function_t){&MXC_IOMAN->uart1_req, &MXC_IOMAN->uart1_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART1_REQ_IO_REQ), (MXC_F_IOMAN_UART1_ACK_IO_MAP | MXC_F_IOMAN_UART1_ACK_IO_ACK)}) },
    { P3_1, UART_2, (int)&((pin_function_t){&MXC_IOMAN->uart2_req, &MXC_IOMAN->uart2_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART2_REQ_IO_REQ), (MXC_F_IOMAN_UART2_ACK_IO_MAP | MXC_F_IOMAN_UART2_ACK_IO_ACK)}) },
    { NC, NC, 0 }
};
/* UART CTS (flow-control input) pin options. */
const PinMap PinMap_UART_CTS[] = {
    { P0_2, UART_0, (int)&((pin_function_t){&MXC_IOMAN->uart0_req, &MXC_IOMAN->uart0_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART0_REQ_CTS_IO_REQ), (MXC_F_IOMAN_UART0_ACK_CTS_MAP | MXC_F_IOMAN_UART0_ACK_CTS_IO_ACK)}) },
    { P2_2, UART_1, (int)&((pin_function_t){&MXC_IOMAN->uart1_req, &MXC_IOMAN->uart1_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART1_REQ_CTS_IO_REQ), (MXC_F_IOMAN_UART1_ACK_CTS_MAP | MXC_F_IOMAN_UART1_ACK_CTS_IO_ACK)}) },
    { P3_2, UART_2, (int)&((pin_function_t){&MXC_IOMAN->uart2_req, &MXC_IOMAN->uart2_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART2_REQ_CTS_IO_REQ), (MXC_F_IOMAN_UART2_ACK_CTS_MAP | MXC_F_IOMAN_UART2_ACK_CTS_IO_ACK)}) },
    { P0_3, UART_0, (int)&((pin_function_t){&MXC_IOMAN->uart0_req, &MXC_IOMAN->uart0_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART0_REQ_CTS_IO_REQ), (MXC_F_IOMAN_UART0_ACK_CTS_MAP | MXC_F_IOMAN_UART0_ACK_CTS_IO_ACK)}) },
    { P2_3, UART_1, (int)&((pin_function_t){&MXC_IOMAN->uart1_req, &MXC_IOMAN->uart1_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART1_REQ_CTS_IO_REQ), (MXC_F_IOMAN_UART1_ACK_CTS_MAP | MXC_F_IOMAN_UART1_ACK_CTS_IO_ACK)}) },
    { P3_3, UART_2, (int)&((pin_function_t){&MXC_IOMAN->uart2_req, &MXC_IOMAN->uart2_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART2_REQ_CTS_IO_REQ), (MXC_F_IOMAN_UART2_ACK_CTS_MAP | MXC_F_IOMAN_UART2_ACK_CTS_IO_ACK)}) },
    { NC, NC, 0 }
};
/* UART RTS (flow-control output) pin options (CTS/RTS swap between maps). */
const PinMap PinMap_UART_RTS[] = {
    { P0_3, UART_0, (int)&((pin_function_t){&MXC_IOMAN->uart0_req, &MXC_IOMAN->uart0_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART0_REQ_RTS_IO_REQ), (MXC_F_IOMAN_UART0_ACK_RTS_MAP | MXC_F_IOMAN_UART0_ACK_RTS_IO_ACK)}) },
    { P2_3, UART_1, (int)&((pin_function_t){&MXC_IOMAN->uart1_req, &MXC_IOMAN->uart1_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART1_REQ_RTS_IO_REQ), (MXC_F_IOMAN_UART1_ACK_RTS_MAP | MXC_F_IOMAN_UART1_ACK_RTS_IO_ACK)}) },
    { P3_3, UART_2, (int)&((pin_function_t){&MXC_IOMAN->uart2_req, &MXC_IOMAN->uart2_ack, ((uint32_t)IOMAN_MAP_A | MXC_F_IOMAN_UART2_REQ_RTS_IO_REQ), (MXC_F_IOMAN_UART2_ACK_RTS_MAP | MXC_F_IOMAN_UART2_ACK_RTS_IO_ACK)}) },
    { P0_2, UART_0, (int)&((pin_function_t){&MXC_IOMAN->uart0_req, &MXC_IOMAN->uart0_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART0_REQ_RTS_IO_REQ), (MXC_F_IOMAN_UART0_ACK_RTS_MAP | MXC_F_IOMAN_UART0_ACK_RTS_IO_ACK)}) },
    { P2_2, UART_1, (int)&((pin_function_t){&MXC_IOMAN->uart1_req, &MXC_IOMAN->uart1_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART1_REQ_RTS_IO_REQ), (MXC_F_IOMAN_UART1_ACK_RTS_MAP | MXC_F_IOMAN_UART1_ACK_RTS_IO_ACK)}) },
    { P3_2, UART_2, (int)&((pin_function_t){&MXC_IOMAN->uart2_req, &MXC_IOMAN->uart2_ack, ((uint32_t)IOMAN_MAP_B | MXC_F_IOMAN_UART2_REQ_RTS_IO_REQ), (MXC_F_IOMAN_UART2_ACK_RTS_MAP | MXC_F_IOMAN_UART2_ACK_RTS_IO_ACK)}) },
    { NC, NC, 0 }
};
/************SPI***************/
/* SPI master SCLK pin options, one entry per SPIM instance. */
const PinMap PinMap_SPI_SCLK[] = {
    { P0_4, SPI_0, (int)&((pin_function_t){&MXC_IOMAN->spim0_req, &MXC_IOMAN->spim0_ack, MXC_F_IOMAN_SPIM0_REQ_CORE_IO_REQ, MXC_F_IOMAN_SPIM0_ACK_CORE_IO_ACK}) },
    { P1_0, SPI_1, (int)&((pin_function_t){&MXC_IOMAN->spim1_req, &MXC_IOMAN->spim1_ack, MXC_F_IOMAN_SPIM1_REQ_CORE_IO_REQ, MXC_F_IOMAN_SPIM1_ACK_CORE_IO_ACK}) },
    { P2_4, SPI_2, (int)&((pin_function_t){&MXC_IOMAN->spim2_req, &MXC_IOMAN->spim2_ack, MXC_F_IOMAN_SPIM2_REQ_CORE_IO_REQ, MXC_F_IOMAN_SPIM2_ACK_CORE_IO_ACK}) },
    { NC, NC, 0 }
};
/* SPI master MOSI pin options. */
const PinMap PinMap_SPI_MOSI[] = {
    { P0_5, SPI_0, (int)&((pin_function_t){&MXC_IOMAN->spim0_req, &MXC_IOMAN->spim0_ack, MXC_F_IOMAN_SPIM0_REQ_CORE_IO_REQ, MXC_F_IOMAN_SPIM0_ACK_CORE_IO_ACK}) },
    { P1_1, SPI_1, (int)&((pin_function_t){&MXC_IOMAN->spim1_req, &MXC_IOMAN->spim1_ack, MXC_F_IOMAN_SPIM1_REQ_CORE_IO_REQ, MXC_F_IOMAN_SPIM1_ACK_CORE_IO_ACK}) },
    { P2_5, SPI_2, (int)&((pin_function_t){&MXC_IOMAN->spim2_req, &MXC_IOMAN->spim2_ack, MXC_F_IOMAN_SPIM2_REQ_CORE_IO_REQ, MXC_F_IOMAN_SPIM2_ACK_CORE_IO_ACK}) },
    { NC, NC, 0 }
};
/* SPI master MISO pin options. */
const PinMap PinMap_SPI_MISO[] = {
    { P0_6, SPI_0, (int)&((pin_function_t){&MXC_IOMAN->spim0_req, &MXC_IOMAN->spim0_ack, MXC_F_IOMAN_SPIM0_REQ_CORE_IO_REQ, MXC_F_IOMAN_SPIM0_ACK_CORE_IO_ACK}) },
    { P1_2, SPI_1, (int)&((pin_function_t){&MXC_IOMAN->spim1_req, &MXC_IOMAN->spim1_ack, MXC_F_IOMAN_SPIM1_REQ_CORE_IO_REQ, MXC_F_IOMAN_SPIM1_ACK_CORE_IO_ACK}) },
    { P2_6, SPI_2, (int)&((pin_function_t){&MXC_IOMAN->spim2_req, &MXC_IOMAN->spim2_ack, MXC_F_IOMAN_SPIM2_REQ_CORE_IO_REQ, MXC_F_IOMAN_SPIM2_ACK_CORE_IO_ACK}) },
    { NC, NC, 0 }
};
/* SPI master slave-select pin options (slave-select 0 request/ack bits). */
const PinMap PinMap_SPI_SSEL[] = {
    { P0_7, SPI_0, (int)&((pin_function_t){&MXC_IOMAN->spim0_req, &MXC_IOMAN->spim0_ack, MXC_F_IOMAN_SPIM0_REQ_SS0_IO_REQ, MXC_F_IOMAN_SPIM0_ACK_SS0_IO_ACK}) },
    { P1_3, SPI_1, (int)&((pin_function_t){&MXC_IOMAN->spim1_req, &MXC_IOMAN->spim1_ack, MXC_F_IOMAN_SPIM1_REQ_SS0_IO_REQ, MXC_F_IOMAN_SPIM1_ACK_SS0_IO_ACK}) },
    { P2_7, SPI_2, (int)&((pin_function_t){&MXC_IOMAN->spim2_req, &MXC_IOMAN->spim2_ack, MXC_F_IOMAN_SPIM2_REQ_SS0_IO_REQ, MXC_F_IOMAN_SPIM2_ACK_SS0_IO_ACK}) },
    { NC, NC, 0 }
};
/************PWM***************/
/* PWM channel pin options. Unlike the peripherals above, PWM needs no
 * pin_function_t indirection, so the function member is simply 1 for every
 * valid mapping. */
const PinMap PinMap_PWM[] = {
    { P0_0, PWM_0, 1 }, { P2_0, PWM_0, 1 }, { P4_0, PWM_0, 1 },
    { P0_1, PWM_1, 1 }, { P2_1, PWM_1, 1 }, { P4_1, PWM_1, 1 },
    { P0_2, PWM_2, 1 }, { P2_2, PWM_2, 1 }, { P4_2, PWM_2, 1 },
    { P0_3, PWM_3, 1 }, { P2_3, PWM_3, 1 }, { P4_3, PWM_3, 1 },
    { P0_4, PWM_4, 1 }, { P2_4, PWM_4, 1 }, { P4_4, PWM_4, 1 },
    { P0_5, PWM_5, 1 }, { P2_5, PWM_5, 1 }, { P4_5, PWM_5, 1 },
    { P0_6, PWM_6, 1 }, { P2_6, PWM_6, 1 }, { P4_6, PWM_6, 1 },
    { P0_7, PWM_7, 1 }, { P2_7, PWM_7, 1 }, { P4_7, PWM_7, 1 },
    { P1_0, PWM_8, 1 }, { P3_0, PWM_8, 1 },
    { P1_1, PWM_9, 1 }, { P3_1, PWM_9, 1 },
    { P1_2, PWM_10, 1 }, { P3_2, PWM_10, 1 },
    { P1_3, PWM_11, 1 }, { P3_3, PWM_11, 1 },
    { P1_4, PWM_12, 1 }, { P3_4, PWM_12, 1 },
    { P1_5, PWM_13, 1 }, { P3_5, PWM_13, 1 },
    { P1_6, PWM_14, 1 }, { P3_6, PWM_14, 1 },
    { P1_7, PWM_15, 1 }, { P3_7, PWM_15, 1 },
    { NC, NC, 0 }
};
/************ADC***************/
/* ADC input pin options. AIN_4/AIN_5 select the DIV_5 channel variants
 * (per the channel enum names, divided inputs of channels 0/1 — confirm
 * against the MAX326xx ADC documentation). */
const PinMap PinMap_ADC[] = {
    { AIN_0, ADC, ADC_CH_0 },
    { AIN_1, ADC, ADC_CH_1 },
    { AIN_2, ADC, ADC_CH_2 },
    { AIN_3, ADC, ADC_CH_3 },
    { AIN_4, ADC, ADC_CH_0_DIV_5 },
    { AIN_5, ADC, ADC_CH_1_DIV_5 },
    { NC, NC, 0 }
};
|
package gex.newsml.g2;
import lombok.ToString;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;
import org.w3c.dom.Element;
/**
* An abstract class. All G2 items are inherited from this class.
*
* <p>
* Java class for AnyItemType complex type.
*
* <p>
* The following schema fragment specifies the expected content contained within
* this class.
*
* <pre>
* <complexType name="AnyItemType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <any processContents='lax' namespace='http://www.w3.org/2000/09/xmldsig#' minOccurs="0"/>
* <choice maxOccurs="unbounded">
* <element ref="{http://iptc.org/std/nar/2006-10-01/}catalogRef"/>
* <element ref="{http://iptc.org/std/nar/2006-10-01/}catalog"/>
* </choice>
* <element ref="{http://iptc.org/std/nar/2006-10-01/}hopHistory" minOccurs="0"/>
* <element ref="{http://iptc.org/std/nar/2006-10-01/}rightsInfo" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://iptc.org/std/nar/2006-10-01/}itemMeta"/>
* </sequence>
* <attGroup ref="{http://iptc.org/std/nar/2006-10-01/}i18nAttributes"/>
* <attribute name="standard" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="standardversion" use="required">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <pattern value="[0-9]+\.[0-9]+"/>
* </restriction>
* </simpleType>
* </attribute>
* <attribute name="conformance" type="{http://www.w3.org/2001/XMLSchema}string" default="core" />
* <attribute name="guid" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="version" type="{http://www.w3.org/2001/XMLSchema}positiveInteger" default="1" />
* <anyAttribute processContents='lax' namespace='##other'/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "AnyItemType", propOrder = { "any", "catalogRefOrCatalog", "hopHistory", "rightsInfo", "itemMeta" })
@XmlSeeAlso({ KnowledgeItem.class, ConceptItem.class, NewsItem.class, PackageItem.class, CatalogItem.class,
        PlanningItem.class })
@ToString
public abstract class AnyItemType {

    // Optional foreign element (the schema allows lax content from the
    // XML-DSig namespace here).
    @XmlAnyElement(lax = true)
    protected Object any;

    @XmlElements({ @XmlElement(name = "catalogRef", type = CatalogRef.class),
            @XmlElement(name = "catalog", type = Catalog.class) })
    protected List<Object> catalogRefOrCatalog;

    protected HopHistory hopHistory;

    protected List<RightsInfoType> rightsInfo;

    @XmlElement(required = true)
    protected ItemMetadataType itemMeta;

    @XmlAttribute(name = "standard", required = true)
    protected String standard;

    @XmlAttribute(name = "standardversion", required = true)
    protected String standardversion;

    @XmlAttribute(name = "conformance")
    protected String conformance;

    @XmlAttribute(name = "guid", required = true)
    protected String guid;

    @XmlAttribute(name = "version")
    @XmlSchemaType(name = "positiveInteger")
    protected BigInteger version;

    @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace")
    protected String lang;

    @XmlAttribute(name = "dir")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String dir;

    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /**
     * Gets the value of the any property.
     *
     * @return possible object is {@link Object } {@link Element }
     */
    public Object getAny() {
        return any;
    }

    /**
     * Sets the value of the any property.
     *
     * @param value allowed object is {@link Object } {@link Element }
     */
    public void setAny(Object value) {
        this.any = value;
    }

    /**
     * Gets the value of the catalogRefOrCatalog property.
     *
     * <p>
     * Returns the live list (lazily created), not a snapshot — mutations are
     * reflected in this object, which is why there is no <CODE>set</CODE>
     * method. Allowed element types: {@link CatalogRef } and {@link Catalog }.
     */
    public List<Object> getCatalogRefOrCatalog() {
        if (catalogRefOrCatalog == null) {
            catalogRefOrCatalog = new ArrayList<Object>();
        }
        return this.catalogRefOrCatalog;
    }

    /**
     * Gets the value of the hopHistory property.
     *
     * @return possible object is {@link HopHistory }
     */
    public HopHistory getHopHistory() {
        return hopHistory;
    }

    /**
     * Sets the value of the hopHistory property.
     *
     * @param value allowed object is {@link HopHistory }
     */
    public void setHopHistory(HopHistory value) {
        this.hopHistory = value;
    }

    /**
     * Gets the value of the rightsInfo property.
     *
     * <p>
     * Returns the live list (lazily created), not a snapshot — mutations are
     * reflected in this object, which is why there is no <CODE>set</CODE>
     * method. Allowed element type: {@link RightsInfoType }.
     */
    public List<RightsInfoType> getRightsInfo() {
        if (rightsInfo == null) {
            rightsInfo = new ArrayList<RightsInfoType>();
        }
        return this.rightsInfo;
    }

    /**
     * Gets the value of the itemMeta property.
     *
     * @return possible object is {@link ItemMetadataType }
     */
    public ItemMetadataType getItemMeta() {
        return itemMeta;
    }

    /**
     * Sets the value of the itemMeta property.
     *
     * @param value allowed object is {@link ItemMetadataType }
     */
    public void setItemMeta(ItemMetadataType value) {
        this.itemMeta = value;
    }

    /**
     * Gets the value of the standard property.
     *
     * @return possible object is {@link String }
     */
    public String getStandard() {
        return standard;
    }

    /**
     * Sets the value of the standard property.
     *
     * @param value allowed object is {@link String }
     */
    public void setStandard(String value) {
        this.standard = value;
    }

    /**
     * Gets the value of the standardversion property.
     *
     * @return possible object is {@link String }
     */
    public String getStandardversion() {
        return standardversion;
    }

    /**
     * Sets the value of the standardversion property.
     *
     * @param value allowed object is {@link String }
     */
    public void setStandardversion(String value) {
        this.standardversion = value;
    }

    /**
     * Gets the value of the conformance property, defaulting to "core" when
     * the attribute is absent (per the schema default).
     *
     * @return possible object is {@link String }
     */
    public String getConformance() {
        if (conformance == null) {
            return "core";
        } else {
            return conformance;
        }
    }

    /**
     * Sets the value of the conformance property.
     *
     * @param value allowed object is {@link String }
     */
    public void setConformance(String value) {
        this.conformance = value;
    }

    /**
     * Gets the value of the guid property.
     *
     * @return possible object is {@link String }
     */
    public String getGuid() {
        return guid;
    }

    /**
     * Sets the value of the guid property.
     *
     * @param value allowed object is {@link String }
     */
    public void setGuid(String value) {
        this.guid = value;
    }

    /**
     * Gets the value of the version property, defaulting to 1 when the
     * attribute is absent (per the schema default).
     *
     * @return possible object is {@link BigInteger }
     */
    public BigInteger getVersion() {
        if (version == null) {
            // Use the shared constant instead of allocating a fresh
            // BigInteger("1") on every call (fix over the original).
            return BigInteger.ONE;
        } else {
            return version;
        }
    }

    /**
     * Sets the value of the version property.
     *
     * @param value allowed object is {@link BigInteger }
     */
    public void setVersion(BigInteger value) {
        this.version = value;
    }

    /**
     * Specifies the language of this property and potentially all descendant
     * properties. xml:lang values of descendant properties override this
     * value. Values are determined by Internet BCP 47.
     *
     * @return possible object is {@link String }
     */
    public String getLang() {
        return lang;
    }

    /**
     * Sets the value of the lang property.
     *
     * @param value allowed object is {@link String }
     */
    public void setLang(String value) {
        this.lang = value;
    }

    /**
     * Gets the value of the dir property.
     *
     * @return possible object is {@link String }
     */
    public String getDir() {
        return dir;
    }

    /**
     * Sets the value of the dir property.
     *
     * @param value allowed object is {@link String }
     */
    public void setDir(String value) {
        this.dir = value;
    }

    /**
     * Gets a map of attributes that aren't bound to any typed property on
     * this class, keyed by attribute QName. The map is live — add entries
     * directly; there is deliberately no setter.
     *
     * @return always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }
}
|
from django import test
from django.urls import reverse
from hexa.user_management.models import Membership, Team, User
from .models import ExternalDashboard, ExternalDashboardPermission
class ExternalDashboardTest(test.TestCase):
    """Access-control tests for the external-dashboard views.

    Visibility is granted per team through ExternalDashboardPermission:
    USER_BJORN belongs to TEAM (which holds a permission on DASHBOARD),
    while USER_NOTEAM belongs to no team and should see nothing.
    """

    @classmethod
    def setUpTestData(cls):
        """Create one team, two users (one team-less) and one permitted dashboard."""
        cls.TEAM = Team.objects.create(name="Test Team")
        cls.USER_NOTEAM = User.objects.create_user(
            "<EMAIL>",
            "noteam",
            accepted_tos=True,
        )
        cls.USER_BJORN = User.objects.create_user(
            "<EMAIL>",
            "bjornbjorn",
            accepted_tos=True,
        )
        Membership.objects.create(user=cls.USER_BJORN, team=cls.TEAM)
        cls.DASHBOARD = ExternalDashboard.objects.create(
            url="https://viz.company.invalid/", picture="__OVERRIDE_TEST__"
        )
        # Grant the dashboard to the team — only team members may see it.
        ExternalDashboardPermission.objects.create(
            external_dashboard=cls.DASHBOARD, team=cls.TEAM
        )

    def test_empty_index_dashboard(self):
        """A user without a team cannot see any dashboards in the index."""
        self.client.force_login(self.USER_NOTEAM)
        response = self.client.get(reverse("visualizations:visualization_index"))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context["dashboard_indexes"].count(), 0)

    def test_index_dashboard(self):
        """Bjorn (a team member) can see the list of dashboards."""
        self.client.force_login(self.USER_BJORN)
        response = self.client.get(reverse("visualizations:visualization_index"))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context["dashboard_indexes"].count(), 1)

    def test_dashboard_detail_noteam(self):
        """A user without a team gets a 404 for an existing dashboard's detail page."""
        self.client.force_login(self.USER_NOTEAM)
        response = self.client.get(
            reverse(
                "visualizations:dashboard_detail",
                kwargs={"dashboard_id": self.DASHBOARD.id},
            ),
        )
        # 404 rather than 403: unauthorized dashboards are hidden entirely.
        self.assertEqual(response.status_code, 404)

    def test_dashboard_detail_bjorn(self):
        """As a team member, Bjorn can see a dashboard's detail page."""
        self.client.force_login(self.USER_BJORN)
        response = self.client.get(
            reverse(
                "visualizations:dashboard_detail",
                kwargs={"dashboard_id": self.DASHBOARD.id},
            ),
        )
        self.assertEqual(response.status_code, 200)
        self.assertIsInstance(response.context["dashboard"], ExternalDashboard)

    def test_dashboard_image_bjorn(self):
        """As a team member, Bjorn can fetch a dashboard's screenshot image."""
        self.client.force_login(self.USER_BJORN)
        response = self.client.get(
            reverse(
                "visualizations:dashboard_image",
                kwargs={"dashboard_id": self.DASHBOARD.id},
            ),
        )
        self.assertEqual(response.status_code, 200)
|
package mainclient.methodAbstractNowDefault;
import main.methodAbstractNowDefault.IMethodAbstractNowDefaultOther;
import main.methodAbstractNowDefault.IMethodAbstractNowDefaultSub;
/**
 * Implements two sibling interfaces and overrides methodAbstractNowDef().
 * NOTE(review): judging by the package name, this appears to be a client
 * fixture for an "abstract method became default" compatibility scenario —
 * confirm against the owning test suite.
 */
public class MethodAbstractNowDefaultMultiIntSub implements IMethodAbstractNowDefaultSub, IMethodAbstractNowDefaultOther {
    @Override
    public int methodAbstractNowDef() {
        // Concrete override; always returns 0.
        return 0;
    }

    public int callMethod() {
        // Dispatches to the override above (returns 0).
        return methodAbstractNowDef();
    }
}
|
using UnityEngine;
/// <summary>
/// Shows and hides a single preloader created from a prefab. The prefab must
/// be supplied via SetPreloaderPrefab before ShowPreloader is called; the
/// instance is created lazily on first show and toggled active thereafter.
/// </summary>
public class PreloaderManager
{
    private GameObject preloaderPrefab;
    private GameObject activePreloader;

    public void SetPreloaderPrefab(GameObject go)
    {
        preloaderPrefab = go;
    }

    public void ShowPreloader()
    {
        // Guard clause: nothing to show without a prefab.
        if (preloaderPrefab == null)
        {
            Debug.LogError("Preloader prefab has not been set.");
            return;
        }

        // Re-activate the existing instance, or instantiate it lazily.
        if (activePreloader != null)
        {
            activePreloader.SetActive(true);
            return;
        }
        activePreloader = GameObject.Instantiate(preloaderPrefab);
    }

    public void HidePreloader()
    {
        // Explicit null check (not ?.) on purpose: UnityEngine.Object
        // overloads == for destroyed objects.
        if (activePreloader != null)
        {
            activePreloader.SetActive(false);
        }
    }
}
#!/bin/bash
# Reads symbol names (one per line) on stdin and prints those that are NOT
# listed in exports/gensymbol. Fortran-style symbols (trailing "_") are
# compared without the underscore; a few known-internal symbols are skipped.

# Print "$1" unless it appears (as a whole word) in exports/gensymbol.
report_if_missing() {
    grep -qw "$1" exports/gensymbol || echo "$1"
}

while read -r OBJ; do
    # Fortran-style symbol: strip the trailing underscore before comparing.
    if echo "$OBJ" | grep "_$" >/dev/null
    then
        [ "$OBJ" = "caxpyc_" ] && continue
        [ "$OBJ" = "zaxpyc_" ] && continue
        [ "$OBJ" = "blas_thread_shutdown_" ] && continue
        # BUG FIX: the original read `O1 = $(...)`; with spaces around "=",
        # bash parses that as running a command named "O1", so the variable
        # was never assigned. Assignment must have no spaces.
        O1=$(echo "$OBJ" | sed -e 's/_$//')
        report_if_missing "$O1"
        continue
    fi
    # C-style symbols: only the known API prefixes are checked.
    case "$OBJ" in
        cblas*|LAPACKE*|lapack*)
            report_if_missing "$OBJ"
            ;;
    esac
done
import random
def generate_random_matrix(n, m):
    """Return an n-by-m matrix (list of lists) of random ints in [1, 100].

    Args:
        n: number of rows (0 yields an empty matrix).
        m: number of columns per row.

    Returns:
        A list of n lists, each containing m integers drawn uniformly
        from 1 to 100 inclusive.
    """
    # Nested comprehension replaces the manual append loops (same behavior,
    # idiomatic and faster on CPython).
    return [[random.randint(1, 100) for _ in range(m)] for _ in range(n)]
<!DOCTYPE html>
<html>
<head>
  <title>My Site</title>
</head>
<body>
  <!-- Site banner: logo text plus a search form. -->
  <header>
    <h1>My Site Logo</h1>
    <form>
      <div>
        <input type="text" placeholder="Search..." />
      </div>
    </form>
  </header>
  <!-- Icon navigation. NOTE(review): href values are empty placeholders —
       fill in real destinations before shipping. -->
  <nav>
    <a href=""><img src="logo1.png" alt="Icon 1"/></a>
    <a href=""><img src="logo2.png" alt="Icon 2"/></a>
    <a href=""><img src="logo3.png" alt="Icon 3"/></a>
  </nav>
  <main>
    <!-- Your content goes here -->
  </main>
</body>
</html>
var productData = {
"result": "ok",
"message": "获取成功",
"data": [
{
"row_id": 1,
"ProID": 7224,
"ProductName": "冰糯种飘翠龙牌挂坠",
"Title": "",
"UpsetPrice": 45000,
"Price": 63000,
"MarketPrice": 63000,
"StockCount": 1,
"PostDate": "2016-06-25 17:42:09",
"BarCode": "6900042000167",
"IsStatus": 1,
"BrandID": 69,
"IsSpot": true,
"SupplierID": 150,
"AuditDate": "2016-09-24 16:52:58",
"ReadCount": 0,
"Property": 2,
"Contents": "质地细腻 纯洁无瑕玉养人一生,人养玉三年,渊生珠而崖不牯,玉在山而草木润翡翠经磨励封藏地表多年,它充满生机却又那么坚定不移。它吸收了天地灵气,再经能工巧匠之手,剩下来的事情,就是静静的等候与你相遇。",
"MID": 103,
"LastDate": "2016-09-24 16:52:58",
"AuditRemark": "",
"IsReadNew": 0,
"StaffID": 122,
"ProductNumber": "A-YXS0420167",
"CID": null,
"IsPrivate": true,
"Source": null,
"ProsectionID": 0,
"IsUpsetPrice": null,
"ImgUrl": "/GYSimg/image/0/07e32ad1-6e25-40eb-826b-bcc99789f2c2_Watermark.JPG?x-oss-process=image/resize,w_540,h_240,limit_0,m_fixed",
"IsVedioPath": 1125
},
{
"row_id": 2,
"ProID": 20248,
"ProductName": "冰种飘花如意挂坠",
"Title": "",
"UpsetPrice": 17000,
"Price": 23800,
"MarketPrice": 0,
"StockCount": 1,
"PostDate": "2016-09-17 09:00:31",
"BarCode": "6900037001111",
"IsStatus": 1,
"BrandID": 63,
"IsSpot": true,
"SupplierID": 145,
"AuditDate": "2016-09-24 16:47:24",
"ReadCount": 0,
"Property": 2,
"Contents": "质地细腻 纯洁无瑕玉养人一生,人养玉三年,渊生珠而崖不牯,玉在山而草木润翡翠经磨励封藏地表多年,它充满生机却又那么坚定不移。它吸收了天地灵气,再经能工巧匠之手,剩下来的事情,就是静静的等候与你相遇。\n",
"MID": 103,
"LastDate": "2016-09-24 16:47:24",
"AuditRemark": "",
"IsReadNew": 0,
"StaffID": 0,
"ProductNumber": "A-SY03701111",
"CID": 0,
"IsPrivate": null,
"Source": "0",
"ProsectionID": 0,
"IsUpsetPrice": null,
"ImgUrl": "/GYSimg/image/0/60bdaad3-a770-4ac0-b48b-ebb43ce11bf4_Watermark.JPG?x-oss-process=image/resize,w_540,h_240,limit_0,m_fixed",
"IsVedioPath": null
},
{
"row_id": 3,
"ProID": 20381,
"ProductName": "玻璃种无色蛋面手链",
"Title": "",
"UpsetPrice": 23000,
"Price": 32200,
"MarketPrice": 0,
"StockCount": 1,
"PostDate": "2016-09-23 16:50:46",
"BarCode": "6900031000036",
"IsStatus": 1,
"BrandID": 53,
"IsSpot": true,
"SupplierID": 133,
"AuditDate": "2016-09-24 16:44:38",
"ReadCount": 0,
"Property": 2,
"Contents": "质地细腻 纯洁无瑕玉养人一生,人养玉三年,渊生珠而崖不牯,玉在山而草木润翡翠经磨励封藏地表多年,它充满生机却又那么坚定不移。它吸收了天地灵气,再经能工巧匠之手,剩下来的事情,就是静静的等候与你相遇。",
"MID": 108,
"LastDate": "2016-09-24 16:44:38",
"AuditRemark": "",
"IsReadNew": 0,
"StaffID": 0,
"ProductNumber": "A-HT0310036",
"CID": 0,
"IsPrivate": null,
"Source": "0",
"ProsectionID": 0,
"IsUpsetPrice": null,
"ImgUrl": "/GYSimg/image/0/611d2be4-a952-4dd2-8030-01c76743480f_Watermark.JPG?x-oss-process=image/resize,w_540,h_240,limit_0,m_fixed",
"IsVedioPath": null
},
{
"row_id": 4,
"ProID": 20301,
"ProductName": "冰糯种阳绿水滴耳饰",
"Title": "",
"UpsetPrice": 5300,
"Price": 7420,
"MarketPrice": 0,
"StockCount": 1,
"PostDate": "2016-09-15 18:11:32",
"BarCode": "6900054000471",
"IsStatus": 1,
"BrandID": 81,
"IsSpot": true,
"SupplierID": 163,
"AuditDate": "2016-09-21 11:08:14",
"ReadCount": 0,
"Property": 2,
"Contents": "种质细腻,底子干净利落,造型饱满。",
"MID": 105,
"LastDate": "2016-09-21 11:08:14",
"AuditRemark": "",
"IsReadNew": 0,
"StaffID": 0,
"ProductNumber": "A-Y0005400471",
"CID": 0,
"IsPrivate": true,
"Source": "可乐",
"ProsectionID": 0,
"IsUpsetPrice": null,
"ImgUrl": "//upload/product/20160912/25a22977-517b-47a9-8ad8-7dafa70c993f_Watermark.jpg?x-oss-process=image/resize,w_540,h_240,limit_0,m_fixed",
"IsVedioPath": 1850
},
{
"row_id": 5,
"ProID": 20333,
"ProductName": "冰糯种紫罗兰佛手挂坠",
"Title": "",
"UpsetPrice": 7500,
"Price": 10500,
"MarketPrice": 0,
"StockCount": 1,
"PostDate": "2016-09-17 09:33:51",
"BarCode": "6900060000207",
"IsStatus": 1,
"BrandID": 87,
"IsSpot": true,
"SupplierID": 169,
"AuditDate": "2016-09-21 11:06:41",
"ReadCount": 0,
"Property": 2,
"Contents": "质地细腻 纯洁无瑕玉养人一生,人养玉三年,渊生珠而崖不牯,玉在山而草木润翡翠经磨励封藏地表多年,它充满生机却又那么坚定不移。它吸收了天地灵气,再经能工巧匠之手,剩下来的事情,就是静静的等候与你相遇。",
"MID": 103,
"LastDate": "2016-09-21 11:06:41",
"AuditRemark": "",
"IsReadNew": 0,
"StaffID": 0,
"ProductNumber": "A-MF0600207",
"CID": 0,
"IsPrivate": null,
"Source": "0",
"ProsectionID": 0,
"IsUpsetPrice": null,
"ImgUrl": "/GYSimg/image/0/71feea00-f56b-46c0-bc25-b26aa5b7cd99_Watermark.JPG?x-oss-process=image/resize,w_540,h_240,limit_0,m_fixed",
"IsVedioPath": null
},
{
"row_id": 6,
"ProID": 20338,
"ProductName": "冰种晴水观音挂坠",
"Title": "",
"UpsetPrice": 50000,
"Price": 70000,
"MarketPrice": 0,
"StockCount": 1,
"PostDate": "2016-09-20 09:16:12",
"BarCode": "6900060000223",
"IsStatus": 1,
"BrandID": 87,
"IsSpot": true,
"SupplierID": 169,
"AuditDate": "2016-09-21 11:03:26",
"ReadCount": 0,
"Property": 2,
"Contents": "质地细腻 纯洁无瑕玉养人一生,人养玉三年,渊生珠而崖不牯,玉在山而草木润翡翠经磨励封藏地表多年,它充满生机却又那么坚定不移。它吸收了天地灵气,再经能工巧匠之手,剩下来的事情,就是静静的等候与你相遇。",
"MID": 103,
"LastDate": "2016-09-21 11:03:26",
"AuditRemark": "",
"IsReadNew": 0,
"StaffID": 0,
"ProductNumber": "A-MF0600223",
"CID": 0,
"IsPrivate": null,
"Source": "0",
"ProsectionID": 0,
"IsUpsetPrice": null,
"ImgUrl": "/GYSimg/image/0/2a541582-29d7-44fd-8b80-9ff7322be624_Watermark.JPG?x-oss-process=image/resize,w_540,h_240,limit_0,m_fixed",
"IsVedioPath": null
},
{
"row_id": 7,
"ProID": 20372,
"ProductName": "冰糯种红翡大鹏展翅挂坠",
"Title": "",
"UpsetPrice": 9000,
"Price": 12600,
"MarketPrice": 0,
"StockCount": 1,
"PostDate": "2016-09-20 09:20:10",
"BarCode": "6900057000008",
"IsStatus": 1,
"BrandID": 84,
"IsSpot": true,
"SupplierID": 166,
"AuditDate": "2016-09-21 11:02:48",
"ReadCount": 0,
"Property": 2,
"Contents": "质地细腻 纯洁无瑕玉养人一生,人养玉三年,渊生珠而崖不牯,玉在山而草木润翡翠经磨励封藏地表多年,它充满生机却又那么坚定不移。它吸收了天地灵气,再经能工巧匠之手,剩下来的事情,就是静静的等候与你相遇。",
"MID": 103,
"LastDate": "2016-09-21 11:02:48",
"AuditRemark": "",
"IsReadNew": 0,
"StaffID": 0,
"ProductNumber": "A-AH0570008",
"CID": 0,
"IsPrivate": null,
"Source": "0",
"ProsectionID": 0,
"IsUpsetPrice": null,
"ImgUrl": "/GYSimg/image/0/62830f5d-6ff4-4a95-85a0-e4d381c0a981_Watermark.JPG?x-oss-process=image/resize,w_540,h_240,limit_0,m_fixed",
"IsVedioPath": null
},
{
"row_id": 8,
"ProID": 20384,
"ProductName": "高冰种无色连中三元挂坠",
"Title": "",
"UpsetPrice": 48000,
"Price": 67200,
"MarketPrice": 0,
"StockCount": 1,
"PostDate": "2016-09-20 09:21:56",
"BarCode": "6900031000027",
"IsStatus": 1,
"BrandID": 53,
"IsSpot": true,
"SupplierID": 133,
"AuditDate": "2016-09-21 11:02:26",
"ReadCount": 0,
"Property": 2,
"Contents": "质地细腻 纯洁无瑕玉养人一生,人养玉三年,渊生珠而崖不牯,玉在山而草木润翡翠经磨励封藏地表多年,它充满生机却又那么坚定不移。它吸收了天地灵气,再经能工巧匠之手,剩下来的事情,就是静静的等候与你相遇。",
"MID": 103,
"LastDate": "2016-09-21 11:02:26",
"AuditRemark": "",
"IsReadNew": 0,
"StaffID": 0,
"ProductNumber": "A-HF0310027",
"CID": 0,
"IsPrivate": null,
"Source": "0",
"ProsectionID": 0,
"IsUpsetPrice": null,
"ImgUrl": "/GYSimg/image/0/e796c5f1-4cc9-4838-99b6-1c40f78e9e05_Watermark.JPG?x-oss-process=image/resize,w_540,h_240,limit_0,m_fixed",
"IsVedioPath": null
},
{
"row_id": 9,
"ProID": 20337,
"ProductName": "冰糯种飘翠如意龙挂坠",
"Title": "",
"UpsetPrice": 27000,
"Price": 37800,
"MarketPrice": 0,
"StockCount": 1,
"PostDate": "2016-09-17 09:10:52",
"BarCode": "6900060000224",
"IsStatus": 1,
"BrandID": 87,
"IsSpot": true,
"SupplierID": 169,
"AuditDate": "2016-09-18 09:19:56",
"ReadCount": 0,
"Property": 2,
"Contents": "质地细腻 纯洁无瑕玉养人一生,人养玉三年,渊生珠而崖不牯,玉在山而草木润翡翠经磨励封藏地表多年,它充满生机却又那么坚定不移。它吸收了天地灵气,再经能工巧匠之手,剩下来的事情,就是静静的等候与你相遇。",
"MID": 103,
"LastDate": "2016-09-18 09:19:56",
"AuditRemark": "",
"IsReadNew": 0,
"StaffID": 0,
"ProductNumber": "A-MF0600224",
"CID": 0,
"IsPrivate": null,
"Source": "0",
"ProsectionID": 0,
"IsUpsetPrice": null,
"ImgUrl": "/GYSimg/image/0/c7a36fb0-0d7a-4eaa-9cca-291db6a50d5a_Watermark.JPG?x-oss-process=image/resize,w_540,h_240,limit_0,m_fixed",
"IsVedioPath": null
},
{
"row_id": 10,
"ProID": 20216,
"ProductName": "冰种无色蛋面戒指",
"Title": "",
"UpsetPrice": 45000,
"Price": 63000,
"MarketPrice": 0,
"StockCount": 1,
"PostDate": "2016-09-17 09:10:26",
"BarCode": "6900056000044",
"IsStatus": 1,
"BrandID": 83,
"IsSpot": true,
"SupplierID": 165,
"AuditDate": "2016-09-18 09:19:24",
"ReadCount": 0,
"Property": 2,
"Contents": "质地细腻 纯洁无瑕玉养人一生,人养玉三年,渊生珠而崖不牯,玉在山而草木润翡翠经磨励封藏地表多年,它充满生机却又那么坚定不移。它吸收了天地灵气,再经能工巧匠之手,剩下来的事情,就是静静的等候与你相遇。\n",
"MID": 104,
"LastDate": "2016-09-18 09:19:24",
"AuditRemark": "",
"IsReadNew": 0,
"StaffID": 0,
"ProductNumber": "A-SS0560044",
"CID": 0,
"IsPrivate": null,
"Source": "0",
"ProsectionID": 0,
"IsUpsetPrice": null,
"ImgUrl": "/GYSimg/image/0/862eab41-07fe-44cd-8e7c-fd56107dbf6b_Watermark.JPG?x-oss-process=image/resize,w_540,h_240,limit_0,m_fixed",
"IsVedioPath": null
}
],
"tableName": null,
"dataset": null,
"count": 0,
"other": null
} |
<gh_stars>10-100
from allegation.factories import (
AllegationFactory, OfficerFactory, OfficerAllegationFactory)
from common.tests.core import SimpleTestCase
from mobile.services.mobile_suggestion_service import suggest_crid, suggest_officer_star, suggest_officer_name, \
get_crid_from_query
class MobileSuggestionServiceTest(SimpleTestCase):
    """Behavioural tests for the mobile suggestion service helpers."""

    def test_get_crid_from_query_succesful_return_crid(self):
        # Every recognised query shape should yield the embedded crid.
        expected_crid = '123456'
        valid_formats = ['cr {crid}', 'CR {crid}', 'cr{crid}', 'CR{crid}', 'crid {crid}', 'CRID {crid}', '{crid}']
        for fmt in valid_formats:
            get_crid_from_query(fmt.format(crid=expected_crid)).should.be.equal(expected_crid)

    def test_get_crid_from_query_failed_return_empty(self):
        # Malformed prefixes must not be parsed as crid queries.
        expected_crid = '123456'
        invalid_formats = ['cri {crid}', 'CRd {crid}', 'cr_{crid}', 'aCR{crid}', 'cridd {crid}', 'CRID 1 {crid}']
        for fmt in invalid_formats:
            get_crid_from_query(fmt.format(crid=expected_crid)).should.be.equal('')

    def test_suggest_crid(self):
        matched = AllegationFactory(crid='1051333')
        OfficerAllegationFactory(allegation=matched)
        OfficerAllegationFactory(allegation=AllegationFactory(crid='306697'))

        full_crid = str(matched.crid)
        # A partial crid must not match anything.
        suggest_crid(full_crid[0:3]).should.equal([])

        expected_entry = matched.as_suggestion_entry()
        results = suggest_crid(full_crid)
        # Normalise the incident datetime to a date before comparing entries.
        results[0]['meta']['allegation'].incident_date = \
            results[0]['meta']['allegation'].incident_date.date()
        results.should.equal([expected_entry])

    def test_suggest_crid_with_prefix(self):
        crid = OfficerAllegationFactory().allegation.crid
        prefixed_formats = ['cr {crid}', 'CR {crid}', 'cr{crid}', 'CR{crid}', 'crid {crid}', 'CRID {crid}']
        for fmt in prefixed_formats:
            matches = suggest_crid(fmt.format(crid=crid))
            matches.should.have.length_of(1)
            matches[0]['meta']['allegation'].crid.should.be.equal(str(crid))

    def test_suggest_officer_badge(self):
        target = OfficerFactory(star=19663)
        OfficerFactory(star=17489)
        badge = str(target.star)

        # Neither a partial star number nor garbage should match.
        suggest_officer_star(badge[0:2]).should.equal([])
        suggest_officer_star('bad-query').should.equal([])
        suggest_officer_star(badge).should.equal(
            [target.as_suggestion_entry()])

    def test_suggest_officer_name(self):
        target = OfficerFactory(officer_first='Test', officer_last='Name')
        OfficerFactory(officer_first='Other', officer_last='Bad')

        # Partial first/last name prefixes match only the target officer.
        len(suggest_officer_name(target.officer_first[0:2])).should.equal(1)
        len(suggest_officer_name(target.officer_last[0:2])).should.equal(1)
        len(suggest_officer_name('nonmatchquery')).should.equal(0)

    def test_order_officer_by_number_of_complaints(self):
        shared_name = 'matched'
        one_complaint = OfficerFactory(
            officer_first=shared_name, allegations_count=1)
        three_complaints = OfficerFactory(
            officer_first=shared_name, allegations_count=3)
        two_complaints = OfficerFactory(
            officer_first=shared_name, allegations_count=2)

        suggestions = suggest_officer_name(shared_name)
        # Officers with more complaints must come first.
        suggested_keys = [entry['resource_key'] for entry in suggestions]
        suggested_keys.should.equal(
            [three_complaints.pk, two_complaints.pk, one_complaint.pk])
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.