text
stringlengths
1
1.05M
#!/bin/sh
#
# Install git-lfs v2.1.1 into /usr/bin and enable it for this machine,
# skipping the smudge filter (LFS objects are not fetched on checkout).
set -eu

_main() {
  # 'local' is not POSIX sh; a plain variable is fine in this short script.
  tmpdir="$(mktemp -d git_lfs_install.XXXXXX)"
  # Remove the scratch dir on every exit path, not only the happy one.
  trap 'rm -rf "$tmpdir"' EXIT

  cd "$tmpdir"
  # -f: fail on HTTP errors instead of saving an error page as the tarball.
  curl -fLo git.tar.gz https://github.com/github/git-lfs/releases/download/v2.1.1/git-lfs-linux-amd64-2.1.1.tar.gz
  # Decompress and extract in one step (replaces gunzip + tar xf).
  tar xzf git.tar.gz
  mv git-lfs-2.1.1/git-lfs /usr/bin
  cd ..

  # --skip-smudge: do not download LFS objects automatically on checkout.
  git lfs install --skip-smudge
}

_main "$@"
#!/bin/bash getArrayVar _V themeoptions "colors=dark"
<filename>knex/migrations/20200512190140_create_table_words.js exports.up = (knex) => knex.schema .hasTable('words') .then((exists) => { if (!exists) { knex.schema.createTable('words', (table) => { table.increments().primary(); table.integer('word').notNullable().unique(); table.timestamps(true, true); }); } }) .catch((err) => { console.error(err); }); exports.down = (knex) => knex.schema .hasTable('words') .then((exists) => { if (exists) { knex.schema.dropTableIfExists('words'); } }) .catch((err) => { console.error(err); }); exports.config = { transaction: false };
/*
 * Copyright 2002-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.aot.test.generator.compile;

import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

import javax.tools.FileObject;
import javax.tools.ForwardingJavaFileManager;
import javax.tools.JavaFileManager;
import javax.tools.JavaFileObject;

/**
 * {@link JavaFileManager} to create in-memory {@link DynamicClassFileObject
 * ClassFileObjects} when compiling.
 *
 * @author <NAME>
 * @since 6.0
 */
class DynamicJavaFileManager extends ForwardingJavaFileManager<JavaFileManager> {

	// Class loader handed back for every location (see getClassLoader).
	private final ClassLoader classLoader;

	// Compiled class files keyed by class name; LinkedHashMap preserves
	// compilation order, synchronizedMap guards concurrent compiler threads.
	private final Map<String, DynamicClassFileObject> classFiles = Collections.synchronizedMap(
			new LinkedHashMap<>());

	DynamicJavaFileManager(JavaFileManager fileManager, ClassLoader classLoader) {
		super(fileManager);
		this.classLoader = classLoader;
	}

	@Override
	public ClassLoader getClassLoader(Location location) {
		// Always resolve against the supplied loader, ignoring the location.
		return this.classLoader;
	}

	@Override
	public JavaFileObject getJavaFileForOutput(Location location, String className,
			JavaFileObject.Kind kind, FileObject sibling) throws IOException {
		// Intercept only .class output; everything else delegates to the
		// wrapped file manager.
		if (kind == JavaFileObject.Kind.CLASS) {
			return this.classFiles.computeIfAbsent(className, DynamicClassFileObject::new);
		}
		return super.getJavaFileForOutput(location, className, kind, sibling);
	}

	// Read-only view of everything compiled so far.
	Map<String, DynamicClassFileObject> getClassFiles() {
		return Collections.unmodifiableMap(this.classFiles);
	}

}
<html>
<head>
<title>Fibonacci Sequence</title>
</head>
<body>
<h1>Fibonacci Sequence</h1>
<ul>
<li>0</li>
<li>1</li>
<?php
// Emit the next 10 Fibonacci numbers as list items (the seed values 0 and 1
// are hard-coded as static <li> elements above).
$a = 0;
$b = 1;
for ($i = 0; $i < 10; $i++) {
    $c = $a + $b;
    echo '<li>'.$c.'</li>';
    $a = $b;
    $b = $c;
}
?>
</ul>
</body>
</html>
class StaticDynamicDim:
    """Pairs a statically-known dimension value with its dynamic counterpart.

    `static` holds a compile-time value (may become None when an operation
    cannot be evaluated statically); `dynamic` always holds a usable value.
    """

    def __init__(self, static, dynamic):
        self.static = static
        self.dynamic = dynamic

    def op(self, func):
        """Apply `func` to this dimension, preferring the static value.

        If `func(self.static)` succeeds, both sides of the result carry the
        statically computed value. Otherwise the static side becomes None and
        `func` is applied to the dynamic value.
        """
        try:
            new_static = func(self.static)
            return StaticDynamicDim(new_static, new_static)
        except Exception:
            # Bug fix: fall back to the *dynamic* value here. The original
            # re-applied func to self.static, which had just raised, so the
            # fallback path could never succeed.
            return StaticDynamicDim(None, func(self.dynamic))

    def __add__(self, other):
        # Element-wise addition of both the static and dynamic components.
        return StaticDynamicDim(self.static + other.static,
                                self.dynamic + other.dynamic)
// core/src/main/java/de/ids_mannheim/korap/interfaces/EncryptionIface.java
package de.ids_mannheim.korap.interfaces;

import de.ids_mannheim.korap.exceptions.KustvaktException;
import de.ids_mannheim.korap.user.User;

import java.io.UnsupportedEncodingException;
import java.security.NoSuchAlgorithmException;
import java.util.Map;

/**
 * Abstraction over the hashing / token-generation backend.
 * Implementations choose one of the {@link Encryption} algorithms.
 */
public interface EncryptionIface {

    // Supported hashing back-ends; SIMPLE is kept only for legacy data.
    public enum Encryption {
        @Deprecated
        SIMPLE, ESAPICYPHER, BCRYPT
    }

    /**
     * One-way hashing of String input. Used to canonicalize
     *
     * @param input
     * @param salt
     * @return
     */
    public String secureHash (String input, String salt)
            throws KustvaktException;

    // Convenience overload without an explicit salt.
    public String secureHash (String input);

    /**
     * Check a plaintext value against a stored hash.
     *
     * @param plain
     * @param hash
     * @param salt
     * @return
     */
    public boolean checkHash (String plain, String hash, String salt);

    public boolean checkHash (String plain, String hash);

    /**
     * create random String to be used as authentication token
     *
     * @return
     */
    public String createToken (boolean hash, Object ... obj);

    public String createToken ();

    /**
     * create a random Integer to be used as ID for databases
     *
     * @return
     */
    public String createRandomNumber (Object ... obj);

    public String encodeBase ();

    // @Deprecated
    //public Map<String, Object> validateMap (Map<String, Object> map)
    //        throws KustvaktException;

    //@Deprecated
    //public String validateEntry (String input, String type)
    //        throws KustvaktException;
}
package com.galfins.gnss_compare.PvtMethods;

import android.location.Location;

import java.util.HashMap;
import java.util.Set;

import com.galfins.gnss_compare.Constellations.Constellation;
import com.galfins.gogpsextracts.Coordinates;

/**
 * Created by <NAME> on 1/20/2018.
 *
 * This class is for implementing a generic interface for PVT calculation methods
 */
public abstract class PvtMethod {

    /**
     * Calculates pose based on given parameters
     * @param constellation satellite constellation object for which the calculations are to
     *                      be performed
     * @return calculated pose of the receiver
     */
    public abstract Coordinates calculatePose( Constellation constellation );

    // Name -> implementation class registry, filled by each subclass's
    // registerClass() call (see initialize()).
    private static HashMap<String, Class<? extends PvtMethod>> registeredObjects = new HashMap<>();

    // Register a method under a UI-visible name; silently keeps the first
    // registration if the name is already taken.
    protected static void register(String constellationName, Class<?extends PvtMethod> objectClass) {
//        if(registeredObjects.containsKey(constellationName))
//            throw new IllegalArgumentException("This key is already registered! Select a different name!");

        if(!registeredObjects.containsKey(constellationName))
            registeredObjects.put(constellationName, objectClass);
    }

    public static Set<String> getRegistered(){
        return registeredObjects.keySet();
    }

    /**
     * @return Name of the PVT method, which is to be displayed in the UI
     */
    public abstract String getName();

    public static Class<? extends PvtMethod> getClassByName(String name) {
        return registeredObjects.get(name);
    }

    public abstract double getClockBias();

    // Guard so initialize() registers the built-in methods only once.
    private static boolean initialized = false;

    // Registers all built-in PVT methods; safe to call repeatedly.
    public static void initialize() {
        if(!initialized) {
            WeightedLeastSquares.registerClass();
            StaticExtendedKalmanFilter.registerClass();
            DynamicExtendedKalmanFilter.registerClass();
            PedestrianStaticExtendedKalmanFilter.registerClass();
            initialized = true;
        }
    }

    // Logging hooks: no-op by default, overridden by methods that log.
    public void startLog(String name) { }

    public void stopLog() { }

    public void logError(double latError, double lonError) { }

    public void logFineLocation(Location location) {}
}
// src/app/Modules/Posts/details.component.ts
import { Component, OnInit } from '@angular/core';
import { PostsService } from './posts.service';
import {ActivatedRoute} from "@angular/router";

@Component ({
    selector: 'my-app',
    templateUrl: 'app/Modules/Posts/details.component.html',
    providers: [ PostsService]
})
// Detail view for a single post: loads its comments through PostsService
// using the :id route parameter.
export class DetailPosts implements OnInit {
    // Comments of the displayed post, as delivered by getDetailPost.
    ListComments: any[];
    id :any;

    constructor(public PostsService: PostsService, private route: ActivatedRoute) {
        // this.route = route;
        //this.route.params.subscribe( params => console.log(params) );
    }

    ngOnInit() {
        // Snapshot read of the id; NOTE(review): if this component can be
        // re-used for a different id without being recreated, switch to
        // route.paramMap.subscribe -- confirm the routing setup.
        let id = this.route.snapshot.paramMap.get('id');
        //console.log(id);
        this.PostsService.getDetailPost(id).subscribe((data) => this.ListComments = data)
    }
}
"like scenario_login.txt in NModel WebApplication"

from WebModel import Login, Logout

# Restrict the model to these two actions only.
actions = (Login, Logout) # just these to allow interleaving

# One scenario: a successful login followed by a logout for the same user.
# Tuple shape: (action, args, expected_result).
testSuite = [
    [
        (Login, ( 'VinniPuhh', 'Correct' ), 'Success'),
        (Logout, ( 'VinniPuhh', ), None)
    ]
]
/*! /support/test/capability/touch 1.0.2 | http://nucleus.qoopido.com | (c) 2015 <NAME> */
/* Minified AMD-style capability probe: resolves a pledge when the
   environment supports touch input (ontouchstart, DocumentTouch or
   maxTouchPoints/msMaxTouchPoints > 0), rejects otherwise.
   Generated output -- edit the unminified source, not this file. */
!function(e,n){"use strict";function o(o){var t=o.defer();return"ontouchstart"in e||"DocumentTouch"in e&&document instanceof DocumentTouch||n.maxTouchPoints>0||n.msMaxTouchPoints>0?t.resolve():t.reject(),t.pledge}provide(["/demand/pledge"],o)}(this,navigator);
//# sourceMappingURL=touch.js.map
#!/bin/bash
# Build the minio-test-server docker image locally; when called with exactly
# one argument, --push, also push the image to eu.gcr.io/pagero-build.

readonly image=eu.gcr.io/pagero-build/minio-test-server:1.0

if [[ $# -ne 1 || -z "$1" ]]; then
  # No argument (or more than one): local build only.
  echo "Creating the minio-test-server docker image locally"
  docker build --tag "$image" .
elif [[ "$1" == "--push" ]]; then
  echo "Creating the minio-test-server and pushing to eu.gcr.io/pagero-build"
  docker build --tag "$image" .
  docker push "$image"
else
  echo "usage buildDocker [options]"
  echo "Options:"
  echo " --push push the local build image to cloud"
fi
import { IsEmail, IsNotEmpty } from 'class-validator';
import { AuthErrors } from '../auth.errors';

// Request payload for the login endpoint. Both validators deliberately
// report the same INVALID_CREDENTIALS message so responses do not reveal
// which field failed validation.
export class LoginDto {
  @IsEmail({}, { message: AuthErrors.INVALID_CREDENTIALS })
  emailAddress: string;

  @IsNotEmpty({ message: AuthErrors.INVALID_CREDENTIALS })
  password: string;
}
package com.vaadin.tests.themes.valo.test;

import com.vaadin.server.FontAwesome;
import com.vaadin.ui.Button;
import com.vaadin.ui.Grid;
import com.vaadin.ui.TextField;
import com.vaadin.ui.themes.ValoTheme;
import eu.maxschuster.vaadin.autocompletetextfield.AutocompleteSuggestionProvider;
import eu.maxschuster.vaadin.autocompletetextfield.AutocompleteTextField;
import eu.maxschuster.vaadin.autocompletetextfield.provider.CollectionSuggestionProvider;
import eu.maxschuster.vaadin.autocompletetextfield.provider.MatchMode;
import ve.com.pt.base.new_views.DetailUI;

import java.util.Arrays;
import java.util.Collection;
import java.util.Locale;

/**
 * TestDetail
 * Created by ypetrilli on 09/09/2016.
 *
 * Demo detail screen exercising the DetailUI building blocks: a form with an
 * autocomplete field, header buttons, and a pre-populated detail grid.
 */
public class TestDetail extends DetailUI {

    // Plain text input shown in the form section.
    private TextField code;

    public TestDetail() {
        super();
        setTitle("Detalle/Nuevo");
    }

    @Override
    public void buildHeader(boolean showHeader) {
        // NOTE(review): the showHeader argument is ignored here (and in the
        // other build* overrides below) -- the section is always shown.
        // Confirm this is intentional.
        super.buildHeader(true);
    }

    @Override
    public void buildForm() {
        super.buildForm();
        form.setStyleName(ValoTheme.FORMLAYOUT_LIGHT, false);
        code = new TextField("Código");
        code.setInputPrompt("código");
        form.addComponent(code);
        // Demo data set for the autocomplete field below.
        Collection<String> theJavas = Arrays.asList("Jav", "JavaScript", "Join Java", "JavaFX Script");
        AutocompleteSuggestionProvider suggestionProvider =
                new CollectionSuggestionProvider(theJavas, MatchMode.CONTAINS, true, Locale.US);
        AutocompleteTextField field = new AutocompleteTextField("Campo con autocompletado");
        field.setInputPrompt("parámetro");
        field.setSuggestionProvider(suggestionProvider);
        form.addComponent(field);
    }

    @Override
    public void buildHeaderGrid(boolean showHeaderGrid) {
        super.buildHeaderGrid(true);
        headerGrid.addComponents(new Button(FontAwesome.FILTER), new Button(FontAwesome.PLUS));
    }

    @Override
    public void buildDetailGrid(boolean showDetailGrid) {
        super.buildDetailGrid(true);
        // Three string columns plus demo rows.
        detailGrid.addColumn("Name", String.class);
        detailGrid.addColumn("Description", String.class);
        detailGrid.addColumn("Version", String.class);
        detailGrid.setSelectionMode(Grid.SelectionMode.MULTI);
        detailGrid.addRow("com.vaadin", "vaadin-server", "7.4.0");
        detailGrid.addRow("com.vaadin", "vaadin-client-compiled", "7.4.0");
        detailGrid.addRow("com.vaadin", "vaadin-client", "7.7.0");
    }

    @Override
    public void buildFooter(boolean showFooter) {
        super.buildFooter(true);
    }
}
// internal/test/api/post_test.go
//
// HTTP-level tests for the post endpoints. Each test arms repository mocks,
// performs an authenticated request against the test server, and compares
// the JSON-round-tripped response with the expected entity.
package api

import (
	"bytes"
	"encoding/json"
	"io/ioutil"
	"net/http"

	"redditclone/internal/domain/post"
	"redditclone/internal/domain/user"
	"redditclone/internal/domain/vote"
	"redditclone/internal/pkg/apperror"
	"redditclone/internal/pkg/errorshandler"

	"github.com/minipkg/selection_condition"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

// POST /api/posts creates a post and echoes it back with 201.
func (s *ApiTestSuite) TestPost_Create() {
	var result interface{}
	var expected interface{}
	require := require.New(s.T())
	assert := assert.New(s.T())
	s.setupSession()

	// The request payload is the fixture post stripped of server-side fields.
	newPost := &post.Post{}
	*newPost = *s.entities.post
	newPost.ID = ""
	newPost.Comments = nil
	newPost.Votes = nil
	s.repositoryMocks.post.On("Create", mock.Anything, newPost).Return(error(nil))

	b, err := json.Marshal(newPost)
	require.NoErrorf(err, "can not json.Marshal() a value: %v, error", newPost, err)
	reqBody := bytes.NewReader(b)

	uri := "/api/posts"
	expectedData := newPost
	expectedStatus := http.StatusCreated

	req, _ := http.NewRequest(http.MethodPost, s.server.URL+uri, reqBody)
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Authorization", "Bearer "+s.token)

	resp, err := s.client.Do(req)
	require.NoErrorf(err, "request error: %v", err)
	defer resp.Body.Close()

	resBody, err := ioutil.ReadAll(resp.Body)
	require.NoErrorf(err, "read body error: %v", err)

	assert.Equalf(expectedStatus, resp.StatusCode, "expected http status %v, got %v", expectedStatus, resp.StatusCode)

	err = json.Unmarshal(resBody, &result)
	require.NoErrorf(err, "can not unpack json %q, error: %v", string(resBody), err)

	// Round-trip the expectation through JSON so both sides compare as
	// generic interface{} trees.
	jsonData, err := json.Marshal(expectedData)
	json.Unmarshal(jsonData, &expected)

	assert.Equalf(expected, result, "results not match\nGot: %#v\nExpected: %#v", result, expectedData)
}

// DELETE /api/post/{id} removes a post and returns the generic success body.
func (s *ApiTestSuite) TestPost_Delete() {
	var result interface{}
	var expected interface{}
	require := require.New(s.T())
	assert := assert.New(s.T())
	s.setupSession()

	newPost := &post.Post{}
	*newPost = *s.entities.post
	newPost.ID = ""
	newPost.Comments = nil
	newPost.Votes = nil
	s.repositoryMocks.post.On("Delete", mock.Anything, s.entities.post.ID).Return(error(nil))

	uri := "/api/post/" + s.entities.post.ID
	expectedData := errorshandler.SuccessMessage()
	expectedStatus := http.StatusOK

	req, _ := http.NewRequest(http.MethodDelete, s.server.URL+uri, nil)
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Authorization", "Bearer "+s.token)

	resp, err := s.client.Do(req)
	require.NoErrorf(err, "request error: %v", err)
	defer resp.Body.Close()

	resBody, err := ioutil.ReadAll(resp.Body)
	require.NoErrorf(err, "read body error: %v", err)

	assert.Equalf(expectedStatus, resp.StatusCode, "expected http status %v, got %v", expectedStatus, resp.StatusCode)

	err = json.Unmarshal(resBody, &result)
	require.NoErrorf(err, "can not unpack json, error: %v", err)

	jsonData, err := json.Marshal(expectedData)
	json.Unmarshal(jsonData, &expected)

	assert.Equalf(expected, result, "results not match\nGot: %#v\nExpected: %#v", result, expectedData)
}

// GET /api/post/{id} fetches a post and bumps its view counter.
func (s *ApiTestSuite) TestPost_Get() {
	var result interface{}
	var expected interface{}
	require := require.New(s.T())
	assert := assert.New(s.T())
	s.setupSession()

	// The handler is expected to increment Views and persist the change.
	p := &post.Post{}
	*p = *s.entities.post
	p.Views++
	s.repositoryMocks.post.On("Get", mock.Anything, s.entities.post.ID).Return(s.entities.post, error(nil))
	s.repositoryMocks.post.On("Update", mock.Anything, p).Return(error(nil))

	uri := "/api/post/" + s.entities.post.ID
	expectedData := p
	expectedStatus := http.StatusOK

	req, _ := http.NewRequest(http.MethodGet, s.server.URL+uri, nil)
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Authorization", "Bearer "+s.token)

	resp, err := s.client.Do(req)
	require.NoErrorf(err, "request error: %v", err)
	defer resp.Body.Close()

	resBody, err := ioutil.ReadAll(resp.Body)
	require.NoErrorf(err, "read body error: %v", err)

	assert.Equalf(expectedStatus, resp.StatusCode, "expected http status %v, got %v", expectedStatus, resp.StatusCode)

	err = json.Unmarshal(resBody, &result)
	require.NoErrorf(err, "can not unpack json, error: %v", err)

	jsonData, err := json.Marshal(expectedData)
	json.Unmarshal(jsonData, &expected)

	assert.Equalf(expected, result, "results not match\nGot: %#v\nExpected: %#v", result, expectedData)
}

// GET /api/posts lists all posts (empty filter).
func (s *ApiTestSuite) TestPost_List() {
	var result interface{}
	var expected interface{}
	require := require.New(s.T())
	assert := assert.New(s.T())
	s.setupSession()

	list := []post.Post{*s.entities.post}
	query := selection_condition.SelectionCondition{
		Where: &post.Post{},
	}
	s.repositoryMocks.post.On("Query", mock.Anything, query).Return(list, error(nil))

	uri := "/api/posts"
	expectedData := list
	expectedStatus := http.StatusOK

	req, _ := http.NewRequest(http.MethodGet, s.server.URL+uri, nil)
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Authorization", "Bearer "+s.token)

	resp, err := s.client.Do(req)
	require.NoErrorf(err, "request error: %v", err)
	defer resp.Body.Close()

	resBody, err := ioutil.ReadAll(resp.Body)
	require.NoErrorf(err, "read body error: %v", err)

	assert.Equalf(expectedStatus, resp.StatusCode, "expected http status %v, got %v", expectedStatus, resp.StatusCode)

	err = json.Unmarshal(resBody, &result)
	require.NoErrorf(err, "can not unpack json, error: %v", err)

	jsonData, err := json.Marshal(expectedData)
	json.Unmarshal(jsonData, &expected)

	assert.Equalf(expected, result, "results not match\nGot: %#v\nExpected: %#v", result, expectedData)
}

// GET /api/posts/{category} lists posts filtered by category.
func (s *ApiTestSuite) TestPost_ListByCategory() {
	var result interface{}
	var expected interface{}
	require := require.New(s.T())
	assert := assert.New(s.T())
	s.setupSession()

	list := []post.Post{*s.entities.post}
	query := selection_condition.SelectionCondition{
		Where: &post.Post{
			Category: "category",
		},
	}
	s.repositoryMocks.post.On("Query", mock.Anything, query).Return(list, error(nil))

	uri := "/api/posts/category"
	expectedData := list
	expectedStatus := http.StatusOK

	req, _ := http.NewRequest(http.MethodGet, s.server.URL+uri, nil)
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Authorization", "Bearer "+s.token)

	resp, err := s.client.Do(req)
	require.NoErrorf(err, "request error: %v", err)
	defer resp.Body.Close()

	resBody, err := ioutil.ReadAll(resp.Body)
	require.NoErrorf(err, "read body error: %v", err)

	assert.Equalf(expectedStatus, resp.StatusCode, "expected http status %v, got %v", expectedStatus, resp.StatusCode)

	err = json.Unmarshal(resBody, &result)
	require.NoErrorf(err, "can not unpack json, error: %v", err)

	jsonData, err := json.Marshal(expectedData)
	json.Unmarshal(jsonData, &expected)

	assert.Equalf(expected, result, "results not match\nGot: %#v\nExpected: %#v", result, expectedData)
}

// GET /api/user/{name} lists the posts of one user (resolved by name first).
func (s *ApiTestSuite) TestPost_ListByUser() {
	var result interface{}
	var expected interface{}
	require := require.New(s.T())
	assert := assert.New(s.T())
	s.setupSession()

	list := []post.Post{*s.entities.post}
	searchedUser := &user.User{
		Name: s.entities.user.Name,
	}
	query := selection_condition.SelectionCondition{
		Where: &post.Post{
			UserID: s.entities.user.ID,
		},
	}
	s.repositoryMocks.user.On("First", mock.Anything, searchedUser).Return(s.entities.user, error(nil))
	s.repositoryMocks.post.On("Query", mock.Anything, query).Return(list, error(nil))

	uri := "/api/user/" + s.entities.user.Name
	expectedData := list
	expectedStatus := http.StatusOK

	req, _ := http.NewRequest(http.MethodGet, s.server.URL+uri, nil)
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Authorization", "Bearer "+s.token)

	resp, err := s.client.Do(req)
	require.NoErrorf(err, "request error: %v", err)
	defer resp.Body.Close()

	resBody, err := ioutil.ReadAll(resp.Body)
	require.NoErrorf(err, "read body error: %v", err)

	assert.Equalf(expectedStatus, resp.StatusCode, "expected http status %v, got %v", expectedStatus, resp.StatusCode)

	err = json.Unmarshal(resBody, &result)
	require.NoErrorf(err, "can not unpack json, error: %v", err)

	jsonData, err := json.Marshal(expectedData)
	json.Unmarshal(jsonData, &expected)

	assert.Equalf(expected, result, "results not match\nGot: %#v\nExpected: %#v", result, expectedData)
}

// GET /api/post/{id}/upvote: no prior vote exists, a +1 vote is created and
// the post score is incremented back to the fixture value.
func (s *ApiTestSuite) TestPost_Upvote() {
	var result interface{}
	var expected interface{}
	require := require.New(s.T())
	assert := assert.New(s.T())
	s.setupSession()

	newVote := s.api.Domain.Vote.Service.NewEntity(s.entities.vote.UserID, s.entities.vote.PostID, 1)
	newVote.User = *s.entities.user
	searchedVote := &vote.Vote{
		PostID: s.entities.vote.PostID,
		UserID: s.entities.vote.UserID,
	}
	// p is the pre-vote state: one point below the fixture score.
	p := &post.Post{}
	*p = *s.entities.post
	p.Score--
	s.repositoryMocks.vote.On("First", mock.Anything, searchedVote).Return(nil, apperror.ErrNotFound)
	s.repositoryMocks.vote.On("Create", mock.Anything, newVote).Return(error(nil))
	s.repositoryMocks.post.On("Get", mock.Anything, p.ID).Return(p, error(nil))
	s.repositoryMocks.post.On("Update", mock.Anything, s.entities.post).Return(error(nil))

	uri := "/api/post/" + s.entities.post.ID + "/upvote"
	expectedData := s.entities.post
	expectedStatus := http.StatusOK

	req, _ := http.NewRequest(http.MethodGet, s.server.URL+uri, nil)
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Authorization", "Bearer "+s.token)

	resp, err := s.client.Do(req)
	require.NoErrorf(err, "request error: %v", err)
	defer resp.Body.Close()

	resBody, err := ioutil.ReadAll(resp.Body)
	require.NoErrorf(err, "read body error: %v", err)

	assert.Equalf(expectedStatus, resp.StatusCode, "expected http status %v, got %v", expectedStatus, resp.StatusCode)

	err = json.Unmarshal(resBody, &result)
	require.NoErrorf(err, "can not unpack json, error: %v", err)

	jsonData, err := json.Marshal(expectedData)
	json.Unmarshal(jsonData, &expected)

	assert.Equalf(expected, result, "results not match\nGot: %#v\nExpected: %#v", result, expectedData)
}

// GET /api/post/{id}/downvote: mirror of upvote with a -1 vote.
func (s *ApiTestSuite) TestPost_Downvote() {
	var result interface{}
	var expected interface{}
	require := require.New(s.T())
	assert := assert.New(s.T())
	s.setupSession()

	newVote := s.api.Domain.Vote.Service.NewEntity(s.entities.vote.UserID, s.entities.vote.PostID, -1)
	newVote.User = *s.entities.user
	searchedVote := &vote.Vote{
		PostID: s.entities.vote.PostID,
		UserID: s.entities.vote.UserID,
	}
	p := &post.Post{}
	*p = *s.entities.post
	p.Score++
	s.repositoryMocks.vote.On("First", mock.Anything, searchedVote).Return(nil, apperror.ErrNotFound)
	s.repositoryMocks.vote.On("Create", mock.Anything, newVote).Return(error(nil))
	s.repositoryMocks.post.On("Get", mock.Anything, p.ID).Return(p, error(nil))
	s.repositoryMocks.post.On("Update", mock.Anything, s.entities.post).Return(error(nil))

	uri := "/api/post/" + s.entities.post.ID + "/downvote"
	expectedData := s.entities.post
	expectedStatus := http.StatusOK

	req, _ := http.NewRequest(http.MethodGet, s.server.URL+uri, nil)
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Authorization", "Bearer "+s.token)

	resp, err := s.client.Do(req)
	require.NoErrorf(err, "request error: %v", err)
	defer resp.Body.Close()

	resBody, err := ioutil.ReadAll(resp.Body)
	require.NoErrorf(err, "read body error: %v", err)

	assert.Equalf(expectedStatus, resp.StatusCode, "expected http status %v, got %v", expectedStatus, resp.StatusCode)

	err = json.Unmarshal(resBody, &result)
	require.NoErrorf(err, "can not unpack json, error: %v", err)

	jsonData, err := json.Marshal(expectedData)
	json.Unmarshal(jsonData, &expected)

	assert.Equalf(expected, result, "results not match\nGot: %#v\nExpected: %#v", result, expectedData)
}

// GET /api/post/{id}/unvote: an existing vote is found, deleted, and its
// value subtracted from the post score.
func (s *ApiTestSuite) TestPost_Unvote() {
	var result interface{}
	var expected interface{}
	require := require.New(s.T())
	assert := assert.New(s.T())
	s.setupSession()

	newVote := s.api.Domain.Vote.Service.NewEntity(s.entities.vote.UserID, s.entities.vote.PostID, 1)
	newVote.User = *s.entities.user
	searchedVote := &vote.Vote{
		PostID: s.entities.vote.PostID,
		UserID: s.entities.vote.UserID,
	}
	p := &post.Post{}
	*p = *s.entities.post
	p.Score -= s.entities.vote.Value
	s.repositoryMocks.vote.On("First", mock.Anything, searchedVote).Return(s.entities.vote, nil)
	s.repositoryMocks.vote.On("Delete", mock.Anything, s.entities.vote.ID).Return(error(nil))
	s.repositoryMocks.post.On("Get", mock.Anything, p.ID).Return(p, error(nil))
	s.repositoryMocks.post.On("Update", mock.Anything, p).Return(error(nil))

	uri := "/api/post/" + s.entities.post.ID + "/unvote"
	expectedData := p
	expectedStatus := http.StatusOK

	req, _ := http.NewRequest(http.MethodGet, s.server.URL+uri, nil)
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Authorization", "Bearer "+s.token)

	resp, err := s.client.Do(req)
	require.NoErrorf(err, "request error: %v", err)
	defer resp.Body.Close()

	resBody, err := ioutil.ReadAll(resp.Body)
	require.NoErrorf(err, "read body error: %v", err)

	assert.Equalf(expectedStatus, resp.StatusCode, "expected http status %v, got %v", expectedStatus, resp.StatusCode)

	err = json.Unmarshal(resBody, &result)
	require.NoErrorf(err, "can not unpack json, error: %v", err)

	jsonData, err := json.Marshal(expectedData)
	json.Unmarshal(jsonData, &expected)

	assert.Equalf(expected, result, "results not match\nGot: %#v\nExpected: %#v", result, expectedData)
}
#!/bin/bash
# NoIP updater script.
# Prerequisites (by Debian package name):
#   bash
#   dnsutils (for dig)
#   curl
#   python
# NOTE(review): queryencode relies on Python 2 ("print urllib.quote_plus");
# on Python-3-only hosts this breaks -- confirm the target environment.

# Configuration
noip_url="https://dynupdate.no-ip.com/nic/update"
#noip_url="http://localhost:8080/update"
user_agent='noip-update/1.0 jwilliams@codingforsoup.com'
basedir="."
log_file="$basedir/update.log"
last_ip_file="$basedir/last_ip"
last_error_file="$basedir/last_error"

# URL-encode $1 for use as a query-string value.
queryencode() {
  python -c "import urllib; print urllib.quote_plus(\"$1\", \"\")"
}

# Timestamped log line to stdout and the log file.
log() {
  echo "[`date --rfc-3339=seconds`] $1" | tee -a "$log_file"
}

if [ $# -ne 3 ]; then
  echo "Usage: $0 HOSTNAME USERNAME PASSWORD"
  echo "Updates HOSTNAME on no-ip.com, using the specified"
  echo "USERNAME and PASSWORD to authenticate."
  echo ""
  log "Update failed: invalid arguments"
  exit 1
fi

# Arguments (by position):
hostname_to_update=`queryencode "$1"`
username="$2"
password="$3"

my_ip=`dig +short myip.opendns.com @resolver1.opendns.com`

# Check for previous failure and bail out if true
if [ -f "$last_error_file" ]; then
  last_error=`cat "$last_error_file"`
  log "Update failed: resolve previous error ($last_error) and remove $last_error_file to resume updates."
  exit 2
fi

# Check to see if IP has changed and bail out if true
if [ -f "$last_ip_file" ]; then
  last_ip=`cat "$last_ip_file"`
  if [ "$my_ip" == "$last_ip" ]; then
    log "IP hasn't changed since last update; nothing to do"
    exit 0
  fi
fi

# If we've gotten down here, we should be good to perform the update
result=`curl -A "$user_agent" -u "$username:$password" "$noip_url?hostname=$hostname_to_update&myip=$my_ip"`
if [[ "$result" == *"good"* || "$result" == *"nochg"* ]]; then
  # Bug fix: record the new IP only AFTER a successful update. The original
  # wrote it before calling curl, so a failed attempt would be skipped as
  # "unchanged" on the next run once the error file was cleared.
  echo "$my_ip" > "$last_ip_file"
  log "Update succeeded ($result)"
else
  log "Update failed ($result)-- please resolve and remove $last_error_file to resume updates"
  echo "$result" > "$last_error_file"
  exit 3
fi
import React from 'react';
import { Table } from 'react-bootstrap';

// Stateless table of users: one row per user showing name, email and phone.
// Expects props.users to be an array of {id, name, email, phone_no}.
const UsersTable = (props) => {
  const users = props.users;
  // user.id is used as the React key -- assumes ids are unique; TODO confirm.
  const rows = users.map(user =>
    <tr key={user.id}>
      <td>{user.name}</td>
      <td>{user.email}</td>
      <td>{user.phone_no}</td>
    </tr>
  );
  return (
    <Table>
      <thead>
        <tr>
          <th>Name</th>
          <th>Email</th>
          <th>Phone number</th>
        </tr>
      </thead>
      <tbody>{rows}</tbody>
    </Table>
  );
}

export default UsersTable;
# inference.py
import sys
import torch
import utils
import dataloader


# generate submissions.csv file
def inference():
    """Load a saved model (file name read from stdin) and write a predictions CSV.

    The whole test set is processed as a single batch; exits with status 0
    when the model file cannot be opened.
    """
    device = "cuda:0" if torch.cuda.is_available() else "cpu"
    save_file = input("save model name : ")
    try:
        # Remap tensors to whichever device is actually available.
        if torch.cuda.is_available():
            model = torch.load(save_file, map_location={"cpu": "cuda:0"})
        else:
            model = torch.load(save_file, map_location={"cuda:0": "cpu"})
        print("Success loading model")
    except IOError:
        print("Couldn't find model")
        sys.exit(0)

    print("best epoch was {}".format(model.info_dict['epoch']))

    # 1783 : length of test data set
    test_data_loader = dataloader.DataLoader(1783, test=True)
    model.eval()
    with torch.no_grad():
        X, _ = test_data_loader.get_batch()
        X = X.to(device)
        output = model(X)
        utils.generate_csv(output)


if __name__ == "__main__":
    inference()
^[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$
#!/bin/bash
# Walk every git repository below the current directory (up to 10 levels
# deep) and either pull (first argument "pull") or fetch it, then run the
# gitStatus.sh helper from each repository's directory.

C_RED="\033[1;31m"
C_GREEN="\033[1;32m"
C_YELLOW="\033[1;33m"
C_BLUE="\033[1;34m"
C_PURPLE="\033[1;35m"
C_CYAN="\033[1;36m"
C_RESET="$(tput sgr0)"

find . -maxdepth 10 -name .git -type d -prune | while IFS= read -r d; do
  owd=$(pwd)
  cd "${d}/.." || continue
  nwd=$(pwd)
  b=$(git rev-parse --abbrev-ref HEAD)
  printf "${C_PURPLE}[${b}] ${C_GREEN}${nwd} ...${C_RESET} \n"
  if [[ $1 == 'pull' ]]; then
    printf "${C_YELLOW}Pulling...${C_RESET} \n"
    git pull --quiet
  else
    printf "${C_YELLOW}Fetching...${C_RESET} \n"
    # Bug fix: the original used '2>&1 > /dev/null', which sends stderr to
    # the terminal and only stdout to /dev/null. Redirect stdout first so
    # both streams are silenced, as clearly intended.
    git fetch --all > /dev/null 2>&1
  fi
  # NOTE(review): assumes a gitStatus.sh exists inside every repository
  # visited -- confirm, or call the helper by absolute path.
  ./gitStatus.sh
  printf "\n"
  cd "${owd}" || exit
done
#include <iostream>
using namespace std;

// Read two integers from stdin and report which one is larger.
int main(){
    int first, second;
    cout<<"Enter first number: ";
    cin>>first;
    cout<<"Enter second number: ";
    cin>>second;
    // Same comparison as before, expressed as a plain if/else instead of
    // a conditional expression.
    if (first > second) {
        cout << first << " is the biggest number.";
    } else {
        cout << second << " is the biggest number.";
    }
    return 0;
}
// src/software/webapp/front/components/games/typing.d.ts
/*
 Godot Engine declaration
 ========================
 Authors: Julien & <NAME> - RE-FACTORY SARL
 Company: ORTHOPUS SAS
 License: Creative Commons Zero v1.0 Universal
 Website: orthopus.com
 Last edited: October 2021
*/

// Ambient declaration of the Godot HTML5 export runtime (Engine class).
declare class Engine {
    constructor(initConfig?: EngineConfig);
    static isWebGLAvailable: (majorVersion?: number) => boolean;
    static load: (path: string) => Promise<any>;
    static unload: () => void;
    init: (basePath: string) => Promise<any>;
    preloadFile: (file: string|ArrayBuffer, path?: string) => Promise<any>;
    start: (config: EngineConfig) => Promise<any>;
    startGame: (config: EngineConfig) => Promise<any>;
    copyToFS: (path: string, buffer: ArrayBuffer) => void;
    requestQuit: () => void;
}

// Options accepted by the Engine constructor / start methods; all optional.
interface EngineConfig {
    unloadAfterInit?: boolean,
    canvas?: HTMLCanvasElement,
    executable?: string,
    mainPack?: string,
    locale?: string,
    canvasResizePolicy?: number,
    args?: Array<string>,
    experimentalVK?: boolean,
    persistentPaths?: Array<string>,
    persistentDrops?: boolean,
    gdnativeLibs?: Array<string>,
    fileSizes?: any,
    onExecute?: (path: string, args: Array<string>) => void,
    onExit?: (status_code: number) => void,
    onProgress?: (current: number, total: number) => void,
    onPrint?: (var_args?: Array<string>) => void,
    onPrintError?: (var_args?: Array<string>) => void,
}

type EngineLoaderDescription = string

// Metadata describing one embeddable game.
interface GameDescription {
    name: string,
    url: string,
    path: string,
    width: number,
    height: number,
    fileSizes: any,
}
# CarrierWave uploader for article images, backed by Cloudinary.
class ImageUploader < CarrierWave::Uploader::Base
  include Cloudinary::CarrierWave

  # Every upload is converted to PNG and tagged for Cloudinary searches.
  process convert: 'png'
  process tags: ['article_image']

  version :standard do
    process resize_to_fill: [100, 150, :north]
  end

  version :thumbnail do
    # Bug fix: the resize must be declared via `process`, matching the
    # :standard version above. Calling resize_to_fit directly here executes
    # at class-definition time (no file present) instead of per upload.
    process resize_to_fit: [50, 50]
  end

  CarrierWave.configure do |config|
    config.cache_storage = :file
  end

  # Per-record storage directory, e.g. uploads/article/image/42.
  def store_dir
    "uploads/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
  end

  # Extensions accepted for upload.
  def extension_whitelist
    %w[jpg jpeg gif png]
  end
end
package com.java.study.algorithm.zuo.dadvanced.advanced_class_08;

/**
 * (Comments translated from Chinese.)
 *
 * 1) In an array where one number occurs an odd number of times and every
 *    other number occurs an even number of times, print that odd-count
 *    number.
 * 2) In an array where two numbers occur an odd number of times and every
 *    other number occurs an even number of times, print those two numbers.
 * 3) In an array where one number occurs once and the rest occur k times,
 *    print that number. (The original wording says "the number that occurs
 *    k times" -- presumably a typo for the number occurring once; confirm.)
 */
public class Code_02_KTimesOneTime{

}
def solve_equation(a, b, c):
    """Print the real solutions of the quadratic a*x**2 + b*x + c = 0.

    Assumes a != 0; a == 0 raises ZeroDivisionError -- TODO confirm callers
    never pass a degenerate (linear) equation.
    """
    # The discriminant decides how many distinct real roots exist.
    d = b**2 - 4*a*c
    if d < 0:
        print("This equation has no real solution")
    elif d == 0:
        # d == 0, so the +/- sqrt term vanishes and both roots coincide.
        x = (-b + d**0.5) / (2*a)
        # Bug fix: message said "one solutions".
        print("This equation has one solution: ")
        print("x = ", x)
    else:
        x1 = (-b + d**0.5) / (2*a)
        x2 = (-b - d**0.5) / (2*a)
        print("This equation has two solutions: ")
        print("x1 = ", x1, " and x2 = ", x2)
#include "test_clear_colour.h"
#include "test_texture.h"
#include "test_3d_cube.h"

// TODO: may be able to put this back into test.h??? do we want to??

/* Dispatch setup to the concrete test implementation and record its type
 * in the shared test_data so later calls route to the same test. */
void
test_setup (enum test_type type, struct test_data *d) {
    d->type = type;
    switch (d->type) {
        case TEST_CLEAR_COLOUR: { test_clear_colour_setup (d); break; }
        case TEST_TEXTURE:      { test_texture_setup (d); break; }
        case TEST_3D_CUBE:      { test_3d_cube_setup (d); break; }
        default: ASSERT (!"unknown setup type");
    }
}

/* Per-frame update hook. Currently a no-op for every test type --
 * NOTE(review): confirm this is intentional and not a missing dispatch. */
void
test_update (struct test_data *t, float dt) {
}

/* Dispatch rendering to the active test. */
void
test_render (struct test_data *t) {
    switch (t->type) {
        case TEST_CLEAR_COLOUR: { test_clear_colour_render (t); break; }
        case TEST_TEXTURE:      { test_texture_render (t); break; }
        case TEST_3D_CUBE:      { test_3d_cube_render (t); break; }
        default: ASSERT (!"unknown render type");
    }
}

/* Dispatch GUI rendering (nuklear context) to the active test. */
void
test_render_gui (struct test_data *t, struct nk_context *nk) {
    switch (t->type) {
        case TEST_CLEAR_COLOUR: { test_clear_colour_render_gui (t, nk); break; }
        case TEST_TEXTURE:      { test_texture_render_gui (t, nk); break; }
        case TEST_3D_CUBE:      { test_3d_cube_render_gui (t, nk); break; }
        default: ASSERT (!"unknown render gui type");
    }
}

/* Dispatch resource teardown to the active test. */
void
test_teardown (struct test_data *t) {
    switch (t->type) {
        case TEST_CLEAR_COLOUR: { test_clear_colour_teardown (t); break; }
        case TEST_TEXTURE:      { test_texture_teardown (t); break; }
        case TEST_3D_CUBE:      { test_3d_cube_teardown (t); break; }
        default: ASSERT (!"unknown teardown type");
    }
}
#include <stdio.h> #include "isshe_rand.h" int isshe_rand_bytes_dev_urandom(unsigned char *buf, int num) { int res; FILE *fp = fopen(ISSHE_DEV_URANDOM, "rb"); if (!fp) { return -1; } res = fread(buf, 1, num, fp); fclose(fp); if (res != num) { return -1; } return res; } int isshe_rand_bytes(unsigned char *buf, int num) { return isshe_rand_bytes_dev_urandom(buf, num); }
#!/bin/bash
#
# Build the cAdvisor Docker image, tagged with the invoking user's
# Docker Hub namespace.
#
# Requires: USER_NAME environment variable (Docker Hub namespace).

set -euo pipefail

# Fail fast with a clear message if the namespace is not provided;
# previously an unset USER_NAME produced the broken tag "/cadvisor".
: "${USER_NAME:?USER_NAME must be set to the Docker Hub namespace}"

docker build -t "${USER_NAME}/cadvisor" .
/* 
 Return occultation predictions for a single where and when.
 Returns an array of N objects, of form:
   {UT:'2016-10-20 21:08:18', star:'ZC 123', mag:5.5, ph:'DD', el:92, pa:145}
 Uses output from the occult.exe software from the IOTA, for N stations in North America.
 The target 'where' needs to be within M kilometers of one of these N stations.
 Occult.exe's predictions have decimal minutes; they are replaced with seconds, for ease of 
 interaction with the ephem.js API.
 The output is generated for 13 months, to avoid running out at the end of the year.
 The output is massaged offline into a javascript array, and embedded directly into this file. 
*/
var fetch_occultation_predictions = function(where, when, num_days_ahead /*of when*/){

  // The database of occultations for all stations, for an extended period of time.
  var all_occns = [
    {
      name : 'Montréal', λ:-73.6, φ:45.5,
      events: [
        {UT:'2016-10-20 21:08:18', star:'ZC 123', mag:5.5, ph:'DD', el:92, pa:145, a:+1.2, b:-1.5}
      ]
    }
    //more stations here
  ];

  /* Returns the station nearest to 'where'; null if the nearest station is over 400 kms away. */
  var the_nearest_station = function(){
    var i, station, this_dist, min_dist = 401, result = null;
    for(i=0; i < all_occns.length; ++i){
      station = all_occns[i];
      this_dist = EPH.distance_kms(station, where);
      if (this_dist < min_dist){
        min_dist = this_dist;
        result = station;
      }
    }
    return result;
  };

  /* Use alpha-order of the ISO date-time strings to filter-in occultation events
     inside the [when - 1 day, when + num_days_ahead + 1 day] window. */
  var filter_date_time_window = function(events){
    var result = [];
    var base_when = when.toString().substring(2).trim(); //trim leading 'UT '
    var when_start = EPH.date_time_odometer(base_when, 'day', -1);
    var when_end = EPH.date_time_odometer(base_when, 'day', num_days_ahead + 1);
    for (var i=0; i < events.length; ++i){
      if (when_start <= events[i].UT && events[i].UT <= when_end){
        result.push(events[i]);
      }
    }
    return result;
  };

  /* Whole-second correction to the event time, for the observer's offset from the station.
     The a, b parameters are in minutes/degree, as decimals. */
  var calc_local_correction_to_time_Δsecs = function(event, station){
    var Δt_λ = event.a * (where.λ - station.λ);
    var Δt_φ = event.b * (where.φ - station.φ);
    var Δmin = Δt_λ + Δt_φ; //fractional minutes
    var result = EPH.round(Δmin * 60, 0); //now whole seconds, to be added to the UT of the event
    return result;
  };

  var find_occns = function(where, when, num_days_ahead){
    var result = []; // {UT:'2016-10-20 21:08:18', star:'ZC 123', mag:5.5, ph:'DD', el:92, pa:145}
    var filtered_event, local_event, Δsec;
    var nearest_station = the_nearest_station();
    // BUG FIX: the_nearest_station() returns null when no station is within 400 kms;
    // the null was previously dereferenced below ('.events'), throwing a TypeError.
    if (!nearest_station){
      return result;
    }
    var filtered_events = filter_date_time_window(nearest_station.events);
    for (var i=0; i < filtered_events.length; ++i){
      filtered_event = filtered_events[i];
      Δsec = calc_local_correction_to_time_Δsecs(filtered_event, nearest_station);
      //conservative: make a new object, instead of changing state of the 'database' object
      local_event = {
        UT: EPH.date_time_odometer(filtered_event.UT, 'sec', Δsec),
        star: filtered_event.star,
        mag: filtered_event.mag,
        ph: filtered_event.ph,
        el: filtered_event.el,
        pa: filtered_event.pa
      };
      result.push(local_event);
    }
    return result;
  };

  return find_occns(where, when, num_days_ahead);
};
<reponame>GZH-INVESTER/zhaoxin-2020-be<gh_stars>0 import { Controller } from 'egg'; export default class UserController extends Controller{ async isLogged(){ const {ctx} = this if(ctx.session.userId){ ctx.body={isLogged:1} return } ctx.body={isLogged:0} return } async login(){ const { ctx,app,service} = this const { password } = ctx.request.body const createRule={password:'<PASSWORD>'} let errs = app.validator.validate(createRule, ctx.request.body); if(errs){ if(errs[0].message=='required') errs[0].message='密码不能为空' ctx.body={ success:0, errMsg:errs[0].message } return } const user = await service.user.login(password) if(user){ ctx.session.userId = user.id ctx.body = { success: 1, } } } async logout(){ const {ctx} = this ctx.session.userId=null ctx.body={ success:1 } return } }
/**
 * Count the prime numbers in the inclusive range [min, max].
 *
 * @param min lower bound of the range (inclusive)
 * @param max upper bound of the range (inclusive)
 * @return number of primes in the range; 0 when the range is empty
 */
public static int countPrimes(int min, int max) {
    int count = 0;
    for (int num = min; num <= max; num++) {
        // BUG FIX: values below 2 (negatives, 0 and 1) are not prime, but
        // the original code counted them because the trial-division loop
        // never executed for them, leaving isPrime == true.
        if (num < 2) {
            continue;
        }
        boolean isPrime = true;
        // Trial division only needs divisors up to sqrt(num); the (long)
        // cast avoids overflow of i*i near Integer.MAX_VALUE.
        for (int i = 2; (long) i * i <= num; i++) {
            if (num % i == 0) {
                isPrime = false;
                break;
            }
        }
        if (isPrime) {
            count++;
        }
    }
    return count;
}
#define HAS_VTK 1 #include "LaShell2ShellPointsCSV.h" /* * Author: * Dr. <NAME> * Department of Biomedical Engineering, King's College London * Email: rashed 'dot' <EMAIL> * Copyright (c) 2017 * * This application demonstrates the LaShell2ShellPointsCSV class * Reads a CSV file containing 3D points, locates them in source shell and then locates their closest points in target shell * Outputs a CSV file containing cloest points xyz in target shell */ int main(int argc, char * argv[]) { char* input_f1, *input_f2, *input_f3, *input_csv, *output_csv; bool foundArgs1 = false; bool foundArgs2 = false; bool foundArgs3 = false; bool foundArgs4 = false; bool foundArgs5 = false; if (argc >= 1) { for (int i = 1; i < argc; i++) { if (i + 1 != argc) { if (string(argv[i]) == "-source") { input_f1 = argv[i + 1]; foundArgs1 = true; } else if (string(argv[i]) == "-target") { input_f2 = argv[i + 1]; foundArgs2 = true; } else if (string(argv[i]) == "-starget") { input_f3 = argv[i + 1]; foundArgs3 = true; } else if (string(argv[i]) == "-csv") { input_csv = argv[i + 1]; foundArgs4 = true; } else if (string(argv[i]) == "-out") { output_csv = argv[i + 1]; foundArgs5 = true; } } // end outer if } } if (!(foundArgs1 && foundArgs2 && foundArgs3 && foundArgs4 && foundArgs5)) { cerr << "Cheeck your parameters\n\nUsage:" "\nReads a CSV file containing 3D points, locates them in source shell and then locates their closest points in target shell." 
"\nOutputs a CSV file containing cloest points xyz in target shell" "\n(Mandatory)\n\t-source <source shell1> \n\t-target <target shell2> \n\t-starget (source registered to target vtk)" "\n\t-csv <csv file>\n\t-out <csv output>\n" << endl; exit(1); } else { LaShell* source = new LaShell(input_f1); LaShell* target = new LaShell(input_f2); LaShell* source_in_target = new LaShell(input_f3); LaShell2ShellPointsCSV* algorithm = new LaShell2ShellPointsCSV(); algorithm->SetSourceData(source); algorithm->SetTargetData(target); algorithm->SetSourceInTargetData(source_in_target); algorithm->SetOutputFileName(output_csv); algorithm->ReadCSVFile(input_csv); algorithm->Update(); } }
package com.ioDemo.nio.NIOWebServer.connector; import javax.servlet.ServletOutputStream; import javax.servlet.ServletResponse; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; import java.util.Locale; public class Response implements ServletResponse { private static final int BUFFER_SIZE = 1024; Request request; OutputStream outputStream; public Response(OutputStream outputStream) { this.outputStream = outputStream; } public void setRequest(Request request) { this.request = request; } public void sendStaticResource() throws IOException { InputStream is = getClass().getResourceAsStream(request.getRequestUri()); try { if (null != is) { if (request.getRequestUri().contains("/404.html")) { write(is, HttpStatus.SC_NT_FOUND); } else { write(is, HttpStatus.SC_OK); } } else { is = getClass().getResourceAsStream("/404.html"); write(is, HttpStatus.SC_NT_FOUND); } } catch (IOException e) { is = getClass().getResourceAsStream("/404.html"); write(is, HttpStatus.SC_NT_FOUND); } } private void write(InputStream inputStream, HttpStatus httpStatus) throws IOException { outputStream.write(ConnectorUtils.renderStatus(httpStatus).getBytes()); byte[] bytes = new byte[BUFFER_SIZE]; int length; while (-1 != (length = inputStream.read(bytes, 0, BUFFER_SIZE))) { outputStream.write(bytes, 0, length); } } @Override public String getCharacterEncoding() { return null; } @Override public String getContentType() { return null; } @Override public ServletOutputStream getOutputStream() throws IOException { return null; } @Override public PrintWriter getWriter() throws IOException { return new PrintWriter(outputStream, true); } @Override public void setCharacterEncoding(String s) { } @Override public void setContentLength(int i) { } @Override public void setContentLengthLong(long l) { } @Override public void setContentType(String s) { } @Override public void setBufferSize(int i) { } @Override public int getBufferSize() { return 0; } 
@Override public void flushBuffer() throws IOException { } @Override public void resetBuffer() { } @Override public boolean isCommitted() { return false; } @Override public void reset() { } @Override public void setLocale(Locale locale) { } @Override public Locale getLocale() { return null; } }
-- Return the five highest salaries in the Employees table.
-- NOTE(review): duplicate salary values are counted separately here; use
-- SELECT DISTINCT Salary if the five highest *distinct* salaries are wanted.
SELECT Salary
FROM Employees
ORDER BY Salary DESC
LIMIT 5;
const App = getApp();
import { formatTime, formatNumber } from '../../utils/util.js';
import { alarmInterfaceInit, alarmListRequestPage} from '../../lib/api';

let timer = null;
let scrollFlag = true, // guard: prevents continuous scrolling from firing repeated requests
  loadFlag = false; // true once every record has been loaded (no more pages)

Page({

  /**
   * Initial page data
   */
  data: {
    currentTime: "2020-09-11",
    activeTime: "2020-09-13",
    datetimes: [],
    scrollTop: 0,
    alarmData: [],
    totalRecords: 0,
    isEmptyPage: false,
    loadRefresher: false,
  },

  /**
   * Lifecycle hook -- page load
   */
  onLoad: function (options) {
    this.selectStartTime = '';
    this.selectEndTime = '';
    this.initDateTimes();
  },

  // Reset paging state and re-initialise the alarm query on the server,
  // then invoke `callback` (typically a fresh first-page fetch).
  resetPageData(callback){
    this.page = 0;
    this.limit = 15;
    this.pageTotal = 0;
    loadFlag = false;
    const { activeTime } = this.data;
    // Fall back to the whole selected day when no custom range is set.
    const alarmStartTime = this.selectStartTime ? this.selectStartTime : activeTime + " 00:00:00",
      alarmEndTime = this.selectEndTime ? this.selectEndTime : activeTime + " 23:59:59";
    const param = {
      alarmType: "3,13,14,29,30,157,158,0,1,2,18,19,2011,2012,20,2111,2112,21,2211,2212,22,23,15,148,149,164,24,25,26,27,28,31,4,5,6,7,8,9,10,11,12,16,17,651,652,653,661,662,663,14303,14301,14300,14302,14304,14305,14306,681,682,701,1321,702,683,67,124,12411,14411,125,126,1271,1272,130,128,131,129,77,75,147,76,7211,7212,7311,7312,11911,11912,151,209,150,203,82,7702,7703,79,81,32,118,11511,11512,11611,11612,123,1111,1112,113,114,107,106,104,105,108,109,18712,18715,18811,18718,18810,18812,18813,18814,18815,18716,18719,18717,18714,18711,18713,19711",
      alarmStartTime,
      alarmEndTime,
      status: -1,
      alarmSource: -1,
      pushType: -1
    };
    App.showLoading("请稍后");
    alarmInterfaceInit(param).then(res => {
      if (res.success) {
        this.setData({
          alarmData: [],
          totalRecords: 0
        }, () => {
          wx.hideLoading();
          callback && callback()
        })
      }
    })
  },

  /**
   * Build the strip of selectable dates (the 5 most recent days).
   */
  initDateTimes() {
    const date = new Date();
    const { year, month, day } = formatTime(date);
    let lastDay = day, datetimes = [];
    for (lastDay; lastDay > -31; lastDay--) {
      let item = {};
      if (lastDay > 0) {
        item = {
          year: year,
          month: month,
          day: lastDay
        }
      } else {
        // NOTE(review): assumes the previous month has 31 days — dates can be
        // wrong after shorter months; confirm intended behaviour.
        const monthDay = 31;
        let $month = 12, $year = year;
        if (month > 1) {
          $month = month - 1;
        } else {
          $year--;
        }
        let $lastDay = lastDay + monthDay;
        item = {
          year: $year,
          month: $month,
          day: $lastDay
        }
      }
      const $month = formatNumber(item.month), $day = formatNumber(item.day);
      const _week = new Date(`${item.year}-${$month}-${$day}`).getDay(),
        $week = App.initWeekText(_week);
      if(datetimes.length >= 5) break;
      datetimes.unshift({
        datetime: item.year + `-${$month}-${$day}`,
        year: item.year,
        date: `${$month}-${$day}`,
        week: `${$week}`
      })
    }
    if(datetimes.length){
      const current = datetimes[datetimes.length-1];
      this.setData({
        datetimes,
        currentTime: current,
        activeTime: current.datetime,
      })
    }
  },

  /**
   * A date was tapped in the top date strip: reset any custom range and reload.
   */
  handleDateTimeChange(e){
    const { datetime } = e.currentTarget.dataset;
    const { activeTime } = this.data;
    if(activeTime == datetime) return false;
    if(datetime){
      this.setData({
        activeTime: datetime,
      }, () => {
        this.selectStartTime = '';
        this.selectEndTime = '';
        getApp().globalData.alarmTime = {startTime: '', endTime: ''};
        this.resetPageData(() => this.getPageData());
      })
    }
  },

  /**
   * Navigate to the custom time-range picker page.
   */
  goCustomTime(){
    wx.navigateTo({
      url: "/pages/dateTime/dateTime?from=alarm"
    })
  },

  // Fetch one page of alarm records and append it to the current list.
  getPageData() {
    let data = [];
    const { alarmData } = this.data;
    if(loadFlag) return false; // all records already loaded — nothing more to fetch
    App.showLoading("正在查询");
    const param = {
      // alarmType: 3,
      // alarmStartTime: -1,
      // alarmEndTime: -1,
      start: this.page * this.limit,
      length: this.limit,
      draw: this.page + 1
    };
    alarmListRequestPage(param).then(res => {
      if (res) {
        const {totalPages, records, totalRecords} = res;
        let newIsEmptyPage = false;
        for(let i = 0, len = records.length; i < len; i++){
          const item = records[i];
          // No location text but coordinates are present: reverse-geocode them.
          // NOTE(review): getReverseGeocoder is not imported in this file —
          // confirm it is a global helper or add it to the lib/api import.
          if(!item.alarmEndSpecificLocation && item.alarmEndLatitude && item.alarmEndLongitude){
            getReverseGeocoder({latitude: item.alarmEndLatitude, longitude: item.alarmEndLongitude}).then(res => {
              if(res) item.alarmEndSpecificLocation = res;
            }, error => {
              console.log("逆地址解析出错", error)
            })
          }
        }
        if(totalPages) this.pageTotal = totalPages;
        if(records) data = alarmData.concat(records);
        if (!data.length) newIsEmptyPage = true;
        loadFlag = data.length >= res.recordsTotal;
        try {
          wx.setStorageSync('alarmList', data)
        } catch (e) {}
        scrollFlag = true;
        this.setData({
          alarmData: data,
          totalRecords: totalRecords ? totalRecords : 0,
          isEmptyPage: newIsEmptyPage
        }, () => {
          // Keep the loading toast visible briefly to avoid flicker.
          const newtime = setTimeout(() => {
            wx.hideLoading();
            clearTimeout(newtime);
          }, 500)
        })
      }
    })
  },

  /**
   * Pull-down refresh triggered on the list container.
   */
  onRefresherRefresh(e) {
    console.log("下拉刷新被触发", e);
    this.resetPageData(() => this.getPageData());
    this.setData({
      loadRefresher: false
    })
  },

  /**
   * Fired when the list scrolls to its bottom/right edge — load the next page.
   */
  scrollLoadMore(e) {
    if (this.page === this.pageTotal) {
      App.showToast("已经是最后一页了");
      return false;
    }
    if(scrollFlag){
      console.log("scrollLoadMore滚动到底部并获取数据", e);
      scrollFlag = false;
      this.page += 1;
      this.getPageData();
    }
  },

  /**
   * Lifecycle hook -- first render finished
   */
  onReady: function () {

  },

  /**
   * Lifecycle hook -- page shown
   */
  onShow: function () {
    const { alarmTime } = App.globalData, { startTime, endTime } = alarmTime;
    const {alarmData} = this.data;
    if(startTime && endTime){
      // If the picked range equals the range already queried, skip the query
      // and just scroll the list back to the top.
      if(startTime == this.selectStartTime && this.selectEndTime == endTime){
        this.setData({
          scrollTop: 0
        })
        return false;
      }
      this.selectStartTime = startTime;
      this.selectEndTime = endTime;
      this.setData({
        activeTime: ""
      })
    }else if(alarmData.length){
      return false;
    }
    this.resetPageData(() => {
      this.getPageData();
    });
  },

  /**
   * Lifecycle hook -- page hidden
   */
  onHide: function () {

  },

  /**
   * Lifecycle hook -- page unloaded
   */
  onUnload: function () {
    timer = null;
    wx.hideLoading();
    clearInterval(timer);
  }
})
#!/bin/bash
#
# Build the Colab notebooks: copy the solution notebooks, strip the
# solutions, smoke-test the results with nbmake, then push to GitHub.
#
# Requires: pytest + nbmake installed (pip install pytest nbmake).

# BUG FIX: without strict mode, a failed copy or a failing notebook run
# previously still got committed and pushed.
set -euo pipefail

# copy the notebooks with solutions
cp solutions/[01]*.ipynb .

# remove solutions
python remove_soln.py

# run nbmake (executes every notebook and fails the build on errors)
pytest --nbmake [01]*.ipynb

# push to GitHub
git add [01]*.ipynb utils.py
git commit -m "Updating notebooks"
git push
# frozen_string_literal: true

# Uploader for generic attachment files: documents, spreadsheets,
# presentations, archives and CSV data.
class AttachmentFileUploader < Sunrise::CarrierWave::BaseUploader
  # CarrierWave hook: the file extensions accepted for upload.
  def extension_white_list
    %w(pdf doc docx xls xlsx ppt pptx zip rar csv)
  end
end
<gh_stars>0 /************************************************************************************************* * String utilities * * Copyright 2020 Google LLC * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * https://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions * and limitations under the License. *************************************************************************************************/ #include "tkrzw_lib_common.h" #include "tkrzw_str_util.h" #include "tkrzw_sys_config.h" namespace tkrzw { int64_t StrToInt(std::string_view str, int64_t defval) { const unsigned char* rp = reinterpret_cast<const unsigned char*>(str.data()); const unsigned char* ep = rp + str.size(); while (rp < ep && *rp <= ' ') { rp++; } int32_t sign = 1; if (rp < ep) { if (*rp == '-') { rp++; sign = -1; } else if (*rp == '+') { rp++; } } while (rp < ep && *rp <= ' ') { rp++; } bool has_number = false; int64_t num = 0; while (rp < ep) { if (*rp < '0' || *rp > '9') break; has_number = true; num = num * 10 + *rp - '0'; rp++; } if (!has_number) { return defval; } return num * sign; } int64_t StrToIntMetric(std::string_view str, int64_t defval) { const unsigned char* rp = reinterpret_cast<const unsigned char*>(str.data()); const unsigned char* ep = rp + str.size(); while (rp < ep && *rp <= ' ') { rp++; } int32_t sign = 1; if (rp < ep) { if (*rp == '-') { rp++; sign = -1; } else if (*rp == '+') { rp++; } } while (rp < ep && *rp <= ' ') { rp++; } bool has_number = false; long double num = 0; while (rp < ep) { if (*rp < '0' || *rp > '9') break; has_number = true; num = num * 10 + *rp - '0'; rp++; } if (rp < ep 
&& *rp == '.') { rp++; long double base = 10; while (rp < ep) { if (*rp < '0' || *rp > '9') break; has_number = true; num += (*rp - '0') / base; rp++; base *= 10; } } if (!has_number) { return defval; } num *= sign; while (rp < ep && *rp <= ' ') { rp++; } if (rp < ep) { if (*rp == 'k' || *rp == 'K') { rp++; if (rp < ep && (*rp == 'i' || *rp == 'I')) { num *= 1LL << 10; } else { num *= 1LL * 1000LL; } } else if (*rp == 'm' || *rp == 'M') { rp++; if (rp < ep && (*rp == 'i' || *rp == 'I')) { num *= 1LL << 20; } else { num *= 1LL * 1000000LL; } } else if (*rp == 'g' || *rp == 'G') { rp++; if (rp < ep && (*rp == 'i' || *rp == 'I')) { num *= 1LL << 30; } else { num *= 1LL * 1000000000LL; } } else if (*rp == 't' || *rp == 'T') { rp++; if (rp < ep && (*rp == 'i' || *rp == 'I')) { num *= 1LL << 40; } else { num *= 1LL * 1000000000000LL; } } else if (*rp == 'p' || *rp == 'P') { rp++; if (rp < ep && (*rp == 'i' || *rp == 'I')) { num *= 1LL << 50; } else { num *= 1LL * 1000000000000000LL; } } else if (*rp == 'e' || *rp == 'E') { rp++; if (rp < ep && (*rp == 'i' || *rp == 'I')) { num *= 1LL << 60; } else { num *= 1LL * 1000000000000000000LL; } } } if (num > INT64MAX) return INT64MAX; if (num < INT64MIN) return INT64MIN; return static_cast<int64_t>(num); } uint64_t StrToIntOct(std::string_view str, uint64_t defval) { const unsigned char* rp = reinterpret_cast<const unsigned char*>(str.data()); const unsigned char* ep = rp + str.size(); while (rp < ep && *rp <= ' ') { rp++; } bool has_number = false; uint64_t num = 0; while (rp < ep) { if (*rp >= '0' && *rp <= '7') { num = num * 8 + *rp - '0'; } else { break; } has_number = true; rp++; } if (!has_number) { return defval; } return num; } uint64_t StrToIntHex(std::string_view str, uint64_t defval) { const unsigned char* rp = reinterpret_cast<const unsigned char*>(str.data()); const unsigned char* ep = rp + str.size(); while (rp < ep && *rp <= ' ') { rp++; } if (rp < ep - 1 && rp[0] == '0' && (rp[1] == 'x' || rp[1] == 'X')) { rp += 
2; } bool has_number = false; uint64_t num = 0; while (rp < ep) { if (*rp >= '0' && *rp <= '9') { num = num * 0x10 + *rp - '0'; } else if (*rp >= 'a' && *rp <= 'f') { num = num * 0x10 + *rp - 'a' + 10; } else if (*rp >= 'A' && *rp <= 'F') { num = num * 0x10 + *rp - 'A' + 10; } else { break; } has_number = true; rp++; } if (!has_number) { return defval; } return num; } uint64_t StrToIntBigEndian(std::string_view str) { if (str.empty()) { return 0; } const size_t size = std::min(str.size(), sizeof(uint64_t)); return ReadFixNum(str.data(), size); } double StrToDouble(std::string_view str, double defval) { const unsigned char* rp = reinterpret_cast<const unsigned char*>(str.data()); const unsigned char* ep = rp + str.size(); while (rp < ep && *rp <= ' ') { rp++; } int32_t sign = 1; if (rp < ep) { if (*rp == '-') { rp++; sign = -1; } else if (*rp == '+') { rp++; } } if (rp < ep - 2 && (rp[0] == 'i' || rp[0] == 'I') && (rp[1] == 'n' || rp[1] == 'N') && (rp[2] == 'f' || rp[2] == 'F')) { return HUGE_VAL * sign; } if (rp < ep - 2 && (rp[0] == 'n' || rp[0] == 'N') && (rp[1] == 'a' || rp[1] == 'A') && (rp[2] == 'n' || rp[2] == 'N')) { return DOUBLENAN; } while (rp < ep && *rp <= ' ') { rp++; } bool has_number = false; long double num = 0; int32_t col = 0; while (rp < ep) { if (*rp < '0' || *rp > '9') break; has_number = true; num = num * 10 + *rp - '0'; rp++; if (num > 0) col++; } if (rp < ep && *rp == '.') { rp++; long double fract = 0.0; long double base = 10; while (col < 16 && rp < ep) { if (*rp < '0' || *rp > '9') break; has_number = true; fract += (*rp - '0') / base; rp++; col++; base *= 10; } num += fract; } if (!has_number) { return defval; } if (rp < ep && (*rp == 'e' || *rp == 'E')) { rp++; const std::string_view pow_expr(reinterpret_cast<const char*>(rp), ep - rp); num *= std::pow((long double)10, (long double)StrToInt(pow_expr)); } return num * sign; } void VSPrintF(std::string* dest, const char* format, va_list ap) { assert(dest != nullptr && format != nullptr); 
while (*format != '\0') { if (*format == '%') { char cbuf[NUM_BUFFER_SIZE]; cbuf[0] = '%'; size_t cbsiz = 1; int32_t lnum = 0; format++; while (std::strchr("0123456789 .+-hlLz", *format) && *format != '\0' && cbsiz < NUM_BUFFER_SIZE - 1) { if (*format == 'l' || *format == 'L') lnum++; cbuf[cbsiz++] = *(format++); } cbuf[cbsiz++] = *format; cbuf[cbsiz] = '\0'; switch (*format) { case 's': { const char* tmp = va_arg(ap, const char*); if (tmp) { if (cbsiz == 2) { dest->append(tmp); } else { char tbuf[NUM_BUFFER_SIZE * 2]; size_t tsiz = std::snprintf(tbuf, sizeof(tbuf), cbuf, tmp); dest->append(tbuf, tsiz); } } else { dest->append("(null)"); } break; } case 'd': { char tbuf[NUM_BUFFER_SIZE]; size_t tsiz; if (lnum >= 2) { tsiz = std::sprintf(tbuf, cbuf, va_arg(ap, long long)); } else if (lnum >= 1) { tsiz = std::sprintf(tbuf, cbuf, va_arg(ap, long)); } else { tsiz = std::sprintf(tbuf, cbuf, va_arg(ap, int)); } dest->append(tbuf, tsiz); break; } case 'o': case 'u': case 'x': case 'X': case 'c': { char tbuf[NUM_BUFFER_SIZE]; size_t tsiz; if (lnum >= 2) { tsiz = std::sprintf(tbuf, cbuf, va_arg(ap, unsigned long long)); } else if (lnum >= 1) { tsiz = std::sprintf(tbuf, cbuf, va_arg(ap, unsigned long)); } else { tsiz = std::sprintf(tbuf, cbuf, va_arg(ap, unsigned int)); } dest->append(tbuf, tsiz); break; } case 'e': case 'E': case 'f': case 'g': case 'G': { char tbuf[NUM_BUFFER_SIZE * 2]; size_t tsiz; if (lnum >= 1) { tsiz = std::snprintf(tbuf, sizeof(tbuf), cbuf, va_arg(ap, long double)); } else { tsiz = std::snprintf(tbuf, sizeof(tbuf), cbuf, va_arg(ap, double)); } if (tsiz > sizeof(tbuf)) { tbuf[sizeof(tbuf)-1] = '*'; tsiz = sizeof(tbuf); } dest->append(tbuf, tsiz); break; } case 'p': { char tbuf[NUM_BUFFER_SIZE]; size_t tsiz = std::sprintf(tbuf, "%p", va_arg(ap, void*)); dest->append(tbuf, tsiz); break; } case '%': { dest->push_back('%'); break; } } } else { dest->append(format, 1); } format++; } } bool StrToBool(std::string_view str, bool defval) { const std::string& 
lower = StrLowerCase(StrStripSpace(str)); if (lower == "true" || lower == "t" || lower == "yes" || lower == "y" || lower == "1") { return true; } if (lower == "false" || lower == "f" || lower == "no" || lower == "n" || lower == "0") { return false; } return defval; } void SPrintF(std::string* dest, const char* format, ...) { assert(dest != nullptr && format != nullptr); va_list ap; va_start(ap, format); VSPrintF(dest, format, ap); va_end(ap); } std::string SPrintF(const char* format, ...) { assert(format != nullptr); std::string str; va_list ap; va_start(ap, format); VSPrintF(&str, format, ap); va_end(ap); return str; } std::string IntToStrBigEndian(uint64_t data, size_t size) { if (size < 1) { return ""; } size = std::min(size, sizeof(data)); std::string str(size, 0); WriteFixNum(const_cast<char*>(str.data()), data, size); return str; } std::vector<std::string> StrSplit(std::string_view str, char delim, bool skip_empty) { std::vector<std::string> segments; segments.reserve(2); size_t i = 0; while (i < str.size()) { const size_t pos = str.find(delim, i); if (pos == std::string::npos) { break; } if (!skip_empty || i < pos) { segments.emplace_back(str.substr(i, pos - i)); } i = pos + 1; } if (!skip_empty || i < str.size()) { segments.emplace_back(str.substr(i)); } return segments; } std::vector<std::string> StrSplit(std::string_view str, std::string_view delim, bool skip_empty) { std::vector<std::string> segments; if (delim.empty()) { segments.reserve(str.size()); for (const char c : str) { segments.emplace_back(std::string(1, c)); } } else { segments.reserve(2); size_t i = 0; while (i < str.size()) { const size_t pos = str.find(delim, i); if (pos == std::string::npos) { break; } if (!skip_empty || i < pos) { segments.emplace_back(str.substr(i, pos - i)); } i = pos + delim.size(); } if (!skip_empty || i < str.size()) { segments.emplace_back(str.substr(i)); } } return segments; } std::vector<std::string> StrSplitAny(std::string_view str, std::string_view delims, bool 
skip_empty) { std::vector<std::string> segments; segments.reserve(2); size_t i = 0; while (i < str.size()) { const size_t pos = str.find_first_of(delims, i); if (pos == std::string::npos) { break; } if (!skip_empty || i < pos) { segments.emplace_back(str.substr(i, pos - i)); } i = pos + 1; } if (!skip_empty || i < str.size()) { segments.emplace_back(str.substr(i)); } return segments; } std::map<std::string, std::string> StrSplitIntoMap( std::string_view str, std::string_view delim_records, std::string_view delim_kv) { std::map<std::string, std::string> map; for (const auto& record : StrSplit(str, delim_records, true)) { const size_t pos = record.find(delim_kv); if (pos != std::string::npos) { map.emplace(record.substr(0, pos), record.substr(pos + delim_kv.size())); } } return map; } std::string StrUpperCase(std::string_view str) { std::string converted; converted.reserve(str.size()); for (int32_t c : str) { if (c >= 'a' && c <= 'z') { c -= 'a' - 'A'; } converted.push_back(c); } return converted; } std::string StrLowerCase(std::string_view str) { std::string converted; converted.reserve(str.size()); for (int32_t c : str) { if (c >= 'A' && c <= 'Z') { c += 'a' - 'A'; } converted.push_back(c); } return converted; } bool StrContains(std::string_view text, std::string_view pattern) { return memmem(text.data(), text.size(), pattern.data(), pattern.size()) != nullptr; } bool StrBeginsWith(std::string_view text, std::string_view pattern) { if (pattern.size() > text.size()) { return false; } return std::memcmp(text.data(), pattern.data(), pattern.size()) == 0; } bool StrEndsWith(std::string_view text, std::string_view pattern) { if (pattern.size() > text.size()) { return false; } return std::memcmp(text.data() + text.size() - pattern.size(), pattern.data(), pattern.size()) == 0; } int32_t StrCaseCompare(std::string_view a, std::string_view b) { int32_t length = std::min(a.size(), b.size()); for (int32_t i = 0; i < length; i++) { int32_t ac = static_cast<unsigned 
char>(a[i]); if (ac >= 'A' && ac <= 'Z') { ac += 'a' - 'A'; } int32_t bc = static_cast<unsigned char>(b[i]); if (bc >= 'A' && bc <= 'Z') { bc += 'a' - 'A'; } if (ac != bc) { return ac < bc ? -1 : 1; } } if (a.size() < b.size()) { return -1; } if (a.size() > b.size()) { return 1; } return 0; } int32_t StrSearch(std::string_view text, std::string_view pattern) { return text.find(pattern); } int32_t StrSearchDoubleLoop(std::string_view text, std::string_view pattern) { if (pattern.size() > text.size()) { return -1; } const size_t text_end = text.size() - pattern.size() + 1; for (size_t text_index = 0; text_index < text_end; text_index++) { size_t pattern_index = 0; while (pattern_index < pattern.size() && text[text_index + pattern_index] == pattern[pattern_index]) { pattern_index++; } if (pattern_index == pattern.size()) { return text_index; } } return -1; } int32_t StrSearchMemchr(std::string_view text, std::string_view pattern) { if (pattern.empty()) { return 0; } const int32_t first_pattern_char = pattern.front(); const char* pattern_data = pattern.data() + 1; const int32_t pattern_size = pattern.size() - 1; const char* current_pointer = text.data(); int32_t size = static_cast<int32_t>(text.size()) - static_cast<int32_t>(pattern.size()) + 1; while (size > 0) { const char* match_pointer = static_cast<const char*>(std::memchr(current_pointer, first_pattern_char, size)); if (match_pointer == nullptr) { break; } if (memcmp(match_pointer + 1, pattern_data, pattern_size) == 0) { return match_pointer - text.data(); } match_pointer++; size -= match_pointer - current_pointer; current_pointer = match_pointer; } return -1; } int32_t StrSearchMemmem(std::string_view text, std::string_view pattern) { const void* result = memmem(text.data(), text.size(), pattern.data(), pattern.size()); if (result == nullptr) { return -1; } return static_cast<const char*>(result) - text.data(); } int32_t StrSearchKMP(std::string_view text, std::string_view pattern) { if (pattern.empty()) { 
return 0; } std::vector<int32_t> table(pattern.size() + 1); table[0] = -1; size_t pattern_index = 0; int32_t offset = -1; while (pattern_index < pattern.size()) { while (offset >= 0 && pattern[pattern_index] != pattern[offset]) { offset = table[offset]; } pattern_index++; offset++; table[pattern_index] = offset; } size_t text_index = 0; offset = 0; while (text_index < text.size() && offset < static_cast<int32_t>(pattern.size())) { while (offset >= 0 && text[text_index] != pattern[offset]) { offset = table[offset]; } text_index++; offset++; } if (offset == static_cast<int32_t>(pattern.size())) { return text_index - pattern.size(); } return -1; } int32_t StrSearchBM(std::string_view text, std::string_view pattern) { if (pattern.empty()) { return 0; } int32_t table[UINT8MAX]; for (int32_t table_index = 0; table_index < UINT8MAX; table_index++) { table[table_index] = pattern.size(); } int32_t shift = pattern.size(); int32_t pattern_index = 0; while (shift > 0) { const uint8_t table_index = pattern[pattern_index++]; table[table_index] = --shift; } int32_t pattern_end = pattern.size() - 1; int32_t begin_index = 0; int32_t end_index = text.size() - pattern_end; while (begin_index < end_index) { int32_t pattern_index = pattern_end; while (text[begin_index + pattern_index] == pattern[pattern_index]) { if (pattern_index == 0) { return begin_index; } pattern_index--; } const uint8_t table_index = text[begin_index + pattern_index]; const int32_t step = table[table_index] - pattern_end + pattern_index; begin_index += step > 0 ? 
step : 2; } return -1; } int32_t StrSearchRK(std::string_view text, std::string_view pattern) { if (pattern.empty()) { return 0; } const unsigned char* text_p = reinterpret_cast<const unsigned char*>(text.data()); const unsigned char* pattern_p = reinterpret_cast<const unsigned char*>(pattern.data()); constexpr int32_t base = 239; constexpr int32_t modulo = 1798201; int32_t power = 1; for (size_t i = 0; i < pattern.size(); i++) { power = (power * base) % modulo; } int32_t pattern_hash = 0; for (size_t i = 0; i < pattern.size(); i++) { pattern_hash = pattern_hash * base + pattern_p[i]; pattern_hash %= modulo; } int32_t text_hash = 0; for (size_t i = 0; i < text.size(); i++) { text_hash = text_hash * base + text_p[i]; text_hash %= modulo; if (i >= pattern.size()) { text_hash -= power * text_p[i - pattern.size()] % modulo; if (text_hash < 0) { text_hash += modulo; } } if (pattern_hash == text_hash && i >= pattern.size() - 1) { const size_t offset = i - (pattern.size() - 1); if (std::memcmp(text_p + offset, pattern_p, pattern.size()) == 0) { return offset; } } } return -1; } int32_t StrSearchZ(std::string_view text, std::string_view pattern) { std::string concat; concat.reserve(pattern.size() + 1 + text.size()); concat.append(pattern); concat.push_back('\0'); concat.append(text); std::vector<size_t> z(concat.size(), 0); size_t left = 0; size_t right = 0; for (size_t i = 1; i < concat.size(); i++) { if (i > right) { left = i; right = i; while (right < concat.size() && concat[right - left] == concat[right]) { right++; } z[i] = right - left; right--; } else { size_t k = i - left; if (z[k] < right - i + 1) { z[i] = z[k]; } else { left = i; while (right < concat.size() && concat[right - left] == concat[right]) { right++; } z[i] = right - left; right--; } } } for (size_t i = pattern.size() + 1; i < concat.size(); i++) { if (z[i] == pattern.size()) { return i - 1 - pattern.size(); } } return -1; } std::vector<int32_t> StrSearchWhole( std::string_view text, std::string_view 
pattern, size_t max_results) { std::vector<int32_t> result; if (pattern.empty()) { const size_t num_results = max_results > 0 ? std::min(max_results, text.size()) : text.size(); result.reserve(num_results); for (size_t i = 0; i < num_results; ++i) { result.emplace_back(i); } return result; } size_t pos = 0; while (pos != text.size()) { pos = text.find(pattern, pos); if (pos == std::string::npos) { break; } result.emplace_back(pos); if (max_results > 0 && result.size() >= max_results) { return result; } pos++; } return result; } std::vector<int32_t> StrSearchWholeKMP( std::string_view text, std::string_view pattern, size_t max_results) { std::vector<int32_t> result; if (pattern.empty()) { const size_t num_results = max_results > 0 ? std::min(max_results, text.size()) : text.size(); result.reserve(num_results); for (size_t i = 0; i < num_results; ++i) { result.emplace_back(i); } return result; } std::vector<int32_t> table(pattern.size() + 1); table[0] = -1; size_t pattern_index = 0; int32_t offset = -1; while (pattern_index < pattern.size()) { while (offset >= 0 && pattern[pattern_index] != pattern[offset]) { offset = table[offset]; } pattern_index++; offset++; table[pattern_index] = offset; } size_t text_index = 0; while (text_index < text.size()) { offset = 0; while (text_index < text.size() && offset < static_cast<int32_t>(pattern.size())) { while (offset >= 0 && text[text_index] != pattern[offset]) { offset = table[offset]; } text_index++; offset++; } if (offset == static_cast<int32_t>(pattern.size())) { const size_t offset = text_index - pattern.size(); result.emplace_back(offset); if (max_results > 0 && result.size() >= max_results) { return result; } text_index = offset + 1; } } return result; } std::vector<int32_t> StrSearchWholeBM( std::string_view text, std::string_view pattern, size_t max_results) { std::vector<int32_t> result; if (pattern.empty()) { const size_t num_results = max_results > 0 ? 
std::min(max_results, text.size()) : text.size(); result.reserve(num_results); for (size_t i = 0; i < num_results; ++i) { result.emplace_back(i); } return result; } int32_t table[UINT8MAX]; for (int32_t table_index = 0; table_index < UINT8MAX; table_index++) { table[table_index] = pattern.size(); } int32_t shift = pattern.size(); int32_t pattern_index = 0; while (shift > 0) { const uint8_t table_index = pattern[pattern_index++]; table[table_index] = --shift; } int32_t pattern_end = pattern.size() - 1; int32_t begin_index = 0; int32_t end_index = text.size() - pattern_end; while (begin_index < end_index) { int32_t pattern_index = pattern_end; bool hit = false; while (text[begin_index + pattern_index] == pattern[pattern_index]) { if (pattern_index == 0) { result.emplace_back(begin_index); if (max_results > 0 && result.size() >= max_results) { return result; } hit = true; break; } pattern_index--; } const uint8_t table_index = text[begin_index + pattern_index]; const int32_t step = table[table_index] - pattern_end + pattern_index; begin_index += step > 0 ? step : (hit ? 1 : 2); } return result; } std::vector<int32_t> StrSearchWholeRK( std::string_view text, std::string_view pattern, size_t max_results) { std::vector<int32_t> result; if (pattern.empty()) { const size_t num_results = max_results > 0 ? 
std::min(max_results, text.size()) : text.size(); result.reserve(num_results); for (size_t i = 0; i < num_results; ++i) { result.emplace_back(i); } return result; } const unsigned char* text_p = reinterpret_cast<const unsigned char*>(text.data()); const unsigned char* pattern_p = reinterpret_cast<const unsigned char*>(pattern.data()); constexpr int32_t base = 239; constexpr int32_t modulo = 1798201; int32_t power = 1; for (size_t i = 0; i < pattern.size(); i++) { power = (power * base) % modulo; } int32_t pattern_hash = 0; for (size_t i = 0; i < pattern.size(); i++) { pattern_hash = (pattern_hash * base + pattern_p[i]) % modulo; } int32_t text_hash = 0; for (size_t i = 0; i < text.size(); i++) { text_hash = (text_hash * base + text_p[i]) % modulo; if (i >= pattern.size()) { text_hash -= power * text_p[i - pattern.size()] % modulo; if (text_hash < 0) { text_hash += modulo; } } if (pattern_hash == text_hash && i >= pattern.size() - 1) { const size_t offset = i - (pattern.size() - 1); if (std::memcmp(text_p + offset, pattern_p, pattern.size()) == 0) { result.emplace_back(offset); if (max_results > 0 && result.size() >= max_results) { return result; } } } } return result; } std::vector<std::vector<int32_t>> StrSearchBatch( std::string_view text, const std::vector<std::string>& patterns, size_t max_results) { std::vector<std::vector<int32_t>> result; result.resize(patterns.size()); for (size_t i = 0; i < patterns.size(); i++) { result[i] = StrSearchWhole(text, patterns[i], max_results); } return result; } std::vector<std::vector<int32_t>> StrSearchBatchKMP( std::string_view text, const std::vector<std::string>& patterns, size_t max_results) { std::vector<std::vector<int32_t>> result; result.resize(patterns.size()); for (size_t i = 0; i < patterns.size(); i++) { result[i] = StrSearchWholeKMP(text, patterns[i], max_results); } return result; } std::vector<std::vector<int32_t>> StrSearchBatchBM( std::string_view text, const std::vector<std::string>& patterns, size_t 
max_results) { std::vector<std::vector<int32_t>> result; result.resize(patterns.size()); for (size_t i = 0; i < patterns.size(); i++) { result[i] = StrSearchWholeBM(text, patterns[i], max_results); } return result; } std::vector<std::vector<int32_t>> StrSearchBatchRK( std::string_view text, const std::vector<std::string>& patterns, size_t max_results) { std::vector<std::vector<int32_t>> result(patterns.size()); const unsigned char* text_p = reinterpret_cast<const unsigned char*>(text.data()); std::map<size_t, std::vector<size_t>> batch; for (size_t i = 0; i < patterns.size(); i++) { batch[patterns[i].size()].emplace_back(i); } for (const auto& batch_item : batch) { const size_t pattern_size = batch_item.first; const auto& pattern_indices = batch_item.second; if (pattern_size < 1) { for (const auto& pattern_index : pattern_indices) { for (size_t i = 0; i < text.size(); i++) { result[pattern_index].emplace_back(i); } } continue; } constexpr int32_t base = 239; constexpr int32_t modulo = 1798201; int32_t power = 1; for (size_t i = 0; i < pattern_size; i++) { power = (power * base) % modulo; } std::vector<std::pair<int32_t, size_t>> pattern_hashes; for (const auto& pattern_index : pattern_indices) { const auto& pattern = patterns[pattern_index]; const unsigned char* pattern_p = reinterpret_cast<const unsigned char*>(pattern.data()); int32_t pattern_hash = 0; for (size_t i = 0; i < pattern_size; i++) { const int32_t c = i < pattern.size() ? 
pattern_p[i] : 0; pattern_hash = (pattern_hash * base + c) % modulo; } pattern_hashes.emplace_back(std::make_pair(pattern_hash, pattern_index)); } int32_t text_hash = 0; for (size_t i = 0; i < text.size(); i++) { text_hash = (text_hash * base + text_p[i]) % modulo; if (i >= pattern_size) { text_hash -= power * text_p[i - pattern_size] % modulo; if (text_hash < 0) { text_hash += modulo; } } for (const auto& pattern_hash : pattern_hashes) { const auto& pattern = patterns[pattern_hash.second]; if (pattern_hash.first == text_hash) { if (i >= pattern.size() - 1) { const size_t offset = i - (pattern.size() - 1); if (std::memcmp(text_p + offset, pattern.data(), pattern.size()) == 0) { if (max_results > 0) { auto& pat_result = result[pattern_hash.second]; if (pat_result.size() < max_results) { pat_result.emplace_back(offset); } } else { result[pattern_hash.second].emplace_back(offset); } } } } } } } return result; } std::string StrStripSpace(std::string_view str) { std::string converted; converted.reserve(str.size()); int32_t content_size = 0; for (size_t i = 0; i < str.size(); i++) { const int32_t c = static_cast<unsigned char>(str[i]); if (c <= ' ') { if (converted.empty()) { continue; } converted.push_back(c); } else { converted.push_back(c); content_size = converted.size(); } } converted.resize(content_size); return converted; } std::string StrStripLine(std::string_view str) { size_t size = str.size(); while (size > 0) { const int32_t c = str[size - 1]; if (c != '\r' && c != '\n') { break; } size--; } return std::string(str.data(), size); } std::string StrSqueezeAndStripSpace(std::string_view str) { std::string converted; converted.reserve(str.size()); int32_t content_size = 0; bool last_is_space = true; for (size_t i = 0; i < str.size(); i++) { const int32_t c = static_cast<unsigned char>(str[i]); if (c <= ' ') { if (last_is_space) { continue; } converted.push_back(c); last_is_space = true; } else { converted.push_back(c); last_is_space = false; content_size = 
converted.size(); } } converted.resize(content_size); return converted; } std::string StrTrimForTSV(std::string_view str) { std::string converted; converted.reserve(str.size()); for (size_t i = 0; i < str.size(); i++) { int32_t c = static_cast<unsigned char>(str[i]); if (c <= ' ' || c == 0x7f) { c = ' '; } converted.push_back(c); } return converted; } std::string StrEscapeC(std::string_view str, bool esc_nonasc) { std::string converted; converted.reserve(str.size() * 2); const uint8_t* rp = reinterpret_cast<const uint8_t*>(str.data()); const uint8_t* ep = rp + str.size(); while (rp < ep) { const int32_t c = *rp; switch (c) { case '\0': { if (rp == ep - 1) { converted.append("\\0"); } else { const int32_t n = *(rp + 1); if (n >= '0' && n <= '7') { converted.append("\\x00"); } else { converted.append("\\0"); } } break; } case '\a': converted.append("\\a"); break; case '\b': converted.append("\\b"); break; case '\t': converted.append("\\t"); break; case '\n': converted.append("\\n"); break; case '\v': converted.append("\\v"); break; case '\f': converted.append("\\f"); break; case '\r': converted.append("\\r"); break; case '\\': converted.append("\\\\"); break; default: { if ((c >= 0 && c < ' ') || c == 0x7F) { converted.append(SPrintF("\\x%02x", c)); } else if (esc_nonasc && (c >= 0x80)) { converted.append(SPrintF("\\x%02x", c)); } else { converted.push_back(c); } break; } } rp++; } return converted; } std::string StrUnescapeC(std::string_view str) { std::string converted; converted.reserve(str.size()); for (size_t i = 0; i < str.size();) { int32_t c = str[i]; if (c == '\\') { i++; if (i >= str.size()) { break; } c = str[i]; switch (c) { case 'a': i++; converted.push_back('\a'); break; case 'b': i++; converted.push_back('\b'); break; case 't': i++; converted.push_back('\t'); break; case 'n': i++; converted.push_back('\n'); break; case 'v': i++; converted.push_back('\v'); break; case 'f': i++; converted.push_back('\f'); break; case 'r': i++; converted.push_back('\r'); 
break; case 'x': case 'X': { i++; int32_t num = 0; const size_t end = std::min(i + 2, str.size()); while (i < end) { c = str[i]; if (c >= '0' && c <= '9') { num = num * 16 + c - '0'; } else if (c >= 'A' && c <= 'F') { num = num * 16 + c - 'A' + 10; } else if (c >= 'a' && c <= 'f') { num = num * 16 + c - 'a' + 10; } else { break; } i++; } converted.push_back(num); break; } default: if (c >= '0' && c <= '8') { i++; int32_t num = c - '0'; const size_t end = std::min(i + 2, str.size()); while (i < end) { c = str[i]; if (c >= '0' && c <= '7') { num = num * 8 + c - '0'; } else { break; } i++; } converted.push_back(num); } else { converted.push_back(c); i++; } break; } } else { converted.push_back(c); i++; } } return converted; } std::string StrEncodeBase64(std::string_view str) { const char* const table = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; const unsigned char* rp = reinterpret_cast<const unsigned char*>(str.data()); std::string encoded; encoded.reserve(str.size() * 4 / 3 + 4); for (size_t i = 0; i < str.size(); i += 3) { switch (str.size() - i) { case 1: { encoded.push_back(table[rp[0] >> 2]); encoded.push_back(table[(rp[0] & 3) << 4]); encoded.push_back('='); encoded.push_back('='); break; } case 2: { encoded.push_back(table[rp[0] >> 2]); encoded.push_back(table[((rp[0] & 3) << 4) + (rp[1] >> 4)]); encoded.push_back(table[(rp[1] & 0xf) << 2]); encoded.push_back('='); break; } default: { encoded.push_back(table[rp[0] >> 2]); encoded.push_back(table[((rp[0] & 3) << 4) + (rp[1] >> 4)]); encoded.push_back(table[((rp[1] & 0xf) << 2) + (rp[2] >> 6)]); encoded.push_back(table[rp[2] & 0x3f]); break; } } rp += 3; } return encoded; } std::string StrDecodeBase64(std::string_view str) { size_t index = 0; size_t num_equals = 0; std::string decoded; decoded.reserve(str.size()); while (index < str.size() && num_equals == 0) { size_t bits = 0; size_t step = 0; while (index < str.size() && step < 4) { if (str[index] >= 'A' && str[index] <= 'Z') { bits = 
(bits << 6) | (str[index] - 'A'); step++; } else if (str[index] >= 'a' && str[index] <= 'z') { bits = (bits << 6) | (str[index] - 'a' + 26); step++; } else if (str[index] >= '0' && str[index] <= '9') { bits = (bits << 6) | (str[index] - '0' + 52); step++; } else if (str[index] == '+') { bits = (bits << 6) | 62; step++; } else if (str[index] == '/') { bits = (bits << 6) | 63; step++; } else if (str[index] == '=') { bits <<= 6; step++; num_equals++; } index++; } if (step == 0 && index >= str.size()) continue; switch (num_equals) { case 0: { decoded.push_back((bits >> 16) & 0xff); decoded.push_back((bits >> 8) & 0xff); decoded.push_back(bits & 0xff);; break; } case 1: { decoded.push_back((bits >> 16) & 0xff); decoded.push_back((bits >> 8) & 0xff); break; } case 2: { decoded.push_back((bits >> 16) & 0xff); break; } } } return decoded; } std::string StrEncodeURL(std::string_view str) { const unsigned char* rp = reinterpret_cast<const unsigned char*>(str.data()); std::string encoded; encoded.reserve(str.size() * 2); for (const unsigned char* ep = rp + str.size(); rp < ep; rp++) { int32_t c = *rp; if ((c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || (c != '\0' && std::strchr("_-.~", c))) { encoded.push_back(c); } else { encoded.push_back('%'); int32_t num = c >> 4; if (num < 10) { encoded.push_back('0' + num); } else { encoded.push_back('a' + num - 10); } num = c & 0x0f; if (num < 10) { encoded.push_back('0' + num); } else { encoded.push_back('a' + num - 10); } } } return encoded; } std::string StrDecodeURL(std::string_view str) { std::string decoded; decoded.reserve(str.size()); const char *rp = str.data(); const char* ep = rp + str.size(); while (rp < ep) { int32_t c = *rp; if (c == '%') { int32_t num = 0; if (++rp >= ep) break; c = *rp; if (c >= '0' && c <= '9') { num = c - '0'; } else if (c >= 'a' && c <= 'f') { num = c - 'a' + 10; } else if (c >= 'A' && c <= 'F') { num = c - 'A' + 10; } if (++rp >= ep) break; c = *rp; if (c >= '0' && c <= 
'9') { num = num * 0x10 + c - '0'; } else if (c >= 'a' && c <= 'f') { num = num * 0x10 + c - 'a' + 10; } else if (c >= 'A' && c <= 'F') { num = num * 0x10 + c - 'A' + 10; } decoded.push_back(num); rp++; } else if (c == '+') { decoded.push_back(' '); rp++; } else if (c <= ' ' || c == 0x7f) { rp++; } else { decoded.push_back(c); rp++; } } return decoded; } std::vector<uint32_t> ConvertUTF8ToUCS4(std::string_view utf) { std::vector<uint32_t> ucs; ucs.reserve(utf.size()); const size_t size = utf.size(); size_t index = 0; while (index < size) { uint32_t c = (unsigned char)utf[index]; if (c < 0x80) { ucs.emplace_back(c); } else if (c < 0xe0) { if (c >= 0xc0 && index + 1 < size) { c = ((c & 0x1f) << 6) | (utf[index+1] & 0x3f); if (c >= 0x80) ucs.emplace_back(c); index++; } } else if (c < 0xf0) { if (index + 2 < size) { c = ((c & 0x0f) << 12) | ((utf[index+1] & 0x3f) << 6) | (utf[index+2] & 0x3f); if (c >= 0x800) ucs.emplace_back(c); index += 2; } } else if (c < 0xf8) { if (index + 3 < size) { c = ((c & 0x07) << 18) | ((utf[index+1] & 0x3f) << 12) | ((utf[index+2] & 0x3f) << 6) | (utf[index+3] & 0x3f); if (c >= 0x10000) ucs.emplace_back(c); index += 3; } } else if (c < 0xfc) { if (index + 4 < size) { c = ((c & 0x03) << 24) | ((utf[index+1] & 0x3f) << 18) | ((utf[index+2] & 0x3f) << 12) | ((utf[index+3] & 0x3f) << 6) | (utf[index+4] & 0x3f); if (c >= 0x200000) ucs.emplace_back(c); index += 4; } } else if (c < 0xfe) { if (index + 5 < size) { c = ((c & 0x01) << 30) | ((utf[index+1] & 0x3f) << 24) | ((utf[index+2] & 0x3f) << 18) | ((utf[index+3] & 0x3f) << 12) | ((utf[index+4] & 0x3f) << 6) | (utf[index+5] & 0x3f); if (c >= 0x4000000) ucs.emplace_back(c); index += 5; } } index++; } return ucs; } std::string ConvertUCS4ToUTF8(const std::vector<uint32_t>& ucs) { std::string utf; utf.reserve(ucs.size() * 3); std::vector<uint32_t>::const_iterator it = ucs.begin(); std::vector<uint32_t>::const_iterator itend = ucs.end(); while (it != itend) { const uint32_t c = *it; if (c < 0x80) { 
utf.push_back(c); } else if (c < 0x800) { utf.push_back(0xc0 | (c >> 6)); utf.push_back(0x80 | (c & 0x3f)); } else if (c < 0x10000) { utf.push_back(0xe0 | (c >> 12)); utf.push_back(0x80 | ((c & 0xfff) >> 6)); utf.push_back(0x80 | (c & 0x3f)); } else if (c < 0x200000) { utf.push_back(0xf0 | (c >> 18)); utf.push_back(0x80 | ((c & 0x3ffff) >> 12)); utf.push_back(0x80 | ((c & 0xfff) >> 6)); utf.push_back(0x80 | (c & 0x3f)); } else if (c < 0x4000000) { utf.push_back(0xf8 | (c >> 24)); utf.push_back(0x80 | ((c & 0xffffff) >> 18)); utf.push_back(0x80 | ((c & 0x3ffff) >> 12)); utf.push_back(0x80 | ((c & 0xfff) >> 6)); utf.push_back(0x80 | (c & 0x3f)); } else if (c < 0x80000000) { utf.push_back(0xfc | (c >> 30)); utf.push_back(0x80 | ((c & 0x3fffffff) >> 24)); utf.push_back(0x80 | ((c & 0xffffff) >> 18)); utf.push_back(0x80 | ((c & 0x3ffff) >> 12)); utf.push_back(0x80 | ((c & 0xfff) >> 6)); utf.push_back(0x80 | (c & 0x3f)); } ++it; } return utf; } std::wstring ConvertUTF8ToWide(std::string_view utf) { std::wstring wstr; wstr.reserve(utf.size()); const size_t size = utf.size(); size_t index = 0; while (index < size) { uint32_t c = (unsigned char)utf[index]; if (c < 0x80) { wstr.push_back(c); } else if (c < 0xe0) { if (c >= 0xc0 && index + 1 < size) { c = ((c & 0x1f) << 6) | (utf[index+1] & 0x3f); if (c >= 0x80) wstr.push_back(c); index++; } } else if (c < 0xf0) { if (index + 2 < size) { c = ((c & 0x0f) << 12) | ((utf[index+1] & 0x3f) << 6) | (utf[index+2] & 0x3f); if (c >= 0x800) wstr.push_back(c); index += 2; } } else if (c < 0xf8) { if (index + 3 < size) { c = ((c & 0x07) << 18) | ((utf[index+1] & 0x3f) << 12) | ((utf[index+2] & 0x3f) << 6) | (utf[index+3] & 0x3f); if (c >= 0x10000) wstr.push_back(c); index += 3; } } else if (c < 0xfc) { if (index + 4 < size) { c = ((c & 0x03) << 24) | ((utf[index+1] & 0x3f) << 18) | ((utf[index+2] & 0x3f) << 12) | ((utf[index+3] & 0x3f) << 6) | (utf[index+4] & 0x3f); if (c >= 0x200000) wstr.push_back(c); index += 4; } } else if (c < 0xfe) 
{ if (index + 5 < size) { c = ((c & 0x01) << 30) | ((utf[index+1] & 0x3f) << 24) | ((utf[index+2] & 0x3f) << 18) | ((utf[index+3] & 0x3f) << 12) | ((utf[index+4] & 0x3f) << 6) | (utf[index+5] & 0x3f); if (c >= 0x4000000) wstr.push_back(c); index += 5; } } index++; } return wstr; } std::string ConvertWideToUTF8(const std::wstring& wstr) { std::string utf; utf.reserve(wstr.size() * 3); std::wstring::const_iterator it = wstr.begin(); std::wstring::const_iterator itend = wstr.end(); while (it != itend) { const uint32_t c = *it; if (c < 0x80) { utf.push_back(c); } else if (c < 0x800) { utf.push_back(0xc0 | (c >> 6)); utf.push_back(0x80 | (c & 0x3f)); } else if (c < 0x10000) { utf.push_back(0xe0 | (c >> 12)); utf.push_back(0x80 | ((c & 0xfff) >> 6)); utf.push_back(0x80 | (c & 0x3f)); } else if (c < 0x200000) { utf.push_back(0xf0 | (c >> 18)); utf.push_back(0x80 | ((c & 0x3ffff) >> 12)); utf.push_back(0x80 | ((c & 0xfff) >> 6)); utf.push_back(0x80 | (c & 0x3f)); } else if (c < 0x4000000) { utf.push_back(0xf8 | (c >> 24)); utf.push_back(0x80 | ((c & 0xffffff) >> 18)); utf.push_back(0x80 | ((c & 0x3ffff) >> 12)); utf.push_back(0x80 | ((c & 0xfff) >> 6)); utf.push_back(0x80 | (c & 0x3f)); } else if (c < 0x80000000) { utf.push_back(0xfc | (c >> 30)); utf.push_back(0x80 | ((c & 0x3fffffff) >> 24)); utf.push_back(0x80 | ((c & 0xffffff) >> 18)); utf.push_back(0x80 | ((c & 0x3ffff) >> 12)); utf.push_back(0x80 | ((c & 0xfff) >> 6)); utf.push_back(0x80 | (c & 0x3f)); } ++it; } return utf; } std::string MakeRandomCharacterText( int32_t length, int32_t seed, uint8_t first_char, uint8_t last_char) { std::mt19937 mt(seed); std::uniform_int_distribution<uint8_t> dist(first_char, last_char); std::string text; text.resize(length); for (int32_t i = 0; i < length; i++) { text[i] = dist(mt); } return text; } std::string SerializeStrPair(std::string_view first, std::string_view second) { const size_t size = SizeVarNum(first.size()) + first.size() + SizeVarNum(second.size()) + second.size(); 
std::string serialized(size, 0); char* wp = const_cast<char*>(serialized.data()); wp += WriteVarNum(wp, first.size()); std::memcpy(wp, first.data(), first.size()); wp += first.size(); wp += WriteVarNum(wp, second.size()); std::memcpy(wp, second.data(), second.size()); return serialized; } void DeserializeStrPair( std::string_view serialized, std::string_view* first, std::string_view* second) { assert(first != nullptr && second != nullptr); const char* rp = serialized.data(); size_t size = serialized.size(); uint64_t first_size = 0; size_t step = ReadVarNum(rp, size, &first_size); rp += step; size -= step; *first = std::string_view(rp, first_size); rp += first_size; size -= first_size; uint64_t second_size = 0; step = ReadVarNum(rp, size, &second_size); rp += step; *second = std::string_view(rp, second_size); } std::string_view GetFirstFromSerializedStrPair(std::string_view serialized) { uint64_t first_size = 0; size_t step = ReadVarNum(serialized.data(), serialized.size(), &first_size); return std::string_view(serialized.data() + step, first_size); } std::string SerializeStrVector(const std::vector<std::string>& values) { size_t size = 0; for (const auto& value : values) { size += SizeVarNum(value.size()) + value.size(); } std::string serialized(size, 0); char* wp = const_cast<char*>(serialized.data()); for (const auto& value : values) { wp += WriteVarNum(wp, value.size()); std::memcpy(wp, value.data(), value.size()); wp += value.size(); } return serialized; } std::vector<std::string> DeserializeStrVector(std::string_view serialized) { std::vector<std::string> values; const char* rp = serialized.data(); int32_t size = serialized.size(); while (size > 0) { uint64_t value_size = 0; const size_t step = ReadVarNum(rp, size, &value_size); rp += step; size -= step; if (size < static_cast<int64_t>(value_size)) { break; } values.emplace_back(std::string(rp, value_size)); rp += value_size; size -= value_size; } return values; } std::string SerializeStrMap(const 
std::map<std::string, std::string>& records) { size_t size = 0; for (const auto& record : records) { size += SizeVarNum(record.first.size()) + record.first.size(); size += SizeVarNum(record.second.size()) + record.second.size(); } std::string serialized(size, 0); char* wp = const_cast<char*>(serialized.data()); for (const auto& record : records) { wp += WriteVarNum(wp, record.first.size()); std::memcpy(wp, record.first.data(), record.first.size()); wp += record.first.size(); wp += WriteVarNum(wp, record.second.size()); std::memcpy(wp, record.second.data(), record.second.size()); wp += record.second.size(); } return serialized; } std::map<std::string, std::string> DeserializeStrMap(std::string_view serialized) { std::map<std::string, std::string> records; const char* rp = serialized.data(); int32_t size = serialized.size(); while (size > 0) { uint64_t key_size = 0; size_t step = ReadVarNum(rp, size, &key_size); rp += step; size -= step; if (size < static_cast<int64_t>(key_size)) { break; } const char* key_ptr = rp; rp += key_size; size -= key_size; uint64_t value_size = 0; step = ReadVarNum(rp, size, &value_size); rp += step; size -= step; if (size < static_cast<int64_t>(value_size)) { break; } const char* value_ptr = rp; rp += value_size; size -= value_size; records.emplace(std::string(key_ptr, key_size), std::string(value_ptr, value_size)); } return records; } } // namespace tkrzw // END OF FILE
<reponame>gamwang/NPTSP import util from collections import deque import sys import random parent = dict() rank = dict() graph = None N = 0 def make_set(vertice): parent[vertice] = vertice rank[vertice] = 0 def find(vertice): if parent[vertice] != vertice: parent[vertice] = find(parent[vertice]) return parent[vertice] def union(vertice1, vertice2): root1 = find(vertice1) root2 = find(vertice2) if root1 != root2: if rank[root1] > rank[root2]: parent[root2] = root1 else: parent[root1] = root2 if rank[root1] == rank[root2]: rank[root2] += 1 def MSTalg(graph): edges = [] visited = [] for i in range(graph.numCities): make_set(i) visited.append(i) for node, dist in enumerate(graph.edges[i]): if (node != i) and(node not in visited): edges.append((dist, i, node, graph.colors[i])) minimum_spanning_tree = set() edges.sort() for edge in edges: weight, vertice1, vertice2, color = edge if find(vertice1) != find(vertice2): union(vertice1, vertice2) minimum_spanning_tree.add(edge) return minimum_spanning_tree def createPathMST(graph, edges): edges = sorted(list(edges), key = lambda x: x[1]) length = len(edges) for e in range(length): edges.append((edges[e][0], edges[e][2], edges[e][1], edges[e][3])) best = [] bestlen = sys.maxint for i in range(graph.numCities): tally = [] for s in range(graph.numCities): tally.append(s) visited = [] path = [] stack = [] for e in range(len(edges)): if (edges[e][1] == i or edges[e][2] == i): stack.append(edges[e]) visited = [i] pathgroup = [] path = [(i, graph.colors[i])] count = (1,graph.colors[i]) while len(stack) > 0: edge = stack.pop() if edge[1] in visited and edge[2] in visited: continue if edge[1] not in visited: v = edge[1] elif edge[2] not in visited: v = edge[2] if graph.colors[v] == count[1]: count = (count[0]+1,graph.colors[v]) if count[0] > 3: continue else: count = (1, graph.colors[v]) i = 0 tmp = [] for e in graph.edges[v]: tmp.append([e,v, i, graph.colors[i]]) i += 1 tmp = sorted(tmp, key=lambda x: x[0]) tmp.reverse() for item in 
tmp: stack.append(item) visited.append(v) path.append((v, graph.colors[v])) for i in visited: tally.remove(i) while len(tally) > 0: v = tally.pop() bestdump = sys.maxint bestloc = len(path)+1 for i in range(1,len(path)): if (graph.colors[v] != path[i][1] and graph.colors[v] != path[i+1][1]): path.insert(i+1,(v,graph.colors[v])) if calcLength(path) < bestdump: bestdump = calcLength(path) bestloc = i+1 path.remove((v,graph.colors[v])) path.insert(bestloc,(v,graph.colors[v])) length = calcLength(path) if (length < bestlen): best = path bestlen = length return best def createRandomPath(graph): currCity = random.randint(0, graph.numCities - 1) start = currCity edges = [] notVisited = range(0, graph.numCities) notVisited.remove(currCity) color = graph.colors[currCity] while len(edges) < graph.numCities - 1: v = random.randint(0, len(notVisited) - 1) nextEdge = notVisited[v] if graph.colors[nextEdge] == color: continue else: color = graph.colors[nextEdge] if nextEdge in notVisited: edges.append((currCity, nextEdge, graph.edges[currCity][nextEdge], graph.colors[currCity], graph.colors[nextEdge])) currCity = nextEdge notVisited.remove(currCity) last = edges[graph.numCities - 2][1] edges.append((last, start, graph.edges[start][last], graph.colors[last], graph.colors[start])) return edges def checkPath(graph, edges): temp = list(edges) curr = temp[0][0] start = curr color = graph.colors[curr] count = 1 lap = 0 x = 0 while len(temp) > 0: nextEdge = -1 for e in temp: if e[0] == curr or e[1] == curr: nextEdge = e break if nextEdge == -1: return False elif nextEdge[0] == curr: curr = nextEdge[1] elif nextEdge[1] == curr: curr = nextEdge[0] if x < graph.numCities: x += 1 else: lap = 1 if lap == 1: temp.remove(nextEdge) if color == graph.colors[curr] and count >= 3: return False elif color == graph.colors[curr]: count += 1 else: color = graph.colors[curr] count = 1 return True def switch2Edges(graph, edges, length): for i in range(graph.numCities): possible = [] other = list(edges) 
for e in edges: if e[0] == i or e[1] == i: possible.append(e) other.remove(e) for edge1 in possible: for edge2 in other: newList = list(edges) newEdge1 = (edge1[0], edge2[1], graph.edges[edge1[0]][edge2[1]], graph.colors[edge1[0]], graph.colors[edge2[1]]) newEdge2 = (edge1[1], edge2[0], graph.edges[edge1[1]][edge2[0]], graph.colors[edge1[1]], graph.colors[edge2[0]]) newList.remove(edge1) newList.append(newEdge1) newList.remove(edge2) newList.append(newEdge2) if not checkPath(graph, newList): newList = list(edges) newEdge1 = (edge1[0], edge2[0], graph.edges[edge1[0]][edge2[0]], graph.colors[edge1[0]], graph.colors[edge2[0]]) newEdge2 = (edge1[1], edge2[1], graph.edges[edge1[1]][edge2[1]], graph.colors[edge1[1]], graph.colors[edge2[1]]) newList.remove(edge1) newList.append(newEdge1) newList.remove(edge2) newList.append(newEdge2) if not checkPath(graph, newList): continue newLength = sum([edge[2] for edge in newList]) if newLength < length: return (newList, newLength) return 0 def localSearch2(graph, edges): """asd""" length = sum([edge[2] for edge in edges]) x = 0 newPath = edges while True: count = 0 result = switch2Edges(graph, edges, length) if result == 0: break else: newPath, newLength = result edges = newPath if newLength == length: break length = newLength count += 1 x += 1 if not checkPath(graph, newPath): print "error in path edges line 108" thing = list(edges) highest = 0 edge = 0 for e in edges: if e[2] >= highest: highest = e[2] edge = e edges.remove(edge) length -= highest curr = edge[0] path = [curr] while len(path) < graph.numCities: for e in edges: if e[0] == curr or e[1] == curr: nextEdge = e break if e[0] == curr: curr = e[1] else: curr = e[0] path.append(curr) edges.remove(nextEdge) return path, length, thing def checkVertices(graph, path): color = 'a' count = 0 seen = [] for v in path: if v in seen: return False else: seen.append(v) #print str(v) + ", color: " + str(graph.colors[v]) if color != graph.colors[v]: color = graph.colors[v] count = 1 
elif color == graph.colors[v] and count == 3: return False elif color == graph.colors[v]: count += 1 return True def toColor(i): return graph.colors[i] def last3Color(path): if len(path) < 3: return False, None c1 = toColor(path[-1]) c2 = toColor(path[-2]) c3 = toColor(path[-3]) if (c1 == "R" and c2 == "R" and c3 == "R"): return True, "R" elif (c1 == "B" and c2 == "B" and c3 == "B"): return True, "B" else: return False, None def calcDist(path): length = 0 prev = -1 for x in path: if prev >= 0: length += graph.edges[prev][x] prev = x return length def removeEmpty(paths): valid = [] for p in paths: if p is None: continue valid.append(p) return valid #[(path, dist), (path,dist)] def returnShort(paths): paths = removeEmpty(paths) if len(paths) == 0: return None shortsofar = paths[0] for p in paths: if p[1] < shortsofar[1]: shortsofar = p return shortsofar def findPath(): bestSoFar = () pathLeft = [x for x in range(0, N)] tup = bestPath([], pathLeft) return tup[0] #shortest = ([2,1,4,3], 20) #path = [4,2,1] [3] #returns [(path, length of path)] def bestPath(path, left): assert len(path) + len(left) == N if len(path) == N: return (path, calcDist(path)) collectPath = [] isLast3Same, c = last3Color(path) for x in left: if (isLast3Same and ((c == "R" and toColor(x) == "R") or (c == "B" and toColor(x) == "B"))): continue newLeft = left[:] newLeft.remove(x) newPath = path[:] newPath.append(x) shortest = bestPath(newPath, newLeft) collectPath.append(shortest) if len(collectPath) == 0: return None return returnShort(collectPath) def createPath(path): result = [0 for _ in range(N)] for x in range(len(path)): result[x] = (path[x], graph.colors[x]) return result def calcLength(result): length = 0 prev = -1 for x in range(len(result)): if prev >= 0: length += graph.edges[prev][result[x][0]] prev = result[x][0] return length def MSTGreedy(graph, start): #preprocess visited = [start] path = [(start, graph.colors[start])] tmp = [] for i, e in enumerate(graph.edges[start]): if i not in 
visited: tmp.append([e, start, i, graph.colors[i]]) tmp = sorted(tmp, key=lambda x: x[0]) tmp.reverse() stack = [] for item in tmp: stack.append(item) # Greedily find a path while len(stack) > 0: edge = stack.pop() if edge[1] in visited and edge[2] in visited and not isEdge(path, edge): continue if edge[1] not in visited: v = edge[1] u = edge[2] else: v = edge[2] u = edge[1] inserted = False # Choose whether the edge is for head or tail of the path flg = u == path[-1][0] if flg and not util.backColor(graph, path, v): visited.append(v) path.append((v, graph.colors[v])) inserted = True if not flg and not util.frontColor(graph, path, v) and not inserted: visited.append(v) path.insert(0, (v, graph.colors[v])) inserted = True if inserted: tmp = [] for i, e in enumerate(graph.edges[path[-1][0]]): if i not in visited: tmp.append([e, path[-1][0], i, graph.colors[i]]) for i, e in enumerate(graph.edges[path[0][0]]): if i not in visited: tmp.append([e, path[0][0], i, graph.colors[i]]) tmp = sorted(tmp, key=lambda x: x[0]) tmp.reverse() stack = [] for item in tmp: stack.append(item) return path def solveTSP(graph, N, debug = False): opt_result = None opt_length = sys.maxint for i in range(N): result_out = MSTGreedy(graph, i) # cost length = util.getCost(graph, result_out) if opt_length > length and len(result_out) == N: opt_length = length opt_result = result_out if opt_result == None: return opt_result out = map(lambda x: x[0] + 1, opt_result) if debug: print "Number of nodes: %d" % N print "Number of nodes in path: %d" % len(out) print "Cost: ", opt_length print "Result: ", opt_result return out def processCase(c, perm, d, name): # check it's valid v = [0] * N prev = 'X' count = 0 for i in xrange(N): if v[perm[i]-1] == 1: if name != "Random": print name + " Your answer must be a permutation of {1,...,N}." 
return -1 v[perm[i]-1] = 1 cur = c[perm[i]-1] if cur == prev: count += 1 else: prev = cur count = 1 if count > 3: if name != "Random": print name + " Your tour cannot visit more than 3 same colored cities consecutively." return -1 cost = 0 for i in xrange(N-1): cur = perm[i]-1 next = perm[i+1]-1 cost += d[cur][next] return cost def genRandom(c, d): x = [i + 1 for i in range(N)] random.shuffle(x) while processCase(c, x, d, "Random") == -1: random.shuffle(x) return x if __name__ == "__main__": T = 495 # number of test cases fout = open ("answer2.out", "w") for t in xrange(1, T+1): fin = open("instances/"+str(t) + ".in", "r") N = int(fin.readline()) d = [[] for i in range(N)] for i in xrange(N): d[i] = [int(x) for x in fin.readline().split()] c = fin.readline() graph = util.Graph(N, d, c) print "Input " + str(t) if N <= 10: #Brute Force result = findPath() result = createPath(result) sol = [] for i in range(len(result)): sol.append(result[i][0]+1) l = processCase(c,sol,d,"Brute force") for k in range(len(result)): fout.write("{0} ".format(str(sol[k]))) if k == N -1: fout.write("{0}\n".format(str(sol[k]))) print ">>>>>>>>>>>>>> Bruteforce " + str(N) + " " + str(l) print "Path: " + str(sol) else: #Random resultRand = genRandom(c, d) lengthRand = processCase(c,resultRand,d, "Rand") #MST resultMST = MSTalg(graph) sol = createPathMST(graph, resultMST) resultMST = [] for i in range(len(sol)): resultMST.append(sol[i][0]+1) lengthMST = processCase(c,resultMST,d, "MST") #Greedy resultGreedy = solveTSP(graph, N) if resultGreedy == None: resultGreedy = genRandom(c, d) lengthGreedy = processCase(c,resultGreedy,d, "Greedy") if lengthGreedy == -1: resultGreedy = genRandom(c,d) lengthGreedy = processCase(c, resultGreedy, d, "Greedy") #Local Search initial = createRandomPath(graph) sol, length, newPath = localSearch2(graph, initial) resultLocal = [] for i in range(len(sol)): resultLocal.append(sol[i]+1) lengthLocal = processCase(c, resultLocal, d, "Local Search") if lengthLocal == 
-1: resultLocal = genRandom(c,d) lengthLocal = processCase(c, resultLocal, d, "Local Search") bestScore = min(lengthMST, lengthGreedy, lengthLocal, lengthRand) if bestScore == lengthLocal: print ">>>>>>>>>>>>>>>> Local Search " + str(N) + " " + str(lengthLocal) print "Path: " + str(resultLocal) result = resultLocal elif bestScore == lengthMST: print ">>>>>>>>>>>>>>>> MST " + str(N) + " " + str(lengthMST) print "Path: " + str(resultMST) result = resultMST elif bestScore == lengthGreedy: result = resultGreedy print ">>>>>>>>>>>>>>>> Greedy " + str(N) + " " + str(lengthGreedy) print "Path: " + str(resultGreedy) else: print "Nothing works so GENERATING RANDOM YOLO" result = resultRand for k in range(len(result)): fout.write("{0} ".format(str(result[k]))) if k == N -1: fout.write("{0}\n".format(str(result[k]))) fout.close()
#!/bin/sh # CYBERWATCH SAS - 2017 # # Security fix for USN-2152-1 # # Security announcement date: 2014-03-24 00:00:00 UTC # Script generation date: 2017-01-01 21:03:45 UTC # # Operating System: Ubuntu 12.04 LTS # Architecture: x86_64 # # Vulnerable packages fix on version: # - apache2.2-bin:2.2.22-1ubuntu1.5 # # Last versions recommanded by security team: # - apache2.2-bin:2.2.22-1ubuntu1.11 # # CVE List: # - CVE-2013-6438 # - CVE-2014-0098 # # More details: # - https://www.cyberwatch.fr/vulnerabilites # # Licence: Released under The MIT License (MIT), See LICENSE FILE sudo apt-get install --only-upgrade apache2.2-bin=2.2.22-1ubuntu1.11 -y
<reponame>abul-abul/jewelry
// Site-wide UI wiring: header panel toggling, sort/pagination reset,
// AJAX CSRF setup, newsletter signup, and mega-menu behavior.
$(document).ready(function () {

    // Clicking the Facebook header icon (when open) closes the login panel;
    // the mobile navbar is always collapsed.
    $(document).on('click', '.header_fac', function(){
        if($(this).hasClass('open')) $('.login-wrapper').removeClass('open');
        $('.navbar-toggle').addClass('collapsed');
        $('.navbar-collapse1').removeClass('in');
    });

    // Return sourceURL with the query-string parameter `key` removed.
    // NOTE(review): currently unused — referenced only by the commented-out
    // sort handler below.
    function removeParam(key, sourceURL) {
        var rtn = sourceURL.split("?")[0],
            param,
            params_arr = [],
            queryString = (sourceURL.indexOf("?") !== -1) ? sourceURL.split("?")[1] : "";
        if (queryString !== "") {
            params_arr = queryString.split("&");
            for (var i = params_arr.length - 1; i >= 0; i -= 1) {
                param = params_arr[i].split("=")[0];
                if (param === key) {
                    params_arr.splice(i, 1);
                }
            }
            rtn = rtn + "?" + params_arr.join("&");
        }
        return rtn;
    }

    // var originalURL = window.location.href;

    // Changing the sort order resets pagination back to page 1.
    // NOTE(review): relies on the jQuery.query plugin being loaded globally.
    $(document).on('click', 'ul.dropdown-menu.sort li a', function(){
        window.location.search = jQuery.query.set("page", 1);
    });

    // $('ul.dropdown-menu.sort li a').click(function(e){
    //     $( "ul.dropdown-menu.sort li:nth-child(2)").trigger('click');
    //     return false;
    //     location.href=location.href.replace(/&?((crop-image)|(subscription))=([^&]$|[^&]*)/gi, "");
    //     originalURL = removeParam("page", window.location.href);
    //     window.location.href = originalURL;
    // });

    // Clicking the login icon closes the Facebook panel and the mobile navbar.
    $(document).on('click', '.header_login', function(){
        if($('.login-wrapper').hasClass('open')) $('.header_fac').removeClass('open');
        $('.navbar-toggle').addClass('collapsed');
        $('.navbar-collapse1').removeClass('in');
    });

    // Opening the mobile navbar closes both header panels.
    $(document).on('click', '.navbar-toggle', function(){
        if($('.navbar-toggle').hasClass('collapsed')) $('.header_fac').removeClass('open');
        $('.login-wrapper').removeClass('open');
    });

    // Send the CSRF token (Laravel-style meta tag) with every jQuery AJAX request.
    $.ajaxSetup({
        headers: {
            'X-CSRF-TOKEN': $('meta[name="csrf-token"]').attr('content'),
        }
    });

    var CSRF_TOKEN = $('meta[name="csrf-token"]').attr('content');

    // Newsletter signup: POST the e-mail address, then show the success or
    // error message in whichever form variant (page vs. footer) was submitted.
    $('.form-newsletter').submit(function(e) {
        e.preventDefault();
        var current = $(this);
        $.ajax({
            url: '/get-newsletter',
            type: "post",
            data: {
                user_email: $('input[name=user_email]').val(),
                _token: CSRF_TOKEN
            },
            dataType: 'JSON'
        }).done(function (data) {
            var parsed = jQuery.parseJSON(JSON.stringify(data));
            // Reset both message slots before showing the relevant one.
            $('.newsletter-success').hide();
            $('.newsletter-danger').hide();
            if(current.parent().hasClass('not-footer-letter')) {
                if(parsed.success){
                    $('.not-footer-letter .newsletter-success').show();
                    $('.not-footer-letter .newsletter-success').html(parsed.message);
                } else {
                    $('.not-footer-letter .newsletter-danger').show();
                    $('.not-footer-letter .newsletter-danger').html(parsed.error);
                }
            } else {
                if(parsed.success){
                    $('.newsletter-success.footer-one').show();
                    $('.newsletter-success.footer-one').html(parsed.message);
                } else {
                    $('.newsletter-danger.footer-one').show();
                    $('.newsletter-danger.footer-one').html(parsed.error);
                }
            }
        });
        return false;
    });

    // True when the jQuery selection matched at least one element.
    $.fn.exists = function () {
        return this.length !== 0;
    };

    // Attach auth headers to a raw XHR.
    // NOTE(review): `sessionId` is not defined anywhere in this file — confirm
    // it is provided globally before relying on this helper.
    function setHeader(xhr) {
        xhr.setRequestHeader('Authorization', sessionId);
        xhr.setRequestHeader('X-PrettyPrint', '1');
    }

    // Open mega-menu dropdowns on hover...
    $('li.dropdown.megamenu').hover(
        function() {
            $(this).addClass('open');
        },
        function() {
            $(this).removeClass('open');
        }
    );

    // ...and navigate to the menu link's target on click.
    $('li.dropdown.megamenu').click(
        function(){
            var url = $(this).children('a').attr('href');
            window.location.href = url;
        }
    );
});
def detect_cycle(linked_list):
    """Return True if `linked_list` contains a cycle, else False.

    Floyd's tortoise-and-hare: a slow pointer advances one node per step
    while a fast pointer advances two; they can only meet again if the
    list loops back on itself.

    Fixes two defects in the original: the helpers assigned to their local
    parameters (so the pointers never advanced at the call site), and the
    lowercase names ``true``/``false`` raised NameError.
    """
    slow = linked_list.head
    fast = linked_list.head

    # Advance until the fast pointer runs off the end (acyclic list).
    while fast is not None and fast.next is not None:
        slow = iterate_slow_pointer(slow)
        fast = iterate_fast_pointer(fast)
        # The pointers meeting at the very same node proves a cycle.
        if fast is not None and same_node(slow, fast):
            return True
    return False


def iterate_slow_pointer(slow_pointer):
    """Return the node one step ahead of `slow_pointer`.

    The advanced node must be *returned*; rebinding the parameter would
    have no effect at the call site.
    """
    return slow_pointer.next


def iterate_fast_pointer(fast_pointer):
    """Return the node two steps ahead of `fast_pointer`, or None."""
    fast_pointer = fast_pointer.next
    if fast_pointer is not None:
        fast_pointer = fast_pointer.next
    return fast_pointer


def same_node(slow_pointer, fast_pointer):
    """True when both names refer to the same node object (identity)."""
    return slow_pointer is fast_pointer
def invert_case(s):
    """Return `s` with the case of every character flipped.

    Lowercase characters become uppercase; everything else (uppercase,
    digits, punctuation) is passed through ``lower()``, which leaves
    non-cased characters unchanged.
    """
    flipped = []
    for ch in s:
        flipped.append(ch.upper() if ch.islower() else ch.lower())
    return "".join(flipped)
<reponame>tokzy/pet-mangement-system-graphql-nest-
import { UseGuards } from '@nestjs/common';
import { Args, Int, Mutation, Query, Resolver } from '@nestjs/graphql';
import { JwtAuthGuard } from '../auth/guards/jwt-auth.guard';
import { deleteResponse } from '../user/dto/user-delete-response.dto';
import { UpdateResponse } from '../user/dto/user-update-response';
import { CreatePetInput } from './dto/create-pet-input.dto';
import { deletePetResponse } from './dto/delete-pet-response.dto';
import { updatePetInput } from './dto/update-pet-input.dto';
import { updatePetResponse } from './dto/update-pet-response.dto';
import { Pet } from './entities/pet.entitiy';
import { PetsService } from './pets.service';

/**
 * GraphQL resolver exposing CRUD operations for Pet entities.
 * Every operation is protected by JwtAuthGuard: a valid JWT is required
 * on the request. All persistence work is delegated to PetsService.
 */
@Resolver((of) => Pet)
export class PetsResolver {
  constructor(private readonly petService: PetsService) {}

  /** Create a pet from the given input and return the persisted entity. */
  @Mutation(() => Pet)
  @UseGuards(JwtAuthGuard)
  async CreatePet(
    @Args('CreatePetInput') CreatePetInput: CreatePetInput,
  ): Promise<Pet> {
    return await this.petService.CreatePets(CreatePetInput);
  }

  /** Return every pet in the store. */
  @Query(() => [Pet])
  @UseGuards(JwtAuthGuard)
  async getAllPets(): Promise<Pet[]> {
    return this.petService.getAllPets();
  }

  /** Fetch a single pet by its numeric id. */
  @Query(() => Pet)
  @UseGuards(JwtAuthGuard)
  async getOnePet(@Args('id', { type: () => Int }) id: number): Promise<Pet> {
    return await this.petService.getSinglePet(id);
  }

  /** Update an existing pet; the service reports the outcome. */
  @Mutation(() => updatePetResponse)
  @UseGuards(JwtAuthGuard)
  async updatePets(
    @Args('updatePetInput') updatePetInput: updatePetInput,
  ): Promise<UpdateResponse> {
    return this.petService.updatePets(updatePetInput);
  }

  /** Delete a pet by id; the service reports the outcome. */
  @Mutation(() => deletePetResponse)
  @UseGuards(JwtAuthGuard)
  async deletePets(
    @Args('id', { type: () => Int }) id: number,
  ): Promise<deleteResponse> {
    return this.petService.deletePets(id);
  }
}
/**
 * This program and the accompanying materials
 * are made available under the terms of the License
 * which accompanies this distribution in the file LICENSE.txt
 */
package com.archimatetool.editor.diagram.actions;

import java.util.List;

import org.eclipse.gef.EditPart;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.commands.CompoundCommand;
import org.eclipse.gef.ui.actions.SelectionAction;
import org.eclipse.ui.IWorkbenchPart;

import com.archimatetool.model.IDiagramModelContainer;
import com.archimatetool.model.IDiagramModelObject;
import com.archimatetool.model.ILockable;


/**
 * Bring To Front Action
 *
 * Moves the selected (unlocked) diagram objects to the front of their
 * parents' child lists.
 *
 * @author <NAME>
 */
public class BringToFrontAction extends SelectionAction {

    public static final String ID = "BringToFrontAction"; //$NON-NLS-1$
    public static final String TEXT = Messages.BringToFrontAction_0;

    public BringToFrontAction(IWorkbenchPart part) {
        super(part);
        setText(TEXT);
        setId(ID);
    }

    /**
     * Enabled only when the selection is non-empty, every selected object is
     * an EditPart, and the resulting compound command reports it can execute.
     */
    @Override
    protected boolean calculateEnabled() {
        List<?> selected = getSelectedObjects();

        // Quick checks
        if(selected.isEmpty()) {
            return false;
        }

        for(Object object : selected) {
            if(!(object instanceof EditPart)) {
                return false;
            }
        }

        Command command = createCommand(selected);
        if(command == null) {
            return false;
        }

        return command.canExecute();
    }

    @Override
    public void run() {
        execute(createCommand(getSelectedObjects()));
    }

    /**
     * Build one BringToFrontCommand per selected, unlocked diagram object,
     * wrapped in a CompoundCommand; unwrap() collapses the trivial cases.
     */
    private Command createCommand(List<?> selection) {
        CompoundCommand result = new CompoundCommand(Messages.BringToFrontAction_0);

        for(Object object : selection) {
            if(object instanceof EditPart) {
                Object model = ((EditPart)object).getModel();

                // Locked objects must not be re-ordered
                if(model instanceof ILockable && ((ILockable)model).isLocked()) {
                    continue;
                }

                if(model instanceof IDiagramModelObject) {
                    result.add(new BringToFrontCommand((IDiagramModelObject)model));
                }
            }
        }

        return result.unwrap();
    }

    /**
     * Command that moves one diagram object to the last index of its
     * parent's child list ("front"), with undo support.
     */
    private static class BringToFrontCommand extends Command {
        private IDiagramModelContainer fParent;
        private IDiagramModelObject fDiagramObject;
        // fNewPos: last index in the parent; fOldPos: current index (-1 until parent known)
        private int fNewPos, fOldPos = -1;

        /*
         * Parent can be null when objects are selected (with marquee tool) and transferred from one container
         * to another and the Diagram Editor updates the enablement state of Actions.
         */
        public BringToFrontCommand(IDiagramModelObject diagramObject) {
            fDiagramObject = diagramObject;
            fParent = (IDiagramModelContainer)fDiagramObject.eContainer();
            if(fParent != null) {
                fNewPos = fParent.getChildren().size() - 1;
                fOldPos = fParent.getChildren().indexOf(fDiagramObject);
            }
            setLabel(Messages.BringToFrontAction_0);
        }

        @Override
        public boolean canExecute() {
            // Nothing to do when the parent is unknown or the object is already frontmost
            return fParent != null && fOldPos < fNewPos;
        }

        @Override
        public void execute() {
            fParent.getChildren().move(fNewPos, fOldPos);
        }

        @Override
        public void undo() {
            fParent.getChildren().move(fOldPos, fNewPos);
        }

        @Override
        public void dispose() {
            // Release references so the model objects can be garbage collected
            fParent = null;
            fDiagramObject = null;
        }
    }
}
#!/bin/bash
#
# Fetches 3rd party packages from github ready to be uploaded to device.
#

# Abort on the first failed download or an unset variable.
set -eu

PACKAGES="
https://github.com/adafruit/micropython-adafruit-ads1015/raw/master/ads1x15.py
https://github.com/neliogodoi/MicroPython-SI1145/raw/master/si1145.py
https://github.com/catdog2/mpy_bme280_esp8266/raw/master/bme280.py
"

# ${PACKAGES} is deliberately unquoted: word-splitting yields one URL per
# iteration (URLs contain no whitespace).
for fname in ${PACKAGES}
do
    # -f: fail with a non-zero exit on HTTP errors instead of saving the
    #     error page as if it were the module
    # -L: follow GitHub's redirect to the raw content
    # -O: name the local file after the remote one
    curl -fL -O "$fname"
done
package com.twelvemonkeys.servlet;

import com.twelvemonkeys.io.NullOutputStream;
import org.junit.Test;

import java.io.PrintWriter;

import static org.junit.Assert.*;

/**
 * ServletConfigExceptionTestCase
 *
 * Exercises ServletConfigException's cause handling across both the legacy
 * ServletException API (getRootCause) and the standard Throwable API
 * (getCause/initCause), and verifies stack-trace printing never throws.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 * @author last modified by $Author: haku $
 * @version $Id: //depot/branches/personal/haraldk/twelvemonkeys/release-2/twelvemonkeys-servlet/src/test/java/com/twelvemonkeys/servlet/ServletConfigExceptionTestCase.java#2 $
 */
public class ServletConfigExceptionTestCase {

    /** Printing a stack trace (even to a null sink) must not throw. */
    @Test
    public void testThrowCatchPrintStacktrace() {
        try {
            throw new ServletConfigException("FooBar!");
        }
        catch (ServletConfigException e) {
            e.printStackTrace(new PrintWriter(new NullOutputStream()));
        }
    }

    /** With no cause supplied, both old and new cause accessors return null. */
    @Test
    public void testThrowCatchGetNoCause() {
        try {
            throw new ServletConfigException("FooBar!");
        }
        catch (ServletConfigException e) {
            assertEquals(null, e.getRootCause()); // Old API
            assertEquals(null, e.getCause());
            e.printStackTrace(new PrintWriter(new NullOutputStream()));
        }
    }

    /** initCause(null) must be accepted and leave both accessors null. */
    @Test
    public void testThrowCatchInitCauseNull() {
        try {
            ServletConfigException e = new ServletConfigException("FooBar!");
            e.initCause(null);
            throw e;
        }
        catch (ServletConfigException e) {
            assertEquals(null, e.getRootCause()); // Old API
            assertEquals(null, e.getCause());
            e.printStackTrace(new PrintWriter(new NullOutputStream()));
        }
    }

    /** A cause set via initCause must be visible through getCause. */
    @Test
    public void testThrowCatchInitCause() {
        //noinspection ThrowableInstanceNeverThrown
        Exception cause = new Exception();
        try {
            ServletConfigException exception = new ServletConfigException("FooBar!");
            exception.initCause(cause);
            throw exception;
        }
        catch (ServletConfigException e) {
            // NOTE: We don't know how the superclass is implemented, so we assume nothing here
            //assertEquals(null, e.getRootCause()); // Old API
            assertSame(cause, e.getCause());
            e.printStackTrace(new PrintWriter(new NullOutputStream()));
        }
    }

    /** An explicit null cause in the constructor behaves like no cause. */
    @Test
    public void testThrowCatchGetNullCause() {
        try {
            throw new ServletConfigException("FooBar!", null);
        }
        catch (ServletConfigException e) {
            assertEquals(null, e.getRootCause()); // Old API
            assertEquals(null, e.getCause());
            e.printStackTrace(new PrintWriter(new NullOutputStream()));
        }
    }

    /** A constructor-supplied cause is visible through both APIs. */
    @Test
    public void testThrowCatchGetCause() {
        IllegalStateException cause = new IllegalStateException();
        try {
            throw new ServletConfigException("FooBar caused by stupid API!", cause);
        }
        catch (ServletConfigException e) {
            assertSame(cause, e.getRootCause()); // Old API
            assertSame(cause, e.getCause());
            e.printStackTrace(new PrintWriter(new NullOutputStream()));
        }
    }
}
#!/usr/bin/env bash # Delete previous artifacts of the plugin rm -rf localRepo/* # Run test on plugin project and deploy it to localRepo folder ./gradlew -p localise-plugin test uploadArchives
/** * */ package org.fhwa.c2cri.logger; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.FileInputStream; import java.io.FileWriter; import java.io.InputStreamReader; import java.io.Serializable; import org.apache.log4j.Logger; import org.apache.log4j.FileAppender; import org.apache.log4j.xml.XMLLayout; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; /** * Maintains the RI logging settings to be used during RI operation and testing. * It also provides for log4j configuration for logging to the RI log file. * * @author TransCore ITS, LLC * Last Updated: 1/8/2014 */ public class RILoggingInfo implements Serializable{ /** flag which indicates that all Actions and Messages should be stored in the log file. **/ private boolean logActionsAndMessages; /** flag which indicates that only Messages should be logged *. */ private boolean logMessagesOnly; /** flag which indicates that only failure events should be logged *. */ private boolean logFailuresOnly; /** The ri appender. */ private FileAppender riAppender; /** The log. */ private Logger log; /** The log file. */ private String logFile; //Basic Constructor for the RILoggingInfo Class /** * Instantiates a new rI logging info. * * Pre-Conditions: N/A * Post-Conditions: N/A */ public RILoggingInfo(){ this(false, false, true); } //Constructor with arguments /** * Instantiates a new rI logging info. * * Pre-Conditions: N/A * Post-Conditions: N/A * * @param messagesOnly the messages only * @param failuresOnly the failures only * @param actionsAndMessages the actions and messages */ public RILoggingInfo(boolean messagesOnly, boolean failuresOnly, boolean actionsAndMessages){ this.logActionsAndMessages = actionsAndMessages; this.logFailuresOnly = failuresOnly; this.logMessagesOnly = messagesOnly; } /** * Checks if is log actions and messages. 
* * Pre-Conditions: N/A * Post-Conditions: N/A * * @return the logActionsAndMessages */ public boolean isLogActionsAndMessages() { return logActionsAndMessages; } /** * Sets the log actions and messages. * * @param logActionsAndMessages the logActionsAndMessages to set */ public void setLogActionsAndMessages(boolean logActionsAndMessages) { this.logActionsAndMessages = logActionsAndMessages; } /** * Checks if is log failures only. * * Pre-Conditions: N/A * Post-Conditions: N/A * * @return the logFailuresOnly */ public boolean isLogFailuresOnly() { return logFailuresOnly; } /** * Sets the log failures only. * * @param logFailuresOnly the logFailuresOnly to set */ public void setLogFailuresOnly(boolean logFailuresOnly) { this.logFailuresOnly = logFailuresOnly; } /** * Checks if is log messages only. * * Pre-Conditions: N/A * Post-Conditions: N/A * * @return the logMessagesOnly */ public boolean isLogMessagesOnly() { return logMessagesOnly; } /** * Sets the log messages only. * * @param logMessagesOnly the logMessagesOnly to set */ public void setLogMessagesOnly(boolean logMessagesOnly) { this.logMessagesOnly = logMessagesOnly; } /** * verifies that the parameters that are to be set are valid. */ public void verifyLoggingParameters(){ } /** * Sets up log4j logging parameters for the test. log4j can be set to log various test levels. Based on the logging parameters these levels will be set appropriately. 
* Next this method, * logs the current Test Configuration in the Test Log * logs the name of the Test Operator in the Test Log * logs the unique test name in the Test Log * logs the current date and time in the Test Log * logs any descriptive test notes in the Test Log * * @param logFileName the log file name * @param testConfiguration the test configuration * @param logDescription the log description */ public void configureLogging(String logFileName,org.fhwa.c2cri.testmodel.TestConfiguration testConfiguration, String logDescription){ try{ DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss"); Date date = new Date(); logFile = logFileName + "." + dateFormat.format(date)+".xml"; riAppender = new FileAppender(); log = Logger.getLogger("net.sf.jameleon"); log.addAppender(riAppender); riAppender.setName("STDOUT"); riAppender.setFile(logFile); XMLLayout xmlLayout = new XMLLayout(); riAppender.setLayout(xmlLayout); riAppender.activateOptions(); System.out.println("Set the file for riAppender to --> " + logFile); System.out.println("STDOUT riAppender was created and successfully activated"); String initLogEvent = ""; initLogEvent = initLogEvent+"<initEvent>"; initLogEvent = initLogEvent+"<fileName>"+logFile+"</fileName>"; initLogEvent = initLogEvent+"<startTime>"+dateFormat.format(date)+"</startTime>"; initLogEvent = initLogEvent+"<creator>"+System.getProperty("user.name")+"</creator>"; initLogEvent = initLogEvent+"<description>"+ logDescription+"</description>"; initLogEvent = initLogEvent+"</initEvent>"; log.info(initLogEvent); } catch (Exception ex) { System.out.println("No STDOUT Appender was found"); } } /** * Stop logging. * * Pre-Conditions: N/A * Post-Conditions: N/A */ public void stopLogging(){ //remove the custom appender to stop logging to the file. 
log.removeAppender(riAppender); System.out.println("Now Altering file "+ logFile + " to remove log4j: references"); // readReplace(logFile, "log4j:", ""); System.out.println("Now Finished creating the LogXML file "+ logFile + ".xml"); } /** * Log event. * * Pre-Conditions: N/A * Post-Conditions: N/A * * @param theEvent the the event */ public static void logEvent(String theEvent){ Logger log = Logger.getLogger("net.sf.jameleon"); log.info(theEvent); } /** * Replaces the log4j: namespace references from the log file and creates a new * version which is a well formed XML Document. * * @param fname the fname * @param oldPattern the old pattern * @param replPattern the repl pattern */ public static void readReplace(String fname, String oldPattern, String replPattern){ String line; StringBuffer sb = new StringBuffer(); try { FileInputStream fis = new FileInputStream(fname); BufferedReader reader=new BufferedReader ( new InputStreamReader(fis)); while((line = reader.readLine()) != null) { line = line.replaceAll(oldPattern, replPattern); sb.append(line+"\n"); } reader.close(); BufferedWriter out=new BufferedWriter ( new FileWriter(fname)); out.write("<?xml version=\"1.0\" ?>\n"); out.write("<logFile>\n"); out.write("<eventSet version=\"1.2\" xmlns:log4j=\"http://jakarta.apache.org/log4j/\">"); out.write(sb.toString()); out.write("</eventSet>\n"); out.write("</logFile>\n"); out.close(); } catch (Throwable e) { System.err.println("*** exception ***"); } } }
#!/usr/bin/env bash
#
# Decrypt every vault YAML file under vault/ (skipping *sample.yml) using
# the password stored in .vault-pass.txt next to the working directory.

set -euo pipefail

# -print0 with `read -d ''` keeps filenames containing spaces or newlines
# intact — the original `for line in $FILES` word-split them.
find vault/ -type f -iname "*.yml*" ! -iname "*sample.yml" -print0 |
  while IFS= read -r -d '' file; do
    ansible-vault decrypt --vault-password-file "${PWD}/.vault-pass.txt" "$file"
  done
/** * Copyright 2015 The AMP HTML Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import {makeCorrelator} from './correlator'; import {validateData, loadScript} from '../../3p/3p'; import {dev} from '../../src/log'; import {setStyles} from '../../src/style'; import {getMultiSizeDimensions} from './utils'; /** * @enum {number} * @private */ const GladeExperiment = { NO_EXPERIMENT: 0, GLADE_CONTROL: 1, GLADE_EXPERIMENT: 2, GLADE_OPT_OUT: 3, }; /** * @param {!Window} global * @param {!Object} data */ export function doubleclick(global, data) { // TODO: check mandatory fields validateData(data, [], [ 'slot', 'targeting', 'categoryExclusions', 'tagForChildDirectedTreatment', 'cookieOptions', 'overrideWidth', 'overrideHeight', 'loadingStrategy', 'consentNotificationId', 'useSameDomainRenderingUntilDeprecated', 'experimentId', 'multiSize', 'multiSizeValidation', 'ampSlotIndex', ]); if (global.context.clientId) { // Read by GPT/Glade for GA/Doubleclick integration. 
global.gaGlobal = { cid: global.context.clientId, hid: global.context.pageViewId, }; } centerAd(global); const gptFilename = selectGptExperiment(data); writeAdScript(global, data, gptFilename); } /** * @param {!Window} global * @param {!Object} data * @param {!GladeExperiment} gladeExperiment * @param {!string} url */ function doubleClickWithGpt(global, data, gladeExperiment, url) { const dimensions = [[ parseInt(data.overrideWidth || data.width, 10), parseInt(data.overrideHeight || data.height, 10), ]]; // Handle multi-size data parsing, validation, and inclusion into dimensions. const multiSizeDataStr = data.multiSize || null; if (multiSizeDataStr) { const primarySize = dimensions[0]; const primaryWidth = primarySize[0]; const primaryHeight = primarySize[1]; getMultiSizeDimensions( multiSizeDataStr, primaryWidth, primaryHeight, (data.multiSizeValidation || 'true') == 'true', dimensions); } loadScript(global, url, () => { global.googletag.cmd.push(() => { const googletag = global.googletag; const pubads = googletag.pubads(); const slot = googletag.defineSlot(data.slot, dimensions, 'c') .addService(pubads); if (gladeExperiment === GladeExperiment.GLADE_CONTROL) { pubads.markAsGladeControl(); } else if (gladeExperiment === GladeExperiment.GLADE_OPT_OUT) { pubads.markAsGladeOptOut(); } if (data['experimentId']) { const experimentIdList = data['experimentId'].split(','); pubads.forceExperiment = pubads.forceExperiment || function() {}; experimentIdList && experimentIdList.forEach(eid => pubads.forceExperiment(eid)); } pubads.markAsAmp(); pubads.set('page_url', global.context.canonicalUrl); pubads.setCorrelator(Number(getCorrelator(global))); googletag.enableServices(); if (data.categoryExclusions) { if (Array.isArray(data.categoryExclusions)) { for (let i = 0; i < data.categoryExclusions.length; i++) { slot.setCategoryExclusion(data.categoryExclusions[i]); } } else { slot.setCategoryExclusion(data.categoryExclusions); } } if (data.cookieOptions) { 
pubads.setCookieOptions(data.cookieOptions); } if (data.tagForChildDirectedTreatment != undefined) { pubads.setTagForChildDirectedTreatment( data.tagForChildDirectedTreatment); } if (data.targeting) { for (const key in data.targeting) { slot.setTargeting(key, data.targeting[key]); } } pubads.addEventListener('slotRenderEnded', event => { const primaryInvSize = dimensions[0]; const pWidth = primaryInvSize[0]; const pHeight = primaryInvSize[1]; const returnedSize = event.size; const rWidth = returnedSize ? returnedSize[0] : null; const rHeight = returnedSize ? returnedSize[1] : null; let creativeId = event.creativeId || '_backfill_'; // If the creative is empty, or either dimension of the returned size // is larger than its counterpart in the primary size, then we don't // want to render the creative. if (event.isEmpty || returnedSize && (rWidth > pWidth || rHeight > pHeight)) { global.context.noContentAvailable(); creativeId = '_empty_'; } else { // We only want to call renderStart with a specific size if the // returned creative size matches one of the multi-size sizes. let newSize; for (let i = 1; i < dimensions.length; i++) { // dimensions[0] is the primary or overridden size. if (dimensions[i][0] == rWidth && dimensions[i][1] == rHeight) { newSize = { width: rWidth, height: rHeight, }; break; } } global.context.renderStart(newSize); } global.context.reportRenderedEntityIdentifier('dfp-' + creativeId); }); // Exported for testing. 
global.document.getElementById('c')['slot'] = slot; googletag.display('c'); }); }); } /** * @param {!Window} global * @param {!Object} data * @param {!GladeExperiment} gladeExperiment */ function doubleClickWithGlade(global, data, gladeExperiment) { const requestHeight = parseInt(data.overrideHeight || data.height, 10); const requestWidth = parseInt(data.overrideWidth || data.width, 10); const jsonParameters = {}; if (data.categoryExclusions) { jsonParameters.categoryExclusions = data.categoryExclusions; } if (data.cookieOptions) { jsonParameters.cookieOptOut = data.cookieOptions; } if (data.tagForChildDirectedTreatment != undefined) { jsonParameters.tagForChildDirectedTreatment = data.tagForChildDirectedTreatment; } if (data.targeting) { jsonParameters.targeting = data.targeting; } if (gladeExperiment === GladeExperiment.GLADE_EXPERIMENT) { jsonParameters.gladeEids = '108809102'; } const expIds = data['experimentId']; if (expIds) { jsonParameters.gladeEids = jsonParameters.gladeEids ? jsonParameters.gladeEids + ',' + expIds : expIds; } const slot = global.document.getElementById('c'); slot.setAttribute('data-glade', ''); slot.setAttribute('data-amp-ad', ''); slot.setAttribute('data-ad-unit-path', data.slot); if (Object.keys(jsonParameters).length > 0) { slot.setAttribute('data-json', JSON.stringify(jsonParameters)); } slot.setAttribute('data-page-url', global.context.canonicalUrl); // Center the ad in the container. 
slot.setAttribute('height', requestHeight); slot.setAttribute('width', requestWidth); slot.addEventListener('gladeAdFetched', event => { if (event.detail.empty) { global.context.noContentAvailable(); } global.context.renderStart(); }); window.glade = {correlator: getCorrelator(global)}; loadScript(global, 'https://securepubads.g.doubleclick.net/static/glade.js'); } /** * @param {!Window} global * @return {number} */ function getCorrelator(global) { return makeCorrelator(global.context.clientId, global.context.pageViewId); } /** * @param {!Window} global */ function centerAd(global) { setStyles(dev().assertElement(global.document.getElementById('c')), { top: '50%', left: '50%', bottom: '', right: '', transform: 'translate(-50%, -50%)', }); } /** * @param {!Object} data * @return {!string} */ export function selectGptExperiment(data) { const fileExperimentConfig = { 21060540: 'gpt_sf_a.js', 21060541: 'gpt_sf_b.js', }; // Note that reduce will return the first item that matches but it is // expected that only one of the experiment ids will be present. let expFilename; (data['experimentId'] || '').split(',').forEach( val => expFilename = expFilename || fileExperimentConfig[val]); return expFilename; } /** * @param {!Window} global * @param {!Object} data * @param {!string} gptFilename */ export function writeAdScript(global, data, gptFilename) { const url = `https://www.googletagservices.com/tag/js/${gptFilename || 'gpt.js'}`; if (gptFilename || data.useSameDomainRenderingUntilDeprecated != undefined || data.multiSize) { doubleClickWithGpt(global, data, GladeExperiment.GLADE_OPT_OUT, url); } else { const experimentFraction = 0.1; const dice = global.Math.random(); const href = global.context.location.href; if ((href.indexOf('google_glade=0') > 0 || dice < experimentFraction) && href.indexOf('google_glade=1') < 0) { doubleClickWithGpt(global, data, GladeExperiment.GLADE_CONTROL, url); } else { const exp = (dice < 2 * experimentFraction) ? 
GladeExperiment.GLADE_EXPERIMENT : GladeExperiment.NO_EXPERIMENT; doubleClickWithGlade(global, data, exp); } } }
<gh_stars>0
# Fix the code

# Read the three factors, converting each entry from string to integer.
number1 = int(input("Please type in the first number: "))
number2 = int(input("Please type in the second number: "))
number3 = int(input("Please type in the third number: "))

# Multiply all three and report the result.
product = number1 * number2 * number3
print("The product is", product)
#!/bin/bash #SBATCH --partition=aaiken #SBATCH --tasks=1 #SBATCH --nodes=1 #SBATCH --cpus-per-task=10 #SBATCH --gres=gpu:4 #SBATCH --exclusive #SBATCH --time=00:05:00 source /home/groups/aaiken/eslaught/tutorial/env.sh srun regent 6.rg -ll:cpu 1
package org.baade.eel.core.utils;

import java.util.UUID;

/**
 * ID generation utility.
 *
 * <p>Ids are built by concatenating optional fixed prefix numbers (encoded as
 * 4 hex digits each) with a random UUID's 32 hex digits, then mapping every
 * 4-hex-digit group onto one character of a 62-symbol alphabet.  An id with
 * no prefix is therefore 8 characters long; each prefix number adds one
 * character.
 *
 * Created by zz on 2017/5/27.
 */
public class IDUtils {

    /** Alphabet for id characters: a-z, 0-9, A-Z (62 symbols). */
    private static final String[] CHARACTERS = new String[]{
            "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m",
            "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z",
            "0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
            "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M",
            "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z",
    };

    /**
     * Generate a unique id.
     *
     * @param fixNums optional fixed prefix numbers (e.g. server/zone info);
     *                each value must fit in 16 bits, see {@link #int2Hex(int)}
     * @return the unique id string
     */
    public static String generate(int... fixNums) {
        // StringBuilder instead of repeated String concatenation in a loop.
        StringBuilder hex = new StringBuilder();
        if (fixNums != null) {
            for (int fixNum : fixNums) {
                hex.append(int2Hex(fixNum));
            }
        }
        hex.append(UUID.randomUUID().toString().replace("-", ""));
        return hex2Str(hex.toString());
    }

    /**
     * Generate a unique id with no fixed prefix.
     *
     * @return the unique id string (8 characters)
     */
    public static String generate() {
        return generate(null);
    }

    /**
     * Convert an int to a zero-padded 4-digit hex string.
     * Note: the value must not exceed 65535, otherwise it no longer fits in
     * 4 hex digits and the resulting groups lose alignment.
     *
     * @param i the int value
     * @return 4-digit hex string
     */
    private static String int2Hex(int i) {
        StringBuilder hexStr = new StringBuilder(Integer.toHexString(i));
        while (hexStr.length() < 4) {
            hexStr.insert(0, '0');
        }
        return hexStr.toString();
    }

    /**
     * Map a hex string onto id characters, 4 hex digits per character
     * (any trailing remainder of fewer than 4 digits is dropped).
     *
     * @param str hex string
     * @return id string
     */
    private static String hex2Str(String str) {
        StringBuilder buff = new StringBuilder();
        while (str.length() >= 4) {
            String s = str.substring(0, 4);
            int index = Integer.parseInt(s, 16);
            buff.append(CHARACTERS[index % CHARACTERS.length]);
            str = str.substring(4);
        }
        return buff.toString();
    }
}
<gh_stars>0 #ifndef AETHER_DIAGNOSTICS_H #define AETHER_DIAGNOSTICS_H #include "symbol_export.h" SHO_PUBLIC void enter_exit(const char *sub_name, int place); SHO_PUBLIC void set_diagnostics_on(int state); #endif /* AETHER_DIAGNOSTICS_H */
package bakesale

/**
 * Mutable inventory of items for sale.
 *
 * @param products the current stock; replaced wholesale on each sale
 */
case class Inventory(var products: Seq[Item]) {

  /**
   * Price for `quantity` units of `product`, or None when the product is
   * unknown or there is not enough stock.
   */
  def checkAvailability(product: String, quantity: Int = 1): Option[Double] =
    products
      .find(_.identifier == product)
      .filter(_.stock >= quantity)
      .map(_.price * quantity)

  /**
   * Sell `quantity` units of `product`, returning the total price.
   *
   * Bug fix: stock is now only decremented when the sale actually succeeds.
   * Previously the stock was reduced even when `checkAvailability` returned
   * None (unknown product has no match, but an insufficient-stock product
   * could be driven negative).
   */
  def sellProduct(product: String, quantity: Int = 1): Option[Double] = {
    val price = checkAvailability(product, quantity)
    if (price.isDefined) {
      products = products.map { item =>
        if (item.identifier == product) item.copy(stock = item.stock - quantity)
        else item
      }
    }
    price
  }
}

/** A sellable item: unique identifier, unit price, and units in stock. */
case class Item(identifier: String, price: Double, stock: Int)
// Wire up the profile-image picker: the visible link forwards its click to
// the hidden file input, and picking a file immediately clicks the upload
// button so the new image is sent right away.
$(document).ready(function () {
  var forwardToFileInput = function (e) {
    e.preventDefault();
    console.log("link");
    console.log($("#profile_img_file"));
    $("#profile_img_file").click();
  };

  $("#profile_img_link").click(forwardToFileInput);

  $("#profile_img_file").change(function () {
    $("#send_profile_img").click();
  });
});
package dht.chord.rpc;

/**
 * Opcodes for the Chord DHT's RPC protocol.  The comment next to each
 * constant documents the message's arguments and, in parentheses, the shape
 * of the reply where one is expected.  Do not reorder: peers may rely on
 * constant identity across the wire.
 */
public enum RPCMessage {
	ERROR,
	OK,
	STORE,               // STORE <key> <value>
	TRANSFER,            // TRANSFER <chord_id> (returns <key:value> ... <key:value>)
	SHUT_DOWN,           // No args
	PUT,                 // PUT <key> <value>
	GET,                 // GET <key> (returns <value>)
	DELETE,              // DELETE <key>
	FIND_SUCCESSOR,      // FIND_SUCCESSOR <chord_id> (returns <chord_id>@<host>:<port>)
	GET_SUCCESSOR_LIST,  // No args (returns <chord_id>@<host>:<port> ... <chord_id>@<host>:<port>)
	NOTIFY,              // NOTIFY <chord_id>@<host>:<port>
	GET_PREDECESSOR,     // No args (returns <chord_id>@<host>:<port>)
	PING,                // No args (returns PONG)
	PONG,                // No args
	NULL                 // No args (Will be transformed to null)
}
// Element-drag helper: attach mouse handlers to a "bar" element that move a
// "target" element by updating its absolute left/top CSS.

// Shared drag state: the target's starting left/top, the mouse-down page
// coordinates, and `flag`, true while a drag is in progress.
var params = {
  left: 0,
  top: 0,
  currentX: 0,
  currentY: 0,
  flag: false
};

// Cross-browser computed-style lookup (old-IE `currentStyle` vs. the
// standard `getComputedStyle`).
var getCss = function(o, key) {
  return o.currentStyle
    ? o.currentStyle[key]
    : document.defaultView.getComputedStyle(o, false)[key];
};

/**
 * Make `target` draggable by dragging `bar` (may be the same element).
 * `callback(left, top)` — optional; invoked with the new position on every
 * mouse move during a drag.
 */
window.startDrag = function(bar, target, callback) {
  if (getCss(target, "left") !== "auto") {
    params.left = getCss(target, "left");
  }
  if (getCss(target, "top") !== "auto") {
    params.top = getCss(target, "top");
  }
  /**
   * Start a drag: record the mouse-down position (pageX/pageY).
   */
  bar.onmousedown = function(event) {
    params.flag = true;
    if (!event) {
      event = window.event;
      // Old IE: suppress text selection while dragging.
      bar.onselectstart = function() {
        return false;
      };
    }
    var e = event;
    // params.currentX = e.clientX;
    // params.currentY = e.clientY;
    console.log('e===',e);
    params.currentX = e.pageX;
    params.currentY = e.pageY;
  };
  /**
   * End the drag and remember the target's final position.
   * (document.onmouseup => bar.onmouseup)
   */
  bar.onmouseup = function() {
    params.flag = false;
    if (getCss(target, "left") !== "auto") {
      params.left = getCss(target, "left");
    }
    if (getCss(target, "top") !== "auto") {
      params.top = getCss(target, "top");
    }
  };
  /**
   * While dragging, move the target by the mouse delta from the drag start.
   * (document.onmousemove => bar.onmousemove)
   */
  bar.onmousemove = function(event) {
    var e = event ? event : window.event;
    if (params.flag) {
      // var nowX = e.clientX,
      //   nowY = e.clientY;
      var nowX = e.pageX,
        nowY = e.pageY;
      var disX = nowX - params.currentX,
        disY = nowY - params.currentY;
      target.style.left = parseInt(params.left) + disX + "px";
      target.style.top = parseInt(params.top) + disY + "px";
      if (typeof callback == "function") {
        callback(
          (parseInt(params.left) || 0) + disX,
          (parseInt(params.top) || 0) + disY
        );
      }
      if (event.preventDefault) {
        event.preventDefault();
      }
      return false;
    }
  };
};
#!/bin/bash
# Copyright (c) 2019 Ivano Coltellacci. All rights reserved.
# Use of this source code is governed by the MIT License that can be found in
# the LICENSE file.
#
# Generates the CA plus all client/server certificates for the lxdk8s
# Kubernetes deployment, using ./cfssl and ./cfssljson inside ./setup.
# Refactored: the seven near-identical CSR heredocs and cfssl invocations
# are factored into write_csr/gen_cert helpers.

set -eu

source ./config/lxdk8s.config

mkdir -p ./setup; cd ./setup

# write_csr <name> <CN> <O> <OU>
# Writes <name>-csr.json with the given subject fields; country, locality,
# state and key parameters are fixed for this deployment.
write_csr() {
  local name=$1 cn=$2 org=$3 ou=$4
  cat > "${name}-csr.json" <<EOF
{
  "CN": "${cn}",
  "key": {
    "algo": "rsa",
    "size": 2048
  },
  "names": [
    {
      "C": "FR",
      "L": "Paris",
      "O": "${org}",
      "OU": "${ou}",
      "ST": "Ile-de-France"
    }
  ]
}
EOF
}

# gen_cert <name> [hostnames]
# Signs <name>-csr.json with the CA under the "kubernetes" profile.  When
# <hostnames> (comma-separated SANs) is given, the certificate is restricted
# to those names.
gen_cert() {
  local name=$1
  if [ $# -ge 2 ]; then
    ./cfssl gencert \
      -ca=ca.pem \
      -ca-key=ca-key.pem \
      -config=ca-config.json \
      -hostname="$2" \
      -profile=kubernetes \
      "${name}-csr.json" | ./cfssljson -bare "${name}"
  else
    ./cfssl gencert \
      -ca=ca.pem \
      -ca-key=ca-key.pem \
      -config=ca-config.json \
      -profile=kubernetes \
      "${name}-csr.json" | ./cfssljson -bare "${name}"
  fi
}

echo ">>> Generating the certificates for the deployment"

echo "(0) Generate the Certificate Authority certificate"
cat > ca-config.json <<EOF
{
  "signing": {
    "default": {
      "expiry": "8760h"
    },
    "profiles": {
      "kubernetes": {
        "usages": ["signing", "key encipherment", "server auth", "client auth"],
        "expiry": "8760h"
      }
    }
  }
}
EOF
write_csr ca "Kubernetes" "Kubernetes" "CA"
./cfssl gencert -initca ca-csr.json | ./cfssljson -bare ca

echo "(1) Generate the Admin Client certificate"
write_csr admin "admin" "system:masters" "lxdk8s"
gen_cert admin

echo "(2) Generate the Kubelet Client certificates"
for instance in lxdk8s-w0 lxdk8s-w1 lxdk8s-w2; do
  write_csr "${instance}" "system:node:${instance}" "system:nodes" "lxdk8s"
  # The container's eth0 address must be a SAN on the kubelet certificate.
  EXTERNAL_IP=$(lxc info "${instance}" | grep eth0 | head -1 | awk '{print $3}')
  gen_cert "${instance}" "${instance},${EXTERNAL_IP}"
done

echo "(3) Generate the Controller Manager Client certificate"
write_csr kube-controller-manager "system:kube-controller-manager" "system:kube-controller-manager" "lxdk8s"
gen_cert kube-controller-manager

echo "(4) Generate the Kube Proxy Client certificate"
write_csr kube-proxy "system:kube-proxy" "system:node-proxier" "lxdk8s"
gen_cert kube-proxy

echo "(5) Generate the Scheduler Client certificate"
write_csr kube-scheduler "system:kube-scheduler" "system:kube-scheduler" "lxdk8s"
gen_cert kube-scheduler

echo "(6) Generate the Kubernetes API Server certificate"
# SANs: the cluster service IP, the three master IPs, the public endpoint,
# loopback, and the in-cluster DNS name.
write_csr kubernetes "kubernetes" "Kubernetes" "lxdk8s"
gen_cert kubernetes "10.32.0.1,${M0_IP},${M1_IP},${M2_IP},${LXDK8S_PUBLIC_ADDR},127.0.0.1,kubernetes.default"

echo "(7) Generate the Service Account key pair"
write_csr service-account "service-accounts" "Kubernetes" "lxdk8s"
gen_cert service-account
// Angular component showing the details of one deleted/modified email and
// allowing the user to permanently delete it or navigate back to the list.
import { Component, OnInit, ViewContainerRef } from '@angular/core';
import { AuthenticateService } from '../../service/authenticate.service';
import { EmailService } from '../../service/email.service';
import { Ng4LoadingSpinnerService } from 'ng4-loading-spinner';
import { Overlay } from 'ngx-modialog';
import { Modal } from 'ngx-modialog/plugins/bootstrap';
import * as _ from 'underscore';
import { ToastsManager } from 'ng2-toastr/ng2-toastr';
import { Router } from '@angular/router';
import { ActivatedRoute } from "@angular/router";

@Component({
    moduleId: module.id,
    selector: 'deletedModified',
    templateUrl: 'deletedModified.component.html'
})
export class DeletedModifiedComponent implements OnInit {

    //-- ATTRIBUTES
    // Logged-in employee record; replaced with the real record in ngOnInit.
    Employe = { codePersonne: null, agence: { codeAgence: 0 } };
    EmailStat = {};
    // Email fields, all populated from the route's query parameters.
    dateEmail: string;
    from: string;
    messageEmail: string;
    nomPersonne: string;
    objectEmail: string;
    to: string;
    username: string;
    idEmail: number;
    // NOTE(review): declared with the literal type `true` — presumably meant
    // to be `boolean`; confirm before changing.
    deleted: true;
    //-- END ATTRIBUTES

    //-- CONSTRUCTOR && INJECTED SERVICES
    constructor(
        private authService: AuthenticateService,
        private emailService: EmailService,
        private spinnerService: Ng4LoadingSpinnerService,
        private modal: Modal,
        private toastr: ToastsManager,
        private vcr: ViewContainerRef,
        private route: ActivatedRoute,
        private router: Router
    ) {
        //this.spinnerService.show();
    }
    //-- END CONSTRUCTOR && INJECTED SERVICES

    //-- INITIALIZATION
    // Reads the email details from the query parameters, then resolves the
    // logged-in user's employee record via the authentication service.
    ngOnInit() {
        this.route.queryParams.subscribe(params => {
            this.dateEmail = params["dateEmail"];
            this.from = params["from"];
            this.messageEmail = params["messageEmail"];
            this.nomPersonne = params["nomPersonne"];
            this.objectEmail = params["objectEmail"];
            this.to = params["to"];
            this.username = params["username"];
            // NOTE(review): key is "IdEmail" (capital I) unlike the other,
            // lowerCamel keys — confirm against the navigating component
            // before normalizing.
            this.idEmail = params["IdEmail"];
            this.deleted = params["deleted"];
        });
        this.authService.getUsernameInfo$().subscribe(
            res => {
                this.authService.getUserInfo$(res.data.userName).subscribe(
                    resp => {
                        this.Employe = resp;
                    }
                );
            });
    }
    //-- END INITIALIZATION

    // Permanently delete this email, show a success toast with the backend's
    // message, and return to the deleted-emails list.
    delete() {
        this.emailService.DeleteEmail(this.idEmail).subscribe(
            res => {
                this.showValid(res.messageResult);
                this.router.navigate(['stb/email/deleted']);
            });
    }

    // Return to the deleted-emails list without deleting.
    GoBack() {
        this.router.navigate(['stb/email/deleted']);
    }

    //-- TOAST HELPERS
    showError(msg) {
        this.toastr.error(msg, "Error Message");
    }

    showValid(msg) {
        this.toastr.success(msg, 'Confirming message!');
    }
    //-- END TOAST HELPERS
}
#!/bin/bash
# DAQ integration tests for DHCP behavior across six faux devices.

source testing/test_preamble.sh

echo DHCP Tests >> $TEST_RESULTS

# Six faux devices on one switch; device 2 runs with DHCP disabled (xdhcp).
cat <<EOF > local/system.conf
include=../config/system/default.yaml
site_description="Multi-Device Configuration"
switch_setup.uplink_port=7
interfaces.faux-1.opts=
interfaces.faux-2.opts=xdhcp
interfaces.faux-3.opts=
interfaces.faux-4.opts=
interfaces.faux-5.opts=
interfaces.faux-6.opts=
monitor_scan_sec=1
EOF

# Device 3: long-response DHCP mode with an extended timeout.
intf_mac="9a02571e8f03"
rm -rf local/site
mkdir -p local/site/mac_addrs/$intf_mac
cat <<EOF > local/site/mac_addrs/$intf_mac/device_config.json
{
  "modules": {
    "ipaddr": {
      "timeout_sec": 320,
      "dhcp_mode": "LONG_RESPONSE"
    }
  }
}
EOF

# Multi subnet tests
# Device 4: three DHCP ranges to exercise subnet-change handling.
intf_mac="9a02571e8f04"
mkdir -p local/site/mac_addrs/$intf_mac
cat <<EOF > local/site/mac_addrs/$intf_mac/device_config.json
{
  "modules": {
    "ipaddr": {
      "enabled": true,
      "port_flap_timeout_sec": 20,
      "dhcp_ranges": [{"start": "192.168.0.1", "end": "192.168.255.254", "prefix_length": 16},
                      {"start": "10.255.255.1", "end": "10.255.255.255", "prefix_length": 24},
                      {"start": "172.16.0.1", "end": "172.16.0.200", "prefix_length": 24}]
    }
  }
}
EOF

# ip and dhcp change tests
# Device 5: default ranges; used to verify ip/dhcp change detection.
intf_mac="9a02571e8f05"
mkdir -p local/site/mac_addrs/$intf_mac
cat <<EOF > local/site/mac_addrs/$intf_mac/device_config.json
{
  "modules": {
    "ipaddr": {
      "enabled": true,
      "port_flap_timeout_sec": 20,
      "dhcp_ranges": []
    }
  }
}
EOF

# DHCP times out in extended DHCP tests
# Device 6: timeout_sec=0 so the ipaddr module must report a DHCP timeout.
intf_mac="9a02571e8f06"
mkdir -p local/site/mac_addrs/$intf_mac
cat <<EOF > local/site/mac_addrs/$intf_mac/device_config.json
{
  "modules": {
    "ipaddr": {
      "enabled": true,
      "timeout_sec": 0,
      "port_flap_timeout_sec": 20,
      "dhcp_ranges": []
    }
  }
}
EOF

# Kill the dhclient process inside the given faux container so the device
# stops renewing its DHCP lease.
function kill_dhcp_client {
    CONTAINER=$1
    pid=$(docker exec $CONTAINER ps aux | grep dhclient | awk '{print $2}')
    echo Killing dhcp client in $CONTAINER pid $pid
    docker exec $CONTAINER kill $pid
}

# Check that killing the dhcp client on a device times out the ipaddr test.
monitor_log "Target port 6 connect successful" "kill_dhcp_client daq-faux-6"

build_if_not_release

cmd/run -s settle_sec=0 dhcp_lease_time=120s
cat inst/result.log | sort | tee -a $TEST_RESULTS

# Per-device checks on the ip-trigger scans and the ipaddr module reports.
for iface in $(seq 1 6); do
    intf_mac=9a:02:57:1e:8f:0$iface
    ip_file=inst/run-9a02571e8f0$iface/scans/ip_triggers.txt
    report_file=inst/run-9a02571e8f0$iface/nodes/ipaddr0$iface/tmp/report.txt
    cat $ip_file
    ip_triggers=$(fgrep done $ip_file | wc -l)
    long_triggers=$(fgrep long $ip_file | wc -l)
    num_ips=$(cat $ip_file | cut -d ' ' -f 1 | sort | uniq | wc -l)
    dhcp_change=$(cat $report_file | fgrep 'pass connection.ipaddr.disconnect_ip_change' | wc -l)
    ip_change=$(cat $report_file | fgrep 'pass connection.ipaddr.ip_change' | wc -l)
    echo Found $ip_triggers ip triggers and $long_triggers long ip responses.
    if [ $iface == 6 ]; then
        # Device 6: expect the explicit DHCP-timeout message in the run log.
        device_dhcp_timeouts=$(cat inst/cmdrun.log | fgrep 'DHCP times out after 120s lease time' | fgrep "ipaddr_ipaddr0$iface" | wc -l)
        echo "Device $iface DHCP timeouts: $device_dhcp_timeouts" | tee -a $TEST_RESULTS
        echo "Device $iface ip change: $((ip_change))" | tee -a $TEST_RESULTS
        echo "Device $iface dhcp change: $((dhcp_change))" | tee -a $TEST_RESULTS
    elif [ $iface == 5 ]; then
        # Device 5: ip/dhcp change tests.
        echo "Device $iface ip triggers: $(((ip_triggers + long_triggers) >= 3))" | tee -a $TEST_RESULTS
        echo "Device $iface num of ips: $num_ips" | tee -a $TEST_RESULTS
        echo "Device $iface ip change: $((ip_change))" | tee -a $TEST_RESULTS
        echo "Device $iface dhcp change: $((dhcp_change))" | tee -a $TEST_RESULTS
    elif [ $iface == 4 ]; then
        # Device 4: multi-subnet tests -- count ip notifications per range.
        echo "Device $iface ip triggers: $(((ip_triggers + long_triggers) >= 4))" | tee -a $TEST_RESULTS
        subnet_ip=$(fgrep "ip notification 192.168" inst/run-*/nodes/ipaddr*/tmp/activate.log | wc -l)
        subnet2_ip=$(fgrep "ip notification 10.255.255" inst/run-*/nodes/ipaddr*/tmp/activate.log | wc -l)
        subnet3_ip=$(fgrep "ip notification 172.16.0" inst/run-*/nodes/ipaddr*/tmp/activate.log | wc -l)
        subnet_ip_change=$(fgrep "ip notification 172.16.0" inst/run-*/nodes/ipaddr*/tmp/activate.log | awk '{print $6}' | uniq | wc -l)
        echo "Device $iface subnet 1 ip: $subnet_ip subnet 2 ip: $subnet2_ip subnet 3 ip: $subnet3_ip ip_changed: $((subnet_ip_change > 1))" | tee -a $TEST_RESULTS
    elif [ $iface == 3 ]; then
        # Device 3: LONG_RESPONSE mode should produce long ip triggers.
        echo "Device $iface long ip triggers: $((long_triggers > 0))" | tee -a $TEST_RESULTS
    else
        echo "Device $iface ip triggers: $((ip_triggers > 0)) $((long_triggers > 0))" | tee -a $TEST_RESULTS
    fi
done

echo Done with tests | tee -a $TEST_RESULTS
#!/bin/bash
# assoc-array.bash — demo of Bash associative arrays (declare -A required).

declare -A user=(
  [frodeh]="Frode Haug"
  [ivarm]="Ivar Moe"
)

# Add one more entry after the fact.
user[lailas]="Laila Skiaker"

echo "${user[ivarm]}"   # print Ivar Moe
echo "${user[@]}"       # print entire array (values)
echo "${#user[@]}"      # length of array
/*********************************************************************************************************************** * OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the * following conditions are met: * * (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following * disclaimer. * * (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided with the distribution. * * (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products * derived from this software without specific prior written permission from the respective party. * * (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works * may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior * written permission from Alliance for Sustainable Energy, LLC. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED * STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ***********************************************************************************************************************/ #include "OtherEquipment.hpp" #include "OtherEquipment_Impl.hpp" #include "OtherEquipmentDefinition.hpp" #include "OtherEquipmentDefinition_Impl.hpp" #include "Schedule.hpp" #include "Schedule_Impl.hpp" #include "Space.hpp" #include "Space_Impl.hpp" #include "SpaceType.hpp" #include "SpaceType_Impl.hpp" #include "DefaultScheduleSet.hpp" #include "DefaultScheduleSet_Impl.hpp" #include "LifeCycleCost.hpp" #include "Model.hpp" #include <utilities/idd/OS_OtherEquipment_FieldEnums.hxx> #include <utilities/idd/IddEnums.hxx> #include <utilities/idd/IddFactory.hxx> #include "../utilities/core/Assert.hpp" namespace openstudio { namespace model { namespace detail { OtherEquipment_Impl::OtherEquipment_Impl(const IdfObject& idfObject, Model_Impl* model, bool keepHandle) : SpaceLoadInstance_Impl(idfObject, model, keepHandle) { OS_ASSERT(idfObject.iddObject().type() == OtherEquipment::iddObjectType()); } OtherEquipment_Impl::OtherEquipment_Impl(const openstudio::detail::WorkspaceObject_Impl& other, Model_Impl* model, bool keepHandle) : SpaceLoadInstance_Impl(other, model, keepHandle) { OS_ASSERT(other.iddObject().type() == OtherEquipment::iddObjectType()); } OtherEquipment_Impl::OtherEquipment_Impl(const OtherEquipment_Impl& other, 
Model_Impl* model, bool keepHandle) : SpaceLoadInstance_Impl(other, model, keepHandle) {} const std::vector<std::string>& OtherEquipment_Impl::outputVariableNames() const { static const std::vector<std::string> result{ "Other Equipment Fuel Rate", "Other Equipment Fuel Energy", "Other Equipment Radiant Heating Energy", "Other Equipment Radiant Heating Rate", "Other Equipment Convective Heating Energy", "Other Equipment Convective Heating Rate", "Other Equipment Latent Gain Energy", "Other Equipment Latent Gain Rate", "Other Equipment Lost Heat Energy", "Other Equipment Lost Heat Rate", "Other Equipment Total Heating Energy", "Other Equipment Total Heating Rate" // Reported in ThermalZone //"Zone Other Equipment Radiant Heating Energy", //"Zone Other Equipment Radiant Heating Rate", //"Zone Other Equipment Convective Heating Energy", //"Zone Other Equipment Convective Heating Rate", //"Zone Other Equipment Latent Gain Energy", //"Zone Other Equipment Latent Gain Rate", //"Zone Other Equipment Lost Heat Energy", //"Zone Other Equipment Lost Heat Rate", //"Zone Other Equipment Total Heating Energy", //"Zone Other Equipment Total Heating Rate" }; return result; } IddObjectType OtherEquipment_Impl::iddObjectType() const { return OtherEquipment::iddObjectType(); } std::vector<ScheduleTypeKey> OtherEquipment_Impl::getScheduleTypeKeys(const Schedule& schedule) const { std::vector<ScheduleTypeKey> result; UnsignedVector fieldIndices = getSourceIndices(schedule.handle()); UnsignedVector::const_iterator b(fieldIndices.begin()), e(fieldIndices.end()); if (std::find(b, e, OS_OtherEquipmentFields::ScheduleName) != e) { result.push_back(ScheduleTypeKey("OtherEquipment", "Other Equipment")); } return result; } bool OtherEquipment_Impl::hardSize() { OptionalSpace space = this->space(); if (!space) { return false; } makeUnique(); OtherEquipmentDefinition definition = otherEquipmentDefinition(); for (LifeCycleCost cost : definition.lifeCycleCosts()) { cost.convertToCostPerEach(); } 
if (definition.designLevel()) { return true; } if (OptionalDouble areaDensity = definition.wattsperSpaceFloorArea()) { definition.setDesignLevel(*areaDensity * space->floorArea()); return true; } if (OptionalDouble peopleDensity = definition.wattsperPerson()) { definition.setDesignLevel(*peopleDensity * space->numberOfPeople()); return true; } // should never get here. OS_ASSERT(false); return false; } bool OtherEquipment_Impl::hardApplySchedules() { bool result(false); OptionalSchedule schedule = this->schedule(); if (schedule) { result = setSchedule(*schedule); } return result; } std::string OtherEquipment_Impl::endUseSubcategory() const { return getString(OS_OtherEquipmentFields::EndUseSubcategory, true).get(); } bool OtherEquipment_Impl::isEndUseSubcategoryDefaulted() const { return isEmpty(OS_OtherEquipmentFields::EndUseSubcategory); } std::string OtherEquipment_Impl::fuelType() const { return this->getString(OS_OtherEquipmentFields::FuelType, true).get(); } bool OtherEquipment_Impl::isFuelTypeDefaulted() const { return isEmpty(OS_OtherEquipmentFields::FuelType); } OtherEquipmentDefinition OtherEquipment_Impl::otherEquipmentDefinition() const { return definition().cast<OtherEquipmentDefinition>(); } boost::optional<Schedule> OtherEquipment_Impl::schedule() const { OptionalSchedule result = getObject<OtherEquipment>().getModelObjectTarget<Schedule>(OS_OtherEquipmentFields::ScheduleName); if (!result) { // search upwards OptionalSpace space = this->space(); OptionalSpaceType spaceType = this->spaceType(); if (space) { result = space->getDefaultSchedule(DefaultScheduleType::OtherEquipmentSchedule); } else if (spaceType) { result = spaceType->getDefaultSchedule(DefaultScheduleType::OtherEquipmentSchedule); } } return result; } bool OtherEquipment_Impl::isScheduleDefaulted() const { return isEmpty(OS_OtherEquipmentFields::ScheduleName); } double OtherEquipment_Impl::multiplier() const { boost::optional<double> value = getDouble(OS_OtherEquipmentFields::Multiplier, 
true); OS_ASSERT(value); return value.get(); } bool OtherEquipment_Impl::isMultiplierDefaulted() const { return isEmpty(OS_OtherEquipmentFields::Multiplier); } bool OtherEquipment_Impl::isAbsolute() const { OtherEquipmentDefinition definition = otherEquipmentDefinition(); if (definition.designLevel()) { return true; } return false; } bool OtherEquipment_Impl::setOtherEquipmentDefinition(const OtherEquipmentDefinition& definition) { return setPointer(definitionIndex(), definition.handle()); } bool OtherEquipment_Impl::setEndUseSubcategory(const std::string& endUseSubcategory) { return setString(OS_OtherEquipmentFields::EndUseSubcategory, endUseSubcategory); } void OtherEquipment_Impl::resetEndUseSubcategory() { OS_ASSERT(setString(OS_OtherEquipmentFields::EndUseSubcategory, "")); } bool OtherEquipment_Impl::setFuelType(const std::string& fuelType) { return this->setString(OS_OtherEquipmentFields::FuelType, fuelType); } void OtherEquipment_Impl::resetFuelType() { this->setString(OS_OtherEquipmentFields::FuelType, ""); } bool OtherEquipment_Impl::setDefinition(const SpaceLoadDefinition& definition) { if (OptionalOtherEquipmentDefinition otherEquipmentDefinition = definition.optionalCast<OtherEquipmentDefinition>()) { return setOtherEquipmentDefinition(*otherEquipmentDefinition); } return false; } bool OtherEquipment_Impl::setSchedule(Schedule& schedule) { bool result = ModelObject_Impl::setSchedule(OS_OtherEquipmentFields::ScheduleName, "OtherEquipment", "Other Equipment", schedule); return result; } void OtherEquipment_Impl::resetSchedule() { bool ok = setString(OS_OtherEquipmentFields::ScheduleName, ""); OS_ASSERT(ok); } bool OtherEquipment_Impl::setMultiplier(double multiplier) { bool result = setDouble(OS_OtherEquipmentFields::Multiplier, multiplier); return result; } void OtherEquipment_Impl::resetMultiplier() { bool result = setString(OS_OtherEquipmentFields::Multiplier, ""); OS_ASSERT(result); } double OtherEquipment_Impl::getDesignLevel(double floorArea, 
double numPeople) const { return otherEquipmentDefinition().getDesignLevel(floorArea, numPeople) * multiplier(); } double OtherEquipment_Impl::getPowerPerFloorArea(double floorArea, double numPeople) const { return otherEquipmentDefinition().getPowerPerFloorArea(floorArea, numPeople) * multiplier(); } double OtherEquipment_Impl::getPowerPerPerson(double floorArea, double numPeople) const { return otherEquipmentDefinition().getPowerPerPerson(floorArea, numPeople) * multiplier(); } int OtherEquipment_Impl::spaceIndex() const { return OS_OtherEquipmentFields::SpaceorSpaceTypeName; } int OtherEquipment_Impl::definitionIndex() const { return OS_OtherEquipmentFields::OtherEquipmentDefinitionName; } boost::optional<ModelObject> OtherEquipment_Impl::otherEquipmentDefinitionAsModelObject() const { OptionalModelObject result = otherEquipmentDefinition(); return result; } boost::optional<ModelObject> OtherEquipment_Impl::scheduleAsModelObject() const { OptionalModelObject result; OptionalSchedule intermediate = schedule(); if (intermediate) { result = *intermediate; } return result; } bool OtherEquipment_Impl::setOtherEquipmentDefinitionAsModelObject(const boost::optional<ModelObject>& modelObject) { if (modelObject) { OptionalOtherEquipmentDefinition intermediate = modelObject->optionalCast<OtherEquipmentDefinition>(); if (intermediate) { return setOtherEquipmentDefinition(*intermediate); } } return false; } bool OtherEquipment_Impl::setScheduleAsModelObject(const boost::optional<ModelObject>& modelObject) { if (modelObject) { OptionalSchedule intermediate = modelObject->optionalCast<Schedule>(); if (intermediate) { Schedule schedule(*intermediate); return setSchedule(schedule); } else { return false; } } else { resetSchedule(); } return true; } std::vector<EMSActuatorNames> OtherEquipment_Impl::emsActuatorNames() const { std::vector<EMSActuatorNames> actuators{{"OtherEquipment", "Power Level"}}; return actuators; } std::vector<std::string> 
OtherEquipment_Impl::emsInternalVariableNames() const { std::vector<std::string> types{"Other Equipment Design Level"}; return types; } } // namespace detail OtherEquipment::OtherEquipment(const OtherEquipmentDefinition& definition) : SpaceLoadInstance(OtherEquipment::iddObjectType(), definition) { OS_ASSERT(getImpl<detail::OtherEquipment_Impl>()); /* *Schedule sch = this->model().alwaysOnDiscreteSchedule(); *setSchedule(sch); *setMultiplier(1.0); *setFuelType("NaturalGas"); */ setEndUseSubcategory("General"); } IddObjectType OtherEquipment::iddObjectType() { IddObjectType result(IddObjectType::OS_OtherEquipment); return result; } std::vector<std::string> OtherEquipment::validFuelTypeValues() { return getIddKeyNames(IddFactory::instance().getObject(iddObjectType()).get(), OS_OtherEquipmentFields::FuelType); } std::string OtherEquipment::endUseSubcategory() const { return getImpl<detail::OtherEquipment_Impl>()->endUseSubcategory(); } bool OtherEquipment::isEndUseSubcategoryDefaulted() const { return getImpl<detail::OtherEquipment_Impl>()->isEndUseSubcategoryDefaulted(); } bool OtherEquipment::setEndUseSubcategory(const std::string& endUseSubcategory) { return getImpl<detail::OtherEquipment_Impl>()->setEndUseSubcategory(endUseSubcategory); } void OtherEquipment::resetEndUseSubcategory() { getImpl<detail::OtherEquipment_Impl>()->resetEndUseSubcategory(); } std::string OtherEquipment::fuelType() const { return getImpl<detail::OtherEquipment_Impl>()->fuelType(); } bool OtherEquipment::isFuelTypeDefaulted() const { return getImpl<detail::OtherEquipment_Impl>()->isFuelTypeDefaulted(); } bool OtherEquipment::setFuelType(const std::string& fuelType) { return getImpl<detail::OtherEquipment_Impl>()->setFuelType(fuelType); } void OtherEquipment::resetFuelType() { getImpl<detail::OtherEquipment_Impl>()->resetFuelType(); } OtherEquipmentDefinition OtherEquipment::otherEquipmentDefinition() const { return getImpl<detail::OtherEquipment_Impl>()->otherEquipmentDefinition(); } 
boost::optional<Schedule> OtherEquipment::schedule() const { return getImpl<detail::OtherEquipment_Impl>()->schedule(); } bool OtherEquipment::isScheduleDefaulted() const { return getImpl<detail::OtherEquipment_Impl>()->isScheduleDefaulted(); } bool OtherEquipment::setOtherEquipmentDefinition(const OtherEquipmentDefinition& definition) { return getImpl<detail::OtherEquipment_Impl>()->setOtherEquipmentDefinition(definition); } bool OtherEquipment::setSchedule(Schedule& schedule) { return getImpl<detail::OtherEquipment_Impl>()->setSchedule(schedule); } void OtherEquipment::resetSchedule() { getImpl<detail::OtherEquipment_Impl>()->resetSchedule(); } bool OtherEquipment::setMultiplier(double multiplier) { return getImpl<detail::OtherEquipment_Impl>()->setMultiplier(multiplier); } void OtherEquipment::resetMultiplier() { getImpl<detail::OtherEquipment_Impl>()->resetMultiplier(); } double OtherEquipment::getDesignLevel(double floorArea, double numPeople) const { return getImpl<detail::OtherEquipment_Impl>()->getDesignLevel(floorArea, numPeople); } double OtherEquipment::getPowerPerFloorArea(double floorArea, double numPeople) const { return getImpl<detail::OtherEquipment_Impl>()->getPowerPerFloorArea(floorArea, numPeople); } double OtherEquipment::getPowerPerPerson(double floorArea, double numPeople) const { return getImpl<detail::OtherEquipment_Impl>()->getPowerPerPerson(floorArea, numPeople); } /// @cond OtherEquipment::OtherEquipment(std::shared_ptr<detail::OtherEquipment_Impl> impl) : SpaceLoadInstance(std::move(impl)) {} /// @endcond } // namespace model } // namespace openstudio
/*
 * Networking constants and entry points for the markov daemon.
 *
 * Fixes: the previous guard `_MARKOV_NET` used an identifier reserved for
 * the implementation (leading underscore followed by an uppercase letter);
 * the declarations used empty parentheses, which in C declare a function
 * with unspecified parameters rather than a true prototype.
 */
#ifndef MARKOV_NET_H
#define MARKOV_NET_H

#define NET_BACKLOG 3 /**< How many connections to tell listen() to keep a backlog for */
#define NET_PING_INTERVAL 45000 /**< time (in ms) between pings */
#define NET_TIMEOUT 4000 /**< time (in ms) to wait for a reply before giving up */

/* Initialise the networking subsystem. Returns an int status — presumably
 * 0 on success; confirm against the definition in the .c file. */
extern int net_init(void);

/* Serve connections arriving on the already-created socket `fd`. */
extern int net_listen(int fd);

#endif /* MARKOV_NET_H */
#!/bin/bash
# Build a Kuromoji IPADIC JAR bundled with the mecab-ipadic-NEologd dictionary.
# Flow: (optionally) build MeCab locally -> build the NEologd dictionary ->
# clone/checkout Kuromoji -> (optionally) rename its Java package -> mvn package
# -> copy the resulting JAR to the requested output directory.

########## Init ##########
SCRIPT_NAME=$0
KUROMOJI_NEOLOGD_BUILD_WORK_DIR=`pwd`

########## Proxy Settings ##########
#export http_proxy=http://your.proxy-host:your.proxy-port
#export https_proxy=http://your.proxy-host:your.proxy-port

########## Define Functions ##########
# logging LABEL LEVEL MESSAGE — print a timestamped progress line to stdout.
logging() {
  LABEL=$1
  LEVEL=$2
  MESSAGE=$3
  TIME=`date +"%Y-%m-%d %H:%M:%S"`
  echo "### [$TIME] [$LABEL] [$LEVEL] $MESSAGE"
}

# Print option help (interpolates the DEFAULT_* values defined below).
usage() {
  cat <<EOF
Usage: ${SCRIPT_NAME} [options...]
options:
    -N ... mecab-ipadic-NEologd Tag, use git checkout argument. (default: ${DEFAULT_MECAB_IPADIC_NEOLOGD_TAG}) ***deprecated***
    -T ... install adjective ext. if you want enable, specified 1. (default: ${DEFAULT_INSTALL_ADJECTIVE_EXT}) ***deprecated***
    -K ... Kuromoji Version Tag, use git checkout argument. (default: ${DEFAULT_KUROMOJI_VERSION_TAG})
    -M ... Kuromoji build max heapsize. (default: ${DEFAULT_KUROMOJI_BUILD_MAX_HEAPSIZE})
    -o ... generated Kuromoji JAR file output directory. (default: ${DEFAULT_JAR_FILE_OUTPUT_DIRECTORY} (current directory))
    -p ... build Kuromoji Java Package. (default: ${DEFAULT_KUROMOJI_PACKAGE})
    -h ... print this help.
EOF
}

########## Default & Fixed Values ##########
## MeCab
MECAB_VERSION=mecab-0.996
MECAB_INSTALL_DIR=${KUROMOJI_NEOLOGD_BUILD_WORK_DIR}/mecab
## mecab-ipadic-NEologd Target Tag
DEFAULT_MECAB_IPADIC_NEOLOGD_TAG=master
MECAB_IPADIC_NEOLOGD_TAG=${DEFAULT_MECAB_IPADIC_NEOLOGD_TAG}
## install adjective ext
DEFAULT_INSTALL_ADJECTIVE_EXT=0
INSTALL_ADJECTIVE_EXT=${DEFAULT_INSTALL_ADJECTIVE_EXT}
## Kuromoji Target Tag
DEFAULT_KUROMOJI_VERSION_TAG=0.9.0
KUROMOJI_VERSION_TAG=${DEFAULT_KUROMOJI_VERSION_TAG}
## Kuromoji build max heapsize
DEFAULT_KUROMOJI_BUILD_MAX_HEAPSIZE=7g
KUROMOJI_BUILD_MAX_HEAPSIZE=${DEFAULT_KUROMOJI_BUILD_MAX_HEAPSIZE}
## generated JAR file output directory
DEFAULT_JAR_FILE_OUTPUT_DIRECTORY=.
JAR_FILE_OUTPUT_DIRECTORY=${DEFAULT_JAR_FILE_OUTPUT_DIRECTORY}
## Source Package
DEFAULT_KUROMOJI_PACKAGE=com.atilika.kuromoji.ipadic
REDEFINED_KUROMOJI_PACKAGE=${DEFAULT_KUROMOJI_PACKAGE}

########## Arguments Process ##########
while getopts K:N:T:M:o:p:h OPTION
do
  case $OPTION in
    K) KUROMOJI_VERSION_TAG=${OPTARG};;
    N) MECAB_IPADIC_NEOLOGD_TAG=${OPTARG};;
    T) INSTALL_ADJECTIVE_EXT=${OPTARG}
       logging main WARN "*** -T is deprecated option ***"
       ;;
    M) KUROMOJI_BUILD_MAX_HEAPSIZE=${OPTARG};;
    o) JAR_FILE_OUTPUT_DIRECTORY=${OPTARG};;
    p) REDEFINED_KUROMOJI_PACKAGE=${OPTARG};;
    h) usage
       exit 0;;
    \?) usage
        exit 1;;
  esac
done
shift `expr "${OPTIND}" - 1`

logging main INFO 'START.'

# Echo the effective configuration before starting work.
cat <<EOF
####################################################################
applied build options.
[Auto Install MeCab Version ] ... ${MECAB_VERSION}
[mecab-ipadic-NEologd Tag (-N)] ... ${MECAB_IPADIC_NEOLOGD_TAG} *** deprecated option ***
[install adjective ext (-T)] ... ${INSTALL_ADJECTIVE_EXT} *** deprecated option ***
[Kuromoji Version Tag (-K)] ... ${KUROMOJI_VERSION_TAG}
[Kuromoji build Max Heapsize (-M)] ... ${KUROMOJI_BUILD_MAX_HEAPSIZE}
[Kuromoji JAR File Output Directory Name (-o)] ... ${JAR_FILE_OUTPUT_DIRECTORY}
[Kuromoji Package Name (-p)] ... ${REDEFINED_KUROMOJI_PACKAGE}
####################################################################
EOF
sleep 3

########## Main Process ##########
# Fail fast if the output directory does not exist.
if [ ! -d ${JAR_FILE_OUTPUT_DIRECTORY} ]; then
  logging pre-check ERROR "directory[${JAR_FILE_OUTPUT_DIRECTORY}], not exits."
  exit 1
fi

# Build MeCab into a work-dir-local prefix when no system mecab is present.
if [ ! `which mecab` ]; then
  if [ ! -e ${MECAB_INSTALL_DIR}/bin/mecab ]; then
    logging mecab INFO 'MeCab Install Local.'
    if [ ! -e ${MECAB_VERSION}.tar.gz ]; then
      curl 'https://drive.google.com/uc?export=download&id=0B4y35FiV1wh7cENtOXlicTFaRUE' -L -o ${MECAB_VERSION}.tar.gz
    fi
    tar -zxf ${MECAB_VERSION}.tar.gz
    cd ${MECAB_VERSION}
    if [ ! -e ${MECAB_INSTALL_DIR} ]; then
      mkdir -p ${MECAB_INSTALL_DIR}
    fi
    ./configure --prefix=${MECAB_INSTALL_DIR}
    make
    make install
  fi
  PATH=${MECAB_INSTALL_DIR}/bin:${PATH}
fi

cd ${KUROMOJI_NEOLOGD_BUILD_WORK_DIR}

# Clone (or hard-reset an existing clone of) mecab-ipadic-neologd, then
# check out the requested tag and build the dictionary.
logging mecab-ipadic-NEologd INFO 'Download mecab-ipadic-NEologd.'
if [ ! -e mecab-ipadic-neologd ]; then
  git clone https://github.com/neologd/mecab-ipadic-neologd.git
else
  cd mecab-ipadic-neologd
  if [ -d build ]; then
    rm -rf build
  fi
  git checkout master
  git fetch origin
  git reset --hard origin/master
  git pull --tags
  cd ..
fi

cd mecab-ipadic-neologd
git checkout ${MECAB_IPADIC_NEOLOGD_TAG}
if [ $? -ne 0 ]; then
  logging mecab-ipadic-NEologd ERROR "git checkout[${MECAB_IPADIC_NEOLOGD_TAG}] failed. Please re-run after execute 'rm -f mecab-ipadic-neologd'"
  exit 1
fi

libexec/make-mecab-ipadic-neologd.sh -T ${INSTALL_ADJECTIVE_EXT}

# Locate the freshly built dictionary and extract its date stamp from the
# directory name (trailing digits).
DIR=`pwd`
NEOLOGD_BUILD_DIR=`find ${DIR}/build/mecab-ipadic-* -maxdepth 1 -type d`
NEOLOGD_DIRNAME=`basename ${NEOLOGD_BUILD_DIR}`
NEOLOGD_VERSION_DATE=`echo ${NEOLOGD_DIRNAME} | perl -wp -e 's!.+-(\d+)!$1!'`

cd ${KUROMOJI_NEOLOGD_BUILD_WORK_DIR}

# Clone (or clean and hard-reset) the Kuromoji sources.
logging kuromoji INFO 'Kuromoji Repository Clone.'
if [ ! -e kuromoji ]; then
  git clone https://github.com/atilika/kuromoji.git
else
  cd kuromoji
  git checkout *
  git checkout master
  git fetch origin
  git reset --hard origin/master
  git status -s | grep '^?' | perl -wn -e 's!^\?+ ([^ ]+)!git clean -df $1!; system("$_")'
  mvn clean
  git pull --tags
  cd ..
fi

cd kuromoji
KUROMOJI_SRC_DIR=`pwd`
git checkout ${KUROMOJI_VERSION_TAG}
if [ $? -ne 0 ]; then
  logging kuromoji ERROR "git checkout[${KUROMOJI_VERSION_TAG}] failed. Please re-run after execute 'rm -f kuromoji'"
  exit 1
fi

export MAVEN_OPTS="-Xmx${KUROMOJI_BUILD_MAX_HEAPSIZE}"

logging kuromoji INFO 'Build Kuromoji, with mecab-ipadic-NEologd.'
test ! -e kuromoji-ipadic/dictionary && mkdir kuromoji-ipadic/dictionary
cp -Rp ${NEOLOGD_BUILD_DIR} kuromoji-ipadic/dictionary

# Optional: move main/test sources into a new package tree and rewrite the
# package references (sources and pom.xml) when -p changed the package name.
if [ "${REDEFINED_KUROMOJI_PACKAGE}" != "${DEFAULT_KUROMOJI_PACKAGE}" ]; then
  logging lucene INFO "redefine package [${DEFAULT_KUROMOJI_PACKAGE}] => [${REDEFINED_KUROMOJI_PACKAGE}]."
  ORIGINAL_SRC_DIR=`echo ${DEFAULT_KUROMOJI_PACKAGE} | perl -wp -e 's!\.!/!g'`
  NEW_SRC_DIR=`echo ${REDEFINED_KUROMOJI_PACKAGE} | perl -wp -e 's!\.!/!g'`
  test -d ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/main/java/${NEW_SRC_DIR} && rm -rf ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/main/java/${NEW_SRC_DIR}
  mkdir -p ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/main/java/${NEW_SRC_DIR}
  find ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/main/java/${ORIGINAL_SRC_DIR} -mindepth 1 -maxdepth 1 | xargs -I{} mv {} ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/main/java/${NEW_SRC_DIR}
  find ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/main/java/${NEW_SRC_DIR} -type f | xargs perl -wp -i -e "s!${DEFAULT_KUROMOJI_PACKAGE//./\\.}!${REDEFINED_KUROMOJI_PACKAGE}!g"
  test -d ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/test/java/${NEW_SRC_DIR} && rm -rf ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/test/java/${NEW_SRC_DIR}
  mkdir -p ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/test/java/${NEW_SRC_DIR}
  find ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/test/java/${ORIGINAL_SRC_DIR} -mindepth 1 -maxdepth 1 | xargs -I{} mv {} ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/test/java/${NEW_SRC_DIR}
  find ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/src/test/java/${NEW_SRC_DIR} -type f | xargs perl -wp -i -e "s!${DEFAULT_KUROMOJI_PACKAGE//./\\.}!${REDEFINED_KUROMOJI_PACKAGE}!g"
  perl -wp -i -e "s!${ORIGINAL_SRC_DIR}!${NEW_SRC_DIR}!g" kuromoji-ipadic/pom.xml
  perl -wp -i -e "s!${DEFAULT_KUROMOJI_PACKAGE//./\\.}!${REDEFINED_KUROMOJI_PACKAGE}!g" kuromoji-ipadic/pom.xml
fi

mvn -pl kuromoji-ipadic -am package \
  -DskipTests=true \
  -DskipDownloadDictionary=true \
  -Dkuromoji.dict.dir=kuromoji-ipadic/dictionary/mecab-ipadic-2.7.0-20070801-neologd-${NEOLOGD_VERSION_DATE} \
  -Dkuromoji.dict.encoding=utf-8
if [ $? -ne 0 ]; then
  logging kuromoji ERROR 'Kuromoji Build Fail.'
  exit 1
fi

cd ${KUROMOJI_NEOLOGD_BUILD_WORK_DIR}
# Split the produced JAR name into version and suffix, then publish it under a
# name carrying both the Kuromoji version and the NEologd dictionary date.
KUROMOJI_JAR_VERSION=`echo ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/target/kuromoji-ipadic-*.jar | perl -wp -e 's!.+/kuromoji-ipadic-([.\d]+)(.*).jar!$1!'`
KUROMOJI_JAR_SUFFIX=`echo ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/target/kuromoji-ipadic-*.jar | perl -wp -e 's!.+/kuromoji-ipadic-([.\d]+)(.*).jar!$2!'`
cp ${KUROMOJI_SRC_DIR}/kuromoji-ipadic/target/kuromoji-ipadic-${KUROMOJI_JAR_VERSION}${KUROMOJI_JAR_SUFFIX}.jar ${JAR_FILE_OUTPUT_DIRECTORY}/kuromoji-ipadic-neologd-${KUROMOJI_JAR_VERSION}-${NEOLOGD_VERSION_DATE}${KUROMOJI_JAR_SUFFIX}.jar

ls -l ${JAR_FILE_OUTPUT_DIRECTORY}/kuromoji-ipadic*

logging main INFO 'END.'
package org.para.db.execute;

import java.util.concurrent.CountDownLatch;

import org.para.db.task.DataTransferParallelTask;
import org.para.execute.ParallelExecute;
import org.para.execute.model.TaskProperty;
import org.para.execute.task.ParallelTask;
import org.para.jobType.db.DbSourceJobType;
import org.para.jobType.db.DbTargetJobType;
import org.para.trace.listener.FailEventListener;
import org.para.util.DBDataUtil;

/**
 * Parallel-execute implementation for database-to-database data transfer:
 * splits a {@link DbSourceJobType} source into {@link DataTransferParallelTask}
 * units that copy rows into a {@link DbTargetJobType} target.
 *
 * @author liuyan
 * @Email:<EMAIL>
 * @version 0.1
 * @Date: 2013-9-4
 * @Copyright: 2013 story All rights reserved.
 */
public class DataTranferParallelExecute extends ParallelExecute<DbSourceJobType> {

    /** No per-run initialisation is needed for DB transfer jobs. */
    @Override
    protected void init(DbSourceJobType dbSourceJobType, Object... objects) {

    }

    /**
     * Builds one transfer task. The first varargs element is expected to be
     * the {@link DbTargetJobType} destination (cast unchecked — callers must
     * supply it).
     */
    @Override
    protected ParallelTask<DbSourceJobType> buildParallelTask(
            CountDownLatch countDownLatch, TaskProperty taskProperty,
            DbSourceJobType srcObject, FailEventListener failEventListener,
            Object... objects) {
        DbTargetJobType dbTargetJobType = (DbTargetJobType) objects[0];
        ParallelTask<DbSourceJobType> byteFileParallelTask = new DataTransferParallelTask(
                countDownLatch, taskProperty, srcObject, failEventListener,
                dbTargetJobType);
        return byteFileParallelTask;
    }

    /** Total row count of the source query, used to size the parallel split. */
    @Override
    protected int analyzeResultCount(DbSourceJobType srcObject) {
        return DBDataUtil.queryResultCount(srcObject);
    }

}
#!/bin/bash
# Smoke test: running mafSpeciesSubset with no arguments should either
# succeed outright or exit with the usage status 255; anything else fails.
mafSpeciesSubset 2> /dev/null
status=$?
[ "$status" -eq 0 ] || [ "$status" -eq 255 ]
/// Reverses the characters of every space-separated word while keeping the
/// words in their original order (empty components are preserved, so runs of
/// spaces round-trip unchanged).
func reverseWords(string: String) -> String {
    let pieces = string.components(separatedBy: " ")
    var flipped: [String] = []
    for piece in pieces {
        flipped.append(String(piece.reversed()))
    }
    return flipped.joined(separator: " ")
}

let string = "Hello World"
let reversedString = reverseWords(string: string)
print(reversedString) // Output: olleH dlroW
// Singleton OBJ-model loader. After the `var ModelLoader = new ModelLoader()`
// line below, the identifier `ModelLoader` refers to the INSTANCE, which is
// what the async callbacks inside loadModel rely on (they run after the
// assignment completes). Depends on globals: THREE and `textures`.
function ModelLoader() {
    this.models = {};       // cache of loaded objects, keyed by path
    this.material = null;   // last material seen (overwritten per mesh)
    var loader = new THREE.OBJLoader();

    // Load the OBJ at `path`, post-process it, cache it under `path`, then
    // invoke finishCallback (no arguments, no error path).
    this.loadModel = function loadModel(path, finishCallback) {
        loader.load(path, function(object) {
            // Recenter on the tile: presumably models are authored on a unit
            // grid with corner origin — TODO confirm.
            object.position.x += 0.5;
            object.position.z += 0.5;
            object.traverse(function(child) {
                if(child instanceof THREE.Mesh) {
                    child.material.map = textures["palette"];
                    ModelLoader.material = child.material;
                    // NOTE(review): converting float normals to Int16Array
                    // truncates every component toward zero — values in
                    // (-1, 1) become 0. Verify this is intentional.
                    child.geometry.attributes.normal.array = new Int16Array(child.geometry.attributes.normal.array);
                }
            });
            // Scale vertices by 1/16 (mutates the first child's buffer in place).
            var vertices = object.children[0].geometry.attributes.position.array;
            for(var i = 0, length = vertices.length; i < length; i++) {
                vertices[i] *= 0.0625;
            }
            ModelLoader.models[path] = object;
            finishCallback();
        });
    }
}
var ModelLoader = new ModelLoader();
#!/bin/bash
# Run the cap detector over the test-video dataset with the frozen TF graph.
# Fix: abort immediately if `cd src/` fails (previously the python command
# would run from the wrong directory) and stop on any command failure.
set -eu
cd src/
python cap_detector.py --video_path /media/jarvis/CommonFiles/4th_semester/CV/CV_Project/test_dataset/videos --result_path /media/jarvis/CommonFiles/4th_semester/CV/CV_Project/test_dataset/output_images/ --frozen_graph_path ../models/tf1_model/frozen_inference_graph.pb --config_path ../models/tf1_model/sample.pbtxt --ckpt_path ../models/tf2_model/saved_model
package org.apereo.cas.scim.v1;

import org.apereo.cas.api.PrincipalProvisioner;
import org.apereo.cas.authentication.Authentication;
import org.apereo.cas.authentication.Credential;
import org.apereo.cas.authentication.principal.Principal;

import com.unboundid.scim.data.UserResource;
import com.unboundid.scim.schema.CoreSchema;
import com.unboundid.scim.sdk.OAuthToken;
import com.unboundid.scim.sdk.SCIMEndpoint;
import com.unboundid.scim.sdk.SCIMService;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.commons.lang3.StringUtils;

import javax.ws.rs.core.MediaType;
import java.net.URI;

/**
 * This is {@link ScimV1PrincipalProvisioner}: provisions CAS principals to a
 * SCIM v1 endpoint, creating the remote user when absent and updating it when
 * it already exists.
 *
 * @author <NAME>
 * @since 5.1.0
 */
@Slf4j
public class ScimV1PrincipalProvisioner implements PrincipalProvisioner {

    private final SCIMEndpoint<UserResource> endpoint;

    private final ScimV1PrincipalAttributeMapper mapper;

    public ScimV1PrincipalProvisioner(final String target, final String oauthToken,
                                      final String username, final String password,
                                      final ScimV1PrincipalAttributeMapper mapper) {
        this.mapper = mapper;
        val uri = URI.create(target);
        val scimService = StringUtils.isNotBlank(oauthToken)
            ? new SCIMService(uri, new OAuthToken(oauthToken))
            : new SCIMService(uri, username, password);
        scimService.setAcceptType(MediaType.APPLICATION_JSON_TYPE);
        this.endpoint = scimService.getUserEndpoint();
    }

    /**
     * Create or update the SCIM user resource matching the principal id.
     *
     * @return true if the remote resource was created/updated, false on error.
     */
    @Override
    public boolean create(final Authentication auth, final Principal p, final Credential credential) {
        try {
            val resources = endpoint.query("userName eq \"" + p.getId() + '"');
            if (resources.getTotalResults() <= 0) {
                // BUGFIX: previously this branch returned false, so a missing
                // user was never provisioned and createUserResource() was
                // effectively unreachable. A provisioner must create here.
                LOGGER.debug("User [{}] not found; creating SCIM user resource", p.getId());
                return createUserResource(p, credential);
            }
            val user = resources.iterator().next();
            if (user != null) {
                return updateUserResource(user, p, credential);
            }
            return createUserResource(p, credential);
        } catch (final Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
        return false;
    }

    /**
     * Create user resource boolean.
     *
     * @param p          the principal to map onto the new SCIM user
     * @param credential the credential backing the principal
     * @return true/false
     */
    @SneakyThrows
    protected boolean createUserResource(final Principal p, final Credential credential) {
        val user = new UserResource(CoreSchema.USER_DESCRIPTOR);
        this.mapper.map(user, p, credential);
        return endpoint.create(user) != null;
    }

    /**
     * Update user resource boolean.
     *
     * @param user       the existing SCIM user to refresh
     * @param p          the principal supplying the new attribute values
     * @param credential the credential backing the principal
     * @return true/false
     */
    @SneakyThrows
    protected boolean updateUserResource(final UserResource user, final Principal p,
                                         final Credential credential) {
        this.mapper.map(user, p, credential);
        return endpoint.update(user) != null;
    }
}
package org.queasy;

import io.dropwizard.Application;
import io.dropwizard.setup.Bootstrap;
import io.dropwizard.setup.Environment;
import org.queasy.core.bundles.QueasyServerBundle;
import org.queasy.core.bundles.QueasyMigrationBundle;

import javax.servlet.ServletException;

/**
 * Dropwizard entry point for the Queasy server. All wiring lives in the two
 * bundles registered during {@link #initialize}; {@link #run} has nothing
 * extra to configure.
 */
public class ServerApplication extends Application<ServerConfiguration> {

    public static void main(final String[] args) throws Exception {
        new ServerApplication().run(args);
    }

    @Override
    public String getName() {
        return "QueasyServer";
    }

    /** Registers DB migration support first, then the server bundle. */
    @Override
    public void initialize(final Bootstrap<ServerConfiguration> bootstrap) {
        bootstrap.addBundle(new QueasyMigrationBundle());
        bootstrap.addBundle(new QueasyServerBundle());
    }

    /** Intentionally empty: the bundles perform all runtime setup. */
    @Override
    public void run(final ServerConfiguration config, final Environment env) throws ServletException {
    }

}
import numpy as np


def calculate_occupation_numbers(Nelec, eigenvals, smear_sigma):
    """Return molecular-orbital occupation numbers for a closed-shell system.

    Only the zero-temperature limit is implemented: the lowest ``Nelec // 2``
    orbitals receive occupation 1.0 and the rest 0.0. Previously a finite
    ``smear_sigma`` silently fell off the end of the function and returned
    ``None``; that case now fails loudly.

    Args:
        Nelec: Total electron count; assumed even (two electrons per occupied
            spatial orbital).
        eigenvals: Orbital energies in ascending order. Must contain at least
            ``Nelec // 2 + 1`` entries so the LUMO can be reported.
        smear_sigma: Smearing width; values >= 1e-8 are not supported.

    Returns:
        numpy.ndarray of shape ``(len(eigenvals),)`` with the occupations.

    Raises:
        NotImplementedError: If ``smear_sigma`` requests finite-temperature
            (Fermi-smeared) occupations.
    """
    Nocc = Nelec // 2  # number of doubly-occupied spatial orbitals
    e_homo = eigenvals[Nocc - 1]  # Energy of the HOMO
    e_lumo = eigenvals[Nocc]  # Energy of the LUMO
    print('HOMO: ', e_homo, 'LUMO: ', e_lumo)
    print("mo_energy:")
    print(eigenvals[:Nocc + 5])  # Print molecular orbital energies

    # Fermi level pinned at the HOMO; kept for parity with the original code
    # even though the cold branch below does not use it.
    e_fermi = e_homo
    Norb = len(eigenvals)
    mo_occ = np.zeros((Norb))

    if smear_sigma < 1e-8:  # effectively zero temperature
        mo_occ[:Nocc] = 1.0  # step-function occupations
        return mo_occ

    raise NotImplementedError(
        'finite-temperature (smeared) occupations are not implemented; '
        'use smear_sigma < 1e-8')
import time
from functools import wraps


def execution_time_decorator(func):
    """Decorator that prints how long each call to ``func`` takes.

    The wrapped function is transparent: it forwards all positional and
    keyword arguments, returns ``func``'s result unchanged, and keeps the
    original metadata via ``functools.wraps``. Timing uses wall-clock
    ``time.time`` and is reported in milliseconds.
    """
    @wraps(func)
    def timed(*args, **kwargs):
        started = time.time()
        outcome = func(*args, **kwargs)
        elapsed_ms = (time.time() - started) * 1000
        print(f"Function '{func.__name__}' executed in {elapsed_ms:.2f} milliseconds")
        return outcome

    return timed


# Example usage
@execution_time_decorator
def example_function():
    time.sleep(1)


example_function()
package v2d2.actions.who

import scala.collection.immutable
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.{Failure, Success}

import akka.actor.{Actor, ActorContext, ActorLogging, ActorSystem}
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.pattern.ask
import akka.stream.ActorMaterializer
import akka.util.Timeout
import org.apache.commons.lang3.StringUtils
import v2d2.V2D2
import v2d2.protocols.Response
import slack.models.Message
import scala.concurrent.ExecutionContext.Implicits.global
import slack.models.User
import slack.rtm.SlackRtmClient
import v2d2.protocols.SlashRelay
import v2d2.protocols.EphemResponse

/**
 * Actor answering "who is X" queries: resolves a search string to people via
 * the who-service REST API, using Levenshtein distance for fuzzy matching,
 * and replies to its parent with a (possibly ephemeral) response.
 */
class WhoAct extends Actor with ActorLogging with WhoJPTL {
  implicit val system = ActorSystem("slack")
  implicit val materializer = ActorMaterializer()
  implicit val timeout = Timeout(25.seconds)

  // Best (smallest) edit distance between the search term and any of the
  // person's name-like fields.
  def best(
      s: String,
      person: WhoUser
  ): Int = {
    List(
      StringUtils.getLevenshteinDistance(person.last.toLowerCase(), s),
      StringUtils.getLevenshteinDistance(person.first.toLowerCase(), s),
      StringUtils.getLevenshteinDistance(person.gitHubUsername.getOrElse("").toLowerCase(), s),
      StringUtils.getLevenshteinDistance(person.hipchatMention.getOrElse("").toLowerCase(), s)
    ).min
  }

  // Returns (distance, candidates): 0 with all exact matches, otherwise the
  // closest fuzzy group chosen by the shape of the search string.
  def lookup(
      search: String,
      people: Seq[WhoUser],
      nickMap: Map[String, User],
      emailMap: Map[String, User]
  ): Tuple2[Int, Seq[WhoUser]] = {
    log.info("in lookup")
    // first we look for literal matches on full name, first name, gitHubUsername
    // we already checked literal matches of user.name (email), first.last and hipchatmention
    // then we need to figure out the format of the string
    // "user.name" => check email field
    // "first last" => check the name field
    // "someString" => we will just default for this ie _
    // Then we will need to be creative here:
    // check first name, githubusername, hipchatMention
    val uname = s"([^\\.]*\\.[^\\.]*)".r
    val fname = s"([^\\.]*\\s[^\\.]*)".r
    people.filter(
      p =>
        p.first.equalsIgnoreCase(search) ||
          p.last.equalsIgnoreCase(search) ||
          p.name.equalsIgnoreCase(search) ||
          p.gitHubUsername.getOrElse("").equalsIgnoreCase(search)
    ) match {
      case Nil =>
        log.info("in lookup NIL")
        search match {
          case uname(n) =>
            log.info("in lookup UNAME")
            // group by distance sort by distance take the closest list
            val out = people
              .groupBy(
                p => StringUtils.getLevenshteinDistance(p.email.toLowerCase(), n.toLowerCase())
              )
              .toList
              .sortBy(_._1)
              .head
            out
          case fname(n) =>
            log.info("in lookup fullname")
            people
              .groupBy(
                p => StringUtils.getLevenshteinDistance(p.name.toLowerCase(), n.toLowerCase())
              )
              .toList
              .sortBy(_._1)
              .head
          case _ =>
            // check first name, githubusername, hipchatMention
            log.info("in lookup default")
            people.groupBy(p => best(search.toLowerCase(), p)).toList.sortBy(_._1).head
        }
      case p => Tuple2(0, p)
    }
  }

  // Builds the reply: many matches -> names only; few matches -> avatar URLs.
  // `silent` selects an ephemeral response.
  def genResponse(
      msg: Message,
      data: List[WhoUser],
      silent: Boolean
  ): v2d2.protocols.Responder = {
    if (data.length > 4) {
      val str = data.map { u => s"Name: ${u.name}" }.mkString("\n")
      if (silent) EphemResponse(msg, str)
      else Response(msg, str)
    } else {
      val str = data.map { e =>
        e.avatar.getOrElse("https://who.werally.in/images/avatar/anon.svg")
      }.mkString("\n")
      if (silent) EphemResponse(msg, str)
      else Response(msg, str)
    }
  }

  def receive: Receive = {
    // Fetch exactly one person by the email-derived id and reply.
    case who: GetWhoUser =>
      val target = (who.target.profile match {
        case Some(p) =>
          p.email.getOrElse(
            p.first_name.getOrElse("empty") + "." + p.last_name.getOrElse("empty")
          )
        case _ => "empty"
      }).replaceAll("@.*$", "")
      val uri = s"${V2D2.whourl}/people/${target}"
      log.info(s"request love for ${uri}")
      val content = for {
        response <- Http().singleRequest(
          HttpRequest(method = HttpMethods.GET, uri = s"${V2D2.whourl}/people/${target}")
        )
        entity <- Unmarshal(response.entity).to[WhoUser]
      } yield entity
      content.onComplete {
        case Success(data) =>
          context.parent ! genResponse(who.msg, List(data), who.silent)
        case Failure(t) =>
          context.parent ! Response(who.msg, s"An error has occured: " + t.getMessage)
      }

    // Fetch the whole directory and fuzzy-match the search term against it.
    case who: GetWhoAll =>
      val content = for {
        response <- Http().singleRequest(
          HttpRequest(method = HttpMethods.GET, uri = s"${V2D2.whourl}/people")
        )
        entity <- Unmarshal(response.entity).to[Seq[WhoUser]]
      } yield entity
      content.onComplete {
        case Success(data) =>
          for {
            userslist <- V2D2.users
          } yield {
            val nmap = userslist.map(u => u.id -> u).toMap
            val emap = userslist.map { u =>
              val email = u.profile match {
                case Some(p) => p.email.getOrElse(u.id)
                case _       => u.id
              }
              email -> u
            }.toMap
            context.parent ! genResponse(
              who.msg,
              lookup(who.search, data, nmap, emap)._2.toList,
              who.silent
            )
          }
        case Failure(t) =>
          context.parent ! Response(who.msg, s"An error has occured: " + t.getMessage)
      }

    // Slash-command entry: same as a normal message but forced silent.
    case SlashRelay(msg) =>
      WhoIs(msg).map(x => self.forward(x.copy(silent = true)))

    case msg: Message =>
      WhoIs(msg).map(self.forward(_))

    // Resolve the target: by slack id, then by company email, else full search.
    case who: WhoIs =>
      val msg = who.msg
      log.info("entering who is")
      for {
        userslist <- V2D2.users
      } yield {
        val nmap = userslist.map(u => u.id -> u).toMap
        val emap = userslist.map { u =>
          val email = u.profile match {
            case Some(p) => p.email.getOrElse(u.id)
            case _       => u.id
          }
          email -> u
        }.toMap
        nmap.get(who.target) match {
          case Some(user) =>
            log.info(s"who is ${who.target}")
            self ! GetWhoUser(msg, user, who.silent)
          case _ =>
            emap.get(who.target + "@rallyhealth.com") match {
              case Some(user) =>
                self ! GetWhoUser(msg, user, who.silent)
              case _ =>
                self ! GetWhoAll(msg, who.target, who.silent)
            }
        }
      }
  }
}
#!/bin/bash
# Run a clean Release build, forwarding any extra CMake flags to the build
# script. Fix: "$@" is quoted so arguments containing spaces survive intact
# (the original unquoted $@ word-split them).
./src/shared/MUDS/build_scripts/build_clean.sh -DCMAKE_BUILD_TYPE=Release "$@"
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-10 16:23
# Moves the limit/percent fields off Modifier onto a new ModifierValue table
# (one modifier -> many banded values) and gives Modifier a name/description.
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('calculator', '0008_auto_20171010_1606'),
    ]

    operations = [
        migrations.CreateModel(
            name='ModifierValue',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('limit_from', models.IntegerField()),
                # Open-ended band when NULL.
                ('limit_to', models.IntegerField(null=True)),
                ('modifier_percent', models.DecimalField(decimal_places=2, max_digits=6)),
            ],
        ),
        migrations.RemoveField(
            model_name='modifier',
            name='limit_from',
        ),
        migrations.RemoveField(
            model_name='modifier',
            name='limit_to',
        ),
        migrations.RemoveField(
            model_name='modifier',
            name='modifier_percent',
        ),
        # Placeholder defaults are required because existing rows must be
        # backfilled; preserve_default=False drops them from the schema.
        migrations.AddField(
            model_name='modifier',
            name='description',
            field=models.CharField(default='Description', max_length=150),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='modifier',
            name='name',
            field=models.CharField(default='Name', max_length=64),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='modifiervalue',
            name='modifier',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='values', to='calculator.Modifier'),
        ),
    ]
import { Ref } from 'vue';
import { useAppProviderContext } from '../core/useAppContext';
// import { computed } from 'vue';
// import { lowerFirst } from 'lodash-es';

/**
 * Returns namespaced CSS class-prefix helpers for a component `scope`,
 * based on the app-wide prefix supplied by the AppProvider context.
 */
export function useDesign(scope: string) {
  const values = useAppProviderContext();
  // const $style = cssModule ? useCssModule() : {};
  // const style: Record<string, string> = {};
  // if (cssModule) {
  //   Object.keys($style).forEach((key) => {
  //     // const moduleCls = $style[key];
  //     const k = key.replace(new RegExp(`^${values.prefixCls}-?`, 'ig'), '');
  //     style[lowerFirst(k)] = $style[key];
  //   });
  // }
  // Fallback when no provider set a prefix: default to 'i'.
  // NOTE(review): the double cast stores a plain string where a Ref<string>
  // is declared, and the template literal below interpolates the context
  // value directly — this only yields a clean class name if the context
  // stores/stringifies the raw prefix. Confirm against useAppProviderContext.
  if(values.prefixCls === undefined){
    values.prefixCls = 'i' as unknown as Ref<string>;
  }
  return {
    // prefixCls: computed(() => `${values.prefixCls}-${scope}`),
    prefixCls: `${values.prefixCls}-${scope}`,
    prefixVar: values.prefixCls,
    // style,
  };
}
def logical_lines(physical_lines, joiner=''.join):
    """Yield logical lines from an iterable of physical lines.

    A physical line whose right-stripped form ends in a backslash continues
    onto the next physical line (the backslash and trailing whitespace are
    dropped); other lines terminate the current logical line. ``joiner``
    combines the collected pieces (default: plain concatenation).

    Fix: the demo block below used Python 2 ``print`` statements, which are a
    SyntaxError on Python 3; the generator itself is unchanged.
    """
    logical_line = []
    for line in physical_lines:
        stripped = line.rstrip()
        if stripped.endswith('\\'):
            # a line which continues w/the next physical line
            logical_line.append(stripped[:-1])
        else:
            # a line which does not continue, end of logical line
            logical_line.append(line)
            yield joiner(logical_line)
            logical_line = []
    if logical_line:
        # end of sequence implies end of last logical line
        yield joiner(logical_line)


if __name__ == '__main__':
    text = 'some\\\n', 'lines\\\n', 'get\n', 'joined\\\n', 'up\n'
    for line in text:
        print('P:', repr(line))
    for line in logical_lines(text, ' '.join):
        print('L:', repr(line))
#pragma once #include "glad.h" #include <vector> #include <Core/Base/include/Macros.hpp> #include <Core/Base/include/Types.hpp> namespace AVLIT { class OGLVAO { public: OGLVAO(const OGLVAO &) = delete; void operator=(const OGLVAO &) = delete; OGLVAO() = default; OGLVAO(const Mesh &mesh); ~OGLVAO(); inline void operator=(OGLVAO &&vao) noexcept; inline bool operator==(const OGLVAO &vao) const; inline void bind() const; static inline void unbindAll(); private: GLuint m_vaoID{0}; std::vector<GLuint> m_buffers; }; } // namespace AVLIT #include <Core/Renderer/OpenGL/inline/OGLVAO.inl>
const path = require('path');
const StringUtils = require('../../utils/string-utils')
const CodigoDeBarrasBuilder = require('../codigo-de-barras-builder');

// Bank adapter for Cecred/Ailos (bank code 085): field titles, formatting
// helpers and barcode generation for boletos issued through the cooperative.
const Cecred = (function() {
  const NUMERO_CECRED = '085';
  const DIGITO_CECRED = '0';

  function Cecred() {}

  // Labels used when rendering the boleto form fields.
  Cecred.prototype.getTitulos = function() {
    return {
      instrucoes: 'Instruções (texto de responsabilidade do beneficiário)',
      nomeDoPagador: 'Pagador',
      especie: 'Moeda',
      quantidade: 'Quantidade',
      valor: 'x Valor',
      moraMulta: '(+) Moras / Multa'
    };
  };

  Cecred.prototype.exibirReciboDoPagadorCompleto = function() {
    return true;
  };

  Cecred.prototype.exibirCampoCip = function() {
    return true;
  };

  // Builds the barcode's free field (convênio + nosso número + carteira),
  // validating the exact digit counts Cecred requires (6 + 17 + 2).
  Cecred.prototype.geraCodigoDeBarrasPara = boleto => {
    const beneficiario = boleto.getBeneficiario();
    const errorMsg = 'Erro ao gerar código de barras,';

    if (!beneficiario.getNumeroConvenio() || beneficiario.getNumeroConvenio().length != 6)
      throw new Error(`${errorMsg} número convênio da cooperativa não possui 6 dígitos: ${beneficiario.getNumeroConvenio()}`);

    if (!beneficiario.getNossoNumero() || beneficiario.getNossoNumero().length != 17)
      throw new Error(`${errorMsg} nosso número não possui 17 dígitos: ${beneficiario.getNossoNumero()}`);

    if (!beneficiario.getCarteira() || beneficiario.getCarteira().length != 2)
      throw new Error(`${errorMsg} código carteira não possui 2 dígitos: ${beneficiario.getCarteira()}`);

    const campoLivre = [];
    campoLivre.push(beneficiario.getNumeroConvenio());
    campoLivre.push(beneficiario.getNossoNumero());
    campoLivre.push(beneficiario.getCarteira());

    return new CodigoDeBarrasBuilder(boleto).comCampoLivre(campoLivre);
  }

  // "085-0": bank number plus check digit.
  Cecred.prototype.getNumeroFormatadoComDigito = function() {
    return [NUMERO_CECRED, DIGITO_CECRED].join('-');
  }

  Cecred.prototype.getNumeroFormatado = function() {
    return NUMERO_CECRED;
  }

  // Carteira zero-padded to 2 digits.
  Cecred.prototype.getCarteiraFormatado = function(beneficiario) {
    return StringUtils.pad(beneficiario.getCarteira(), 2, '0');
  }

  Cecred.prototype.getCarteiraTexto = function(beneficiario) {
    return StringUtils.pad(beneficiario.getCarteira(), 2, '0');
  }

  // Beneficiary code zero-padded to 7 digits.
  Cecred.prototype.getCodigoFormatado = function(beneficiario) {
    return StringUtils.pad(beneficiario.getCodigoBeneficiario(), 7, '0');
  }

  Cecred.prototype.getImagem = function() {
    return path.join(__dirname, 'logotipos/ailos.png');
  }

  Cecred.prototype.getNossoNumeroFormatado = function(beneficiario) {
    return StringUtils.pad(beneficiario.getNossoNumero(), 11, '0');
  }

  // "carteira/nossoNumero[-dígito]" as printed on the document.
  Cecred.prototype.getNossoNumeroECodigoDocumento = function(boleto) {
    const beneficiario = boleto.getBeneficiario();
    let nossoNumero = this.getNossoNumeroFormatado(beneficiario);
    if (beneficiario.getDigitoNossoNumero())
      nossoNumero += `-${beneficiario.getDigitoNossoNumero()}`;
    return this.getCarteiraFormatado(beneficiario) + '/' + nossoNumero;
  }

  Cecred.prototype.getNome = function() {
    return 'Ailos';
  }

  Cecred.prototype.getImprimirNome = function() {
    return false;
  }

  Cecred.prototype.getLocaisDePagamentoPadrao = function() {
    return ['PAGAVEL PREFERENCIALMENTE NAS COOPERATIVAS DO SISTEMA AILOS.',
      'APOS VENCIMENTO PAGAR SOMENTE NA COOPERATIVA ']
  }

  // "agencia-dígito/codigo[-dígito]" as printed on the document.
  Cecred.prototype.getAgenciaECodigoBeneficiario = function(boleto) {
    const beneficiario = boleto.getBeneficiario();
    const digitoCodigo = beneficiario.getDigitoCodigoBeneficiario();
    let codigo = this.getCodigoFormatado(beneficiario);
    if (digitoCodigo)
      codigo += '-' + digitoCodigo;
    const agenciaComDigito = beneficiario.getAgenciaFormatada() + '-' + beneficiario.getDigitoAgencia();
    return agenciaComDigito + '/' + codigo;
  }

  Cecred.novoCecred = function() {
    return new Cecred();
  }

  return Cecred;
})();

module.exports = Cecred;
/*
 * Copyright 2021 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package models.fe.businessactivities

import models.des.businessactivities.{ExpectedAMLSTurnover => DesExpectedAMLSTurnover, _}
import org.scalatestplus.play.PlaySpec
import play.api.libs.json.{JsSuccess, Json}

// Unit tests for CustomersOutsideUK: JSON round-trip plus conversion from
// the DES (backend) BusinessActivitiesAll representation.
class CustomersOutsideUKSpec extends PlaySpec {

  "CustomersOutsideUK" must {

    "round trip through Json correctly" in {
      val model: CustomersOutsideUK = CustomersOutsideUK(true, Some(Seq("GB")))
      Json.fromJson[CustomersOutsideUK](Json.toJson(model)) mustBe JsSuccess(model)
    }

    // NonUkResidentCustDetails(true, countries) should map to a populated
    // country list on the frontend model.
    "convert des model to frontend model successfully" in {
      val businessActivitiesAll = BusinessActivitiesAll(None,
        Some("2001-01-01"),
        None,
        BusinessActivityDetails(true, Some(DesExpectedAMLSTurnover(Some("11122233344")))),
        Some(FranchiseDetails(true, Some(Seq("FranchiserName1", "FranchiserName2")))),
        Some("12345678901"),
        Some("11223344556"),
        NonUkResidentCustDetails(true, Some(Seq("AD", "GB"))),
        AuditableRecordsDetails("Yes", Some(TransactionRecordingMethod(true, true, true, Some("CommercialPackageName")))),
        true,
        true,
        Some(FormalRiskAssessmentDetails(true, Some(RiskAssessmentFormat(true, true)))),
        None)

      CustomersOutsideUK.conv(businessActivitiesAll) must be(Some(CustomersOutsideUK(true, Some(List("AD", "GB")))))
    }

    // NonUkResidentCustDetails(false, None) should map to (false, None).
    "convert des model to frontend model successfully when countries option is none" in {
      val businessActivitiesAll = BusinessActivitiesAll(None,
        Some("2001-01-01"),
        None,
        BusinessActivityDetails(true, Some(DesExpectedAMLSTurnover(Some("11122233344")))),
        Some(FranchiseDetails(true, Some(Seq("FranchiserName1", "FranchiserName2")))),
        Some("12345678901"),
        Some("11223344556"),
        NonUkResidentCustDetails(false, None),
        AuditableRecordsDetails("Yes", Some(TransactionRecordingMethod(true, true, true, Some("CommercialPackageName")))),
        true,
        true,
        Some(FormalRiskAssessmentDetails(true, Some(RiskAssessmentFormat(true, true)))),
        None)

      CustomersOutsideUK.conv(businessActivitiesAll) must be(Some(CustomersOutsideUK(false, None)))
    }
  }
}
# Chef recipe: deploy every application that has a source repository.
# For each app, runs a deploy_revision resource with framework-specific build
# hooks (bower/middleman/meteor/node/phoenix/rails) chosen by the app's
# predicate helpers.
applications.select(&:repository?).each do |app|
  deploy_revision app.name do
    repo app.repository
    deploy_to app.path
    user app.user_name
    group app.group_name

    # Build steps run against the new release before migrations.
    before_migrate do
      install_bower_packages(release_path) { app(app) } if app.bower?
      build_middleman_app(release_path) { app(app) } if app.middleman?
      build_meteor_app(release_path) { app(app) } if app.meteor?
      build_node_app(release_path) { app(app) } if app.node?
      build_phoenix_app(release_path) { app(app) } if app.phoenix?
      build_rails_app(release_path) { app(app) } if app.rails?
    end

    # Release packaging just before the app restarts.
    before_restart do
      release_phoenix_app(release_path) { app(app) } if app.phoenix?
      release_rails_app(release_path) { app(app) } if app.rails?
    end

    # Post-restart housekeeping: cron (whenever) and background workers.
    after_restart do
      update_whenever { app(app) } if app.whenever?
      update_workers { app(app) }
    end

    symlink_before_migrate({})
    purge_before_symlink app.directories_purged_for_release
    create_dirs_before_symlink app.directories_created_for_release
    symlinks app.symlinks

    # Restart = refresh proxy config (if any) and touch tmp/restart.txt.
    restart_command do
      if app.proxy?
        update_proxy { app(app) }
      end

      file "#{app.release_working_directory(release_path)}/tmp/restart.txt" do
        action :touch
      end
    end

    # NOTE(review): failures of an individual app deploy are swallowed so the
    # remaining apps still deploy — confirm this is the desired behaviour.
    ignore_failure true
  end
end
# Ad-hoc Istio ingress-gateway debugging commands (pod names are examples
# from a specific cluster; substitute your own).

# List / inspect the ingress-gateway pods.
kubectl get pods -l app=istio-ingressgateway -n istio-system
kubectl describe pod/istio-ingressgateway-66c76dfc5f-mxzbz -n istio-system

# Dump Envoy route config from the gateway (two equivalent addressing forms).
istioctl proxy-config route istio-ingressgateway-66c76dfc5f-mxzbz -n istio-system
istioctl proxy-config route istio-ingressgateway-66c76dfc5f-mxzbz.istio-system

# Bootstrap config of an application sidecar.
istioctl proxy-config bootstrap k360api-deployment-7cbd8fcc4d-dg864.k360api

# Tail gateway logs / a workload's istio-proxy sidecar logs.
kubectl logs -f --all-containers -l app=istio-ingressgateway -n istio-system
kubectl logs -f -c istio-proxy -l app.kubernetes.io/name=whatsapp-kbot -n whatsapp
#!/bin/sh
#
# $Id: jp2k-crypt-tst.sh,v 1.4 2009/04/09 19:16:49 msheby Exp $
# Copyright (c) 2007-2009 John Hurst. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
#    derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# crypto JPEG 2000 tests
# Expects BUILD_DIR, EXEEXT, CRYPT_KEY, CRYPT_KEY_B, TEST_FILES,
# TEST_FILE_PREFIX and JP2K_PREFIX from the caller's environment.

# Write an encrypted MXF from the JP2K test frames.
${BUILD_DIR}/asdcp-test${EXEEXT} -k ${CRYPT_KEY} \
  -c ${TEST_FILES}/write_crypt_test_jp2k.mxf ${TEST_FILES}/${TEST_FILE_PREFIX}
if [ $? -ne 0 ]; then
  exit 1
fi

# Info dump must succeed on the encrypted file.
${BUILD_DIR}/asdcp-test${EXEEXT} -i ${TEST_FILES}/write_crypt_test_jp2k.mxf
if [ $? -ne 0 ]; then
  exit 1
fi

# Negative test: extraction with the WRONG key must fail (exit 1 expected,
# which the subshell maps back to success).
(${BUILD_DIR}/asdcp-test${EXEEXT} -k ${CRYPT_KEY_B} \
  -x ${TEST_FILES}/plaintext ${TEST_FILES}/write_crypt_test_jp2k.mxf; \
  if [ $? -eq 1 ]; then exit 0; fi; exit 1 )
if [ $? -ne 0 ]; then
  exit 1
fi

# Extract with the correct key, then byte-compare every frame against the
# original plaintext input.
${BUILD_DIR}/asdcp-test${EXEEXT} -m -k ${CRYPT_KEY} \
  -x ${TEST_FILES}/plaintext/${JP2K_PREFIX} ${TEST_FILES}/write_crypt_test_jp2k.mxf
if [ $? -ne 0 ]; then
  exit 1
fi

for file in `ls ${TEST_FILES}/${TEST_FILE_PREFIX}`; do \
  echo "$file"; \
  cmp ${TEST_FILES}/${TEST_FILE_PREFIX}/$file ${TEST_FILES}/plaintext/$file; \
  if [ $? -ne 0 ]; then \
    exit 1; \
  fi; \
done
def power(x, n): if (n == 0): return 1 elif (n % 2 == 0): y = power(x, n / 2) return y * y else: y = power(x, (n - 1) / 2) return x * y * y
<reponame>Dithn/graphql-js<gh_stars>1-10 // @flow strict export default function invariant(condition: mixed, message?: string): void { const booleanCondition = Boolean(condition); // istanbul ignore else (see transformation done in './resources/inlineInvariant.js') if (!booleanCondition) { throw new Error( message != null ? message : 'Unexpected invariant triggered.', ); } }
import Vue from 'vue' import Vuet from 'vuet' Vue.use(Vuet) let fetchCount = 0 const vuet = new Vuet() vuet.addModules('test', { data () { return { count: 0, fetchCount: 0 } }, fetch () { this.count++ this.fetchCount = ++fetchCount } }) export default vuet
Train an encoder-decoder recurrent neural network (RNN) on a corpus of data to generate natural language summaries of that data. The encoder first maps each sentence in the data to a numerical representation using an embedding layer followed by Long Short-Term Memory (LSTM) layers. The decoder then generates a natural language summary from that numerical representation using another set of LSTM layers. For more accurate results, an attention mechanism can be added so the decoder learns to weight the encoded representations at each generation step.
_just() { local i cur prev opts cmds COMPREPLY=() cur="${COMP_WORDS[COMP_CWORD]}" prev="${COMP_WORDS[COMP_CWORD-1]}" cmd="" opts="" for i in ${COMP_WORDS[@]} do case "${i}" in just) cmd="just" ;; *) ;; esac done case "${cmd}" in just) opts=" -q -u -v -e -l -h -V -f -d -s --dry-run --highlight --no-dotenv --no-highlight --quiet --clear-shell-args --unsorted --verbose --choose --dump --edit --evaluate --init --list --summary --variables --help --version --chooser --color --justfile --set --shell --shell-arg --working-directory --completions --show <ARGUMENTS>... " if [[ ${cur} == -* ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 elif [[ ${COMP_CWORD} -eq 1 ]]; then local recipes=$(just --summary --color never 2> /dev/null) if [[ $? -eq 0 ]]; then COMPREPLY=( $(compgen -W "${recipes}" -- "${cur}") ) return 0 fi fi case "${prev}" in --chooser) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; --color) COMPREPLY=($(compgen -W "auto always never" -- "${cur}")) return 0 ;; --justfile) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; -f) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; --set) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; --shell) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; --shell-arg) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; --working-directory) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; -d) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; --completions) COMPREPLY=($(compgen -W "zsh bash fish powershell elvish" -- "${cur}")) return 0 ;; --show) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; -s) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; *) COMPREPLY=() ;; esac COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; esac } complete -F _just -o bashdefault -o default just
import java.util.concurrent.atomic.AtomicInteger; /** * Debug context holder interface. By default debugging context stores in ThreadLocal variable {@link DefaultDebugContextHolder} * * @author John Doe */ public interface DebugContextHolder { /** * Get debug context. * * @return DebugContext */ DebugContext getDebugContext(); } /** * Concrete implementation of DebugContextHolder using ThreadLocal for storing debugging context. */ public class DefaultDebugContextHolder implements DebugContextHolder { private static final ThreadLocal<DebugContext> debugContextThreadLocal = new ThreadLocal<DebugContext>() { @Override protected DebugContext initialValue() { return new DebugContext(); } }; @Override public DebugContext getDebugContext() { return debugContextThreadLocal.get(); } } /** * Sample usage scenario to demonstrate the functionality of the debugging context holder in a multi-threaded environment. */ public class DebugContextDemo { public static void main(String[] args) { DefaultDebugContextHolder debugContextHolder = new DefaultDebugContextHolder(); // Create and start multiple threads to access the debugging context for (int i = 0; i < 5; i++) { Thread thread = new Thread(() -> { DebugContext debugContext = debugContextHolder.getDebugContext(); System.out.println("Thread " + Thread.currentThread().getId() + " - Debug ID: " + debugContext.getId()); }); thread.start(); } } } /** * DebugContext class representing the debugging context. */ class DebugContext { private static final AtomicInteger idGenerator = new AtomicInteger(0); private int id; DebugContext() { this.id = idGenerator.incrementAndGet(); } public int getId() { return id; } }
def binary_search(array, search_item): low = 0 high = len(array) - 1 while low <= high: mid = (high + low) // 2 mid_value = array[mid] if mid_value == search_item: return mid elif mid_value > search_item: high = mid - 1 else: low = mid + 1 return -1 array = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24] search_item = 14 index = binary_search(array, search_item) if index == -1: print("Item not found") else: print("Item found at position", index)
<filename>env_test.py from baselines.envs import TorchEnv, NoisyEnv, const import torch MAX_EPISODE_LEN = 200 if __name__ == "__main__": env = TorchEnv(const.SPARSE_HALF_CHEETAH, MAX_EPISODE_LEN) env = NoisyEnv(env, 0.02) s = env.reset() for _ in range(MAX_EPISODE_LEN): a = env.sample_action() s, r, d = env.step(a) print(r) env.render() if d: break env.close()
#!/bin/bash ​ #------------------------------------------------------------------------------- # PINGによるサーバーの死活確認用スクリプト。 #------------------------------------------------------------------------------- # 監視先サーバーのIPのリスト IP_LIST=(172.17.12.143 172.17.11.130 172.17.15.81 172.17.15.82) # アラートメールの送信先 MAILTO='your_address@sample.com' # ログファイルの出力先 LOG_FILE=./ping.log ​ for ip in ${IP_LIST[@]} do ping_result=$(ping -w 5 $ip | grep '100% packet loss') date_result=$(date) # echo $ping_result ​ if [[ -n $ping_result ]]; then echo "[SEVERE] server inactive: $ip $date_result" >> $LOG_FILE echo $ip | mail -s "[ALERT] server down!! $date_result" $MAILTO else echo "[INFO] server active: $ip $date_result" >> $LOG_FILE fi done
import PromiseKit struct ActionEnvelop { // Define properties and methods relevant to the action envelop } struct StateType { // Define properties and methods relevant to the state type } struct CustomReducer: PMKReducing { func handle(envelop: ActionEnvelop, state: StateType) -> Promise<StateType> { // Implement the logic to update the state based on the action envelop // Example: return Promise { seal in // Perform state update based on the action envelop // ... // Resolve the promise with the updated state seal.fulfill(updatedState) } } }
/** * @inheritdoc */ public function up() { $this->createTable('calendar', [ 'id' => $this->primaryKey(), 'event_name' => $this->string(), 'event_date' => $this->date(), 'created_at' => $this->timestamp()->defaultExpression('CURRENT_TIMESTAMP'), 'updated_at' => $this->timestamp()->defaultExpression('CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP'), ]); } /** * @inheritdoc */ public function down() { $this->dropTable('calendar'); }
<filename>examples/express/etc/init/00_ioc.js /** * Module dependencies. */ var ioc = require('electrolyte'); /** * Initialize IoC container. * * The IoC loader needs to be configured with the location where components * are found. In this case, components are split accross two directories. * * Route handlers are implemented as components, and located in `app/handlers`. * * All other components (including database connections, logging facilities, * etc.) are located in `app/components`. */ module.exports = function() { ioc.loader('handlers', ioc.node('app/handlers')); ioc.loader(ioc.node('app/components')); }
public class ParentTracker { private int parent_changed; public ParentTracker() { parent_changed = -1; // Initialize parent_changed to -1 } public void TrackParentChange(int index) { parent_changed = index; // Update parent_changed with the provided index } public int GetParentChanged() { return parent_changed; // Return the index of the last changed parent } }
#!/bin/bash
# (FIX: was #!/bin/sh, but the script relies on bash-only features — the
# `function` keyword, `[[ ]]` tests — so it must run under bash.)
## kiceDownloader
## Downloads answer sheets for KICE mock exams / CSAT from kice.re.kr. If the
## answer sheet has not been uploaded yet, it is polled indefinitely and
## downloaded automatically as soon as it appears.
## Only the most recent exam's answer sheet can be downloaded.

VERSION=10

# Print usage/help (all user-facing text kept verbatim).
showHelpMessage(){
  echo "kiceDownloader (Version: ${VERSION}): 평가원 사이트에 답지가 뜰 때까지 무한히 확인해주는 스크립트. 답지가 뜨면 자동으로 보여줍니다. (고3만 지원하며, 지난 고사는 지원하지 않습니다.)"
  echo "이 스크립트를 사용하면서 발생하는 법적 문제에 책임지지 않습니다."
  echo
  echo "--type [시험유형]"
  echo "시험유형을 필수적으로 입력하셔야 합니다. 예로 들면 2018학년도 9월 모의평가 (2017 시행)은 201809sumoi이며, 2017학년도 수능 (2016 시행)은 suneungtnsmd_2017입니다. 형식은 바뀔 수 있으니 평가원 사이트를 참고해 주세요."
  echo
  echo "--subject [과목코드]"
  echo "과목코드를 필수적으로 입력하셔야 합니다. 국어: 1, 수학(가/나형 상관없이): 2, 영어: 3, 한국사: 41, 사회탐구: 42, 과학탐구: 43, 직업탐구: 44, 제2외국어/한문: 5"
  echo
  echo "--server [서버코드]"
  echo "퍙가원 서버 코드를 입력하실 수 있는데 선택적인 사항이며 기본값은 1입니다."
  echo
  echo "--nodelay"
  echo "딜레이 없이 받습니다. 선택적인 사항입니다. 기본값은 1초입니다."
  echo
  echo "예시: 2018학년도 9월 모의평가 수학 답지 다운로드"
  echo "$ ./kiceDownloader.sh --type 201809sumoi --subject 2"
}

# Parse command-line options.
# FIX: replaces the original 9-way copy/paste ladder of positional checks
# with a single while/case loop over "$@" (same flags, same semantics, and
# no longer limited to nine arguments).
setOption(){
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --type)
        TestType="${2:-}"
        shift
        [[ $# -gt 0 ]] && shift
        ;;
      --subject)
        TestSubject="${2:-}"
        shift
        [[ $# -gt 0 ]] && shift
        ;;
      --server)
        KiceServer="${2:-}"
        shift
        [[ $# -gt 0 ]] && shift
        ;;
      --nodelay)
        NO_DELAY=YES
        shift
        ;;
      *)
        # Unknown tokens are ignored, matching the original behaviour.
        shift
        ;;
    esac
  done

  # --type and --subject are mandatory; otherwise show help and stop.
  if [[ -z "${TestType}" || -z "${TestSubject}" ]]; then
    showHelpMessage
    exit 0
  fi

  if [[ -z "${KiceServer}" ]]; then
    KiceServer=1
  fi

  # The file name embeds next year's value (exam year = current year + 1).
  DOWNLOAD_URL="http://webfs${KiceServer}.kice.re.kr/${TestType}/$(($(date +"%Y")+1))_${TestSubject}.pdf"
}

# Print a summary banner of the chosen options.
showSummary(){
  showLines "*"
  echo "요약 (Version: ${VERSION})"
  showLines "-"
  echo "시험코드: ${TestType} ($(($(date +"%Y")+1))학년도)"
  echo "과목코드: ${TestSubject}"
  echo "링크: ${DOWNLOAD_URL}"
  showLines "-"
  echo "*** 이 스크립트를 사용하면서 발생하는 법적 문제에 책임지지 않습니다."
  showLines "*"
}

# Poll the download URL until a real PDF appears, then open it.
# Strategy: if the first fetch looks like an XHTML error page, remember its
# SHA-1 and keep re-downloading until the checksum changes (i.e. the real
# answer sheet replaced the placeholder page).
downloadFile(){
  COUNT=0
  FILE_COUNT=1
  # Pick an unused /tmp/kicefileN.pdf path.
  while true; do
    if [[ -f "/tmp/kicefile${FILE_COUNT}.pdf" ]]; then
      FILE_COUNT=$((FILE_COUNT+1))
    else
      FILE_PATH="/tmp/kicefile${FILE_COUNT}.pdf"
      break
    fi
  done

  while true; do
    COUNT=$((COUNT+1))
    echo "다운로드 중... (${COUNT})"
    if [[ -f "${FILE_PATH}" || -d "${FILE_PATH}" ]]; then
      rm -rf "${FILE_PATH}"
    fi
    curl -o "${FILE_PATH}" "${DOWNLOAD_URL}" > /dev/null 2>&1
    if [[ "${COUNT}" == 1 ]]; then
      echo "파일 확인 중..."
      # An "XHTML" token in the payload means we got an error page, not a PDF.
      for VALUE in $(cat "${FILE_PATH}"); do
        if [[ "${VALUE}" == "XHTML" ]]; then
          IS_FAKE_FILE=YES
          break
        fi
      done
      if [[ "${IS_FAKE_FILE}" == YES ]]; then
        FILE_SHA1="$(shasum "${FILE_PATH}" | awk '{ print $1 }')"
      else
        echo "완료! 답지 파일 여는 중..."
        open "${FILE_PATH}"
        exit 0
      fi
    else
      echo "파일 확인 중..."
      if [[ ! "$(shasum "${FILE_PATH}" | awk '{ print $1 }')" == "${FILE_SHA1}" ]]; then
        echo "완료! 답지 파일 여는 중..."
        open "${FILE_PATH}"
        exit 0
      fi
    fi
    # Default 1-second delay between polls unless --nodelay was given.
    if [[ ! "${NO_DELAY}" == YES ]]; then
      sleep 1
    fi
  done
}

# Print a full terminal-width line of the given character.
showLines(){
  PRINTED_COUNTS=0
  COLS=$(tput cols)
  if [[ "${COLS}" -ge 1 ]]; then
    while [[ ! ${PRINTED_COUNTS} == $COLS ]]; do
      printf "$1"
      PRINTED_COUNTS=$((PRINTED_COUNTS+1))
    done
    echo
  fi
}

setOption "$@"
showSummary
downloadFile