text
stringlengths
27
775k
import java.io.*;
import java.util.*;

/**
 * Reads the number of attackers from the input file {@code R2.txt} and
 * reports whether the line (capacity 12) has been breached.
 *
 * @author Christine Peterson
 * @version 2018
 */
public class FFB_2018_R_Peterson_HowMany {

    /**
     * Entry point: reads one integer from R2.txt and prints the verdict.
     *
     * @param args unused
     * @throws FileNotFoundException if R2.txt is missing
     */
    public static void main(String[] args) throws FileNotFoundException {
        // try-with-resources fixes the original resource leak: the Scanner
        // (and its underlying file handle) was never closed.
        try (Scanner scan = new Scanner(new File("R2.txt"))) {
            int attackers = scan.nextInt();
            if (attackers > 12) {
                System.out.println("They've broken through!");
            } else {
                System.out.println("All is well.");
            }
        }
    }
}
namespace Dgt.Minesweeper.Engine
{
    /// <summary>
    /// Intentionally empty marker type. Its only purpose is to identify this
    /// assembly (e.g. via <c>typeof(AssemblyMarker).Assembly</c>) without
    /// coupling callers to any functional type.
    /// </summary>
    public static class AssemblyMarker
    {
    }
}
//
//  Product.h
//  MyTableViewSamples
//
//  Created by Alireza Davoodi on 2017-03-16.
//  Copyright © 2017 CICCC. All rights reserved.
//

#import <Foundation/Foundation.h>

/// Simple value object describing a product shown in a table view:
/// an image asset name plus a human-readable description.
@interface Product : NSObject

/// Name of the image asset for this product.
@property (strong, nonatomic) NSString* imageName;

/// Human-readable description text for this product.
@property (strong, nonatomic) NSString* desc;

/// Designated initializer populating both properties.
-(id) initWithImageName:(NSString*) imageName desc:(NSString*) desc;

@end
# toast-archive A Python web scraper that creates an archive of the website The-Toast.net. Outputs a TSV file.
from PyQt5.QtWebEngine import *
#from PyQt5.QtGui import *
#from PyQt5.QtDesigner import *
from PyQt5.QtCore import *
from PyQt5.QtWebEngineWidgets import *
from PyQt5.QtWidgets import *


class TethysBrowser(QMainWindow):
    """Minimal web browser: a URL bar, GO/back/forward buttons, and a
    QWebEngineView, laid out inside a plain QWidget container."""

    def __init__(self, *args, **kwargs):
        super(TethysBrowser, self).__init__(*args, **kwargs)

        self.window = QWidget()
        self.window.setWindowTitle("Tethys Browser")

        self.layout = QVBoxLayout()
        self.horizontal = QHBoxLayout()

        # URL entry field, kept short so it reads like a toolbar.
        self.url_bar = QTextEdit()
        self.url_bar.setMaximumHeight(30)

        # Toolbar buttons; the home button is created but never wired up
        # or added to the layout (kept for parity with the original).
        self.go_btn = QPushButton("GO")
        self.back_btn = QPushButton("<")
        self.fwd_btn = QPushButton(">")
        self.home_btn = QPushButton("π")
        for button in (self.go_btn, self.back_btn, self.fwd_btn, self.home_btn):
            button.setMinimumHeight(30)

        for widget in (self.url_bar, self.go_btn, self.back_btn, self.fwd_btn):
            self.horizontal.addWidget(widget)

        self.browser = QWebEngineView()
        self.go_btn.clicked.connect(
            lambda: self.navigate(self.url_bar.toPlainText()))
        self.back_btn.clicked.connect(self.browser.back)
        self.fwd_btn.clicked.connect(self.browser.forward)

        self.layout.addLayout(self.horizontal)
        self.layout.addWidget(self.browser)
        self.browser.setUrl(QUrl("http://www.tethyseid.com"))
        self.window.setLayout(self.layout)
        self.window.show()

    def navigate(self, url):
        """Load *url*, prefixing "http://" when no scheme is present,
        and echo the normalized URL back into the URL bar."""
        if not url.startswith("http"):
            url = "http://" + url
        self.url_bar.setText(url)
        self.browser.setUrl(QUrl(url))


App = QApplication([])
Window = TethysBrowser()
App.exec()
package excel.decoder

import com.typesafe.scalalogging.LazyLogging
import java.util.Date
import org.apache.poi.ss.usermodel.Cell
import org.apache.poi.xssf.usermodel.XSSFWorkbook
import org.scalatest.{ FlatSpec, GivenWhenThen, Matchers }

import scala.collection.mutable

// Unit tests for the cell decoders exposed by `implicits`: string, integer,
// double, date, and optional decoding over real POI XSSF cells.
class CellImplicitsSpec extends FlatSpec with GivenWhenThen with Matchers {

  // Fixture that builds real XSSFWorkbook cells on row 0, one per call to
  // withCell, and exposes them both individually (cell) and as a row copy.
  trait CellFixture extends LazyLogging {

    // Next column to create a cell in; incremented per withCell call.
    private var columnIndex = 0
    private val rowInstance: mutable.ListBuffer[Cell] = mutable.ListBuffer.empty

    // Creates a fresh cell at the next column, lets `f` populate it,
    // then records it in the fixture row.
    def withCell(f: Cell => Unit): Unit = {
      val cell: Cell = new XSSFWorkbook().createSheet().createRow(0).createCell(columnIndex)
      columnIndex += 1
      f(cell)
      rowInstance.append(cell)
    }

    // Returns the recorded cell at `index`.
    def cell(index: Int): Cell = {
      rowInstance(index)
    }

    // Defensive copy so tests cannot mutate the fixture's backing row.
    def row: mutable.ListBuffer[Cell] = rowInstance.clone()
  }

  "String Cell Decoder" should "decode cell value" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellValue("1"))
    When("decode value")
    val result = implicits.stringDecoder.decode(row)
    Then("no error occur")
    result shouldBe a[Right[_, _]]
    And("cell value and decoded value should match")
    result should equal(Right(cell(0).getStringCellValue))
  }

  // Numeric cells should decode to the number's string form.
  it should "decode cell value of double cell" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellValue(1.2))
    When("decode value")
    val result = implicits.stringDecoder.decode(row)
    Then("no error occur")
    result shouldBe a[Right[_, _]]
    And("cell value and decoded value should match")
    result should equal(Right(cell(0).getNumericCellValue.toString))
  }

  // Boolean cells should decode to "true"/"false".
  it should "decode cell value of boolean cell" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellValue(true))
    When("decode value")
    val result = implicits.stringDecoder.decode(row)
    Then("no error occur")
    result shouldBe a[Right[_, _]]
    And("cell value and decoded value should match")
    result should equal(Right(cell(0).getBooleanCellValue.toString))
  }

  // Formula cells decode to the formula text itself, not the evaluated value.
  it should "decode cell value of formula cell" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellFormula("SQRT(4)"))
    When("decode value")
    val result = implicits.stringDecoder.decode(row)
    Then("no error occur")
    result shouldBe a[Right[_, _]]
    And("cell value and decoded value should match")
    result should equal(Right(cell(0).getCellFormula))
  }

  // A blank cell is a decoding failure (Left), not an empty string.
  it should "not decode cell if no value setted" in new CellFixture {
    Given("cell testificant")
    withCell(_ => ())
    When("decode value")
    val result = implicits.stringDecoder.decode(row)
    Then("no error occur")
    result shouldBe a[Left[_, _]]
  }

  "Integer Cell Decoder" should "decode cell value" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellValue(1))
    When("decode value")
    val result = implicits.integerDecoder.decode(row)
    Then("no error occur")
    result shouldBe a[Right[_, _]]
    And("cell value and decoded value should match")
    result should equal(Right(cell(0).getNumericCellValue.intValue()))
  }

  // Non-integral numerics must be rejected rather than truncated.
  it should "not decode cell value if not integer number" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellValue(1.2))
    When("decode value")
    val result = implicits.integerDecoder.decode(row)
    Then("error occur")
    result shouldBe a[Left[_, _]]
  }

  // String cells are not parsed as numbers.
  it should "not decode cell value if not numeric" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellValue("1"))
    When("decode value")
    val result = implicits.integerDecoder.decode(row)
    Then("error occur")
    result shouldBe a[Left[_, _]]
  }

  "Double Cell Decoder" should "decode cell value" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellValue(1.2D))
    When("decode value")
    val result = implicits.doubleDecoder.decode(row)
    Then("no error occur")
    result shouldBe a[Right[_, _]]
    And("cell value and decoded value should match")
    result should equal(Right(cell(0).getNumericCellValue))
  }

  it should "not decode cell value" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellValue("1.2D"))
    When("decode value")
    val result = implicits.doubleDecoder.decode(row)
    Then("error occur")
    result shouldBe a[Left[_, _]]
  }

  "Date Cell Decoder" should "decode cell value" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellValue(new Date()))
    When("decode value")
    val result = implicits.dateTimeDecoder.decode(row)
    Then("no error occur")
    result shouldBe a[Right[_, _]]
    And("cell value and decoded value should match")
    result should equal(Right(cell(0).getDateCellValue))
  }

  it should "not decode cell value" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellValue("1.2D"))
    When("decode value")
    val result = implicits.dateTimeDecoder.decode(row)
    Then("error occur")
    result shouldBe a[Left[_, _]]
  }

  // Optional decoding turns a blank cell into Right(None) instead of Left.
  "Option Decoder" should "not decode cell value" in new CellFixture {
    Given("cell testificant")
    withCell(_ => ())
    When("decode value")
    val result = implicits.optionDecoder[Date](implicits.dateTimeDecoder).decode(row)
    Then("no error occur")
    result shouldBe a[Right[_, _]]
    And("decoded value should be None")
    result should equal(Right(None))
  }

  // A populated cell decodes to Some(value) under the option decoder.
  "Option Decoder" should "decode cell value" in new CellFixture {
    Given("cell testificant")
    withCell(_.setCellValue("Foo"))
    When("decode value")
    val result = implicits.optionDecoder[String](implicits.stringDecoder).decode(row)
    Then("no error occur")
    result shouldBe a[Right[_, _]]
    And("decoded value should be None")
    result should equal(Right(Some(cell(0).getStringCellValue)))
  }
}
package org.directwebremoting;

import javax.servlet.ServletContext;

import org.directwebremoting.extend.Builder;
import org.directwebremoting.extend.Factory;

/**
 * Accessor for the current ServerContext.
 * All methods delegate to a shared {@link Factory} instance.
 * @author Joe Walker [joe at getahead dot ltd dot uk]
 */
public class ServerContextFactory {
    /**
     * Accessor for the current ServerContext.
     * @return The current ServerContext.
     */
    public static ServerContext get() {
        return factory.get();
    }

    /**
     * This method was designed to support more complex DWR setups, although
     * it is unlikely that it ever did this properly. If you have multiple
     * DWR servlets in a single ServletContext or wish to do cross-context
     * access to DWR then please contact the DWR mailing list.
     * Note: the {@code ctx} argument is ignored; this behaves exactly
     * like {@link #get()}.
     * @param ctx The servlet context to allow us to bootstrap
     * @return The current ServerContext.
     * @deprecated Use the plain ServerContextFactory.get() version
     * @see #get()
     */
    @Deprecated
    public static ServerContext get(ServletContext ctx) {
        return factory.get();
    }

    /**
     * Internal method to allow us to get the Builder from which we
     * will get ServerContext objects.
     * Do NOT call this method from outside of DWR.
     * @param container the container to attach the factory to
     * @return the ServerContext produced by attaching to the container
     */
    public static ServerContext attach(Container container) {
        return factory.attach(container);
    }

    /**
     * The factory helper class
     */
    private static Factory<ServerContext> factory = Factory.create(ServerContextBuilder.class);

    /**
     * Hack to get around Generics not being implemented by erasure
     */
    public interface ServerContextBuilder extends Builder<ServerContext> {
    }
}
/**
 * Copies own enumerable properties from each source object onto `target`,
 * skipping properties whose value is `undefined` (unlike Object.assign).
 * Later sources win on key collisions. Falsy sources are ignored.
 *
 * @param {Object} target - the object to copy properties onto (mutated).
 * @param {...Object} sources - zero or more source objects.
 * @returns {Object} the mutated target, for chaining.
 */
export default function objectAssign(target, ...sources) {
  for (let i = 0, j = sources.length; i < j; i++) {
    const source = sources[i] || {}
    for (const prop in source) {
      // Bug fix: calling source.hasOwnProperty(prop) throws for objects
      // created with Object.create(null) and misbehaves when the source
      // shadows `hasOwnProperty`; borrow the method from Object.prototype.
      if (Object.prototype.hasOwnProperty.call(source, prop)) {
        const value = source[prop]
        if (value !== undefined) {
          target[prop] = value
        }
      }
    }
  }
  return target
}
Run: ```sh git checkout mfig-localnet-3.1 make localnet-start cp build.linux/ag0{,-agoric-3.1} ``` Submit a software-upgrade governance proposal: ```sh voting_period_s=240 latest_height=$(./ag0.sh status | jq -r .SyncInfo.latest_block_height) height=$(( $latest_height + $voting_period_s / 3 )) chainid=$( ./ag0.sh status | jq -r .NodeInfo.network ) ./ag0.sh tx gov submit-proposal software-upgrade agoric-3.2 --upgrade-height="$height" \ --title="Enable true vesting accounts" --description="allow bla bla bla" \ --from=node0 --chain-id="$chainid" -bblock ``` Add a deposit and votes. ```sh proposal=2 ./ag0.sh tx gov deposit $proposal 10000000stake --from=node0 --chain-id="$chainid" -bblock --yes for f in `seq 0 3`; do ./ag0.sh tx gov vote $proposal yes --from=node$f --chain-id="$chainid" -bblock --yes done ``` Wait until the nodes stop due to the upgrade. ```sh ./check-prop-status.sh $proposal ./ag0.sh ``` ```sh git checkout mfig-localnet-3.2 make localnet-start ```
package config

import "fmt"

// ManagerConfig is config object that will hold everything todo with the endpoints itself, including
// the datamodels, as well as the generic endpoints that will be set. Things like healthcheck and eventually,
// the login endpoint
type ManagerConfig struct {
	Generics []string      `json:"generics,omitempty"`
	Plugs    []*PlugConfig `json:"plugins"`
}

// validateGenerics checks every configured generic endpoint against the
// package-level allowlist and reports the first unknown one.
func (conf *ManagerConfig) validateGenerics() error {
	for _, gen := range conf.Generics {
		found := false
		for _, generic := range allowedGenerics {
			if gen == generic {
				found = true
				// Fix: stop scanning once a match is found instead of
				// continuing through the rest of the allowlist.
				break
			}
		}
		if !found {
			return fmt.Errorf("No such generic exists - %s", gen)
		}
	}
	return nil
}

// collectTypeIdMap builds a plugin-name -> model-ID map, failing on
// duplicate plugin names.
func (conf *ManagerConfig) collectTypeIdMap() (map[string]string, error) {
	types := make(map[string]string, len(conf.Plugs))
	for _, plug := range conf.Plugs {
		if _, ok := types[plug.Name]; ok {
			return nil, fmt.Errorf("Duplicate plugin name %s", plug.Name)
		}
		types[plug.Name] = plug.Model.GetID()
	}
	return types, nil
}

// validate runs all config checks: generics against the allowlist,
// plugin-name uniqueness, per-plugin validation, and path uniqueness.
func (conf *ManagerConfig) validate() error {
	err := conf.validateGenerics()
	if err != nil {
		return err
	}
	m, err := conf.collectTypeIdMap()
	if err != nil {
		return err
	}
	paths := make(map[string]bool, len(conf.Plugs))
	for _, plug := range conf.Plugs {
		if _, ok := paths[plug.Path]; ok {
			return fmt.Errorf(`Duplicate path - %s`, plug.Path)
		}
		err = plug.validate(m)
		if err != nil {
			return err
		}
		paths[plug.Path] = true
	}
	return nil
}
// Out-of-line definitions for the State<T, Sub> template declared in the
// corresponding header. Every trajectory-related member simply forwards to
// the free functions in namespace o80, passing the wrapped `value` of each
// participating state. NOTE(review): `current_state` is accepted by several
// members but never forwarded — presumably required only to satisfy the o80
// interface; confirm against the header.

// Value-initializing constructor.
template<typename T, class Sub>
State<T,Sub>::State(T v)
    : value(v) {}

// Default constructor; leaves `value` default-initialized.
template<typename T, class Sub>
State<T,Sub>::State(){}

// Returns a copy of the wrapped value.
template<typename T, class Sub>
T State<T,Sub>::get() const
{
    return value;
}

// Replaces the wrapped value.
template<typename T, class Sub>
void State<T,Sub>::set(T v)
{
    value=v;
}

// Always returns an empty string (no textual representation provided here).
template<typename T, class Sub>
std::string State<T,Sub>::to_string() const
{
    return std::string("");
}

// Speed-based completion check, delegated to o80::finished on the raw values.
template<typename T, class Sub>
bool State<T,Sub>::finished(const o80::TimePoint &start,
                            const o80::TimePoint &now,
                            const Sub &start_state,
                            const Sub &current_state,
                            const Sub &previous_desired_state,
                            const Sub &target_state,
                            const o80::Speed &speed) const
{
    return o80::finished(start,now,
                         start_state.value,
                         previous_desired_state.value,
                         target_state.value,
                         speed);
}

// Speed-based interpolation between previous desired and target states.
template<typename T, class Sub>
Sub State<T,Sub>::intermediate_state(
    const o80::TimePoint &start,
    const o80::TimePoint &now,
    const Sub &start_state,
    const Sub &current_state,
    const Sub &previous_desired_state,
    const Sub &target_state,
    const o80::Speed &speed) const
{
    return o80::intermediate_state(start,now,
                                   start_state.value,
                                   previous_desired_state.value,
                                   target_state.value,
                                   speed);
}

// Duration-based interpolation overload.
template<typename T, class Sub>
Sub State<T,Sub>::intermediate_state(
    const o80::TimePoint &start,
    const o80::TimePoint &now,
    const Sub &start_state,
    const Sub &current_state,
    const Sub &previous_desired_state,
    const Sub &target_state,
    const o80::Duration_us &duration) const
{
    return o80::intermediate_state(start,now,
                                   start_state.value,
                                   previous_desired_state.value,
                                   target_state.value,
                                   duration);
}

// Iteration-based interpolation overload (uses iteration counters instead
// of wall-clock time points).
template<typename T, class Sub>
Sub State<T,Sub>::intermediate_state(
    long int start_iteration,
    long int current_iteration,
    const Sub &start_state,
    const Sub &current_state,
    const Sub &previous_desired_state,
    const Sub &target_state,
    const o80::Iteration &iteration) const
{
    return o80::intermediate_state(start_iteration,
                                   current_iteration,
                                   start_state.value,
                                   previous_desired_state.value,
                                   target_state.value,
                                   iteration);
}
/*
 *
 * Headwind MDM: Open Source Android MDM Software
 * https://h-mdm.com
 *
 * Copyright (C) 2019 Headwind Solutions LLC (http://h-sms.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.hmdm.persistence;

import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.hmdm.persistence.domain.Customer;
import com.hmdm.persistence.domain.UploadedFile;
import com.hmdm.persistence.mapper.UploadedFileMapper;
import com.hmdm.security.SecurityException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.inject.Named;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

/**
 * <p>A DAO used for managing the icon data.</p>
 *
 * @author isv
 */
@Singleton
public class UploadedFileDAO extends AbstractDAO<UploadedFile> {

    private static final Logger logger = LoggerFactory.getLogger(UploadedFileDAO.class);

    /**
     * <p>An interface to file data persistence layer.</p>
     */
    private final UploadedFileMapper fileMapper;

    // DAO used to resolve the customer owning a file, so per-customer
    // file subdirectories can be honored when deleting from disk.
    private final CustomerDAO customerDAO;

    // Base directory on disk under which uploaded files are stored.
    private String filesDirectory;

    /**
     * <p>Constructs new <code>UploadedFileDAO</code> instance, wiring the
     * mapper, the customer DAO and the base files directory.</p>
     *
     * @param fileMapper the persistence-layer mapper for uploaded files.
     * @param customerDAO DAO used to look up file owners.
     * @param filesDirectory base directory for uploaded files on disk.
     */
    @Inject
    public UploadedFileDAO(UploadedFileMapper fileMapper,
                           CustomerDAO customerDAO,
                           @Named("files.directory") String filesDirectory) {
        this.fileMapper = fileMapper;
        this.customerDAO = customerDAO;
        this.filesDirectory = filesDirectory;
    }

    /**
     * <p>Inserts new record for the specified uploaded file.</p>
     *
     * @param file an uploaded filed to be inserted into DB.
     * @return a created file.
     */
    public UploadedFile insertFile(UploadedFile file) {
        insertRecord(file, this.fileMapper::insertFile);
        return getSingleRecord(() -> this.fileMapper.getFileById(file.getId()), SecurityException::onUploadedFileAccessViolation);
    }

    /**
     * <p>Removes new record for the specified uploaded file.</p>
     * The file is deleted from disk first (a disk failure is logged but does
     * not prevent the DB record from being removed), then from the DB.
     *
     * @param fileId an uploaded filed to be removed from DB and disk.
     */
    public void removeFile(int fileId) {
        final UploadedFile file = getSingleRecord(() -> this.fileMapper.getFileById(fileId), SecurityException::onUploadedFileAccessViolation);
        final Customer customer = this.customerDAO.findById(file.getCustomerId());
        java.nio.file.Path filePath;
        // Customers may have a dedicated subdirectory under the base
        // files directory; fall back to the base directory otherwise.
        if (customer.getFilesDir() == null || customer.getFilesDir().isEmpty()) {
            filePath = Paths.get( this.filesDirectory, file.getFilePath());
        } else {
            filePath = Paths.get(this.filesDirectory, customer.getFilesDir(), file.getFilePath());
        }
        try {
            logger.debug("Deleting file: {}", filePath);
            Files.delete(filePath);
        } catch (IOException e) {
            logger.error("Failed to delete file: {}", filePath, e);
        }
        this.fileMapper.deleteFile(fileId);
    }
}
# frozen_string_literal: true require 'spec_helper' RSpec.describe Sidekiq::Prometheus::Exporter::Cron do describe '#to_s' do let(:exporter) { described_class.new } let(:metrics_text) do # rubocop:disable Layout/IndentHeredoc <<-TEXT.chomp # HELP sidekiq_cron_jobs The number of cron jobs. # TYPE sidekiq_cron_jobs gauge sidekiq_cron_jobs 42 TEXT # rubocop:enable Layout/IndentHeredoc end before do stub_const('Sidekiq::Cron', SidekiqCronMock) allow(SidekiqCronMock::Job).to receive(:count).and_return(42) end it { expect(exporter.to_s).to eq(metrics_text) } end end
package typingsSlinky.maximMazurokGapiClientSheets.gapi.client.sheets

import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}

// NOTE: ScalablyTyped-generated facade for the Google Sheets API
// DimensionRange type; do not edit by hand.
@js.native
trait DimensionRange extends StObject {

  /** The dimension of the span. */
  var dimension: js.UndefOr[String] = js.native

  /** The end (exclusive) of the span, or not set if unbounded. */
  var endIndex: js.UndefOr[Double] = js.native

  /** The sheet this span is on. */
  var sheetId: js.UndefOr[Double] = js.native

  /** The start (inclusive) of the span, or not set if unbounded. */
  var startIndex: js.UndefOr[Double] = js.native
}
object DimensionRange {

  // Creates an empty DimensionRange JS object literal.
  @scala.inline
  def apply(): DimensionRange = {
    val __obj = js.Dynamic.literal()
    __obj.asInstanceOf[DimensionRange]
  }

  // Fluent setters for each optional field, generated per the
  // ScalablyTyped mutable-builder convention.
  @scala.inline
  implicit class DimensionRangeMutableBuilder[Self <: DimensionRange] (val x: Self) extends AnyVal {

    @scala.inline
    def setDimension(value: String): Self = StObject.set(x, "dimension", value.asInstanceOf[js.Any])

    @scala.inline
    def setDimensionUndefined: Self = StObject.set(x, "dimension", js.undefined)

    @scala.inline
    def setEndIndex(value: Double): Self = StObject.set(x, "endIndex", value.asInstanceOf[js.Any])

    @scala.inline
    def setEndIndexUndefined: Self = StObject.set(x, "endIndex", js.undefined)

    @scala.inline
    def setSheetId(value: Double): Self = StObject.set(x, "sheetId", value.asInstanceOf[js.Any])

    @scala.inline
    def setSheetIdUndefined: Self = StObject.set(x, "sheetId", js.undefined)

    @scala.inline
    def setStartIndex(value: Double): Self = StObject.set(x, "startIndex", value.asInstanceOf[js.Any])

    @scala.inline
    def setStartIndexUndefined: Self = StObject.set(x, "startIndex", js.undefined)
  }
}
#! /usr/bin/ruby1.8 # # read/berkeley_read_ruby.rb # # Mar/22/2013 # require 'bdb' require 'json' # db_file="/var/tmp/berkeley/cities.db" # ee = BDB::Btree::open(db_file,"table1","r",0777) ww=ee.to_hash; ee.close # puts "Content-type: text/json\n\n" # str_out="{" ww.sort.each {|key,value| # print (key,value) llx=value.length str_aa=value[1,llx] out_str = "\"" + key + "\": {" + str_aa + "," str_out += out_str } llx=str_out.length json_str=str_out[0,llx-1] + "}" puts json_str # # #
#!/bin/sh
# Installs Puppet on Ubuntu 14.04 (trusty) via the Puppet Labs apt repo,
# but only when `puppet` is not already on PATH.
#
# Fix: `which` is not guaranteed under /bin/sh; POSIX `command -v` is the
# portable way to test for a command's presence.
if ! command -v puppet > /dev/null 2>&1; then
    wget https://apt.puppetlabs.com/puppetlabs-release-trusty.deb
    sudo dpkg -i puppetlabs-release-trusty.deb
    sudo apt-get --assume-yes update
    sudo apt-get --assume-yes upgrade
    sudo apt-get --assume-yes install puppet
fi
--- layout: post title: '20110425' date: '2011-04-25 18:44:00' --- <p>追求重要的东西,哪怕失败了,这个世界也会因为你的尝试而更加精彩。—Tim O’Reilly</p>
package azweather

import (
	"net/url"
	"strconv"
)

// Default API version used when the caller supplies none.
const DefaultApiVersion = "1.0"

// Default response format used when the caller supplies none.
const DefaultFormat = "json"

// Optionable is implemented by request types that carry their own Options.
type Optionable interface {
	Options() Options
}

// Options holds the query parameters and credentials for a weather request.
// Pointer fields distinguish "unset" from a zero value.
type Options struct {
	Format          string
	ApiVersion      string
	Details         string
	Duration        *int
	Language        string
	Unit            string
	IndexGroupId    *int
	IndexId         *int
	Interval        *int
	SubscriptionKey string
	MSClientId      string
}

// InitializeOptions resolves the effective Options for a request: it starts
// from op's own options (when op is non-nil) and fills every empty field
// with the package defaults or the supplied credentials.
func InitializeOptions(op Optionable, subscriptionKey string, msClientId string) Options {
	var resolved Options
	if op != nil {
		resolved = op.Options()
	}
	// Fall back to defaults/credentials for any field left empty.
	resolved.Format = orDefault(resolved.Format, DefaultFormat)
	resolved.ApiVersion = orDefault(resolved.ApiVersion, DefaultApiVersion)
	resolved.SubscriptionKey = orDefault(resolved.SubscriptionKey, subscriptionKey)
	resolved.MSClientId = orDefault(resolved.MSClientId, msClientId)
	return resolved
}

// orDefault returns value when non-empty, otherwise fallback.
func orDefault(value, fallback string) string {
	if value == "" {
		return fallback
	}
	return value
}

// Encode builds the URL query string for these options plus the given query
// term. Empty/nil fields are omitted; url.Values.Encode sorts keys.
// Note: MSClientId is carried on Options but never added to the query here.
func (p Options) Encode(query string) string {
	q := url.Values{}

	addStr := func(key, value string) {
		if value != "" {
			q.Add(key, value)
		}
	}
	addInt := func(key string, value *int) {
		if value != nil {
			q.Add(key, strconv.Itoa(*value))
		}
	}

	addStr("api-version", p.ApiVersion)
	addStr("query", query)
	addStr("details", p.Details)
	addInt("duration", p.Duration)
	addStr("language", p.Language)
	addStr("unit", p.Unit)
	addInt("indexGroupId", p.IndexGroupId)
	addInt("indexId", p.IndexId)
	addInt("interval", p.Interval)
	addStr("subscription-key", p.SubscriptionKey)

	return q.Encode()
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.iotdb.confignode.manager;

import org.apache.iotdb.confignode.rpc.thrift.ConfigIService;
import org.apache.iotdb.confignode.rpc.thrift.DataNodeMessage;
import org.apache.iotdb.confignode.rpc.thrift.DataNodeRegisterReq;
import org.apache.iotdb.confignode.rpc.thrift.DataNodeRegisterResp;
import org.apache.iotdb.rpc.RpcTransportFactory;
import org.apache.iotdb.rpc.TSStatusCode;
import org.apache.iotdb.service.rpc.thrift.EndPoint;

import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.junit.Assert;

import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * Manual integration test for ConfigNode running under the ratis-consensus
 * protocol. Requires three locally running ConfigNode instances; see the
 * per-method comments for the exact setup steps.
 */
public class ConfigManagerManualTest {

  // TODO: Optimize this manual test to automatic test after the test environment is set up.
  // @YongzaoDan

  private static final String localhost = "0.0.0.0";
  private static final int timeOutInMS = 2000;

  // One thrift client per locally running ConfigNode.
  private ConfigIService.Client[] clients;

  /**
   * This is a temporary test of ConfigNode's integration with the ratis-consensus protocol. To run
   * this code, follow these steps:
   * 1. Compile IoTDB
   * 2. Copy at least three iotdb-confignode-0.14.0-SNAPSHOT
   * 3. Make sure these parameters: config_node_rpc_address(all 0.0.0.0),
   *    config_node_rpc_port(22277, 22279, 22281), config_node_internal_port(22278, 22280, 22282),
   *    consensus_type(all ratis) and config_node_group_address_list(all 0.0.0.0:22278,
   *    0.0.0.0:22280, 0.0.0.0:22282) in each iotdb-confignode.properties file are set
   * 4. Start these ConfigNode by yourself
   * 5. Add @Test and run
   */
  public void ratisConsensusTest() throws TException, InterruptedException {
    createClients();
    registerDataNodes();
    queryDataNodes();
  }

  // Opens one thrift transport per ConfigNode (rpc ports 22277/22279/22281).
  private void createClients() throws TTransportException {
    clients = new ConfigIService.Client[3];
    for (int i = 0; i < 3; i++) {
      TTransport transport =
          RpcTransportFactory.INSTANCE.getTransport(localhost, 22277 + i * 2, timeOutInMS);
      transport.open();
      clients[i] = new ConfigIService.Client(new TBinaryProtocol(transport));
    }
  }

  // Registers three DataNodes through the first client and checks that the
  // assigned DataNode IDs are sequential starting at 0.
  private void registerDataNodes() throws TException {
    for (int i = 0; i < 3; i++) {
      DataNodeRegisterReq req = new DataNodeRegisterReq(new EndPoint("0.0.0.0", 6667 + i));
      DataNodeRegisterResp resp = clients[0].registerDataNode(req);
      Assert.assertEquals(
          TSStatusCode.SUCCESS_STATUS.getStatusCode(), resp.registerResult.getCode());
      Assert.assertEquals(i, resp.getDataNodeID());
    }
  }

  // Queries every ConfigNode and asserts they all agree on the same three
  // registered DataNodes (consensus has replicated the partition table).
  private void queryDataNodes() throws InterruptedException, TException {
    // sleep 1s to make sure all ConfigNode in ConfigNodeGroup hold the same PartitionTable
    TimeUnit.SECONDS.sleep(1);
    for (int i = 0; i < 3; i++) {
      Map<Integer, DataNodeMessage> msgMap = clients[i].getDataNodesMessage(-1);
      Assert.assertEquals(3, msgMap.size());
      for (int j = 0; j < 3; j++) {
        Assert.assertNotNull(msgMap.get(j));
        Assert.assertEquals(j, msgMap.get(j).getDataNodeID());
        Assert.assertEquals(localhost, msgMap.get(j).getEndPoint().getIp());
        Assert.assertEquals(6667 + j, msgMap.get(j).getEndPoint().getPort());
      }
    }
  }

  /**
   * This is a temporary test of ConfigNode's integration with the ratis-consensus protocol. This
   * code tests the high availability of the ratis-consensus protocol. Make sure that you have run
   * according to the comments of ratisConsensusTest before executing this code. Next, close the
   * ConfigNode that occupies ports 22281 and 22282 on the local machine. Finally, run this test.
   */
  public void killTest() throws TException {
    // Only the two surviving ConfigNodes (ports 22277 and 22279) are contacted.
    clients = new ConfigIService.Client[2];
    for (int i = 0; i < 2; i++) {
      TTransport transport =
          RpcTransportFactory.INSTANCE.getTransport(localhost, 22277 + i * 2, timeOutInMS);
      transport.open();
      clients[i] = new ConfigIService.Client(new TBinaryProtocol(transport));
    }

    // A fourth DataNode should still register successfully with a quorum of 2/3.
    DataNodeRegisterResp resp =
        clients[1].registerDataNode(new DataNodeRegisterReq(new EndPoint("0.0.0.0", 6670)));
    Assert.assertEquals(TSStatusCode.SUCCESS_STATUS.getStatusCode(), resp.registerResult.getCode());
    Assert.assertEquals(3, resp.getDataNodeID());

    // Both surviving nodes must report all four DataNodes consistently.
    for (int i = 0; i < 2; i++) {
      Map<Integer, DataNodeMessage> msgMap = clients[i].getDataNodesMessage(-1);
      Assert.assertEquals(4, msgMap.size());
      for (int j = 0; j < 4; j++) {
        Assert.assertNotNull(msgMap.get(j));
        Assert.assertEquals(j, msgMap.get(j).getDataNodeID());
        Assert.assertEquals(localhost, msgMap.get(j).getEndPoint().getIp());
        Assert.assertEquals(6667 + j, msgMap.get(j).getEndPoint().getPort());
      }
    }
  }
}
import React from 'react';

// Renders a cat image positioned at the mouse coordinates it receives
// via the `mouse` prop ({x, y}).
class Cat extends React.Component {
  render() {
    const mouse = this.props.mouse;
    return (
      <img src='./cat.jpg' alt='猫咪' style={{ position: 'absolute', left: mouse.x, top: mouse.y }} />
    );
  }
}

// Tracks the mouse position over a full-height area and feeds it to <Cat>.
class MouseWithCat extends React.Component {
  constructor(props) {
    super(props);
    this.handleMouseMove = this.handleMouseMove.bind(this);
    this.state = { x: 0, y: 0 };
  }

  // Stores the latest client coordinates in state on every mouse move.
  handleMouseMove(event) {
    this.setState({
      x: event.clientX,
      y: event.clientY,
    });
  }

  render() {
    return (
      <div style={{ height: '100vh' }} onMouseMove={this.handleMouseMove}>
        {/* We could swap the <Cat> here for a <p>... but then we would need
            to create a separate <MouseWithSomethingElse> every time we want
            to use it, so <MouseWithCat> is not really reusable yet. */}
        <Cat mouse={this.state} />
      </div>
    );
  }
}

// Top-level demo component combining the heading and the tracker.
class MouseTracker extends React.Component {
  render() {
    return (
      <div>
        <h1>移动鼠标!</h1>
        <MouseWithCat />
      </div>
    );
  }
}

export default MouseTracker;
/* (c) Copyright 2012 Felipe Magno de Almeida
 *
 * Distributed under the Boost Software License, Version 1.0. (See
 * accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 */

#ifndef MORBID_IIOP_OUTPUT_ITERATOR_HPP
#define MORBID_IIOP_OUTPUT_ITERATOR_HPP

#include <boost/spirit/home/karma.hpp>

namespace morbid { namespace iiop {

namespace spirit = boost::spirit;
namespace karma = spirit::karma;

// Metafunction extracting the underlying iterator type from a
// karma::detail::output_iterator wrapper. Only the specialization below
// is defined; the primary template is intentionally left undefined so
// that unsupported types fail at compile time.
template <typename O>
struct output_iterator;

// Specialization: exposes the wrapped iterator type I as `type`.
template <typename I, typename T1, typename T2>
struct output_iterator<karma::detail::output_iterator<I, T1, T2> >
{
  typedef I type;
};

} }

#endif
package au.com.tilbrook.android.rxkotlin.fragments

import android.support.v4.app.Fragment
import au.com.tilbrook.android.rxkotlin.MyApp

/**
 * Base class for the app's fragments.
 *
 * On destruction, each fragment instance is handed to the application's
 * refWatcher (presumably LeakCanary's RefWatcher — confirm in MyApp) so that
 * fragments still referenced after destruction are reported as leaks.
 */
open class BaseFragment : Fragment() {
    override fun onDestroy() {
        super.onDestroy()
        // Register this instance for leak detection after it is destroyed.
        MyApp.instance.refWatcher.watch(this)
    }
}
<?php

defined('BASEPATH') or exit('No direct script access allowed');

/**
 * Base model
 *
 * Shared methods used by the public-facing (front) part of the site.
 *
 * @author Sergey Nizhnik <kroloburet@gmail.com>
 */
class Front_basic_model extends CI_Model
{

    function __construct()
    {
        parent::__construct();
    }

    /**
     * Get a value from the "app" array
     *
     * app[] holds: resource configuration, public user data, the
     * localization array and other shared helper options. This method
     * provides convenient access to a nested value via a dot-separated
     * "path" of keys.
     * E.g.: $this->app('conf.langs') === $app['conf']['langs']
     *
     * @param string|null $path Dot-separated path, or null for the whole array
     * @return mixed The value, the whole array, or null when a key is missing
     */
    function app(?string $path = null) // explicit nullable: implicit `string $path = null` is deprecated in PHP 8.4
    {
        // populate app[] if it is still empty
        if (empty($this->config->item('app'))) {
            $this->config->set_item('app', $this->get_config());
        }
        // return the whole array when no path was given
        if (!$path || !is_string($path)) {
            return $this->config->item('app');
        }
        // walk the dotted path and return the nested value
        return array_reduce(explode('.', $path), function ($i, $k) {
            return isset($i[$k]) ? $i[$k] : null;
        }, $this->config->item('app'));
    }

    /**
     * Edit the "app" array
     *
     * Adds or changes values in place.
     *
     * @param array $data path=>value pairs, e.g.
     *                    ['conf.langs.ru.title'=>'RU','lexic.basic.home'=>'Home']
     * @return void|boolean false when no data was given
     */
    function set_app(array $data = [])
    {
        if (empty($data)) {
            return false;
        }
        foreach ($data as $path => $val) {
            // descend by reference, creating intermediate levels as needed
            $level = &$this->config->config['app'];
            foreach (explode('.', $path) as $k) {
                if (!key_exists($k, $level) || !is_array($level[$k])) {
                    $level[$k] = [];
                }
                $level = &$level[$k];
            }
            $level = $val;
        }
    }

    /**
     * Get the configuration
     *
     * Collects and returns, in one array, all public settings, session
     * data, the user's language with its localization data, etc.
     * Put here any data used everywhere in the current session.
     *
     * @return array
     */
    function get_config()
    {
        // fetch the configuration table
        $q = $this->db->get('config')->result_array();
        foreach ($q as $v) {
            $json = @json_decode($v['value'], true);
            // if the stored value is JSON, convert it to an array
            $data['conf'][$v['name']] = $json === null ? $v['value'] : $json;
        }
        // system moderators
        $m = []; // will collect the emails of all allowed moderators
        $ip = $this->input->server('REMOTE_ADDR'); // current ip
        $q = $this->db->get('back_users')->result_array();
        $data['conf']['back_user'] = false;
        foreach ($q as $v) {
            // is this an admin / permitted moderator, or an ordinary visitor?
            $v['ip'] === $ip && $v['access'] ? $data['conf']['back_user'] = true : null;
            switch ($v['status']) {
                case'administrator':
                    $data['conf']['admin_mail'] = $v['email'];
                    break;
                case'moderator':
                    $v['access'] ? $m[] = $v['email'] : null;
                    break;
            }
        }
        // emails of all permitted moderators, comma-separated
        $data['conf']['moderator_mail'] = implode(',', $m);
        // system languages
        $data['conf']['langs'] = $this->db->get('languages')->result_array();
        // default system language
        foreach ($data['conf']['langs'] as $i) {
            if ($i['def']) {
                $data['conf']['lang_def'] = $i;
                break;
            }
        }
        $tags = array_column($data['conf']['langs'], 'tag'); // tags of the system languages
        $hal = substr($this->input->server('HTTP_ACCEPT_LANGUAGE'), 0, 2); // browser language tag
        $ulc = $this->input->cookie('user_lang'); // cookie holding the user's language
        // resolve the user language: cookie, then browser, then system default
        $data['conf']['user_lang'] = $ulc && in_array($ulc, $tags) ? $ulc : (in_array($hal, $tags) ?
            $hal : $data['conf']['lang_def']['tag']);
        // localization data
        $data['lexic'] = $this->lang->load('front_template', $data['conf']['user_lang'], true);
        return $data;
    }

    /**
     * Get the menu
     *
     * Fetches from the DB all published menu items for the user's language
     * and returns them as a formatted multidimensional array (tree).
     *
     * @return array
     */
    function get_menu()
    {
        $q = $this->db->where(['public' => 1, 'lang' => $this->app('conf.user_lang')])->
                order_by('order')->get('menu')->result_array();
        if (empty($q)) {
            return [];
        }
        // FIX: the tree builder used to be a named function declared inside
        // this method. PHP registers such functions globally, so calling
        // get_menu() a second time in one request caused a fatal
        // "Cannot redeclare maketree()" error. A recursive closure (bound by
        // reference so it can call itself) avoids the redeclaration.
        $maketree = function ($input, $pid = 0) use (&$maketree) {
            $output = []; // the resulting (sub)tree
            foreach ($input as $n => $v) {
                // parent matches the requested one
                if ($v['pid'] == $pid) {
                    $bufer = $v; // store in a buffer
                    unset($input[$n]); // drop the consumed element from the input
                    $nodes = $maketree($input, $v['id']); // recursively collect children
                    // attach children, if any
                    if (count($nodes) > 0) {
                        $bufer['nodes'] = $nodes;
                    }
                    $output[] = $bufer; // push the buffer onto the result
                }
            }
            return $output;
        };
        return $maketree($q);
    }
}
# tMind-Ui --- ## [规范(Rule)](./Rule/index.md) ## 基础(Base) ### 字体(tFont) ### 颜色(tColor) ### 图标(tIcon) ## 布局容器 (LayerWrapper) ### 栅格(tGrid) ### 单元(tUnit) ### 表单(tForm) ### tab页(tTab) ### 步骤条(Steps) ## 数据容器(DataWrapper) ### 卡片(tCard) ### 列表(tList) ### 折叠面板(tCollapseableList) ### 选择卡(tSelectCard) ### 表格(tTable) ### 时间轴(tTimeline) ## 数据元素(DataElement) ### 输入框(tInput) ### 下拉框(tSelect) ### 复选框(tCheckBox) ### 单选框(tRadio) ### 切换开关(tSwitch) ### 日期选择器(tDate) ### 时间选择器(tTime) ### 级联选择器(tCascade) ### 地址选择器(tAddr) ### 颜色选择器(tColorPicker) ### 评分(tScore) ### 上传(tUpdate) ### 进度条(tProgress) ### 气泡(tPoptip) ## 操作元素(OptElement) ### 按钮(tButton) ### 菜单栏(tMenu) ### 操作条(tOptRow) ### 面包屑(tBreadcrumb) ## 消息与提示(Message) ### 信息(tInfo) ### 通知(tAlert) ### 弹窗(tModal) ## 部件(Widget) ### 徽标(tBadge)
-- Demonstrates PL/pgSQL exception handling: returns 100 / arg, but catches
-- division_by_zero and returns the sentinel 999 when arg is 0 instead of
-- raising the error to the caller.
CREATE OR REPLACE FUNCTION test_excep (arg INTEGER) RETURNS INTEGER AS $$
DECLARE
    res INTEGER;
BEGIN
    res := 100 / arg;
    RETURN res;
EXCEPTION
    -- arg = 0: swallow the error and return the sentinel value
    WHEN division_by_zero THEN
        RETURN 999;
END;
$$ LANGUAGE plpgsql;
exports.acssFilesList = (pathToProp = '') => [ `${pathToProp}acss/acss-color.json`, `${pathToProp}acss/acss-margin.json`, `${pathToProp}acss/acss-padding.json`, `${pathToProp}acss/acss-width.json`, `${pathToProp}acss/acss-height.json`, `${pathToProp}acss/acss-top-right-bottom-left.json`, `${pathToProp}acss/acss-display.json`, `${pathToProp}acss/acss-f.json`, `${pathToProp}acss/acss-position.json`, `${pathToProp}acss/acss-text.json`, ];
/* * main.c */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include "list.h" #include "token.h" #include "common.h" #include "parse.h" #include "error.h" #include "reduce.h" #include "defopcode.h" #include "uniquestring.h" static void usage (const char *progname); static void unrecognized (const char *progname, const char *badopt); /* Global variables to control compilation options. */ int optimization_level; BOOL preprocess_only; FILE *syn68k_c_stream, *mapinfo_c_stream, *mapindex_c_stream; FILE *profileinfo_stream; BOOL verbose; int main (int argc, char *argv[]) { SymbolTable *sym = make_symbol_table (); const char *include_dirs[32] = { ".", NULL }; static const char *current_dir[] = { ".", NULL }; int include_dir_index = 1; int i; init_tokenizer (); init_unique_string (); /* Set up global variables with default values. */ optimization_level = 0; preprocess_only = FALSE; verbose = FALSE; syn68k_c_stream = NULL; mapinfo_c_stream = NULL; mapindex_c_stream = NULL; profileinfo_stream = NULL; /* Parse command line arguments. */ for (i = 1; i < argc; i++) { if (argv[i][0] != '-') { open_file (argv[i], current_dir); /* Open output code streams. */ if (!preprocess_only) { syn68k_c_stream = fopen ("syn68k.c", "w"); mapinfo_c_stream = fopen ("mapinfo.c", "w"); mapindex_c_stream = fopen ("mapindex.c", "w"); profileinfo_stream = fopen ("profileinfo", "w"); } /* Initialize code generator and output header stuff. */ begin_generating_code (); /* Munch through everything and generate appropriate stuff. */ parse_all_expressions (sym); /* Do any necessary cleanup. */ done_generating_code (); /* Close up all streams. 
*/ if (!preprocess_only) { fclose (syn68k_c_stream); syn68k_c_stream = NULL; fclose (mapinfo_c_stream); mapinfo_c_stream = NULL; fclose (mapindex_c_stream); mapindex_c_stream = NULL; fclose (profileinfo_stream); profileinfo_stream = NULL; } continue; } switch (argv[i][1]) { case 's': if (!strcmp (argv[i], "-stdin")) { open_stream ("<standard input>", stdin); parse_all_expressions (sym); } else unrecognized (argv[0], argv[i]); break; case 'I': if (argv[i][2] == '\0') fatal_error ("Missing include directory for -I option.\n"); if (include_dir_index >= sizeof include_dirs / sizeof include_dirs[0]) fatal_error ("Too many -I directories specified.\n"); include_dirs[include_dir_index++] = argv[i] + 2; include_dirs[include_dir_index] = NULL; break; case 'E': if (argv[i][2] != '\0') unrecognized (argv[0], argv[i]); preprocess_only = TRUE; break; case 'O': if (argv[i][2] == '\0') optimization_level = 1; else optimization_level = atoi (argv[i] + 2); break; case 'v': if (argv[i][2] != '\0') unrecognized (argv[0], argv[i]); verbose = TRUE; break; default: unrecognized (argv[0], argv[i]); break; } } return 0; } static void unrecognized (const char *progname, const char *badopt) { error ("Unrecognized option \"%s\".\n", badopt); usage (progname); } /* Prints out usage and exits. */ static void usage (const char *progname) { fatal_error ("Usage: %s <usage entered when stabilized>\n", progname); }
# Embroidery

This program generates embroidery patterns given only an input image and the desired size of the embroidered picture. It uses special algorithms to find the most appropriate DMC thread for a particular color represented as an RGB tuple, and applies the Floyd–Steinberg dithering algorithm so the output image looks smoother to the human eye.
...
Based on the selected threads, it generates all the documentation an ordinary person needs to start embroidering. Scripts are also provided for generating a .json file of DMC threads, creating pattern icons from Unicode symbols, and selecting the most dissimilar ones.
#!/bin/bash
#
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -euo pipefail

# Load the test setup defined in the parent directory
CURRENT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${CURRENT_DIR}/../integration_test_setup.sh" \
  || { echo "integration_test_setup.sh not found!" >&2; exit 1; }
source "${CURRENT_DIR}/execution_statistics_utils.sh" \
  || { echo "execution_statistics_utils.sh not found!" >&2; exit 1; }

enable_errexit

readonly CPU_TIME_SPENDER="${CURRENT_DIR}/../../../test/shell/integration/spend_cpu_time"

readonly OUT_DIR="${TEST_TMPDIR}/out"
readonly OUT="${OUT_DIR}/outfile"
readonly ERR="${OUT_DIR}/errfile"

readonly EXIT_STATUS_SIGABRT=$((128 + 6))
readonly EXIT_STATUS_SIGKILL=$((128 + 9))
readonly EXIT_STATUS_SIGALRM=$((128 + 14))
readonly EXIT_STATUS_SIGTERM=$((128 + 15))

function set_up() {
  rm -rf $OUT_DIR
  mkdir -p $OUT_DIR
}

# Asserts that the captured stdout file matches the expected string exactly.
function assert_stdout() {
  assert_equals "$1" "$(cat $OUT)"
}

# Asserts both the captured stdout ($1) and stderr ($2).
function assert_output() {
  assert_equals "$1" "$(cat $OUT)"
  assert_equals "$2" "$(cat $ERR)"
}

function test_basic_functionality() {
  $process_wrapper --stdout=$OUT --stderr=$ERR /bin/echo hi there &> $TEST_log || fail
  assert_output "hi there" ""
}

function test_to_stderr() {
  $process_wrapper --stdout=$OUT --stderr=$ERR /bin/sh -c "/bin/echo hi there >&2" &> $TEST_log || fail
  assert_output "" "hi there"
}

function test_exit_code() {
  local code=0
  $process_wrapper --stdout=$OUT --stderr=$ERR /bin/sh -c "exit 71" &> $TEST_log || code=$?
  assert_equals 71 "$code"
}

function test_signal_death() {
  local code=0
  $process_wrapper --stdout=$OUT --stderr=$ERR /bin/sh -c 'kill -ABRT $$' &> $TEST_log || code=$?
  assert_equals "${EXIT_STATUS_SIGABRT}" "$code"
}

function test_signal_catcher() {
  local code=0
  $process_wrapper --timeout=1 --kill_delay=10 --stdout=$OUT --stderr=$ERR /bin/sh -c \
    'trap "echo later; exit 0" INT TERM ALRM; sleep 10' &> $TEST_log || code=$?
  assert_equals "${EXIT_STATUS_SIGALRM}" "$code"
  assert_stdout "later"
}

function test_basic_timeout() {
  # Fix: "code" was used without being declared local and without a default,
  # which leaked it into the global scope.
  local code=0
  $process_wrapper --timeout=1 --kill_delay=2 --stdout=$OUT --stderr=$ERR /bin/sh -c \
    "echo before; sleep 10; echo after" &> $TEST_log || code=$?
  assert_equals "${EXIT_STATUS_SIGALRM}" "$code"
  assert_stdout "before"
}

# Tests that the timeout causes the process to receive a SIGTERM, but with the
# process exiting on its own without the need for a SIGKILL. To make sure that
# this is the case, we pass a very large kill delay to cause the outer test to
# fail if we violate this expectation.
function test_timeout_grace() {
  local code=0
  # Fix: the signal name was misspelled "ARLM"; the correct name is ALRM.
  $process_wrapper --timeout=1 --kill_delay=100000 --stdout=$OUT --stderr=$ERR \
    /bin/sh -c \
    'trap "echo ignoring signal" INT TERM ALRM; \
     for i in $(seq 5); do sleep 1; done; echo after' \
    &> $TEST_log || code=$?
  assert_equals "${EXIT_STATUS_SIGALRM}" "$code"
  assert_stdout 'ignoring signal
after'
}

# Tests that the timeout causes the process to receive a SIGTERM and waits until
# the process has exited on its own, even if that takes a little bit of time. To
# make sure that this is the case, we pass a very large kill delay to cause the
# outer test to fail if we violate this expectation.
#
# In the past, even though we would terminate the process quickly, we would
# get stuck until the kill delay passed (because we'd be stuck waiting for a
# zombie process without us actually collecting it). So this is a regression
# test for that subtle bug.
function test_timeout_exits_as_soon_as_process_terminates() {
  local code=0
  # Fix: the signal name was misspelled "ARLM"; the correct name is ALRM.
  $process_wrapper --timeout=1 --kill_delay=100000 --stdout=$OUT --stderr=$ERR \
    /bin/sh -c \
    'trap "" INT TERM ALRM; \
     for i in $(seq 5); do echo sleeping $i; sleep 1; done' \
    &> $TEST_log || code=$?
  assert_equals "${EXIT_STATUS_SIGALRM}" "$code"
  assert_stdout 'sleeping 1
sleeping 2
sleeping 3
sleeping 4
sleeping 5'
}

# Tests that the timeout causes the process to receive a SIGTERM first and a
# SIGKILL later once a kill delay has passed without the process exiting on
# its own. We make the process get stuck indefinitely until killed, and we do
# this with individual calls to sleep instead of a single one to ensure that a
# single termination of the sleep subprocess doesn't cause us to spuriously
# exit and thus pass this test.
function test_timeout_kill() {
  local code=0
  # Fix: the signal name was misspelled "ARLM"; the correct name is ALRM.
  $process_wrapper --timeout=1 --kill_delay=5 --stdout=$OUT --stderr=$ERR \
    /bin/sh -c \
    'trap "echo ignoring signal" INT TERM ALRM; \
     while :; do sleep 1; done; echo after' \
    &> $TEST_log || code=$?
  assert_equals "${EXIT_STATUS_SIGALRM}" "$code"
  assert_stdout 'ignoring signal'
}

# Tests that sending a SIGTERM causes the process to receive such SIGTERM if
# graceful SIGTERM handling is enabled, but with the process exiting on its own
# without the need for a SIGKILL. To make sure that this is the case, we pass a
# very large kill delay to cause the outer test to fail if we violate this
# expectation.
function test_sigterm_grace() {
  # Fix: the signal name was misspelled "ARLM"; the correct name is ALRM.
  $process_wrapper --graceful_sigterm --kill_delay=100000 \
    --stdout=$OUT --stderr=$ERR \
    /bin/sh -c \
    'trap "echo ignoring signal" INT TERM ALRM; \
     for i in $(seq 5); do sleep 1; done; echo after' \
    &> $TEST_log &
  local pid=$!
  sleep 1
  kill -TERM "${pid}"
  local code=0
  wait "${pid}" || code=$?
  assert_equals "${EXIT_STATUS_SIGTERM}" "$code"
  assert_stdout 'ignoring signal
after'
}

# Tests that sending a SIGTERM causes the process to receive such SIGTERM if
# graceful SIGTERM handling is enabled and waits until the process has exited
# on its own, even if that takes a little bit of time. To make sure that this is
# the case, we pass a very large kill delay to cause the outer test to fail if
# we violate this expectation.
#
# In the past, even though we would terminate the process quickly, we would
# get stuck until the kill delay passed (because we'd be stuck waiting for a
# zombie process without us actually collecting it). So this is a regression
# test for that subtle bug.
function test_sigterm_exits_as_soon_as_process_terminates() {
  # Fix: the signal name was misspelled "ARLM"; the correct name is ALRM.
  $process_wrapper --graceful_sigterm --kill_delay=100000 \
    --stdout=$OUT --stderr=$ERR \
    /bin/sh -c \
    'trap "" INT TERM ALRM; \
     for i in $(seq 5); do echo sleeping $i; sleep 1; done' \
    &> $TEST_log &
  local pid=$!
  sleep 1
  kill -TERM "${pid}"
  local code=0
  wait "${pid}" || code=$?
  assert_equals "${EXIT_STATUS_SIGTERM}" "$code"
  assert_stdout 'sleeping 1
sleeping 2
sleeping 3
sleeping 4
sleeping 5'
}

# Tests that sending a SIGTERM causes the process to receive such SIGTERM if
# graceful SIGTERM handling is enabled and a SIGKILL later once a kill delay has
# passed without the process exiting on its own. We make the process get stuck
# indefinitely until killed, and we do this with individual calls to sleep
# instead of a single one to ensure that a single termination of the sleep
# subprocess doesn't cause us to spuriously exit and thus pass this test.
function test_sigterm_kill() {
  # Fix: the signal name was misspelled "ARLM"; the correct name is ALRM.
  $process_wrapper --graceful_sigterm --kill_delay=5 \
    --stdout=$OUT --stderr=$ERR \
    /bin/sh -c \
    'trap "echo ignoring signal" INT TERM ALRM; \
     while :; do sleep 1; done; echo after' \
    &> $TEST_log &
  local pid=$!
  sleep 1
  kill -TERM "${pid}"
  local code=0
  wait "${pid}" || code=$?
  assert_equals "${EXIT_STATUS_SIGTERM}" "$code"
  assert_stdout 'ignoring signal'
}

function test_execvp_error_message() {
  local code=0
  $process_wrapper --stdout=$OUT --stderr=$ERR /bin/notexisting &> $TEST_log || code=$?
  assert_equals 1 "$code"
  assert_contains "\"execvp(/bin/notexisting, ...)\": No such file or directory" "$ERR"
}

# Runs the CPU-time spender under the process wrapper and asserts that the
# measured user/system times fall within the given [low, high] ranges.
function assert_process_wrapper_exec_time() {
  local user_time_low="$1"; shift
  local user_time_high="$1"; shift
  local sys_time_low="$1"; shift
  local sys_time_high="$1"; shift

  local local_tmp="$(mktemp -d "${OUT_DIR}/assert_process_wrapper_timeXXXXXX")"
  local stdout_path="${local_tmp}/stdout"
  local stderr_path="${local_tmp}/stderr"
  local stats_out_path="${local_tmp}/statsfile"
  local stats_out_decoded_path="${local_tmp}/statsfile.decoded"

  # Wrapped process will be terminated after 100 seconds if not self terminated.
  local code=0
  "${process_wrapper}" \
      --timeout=100 \
      --kill_delay=2 \
      --stdout="${stdout_path}" \
      --stderr="${stderr_path}" \
      --stats="${stats_out_path}" \
      "${CPU_TIME_SPENDER}" "${user_time_low}" "${sys_time_low}" \
      &> "${TEST_log}" || code="$?"
  sed -e 's,^subprocess stdout: ,,' "${stdout_path}" >>"${TEST_log}"
  sed -e 's,^subprocess stderr: ,,' "${stderr_path}" >>"${TEST_log}"
  assert_equals 0 "${code}"

  assert_execution_time_in_range \
      "${user_time_low}" \
      "${user_time_high}" \
      "${sys_time_low}" \
      "${sys_time_high}" \
      "${stats_out_path}"
}

function test_stats_high_user_time() {
  assert_process_wrapper_exec_time 10 19 0 9
}

function test_stats_high_system_time() {
  assert_process_wrapper_exec_time 0 9 10 19
}

function test_stats_high_user_time_and_high_system_time() {
  assert_process_wrapper_exec_time 10 25 10 25
}

run_suite "process-wrapper"
<?php

namespace Alcaeus\MongoDbAdapter\Tests\Mongo;

use Alcaeus\MongoDbAdapter\Tests\TestCase;

/**
 * Tests for the \MongoClient adapter: database/collection selection,
 * host listing, read preference, write concern and database listing.
 *
 * @author alcaeus <alcaeus@alcaeus.org>
 */
class MongoClientTest extends TestCase
{
    // The client must be serializable (it holds no unserializable resources).
    public function testSerialize()
    {
        $this->assertInternalType('string', serialize($this->getClient()));
    }

    public function testGetDb()
    {
        $client = $this->getClient();
        $db = $client->selectDB('mongo-php-adapter');
        $this->assertInstanceOf('\MongoDB', $db);
        $this->assertSame('mongo-php-adapter', (string) $db);
    }

    public function testSelectDBWithEmptyName()
    {
        $this->setExpectedException('Exception', 'Database name cannot be empty');
        $this->getClient()->selectDB('');
    }

    public function testSelectDBWithInvalidName()
    {
        $this->setExpectedException('Exception', 'Database name contains invalid characters');
        $this->getClient()->selectDB('/');
    }

    // Property access ($client->{'db-name'}) must behave like selectDB().
    public function testGetDbProperty()
    {
        $client = $this->getClient();
        $db = $client->{'mongo-php-adapter'};
        $this->assertInstanceOf('\MongoDB', $db);
        $this->assertSame('mongo-php-adapter', (string) $db);
    }

    public function testGetCollection()
    {
        $client = $this->getClient();
        $collection = $client->selectCollection('mongo-php-adapter', 'test');
        $this->assertInstanceOf('MongoCollection', $collection);
        $this->assertSame('mongo-php-adapter.test', (string) $collection);
    }

    // getHosts() keys are of the form "host:port;replSetName;.;pid".
    public function testGetHosts()
    {
        $client = $this->getClient();
        $hosts = $client->getHosts();
        $this->assertArraySubset(
            [
                'localhost:27017;-;.;' . getmypid() => [
                    'host' => 'localhost',
                    'port' => 27017,
                    'health' => 1,
                    'state' => 0,
                ],
            ],
            $hosts
        );
    }

    // Default read preference is primary; setReadPreference() stores tag sets.
    public function testReadPreference()
    {
        $client = $this->getClient();

        $this->assertSame(['type' => \MongoClient::RP_PRIMARY], $client->getReadPreference());
        $this->assertTrue($client->setReadPreference(\MongoClient::RP_SECONDARY, [['a' => 'b']]));
        $this->assertSame(['type' => \MongoClient::RP_SECONDARY, 'tagsets' => [['a' => 'b']]], $client->getReadPreference());
    }

    public function testWriteConcern()
    {
        $client = $this->getClient();

        $this->assertTrue($client->setWriteConcern('majority', 100));
        $this->assertSame(['w' => 'majority', 'wtimeout' => 100], $client->getWriteConcern());
    }

    // listDBs() must include the database we just wrote to, with the
    // name/empty/sizeOnDisk keys per entry.
    public function testListDBs()
    {
        $document = ['foo' => 'bar'];
        $this->getCollection()->insert($document);
        $databases = $this->getClient()->listDBs();
        $this->assertSame(1.0, $databases['ok']);
        $this->assertArrayHasKey('totalSize', $databases);
        $this->assertArrayHasKey('databases', $databases);
        foreach ($databases['databases'] as $database) {
            $this->assertArrayHasKey('name', $database);
            $this->assertArrayHasKey('empty', $database);
            $this->assertArrayHasKey('sizeOnDisk', $database);
            if ($database['name'] == 'mongo-php-adapter') {
                $this->assertFalse($database['empty']);
                return;
            }
        }

        $this->fail('Could not find mongo-php-adapter database in list');
    }

    // A connection string without the "mongodb://" prefix must be accepted.
    public function testNoPrefixUri()
    {
        $client = $this->getClient(null, 'localhost');
        $this->assertNotNull($client);
    }
}
# frozen_string_literal: true require "rails_helper" RSpec.describe HomeController, type: :controller do let!(:proposed_event) { create :event, :proposed } let!(:past_event) { create :event, :scheduled, starts_at: 1.month.ago } let!(:future_event) { create :event, :scheduled, starts_at: 1.month.from_now } let!(:todays_event) { create :event, :scheduled, starts_at: Time.zone.today } describe "#index" do before do get :index end it { is_expected.to respond_with 200 } it "does not get proposed events" do expect(assigns(:past_events)).not_to include proposed_event expect(assigns(:future_events)).not_to include proposed_event end it "gets past events" do expect(assigns(:past_events)).to include past_event expect(assigns(:past_events)).not_to include future_event expect(assigns(:past_events)).not_to include todays_event end it "gets future events" do expect(assigns(:future_events)).to include future_event expect(assigns(:future_events)).not_to include past_event expect(assigns(:future_events)).not_to include todays_event end it "gets today's events" do expect(assigns(:todays_event)).not_to include future_event expect(assigns(:todays_event)).not_to include past_event expect(assigns(:todays_event)).to include todays_event end end end
module Days.Day09 where

import qualified AoC
import Data.Bifunctor (Bifunctor (first))
import Data.List (tails)

-- | All length-@m@ sliding windows of a list (via @take m . tails@ zipped
-- column-wise back together).
windows :: Int -> [a] -> [[a]]
windows m = foldr (zipWith (:)) (repeat []) . take m . tails

-- | True when some pair of elements of @xs@ sums to @n@.
-- NOTE(review): @x@ and @y@ are drawn independently from @xs@, so an element
-- paired with itself also counts (n == 2*x) — confirm this matches the
-- puzzle's "two different numbers" requirement.
validNum :: (Eq a, Num a) => a -> [a] -> Bool
validNum n xs = or [True | x <- xs, y <- xs, x + y == n]

-- | Part A: first number after the length-@n@ preamble that is not the sum of
-- two of the @n@ numbers immediately preceding it.
partA :: [Int] -> Int -> Int
partA nums n = fst . head . filter (not . snd) $ zip (map fst xs) (map (uncurry validNum) xs)
  where
    -- pairs of (candidate number, its n-element lookback window)
    xs = zipWith (curry (first head)) (windows 1 (drop n nums)) (windows n nums)

-- | Part B: among contiguous runs (length >= 2) summing to @n@, return
-- minimum + maximum of the matching run(s).
partB :: [Int] -> Int -> Int
partB nums n = sum [minimum ns, maximum ns]
  where
    ns = concat $ filter (not . null) $ concatMap s xs
    -- every window of every length from 2 up, each tagged with the target n
    xs = zip (repeat n) (map (`windows` nums) [2 .. length nums])
    -- keep only the windows whose sum hits the target
    s (y, ys) = map fst $ filter snd $ map (\x -> (x, sum x == y)) ys

main :: IO ()
main = do
  input <- map read . lines <$> readFile (AoC.inputName 9)
  let invalid = partA input 25
  print invalid
  print (partB input invalid)
#! /bin/bash # # Run the docker container. Stop any previously running copies. # ./stop.sh relex-plain docker run --rm --name="relex-plain" -p 3333:3333 \ -w /home/Downloads/relex-master opencog/relex /bin/sh plain-text-server.sh clear
package com.ricardomiranda.shop

import org.scalatest.{Matchers, WordSpec}

/**
 * Specs for the concrete Billing implementations (Regular, TwoForOne).
 *
 * Fix: test descriptions spelled "quanty" instead of "quantity"; several
 * x_or_more descriptions in BillingFactorySpec also stated the wrong expected
 * total (e.g. "Compute 25.00" while asserting 75.00). Descriptions now match
 * the assertions. No assertion values were changed.
 */
class BillingSpec extends WordSpec with Matchers {
  "Regular Billing constructor" should {
    "Create a Regular Billing given correct parameters" in {
      val actual: Regular = Regular(code = "PEN")
      assert(actual.isInstanceOf[Regular])
    }

    "Has the code stored" in {
      val regular: Regular = Regular(code = "PEN")
      val actual: String = regular.code
      val expected: String = "PEN"
      assert(expected.equals(actual))
    }
  }

  "Compute bill for a regular code" should {
    "Compute 0.00 if quantity and price are 0" in {
      val price: Double = 0.00
      val quantity: Long = 0
      val regular: Regular = Regular(code = "PEN")
      val actual: Double = regular.computeBill(price, quantity)
      val expected: Double = 0d
      assert(expected.equals(actual))
    }

    "Compute 10.00 if quantity is 2 and price 5.00" in {
      val price: Double = 5.00
      val quantity: Long = 2
      val regular: Regular = Regular(code = "PEN")
      val actual: Double = regular.computeBill(price, quantity)
      val expected: Double = 10d
      assert(expected.equals(actual))
    }
  }

  "TwoForOne Billing constructor" should {
    "Create a TwoForOne Billing given correct parameters" in {
      val actual: TwoForOne = TwoForOne(code = "PEN")
      assert(actual.isInstanceOf[TwoForOne])
    }

    "Has the code stored" in {
      val twoForOne: TwoForOne = TwoForOne(code = "PEN")
      val actual: String = twoForOne.code
      val expected: String = "PEN"
      assert(expected.equals(actual))
    }
  }

  "Compute bill for a twoForOne code" should {
    "Compute 0.00 if quantity and price are 0" in {
      val price: Double = 0.00
      val quantity: Long = 0
      val twoForOne: TwoForOne = TwoForOne(code = "PEN")
      val actual: Double = twoForOne.computeBill(price, quantity)
      val expected: Double = 0d
      assert(expected.equals(actual))
    }

    "Compute 5.00 if quantity is 2 and price 5.00" in {
      val price: Double = 5.00
      val quantity: Long = 2
      val twoForOne: TwoForOne = TwoForOne(code = "PEN")
      val actual: Double = twoForOne.computeBill(price, quantity)
      val expected: Double = 5d
      assert(expected.equals(actual))
    }
  }
}

/**
 * Specs for BillingFactory: creation by keyword ("regular", "two_for_one",
 * "x_or_more") and billing computation through the factory-created instances.
 */
class BillingFactorySpec extends WordSpec with Matchers {
  "Billing Factory" should {
    "Create a Regular Billing if keyword regular is given" in {
      val actual: Billing = BillingFactory(billingType = "regular", code = "PEN")
      assert(actual.isInstanceOf[Regular])
    }

    "Create a TwoForOne Billing if keyword two_for_one is given" in {
      val actual: Billing = BillingFactory(billingType = "two_for_one", code = "PEN")
      assert(actual.isInstanceOf[TwoForOne])
    }

    "Create a XOrMore Billing if keyword x_or_more is given" in {
      val actual: Billing = BillingFactory(billingType = "x_or_more", code = "PEN", promoPrice = 22.00, x = 2)
      assert(actual.isInstanceOf[XOrMore])
    }
  }

  "Compute bill for a regular code" should {
    "Compute 0.00 if quantity and price are 0" in {
      val price: Double = 0.00
      val quantity: Long = 0
      val billing: Billing = BillingFactory(billingType = "regular", code = "PEN")
      val actual: Double = billing.computeBill(price, quantity)
      val expected: Double = 0d
      assert(expected.equals(actual))
    }

    "Compute 10.00 if quantity is 2 and price 5.00" in {
      val price: Double = 5.00
      val quantity: Long = 2
      val billing: Billing = BillingFactory(billingType = "regular", code = "PEN")
      val actual: Double = billing.computeBill(price, quantity)
      val expected: Double = 10d
      assert(expected.equals(actual))
    }
  }

  "Compute bill for a two_for_one code" should {
    "Compute 0.00 if quantity and price are 0" in {
      val price: Double = 0.00
      val quantity: Long = 0
      val billing: Billing = BillingFactory(billingType = "two_for_one", code = "PEN")
      val actual: Double = billing.computeBill(price, quantity)
      val expected: Double = 0d
      assert(expected.equals(actual))
    }

    "Compute 5.00 if quantity is 2 and price 5.00" in {
      val price: Double = 5.00
      val quantity: Long = 2
      val billing: Billing = BillingFactory(billingType = "two_for_one", code = "PEN")
      val actual: Double = billing.computeBill(price, quantity)
      val expected: Double = 5.00
      assert(expected.equals(actual))
    }
  }

  "XorMore Billing constructor" should {
    "Create a XOrMore Billing given correct parameters" in {
      val actual: XOrMore = XOrMore(code = "PEN", promoPrice = 22.00, x = 2)
      assert(actual.isInstanceOf[XOrMore])
    }

    "Has the code stored" in {
      val xOrMore: XOrMore = XOrMore(code = "PEN", promoPrice = 22.00, x = 2)
      val actual: String = xOrMore.code
      val expected: String = "PEN"
      assert(expected.equals(actual))
    }
  }

  "Compute bill for a x_or_more code" should {
    "Compute 0.00 if quantity and price are 0" in {
      val price: Double = 0.00
      val quantity: Long = 0
      val billing: Billing = BillingFactory(billingType = "x_or_more", code = "PEN", promoPrice = 22.00, x = 2)
      val actual: Double = billing.computeBill(price, quantity)
      val expected: Double = 0d
      assert(expected.equals(actual))
    }

    "Compute 25.00 if quantity is 1, price 25.00 and promoPrice is 22.00" in {
      val price: Double = 25.00
      val quantity: Long = 1
      val billing: Billing = BillingFactory(billingType = "x_or_more", code = "PEN", promoPrice = 22.00, x = 4)
      val actual: Double = billing.computeBill(price, quantity)
      val expected: Double = 25.00
      assert(expected.equals(actual))
    }

    "Compute 50.00 if quantity is 2, price 25.00 and promoPrice is 22.00" in {
      val price: Double = 25.00
      val quantity: Long = 2
      val billing: Billing = BillingFactory(billingType = "x_or_more", code = "PEN", promoPrice = 22.00, x = 4)
      val actual: Double = billing.computeBill(price, quantity)
      val expected: Double = 50.00
      assert(expected.equals(actual))
    }

    "Compute 75.00 if quantity is 3, price 25.00 and promoPrice is 22.00" in {
      val price: Double = 25.00
      val quantity: Long = 3
      val billing: Billing = BillingFactory(billingType = "x_or_more", code = "PEN", promoPrice = 22.00, x = 4)
      val actual: Double = billing.computeBill(price, quantity)
      val expected: Double = 75.00
      assert(expected.equals(actual))
    }

    "Compute 88.00 if quantity is 4, price 25.00 and promoPrice is 22.00" in {
      val price: Double = 25.00
      val quantity: Long = 4
      val billing: Billing = BillingFactory(billingType = "x_or_more", code = "PEN", promoPrice = 22.00, x = 4)
      val actual: Double = billing.computeBill(price, quantity)
      val expected: Double = 88.00
      assert(expected.equals(actual))
    }

    "Compute 220.00 if quantity is 10, price 25.00 and promoPrice is 22.00" in {
      val price: Double = 25.00
      val quantity: Long = 10
      val billing: Billing = BillingFactory(billingType = "x_or_more", code = "PEN", promoPrice = 22.00, x = 4)
      val actual: Double = billing.computeBill(price, quantity)
      val expected: Double = 220.00
      assert(expected.equals(actual))
    }

    "Compute 330.00 if quantity is 15, price 25.00 and promoPrice is 22.00" in {
      val price: Double = 25.00
      val quantity: Long = 15
      val billing: Billing = BillingFactory(billingType = "x_or_more", code = "PEN", promoPrice = 22.00, x = 4)
      val actual: Double = billing.computeBill(price, quantity)
      val expected: Double = 330.00
      assert(expected.equals(actual))
    }
  }
}
package eu.supersede.integration.api.pubsub.evolution;

import javax.jms.MessageListener;

import eu.supersede.integration.api.pubsub.iTopicSubscriber;

/**
 * Topic subscriber for evolution alert messages.
 * <p>
 * Offers two consumption styles for each subscription mode:
 * callback-based (caller supplies a {@link MessageListener}) and
 * pull-based (an {@link EvolutionAlertMessageListener} is created and returned).
 */
public interface iEvolutionSubscriber extends iTopicSubscriber{
	// Evolution alert subscription operations

	/**
	 * Subscribes the given listener to evolution alerts and keeps the
	 * subscription open indefinitely.
	 *
	 * @param messageListener callback invoked for each incoming alert
	 */
	void createEvolutionAlertSubscriptionAndKeepListening(MessageListener messageListener);

	/**
	 * Subscribes the given listener to evolution alerts and closes the
	 * subscription after the given time has elapsed.
	 *
	 * @param messageListener    callback invoked for each incoming alert
	 * @param timeInMilliseconds how long to keep the subscription open
	 * @throws InterruptedException if interrupted while waiting for the timeout
	 */
	void createEvolutionAlertSubscriptionAndCloseAfterAWhile(MessageListener messageListener,
			int timeInMilliseconds) throws InterruptedException;

	/**
	 * Creates a subscription that keeps listening indefinitely.
	 *
	 * @return the listener holding the received evolution alerts
	 */
	EvolutionAlertMessageListener createEvolutionAlertSubscriptionAndKeepListening();

	/**
	 * Creates a subscription that is closed after the given time has elapsed.
	 *
	 * @param timeInMilliseconds how long to keep the subscription open
	 * @return the listener holding the received evolution alerts
	 * @throws InterruptedException if interrupted while waiting for the timeout
	 */
	EvolutionAlertMessageListener createEvolutionAlertSubscriptionAndCloseAfterAWhile(
			int timeInMilliseconds) throws InterruptedException;
}
#![allow(dead_code)]
#![warn(clippy::unused_io_amount)]

// Lint-test fixture for `clippy::unused_io_amount`: each function below
// deliberately discards the `usize` returned by `read`/`write` (which may be
// a partial amount) so the lint has something to fire on. Do not "fix" the
// calls to `read_exact`/`write_all` — that would defeat the fixture.

use std::io::{self, Read};

// Discards the io amount through the `?` operator.
fn question_mark<T: io::Read + io::Write>(s: &mut T) -> io::Result<()> {
    s.write(b"test")?;
    let mut buf = [0u8; 4];
    s.read(&mut buf)?;
    Ok(())
}

// Discards the io amount through `.unwrap()`.
fn unwrap<T: io::Read + io::Write>(s: &mut T) {
    s.write(b"test").unwrap();
    let mut buf = [0u8; 4];
    s.read(&mut buf).unwrap();
}

// Same pattern with the vectored io variants.
fn vectored<T: io::Read + io::Write>(s: &mut T) -> io::Result<()> {
    s.read_vectored(&mut [io::IoSliceMut::new(&mut [])])?;
    s.write_vectored(&[io::IoSlice::new(&[])])?;
    Ok(())
}

// Discards the io amount through `.ok()?`.
fn ok(file: &str) -> Option<()> {
    let mut reader = std::fs::File::open(file).ok()?;
    let mut result = [0u8; 0];
    reader.read(&mut result).ok()?;
    Some(())
}

// Discards the io amount through an `or_else` that re-wraps the error.
#[allow(clippy::redundant_closure)]
#[allow(clippy::bind_instead_of_map)]
fn or_else(file: &str) -> io::Result<()> {
    let mut reader = std::fs::File::open(file)?;
    let mut result = [0u8; 0];
    reader.read(&mut result).or_else(|err| Err(err))?;
    Ok(())
}

#[derive(Debug)]
enum Error {
    Kind,
}

// Discards the io amount through `.or(Err(..))?`.
fn or(file: &str) -> Result<(), Error> {
    let mut reader = std::fs::File::open(file).unwrap();
    let mut result = [0u8; 0];
    reader.read(&mut result).or(Err(Error::Kind))?;
    Ok(())
}

// Discards the io amount through a chain of combinators ending in `expect`.
fn combine_or(file: &str) -> Result<(), Error> {
    let mut reader = std::fs::File::open(file).unwrap();
    let mut result = [0u8; 0];
    reader
        .read(&mut result)
        .or(Err(Error::Kind))
        .or(Err(Error::Kind))
        .expect("error");
    Ok(())
}

fn main() {}
<?php
/**
 * Legacy mysqli wrapper exposing basic CRUD helpers.
 *
 * NOTE(review): every query helper interpolates table names, column names
 * and WHERE clauses directly into the SQL string. filtering() escapes the
 * *whole* statement with mysqli_real_escape_string, which cannot protect
 * identifiers or clause fragments — prefer prepared statements for any
 * externally supplied values.
 *
 * NOTE(review): dbconnect() opens a brand-new connection on every call
 * (including indirectly from filtering()), and filtering() is declared
 * private but invoked via the static-style `dbc::filtering(...)` — both
 * look like candidates for cleanup; confirm callers before changing.
 */
class dbc{
	protected $host;
	protected $username;
	protected $password;
	protected $dbname;
	protected $conn;

	function __construct($config){
		// extract() imports $host/$username/$password/$dbname from the
		// config array into this scope.
		// NOTE(review): extract() returns the *count* of variables set;
		// passing that integer to filtering() has no effect on the values.
		dbc::filtering(extract($config));
		$this->host = $host;
		$this->username = $username;
		$this->password = $password;
		$this->dbname = $dbname;
	}

	// Opens a new utf8 connection and stores/returns the handle.
	protected function dbconnect(){
		$this->conn = mysqli_connect($this->host,$this->username,$this->password,$this->dbname);
		$this->conn->set_charset("utf8");
		mysqli_query($this->conn,"SET CHARACTER_SET utf8;");
		mysqli_query($this->conn,"SET NAMES utf8;");
		return $this->conn;
	}

	// INSERT a raw VALUES(...) fragment into $table.
	public function insert($table,$data){
		$sql = dbc::filtering("INSERT into $this->dbname.$table VALUES ($data);");
		return $this->set_to_database($sql);
	}

	// UPDATE rows matching a raw WHERE clause.
	public function update($table,$data,$clause){
		$sql = dbc::filtering("UPDATE $this->dbname.$table SET $data WHERE $clause;");
		return $this->set_to_database($sql);
	}

	// DELETE rows where $table.$column equals $clause.
	public function delete($table,$column,$clause){
		$sql = dbc::filtering("DELETE FROM $this->dbname.$table WHERE $table.$column = '$clause';");
		return $this->set_to_database($sql);
	}

	// DELETE rows matching an arbitrary raw WHERE clause.
	public function deleteand($table,$clause){
		$sql = dbc::filtering("DELETE FROM $this->dbname.$table WHERE $clause;");
		return $this->set_to_database($sql);
	}

	// SELECT chosen columns with a raw WHERE clause.
	public function select($selection,$table,$clause){
		$sql = dbc::filtering("select $selection from $table where $clause;");
		return $this->get_from_database($sql);
	}

	// SELECT * from a table, optionally with a raw ORDER BY fragment.
	public function select_all($table,$order=''){
		$sql = dbc::filtering("select * from $this->dbname.$table $order;");
		return $this->get_from_database($sql);
	}

	// INSERT without escaping — used for path values that must not be altered.
	function insertpathes($table,$data){
		$sql="insert into $table VALUES ($data);";
		return $this->set_to_database($sql);
	}

	// SELECT without escaping — companion to insertpathes().
	function selectpathes($selection,$table,$clause){
		$sql ="select $selection from $this->dbname.$table WHERE $clause;";
		return $this->get_from_database($sql);
	}

	// Execute a write statement; returns true on success, otherwise the
	// mysqli error string (note: callers must not treat the return as bool
	// only — a non-empty error string is also truthy).
	function set_to_database($sql){
		if(!mysqli_query($this->dbconnect(),$sql)){
			return mysqli_error(dbc::dbconnect());
		}else{
			return true;
		}
	}

	// Execute a read statement; returns the mysqli result on success,
	// otherwise the mysqli error string.
	function get_from_database($sql){
		if(!$result=mysqli_query($this->dbconnect(),$sql)){
			return mysqli_error(dbc::dbconnect());
		}else{
			return $result;
		}
	}

	// Escape + trim + strip slashes from a value.
	// NOTE(review): opens a new DB connection per call via dbconnect().
	private function filtering($data){
		$data = mysqli_real_escape_string(dbc::dbconnect(),$data);
		$data = trim($data);
		$data = stripslashes($data);
		return $data;
	}
}
using NotaFiscalNet.Core.Interfaces;
using NotaFiscalNet.Core.Validacao;

namespace NotaFiscalNet.Core
{
    /// <summary>
    /// Holds the totals information of the Electronic Invoice (Nota Fiscal Eletrônica).
    /// </summary>
    public sealed class TotalNFe : ISerializavel
    {
        /// <summary>
        /// Returns the ICMS totals.
        /// </summary>
        [NFeField(ID = "W01", FieldName = "ICMSTot")]
        [CampoValidavel(1, ChaveErroValidacao.CampoNaoPreenchido)]
        public TotalICMS ICMS { get; } = new TotalICMS();

        /// <summary>
        /// Returns the ISSQN totals. Optional.
        /// </summary>
        [NFeField(ID = "W17", FieldName = "ISSQNtot", Opcional = true)]
        [CampoValidavel(2, Opcional = true)]
        public TotalISSQN ISSQN { get; } = new TotalISSQN();

        /// <summary>
        /// Returns the Federal Tax Withholdings. Optional.
        /// </summary>
        /// <remarks>
        /// Examples of normative acts that establish mandatory withholding of contributions:
        /// a) IRPJ/CSLL/PIS/COFINS - Withholding at source - amounts received from Federal
        /// Public Bodies: Law No. 9,430 of December 27, 1996, art. 64; Law No. 10,833/2003,
        /// art. 34; as infralegal rules, for example: SRF Normative Instruction No. 480/2004
        /// and Normative Instruction No. 539 of 04/25/2005.
        /// b) Income Tax withheld by the paying source - remuneration of professional
        /// services rendered by a legal entity: Law No. 7,450/85, art. 52.
        /// c) IRPJ, CSLL, COFINS and PIS - services rendered by legal entities - withholding
        /// at source: Law No. 10,833 of 12/29/2003, arts. 30, 31, 32, 35 and 36.
        /// </remarks>
        [NFeField(ID = "W23", FieldName = "retTrib", Opcional = true)]
        [CampoValidavel(3, Opcional = true)]
        public RetencaoTributosFederais RetencaoTributosFederais { get; } = new RetencaoTributosFederais();

        /// <summary>
        /// Returns a value indicating whether the Electronic Invoice is in read-only mode.
        /// </summary>
        /// <remarks>
        /// The Electronic Invoice is in read-only mode when it is instantiated from a
        /// digitally signed file.
        /// </remarks>
        public bool IsReadOnly { get; } = false;

        void ISerializavel.Serializar(System.Xml.XmlWriter writer, INFe nfe)
        {
            writer.WriteStartElement("total");

            // ICMS totals are always emitted.
            ((ISerializavel)ICMS).Serializar(writer, nfe);

            // Optional groups are only emitted when they were actually populated.
            if (ISSQN.Modificado)
                ((ISerializavel)ISSQN).Serializar(writer, nfe);

            if (RetencaoTributosFederais.Modificado)
                ((ISerializavel)RetencaoTributosFederais).Serializar(writer, nfe);

            writer.WriteEndElement(); // end of the 'total' element
        }
    }
}
<?php
/**
 * @copyright 2006-2013, Miles Johnson - http://milesj.me
 * @license https://github.com/milesj/utility/blob/master/license.md
 * @link http://milesj.me/code/cakephp/utility
 */

Configure::write('debug', 2);
Configure::write('Cache.disable', true);

App::uses('BaseInstallShell', 'Utility.Console/Command');
App::uses('ConnectionManager', 'Model');
App::uses('Validation', 'Utility');
App::uses('AppShell', 'Console/Command');

/**
 * Interactive CakePHP shell that upgrades a plugin's database schema one
 * version at a time. Subclasses register the available versions and may
 * define per-version migration callbacks named `to_<digits>()`.
 *
 * Note: to() and versions() call each other — after each successful upgrade
 * the version menu is shown again until the user chooses [E]xit.
 */
abstract class BaseUpgradeShell extends BaseInstallShell {

	/**
	 * Array of completed version upgrades.
	 *
	 * @type array
	 */
	public $complete = array();

	/**
	 * Available upgrade versions.
	 *
	 * @type array
	 */
	public $versions = array();

	/**
	 * Set the list of available upgrade versions.
	 *
	 * @param array $versions map of version => human-readable title
	 * @return BaseUpgradeShell fluent interface
	 */
	public function setVersions(array $versions) {
		$this->versions = $versions;

		return $this;
	}

	/**
	 * Upgrade to a specific version and trigger any migration callbacks.
	 *
	 * Looks up the SQL schema at Config/Schema/Upgrade/<version>.sql inside
	 * the plugin, runs the optional `to_<digits>` callback, then executes
	 * the schema and re-displays the version menu.
	 *
	 * @param int $version
	 * @return bool false when the schema file is missing, true otherwise
	 */
	public function to($version) {
		$this->out(sprintf('<success>Upgrading to %s...</success>', $version));

		$schema = sprintf('%s/Config/Schema/Upgrade/%s.sql', CakePlugin::path($this->plugin), $version);

		if (!file_exists($schema)) {
			$this->err(sprintf('<error>Upgrade schema %s does not exist</error>', basename($schema)));

			return false;
		}

		// Trigger any migration changes (callback name is the version digits only)
		$method = 'to_' . preg_replace('/[^0-9]+/', '', $version);

		if (method_exists($this, $method)) {
			call_user_func(array($this, $method));
		}

		// Execute the schema
		if ($this->executeSchema($schema, false)) {
			$this->complete[] = $version;

			$this->out(sprintf('<success>Upgrade to %s complete</success>', $version));

			// Loop back to the interactive menu
			$this->versions();
		}

		return true;
	}

	/**
	 * Output a list of available versions to upgrade to, prompt for one,
	 * and dispatch to to(). Completed versions are shown checked ([x]) and
	 * are not offered again.
	 */
	public function versions() {
		$this->hr(1);

		$versions = array();

		if ($this->versions) {
			foreach ($this->versions as $version => $title) {
				if (in_array($version, $this->complete)) {
					$this->out('[x] ' . $title);
				} else {
					$this->out(sprintf('[%s] <info>%s</info>', $version, $title));

					$versions[] = $version;
				}
			}
		} else {
			$this->err('<error>No versions found</error>');

			return;
		}

		$this->out('[E]xit');
		$this->out();

		$versions[] = 'E';

		$answer = strtoupper($this->in('Which version do you want to upgrade to?', $versions));

		if ($answer === 'E') {
			exit();
		}

		$this->to($answer);
	}

}
# Provisioning step: install Oracle JDK 8 via the WebUpd8 PPA,
# pre-accepting the Oracle license so the install is non-interactive.
#
# NOTE(review): the webupd8team/java PPA was discontinued upstream, and
# `python-software-properties` was superseded by `software-properties-common`
# on newer Ubuntu releases — confirm against the target distribution.

# Fail fast: abort provisioning if any step below fails instead of
# continuing with a half-configured machine.
set -e

echo "Installing Oracle JDK..."

# Provides add-apt-repository on older Ubuntu releases
sudo apt-get install -y python-software-properties
sudo add-apt-repository -y ppa:webupd8team/java
sudo apt-get update

# Accept the Oracle license up front so the installer does not prompt
echo "oracle-java8-installer shared/accepted-oracle-license-v1-1 select true" | sudo debconf-set-selections
sudo apt-get install -y oracle-java8-installer

# java -version
* [AllFiles.html](AllFiles.html) * [FromInClassDiscussionOfRoutes.html](FromInClassDiscussionOfRoutes.html) * [GoogleSiteAssignment.html](GoogleSiteAssignment.html) * [Ideas.html](Ideas.html) * [Prog270-2016-Student-Sites.html](Prog270-2016-Student-Sites.html) * [Prog270-Resources-2016-Fall.html](Prog270-Resources-2016-Fall.html) * [Prog270-Resources-2016.html](Prog270-Resources-2016.html) * [Prog270-Week01-2015.html](Prog270-Week01-2015.html) * [Prog270-Week01.html](Prog270-Week01.html) * [Prog270-Week02-2015.html](Prog270-Week02-2015.html) * [Prog270-Week02.html](Prog270-Week02.html) * [Prog270-Week03-2015.html](Prog270-Week03-2015.html) * [Prog270-Week03-2016.html](Prog270-Week03-2016.html) * [Prog270-Week04-2015.html](Prog270-Week04-2015.html) * [Prog270-Week04-2016.html](Prog270-Week04-2016.html) * [Prog270-Week05-2015.html](Prog270-Week05-2015.html) * [Prog270-Week05-2016.html](Prog270-Week05-2016.html) * [Prog270-Week06-2015.html](Prog270-Week06-2015.html) * [Prog270-Week06-2016.html](Prog270-Week06-2016.html) * [Prog270-Week07-2015.html](Prog270-Week07-2015.html) * [Prog270-Week07-2016.html](Prog270-Week07-2016.html) * [Prog270-Week08-2015.html](Prog270-Week08-2015.html) * [Prog270-Week08-2016.html](Prog270-Week08-2016.html) * [Prog270-Week09-2015.html](Prog270-Week09-2015.html) * [Prog270-Week09-2016.html](Prog270-Week09-2016.html) * [Prog270-Week10-2015.html](Prog270-Week10-2015.html) * [Prog270-Week10-2016.html](Prog270-Week10-2016.html) * [Prog270-Week11-2015.html](Prog270-Week11-2015.html) * [Prog270-Week11-2016.html](Prog270-Week11-2016.html) * [Prog270Final2014.html](Prog270Final2014.html) * [Prog270Final2016.html](Prog270Final2016.html) * [Prog270Midterm2014.html](Prog270Midterm2014.html) * [Prog270Midterm2016.html](Prog270Midterm2016.html) * [master-list.html](master-list.html)
/*
 * Dedekind Reals - Java Library for computing with Dedekind Reals
 * Copyright (c) 2019 Ivo List
 *
 * This software is distributed under the terms found
 * in file LICENSE.txt that is included with this distribution.
 */

package com.github.comius.reals.newton

import org.junit.Test

import com.github.comius.RoundingContext
import com.github.comius.reals.Context
import com.github.comius.interval.Interval
import com.github.comius.reals.syntax.Const
import com.github.comius.reals.syntax.Integrate
import com.github.comius.reals.newton.AutomaticDifferentiation.Down
import com.github.comius.reals.syntax.Var

/**
 * Exercises the approximate Newton estimator and automatic differentiation.
 *
 * NOTE(review): these tests only print results; the expected values are
 * recorded in the trailing comments but never asserted — consider turning
 * them into assertions so regressions actually fail the build.
 */
class NewtonApproximationsTest {
  import ApproximateNewton._
  import com.github.comius.reals.syntax.Real._

  /** Prints estimate() over [0,4] / [-1,1] for simple inequalities;
   *  the expected interval is in each trailing comment. */
  @Test
  def testEstimate(): Unit = {
    println(estimate(0 < "x" - 1)(new Context(new RoundingContext(10, 10)), "x", Interval(0, 4))) // [1,4]
    println(estimate(0 < Const(1) - "x")(new Context(new RoundingContext(10, 10)), "x", Interval(0, 4))) // [0,1]
    println(estimate(0 < "x" - 3)(new Context(new RoundingContext(10, 10)), "x", Interval(0, 4))) // [3,4]
    println(estimate(0 < Const(3) - "x")(new Context(new RoundingContext(10, 10)), "x", Interval(0, 4))) // [0,3]

    println(estimate(0 < Const(1))(new Context(new RoundingContext(10, 10)), "x", Interval(0, 4))) // [0,4]
    println(estimate(0 < Const(-1))(new Context(new RoundingContext(10, 10)), "x", Interval(0, 4))) // []

    println(estimate(0 < Var("x") * "x")(new Context(new RoundingContext(10, 10)), "x", Interval(-1, 1))) // []
    println(estimate(Var("x") * "x" < 0)(new Context(new RoundingContext(10, 10)), "x", Interval(-1, 1))) // []
  }

  /** Prints the downward-rounded evaluation of ∫₀¹ x·x dx. */
  @Test
  def testIntegrate(): Unit = {
    println("->" + AutomaticDifferentiation.evalr(Integrate("x", 0, 1, Var("x") * "x"))(
      Context(new RoundingContext(10, 10)), Set(), Down)) // ([0,1],0)
    // ([0.000,0.5],0)
  }
}
package com.farmerworking.db.rabbitDb.impl.file;

import com.farmerworking.db.rabbitDb.api.DBComparator;
import com.farmerworking.db.rabbitDb.impl.memtable.InternalEntryComparator;
import com.farmerworking.db.rabbitDb.impl.memtable.InternalKey;

import java.util.ArrayList;
import java.util.List;

/**
 * Tracks the set of table files and answers which files may contain a
 * given internal key, based on each file's [minKey, maxKey] range.
 */
public class FileManager {
    // Orders internal keys using the user-supplied comparator.
    private final InternalEntryComparator comparator;

    // All registered table files, in registration order.
    private final List<FileMetaData> fileIndex;

    public FileManager(DBComparator comparator) {
        this.fileIndex = new ArrayList<>();
        this.comparator = new InternalEntryComparator(comparator);
    }

    /**
     * Registers a new table file.
     *
     * @param fileMetaData metadata (including key range) of the file
     */
    public synchronized void addTableFile(FileMetaData fileMetaData) {
        this.fileIndex.add(fileMetaData);
    }

    /**
     * Returns every file whose key range may contain {@code key}.
     * <p>
     * Range boundaries are inclusive: a key equal to a file's min or max key
     * is stored in that file, so such files are returned. (The previous
     * strict comparison wrongly skipped files whose boundary key matched.)
     *
     * @param key the internal key being looked up
     * @return files with {@code minKey <= key <= maxKey}
     */
    public synchronized List<FileMetaData> filesToLookup(InternalKey key) {
        List<FileMetaData> result = new ArrayList<>();
        for (FileMetaData item : fileIndex) {
            if (comparator.compare(item.getMinKey(), key) > 0) {
                // file's smallest key is beyond the lookup key
                continue;
            }

            if (comparator.compare(item.getMaxKey(), key) < 0) {
                // file's largest key is before the lookup key
                continue;
            }

            result.add(item);
        }

        return result;
    }
}
def root_mean_sqrt_error(labels=None, predictions=None):
    """Build a TensorFlow op computing the root-mean-square error.

    Constructs sqrt(mean((labels - predictions)^2)); the resulting
    sqrt op is named "rmse".
    """
    squared_error = tf.square(tf.subtract(labels, predictions))
    mean_squared_error = tf.reduce_mean(squared_error)
    return tf.sqrt(mean_squared_error, name="rmse")
package main; public class ErrorResponse { private String type; private String title; private String detail; private int status; private String instance; public ErrorResponse(String detail, String instance) { this.type = "http://cs.iit.edu/~virgil/cs445/project/api/problems/data-validation"; this.title = "Your request data didn't pass validation"; this.detail = detail; this.status = 400; this.instance = instance; } }
# ActiveRecord model for a vocabulary term; the word itself is the
# primary key and doc_freq is its document frequency.
#
# NOTE(review): `attr_accessible` and `set_primary_key` are legacy
# Rails (< 4) APIs (`set_primary_key` became `self.primary_key =`,
# `attr_accessible` moved to strong parameters) — confirm the app's
# Rails version before upgrading.
class Term < ActiveRecord::Base
  attr_accessible :doc_freq, :word
  set_primary_key :word
  # Per-document frequency rows, joined on the word string itself.
  has_many :freq_term_in_doc, :foreign_key => "word"
end
#!/bin/bash
# Build the Sphinx HTML documentation: copy the top-level markdown sources
# and static images into docs/source, then run `make html` from docs/.

# Fail fast: without this, a failed `cd docs` would leave `make html`
# running in the wrong directory, and missing source files would be
# silently ignored.
set -e

cp README.md docs/source/README.md
cp Scripts.md docs/source/Scripts.md
cp GUI.md docs/source/GUI.md
cp Contributing.md docs/source/Contributing.md
cp CODE_OF_CONDUCT.md docs/source/CODE_OF_CONDUCT.md
cp _static/*.png docs/source/_static/

cd docs

# Remove stale build output so the HTML is rebuilt from scratch
if [ -d build ]; then
    rm -r build
fi

make html
cd ..
# Playing with Eleventy ## Reference <https://www.11ty.dev/> ## Prerequisites Eleventy v0.11.1 requires Node 8 or newer. ```text $ node --version v12.16.3 ``` ## Getting Started Let's first install eleventy using `npm`: ```text $ npm install -g @11ty/eleventy ... + @11ty/eleventy@0.11.1 added 393 packages from 395 contributors in 74.023s ... ``` Then, we create a simple sample page with an editor: ```text $ touch README.md ``` ```markdown # This is a Title This is a text. ``` And, we can _compile_ it with `eleventy`: ```text $ eleventy Writing _site/README/index.html from ./README.md. Wrote 1 file in 0.39 seconds (v0.11.1) ``` Finally, we can see the result locally: ```text $ eleventy --serve Writing _site/README/index.html from ./README.md. Wrote 1 file in 0.26 seconds (v0.11.1) Watching… [Browsersync] Access URLs: ------------------------------------- Local: http://localhost:8080 External: http://192.168.1.10:8080 ------------------------------------- UI: http://localhost:3001 UI External: http://localhost:3001 ------------------------------------- [Browsersync] Serving files from: _site ``` <http://localhost:8080/README/>
""" pmc ~~~ Simple package metadata compiler. You can use it with package.meta to describe your composer, npm, etc. packages. :copyright: (c) 2013-2016 OctoLab, https://www.octolab.org/ <feedback@octolab.org> :license: MIT """ from .filesystem import File, Watcher from .parser import Parser, YamlParser, ParseError, ValidateError from .processor import Processor, ComposerProcessor __all__ = [ 'File', 'Watcher', 'Parser', 'YamlParser', 'ParseError', 'ValidateError', 'Processor', 'ComposerProcessor', ] __version__ = '1.0.0'
-- | Utility functions for providing random elements to the game.
module Shuffle where

import System.Random

-- | Select an element from a non-empty list with uniform distribution.
-- Fails (in IO) when given an empty list.
randomElement :: [a] -> IO a
randomElement xs
  | n == 0 = fail "randomElement: no elements"
  | otherwise = do
      -- pick a uniform index into the list
      r <- randomRIO (0,n-1)
      return $! xs !! r
  where
    n = length xs

-- | Select an element from a non-empty list with custom distribution.
-- Each pair is (element, weight); an element is chosen with probability
-- proportional to its weight. Fails when the weights do not sum to a
-- positive total.
randomElementDist :: [(a,Int)] -> IO a
randomElementDist dist
  | total <= 0 = fail "randomElementDist: bad distribution"
  | otherwise = do
      r <- randomRIO (1,total)
      return $! select r dist
  where
    total = sum (map snd dist)
    -- Walk the weighted list, subtracting weights until r lands
    -- inside some element's weight span.
    select r ((x,v):xs)
      | r <= v = x
      | otherwise = select (r-v) xs
    -- unreachable because r <= total by construction
    select _ _ = error "select: impossible"
/*
 * Copyright 2021 IceRock MAG Inc. Use of this source code is governed by the Apache 2.0 license.
 */

package dev.icerock.tools.shaper.core

import kotlin.test.Test
import kotlin.test.assertEquals

/**
 * Unit tests for the String case-conversion extensions
 * (camelToSnakeCase / snakeToUpperCamelCase / snakeToLowerCamelCase).
 */
class StringExtTest {
    @Test
    fun `camel to snake case cases`() {
        // Both lowerCamelCase and UpperCamelCase inputs map to the same snake_case.
        assertEquals(
            expected = "test_words_in_camel",
            actual = "testWordsInCamel".camelToSnakeCase()
        )
        assertEquals(
            expected = "test_words_in_camel",
            actual = "TestWordsInCamel".camelToSnakeCase()
        )
    }

    @Test
    fun `snake to upper camel case cases`() {
        assertEquals(
            expected = "TestWordsInCamel",
            actual = "test_words_in_camel".snakeToUpperCamelCase()
        )
    }

    @Test
    fun `snake to lower camel case cases`() {
        assertEquals(
            expected = "testWordsInCamel",
            actual = "test_words_in_camel".snakeToLowerCamelCase()
        )
    }
}
package util

import (
	"github.com/winhtaikaung/Rabbit-Go/rabbit"
)

// search keyword: golang zawgyi to unicode
// https://github.com/winhtaikaung/Rabbit-Go

// Zg2uni converts Burmese text from the legacy Zawgyi encoding to standard
// Unicode by delegating to the Rabbit-Go library.
func Zg2uni(str string) string {
	return rabbit.Zg2uni(str)
}
<?php

namespace App\File\Saver;

/**
 * Interface SaverInterface
 *
 * Contract for persisting raw string content under a given file name.
 */
interface SaverInterface
{
    /**
     * Persists the given content under the given file name.
     *
     * @param string $filename name of the target file
     * @param string $content  raw content to write
     * @return string $path    location where the content was stored
     */
    public function saveToFile($filename, $content);
}
import { Request } from '../request';
import { Response } from '../response';

/**
 * Abstraction over the mechanism used to deliver a {@link Request} and
 * obtain its {@link Response} (e.g. an HTTP client or a test double).
 */
export interface TransportInterface {
  /**
   * Executes the given request, resolving with the resulting response.
   */
  execute(request: Request): Promise<Response>;
}
using Newtonsoft.Json;

namespace Kentico.Kontent.Management.Models.Types;

/// <summary>
/// Represents the limitation for the number of items.
/// </summary>
public class LimitModel
{
    /// <summary>
    /// Specifies the image size or how many times something can be used within the element.
    /// </summary>
    [JsonProperty("value")]
    public int Value { get; set; }

    /// <summary>
    /// Specifies how to apply the <see cref="Value"/> (e.g. as a maximum or an exact count).
    /// </summary>
    [JsonProperty("condition")]
    public LimitType Condition { get; set; }
}
package service

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strconv"
	"strings"

	"github.com/gorilla/websocket"

	"github.com/livekit/protocol/livekit"
	"github.com/livekit/protocol/logger"

	"github.com/livekit/livekit-server/pkg/config"
	"github.com/livekit/livekit-server/pkg/routing"
	"github.com/livekit/livekit-server/pkg/routing/selector"
	"github.com/livekit/livekit-server/pkg/rtc"
	"github.com/livekit/livekit-server/pkg/rtc/types"
	"github.com/livekit/livekit-server/pkg/telemetry/prometheus"
)

// RTCService terminates client signal (WebSocket) connections and bridges
// them to the routing layer.
type RTCService struct {
	router        routing.MessageRouter
	roomAllocator RoomAllocator
	upgrader      websocket.Upgrader
	currentNode   routing.LocalNode
	isDev         bool
	limits        config.LimitConfig // node limits checked before accepting a join
}

// NewRTCService wires up an RTCService from server config and routing components.
func NewRTCService(conf *config.Config, ra RoomAllocator, router routing.MessageRouter, currentNode routing.LocalNode) *RTCService {
	s := &RTCService{
		router:        router,
		roomAllocator: ra,
		upgrader:      websocket.Upgrader{},
		currentNode:   currentNode,
		isDev:         conf.Development,
		limits:        conf.Limit,
	}

	// allow connections from any origin, since script may be hosted anywhere
	// security is enforced by access tokens
	s.upgrader.CheckOrigin = func(r *http.Request) bool {
		return true
	}

	return s
}

// Validate runs the same checks as a real join request and answers
// "success" without upgrading the connection — a pre-flight endpoint.
func (s *RTCService) Validate(w http.ResponseWriter, r *http.Request) {
	_, _, code, err := s.validate(r)
	if err != nil {
		handleError(w, code, err.Error())
		return
	}
	_, _ = w.Write([]byte("success"))
}

// validate authenticates the request from its access-token grants and form
// parameters. On success it returns the room name, the participant init
// info and http.StatusOK; on failure it returns the HTTP status and error
// to report.
func (s *RTCService) validate(r *http.Request) (livekit.RoomName, routing.ParticipantInit, int, error) {
	claims := GetGrants(r.Context())
	// require a claim
	if claims == nil || claims.Video == nil {
		return "", routing.ParticipantInit{}, http.StatusUnauthorized, rtc.ErrPermissionDenied
	}

	onlyName, err := EnsureJoinPermission(r.Context())
	if err != nil {
		return "", routing.ParticipantInit{}, http.StatusUnauthorized, err
	}

	roomName := livekit.RoomName(r.FormValue("room"))
	reconnectParam := r.FormValue("reconnect")
	autoSubParam := r.FormValue("auto_subscribe")
	publishParam := r.FormValue("publish")

	// a token restricted to a single room overrides the requested room
	if onlyName != "" {
		roomName = onlyName
	}

	// this is new connection for existing participant - with publish only permissions
	if publishParam != "" {
		// Make sure grant has CanPublish set
	if claims.Video.CanPublish != nil && *claims.Video.CanPublish == false {
			return "", routing.ParticipantInit{}, http.StatusUnauthorized, rtc.ErrPermissionDenied
		}
		// Make sure by default subscribe is off
		claims.Video.SetCanSubscribe(false)
		claims.Identity += "#" + publishParam
	}

	// reject early when the node already hosting the room is over its limits
	if router, ok := s.router.(routing.Router); ok {
		if foundNode, err := router.GetNodeForRoom(r.Context(), roomName); err == nil {
			if selector.LimitsReached(s.limits, foundNode.Stats) {
				return "", routing.ParticipantInit{}, http.StatusServiceUnavailable, rtc.ErrLimitExceeded
			}
		}
	}

	pi := routing.ParticipantInit{
		Reconnect:     boolValue(reconnectParam),
		Identity:      livekit.ParticipantIdentity(claims.Identity),
		Name:          livekit.ParticipantName(claims.Name),
		AutoSubscribe: true,
		Metadata:      claims.Metadata,
		Hidden:        claims.Video.Hidden,
		Recorder:      claims.Video.Recorder,
		Client:        ParseClientInfo(r.Form),
	}
	if autoSubParam != "" {
		pi.AutoSubscribe = boolValue(autoSubParam)
	}
	pi.Permission = permissionFromGrant(claims.Video)

	return roomName, pi, http.StatusOK, nil
}

// ServeHTTP upgrades a signal request to a WebSocket and pumps messages in
// both directions between the socket and the routing layer until either
// side closes.
func (s *RTCService) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	// reject non websocket requests
	if !websocket.IsWebSocketUpgrade(r) {
		prometheus.ServiceOperationCounter.WithLabelValues("signal_ws", "error", "reject").Add(1)
		w.WriteHeader(404)
		return
	}

	roomName, pi, code, err := s.validate(r)
	if err != nil {
		handleError(w, code, err.Error())
		return
	}

	// create room if it doesn't exist, also assigns an RTC node for the room
	rm, err := s.roomAllocator.CreateRoom(r.Context(), &livekit.CreateRoomRequest{Name: string(roomName)})
	if err != nil {
		prometheus.ServiceOperationCounter.WithLabelValues("signal_ws", "error", "create_room").Add(1)
		handleError(w, http.StatusInternalServerError, err.Error())
		return
	}

	// this needs to be started first *before* using router functions on this node
	connId, reqSink, resSource, err := s.router.StartParticipantSignal(r.Context(), roomName, pi)
	if err != nil {
		prometheus.ServiceOperationCounter.WithLabelValues("signal_ws", "error", "start_signal").Add(1)
		handleError(w, http.StatusInternalServerError, "could not start session: "+err.Error())
		return
	}

	pLogger := rtc.LoggerWithParticipant(
		rtc.LoggerWithRoom(logger.Logger(logger.GetLogger()), roomName, ""),
		pi.Identity,
		"",
	)

	done := make(chan struct{})
	// function exits when websocket terminates, it'll close the event reading off of response sink as well
	defer func() {
		pLogger.Infow("server closing WS connection", "connID", connId)
		reqSink.Close()
		close(done)
	}()

	// upgrade only once the basics are good to go
	conn, err := s.upgrader.Upgrade(w, r, nil)
	if err != nil {
		prometheus.ServiceOperationCounter.WithLabelValues("signal_ws", "error", "upgrade").Add(1)
		pLogger.Warnw("could not upgrade to WS", err)
		handleError(w, http.StatusInternalServerError, err.Error())
		return
	}

	sigConn := NewWSSignalConnection(conn)
	// newer clients speak protobuf; older ones fall back to JSON
	if types.ProtocolVersion(pi.Client.Protocol).SupportsProtobuf() {
		sigConn.useJSON = false
	}

	prometheus.ServiceOperationCounter.WithLabelValues("signal_ws", "success", "").Add(1)
	pLogger.Infow("new client WS connected",
		"connID", connId,
		"roomID", rm.Sid,
	)

	// handle responses: forward routing-layer messages to the websocket
	go func() {
		defer func() {
			// when the source is terminated, this means Participant.Close had been called and RTC connection is done
			// we would terminate the signal connection as well
			_ = conn.Close()
		}()
		defer rtc.Recover()
		for {
			select {
			case <-done:
				return
			case msg := <-resSource.ReadChan():
				if msg == nil {
					pLogger.Infow("source closed connection",
						"connID", connId)
					return
				}
				res, ok := msg.(*livekit.SignalResponse)
				if !ok {
					pLogger.Errorw("unexpected message type", nil,
						"type", fmt.Sprintf("%T", msg),
						"connID", connId)
					continue
				}
				if err = sigConn.WriteResponse(res); err != nil {
					pLogger.Warnw("error writing to websocket", err)
					return
				}
			}
		}
	}()

	// handle incoming requests from websocket
	for {
		req, err := sigConn.ReadRequest()
		// normal closure
		if err != nil {
			if err == io.EOF || strings.HasSuffix(err.Error(), "use of closed network connection") ||
				websocket.IsCloseError(err, websocket.CloseAbnormalClosure, websocket.CloseGoingAway, websocket.CloseNormalClosure, websocket.CloseNoStatusReceived) {
				return
			} else {
				pLogger.Errorw("error reading from websocket", err)
				return
			}
		}
		if err := reqSink.WriteMessage(req); err != nil {
			pLogger.Warnw("error writing to request sink", err,
				"connID", connId)
		}
	}
}

// ParseClientInfo extracts SDK/OS/browser metadata from the signal
// request's query parameters.
func ParseClientInfo(values url.Values) *livekit.ClientInfo {
	ci := &livekit.ClientInfo{}
	if pv, err := strconv.Atoi(values.Get("protocol")); err == nil {
		ci.Protocol = int32(pv)
	}
	sdkString := values.Get("sdk")
	switch sdkString {
	case "js":
		ci.Sdk = livekit.ClientInfo_JS
	case "ios", "swift":
		ci.Sdk = livekit.ClientInfo_SWIFT
	case "android":
		ci.Sdk = livekit.ClientInfo_ANDROID
	case "flutter":
		ci.Sdk = livekit.ClientInfo_FLUTTER
	case "go":
		ci.Sdk = livekit.ClientInfo_GO
	case "unity":
		ci.Sdk = livekit.ClientInfo_UNITY
	}

	ci.Version = values.Get("version")
	ci.Os = values.Get("os")
	ci.OsVersion = values.Get("os_version")
	ci.Browser = values.Get("browser")
	ci.BrowserVersion = values.Get("browser_version")
	ci.DeviceModel = values.Get("device_model")
	return ci
}
--- layout: agenda2 date: March 5th - 7th, 2012 permalink: meetings/2012/03/agenda year: "2012" month: "03" ---
package src var () type ArgsNewConnection struct { Cid string Uid string Hostid string } type ResponseNewConnection struct { Connected bool RemoteHostId string } type ArgsNewConnectionPacket struct { Cid string Uid string Hostid string Packet []byte }
using System;
using System.IO;

namespace CIEID
{
    /// <summary>
    /// Log severities; higher values are more severe. NONE disables logging.
    /// </summary>
    public enum LogLevel
    {
        NONE = 0,
        DEBUG = 1,
        INFO = 2,
        ERROR = 3,
    };

    /// <summary>
    /// Minimal file logger: messages at or above <see cref="Level"/> are
    /// appended to %PROGRAMDATA%\CIEPKI\CIEID_&lt;date&gt;.log, falling back
    /// to the console if the file cannot be written.
    /// </summary>
    public class Logger
    {
        // Process-wide default level, used by the parameterless constructor.
        public static LogLevel DefaultLogLevel = LogLevel.NONE;

        private LogLevel _level;

        /// <summary>Minimum severity that gets written; NONE disables output.</summary>
        public LogLevel Level { get => _level; set => _level = value; }

        /// <summary>
        /// Creates a logger using <see cref="DefaultLogLevel"/>.
        /// A static field cannot appear as a default argument (defaults must be
        /// compile-time constants), so the documented intent is expressed with
        /// this overload instead.
        /// </summary>
        public Logger() : this(DefaultLogLevel)
        {
        }

        /// <summary>Creates a logger with an explicit level.</summary>
        public Logger(LogLevel logLevel)
        {
            // Fix: set Level *before* logging — previously Info() ran while
            // Level was still the default NONE, so the init message was
            // always dropped.
            Level = logLevel;
            Info($"Init della classe Logger, livello di log {logLevel}");
        }

        // Public functions

        /// <summary>Logs a DEBUG-severity message.</summary>
        public void Debug(string message)
        {
            Log($"[D] {message}", LogLevel.DEBUG);
        }

        /// <summary>Logs an INFO-severity message.</summary>
        public void Info(string message)
        {
            Log($"[I] {message}", LogLevel.INFO);
        }

        /// <summary>Logs an ERROR-severity message.</summary>
        public void Error(string message)
        {
            Log($"[E] {message}", LogLevel.ERROR);
        }

        /// <summary>
        /// Writes the message when logging is enabled and the logger level is
        /// at or below the message severity.
        /// </summary>
        public void Log(string message, LogLevel messageLevel)
        {
            if (Level > 0 && Level <= messageLevel)
            {
                Write(message);
            }
        }

        private void Write(string message)
        {
            string programDataDir = System.Environment.GetEnvironmentVariable("PROGRAMDATA");
            string currentDate = DateTime.Now.ToString("yyyy-MM-dd");
            string timestamp = DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.fff");
            string logFilePath = $"{programDataDir}\\CIEPKI\\CIEID_{currentDate}.log";

            try
            {
                // Fix: write synchronously inside `using` — the original fired
                // WriteLineAsync and closed the stream immediately, which can
                // lose or truncate the message; `using` also guarantees the
                // stream is disposed on failure.
                using (StreamWriter streamWriter = new StreamWriter(logFilePath, append: true))
                {
                    streamWriter.WriteLine($"{timestamp} {message}");
                }
            }
            catch (Exception)
            {
                // Log file unavailable (e.g. PROGRAMDATA unset): fall back to console.
                Console.WriteLine($"{timestamp} {message}");
            }
        }
    }
}
// IGNORE_BACKEND: JS_IR
// TODO: muted automatically, investigate should it be ran for JS or not
// IGNORE_BACKEND: JS, NATIVE

// Codegen box test: verifies that an overridden `size` property (returning 56)
// is honored through MutableCollection, java.util.AbstractCollection,
// java.util.ArrayList, and an unrelated `Sized` interface — both via the
// concrete type and via a Collection-typed reference.

// Implements MutableCollection directly; only `size` matters, the rest throw.
class A1 : MutableCollection<String> {
    override val size: Int get() = 56

    override fun isEmpty(): Boolean {
        throw UnsupportedOperationException()
    }

    override fun contains(o: String): Boolean {
        throw UnsupportedOperationException()
    }

    override fun iterator(): MutableIterator<String> {
        throw UnsupportedOperationException()
    }

    override fun containsAll(c: Collection<String>): Boolean {
        throw UnsupportedOperationException()
    }

    override fun add(e: String): Boolean {
        throw UnsupportedOperationException()
    }

    override fun remove(o: String): Boolean {
        throw UnsupportedOperationException()
    }

    override fun addAll(c: Collection<String>): Boolean {
        throw UnsupportedOperationException()
    }

    override fun removeAll(c: Collection<String>): Boolean {
        throw UnsupportedOperationException()
    }

    override fun retainAll(c: Collection<String>): Boolean {
        throw UnsupportedOperationException()
    }

    override fun clear() {
        throw UnsupportedOperationException()
    }
}

// Overrides `size` through a Java abstract collection base class.
class A2 : java.util.AbstractCollection<String>() {
    override val size: Int get() = 56

    override fun iterator(): MutableIterator<String> {
        throw UnsupportedOperationException()
    }
}

// Overrides `size` through a concrete Java collection class.
class A3 : java.util.ArrayList<String>() {
    override val size: Int get() = 56
}

interface Sized {
    val size: Int
}

// `size` override must satisfy both ArrayList and the Sized interface.
class A4 : java.util.ArrayList<String>(), Sized {
    override val size: Int get() = 56
}

// Checks the override is also visible through a Collection-typed reference.
fun check56(x: Collection<String>) {
    if (x.size != 56) throw java.lang.RuntimeException("fail ${x.size}")
}

fun box(): String {
    val a1 = A1()
    if (a1.size != 56) return "fail 1: ${a1.size}"
    check56(a1)

    val a2 = A2()
    if (a2.size != 56) return "fail 2: ${a2.size}"
    check56(a2)

    val a3 = A3()
    if (a3.size != 56) return "fail 3: ${a3.size}"
    check56(a3)

    val a4 = A4()
    if (a4.size != 56) return "fail 4: ${a4.size}"
    check56(a4)

    val sized: Sized = a4
    if (sized.size != 56) return "fail 5: ${a4.size}"

    return "OK"
}
import 'dart:io';

import 'package:cloud_firestore/cloud_firestore.dart';
import 'package:flutter/material.dart';

/// Model for a user post: a set of images with per-image descriptions,
/// plus an optional cover image/description, metadata and like count.
class OurPost {
  String uid; // author's user id
  List<TextEditingController> descriptions; // one controller per image
  TextEditingController coverDescription; // description for the cover image
  int imageCount;
  List<File> images;
  File coverImage;
  Timestamp time; // Firestore creation timestamp
  int likeCount;
  String category;

  // Fix: coverDescription and coverImage are now settable at construction —
  // previously they were declared but missing from the named-parameter
  // constructor, so they could never be initialized here. Adding optional
  // named parameters is backward compatible for all existing callers.
  OurPost({
    this.uid,
    this.descriptions,
    this.coverDescription,
    this.imageCount,
    this.images,
    this.coverImage,
    this.time,
    this.likeCount,
    this.category,
  });
}
<?php

namespace App\Models;

use Illuminate\Database\Eloquent\Factories\HasFactory;
use Illuminate\Database\Eloquent\Model;
use App\Models\item;

/**
 * Eloquent model for a requisition, tracking its workflow status and the
 * user it is assigned to.
 */
class Requisition extends Model
{
    // Mass-assignment protection for the primary key and timestamps.
    // Fix: Laravel's timestamp column is `updated_at` — the previous value
    // `update_at` guarded a non-existent column, leaving the real
    // `updated_at` mass-assignable.
    protected $guarded = ['id', 'created_at', 'updated_at'];
    //protected $fillable = ['assigned_id'];

    use HasFactory;

    // Workflow status ids (see the Status model / statuses table).
    const ORDENADO = 1;
    const PRODUCTION = 2;
    const TERMINADO = 3;
    const TIENDA = 4;

    // One-to-one (inverse): the item this requisition is for.
    public function item()
    {
        return $this->belongsTo('App\Models\Item');
    }

    // The user assigned to this requisition, matched on cus_id.
    public function user(){
        return $this->hasOne('App\Models\User','cus_id','cus_id');
    }

    // One-to-many (inverse): current workflow status.
    public function status(){
        return $this->belongsTo('App\Models\Status');
    }

    // CUSTOM FILTERS
    // Query scope: filter requisitions by assignee when an id is given.
    public function scopeAssigned($query, $assigned_id){
        // Only apply the constraint when a value was supplied
        if($assigned_id){
            return $query->where('assigned_id', $assigned_id);
        }
    }
}
const menuIcon = document.querySelector(".menu-icon");
const mobileMenu = document.querySelector(".navbar-mobile");
const remoteWorkshopBtn = document.querySelector(".tab-item.remote");
const inPersonWorkshopBtn = document.querySelector(".tab-item.in-person");
const remoteGuidelines = document.querySelector(".remote.workshop");
const inPersonGuidelines = document.querySelector(".in-person.workshop");

// Hamburger icon shows/hides the mobile navigation drawer.
const toggleMenu = () => mobileMenu.classList.toggle("show");

menuIcon.addEventListener("click", toggleMenu);

// Activate one tab button and show its panel, hiding the other pair.
const setWorkshopTab = (activeBtn, inactiveBtn, shownPanel, hiddenPanel) => {
  inactiveBtn.classList.remove("active");
  activeBtn.classList.add("active");
  hiddenPanel.style.display = "none";
  shownPanel.style.display = "flex";
};

const showRemoteWorkshopDetails = () =>
  setWorkshopTab(remoteWorkshopBtn, inPersonWorkshopBtn, remoteGuidelines, inPersonGuidelines);

const showInPersonWorkshopDetails = () =>
  setWorkshopTab(inPersonWorkshopBtn, remoteWorkshopBtn, inPersonGuidelines, remoteGuidelines);

remoteWorkshopBtn.addEventListener("click", showRemoteWorkshopDetails);
inPersonWorkshopBtn.addEventListener("click", showInPersonWorkshopDetails);
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ngrinder.common.util;

import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import static org.ngrinder.common.util.NoOp.noOp;
import static org.ngrinder.common.util.Preconditions.checkArgument;
import static org.ngrinder.common.util.Preconditions.checkNotNull;

/**
 * Reflection Utility functions.
 *
 * @author JunHo Yoon
 * @since 3.0
 */
public abstract class ReflectionUtils {

	private static final Logger LOGGER = LoggerFactory.getLogger(ReflectionUtils.class);

	// Per-class cache of declared fields (including inherited ones).
	private static final Map<Class, List<Field>> fieldCache = new ConcurrentHashMap<>();

	/**
	 * Get an object field value directly, bypassing any getter method.
	 *
	 * @param object    target object
	 * @param fieldName field name
	 * @return the field value, or {@code null} if the field cannot be read
	 */
	public static Object getFieldValue(final Object object, final String fieldName) {
		Field field = getDeclaredField(object, fieldName);
		checkNotNull(field, "Could not find field [%s] on target [%s]", fieldName, object);
		makeAccessible(field);
		try {
			return field.get(object);
		} catch (IllegalAccessException e) {
			LOGGER.error(e.getMessage(), e);
		}
		return null;
	}

	/**
	 * Find the named declared field on the object's class or any superclass.
	 *
	 * @return the field, or {@code null} when no class in the hierarchy declares it
	 */
	private static Field getDeclaredField(final Object object, final String fieldName) {
		checkNotNull(object);
		checkArgument(StringUtils.isNotBlank(fieldName));
		// CHECKSTYLE:OFF
		for (Class<?> superClass = object.getClass(); superClass != Object.class; superClass = superClass
				.getSuperclass()) {
			try {
				return superClass.getDeclaredField(fieldName);
			} catch (NoSuchFieldException e) {
				// Fall through to the superclass.
				noOp();
			}
		}
		return null;
	}

	// Suppress Java access checks only when the field is not publicly reachable.
	private static void makeAccessible(final Field field) {
		if (!Modifier.isPublic(field.getModifiers())
				|| !Modifier.isPublic(field.getDeclaringClass().getModifiers())) {
			field.setAccessible(true);
		}
	}

	/**
	 * All declared fields of {@code current} and every ancestor class, cached.
	 *
	 * Fix: the previous implementation published the (mutable, still-empty)
	 * list into {@code fieldCache} <em>before</em> populating it, so a
	 * concurrent caller could observe a partially filled result. The list is
	 * now fully built before being cached; a rare duplicate computation under
	 * contention is harmless because the result is deterministic.
	 *
	 * @param current class whose fields (own + inherited) are wanted
	 * @return list of declared fields, most-derived class first
	 */
	public static List<Field> getDeclaredFieldsIncludingParent(Class<?> current) {
		List<Field> cached = fieldCache.get(current);
		if (cached != null) {
			return cached;
		}
		List<Field> fields = new ArrayList<>();
		for (Class<?> klass = current; klass != null; klass = klass.getSuperclass()) {
			fields.addAll(Arrays.asList(klass.getDeclaredFields()));
		}
		fieldCache.put(current, fields);
		return fields;
	}
}
class HierarchyEntriesController < ApplicationController

  # GET /pages/:taxon_id/entries/:id/
  # Canonical link for a hierarchy entry: simply forwards to the taxon
  # overview page for that entry.
  def show
    hierarchy_entry_id = params[:id]
    redirect_to taxon_entry_overview_path(params[:taxon_id], hierarchy_entry_id)
  end

  # POST /pages/:taxon_id/entries/:id/switch
  # Switches the classification (hierarchy entry) used to view a taxon.
  def switch
    # The entry id normally comes from the submitted form; fall back to the
    # URL id (with a flash warning) when the form did not include one.
    hierarchy_entry_id = params[:hierarchy_entry][:id] if params[:hierarchy_entry]
    unless hierarchy_entry_id
      flash[:notice] = I18n.t(:hierarchy_entry_switch_missing_id_error)
      hierarchy_entry_id ||= params[:id]
    end
    if params[:return_to]
      # Re-resolve the "return to" URL with the new entry id so the stored
      # redirect target points at the same page under the new classification.
      return_to_params = Eol::Application.routes.recognize_path(params[:return_to], method: :get)
      return_to_params[:hierarchy_entry_id] = hierarchy_entry_id
      store_location url_for(return_to_params)
    end
    redirect_to taxon_entry_overview_path(params[:taxon_id], hierarchy_entry_id)
  end

end
/**
 * Business facade contract exposing two string-producing operations.
 *
 * NOTE(review): the parameter names {@code lo} and {@code lolo} are opaque and
 * their meaning cannot be determined from this file — confirm with the
 * implementing classes and callers.
 */
@BussFacade
public interface MyInterface {

	/** Operation A; semantics not visible from this file — see implementations. */
	String doA(int lo);

	/** Operation B; semantics not visible from this file — see implementations. */
	String doB(int lolo);
}
# [DEPRECATED] hugo-learn-doc

**Deprecated** documentation for the Hugo Learn Theme (v1). The Learn theme itself is not deprecated: it is now at v2 and is available here:

- Live Demo/Documentation: https://learn.netlify.com/
- Theme: https://github.com/matcornic/hugo-theme-learn
- Documentation: https://github.com/matcornic/hugo-theme-learn/tree/master/exampleSite
WookieController
================

Tiny project to play around with ChibiOS on an STM32F4 Discovery board
package ru.tetraquark.pathfinderlib.core.map

/**
 * A single cell of the pathfinding map grid.
 *
 * @property x horizontal grid coordinate
 * @property y vertical grid coordinate
 * @property cellType the kind of content occupying this cell
 */
data class MapCell(
    val x: Int,
    val y: Int,
    val cellType: CellType
)
;;;; Experiment driver for the simple-prog RL domain: environment
;;;; construction, algorithm registry, learning, and gnuplot evaluation output.

(in-package #:simple-prog)

;; Global experiment configuration.
(defvar *environment-type* :small)
(defvar *use-complex-environment* nil)
(defvar *environment* nil)
(defvar *program* #'simple-robot-prog)
(defvar *exploration-strategy* :random)

;; Algorithms enabled for the current run; commented entries are available
;; but switched off.
(defparameter *algorithm-names*
  (list
   ;; 'smdpq
   ;; 'hordq
   ;; 'gold-standard
   ;; 'hordq-a-0
   'hordq-a-1
   'hordq-a-2
   ;; 'hordq-a-3
   ))

(defvar *current-exploration-strategy*)
(defvar *step-number-multiplier* 1)

;; Position of ALGORITHM in *algorithm-names*; asserts when unknown.
(defun algorithm-index (algorithm)
  (let ((index (position algorithm *algorithm-names*)))
    (assert index (index) "Algorithm ~A not defined." algorithm)
    index))

;; Parallel array of algorithm instances, filled by INITIALIZE-ALGORITHMS.
(defvar *algorithms*
  (make-array (list (length *algorithm-names*)) :adjustable t :fill-pointer 0))

(defun algorithm-for (name)
  (aref *algorithms* (algorithm-index name)))

(defun algorithms ()
  *algorithms*)

;; Build a fresh environment for the configured type/complexity.
(defun make-new-environment (&optional (type *environment-type*)
                                       (complexp *use-complex-environment*))
  (ecase type
    ((:small)
     (if complexp (make-simple-env-1) (make-simple-env-0)))
    ((:medium)
     (if complexp (make-simple-env-3) (make-simple-env-2)))
    ((:large)
     (if complexp (make-simple-env-5) (make-simple-env-4)))
    ((:maze :labyrinth)
     (make-simple-env-6))))

;; Learning-step budget, scaled by *step-number-multiplier*.
(defun steps-for-environment ()
  (let ((base-size
          (ecase *environment-type*
            ((:small) (if *use-complex-environment* 5000 2500))
            ((:medium) (if *use-complex-environment* 100000 25000))
            ((:large) (if *use-complex-environment* 2500000 1000000))
            ((:maze :labyrinth)
             (if (eq *exploration-strategy* :random) 1000000 500000)))))
    (* base-size *step-number-multiplier*)))

;; Create *environment* (always when FORCE, else only if missing).
(defun initialize-environment (&optional (force t))
  (when (or force (not *environment*))
    (setf *environment* (make-new-environment))))

(defun explore-environment (&optional (recreate nil))
  (initialize-environment (not recreate))
  (env:io-interface *environment*))

;; Reset *algorithms* and instantiate only the requested entries.
(defun initialize-algorithms (&optional (algorithm-names *algorithm-names*))
  (setf *algorithm-names* algorithm-names)
  (setf (fill-pointer *algorithms*) 0)
  (mapc (lambda (alg)
          (when (member (first alg) algorithm-names)
            (vector-push-extend (second alg) *algorithms*)))
        (list (list 'smdpq (alisp-smdpq:make-smdpq-alg :hist-out-dir "Temp/"))
              (list 'hordq (make-instance 'ahq:<hordq>))
              (list 'gold-standard (alisp-gold-standard:make-alisp-gold-standard-learning-alg))
              (list 'hordq-a-0 (make-instance 'ahq:<hordq> :features *simple-featurizer-0*))
              (list 'hordq-a-1 (make-instance 'ahq:<hordq> :features *simple-featurizer-1*))
              (list 'hordq-a-2 (make-instance 'ahq:<hordq> :features *simple-featurizer-2*))
              (list 'hordq-a-3 (make-instance 'ahq:<hordq> :features *simple-featurizer-3*))))
  (values))

;; Interactive policy exploration, optionally seeded with each algorithm's
;; latest Q-history entry as advice.
(defun explore-policies (&optional (show-advice t))
  (initialize-environment nil)
  (set-up-exploration)
  (io-interface *program* *environment*
                (if show-advice
                    (let ((hists (map 'list #'get-q-hist (algorithms))))
                      (mapcan (lambda (hist)
                                (if (and (typep hist 'sequence) (> (length hist) 0))
                                    (list (aref hist (1- (length hist))))
                                    '()))
                              hists))
                    '())))

;; Instantiate the exploration policy for ALGORITHM and remember it in
;; *current-exploration-strategy*.
(defun pick-exploration-strategy (algorithm &optional (strategy *exploration-strategy*))
  (let ((result
          (ecase strategy
            ((:random) 'random)
            ((:epsilon)
             (make-instance '<epsilon-policy> :q-learning-alg algorithm))
            ((:boltzman)
             (make-instance 'exp-pol:<epsilon-boltzmann-exp-pol>
                            :q-learning-alg algorithm
                            ;; TODO: Make first parameter depend on number of trials
                            :temp-fn (lambda (n) (/ 1000.0 (1+ n)))
                            :epsilon-decay-fn (exp-pol:make-linear-epsilon-decay-fn 10000 0.01))))))
    (setf *current-exploration-strategy* result)
    result))

;; Top-level entry point: apply keyword overrides to the globals, then run
;; learning for every enabled algorithm.
(defun learn-behavior (&key (program *program*)
                            environment-type
                            (use-complex-environment nil use-complex-environment-p)
                            (exploration-strategy *exploration-strategy* exploration-strategy-p)
                            (algorithm-names *algorithm-names* algorithm-names-p))
  (when environment-type
    (setf *environment-type* environment-type))
  (when use-complex-environment-p
    (setf *use-complex-environment* use-complex-environment))
  (when exploration-strategy-p
    (setf *exploration-strategy* exploration-strategy))
  (when algorithm-names-p
    (setf *algorithm-names* algorithm-names))
  (initialize-environment)
  (initialize-algorithms algorithm-names)
  (case exploration-strategy
    ;; Random exploration learns all algorithms in a single shared run ...
    ((:random)
     (format t "~&Learning behavior using random exploration strategy~%")
     (learn program *environment* 'random
            (coerce (algorithms) 'list)
            (steps-for-environment)
            :hist-length 50
            :step-print-inc 2500
            :episode-print-inc 500))
    ;; ... other strategies need a per-algorithm policy, so each algorithm
    ;; gets its own run.
    (otherwise
     (format t "~&Learning behavior using exploration strategy ~A~%" exploration-strategy)
     (map nil (lambda (alg)
                (learn program *environment*
                       (pick-exploration-strategy alg exploration-strategy)
                       alg
                       (steps-for-environment)
                       :hist-length 100
                       :step-print-inc 2500
                       :episode-print-inc 500))
          (algorithms)))))

;; Evaluation parameters.
(defvar *evaluation-steps* 50)
(defvar *evaluation-trials* 25)

;; Reward curve for the named algorithm's learned policy history.
(defun evaluation-for (name)
  (evaluate *program* *environment* (get-policy-hist (algorithm-for name))
            :num-steps *evaluation-steps*
            :num-trials *evaluation-trials*))

;; Gnuplot driver; the five ~A slots are: algorithm, environment type,
;; complexity label, png output path, data file path.
(defvar *gnuplot-file-template*
  "set title 'Learning Curve for ~A (Nav, ~A, ~A)'
set title font \",15\"
set xlabel 'iterations'
set xlabel font \",12\"
set ylabel 'reward'
set ylabel font \",12\"
set key off
set autoscale xy
unset grid
set style line 1 lt rgb \"blue\" lw 2 pt 6
set terminal pngcairo
set output '~A'
plot '~A' with linespoints ls 1
")

(defvar *file-exists-action* :error)

;; Print learning curves and/or write per-algorithm .dat/.plt files for gnuplot.
(defun evaluate-performance (&key (algorithm-names *algorithm-names*)
                                  (output-directory (merge-pathnames
                                                     (make-pathname :directory '(:relative ".." "Gnuplot"))
                                                     (asdf:system-definition-pathname :waste)))
                                  (gnuplot-file-prefix)
                                  (data-file-prefix "data")
                                  (png-file-prefix gnuplot-file-prefix)
                                  (output-to-terminal (not gnuplot-file-prefix)))
  (when output-to-terminal
    (format t "~2&Learning curves for ~{~A~^, ~} are:~%" algorithm-names)
    (pprint (map 'list #'list (mapcar 'evaluation-for algorithm-names))))
  (when gnuplot-file-prefix
    (ensure-directories-exist output-directory)
    (dolist (alg-name algorithm-names)
      ;; File name encodes algorithm, environment type and complexity.
      (let* ((file-postfix (concatenate 'string
                                        "-simple-"
                                        (string-downcase (symbol-name alg-name))
                                        "-"
                                        (string-downcase (symbol-name *environment-type*))
                                        "-"
                                        (if *use-complex-environment* "complex" "simple")))
             (data-file (merge-pathnames
                         (make-pathname :name (concatenate 'string data-file-prefix file-postfix)
                                        :type "dat")
                         output-directory))
             (gnuplot-file (merge-pathnames
                            (make-pathname :name (concatenate 'string gnuplot-file-prefix file-postfix)
                                           :type "plt")
                            output-directory))
             (png-file (merge-pathnames
                        (make-pathname :name (concatenate 'string png-file-prefix file-postfix)
                                       :type "png")
                        output-directory)))
        (format t "~&Writing data to file ~A." (enough-namestring data-file))
        (with-open-file (stream data-file
                                :direction :output
                                :if-exists *file-exists-action*
                                :if-does-not-exist :create)
          ;; Evaluate with a 2.5x step budget for smoother file output.
          (map nil (lambda (value)
                     (format stream "~&~A~%" value))
               (let ((*evaluation-steps* (floor (* *evaluation-steps* 2.5))))
                 (evaluation-for alg-name))))
        (format t "~&Writing gnuplot driver file ~A." (enough-namestring gnuplot-file))
        (with-open-file (stream gnuplot-file
                                :direction :output
                                :if-exists *file-exists-action*
                                :if-does-not-exist :create)
          (format stream *gnuplot-file-template*
                  alg-name
                  *environment-type*
                  (if *use-complex-environment* "Complex" "Simple")
                  (namestring png-file)
                  (namestring data-file)))))))

;; Disabled helper, kept for interactive use.
#+(or)
(defun clean-up ()
  (reset *smdpq*))
<?php
/**
 * Template for the API product listing page.
 *
 * Available variables
 *
 * $products an array of "themed" api product elements
 * $num_products total number of products
 * $raw_products - PHP array of products, each of which is an associative array
 */
?>
<!-- Container populated client-side from rawProducts. -->
<div class="api-product-listing-wrapper">
  <div class="apiproduct_filtered_list" id="apiproduct_filtered_list"></div>
</div>

<script>
  // Expose the raw product data to the client-side renderer.
  var rawProducts = <?php print ' ' . json_encode($raw_products, JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES); ?>;
</script>

<!-- Underscore-style (<%= %>) client-side template for one product card. -->
<script id="prod_template" type="text/html">
  <div id="<%= element_id %>" class="api-product-item">
    <div class="api-product-wrapper">
      <div class="api-product-image">
        <img src="<%= link_img %>" height="230" width="230">
      </div>
      <div>
        <h2 class="api-product-name"><%= display_name %></h2>
      </div>
      <div class="api-product-desc">
        <%= description_150 %>
      </div>
      <div class="api-product-link">
        <a href="<%= link_doc %>" class="apiproduct-btn-default">API docs</a>
      </div>
    </div>
  </div>
</script>
using System.Threading.Tasks;

namespace JPenny.Tasks.Resolvers
{
    /// <summary>
    /// Contract for asynchronously resolving a task.
    /// NOTE(review): the exact semantics of "resolve" are not visible from
    /// this file — confirm against the implementing classes.
    /// </summary>
    public interface ITaskResolver
    {
        /// <summary>Performs the resolution asynchronously.</summary>
        Task Resolve();
    }
}
#!/usr/bin/env python3
"""Generate an Ansible inventory (inventory.yml) from running k0s multipass VMs."""

import json
import subprocess
from pathlib import Path


def get_multipass_instances():
    """Return the k0s-prefixed multipass instances, sorted by name.

    Raises SystemExit on failure instead of returning None (the previous
    behaviour, which crashed later with an unrelated TypeError).
    """
    try:
        # pipe multipass json list into dictionary
        result = subprocess.run(
            ['multipass', 'list', '--format', 'json'],
            stdout=subprocess.PIPE).stdout.decode('utf-8')
        result_dict = json.loads(result)

        # filter for k0s instances
        filtered_list = list(
            filter(lambda k: 'k0s-' in k['name'], result_dict["list"]))
        return sorted(filtered_list, key=lambda x: x['name'])
    except Exception as e:
        raise SystemExit(f'Failed to list multipass instances: {e}')


def build_inventory(instances):
    """Build the Ansible inventory dict for the given instance dicts.

    Each instance must have a 'name' and an 'ipv4' list (first entry is used).
    With more than four instances the first three become control plane nodes
    (one initial controller + two controllers); otherwise only the first does.
    """
    inventory = {'all': {'hosts': {},
                         'vars': {'ansible_user': 'k0s'},
                         'children': {
                             'initial_controller': {'hosts': []},
                             'controller': {'hosts': []},
                             'worker': {'hosts': []}}}}

    hosts = {i["name"]: {'ansible_host': i["ipv4"][0]} for i in instances}

    initial_controller = {}
    controllers = {}
    workers = {}
    if len(instances) > 4:
        for i in instances[0:1]:
            initial_controller[i["name"]] = None
        for i in instances[1:3]:
            controllers[i["name"]] = None
        for i in instances[3:]:
            workers[i["name"]] = None
        print('Designated first three instances as control plane nodes.')
    else:
        for i in instances[0:1]:
            initial_controller[i["name"]] = None
        # The others become workers
        for i in instances[1:]:
            workers[i["name"]] = None
        print('Designated first instance as control plane node.')

    inventory['all']['hosts'] = hosts
    inventory['all']['children']['initial_controller']['hosts'] = initial_controller
    inventory['all']['children']['controller']['hosts'] = controllers
    inventory['all']['children']['worker']['hosts'] = workers
    return inventory


def main():
    # PyYAML is only needed for writing the file, so import it locally; this
    # keeps the pure-data helpers importable without the dependency.
    import yaml
    from yaml import SafeDumper

    inventory = build_inventory(get_multipass_instances())

    # Dump blanks instead of 'null' by using SafeDumper
    # https://stackoverflow.com/a/37445121
    SafeDumper.add_representer(
        type(None),
        lambda dumper, value: dumper.represent_scalar(
            u'tag:yaml.org,2002:null', ''))

    # Write inventory as yaml.
    # Fix: the original called `f.close` without parentheses, so the file was
    # never explicitly closed; a context manager closes it reliably.
    yaml_path = Path(__file__).with_name('inventory.yml')
    with open(yaml_path, 'w') as f:
        f.write(yaml.safe_dump(inventory,
                               default_flow_style=False, explicit_start=True))
    print(f'Created Ansible Inventory at: {yaml_path}')


if __name__ == '__main__':
    main()
from django.urls import path

from grocery_store.product.views import AddProduct, product_details, edit_product, delete_products, AddDiscountProduct

# Product CRUD routes for the grocery store app.
# NOTE(review): the route names contain spaces ('add product', etc.); this is
# valid for Django reversing, but confirm templates use the exact same strings.
urlpatterns = [
    path('add/', AddProduct.as_view(), name='add product'),
    path('details/<int:pk>', product_details, name='product details'),
    path('edit/<int:pk>', edit_product, name='edit product'),
    path('delete/<int:pk>', delete_products, name='delete product'),
    path('add_discount/', AddDiscountProduct.as_view(), name='add discounted product'),
]
package sandbox

import sandbox.monads.EvalFold
import sandbox.monads.WriteLogMonad._

//import functors.Printers.booleanPrintable._
//import functors.Printers.stringPrintable._
//import functors.Printers.boxPrintable._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent._
import scala.concurrent.duration._

/**
 * Demo of form validation with cats: fail-fast accumulation via Either
 * versus fail-slow accumulation via Validated.
 */
object MainValidated extends App {

  import cats.data.Validated
  import cats.syntax.either._
  import cats.syntax.validated._
  import cats.syntax.apply._
  import cats.instances.list._
  import cats.instances.string._

  type FormData = Map[String, String]
  // FailFast short-circuits on the first error; FailSlow accumulates all.
  type FailFast[A] = Either[List[String], A]
  type FailSlow[A] = Validated[List[String], A]

  case class User(name: String, age: Int)

  // Look a field up in the form data, failing with a descriptive message.
  def getValue(field: String)(fieldVals: FormData): FailFast[String] = {
    //    fieldVals.get(field) match {
    //      case Some(fieldValue) => Right(fieldValue)
    //      case None => Left(s"field $field does not exist")
    //    }
    fieldVals.get(field).toRight(List(s"field $field does not exist"))
  }

  val getName = getValue("name") _
  getName(Map("name" -> "john"))
  getName(Map())

  // Parse an int, converting the NumberFormatException into an error message.
  def parseInt(name: String)(data: String): FailFast[Int] =
    Either
      .catchOnly[NumberFormatException](data.toInt)
      .leftMap(_ => List(s"$name must be valid int"))

  parseInt("count")("1")
  parseInt("count")("a")

  def nonBlank(name: String)(data: String): FailFast[String] =
    if (data.size > 0) Right(data) else Left(List(s"$name must not be empty"))

  def nonNegative(name: String)(data: Int): FailFast[Int] =
    if (data > -1) Right(data) else Left(List(s"$name cannot be negative"))

  nonBlank("name")("jonas")
  nonBlank("name")("")
  nonNegative("age")(10)
  nonNegative("age")(-1)

  // Fail-fast pipelines: each step only runs if the previous succeeded.
  def readName(fieldVals: FormData) =
    getValue("name")(fieldVals)
      .flatMap(nonBlank("name"))

  def readAge(fieldVals: FormData) =
    getValue("age")(fieldVals)
      .flatMap(nonBlank("age"))
      .flatMap(parseInt("age"))
      .flatMap(nonNegative("age"))

  readName(Map())
  readName(Map("name" -> ""))
  readName(Map("name" -> "jonas"))
  readAge(Map())
  readAge(Map("age" -> ""))
  readAge(Map("age" -> "-1"))
  readAge(Map("age" -> "1"))

  ("Badness".invalid[Int], "Badness".invalid[Int]).tupled

  // Fail-slow: convert each Either to Validated so both error lists are
  // accumulated by mapN instead of stopping at the first failure.
  def readUser(fieldVals: FormData): FailSlow[User] =
    (readName(fieldVals).toValidated, readAge(fieldVals).toValidated)
      .mapN(User.apply)

  println(readUser(Map()))
  println(readUser(Map("name" -> "", "age" -> "")))
  println(readUser(Map("name" -> "jonas", "age" -> "")))
  println(readUser(Map("name" -> "jonas", "age" -> "1")))
  println(readUser(Map("name" -> "", "age" -> "-1")))
  println(readUser(Map("name" -> "", "age" -> "1")))
}
<?php

namespace Obos\Bundle\CoreBundle\Controller;

use Symfony\Bundle\FrameworkBundle\Controller\Controller,
    Sensio\Bundle\FrameworkExtraBundle\Configuration\Route,
    Sensio\Bundle\FrameworkExtraBundle\Configuration\Template,
    Obos\Bundle\CoreBundle\Entity\Consultant,
    Symfony\Component\HttpFoundation\Request,
    Symfony\Component\HttpFoundation\Response;

/**
 * Root app controller
 *
 * @Route("/")
 */
class CoreController extends Controller
{
    /**
     * App root. If the user is authorized, she is redirected to the project listing view; otherwise,
     * the default view prompts her to log in or register.
     *
     * @return Response
     *
     * @Route("/", name="core_root")
     */
    public function rootAction()
    {
        if ($this->get('security.authorization_checker')->isGranted('ROLE_CONSULTANT')) {
            return $this->redirectToRoute('projects.root');
        }

        return $this->render('core/root.html.twig');
    }

    /**
     * Login checking action. Empty because the security firewall intercepts
     * this route; the body is never meaningfully executed.
     *
     * @return Response
     *
     * @Route("/auth", name="core_login")
     */
    public function loginAction()
    {
        return new Response();
    }

    /**
     * Logout action. Empty for the same reason as loginAction().
     *
     * @return Response
     *
     * @Route("/logout", name="core_logout")
     */
    public function logoutAction()
    {
        return new Response();
    }

    /**
     * Registration page.
     *
     * @param  Request  $request  The request.
     * @return Response
     *
     * @Route("/register", name="core_register")
     * @Template()
     */
    public function registerAction(Request $request)
    {
        $newUser = new Consultant();
        $form = $this->createForm('registration', $newUser);

        // Handle the form submission when applicable
        $form->handleRequest($request);
        if ($form->isValid()) {
            // Persist the new user
            $this->get('obos.manager.user')->registerUser($newUser);

            // Add confirmation flash
            $this->addFlash('success', sprintf(
                'Thanks for registering, %s! You can now log in using your email address and password.',
                $newUser->getFirstName()
            ));

            // Back to the root page, which now offers the login form.
            return $this->redirectToRoute('core_root');
        }

        return $this->render('core/register.html.twig', [
            'regForm' => $form->createView()
        ]);
    }
}
<?php

namespace App\Http\Controllers;

use App\Http\Requests;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use App\Game;
use App\GameSet;

/**
 * AJAX endpoints for managing the sets of a game.
 */
class GameSetsController extends Controller
{
    /**
     * Fix: these were previously created as undeclared dynamic properties,
     * which is deprecated as of PHP 8.2; declare them explicitly.
     *
     * @var Game
     */
    protected $game;

    /** @var GameSet */
    protected $game_set;

    public function __construct(Game $game, GameSet $game_set)
    {
        $this->middleware('auth');
        $this->game = $game;
        $this->game_set = $game_set;
    }

    /**
     * Render an empty set row for the given game and return it as JSON.
     * Returns nothing when the game id is unknown (original behaviour kept).
     *
     * @return \Illuminate\Http\JsonResponse|null
     */
    public function add(Request $request)
    {
        $set_number = $request->set_number;
        $game = $this->game->find($request->game_id);
        $home_points = null;
        $away_points = null;
        if ($game instanceof Game) {
            $html = view('game_sets.display', compact('game', 'set_number', 'home_points', 'away_points'))->render();
            return response()->json(['html' => $html]);
        }
    }

    /**
     * Soft-delete: mark the set hidden instead of removing the row.
     */
    public function destroy(Request $request)
    {
        $game_set = $this->game_set->find($request->game_set_id);
        if ($game_set instanceof GameSet) {
            $game_set->hidden = true;
            $game_set->save();
        }
    }
}
//------------------------------------------------------------------------------
//
//   Copyright 2018 Fetch.AI Limited
//
//   Licensed under the Apache License, Version 2.0 (the "License");
//   you may not use this file except in compliance with the License.
//   You may obtain a copy of the License at
//
//       http://www.apache.org/licenses/LICENSE-2.0
//
//   Unless required by applicable law or agreed to in writing, software
//   distributed under the License is distributed on an "AS IS" BASIS,
//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//   See the License for the specific language governing permissions and
//   limitations under the License.
//
//------------------------------------------------------------------------------

#include "ledger/storage_unit/lane_remote_control.hpp"
#include "core/byte_array/consumers.hpp"
#include "core/byte_array/decoders.hpp"
#include "core/byte_array/tokenizer/tokenizer.hpp"
#include "core/commandline/cli_header.hpp"
#include "core/commandline/parameter_parser.hpp"
#include "core/json/document.hpp"
#include "core/logger.hpp"
#include "core/string/trim.hpp"
#include "ledger/chain/helper_functions.hpp"
#include "ledger/storage_unit/storage_unit_client.hpp"
#include "network/service/client.hpp"
#include "storage/document_store_protocol.hpp"

#include <iostream>

using namespace fetch;
using namespace fetch::service;
using namespace fetch::byte_array;

// Interactive REPL for driving a set of storage-unit lanes: one TCP
// connection per lane on consecutive ports starting at --port.
// Commands: connectall, connect, getlanenumber, gettx, addtx, quit.
int main(int argc, char const **argv)
{
  using service_type        = ServiceClient;
  using client_type         = fetch::network::TCPClient;
  using shared_service_type = std::shared_ptr<service_type>;

  fetch::logger.DisableLogger();

  commandline::ParamsParser params;
  params.Parse(argc, argv);
  uint32_t lane_count = params.GetParam<uint32_t>("lane-count", 1);

  std::cout << std::endl;
  fetch::commandline::DisplayCLIHeader("Storage Unit Remote");
  std::cout << "Connecting with " << lane_count << " lanes." << std::endl;

  // Remote setup: lane i listens on port + i.
  fetch::network::NetworkManager tm(8);
  std::string                    host = "localhost";
  uint16_t                       port = params.GetParam<uint16_t>("port", 8080);

  ledger::LaneRemoteControl        remote;
  std::vector<shared_service_type> services;
  for (uint32_t i = 0; i < lane_count; ++i)
  {
    client_type client(tm);
    client.Connect(host, uint16_t(port + i));

    shared_service_type service = std::make_shared<service_type>(client, tm);
    services.push_back(service);
    remote.AddClient(i, service);
  }

  // Client setup
  fetch::ledger::StorageUnitClient client(tm);

  tm.Start();

  for (std::size_t i = 0; i < lane_count; ++i)
  {
    client.AddLaneConnection<fetch::network::TCPClient>("localhost", uint16_t(port + i));
  }

  // Setting tokenizer up: splits the input line into names, quoted strings
  // and numbers; everything else is discarded via TOKEN_CATCH_ALL.
  enum
  {
    TOKEN_NAME      = 1,
    TOKEN_STRING    = 2,
    TOKEN_NUMBER    = 3,
    TOKEN_CATCH_ALL = 12
  };

  std::string            line = "";
  Tokenizer              tokenizer;
  std::vector<ByteArray> command;

  tokenizer.AddConsumer(consumers::StringConsumer<TOKEN_STRING>);
  tokenizer.AddConsumer(consumers::NumberConsumer<TOKEN_NUMBER>);
  tokenizer.AddConsumer(consumers::Token<TOKEN_NAME>);
  tokenizer.AddConsumer(consumers::AnyChar<TOKEN_CATCH_ALL>);

  while ((std::cin) && (line != "quit"))
  {
    std::cout << ">> ";

    // Getting command
    std::getline(std::cin, line);
    try
    {

      command.clear();
      tokenizer.clear();
      tokenizer.Parse(line);

      for (auto &t : tokenizer)
      {
        if (t.type() != TOKEN_CATCH_ALL) command.push_back(t);
      }

      if (command.size() > 0)
      {
        if (command[0] == "connectall")
        {
          if (command.size() == 3)
          {
            for (std::size_t i = 0; i < lane_count; ++i)
            {
              remote.Connect(uint32_t(i), command[1], uint16_t(command[2].AsInt() + int(i)));
            }
          }
          else
          {
            std::cout << "usage: connectall [ip] [port]" << std::endl;
          }
        }
        else if (command[0] == "connect")
        {
          if (command.size() == 4)
          {
            remote.Connect(uint32_t(command[1].AsInt()), command[2], uint16_t(command[3].AsInt()));
          }
          else
          {
            std::cout << "usage: connect [lane] [ip] [port]" << std::endl;
          }
        }

        // NOTE(review): this and the "gettx" check below are plain `if`s
        // rather than `else if`s. Harmless today because the command words
        // are distinct, but inconsistent with the chain above.
        if (command[0] == "getlanenumber")
        {
          if (command.size() == 2)
          {
            std::cout << remote.GetLaneNumber(uint32_t(command[1].AsInt())) << std::endl;
          }
          else
          {
            std::cout << "usage: getlanenumber [lane]" << std::endl;
          }
        }

        if (command[0] == "gettx")
        {
          if (command.size() != 2)
          {
            std::cout << "usage: gettx \"[hash]\"" << std::endl;
          }
          else
          {
            // Strip the surrounding quotes, then base64-decode the digest.
            auto enckey = command[1].SubArray(1, command[1].size() - 2);
            auto key    = byte_array::FromBase64(enckey);

            chain::Transaction tx;
            client.GetTransaction(key, tx);

            std::cout << std::endl;
            std::cout << "Transaction: " << byte_array::ToBase64(tx.digest()) << std::endl;
            std::cout << "Signature: " << byte_array::ToBase64(tx.signature()) << std::endl;
            std::cout << "Fee: " << tx.summary().fee << std::endl;
            std::cout << std::endl;
          }
        }
        else if (command[0] == "addtx")
        {
          if (command.size() == 1)
          {
            // Generate and submit a random (but validly signed) transaction.
            chain::Transaction tx = chain::VerifiedTransaction::Create(chain::RandomTransaction());

            std::cout << std::endl;
            std::cout << "Transaction: " << byte_array::ToBase64(tx.digest()) << std::endl;
            std::cout << "Signature: " << byte_array::ToBase64(tx.signature()) << std::endl;
            std::cout << "Fee: " << tx.summary().fee << std::endl;
            std::cout << std::endl;

            client.AddTransaction(tx);
          }
          else
          {
            std::cout << "usage: addtx" << std::endl;
          }
        }
      }
    }
    catch (serializers::SerializableException &e)
    {
      std::cerr << "error: " << e.what() << std::endl;
    }
  }

  tm.Stop();

  return 0;
}
const Discord = require('discord.js');
const client = new Discord.Client();

// Maps each `!role <alias>` argument to the guild role name it grants.
const ROLE_ALIASES = {
    minecraft: '♦ Minecraft ♦',
    mc: '♦ Minecraft ♦',
    dontstarve: '♦ Don\'t Starve ♦',
    dst: '♦ Don\'t Starve ♦',
    worldoftanks: '♦ World of Tanks ♦',
    wot: '♦ World of Tanks ♦',
    leagueoflegends: '♦ League of Legends ♦',
    lol: '♦ League of Legends ♦',
    'osu!': '♦ Osu! ♦',
    osu: '♦ Osu! ♦',
    elsword: '♦ Elsword ♦',
    els: '♦ Elsword ♦',
};

// Green "role added" confirmation embed.
function cEmbed(channel, role) {
    let str = `Le role **${role.name}** à bien été ajouté !`;
    let embed = new Discord.RichEmbed()
        .setColor(0x42f44b)
        .setDescription(str.replace('♦', '\♦'));
    channel.send(embed);
};

// Red "you already have this role" embed.
function aEmbed(channel, role) {
    let str = `Erreur, Tu à déja le role **${role.name}** !`;
    let embed = new Discord.RichEmbed()
        .setColor(0xf44141)
        .setDescription(str.replace('♦', '\♦'));
    channel.send(embed);
};

client.on('message', message => {
    let prefix = '!';

    // Fix: the original sliced the prefix off without checking it was there,
    // so e.g. "Xrole mc" (any first character) also triggered commands.
    if (!message.content.startsWith(prefix)) return;

    let channel = message.channel;
    let args = message.content.slice(prefix.length).trim().split(/ +/g);
    let command = args.shift().toLowerCase();

    if (command == 'role') {
        let roleName = ROLE_ALIASES[args[0]];
        if (!roleName) {
            // Fix: in the original the error `else` was attached only to the
            // last (elsword) `if`, so every OTHER valid alias also sent this
            // error embed after granting the role.
            message.channel.send({
                embed: {
                    description: 'Erreur, ce role n\'existe pas ou tu n\'a pas entré de nom de role !',
                    color: 0xf44141
                }
            })
            return;
        }

        let role = message.guild.roles.find('name', roleName);
        if (message.guild.member(message.author).roles.array().includes(role)) {
            aEmbed(channel, role);
            return;
        }
        message.guild.member(message.author).addRole(role);
        cEmbed(channel, role);
    }
})

// New members automatically receive the base member roles.
client.on('guildMemberAdd', member => {
    let role = member.guild.roles.find('name', '♦ Membre ♦');
    let rolep = member.guild.roles.find('name', '♦ Perm - Membre ♦');
    member.addRole(role);
    member.addRole(rolep);
});

client.login(process.env.TOKEN);
<?php

namespace PHPFUI;

/**
 * Simple wrapper for DropDown links
 */
class DropDown extends \PHPFUI\Base
	{
	// The pane that drops down; configured via data-* attributes.
	private \PHPFUI\HTML5Element $dropDown;

	private bool $hover = false;

	/**
	 * Construct a DropDown
	 *
	 * @param HTML5Element $dropTarget what to click on to initiate the drop down
	 * @param HTML5Element $dropDown what you are going to drop
	 */
	public function __construct(\PHPFUI\HTML5Element $dropTarget, HTML5Element $dropDown)
		{
		parent::__construct();
		$this->dropDown = $dropDown;
		$this->dropDown->addClass('dropdown-pane');
		$this->dropDown->addAttribute('data-dropdown');
		// Foundation wires the target to the pane via the pane's id.
		$dropTarget->addAttribute('data-toggle', $this->dropDown->getId());
		$this->add($dropTarget);
		}

	/**
	 * Set the alignment of the drop down
	 *
	 * @param string $alignment must be one of left, center, right
	 */
	public function setAlignment(string $alignment) : DropDown
		{
		$validAlignments = ['left', 'center', 'right', ];

		if (! \in_array($alignment, $validAlignments))
			{
			throw new \Exception(__METHOD__ . ': $alignment must be one of (' . \implode(',', $validAlignments) . ')');
			}
		$this->dropDown->addAttribute('data-alignment', $alignment);

		return $this;
		}

	/**
	 * DropDown on hover
	 *
	 * @param bool $hover default true
	 */
	public function setHover(bool $hover = true) : DropDown
		{
		$this->hover = $hover;

		return $this;
		}

	/**
	 * Set the position of the drop down (drop up anyone?)
	 *
	 * @param string $position must be one of top, bottom, left, right
	 */
	public function setPosition(string $position) : DropDown
		{
		$validPositions = ['top', 'bottom', 'left', 'right', ];

		if (! \in_array($position, $validPositions))
			{
			throw new \Exception(__METHOD__ . ': $position must be one of (' . \implode(',', $validPositions) . ')');
			}
		$this->dropDown->addAttribute('data-position', $position);

		return $this;
		}

	// No body of its own: the target renders from the container, the pane in getEnd().
	protected function getBody() : string
		{
		return '';
		}

	// The pane is emitted after the target markup.
	protected function getEnd() : string
		{
		return "{$this->dropDown}";
		}

	// Finalize hover/focus attributes just before rendering.
	protected function getStart() : string
		{
		if ($this->hover)
			{
			$this->dropDown->addAttribute('data-hover', 'true');
			$this->dropDown->addAttribute('data-hover-pane', 'true');
			}
		else
			{
			$this->dropDown->addAttribute('data-auto-focus', 'true');
			}

		return '';
		}
	}
-- @author bwcx_jzy
-- Incremental schema migration: annotate nodeId columns, add audit/operator
-- fields, and extend the build-history table. The COMMENT '…' texts are
-- persisted column comments (runtime data) and are intentionally left as-is.

-- Re-declare nodeId with an explicit length and column comment.
ALTER TABLE USEROPERATELOGV1 ALTER COLUMN nodeId VARCHAR(50) COMMENT '节点ID';
ALTER TABLE MONITORNOTIFYLOG ALTER COLUMN nodeId VARCHAR(50) COMMENT '节点ID';

-- Track the operator on outgiving and SSH terminal logs.
ALTER TABLE OUTGIVINGLOG ADD IF NOT EXISTS modifyUser VARCHAR(50) comment '操作人';
ALTER TABLE SSHTERMINALEXECUTELOG ADD IF NOT EXISTS modifyUser VARCHAR(50) comment '操作人';

-- Build history: record how a build was triggered and whether sync was incremental.
ALTER TABLE BUILDHISTORYLOG ADD IF NOT EXISTS triggerBuildType int DEFAULT 0 comment '触发类型{0,手动,1 触发器,2 自动触发}';
ALTER TABLE BUILDHISTORYLOG ADD IF NOT EXISTS diffSync TINYINT COMMENT '增量同步';

-- Commands can exceed VARCHAR limits; widen to CLOB.
ALTER TABLE SSHTERMINALEXECUTELOG ALTER COLUMN commands CLOB comment '操作的命令';

ALTER TABLE SYSTEMMONITORLOG ADD IF NOT EXISTS networkTime int COMMENT '延迟时间ms';

ALTER TABLE BUILDHISTORYLOG ADD IF NOT EXISTS buildRemark VARCHAR(255) comment '构建备注';
#!/bin/bash
#
# Remove generated certificate/key artifacts from the working directory.
# Fix: plain `rm *.ext` exits with an error when a pattern matches nothing;
# `-f` makes the cleanup idempotent, and `--` guards against any file name
# that begins with a dash.
rm -f -- *.pem *.csr *.key *.srl
;;;; Quicksort on integer lists, with the partitioning expressed through the
;;;; standard sequence filters instead of hand-rolled recursion.

;; Elements of L strictly below N, original order preserved.
(defun list< (n l)
  (remove-if-not #'(lambda (x) (< x n)) l))

;; Elements of L greater than or equal to N, original order preserved.
(defun list>= (n l)
  (remove-if #'(lambda (x) (< x n)) l))

;; Classic quicksort using the first element as the pivot.
(defun qsort (l)
  (if (null l)
      '()
      (let ((pivot (first l))
            (tail (rest l)))
        (append (qsort (list< pivot tail))
                (list pivot)
                (qsort (list>= pivot tail))))))

; start here
(print (qsort (list 1 5 3 8 2 0)))
package com.cesarla.persistence import java.time.Instant import com.cesarla.models.{Column, Key, Operation} import scala.concurrent.ExecutionContext trait KeyValueEngine { type KeyCodec[A] = Codec[Key[A]] type KeyDecoder[A] = Decoder[Key[A]] type KeyEncoder[A] = Encoder[Key[A]] type ColumnCodec[A] = Codec[Column[A]] type ColumnDecoder[A] = Decoder[Column[A]] type ColumnEncoder[A] = Encoder[Column[A]] def get[A](key: Key[A], timestamp: Instant)(implicit keyEncoder: KeyEncoder[A], valueDecoder: ColumnDecoder[A], ec: ExecutionContext): Operation[Column[A]] def put[A](key: Key[A], column: Column[A])(implicit keyEncoder: KeyEncoder[A], valueFormat: ColumnCodec[A], ec: ExecutionContext): Operation[Unit] def delete[A](key: Key[A], timestamp: Instant)(implicit keyEncoder: KeyEncoder[A], valueReader: ColumnCodec[A], ec: ExecutionContext): Operation[Unit] }
use crate::{types::Accept, Error, CONSENSUS_VERSION_HEADER}; use reqwest::{header::ACCEPT, RequestBuilder, Response, StatusCode}; use std::str::FromStr; use types::ForkName; /// Trait for converting a 404 error into an `Option<Response>`. pub trait ResponseOptional { fn optional(self) -> Result<Option<Response>, Error>; } impl ResponseOptional for Result<Response, Error> { fn optional(self) -> Result<Option<Response>, Error> { match self { Ok(x) => Ok(Some(x)), Err(e) if e.status() == Some(StatusCode::NOT_FOUND) => Ok(None), Err(e) => Err(e), } } } /// Trait for extracting the fork name from the headers of a response. pub trait ResponseForkName { #[allow(clippy::result_unit_err)] fn fork_name_from_header(&self) -> Result<Option<ForkName>, ()>; } impl ResponseForkName for Response { fn fork_name_from_header(&self) -> Result<Option<ForkName>, ()> { self.headers() .get(CONSENSUS_VERSION_HEADER) .map(|fork_name| { fork_name .to_str() .map_err(|_| ()) .and_then(ForkName::from_str) }) .transpose() } } /// Trait for adding an "accept" header to a request builder. pub trait RequestAccept { fn accept(self, accept: Accept) -> RequestBuilder; } impl RequestAccept for RequestBuilder { fn accept(self, accept: Accept) -> RequestBuilder { self.header(ACCEPT, accept.to_string()) } }
package com.example.feature_toggle_impl.domain

import com.example.feature_toggle_api.LocalTogglesChecker
import com.example.feature_toggle_api.LocalTogglesLoader
import com.example.feature_toggle_api.LocalTogglesUpdater
import com.example.feature_toggle_api.model.LocalToggle
import com.example.feature_toggle_impl.data.LocalAppToggleLoader
import com.example.key_value_storage_api.KeyValueStorage
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext

/**
 * Entry point for local feature toggles: loads the application's declared
 * toggles, persists the set of active keys, and answers membership queries.
 * @property kvStorage key-value storage used to persist the active keys
 * @property appToggleLoader loader for the toggles declared by the application
 */
class LocalTogglesProvider(
    private val kvStorage: KeyValueStorage,
    private val appToggleLoader: LocalAppToggleLoader
) : LocalTogglesChecker, LocalTogglesLoader, LocalTogglesUpdater {

    // Keys of the currently active toggles; populated by load()/setTurnedOn().
    private lateinit var activeToggleKeys: Set<String>

    /**
     * Checks whether the toggle identified by [toggleKey] is turned on.
     */
    override fun isOn(toggleKey: String) = toggleKey in activeToggleKeys

    /**
     * Loads all toggles. Must be called once, before [isOn] is queried.
     */
    override suspend fun load() {
        val declaredToggles = withContext(Dispatchers.IO) { appToggleLoader.loadAll() }
        val persistedKeys = kvStorage.reader.getStringSet(TOGGLES_KV_STORAGE_KEY) ?: setOf()

        val result = LocalTogglesCalculator.calculate(declaredToggles, persistedKeys)
        activeToggleKeys = result.activeToggles

        // Prune persisted keys that no longer correspond to a declared toggle.
        if (result.togglesToRemove.isNotEmpty()) {
            val cleanedKeys = persistedKeys - result.togglesToRemove
            kvStorage.writer.update {
                it.setSetString(TOGGLES_KV_STORAGE_KEY, cleanedKeys)
            }
        }
    }

    /**
     * Turns on exactly the toggles in [toggleKeys]; all others are off.
     */
    override fun setTurnedOn(toggleKeys: Set<String>) {
        activeToggleKeys = toggleKeys
        kvStorage.writer.update { it.setSetString(TOGGLES_KV_STORAGE_KEY, toggleKeys) }
    }

    /**
     * Returns every declared toggle together with its current enabled state.
     */
    override suspend fun getAll(): List<LocalToggle> {
        val declaredToggles = withContext(Dispatchers.IO) { appToggleLoader.loadAll() }
        return withContext(Dispatchers.Default) {
            declaredToggles.map { toggle ->
                LocalToggle(
                    key = toggle.key,
                    title = toggle.title,
                    description = toggle.description,
                    enabled = toggle.key in activeToggleKeys
                )
            }
        }
    }

    companion object {
        private const val TOGGLES_KV_STORAGE_KEY = "LOCAL_TOGGLES"
    }
}
module DefaultTypes where import Type import TyCon import DataCon iBoxT :: TyCon iBoxT = AlgTyCon { tyConUnique = 1, tyConName = "I#", tyConBinders = [], tyConTyVars = [], tyConResKind = tInt, tyConKind = tInt `TArr` tIntU, tyConArity = 1, tcRepName = "Int", algTcRhs = DataTyCon {data_cons=[iBox]} } iBox :: DataCon iBox = MkData {dcName="I#", dcUnique=0, dcOrigArgTys=[tInt], dcOrigResTy=tIntU} iBoxV :: Var iBoxV = mkVar (TyConApp iBoxT [tIntU]) "I#"
/* * Copyright (c) 2020 Nguyen Trung Tin. All rights reserved. * Use of this source code is governed by a BSD-style * license that can be found in the LICENSE file. */ package connect import ( "fmt" "google.golang.org/grpc" ) type Connector map[string]*Connection // Add add connection to connector func (c Connector) Add(connections ...*Connection) Connector { for _, connection := range connections { c[connection.Name] = connection } return c } // Get get connection in the connector by name func (c Connector) Get(name string) (*Connection, error) { connection, ok := c[name] if !ok { return nil, fmt.Errorf("connection name %s not found", name) } return connection, nil } // Delete delete connection in the connector func (c Connector) Delete(connections ...*Connection) Connector { for _, connection := range connections { delete(c, connection.Name) } return c } // Connect connect to the gRPC server // Remember to defer Close() to avoid memory leak func (c Connector) Connect(name string) (*grpc.ClientConn, error) { connection, err := c.Get(name) if err != nil { return nil, err } cnn, err := connection.Connect() if err != nil { return nil, err } return cnn.Conn, nil }
import { __extends } from "tslib"; import { Event as GraphEvent } from '@antv/g-base'; var G6GraphEvent = /** @class */ function (_super) { __extends(G6GraphEvent, _super); function G6GraphEvent(type, event) { var _this = _super.call(this, type, event) || this; _this.item = event.item; _this.canvasX = event.canvasX; _this.canvasY = event.canvasY; _this.wheelDelta = event.wheelDelta; _this.detail = event.detail; return _this; } return G6GraphEvent; }(GraphEvent); export { G6GraphEvent };
/* A module that implements defrag callback mechanisms.
 *
 * Test module for the Redis active-defrag module API: it keeps a set of
 * global strings plus a custom "frag" data type, and counts how often the
 * defrag callbacks are invoked, succeed, resume from a cursor, or see an
 * unexpected cursor. The counters are exported through INFO.
 */

#define REDISMODULE_EXPERIMENTAL_API
#include "redismodule.h"
#include <stdlib.h>

static RedisModuleType *FragType;

/* A value of FragType: `len` separately-allocated buffers, defragged at
 * most `maxstep` buffers per callback invocation (0 = unlimited). */
struct FragObject {
    unsigned long len;
    void **values;
    int maxstep;
};

/* Make sure we get the expected cursor */
unsigned long int last_set_cursor = 0;

unsigned long int datatype_attempts = 0;
unsigned long int datatype_defragged = 0;
unsigned long int datatype_resumes = 0;
unsigned long int datatype_wrong_cursor = 0;
unsigned long int global_attempts = 0;
unsigned long int global_defragged = 0;

int global_strings_len = 0;
RedisModuleString **global_strings = NULL;

/* Allocate `count` module-owned strings to be defragged globally. */
static void createGlobalStrings(RedisModuleCtx *ctx, int count)
{
    global_strings_len = count;
    global_strings = RedisModule_Alloc(sizeof(RedisModuleString *) * count);

    for (int i = 0; i < count; i++) {
        global_strings[i] = RedisModule_CreateStringFromLongLong(ctx, i);
    }
}

/* Global defrag callback: attempt to relocate every global string.
 * A non-NULL return from DefragRedisModuleString means it was moved. */
static int defragGlobalStrings(RedisModuleDefragCtx *ctx)
{
    for (int i = 0; i < global_strings_len; i++) {
        RedisModuleString *new = RedisModule_DefragRedisModuleString(ctx, global_strings[i]);
        global_attempts++;
        if (new != NULL) {
            global_strings[i] = new;
            global_defragged++;
        }
    }
    return 0;
}

/* INFO callback: expose the defrag counters under the "stats" section. */
static void FragInfo(RedisModuleInfoCtx *ctx, int for_crash_report) {
    REDISMODULE_NOT_USED(for_crash_report);

    RedisModule_InfoAddSection(ctx, "stats");
    RedisModule_InfoAddFieldLongLong(ctx, "datatype_attempts", datatype_attempts);
    RedisModule_InfoAddFieldLongLong(ctx, "datatype_defragged", datatype_defragged);
    RedisModule_InfoAddFieldLongLong(ctx, "datatype_resumes", datatype_resumes);
    RedisModule_InfoAddFieldLongLong(ctx, "datatype_wrong_cursor", datatype_wrong_cursor);
    RedisModule_InfoAddFieldLongLong(ctx, "global_attempts", global_attempts);
    RedisModule_InfoAddFieldLongLong(ctx, "global_defragged", global_defragged);
}

/* Build a FragObject with `len` zeroed buffers of `size` bytes each. */
struct FragObject *createFragObject(unsigned long len, unsigned long size, int maxstep) {
    struct FragObject *o = RedisModule_Alloc(sizeof(*o));
    o->len = len;
    o->values = RedisModule_Alloc(sizeof(RedisModuleString*) * len);
    o->maxstep = maxstep;

    for (unsigned long i = 0; i < len; i++) {
        o->values[i] = RedisModule_Calloc(1, size);
    }

    return o;
}

/* FRAG.RESETSTATS */
static int fragResetStatsCommand(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) {
    REDISMODULE_NOT_USED(argv);
    REDISMODULE_NOT_USED(argc);

    datatype_attempts = 0;
    datatype_defragged = 0;
    datatype_resumes = 0;
    datatype_wrong_cursor = 0;
    global_attempts = 0;
    global_defragged = 0;

    RedisModule_ReplyWithSimpleString(ctx, "OK");
    return REDISMODULE_OK;
}

/* FRAG.CREATE key len size maxstep
 * Creates a FragType value at `key` (which must not already exist). */
static int fragCreateCommand(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) {
    if (argc != 5)
        return RedisModule_WrongArity(ctx);

    RedisModuleKey *key = RedisModule_OpenKey(ctx,argv[1],
                                              REDISMODULE_READ|REDISMODULE_WRITE);
    int type = RedisModule_KeyType(key);
    if (type != REDISMODULE_KEYTYPE_EMPTY)
    {
        return RedisModule_ReplyWithError(ctx, "ERR key exists");
    }

    long long len;
    if ((RedisModule_StringToLongLong(argv[2], &len) != REDISMODULE_OK)) {
        return RedisModule_ReplyWithError(ctx, "ERR invalid len");
    }

    long long size;
    if ((RedisModule_StringToLongLong(argv[3], &size) != REDISMODULE_OK)) {
        return RedisModule_ReplyWithError(ctx, "ERR invalid size");
    }

    long long maxstep;
    if ((RedisModule_StringToLongLong(argv[4], &maxstep) != REDISMODULE_OK)) {
        return RedisModule_ReplyWithError(ctx, "ERR invalid maxstep");
    }

    struct FragObject *o = createFragObject(len, size, maxstep);
    RedisModule_ModuleTypeSetValue(key, FragType, o);
    RedisModule_ReplyWithSimpleString(ctx, "OK");
    RedisModule_CloseKey(key);
    return REDISMODULE_OK;
}

/* Data-type free callback: release each buffer, then the object. */
void FragFree(void *value) {
    struct FragObject *o = value;

    for (unsigned long i = 0; i < o->len; i++)
        RedisModule_Free(o->values[i]);
    RedisModule_Free(o->values);
    RedisModule_Free(o);
}

/* Free-effort callback: effort is proportional to the number of buffers. */
size_t FragFreeEffort(RedisModuleString *key, const void *value) {
    REDISMODULE_NOT_USED(key);

    const struct FragObject *o = value;
    return o->len;
}

/* Data-type defrag callback. Returns 1 when interrupted mid-object (a
 * cursor was saved for resumption), 0 when the object is fully processed. */
int FragDefrag(RedisModuleDefragCtx *ctx, RedisModuleString *key, void **value) {
    REDISMODULE_NOT_USED(key);
    unsigned long i = 0;
    int steps = 0;

    int dbid = RedisModule_GetDbIdFromDefragCtx(ctx);
    RedisModule_Assert(dbid != -1);

    /* Attempt to get cursor, validate it's what we're expecting */
    if (RedisModule_DefragCursorGet(ctx, &i) == REDISMODULE_OK) {
        if (i > 0) datatype_resumes++;

        /* Validate we're expecting this cursor */
        if (i != last_set_cursor) datatype_wrong_cursor++;
    } else {
        if (last_set_cursor != 0) datatype_wrong_cursor++;
    }

    /* Attempt to defrag the object itself */
    datatype_attempts++;
    struct FragObject *o = RedisModule_DefragAlloc(ctx, *value);
    if (o == NULL) {
        /* Not defragged */
        o = *value;
    } else {
        /* Defragged */
        *value = o;
        datatype_defragged++;
    }

    /* Deep defrag now */
    for (; i < o->len; i++) {
        datatype_attempts++;
        void *new = RedisModule_DefragAlloc(ctx, o->values[i]);
        if (new) {
            o->values[i] = new;
            datatype_defragged++;
        }

        /* Stop early when the per-object step budget is exhausted or the
         * server asks us to yield; save the cursor so we can resume. */
        if ((o->maxstep && ++steps > o->maxstep) ||
            ((i % 64 == 0) && RedisModule_DefragShouldStop(ctx)))
        {
            RedisModule_DefragCursorSet(ctx, i);
            last_set_cursor = i;
            return 1;
        }
    }

    last_set_cursor = 0;
    return 0;
}

int RedisModule_OnLoad(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) {
    REDISMODULE_NOT_USED(argv);
    REDISMODULE_NOT_USED(argc);

    if (RedisModule_Init(ctx, "defragtest", 1, REDISMODULE_APIVER_1)
        == REDISMODULE_ERR) return REDISMODULE_ERR;

    if (RedisModule_GetTypeMethodVersion() < REDISMODULE_TYPE_METHOD_VERSION) {
        return REDISMODULE_ERR;
    }

    /* The module takes exactly one load-time argument: the number of
     * global strings to create. */
    long long glen;
    if (argc != 1 || RedisModule_StringToLongLong(argv[0], &glen) == REDISMODULE_ERR) {
        return REDISMODULE_ERR;
    }

    createGlobalStrings(ctx, glen);

    RedisModuleTypeMethods tm = {
        .version = REDISMODULE_TYPE_METHOD_VERSION,
        .free = FragFree,
        .free_effort = FragFreeEffort,
        .defrag = FragDefrag
    };

    FragType = RedisModule_CreateDataType(ctx, "frag_type", 0, &tm);
    if (FragType == NULL) return REDISMODULE_ERR;

    if (RedisModule_CreateCommand(ctx, "frag.create",
                                  fragCreateCommand, "write deny-oom", 1, 1, 1) == REDISMODULE_ERR)
        return REDISMODULE_ERR;

    if (RedisModule_CreateCommand(ctx, "frag.resetstats",
                                  fragResetStatsCommand, "write deny-oom", 1, 1, 1) == REDISMODULE_ERR)
        return REDISMODULE_ERR;

    RedisModule_RegisterInfoFunc(ctx, FragInfo);
    RedisModule_RegisterDefragFunc(ctx, defragGlobalStrings);

    return REDISMODULE_OK;
}
package delta import java.util.Arrays final case class Snapshot[+S]( state: S, revision: Revision, tick: Tick) { def map[That](f: S => That): Snapshot[That] = new Snapshot(f(state), revision, tick) def transpose[That](implicit ev: S <:< Option[That]): Option[Snapshot[That]] = state.map(state => this.copy(state = state)) override lazy val hashCode: Int = { this.state match { case bytes: Array[Byte] => Arrays.hashCode(bytes) * 31 + revision * 31 + tick.## * 31 case _ => super.hashCode() } } def stateEquals(thatState: Any): Boolean = this.state.getClass == thatState.getClass && { this.state match { case bytes: Array[Byte] => Arrays.equals(bytes, thatState.asInstanceOf[Array[Byte]]) case _ => this.state == thatState } } def stateEquals(that: Snapshot[_]): Boolean = stateEquals(that.state) override def equals(other: Any): Boolean = other match { case that: Snapshot[_] => this.revision == that.revision && this.tick == that.tick && this.stateEquals(that) case _ => false } }
//! TextFormat and types for building new ones.
//!
//! Thin safe wrappers over the DirectWrite `IDWriteTextFormat` COM
//! interface; each getter/setter forwards to the underlying COM call and
//! translates HRESULTs into `Result`s.

use crate::descriptions::Trimming;
use crate::enums::*;
use crate::factory::Factory;
use crate::font_collection::FontCollection;
use crate::inline_object::InlineObject;

use std::ffi::OsString;
use std::ptr;

use checked_enum::UncheckedEnum;
use com_wrapper::ComWrapper;
use dcommon::Error;
use winapi::shared::winerror::SUCCEEDED;
use winapi::um::dwrite::IDWriteTextFormat;
use wio::com::ComPtr;
use wio::wide::FromWide;

#[doc(inline)]
pub use self::builder::TextFormatBuilder;

#[doc(hidden)]
pub mod builder;

#[derive(ComWrapper, PartialEq)]
#[com(send, sync, debug)]
#[repr(transparent)]
/// Represents a format for laying out text. You can think of this like a Font with all of the
/// little customization boxes filled in.
pub struct TextFormat {
    ptr: ComPtr<IDWriteTextFormat>,
}

impl TextFormat {
    /// Get a builder for creating a new text format.
    pub fn create<'a>(factory: &'a Factory) -> TextFormatBuilder<'a> {
        unsafe { TextFormatBuilder::new(&*factory.get_raw()) }
    }
}

/// Methods shared by text-format-like COM wrappers; implementors supply
/// the raw interface pointer via `raw_tf`.
pub unsafe trait ITextFormat {
    /// Get the flow direction of text in this format.
    fn flow_direction(&self) -> UncheckedEnum<FlowDirection> {
        unsafe { self.raw_tf().GetFlowDirection().into() }
    }

    /// Get the font collection this format loaded its font from.
    /// Returns `None` if the COM call fails or yields a null pointer.
    fn font_collection(&self) -> Option<FontCollection> {
        unsafe {
            let mut ptr = ptr::null_mut();
            let hr = self.raw_tf().GetFontCollection(&mut ptr);
            if SUCCEEDED(hr) && ptr != ptr::null_mut() {
                Some(FontCollection::from_raw(ptr))
            } else {
                None
            }
        }
    }

    /// Get the name of the font family specified for this format.
    fn font_family_name(&self) -> Option<String> {
        unsafe {
            let len = self.raw_tf().GetFontFamilyNameLength();
            // Reserve len + 1 u16s (the +1 leaves room for the NUL the API
            // writes); set_len to `len` afterward drops that terminator.
            let mut buf = Vec::with_capacity(len as usize + 1);
            let hr = self.raw_tf().GetFontFamilyName(buf.as_mut_ptr(), len + 1);
            if SUCCEEDED(hr) {
                buf.set_len(len as usize);
                let osstr = OsString::from_wide(&buf);
                let ff_name = osstr.to_string_lossy().into_owned();
                Some(ff_name)
            } else {
                None
            }
        }
    }

    /// Get the font size associated with this format.
    fn font_size(&self) -> f32 {
        unsafe { self.raw_tf().GetFontSize() }
    }

    /// Get the stretch applied to this format.
    fn font_stretch(&self) -> UncheckedEnum<FontStretch> {
        unsafe { self.raw_tf().GetFontStretch().into() }
    }

    /// Get the style applied to this format.
    fn font_style(&self) -> UncheckedEnum<FontStyle> {
        unsafe { self.raw_tf().GetFontStyle().into() }
    }

    /// Get the weight applied to this format.
    fn font_weight(&self) -> FontWeight {
        unsafe { FontWeight(self.raw_tf().GetFontWeight()) }
    }

    /// Get the incremental tabstop size for this format.
    fn incremental_tabstop(&self) -> f32 {
        unsafe { self.raw_tf().GetIncrementalTabStop() }
    }

    /// Get the line spacing information for this format.
    fn line_spacing(&self) -> Result<LineSpacing, Error> {
        unsafe {
            let mut method = 0;
            let mut spacing = 0.0;
            let mut baseline = 0.0;
            let hr = self
                .raw_tf()
                .GetLineSpacing(&mut method, &mut spacing, &mut baseline);
            if SUCCEEDED(hr) {
                let method = method.into();
                Ok(LineSpacing {
                    method,
                    spacing,
                    baseline,
                })
            } else {
                Err(hr.into())
            }
        }
    }

    /// Get the locale used for this format.
    fn locale_name(&self) -> Result<String, Error> {
        unsafe {
            // Same length-then-fill pattern as `font_family_name` above.
            let len = self.raw_tf().GetLocaleNameLength();
            let mut buf = Vec::with_capacity(len as usize + 1);
            let hr = self.raw_tf().GetLocaleName(buf.as_mut_ptr(), len + 1);
            if SUCCEEDED(hr) {
                buf.set_len(len as usize);
                let osstr = OsString::from_wide(&buf);
                let loc_name = osstr
                    .into_string()
                    .unwrap_or_else(|e| e.to_string_lossy().into_owned());
                Ok(loc_name)
            } else {
                Err(hr.into())
            }
        }
    }

    /// Get the paragraph alignment of text under this format.
    fn paragraph_alignment(&self) -> UncheckedEnum<ParagraphAlignment> {
        unsafe { self.raw_tf().GetParagraphAlignment().into() }
    }

    /// Get the reading direction of text under this format.
    fn reading_direction(&self) -> UncheckedEnum<ReadingDirection> {
        unsafe { self.raw_tf().GetReadingDirection().into() }
    }

    /// Get the alignment of text under this format.
    fn text_alignment(&self) -> UncheckedEnum<TextAlignment> {
        unsafe { self.raw_tf().GetTextAlignment().into() }
    }

    /// Gets the trimming options for text that overflows the layout box.
    ///
    /// The inline object is an omission sign that will be rendered to show that
    /// text was omitted.
    fn trimming(&self) -> Result<(Trimming, Option<InlineObject>), Error> {
        unsafe {
            let mut trimming = std::mem::zeroed();
            let mut ptr = std::ptr::null_mut();
            let hr = self.raw_tf().GetTrimming(&mut trimming, &mut ptr);
            if SUCCEEDED(hr) {
                // The omission-sign pointer may legitimately be null.
                let obj = if !ptr.is_null() {
                    Some(InlineObject::from_raw(ptr))
                } else {
                    None
                };

                Ok((trimming.into(), obj))
            } else {
                Err(hr.into())
            }
        }
    }

    /// Get the word wrapping for text under this format.
    fn word_wrapping(&self) -> UncheckedEnum<WordWrapping> {
        unsafe { self.raw_tf().GetWordWrapping().into() }
    }

    /// Set the flow direction for text under this format.
    fn set_flow_direction(&mut self, value: FlowDirection) -> Result<(), Error> {
        unsafe {
            let hr = self.raw_tf().SetFlowDirection(value as u32);
            if SUCCEEDED(hr) {
                Ok(())
            } else {
                Err(hr.into())
            }
        }
    }

    /// Set the incremental tabstop value for text under this format.
    fn set_incremental_tabstop(&mut self, value: f32) -> Result<(), Error> {
        unsafe {
            let hr = self.raw_tf().SetIncrementalTabStop(value);
            if SUCCEEDED(hr) {
                Ok(())
            } else {
                Err(hr.into())
            }
        }
    }

    /// Set the line spacing metrics for text under this format.
    fn set_line_spacing(
        &mut self,
        method: LineSpacingMethod,
        spacing: f32,
        baseline: f32,
    ) -> Result<(), Error> {
        unsafe {
            let hr = self
                .raw_tf()
                .SetLineSpacing(method as u32, spacing, baseline);
            if SUCCEEDED(hr) {
                Ok(())
            } else {
                Err(hr.into())
            }
        }
    }

    /// Set the paragraph alignment for text under this format.
    fn set_paragraph_alignment(&mut self, value: ParagraphAlignment) -> Result<(), Error> {
        unsafe {
            let hr = self.raw_tf().SetParagraphAlignment(value as u32);
            if SUCCEEDED(hr) {
                Ok(())
            } else {
                Err(hr.into())
            }
        }
    }

    /// Set the reading direction used to lay out text under this format.
    fn set_reading_direction(&mut self, value: ReadingDirection) -> Result<(), Error> {
        unsafe {
            let hr = self.raw_tf().SetReadingDirection(value as u32);
            if SUCCEEDED(hr) {
                Ok(())
            } else {
                Err(hr.into())
            }
        }
    }

    /// Set the text alignment for this format.
    fn set_text_alignment(&mut self, value: TextAlignment) -> Result<(), Error> {
        unsafe {
            let hr = self.raw_tf().SetTextAlignment(value as u32);
            if SUCCEEDED(hr) {
                Ok(())
            } else {
                Err(hr.into())
            }
        }
    }

    /// Sets trimming options for text overflowing the layout width.
    // NOTE(review): takes &self while the other setters take &mut self —
    // looks inconsistent, but changing it would break existing callers.
    fn set_trimming(
        &self,
        trimming: &Trimming,
        omission_sign: Option<&InlineObject>,
    ) -> Result<(), Error> {
        unsafe {
            let omission_sign = match omission_sign {
                Some(sign) => sign.get_raw(),
                None => ptr::null_mut(),
            };
            let hr = self
                .raw_tf()
                .SetTrimming(trimming as *const _ as *const _, omission_sign);
            if SUCCEEDED(hr) {
                Ok(())
            } else {
                Err(hr.into())
            }
        }
    }

    /// Set the word wrapping for text under this format.
    fn set_word_wrapping(&mut self, value: WordWrapping) -> Result<(), Error> {
        unsafe {
            let hr = self.raw_tf().SetWordWrapping(value as u32);
            if SUCCEEDED(hr) {
                Ok(())
            } else {
                Err(hr.into())
            }
        }
    }

    /// Raw access to the underlying COM interface.
    unsafe fn raw_tf(&self) -> &IDWriteTextFormat;
}

unsafe impl ITextFormat for TextFormat {
    unsafe fn raw_tf(&self) -> &IDWriteTextFormat {
        &self.ptr
    }
}

/// Information about the line spacing of a format.
pub struct LineSpacing {
    /// The method used for line spacing in a text layout.
    pub method: UncheckedEnum<LineSpacingMethod>,

    /// The amount of spacing to use.
    pub spacing: f32,

    /// The distance from top of line to baseline. A reasonable ratio to `spacing` is 80 percent.
    pub baseline: f32,
}
// Copyright(c) 2019-2020 Vertical Software - All rights reserved // // This code file has been made available under the terms of the // MIT license. Please refer to LICENSE.txt in the root directory // or refer to https://opensource.org/licenses/MIT namespace Vertical.Tools.TemplateCopy.IO { /// <summary> /// Represents an object that locates assemblies. /// </summary> public interface IAssemblyResolver { /// <summary> /// Finds an assembly. /// </summary> /// <param name="assembly">Assembly name or path.</param> /// <returns>Path</returns> string GetAssemblyPath(string assembly); } }
use v6.c; role Bailador::Command { method run(:$app) { ... } }
package codec var RewardDestination = []string{"Staked", "Stash", "Controller"} var VoteThreshold = []string{"SuperMajorityApprove", "SuperMajorityAgainst", "SimpleMajority"} var StorageHasher = []string{"Blake2_128", "Blake2_256", "Twox128", "Twox256", "Twox128Concat"} var WithdrawReasons = []string{"TransactionPayment", "Transfer", "Reserve", "Fee"} var Bidder = []string{"NewBidder", "ParaId"} var Conviction = []string{"None", "Locked1x", "Locked2x", "Locked3x", "Locked4x", "Locked5x"} var ParachainDispatchOrigin = []string{"Signed", "Parachain"} var StoredState = []string{"Live", "PendingPause", "Paused", "PendingResume"} var UncleEntryItem = []string{"InclusionHeight", "Uncle"} var VoteType = []string{"Binary", "MultiOption"} var ProposalStage = []string{"PreVoting", "Voting", "Completed"} var ProposalCategory = []string{"Signaling"} var VoteStage = []string{"PreVoting", "Commit", "Voting", "Completed"} var TallyType = []string{"OnePerson", "OneCoin"} var Role = []string{"Storage"} var ContentVisibility = []string{"Draft", "Public"} var LiaisonJudgement = []string{"Pending", "Accepted", "Rejected"} var DownloadState = []string{"Started", "Ended"} var EntryMethod = []string{"Paid", "Screening"} var ProposalStatus = []string{"Active", "Cancelled", "Expired", "Approved", "Rejected", "Slashed"} var VoteKind = []string{"Abstain", "Approve", "Reject", "Slash"} func (sb *OffsetBytes) ToRewardDestination() (string, error) { return sb.ToEnumValue(RewardDestination) } func (sb *OffsetBytes) ToVoteThreshold() (string, error) { return sb.ToEnumValue(VoteThreshold) } func (sb *OffsetBytes) ToStorageHasher() (string, error) { return sb.ToEnumValue(StorageHasher) } func (sb *OffsetBytes) ToWithdrawReasons() (string, error) { return sb.ToEnumValue(WithdrawReasons) } func (sb *OffsetBytes) ToBidder() (string, error) { return sb.ToEnumValue(Bidder) } func (sb *OffsetBytes) ToConviction() (string, error) { return sb.ToEnumValue(Conviction) } func (sb *OffsetBytes) 
ToParachainDispatchOrigin() (string, error) { return sb.ToEnumValue(ParachainDispatchOrigin) } func (sb *OffsetBytes) ToSoredState() (string, error) { return sb.ToEnumValue(StoredState) } func (sb *OffsetBytes) ToUncleEntryItem() (string, error) { return sb.ToEnumValue(UncleEntryItem) } func (sb *OffsetBytes) ToVoteType() (string, error) { return sb.ToEnumValue(VoteType) } func (sb *OffsetBytes) ToProposalStage() (string, error) { return sb.ToEnumValue(ProposalStage) } func (sb *OffsetBytes) ToProposalCategory() (string, error) { return sb.ToEnumValue(ProposalCategory) } func (sb *OffsetBytes) ToVoteStage() (string, error) { return sb.ToEnumValue(VoteStage) } func (sb *OffsetBytes) ToTallyType() (string, error) { return sb.ToEnumValue(TallyType) } func (sb *OffsetBytes) ToRole() (string, error) { return sb.ToEnumValue(Role) } func (sb *OffsetBytes) ToContentVisibility() (string, error) { return sb.ToEnumValue(ContentVisibility) } func (sb *OffsetBytes) ToLiaisonJudgement() (string, error) { return sb.ToEnumValue(LiaisonJudgement) } func (sb *OffsetBytes) ToDownloadState() (string, error) { return sb.ToEnumValue(DownloadState) } func (sb *OffsetBytes) ToEntryMethod() (string, error) { return sb.ToEnumValue(EntryMethod) } func (sb *OffsetBytes) ToProposalStatus() (string, error) { return sb.ToEnumValue(ProposalStatus) } func (sb *OffsetBytes) ToVoteKind() (string, error) { return sb.ToEnumValue(VoteKind) }
### Example 1: List the metadata of all service instances.
```powershell
PS C:\> Get-AzHealthcareApisService

Location Name           Kind ResourceGroupName
-------- ----           ---- -----------------
eastus2  azpsapiservice fhir azps_test_group
```

Lists the metadata of all service instances in the subscription.

### Example 2: List the metadata of the service instances in a resource group.
```powershell
PS C:\> Get-AzHealthcareApisService -ResourceGroupName azps_test_group

Location Name           Kind ResourceGroupName
-------- ----           ---- -----------------
eastus2  azpsapiservice fhir azps_test_group
```

Lists the metadata of the service instances in the given resource group.

### Example 3: Get the metadata of a service instance.
```powershell
PS C:\> Get-AzHealthcareApisService -ResourceGroupName azps_test_group -Name azpsapiservice

Location Name           Kind ResourceGroupName
-------- ----           ---- -----------------
eastus2  azpsapiservice fhir azps_test_group
```

Gets the metadata of a single service instance by name.
use macroquad::prelude::*; #[macroquad::main("BasicShapes")] async fn main() { loop { clear_background(Color::LIGHTGRAY); draw_line(40.0, 40.0, 100.0, 200.0, 15.0, Color::BLUE); draw_rectangle(screen_width() / 2.0 - 60.0, 100.0, 120.0, 60.0, Color::GREEN); draw_circle(screen_width() - 30.0, screen_height() - 30.0, 15.0, Color::YELLOW); draw_text("HELLO", 20.0, 20.0, 30.0, Color::DARKGRAY); next_frame().await } }
#! /bin/sh # docker-tag.sh - Create a new tag for a set of Agoric Docker images set -xe SRCTAG=$1 DSTTAG=$2 DOCKERUSER=${3-agoric} if [ -z "$DSTTAG" ]; then echo 1>&2 "Usage: $0 SRCTAG DSTTAG" exit 1 fi for img in agoric-sdk cosmic-swingset-setup cosmic-swingset-solo deployment; do docker pull $DOCKERUSER/$img:$SRCTAG docker tag $DOCKERUSER/$img:$SRCTAG $DOCKERUSER/$img:$DSTTAG docker push $DOCKERUSER/$img:$DSTTAG done
# frozen_string_literal: true module I18n::Processes::Scanners::Files # Reads the files in 'rb' mode and UTF-8 encoding. # # @since 0.9.0 class FileReader # Return the contents of the file at the given path. # The file is read in the 'rb' mode and UTF-8 encoding. # # @param path [String] Path to the file, absolute or relative to the working directory. # @return [String] file contents def read_file(path) result = nil File.open(path, 'rb', encoding: 'UTF-8') { |f| result = f.read } result end end end
functionalchaos.net
===================

Static content used to build the functionalchaos.net website.

Powered by [Sculpin](http://sculpin.io)