text
stringlengths
27
775k
@file:Suppress("NOTHING_TO_INLINE")

package com.soywiz.kds

import java.util.*

/**
 * JVM `actual` implementation of `WeakMap`, backed by [java.util.WeakHashMap]:
 * entries disappear once the key is no longer strongly reachable.
 */
actual class WeakMap<K : Any, V> {
    val wm = WeakHashMap<K, V>()

    actual operator fun contains(key: K): Boolean = wm.containsKey(key)

    actual operator fun set(key: K, value: V) {
        // String keys are rejected: interned strings may never become
        // unreachable, which would silently defeat the weak semantics.
        if (key is String) error("Can't use String as WeakMap keys")
        wm[key] = value
    }

    actual operator fun get(key: K): V? = wm[key]
}

/////////////////

/**
 * JVM `actual` implementation of `FastIdentityMap`: identity-based (===)
 * key lookup via [java.util.IdentityHashMap].
 */
actual class FastIdentityMap<K, V>(dummy: Boolean) {
    val map = IdentityHashMap<K, V>()
}

/** Factory matching the common `expect` declaration. */
actual fun <K, V> FastIdentityMap(): FastIdentityMap<K, V> = FastIdentityMap(true)

actual val <K, V> FastIdentityMap<K, V>.size: Int get() = this.map.size

actual fun <K, V> FastIdentityMap<K, V>.keys(): List<K> = this.map.keys.toList()

actual operator fun <K, V> FastIdentityMap<K, V>.get(key: K): V? = this.map[key]

actual operator fun <K, V> FastIdentityMap<K, V>.set(key: K, value: V) {
    this.map[key] = value
}

actual operator fun <K, V> FastIdentityMap<K, V>.contains(key: K): Boolean = key in this.map

actual fun <K, V> FastIdentityMap<K, V>.remove(key: K) {
    this.map.remove(key)
}

actual fun <K, V> FastIdentityMap<K, V>.clear() = this.map.clear()

actual inline fun <K, V> FastIdentityMap<K, V>.fastKeyForEach(callback: (key: K) -> Unit) {
    // keys() returns a copied list, so the callback may safely mutate the map
    // while we iterate.
    for (key in this.keys()) {
        callback(key)
    }
}
using DigitalRune.Geometry;
using DigitalRune.Graphics.Scene3D;
using DigitalRune.Mathematics;
using DigitalRune.Mathematics.Algebra;
using NUnit.Framework;

namespace DigitalRune.Graphics.Tests
{
    // Unit tests for CameraInstance: pose/view matrix round-tripping, LookAt,
    // change notifications, and projection parameter extraction.
    [TestFixture]
    public class CameraInstanceTest
    {
        // PoseWorld defaults to identity; after assignment, View must equal
        // PoseWorld.Inverse and ViewInverse must equal PoseWorld.
        [Test]
        public void PoseTest()
        {
            CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection()));
            Assert.IsNotNull(cameraInstance.PoseWorld);
            Assert.AreEqual(Vector3F.Zero, cameraInstance.PoseWorld.Position);
            Assert.AreEqual(Matrix33F.Identity, cameraInstance.PoseWorld.Orientation);

            // Set new Pose
            Vector3F position = new Vector3F(1, 2, 3);
            QuaternionF orientation = QuaternionF.CreateRotation(new Vector3F(3, 4, 5), 0.123f);
            cameraInstance.PoseWorld = new Pose(position, orientation);
            Assert.AreEqual(position, cameraInstance.PoseWorld.Position);
            Assert.AreEqual(orientation.ToRotationMatrix33(), cameraInstance.PoseWorld.Orientation);
            Assert.IsTrue(Matrix44F.AreNumericallyEqual(cameraInstance.PoseWorld.ToMatrix44F(), cameraInstance.ViewInverse));
            Assert.IsTrue(Matrix44F.AreNumericallyEqual(cameraInstance.PoseWorld.Inverse.ToMatrix44F(), cameraInstance.View));

            // Set Position and Orientation
            position = new Vector3F(5, 6, 7);
            orientation = QuaternionF.CreateRotation(new Vector3F(1, -1, 6), -0.123f);
            cameraInstance.PoseWorld = new Pose(position, orientation);
            Assert.AreEqual(position, cameraInstance.PoseWorld.Position);
            Assert.AreEqual(orientation.ToRotationMatrix33(), cameraInstance.PoseWorld.Orientation);
            Assert.IsTrue(Matrix44F.AreNumericallyEqual(cameraInstance.PoseWorld.Inverse.ToMatrix44F(), cameraInstance.View));
            Assert.IsTrue(Matrix44F.AreNumericallyEqual(cameraInstance.PoseWorld.ToMatrix44F(), cameraInstance.ViewInverse));
        }

        // Setting View directly must update ViewInverse and PoseWorld so that
        // view-space <-> world-space transforms agree through either path.
        [Test]
        public void ViewTest()
        {
            CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection()));
            Assert.AreEqual(Matrix44F.Identity, cameraInstance.View);
            Assert.AreEqual(Matrix44F.Identity, cameraInstance.ViewInverse);

            Vector3F position = new Vector3F(1, 2, 3);
            Vector3F target = new Vector3F(2, 5, 4);
            Vector3F upVector = new Vector3F(1, 1, 1);
            Matrix44F view = Matrix44F.CreateLookAt(position, target, upVector);
            cameraInstance.View = view;
            Assert.AreEqual(view, cameraInstance.View);
            Assert.AreEqual(view.Inverse, cameraInstance.ViewInverse);

            // The camera origin in world space must map to the view-space origin.
            Vector3F originOfCamera = cameraInstance.PoseWorld.Position;
            originOfCamera = cameraInstance.View.TransformPosition(originOfCamera);
            Assert.IsTrue(Vector3F.AreNumericallyEqual(Vector3F.Zero, originOfCamera));

            Vector4F positionView = new Vector4F(0, 0, -1, 1);
            Vector4F positionView2;
            // Transform a point from view space to world space.
            Vector4F positionWorld = cameraInstance.PoseWorld * positionView;
            Vector4F positionWorld2 = cameraInstance.ViewInverse * positionView;
            Assert.IsTrue(Vector4F.AreNumericallyEqual(positionWorld, positionWorld2));
            // Transform a point from world space to view space.
            positionView = cameraInstance.PoseWorld.Inverse * positionWorld;
            positionView2 = cameraInstance.View * positionWorld;
            Assert.IsTrue(Vector4F.AreNumericallyEqual(positionView, positionView2));

            // Resetting View to identity must reset the pose as well.
            cameraInstance.View = Matrix44F.Identity;
            Assert.AreEqual(Vector3F.Zero, cameraInstance.PoseWorld.Position);
            Assert.AreEqual(Matrix33F.Identity, cameraInstance.PoseWorld.Orientation);
        }

        // Setting ViewInverse must keep View and PoseWorld consistent.
        [Test]
        public void InverseViewTest()
        {
            CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection()));
            Assert.AreEqual(Matrix44F.Identity, cameraInstance.View);
            Assert.AreEqual(Matrix44F.Identity, cameraInstance.ViewInverse);

            Vector3F position = new Vector3F(1, 2, 3);
            Vector3F target = new Vector3F(2, 5, 4);
            Vector3F upVector = new Vector3F(1, 1, 1);
            Matrix44F view = Matrix44F.CreateLookAt(position, target, upVector);
            cameraInstance.ViewInverse = view.Inverse;
            Assert.IsTrue(Matrix44F.AreNumericallyEqual(view, cameraInstance.View));
            Assert.IsTrue(Matrix44F.AreNumericallyEqual(view.Inverse, cameraInstance.ViewInverse));
            Assert.IsTrue(Matrix44F.AreNumericallyEqual(view.Inverse, cameraInstance.PoseWorld.ToMatrix44F()));
        }

        // LookAt (both overloads) must produce the same view matrix as
        // Matrix44F.CreateLookAt for the equivalent position/target/up.
        [Test]
        public void LookAtTest()
        {
            CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection()));
            Vector3F position = new Vector3F(1, 2, 3);
            Vector3F target = new Vector3F(2, 5, 4);
            Vector3F upVector = new Vector3F(1, 1, 1);
            cameraInstance.PoseWorld = new Pose(new Vector3F(1, 2, 3));
            Matrix44F expected = Matrix44F.CreateLookAt(position, target, upVector);
            cameraInstance.LookAt(target, upVector);
            Assert.That(Matrix44F.AreNumericallyEqual(expected, cameraInstance.View));

            position = new Vector3F(-2, 3, -7.5f);
            expected = Matrix44F.CreateLookAt(position, target, upVector);
            cameraInstance.LookAt(position, target, upVector);
            Assert.That(Vector3F.AreNumericallyEqual(position, cameraInstance.PoseWorld.Position));
            Assert.That(Matrix44F.AreNumericallyEqual(expected, cameraInstance.View));
        }

        // Assigning PoseWorld must raise the PoseChanged event.
        [Test]
        public void PoseChangedTest()
        {
            bool poseChanged = false;
            CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection()));
            cameraInstance.PoseChanged += (sender, eventArgs) => poseChanged = true;
            cameraInstance.PoseWorld = new Pose(new Vector3F(1, 2, 3));
            Assert.IsTrue(poseChanged);
        }

        // Changing the projection's far plane must raise BoundingShapeChanged.
        [Test]
        public void ShapeChangedTest()
        {
            bool shapeChanged = false;
            CameraInstance cameraInstance = new CameraInstance(new Camera(new PerspectiveProjection()));
            cameraInstance.BoundingShapeChanged += (sender, eventArgs) => shapeChanged = true;
            cameraInstance.Camera.Projection.Far = 9;
            Assert.IsTrue(shapeChanged);
        }

        // Projection parameters (Width/Height/Near/Far/...) must be recovered
        // correctly from matrices set via Set()/Inverse.
        [Test]
        public void SetProjectionTest()
        {
            Matrix44F projectionMatrix = Matrix44F.CreateOrthographicOffCenter(1, 4, 2, 5, 6, 11);
            OrthographicProjection orthographicProjection = new OrthographicProjection();
            orthographicProjection.Set(projectionMatrix);
            CameraInstance cameraInstance = new CameraInstance(new Camera(orthographicProjection));
            Assert.AreEqual(Vector3F.Zero, cameraInstance.PoseWorld.Position);
            Assert.AreEqual(Matrix33F.Identity, cameraInstance.PoseWorld.Orientation);
            Assert.That(Numeric.AreEqual(3, cameraInstance.Camera.Projection.Width));
            Assert.That(Numeric.AreEqual(3, cameraInstance.Camera.Projection.Height));
            Assert.That(Numeric.AreEqual(1f, cameraInstance.Camera.Projection.AspectRatio));
            Assert.That(Numeric.AreEqual(6, cameraInstance.Camera.Projection.Near));
            Assert.That(Numeric.AreEqual(11, cameraInstance.Camera.Projection.Far));
            Assert.That(Numeric.AreEqual(1, cameraInstance.Camera.Projection.Left));
            Assert.That(Numeric.AreEqual(4, cameraInstance.Camera.Projection.Right));
            Assert.That(Numeric.AreEqual(2, cameraInstance.Camera.Projection.Bottom));
            Assert.That(Numeric.AreEqual(5, cameraInstance.Camera.Projection.Top));
            Assert.That(Numeric.AreEqual(5, cameraInstance.Camera.Projection.Depth));
            Assert.That(Matrix44F.AreNumericallyEqual(orthographicProjection, cameraInstance.Camera.Projection));
            Assert.That(Matrix44F.AreNumericallyEqual(orthographicProjection.Inverse, cameraInstance.Camera.Projection.Inverse));
            Assert.IsNotNull(cameraInstance.BoundingShape);

            PerspectiveProjection perspectiveProjection = new PerspectiveProjection();
            perspectiveProjection.Inverse = Matrix44F.CreatePerspectiveOffCenter(1, 5, 2, 5, 1, 10).Inverse;
            cameraInstance = new CameraInstance(new Camera(perspectiveProjection));
            Assert.AreEqual(Vector3F.Zero, cameraInstance.PoseWorld.Position);
            Assert.AreEqual(Matrix33F.Identity, cameraInstance.PoseWorld.Orientation);
            Assert.That(Numeric.AreEqual(MathHelper.ToRadians(33.690067f), cameraInstance.Camera.Projection.FieldOfViewX));
            Assert.That(Numeric.AreEqual(MathHelper.ToRadians(15.255119f), cameraInstance.Camera.Projection.FieldOfViewY));
            Assert.That(Numeric.AreEqual(4, cameraInstance.Camera.Projection.Width));
            Assert.That(Numeric.AreEqual(3, cameraInstance.Camera.Projection.Height));
            Assert.That(Numeric.AreEqual(4.0f / 3.0f, cameraInstance.Camera.Projection.AspectRatio));
            Assert.That(Numeric.AreEqual(1, cameraInstance.Camera.Projection.Left));
            Assert.That(Numeric.AreEqual(5, cameraInstance.Camera.Projection.Right));
            Assert.That(Numeric.AreEqual(2, cameraInstance.Camera.Projection.Bottom));
            Assert.That(Numeric.AreEqual(5, cameraInstance.Camera.Projection.Top));
            Assert.That(Numeric.AreEqual(1, cameraInstance.Camera.Projection.Near));
            Assert.That(Numeric.AreEqual(10, cameraInstance.Camera.Projection.Far));
            Assert.That(Numeric.AreEqual(9, cameraInstance.Camera.Projection.Depth));
            Assert.IsNotNull(cameraInstance.BoundingShape);
        }
    }
}
import Sequelize from 'sequelize'
import dbConfig from '../config/dbConfig'

// TODO: add logger
const sequelize = new Sequelize(dbConfig.database, dbConfig.username, dbConfig.password, {
  host: dbConfig.host,
  port: dbConfig.port,
  dialect: dbConfig.dialect,
  dialectOptions: dbConfig.dialectOptions,
  operatorsAliases: false,
  pool: {
    max: 5,
    min: 0,
    // NOTE(review): idle is in milliseconds — 1 ms evicts connections almost
    // immediately; presumably 10000 was intended. Confirm before changing.
    idle: 1,
  },
})

// Model factories: each module exports (sequelize) => Model.
const UserQuestion = require('./models/UserQuestion')(sequelize);
const User = require('./models/User')(sequelize);
const Topic = require('./models/Topic')(sequelize);
const TestType = require('./models/TestType')(sequelize);
const Tech = require('./models/Tech')(sequelize);
const Source = require('./models/Source')(sequelize);
const QuestionStatus = require('./models/QuestionStatus')(sequelize);
const Question = require('./models/Question')(sequelize);
const Answer = require('./models/Answer')(sequelize);

// Associations.
Question.hasMany(Source, {foreignKey: 'question_id'});
Source.belongsTo(Question, {foreignKey: 'question_id'});
Question.hasMany(Answer, {foreignKey: 'question_id'});
Answer.belongsTo(Question, {foreignKey: 'question_id'});
// FIXME(review): Sequelize's belongsToMany requires a `through` model/table;
// these two calls throw at runtime as written — confirm the intended join
// tables (or whether hasMany was meant) before shipping.
Tech.belongsToMany(Topic, {foreignKey: 'tech_id'});
Topic.belongsToMany(Question, {foreignKey: 'topic_id'});
// NOTE(review): status_id lives on UserQuestion, which is belongsTo semantics;
// hasOne would expect the FK on QuestionStatus — verify the direction.
UserQuestion.hasOne(QuestionStatus, {foreignKey: 'status_id'});
User.belongsToMany(Question, {through: UserQuestion});
Question.belongsToMany(User, {through: UserQuestion});

sequelize.sync();

export default {
  UserQuestion,
  User,
  Topic,
  TestType,
  Tech,
  Source,
  QuestionStatus,
  Question,
  Answer,
  // Bug fix: was `sequelize.sync.bind(this)` — `this` is undefined at module
  // scope in an ES module, so sync() was detached from its instance.
  sync: sequelize.sync.bind(sequelize),
  close: () => sequelize.connectionManager.close(),
};
<?php
// Regression test: count() on a SimpleXMLElement subclass must dispatch to the
// subclass's count() override (via the Countable handler), not bypass it.
class SXE extends SimpleXmlElement {
    // Echoes a marker so the test output proves this override was invoked,
    // then delegates to the parent implementation (number of child elements).
    public function count() {
        echo "Called Count!\n";
        return parent::count();
    }
}
// Two <c> children, so parent::count() is expected to yield 2.
$str = '<xml><c>asdf</c><c>ghjk</c></xml>';
$sxe = new SXE($str);
var_dump(count($sxe));
?> ==Done==
import 'dart:convert';
import 'package:archive/archive.dart';
import 'package:flutter_trading_volume/models/supported_pairs.dart';
import 'package:flutter_trading_volume/models/trades/binance_trade.dart';
import 'package:flutter_trading_volume/models/trades/bitfinex_trade.dart';
import 'package:flutter_trading_volume/models/trades/bitmex_trade.dart';
import 'package:flutter_trading_volume/models/trades/bitstamp_trade.dart';
import 'package:flutter_trading_volume/models/trades/bybit_trade.dart';
import 'package:flutter_trading_volume/models/trades/coinbase_trade.dart';
import 'package:flutter_trading_volume/models/trades/ftx_trade.dart';
import 'package:flutter_trading_volume/models/trades/kraken_trade.dart';
import 'package:flutter_trading_volume/models/trades/okex_trade.dart';
import 'package:flutter_trading_volume/utils/constants.dart';
import 'package:flutter_trading_volume/websockets/bitstamp_socket.dart';
import 'package:flutter_trading_volume/websockets/callbacks/exchange_callbacks.dart';
import 'package:flutter_trading_volume/websockets/coinbase_socket.dart';
import 'package:flutter_trading_volume/websockets/huobi_socket.dart';
import 'package:flutter_trading_volume/websockets/okex_socket.dart';
import '../binance_socket.dart';
import '../bitfinex_socket.dart';
import '../bitmex_socket.dart';
import '../bybit_socket.dart';
import '../ftx_socket.dart';
import '../kraken_socket.dart';

/// Owns one websocket client per supported exchange, forwards every parsed
/// trade to [ExchangeCallbacks], and manages connect/close lifecycle for the
/// currently selected [SupportedPairs] pair.
class ExchangeManager {
  SupportedPairs _currentPair;
  // Sockets
  BinanceSocket _binanceSocket;
  FtxSocket _ftxSocket;
  ByBitSocket _byBitSocket;
  BitmexSocket _bitmexSocket;
  BitfinexSocket _bitfinexSocket;
  KrakenSocket _krakenSocket;
  BitstampSocket _bitstampSocket;
  CoinbaseSocket _coinbaseSocket;
  HuobiSocket _huobiSocket;
  OkExSocket _okExSocket;
  // Callbacks
  ExchangeCallbacks _exchangeCallbacks;

  ExchangeManager(SupportedPairs pair, ExchangeCallbacks callbacks) {
    this._exchangeCallbacks = callbacks;
    this._currentPair = pair;
    _initExchanges();
  }

  // (Re)creates every socket wrapper for the current pair. Note: sockets are
  // constructed here but not connected — see connectToSocket().
  void _initExchanges() {
    _binanceSocket = new BinanceSocket(pair: _currentPair);
    _ftxSocket = new FtxSocket(pair: _currentPair);
    _byBitSocket = new ByBitSocket(pair: _currentPair);
    _bitmexSocket = new BitmexSocket(pair: _currentPair);
    _bitfinexSocket = new BitfinexSocket(pair: _currentPair);
    _krakenSocket = new KrakenSocket(pair: _currentPair);
    _bitstampSocket = new BitstampSocket(pair: _currentPair);
    _coinbaseSocket = new CoinbaseSocket(pair: _currentPair);
    _huobiSocket = new HuobiSocket(pair: _currentPair);
    _okExSocket = new OkExSocket(pair: _currentPair);
  }

  // Switches the traded pair by rebuilding all sockets.
  // NOTE(review): existing sockets are not closed first here — confirm the
  // caller closes connections before calling this, or connections may leak.
  void updatePairs(SupportedPairs pair) {
    this._currentPair = pair;
    _initExchanges();
  }

  // Subscribes to each socket's stream, parses exchange-specific payloads into
  // trade models, and forwards them with the exchange's price-id constant.
  // Some exchanges deliver a single trade per message, others a list.
  void _listenForDataUpdate() {
    _binanceSocket.socket.stream.listen((event) {
      final trade = BinanceTrade.fromJson(event.toString());
      _exchangeCallbacks.onTrade(trade, BINANCE_PRICE_ID);
    });
    _ftxSocket.socket.stream.listen((event) {
      final trades = FtxTrade.fromJson(event.toString());
      if(trades != null && trades.isNotEmpty) {
        trades.forEach((trade) { _exchangeCallbacks.onTrade(trade, FTX_PRICE_ID); });
      }
    });
    _byBitSocket.socket.stream.listen((event) {
      final trade = ByBitTrade.fromJson(event.toString());
      _exchangeCallbacks.onTrade(trade, BYBIT_PRICE_ID);
    });
    _bitmexSocket.socket.stream.listen((event) {
      final trade = BitmexTrade.fromJson(event.toString());
      _exchangeCallbacks.onTrade(trade, BITMEX_PRICE_ID);
    });
    _bitfinexSocket.socket.stream.listen((event) {
      final trades = BitfinexTrade.fromJson(event.toString());
      if(trades != null && trades.isNotEmpty) {
        trades.forEach((trade) { _exchangeCallbacks.onTrade(trade, BITFINEX_PRICE_ID); });
      }
    });
    _krakenSocket.socket.stream.listen((event) {
      final trades = KrakenTrade.fromJson(event.toString());
      if(trades != null && trades.isNotEmpty) {
        trades.forEach((trade) { _exchangeCallbacks.onTrade(trade, KRAKEN_PRICE_ID); });
      }
    });
    _bitstampSocket.socket.stream.listen((event) {
      final trade = BitstampTrade.fromJson(event.toString());
      _exchangeCallbacks.onTrade(trade, BITSTAMP_PRICE_ID);
    });
    _coinbaseSocket.socket.stream.listen((event) {
      final trade = CoinbaseTrade.fromJson(event.toString());
      _exchangeCallbacks.onTrade(trade, COINBASE_PRICE_ID);
    });
    // OkEx sends deflate-compressed frames; inflate before JSON-decoding.
    _okExSocket.socket.stream.listen((event) {
      final inflater = Inflate(event);
      final trades = OkExTrade.fromJson(utf8.decode(inflater.getBytes()));
      if(trades != null && trades.isNotEmpty) {
        trades.forEach((trade) { _exchangeCallbacks.onTrade(trade, OKEX_PRICE_ID); });
      }
    });
    //TODO: connection doesn't work, why?...
    _huobiSocket.socket.stream.listen((event) {
      //print(event);
      //final trade = CoinbaseTrade.fromJson(event.toString());
      //_exchangeCallbacks.onTrade(trade, COINBASE_PRICE_ID);
    });
  }

  // Connects every socket that is not already connected, then wires the
  // stream listeners. ByBit/BitMEX are restricted to BTC_USDT for now.
  void connectToSocket() {
    if (_binanceSocket.socket == null/* && (_currentExchange == SupportedExchange.ALL || _currentExchange == SupportedExchange.BINANCE)*/) {
      _binanceSocket.connect();
    }
    if (_ftxSocket.socket == null/* && (_currentExchange == SupportedExchange.ALL || _currentExchange == SupportedExchange.FTX)*/) {
      _ftxSocket.connect();
    }
    if(_byBitSocket.socket == null && _currentPair == SupportedPairs.BTC_USDT){
      //TODO: Currently we don't support other pairs for ByBit
      _byBitSocket.connect();
    }
    if(_bitmexSocket.socket == null && _currentPair == SupportedPairs.BTC_USDT){
      //TODO: Currently we don't support other pairs for BitMEX
      _bitmexSocket.connect();
    }
    if(_bitfinexSocket.socket == null ){ _bitfinexSocket.connect(); }
    if(_krakenSocket.socket == null ){ _krakenSocket.connect(); }
    if(_bitstampSocket.socket == null ){ _bitstampSocket.connect(); }
    if(_coinbaseSocket.socket == null ){ _coinbaseSocket.connect(); }
    if(_huobiSocket.socket == null ){ _huobiSocket.connect(); }
    if(_okExSocket.socket == null ){ _okExSocket.connect(); }
    _listenForDataUpdate();
  }

  // Closes every exchange connection.
  void closeConnection() {
    _binanceSocket.closeConnection();
    _ftxSocket.closeConnection();
    _byBitSocket.closeConnection();
    _bitmexSocket.closeConnection();
    _bitfinexSocket.closeConnection();
    _krakenSocket.closeConnection();
    _bitstampSocket.closeConnection();
    _coinbaseSocket.closeConnection();
    _huobiSocket.closeConnection();
    _okExSocket.closeConnection();
  }
}
-- Reader/compiler plugin implementing `quasiquote` (`) and `quasiquote-eval`
-- (,) forms.
-- NOTE(review): this module references several globals that must be provided
-- by the hosting compiler environment (`list`, `symbol`, `compiler`,
-- `lua_ast`, `lua_name`, `lua_functioncall`, `lua_args`, `lua_explist`,
-- `read`) — confirm they are injected before load.
local utils = require("leftry.utils")

-- Walks a quasiquoted value, compiling any embedded `quasiquote-eval` forms
-- into expressions and recursing through lists and Lua AST nodes.
local function quasiquote_eval(invariant, car, output)
  if utils.hasmetatable(car, list) then
    if car:car() == symbol("quasiquote-eval") then
      local cdr = car:cdr()
      assert(list.__len(cdr) == 1, "quasiquote_eval only accepts one parameter.")
      return compiler.expize(invariant, cdr:car(), output)
    end
    return list.cast(car, function(value)
      return quasiquote_eval(invariant, value, output)
    end)
  end
  if lua_ast[getmetatable(car)] then
    return car:gsub(list, function(value)
      return quasiquote_eval(invariant, value, output)
    end)
  end
  return car
end

-- Compiles a `quasiquote-eval` form; names get a repr that wraps them in
-- lua_nameize so they survive quoting, other expressions repr as themselves.
local function compile_quasiquote_eval(invariant, cdr, output)
  local cadr = cdr:car()
  local exp = compiler.expize(invariant, quasiquote_eval(invariant, cadr, output), output)
  if utils.hasmetatable(exp, lua_name) then
    function exp:repr()
      return lua_functioncall.new(lua_name("lua_nameize"),
        lua_args.new(lua_explist({exp})))
    end
  else
    function exp:repr()
      return exp
    end
  end
  return exp
end

-- Converts Lua AST nodes to their quoted representation, leaving
-- `quasiquote-eval` forms untouched for later expansion.
local function escape_lua(invariant, data)
  if lua_ast[getmetatable(data)] then
    return data:repr()
  end
  if utils.hasmetatable(data, list) then
    if data:car() == symbol("quasiquote-eval") then
      return data
    end
    data = list.cast(data, function(value)
      return escape_lua(invariant, value)
    end)
  end
  return data
end

-- Entry point for compiling a `quasiquote` form (exactly one argument).
local function compile_quasiquote(invariant, cdr, output)
  assert(list.__len(cdr) == 1, "quasiquote only accepts one parameter.")
  local cadr = cdr:car()
  return quasiquote_eval(invariant, escape_lua(invariant, cadr), output)
end

-- Reader for the backquote character: wraps the next form in (quasiquote ...).
local function read_quasiquote(invariant, position)
  local rest, values = read(invariant, position + 1)
  if rest then
    values[1] = list(symbol("quasiquote"), values[1])
    return rest, values
  end
end

-- Reader for the comma character: wraps the next form in (quasiquote-eval ...).
local function read_quasiquote_eval(invariant, position)
  local rest, values = read(invariant, position + 1)
  if rest then
    values[1] = list(symbol("quasiquote-eval"), values[1])
    return rest, values
  end
end

-- Bug fix: the module table must be returned — a bare table expression is a
-- Lua syntax error (a stray leading "\" before the first statement was also
-- removed).
return {
  read = {
    [","] = {read_quasiquote_eval},
    ["`"] = {read_quasiquote},
  },
  lua = {
    ["quasiquote"] = {expize=compile_quasiquote, statize=compile_quasiquote},
    ["quasiquote-eval"] = {expize=compile_quasiquote_eval, statize=compile_quasiquote_eval},
  },
}
from django.shortcuts import render
from django.db.models import Max, Count
from leads.models import Team, Distance
from leads.serializers import TeamSerializer, TeamActvSerializer, DistanceSerializer
from rest_framework import generics


class TeamListAPI(generics.ListAPIView):
    """GET: list every Team."""
    queryset = Team.objects.all()
    serializer_class = TeamSerializer


class TeamRetriveAPI(generics.RetrieveAPIView):
    """GET: retrieve a single Team by pk."""
    # NOTE(review): "Retrive" is a typo for "Retrieve", but the class name is
    # public API (referenced from urls.py) — rename in a coordinated change.
    queryset = Team.objects.all()
    serializer_class = TeamSerializer


class TeamRetriveUpdateAPI(generics.RetrieveUpdateAPIView):
    """GET/PUT/PATCH: retrieve or update a Team's activation fields."""
    queryset = Team.objects.all()
    serializer_class = TeamActvSerializer


class DistanceCreateAPI(generics.CreateAPIView):
    """POST: create a Distance record."""
    queryset = Distance.objects.all()
    serializer_class = DistanceSerializer


class DistanceRetriveUpdateDestroyAPI(generics.RetrieveUpdateDestroyAPIView):
    """GET/PUT/PATCH/DELETE: manage a single Distance by pk."""
    queryset = Distance.objects.all()
    serializer_class = DistanceSerializer
// Bug fix: every binding below was an implicit global (no declaration), which
// throws in strict mode / ES modules and pollutes the global object. All
// bindings are now declared with `const`; behavior is otherwise unchanged.
const unorm = require('unorm');

// Function to display Unicode codepoints of a string.
// Printable ASCII is shown quoted; everything else as U+HEX.
function codepoints(string) {
  return string.split('').map(function(chr) {
    const codepoint = chr.charCodeAt(0);
    return (codepoint >= 33 && codepoint <= 126)
      ? JSON.stringify(chr)
      : 'U+' + codepoint.toString(16).toUpperCase();
  }).join(' ');
}

// Scientific Ångström symbol is converted to Scandinavian letter Å by NFC.
const angstrom = '\u212B';
console.log('- Example 1 -');
console.log(codepoints(angstrom));
console.log(codepoints(unorm.nfc(angstrom)));

// German ä and ü decomposed into a and u with Combining Diaeresis by NFD.
const letters = '\u00E4\u00FC';
console.log('- Example 2 -');
console.log(codepoints(letters));
console.log(codepoints(unorm.nfd(letters)));

// String optimized for compatibility (NFKC), i.e. CO₂ becomes CO2.
const scientific = 'CO\u2082 and E=mc\u00B2';
console.log('- Example 3 -');
console.log(scientific);
console.log(unorm.nfkc(scientific));

// NOTE: Rest of the example requires XRegExp: npm install xregexp
// Remove combining characters / marks from a Swedish name, i.e. ö becomes o.
// This is useful for indexing and searching internationalized text.
const XRegExp = require('xregexp');
const name = '\u00C5ngstr\u00F6m';
console.log('- Example 4 -');
console.log(unorm.nfkd(name));
console.log(unorm.nfkd(name).replace(XRegExp('\\p{M}', 'g'), ''));
ActiveDirectory-Password-Change =============================== ![Screenshot](https://raw.github.com/janikvonrotz/ActiveDirectory-Password-Change/master/doc/screenshot.png) * Install dependencies with [bower](https://github.com/bower/bower) * Run a bower update in the project root * Add an ActiveDirectory user which has the right to reset a user's password * Set the variables in `index.php`: * `$ldapuser` * `$ldappwd` * `$ldaphost` * `$SecKey` * Move the project to a webserver * Support for the PHP LDAP module must be enabled * Use an SSL-certified connection when publishing the site to the internet * Open the password change website like this: `https://site.yourdomain.com/index.php?sec=[your secure key from $SecKey]`
<?php
// Adds a dish (?id=...) to the logged-in user's cart, then bounces back to
// the shop page; unauthenticated users are redirected to the login page.
session_start();
// NOTE(review): error_reporting(0) hides all failures (including a failed DB
// connect) — consider logging instead of silencing.
error_reporting(0);
if(isset($_SESSION['email']))
{
    $email = $_SESSION['email'];
    $id=$_GET['id'];
    $errors = array();
    $db = mysqli_connect('localhost:3307', 'root', '', 'foodshala');
    // SECURITY FIX: $_GET['id'] (and the session email) were interpolated
    // directly into the SQL string — classic SQL injection. Use a prepared
    // statement with bound parameters instead.
    $stmt = mysqli_prepare($db, "INSERT INTO cart(email, dishid) VALUES(?, ?)");
    mysqli_stmt_bind_param($stmt, 'ss', $email, $id);
    mysqli_stmt_execute($stmt);
    mysqli_stmt_close($stmt);
    echo "done";
    echo "<script>";
    echo "alert('Added to the cart Successfully!');";
    echo "location='shop.php';";
    echo "</script>";
}
else{
    header('location: userlogin.php');
}
?>
# Alignments These directories hold Turtle-based alignments between Brick and other ontologies, as well as the auxiliary files needed to generate those alignments where necessary. Alignments should be distributed as Turtle files with a specific naming schema. For an ontology `X`, the alignment file should be `Brick-X-alignment.ttl`.
/* Copyright ยฉ LiquidWeb Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cmd import ( "bytes" "fmt" "html/template" "io/ioutil" "os" "path/filepath" "strings" "time" "github.com/spf13/cobra" "gopkg.in/yaml.v2" "github.com/liquidweb/liquidweb-cli/instance" "github.com/liquidweb/liquidweb-cli/utils" ) var planCmd = &cobra.Command{ Use: "plan", Short: "Process YAML plan file", Long: `Process YAML plan file. Examples: 'lw plan --file plan.yaml --var envname=dev' Any value in the plan can optionally utilitize variables in Golang's template style. To access environment variables use .Env.VARNAME (i.e. 
.Env.USER ) Example plan file to create a cloud server: --- cloud: server: create: - type: "SS.VPS" template: "UBUNTU_1804_UNMANAGED" zone: 40460 hostname: "db1.somedomain.com" ips: 1 public-ssh-key: "public ssh key string here " config-id: 88 - type: "SS.VPS" template: "UBUNTU_1804_UNMANAGED" zone: 40460 hostname: "web1.{{- .Var.envname -}}.somedomain.com" ips: 1 public-ssh-key: "public ssh key string here " config-id: 88 `, Run: func(cmd *cobra.Command, args []string) { planFile, _ := cmd.Flags().GetString("file") varSliceFlag, err := cmd.Flags().GetStringSlice("var") if err != nil { lwCliInst.Die(err) } _, err = os.Stat(planFile) if err != nil { if os.IsNotExist(err) { lwCliInst.Die(fmt.Errorf("Plan file \"%s\" does not exist.\n", planFile)) } else { lwCliInst.Die(err) } } planYaml, err := ioutil.ReadFile(filepath.Clean(planFile)) if err != nil { lwCliInst.Die(err) } planYaml, err = processTemplate(varSliceFlag, planYaml) if err != nil { lwCliInst.Die(err) } var plan instance.Plan err = yaml.Unmarshal(planYaml, &plan) if err != nil { lwCliInst.Die(fmt.Errorf("Error parsing YAML file: %s\n", err)) } if err := lwCliInst.ProcessPlan(&plan); err != nil { lwCliInst.Die(err) } }, } func envToMap() map[string]string { envMap := make(map[string]string) for _, v := range os.Environ() { split_v := strings.Split(v, "=") envMap[split_v[0]] = split_v[1] } return envMap } func varsToMap(vars []string) map[string]string { varMap := make(map[string]string) for _, v := range vars { s := strings.Split(v, "=") varMap[s[0]] = s[1] } return varMap } func processTemplate(varSliceFlag []string, planYaml []byte) ([]byte, error) { type TemplateVars struct { Var map[string]string Env map[string]string } tmplVars := &TemplateVars{ Var: varsToMap(varSliceFlag), Env: envToMap(), } var tmplBytes bytes.Buffer tmpl, err := template.New("plan.yaml").Funcs(template.FuncMap{ "generatePassword": func(length int) string { return utils.RandomString(length) }, "now": time.Now, "hex": func(number 
int64) string { return fmt.Sprintf("%X", number) }, }). Parse(string(planYaml)) if err != nil { return nil, err } err = tmpl.Execute(&tmplBytes, tmplVars) if err != nil { return nil, err } return tmplBytes.Bytes(), nil } func init() { rootCmd.AddCommand(planCmd) planCmd.Flags().String("file", "", "YAML file used to define a plan") planCmd.Flags().StringSlice("var", nil, "define variable name") if err := planCmd.MarkFlagRequired("file"); err != nil { lwCliInst.Die(err) } }
package br.com.alura.technews.retrofit.service

import br.com.alura.technews.model.Noticia
import retrofit2.Call
import retrofit2.http.*

/**
 * Retrofit endpoints for the "noticias" (news) REST resource.
 */
interface NoticiaService {

    /** GET /noticias — fetches all news items. */
    @GET("noticias")
    fun buscaTodas(): Call<List<Noticia>>

    /** POST /noticias — persists a new news item and returns the saved entity. */
    @POST("noticias")
    fun salva(@Body noticia: Noticia): Call<Noticia>

    /** PUT /noticias/{id} — replaces the news item with the given id. */
    @PUT("noticias/{id}")
    fun edita(@Path("id") id: Long, @Body noticia: Noticia) : Call<Noticia>

    /** DELETE /noticias/{id} — removes the news item with the given id. */
    @DELETE("noticias/{id}")
    fun remove(@Path("id") id: Long): Call<Void>
}
#[macro_use]
extern crate c2rust_xcheck_derive;
extern crate c2rust_xcheck_runtime;
extern crate c2rust_bitfields;

use c2rust_bitfields::BitfieldStruct;
use c2rust_xcheck_runtime::hash::jodyhash::JodyHasher;
use c2rust_xcheck_runtime::hash::simple::SimpleHasher;
use c2rust_xcheck_runtime::hash::CrossCheckHash as XCH;

// Verifies that the derived cross-check hash of a struct containing packed
// bitfields matches fixed reference values (shared with the C test suite).
#[test]
fn test_bitfields() {
    #[repr(C)]
    #[derive(BitfieldStruct, CrossCheckHash, Default)]
    struct Foo {
        // Two bitfields packed into 3 bytes: `a` in bits 0..=6, `b` in 7..=17.
        #[bitfield(name = "a", ty = "u32", bits = "0..=6")]
        #[bitfield(name = "b", ty = "u32", bits = "7..=17")]
        a_b: [u8; 3],
        // Trailing padding byte, excluded from bitfield accessors.
        #[bitfield(padding)]
        _pad: [u8; 1],
    }

    // These tests should match the ones in struct10.c
    // Case 1: values set through the generated accessors.
    let mut x = Foo::default();
    x.set_a(42);
    x.set_b(1337);
    let x_hash = XCH::cross_check_hash::<JodyHasher, SimpleHasher>(&x);
    assert_eq!(x_hash, Some(0x24e75f75c47e329a));

    // Cases 2-4: raw byte patterns written directly into the backing storage.
    let x = Foo {
        a_b: [0xAA, 0x55, 0xAA],
        _pad: [0x55],
    };
    let x_hash = XCH::cross_check_hash::<JodyHasher, SimpleHasher>(&x);
    assert_eq!(x_hash, Some(0x24e75fad2461b12c));

    let x = Foo {
        a_b: [0x55, 0xAA, 0x55],
        _pad: [0xAA],
    };
    let x_hash = XCH::cross_check_hash::<JodyHasher, SimpleHasher>(&x);
    assert_eq!(x_hash, Some(0xc3e72e2d630778ed));

    let x = Foo {
        a_b: [0x78, 0x56, 0x34],
        _pad: [0x12],
    };
    let x_hash = XCH::cross_check_hash::<JodyHasher, SimpleHasher>(&x);
    assert_eq!(x_hash, Some(0xb6e8a1efb3617525));
}
๏ปฟ// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT license. using System; namespace FASTER.core { /// <summary> /// Configuration settings for serializing objects /// </summary> /// <typeparam name="Key"></typeparam> /// <typeparam name="Value"></typeparam> public class SerializerSettings<Key, Value> { /// <summary> /// Key serializer /// </summary> public Func<IObjectSerializer<Key>> keySerializer; /// <summary> /// Value serializer /// </summary> public Func<IObjectSerializer<Value>> valueSerializer; } /// <summary> /// Interface for variable length in-place objects /// modeled as structs, in FASTER /// </summary> /// <typeparam name="T"></typeparam> public interface IVariableLengthStruct<T> { /// <summary> /// Actual length of object /// </summary> /// <param name="t"></param> /// <returns></returns> int GetLength(ref T t); /// <summary> /// Average length of objects, make sure this includes the object /// header needed to compute the actual object length /// </summary> /// <returns></returns> int GetAverageLength(); /// <summary> /// Initial length, when populating for RMW from given input /// </summary> /// <typeparam name="Input"></typeparam> /// <param name="input"></param> /// <returns></returns> int GetInitialLength<Input>(ref Input input); } /// <summary> /// Length specification for fixed size (normal) structs /// </summary> /// <typeparam name="T"></typeparam> public readonly struct FixedLengthStruct<T> : IVariableLengthStruct<T> { private static readonly int size = Utility.GetSize(default(T)); /// <summary> /// Get average length /// </summary> /// <returns></returns> public int GetAverageLength() => size; /// <summary> /// Get initial length /// </summary> /// <typeparam name="Input"></typeparam> /// <param name="input"></param> /// <returns></returns> public int GetInitialLength<Input>(ref Input input) => size; /// <summary> /// Get length /// </summary> /// <param name="t"></param> /// <returns></returns> 
public int GetLength(ref T t) => size; } /// <summary> /// Settings for variable length keys and values /// </summary> /// <typeparam name="Key"></typeparam> /// <typeparam name="Value"></typeparam> public class VariableLengthStructSettings<Key, Value> { /// <summary> /// Key length /// </summary> public IVariableLengthStruct<Key> keyLength; /// <summary> /// Value length /// </summary> public IVariableLengthStruct<Value> valueLength; } /// <summary> /// Configuration settings for hybrid log /// </summary> public class LogSettings { /// <summary> /// Device used for main hybrid log /// </summary> public IDevice LogDevice = new NullDevice(); /// <summary> /// Device used for serialized heap objects in hybrid log /// </summary> public IDevice ObjectLogDevice = new NullDevice(); /// <summary> /// Size of a segment (group of pages), in bits /// </summary> public int PageSizeBits = 25; /// <summary> /// Size of a segment (group of pages), in bits /// </summary> public int SegmentSizeBits = 30; /// <summary> /// Total size of in-memory part of log, in bits /// </summary> public int MemorySizeBits = 34; /// <summary> /// Fraction of log marked as mutable (in-place updates) /// </summary> public double MutableFraction = 0.9; /// <summary> /// Copy reads to tail of log /// </summary> public bool CopyReadsToTail = false; /// <summary> /// Settings for optional read cache /// Overrides the "copy reads to tail" setting /// </summary> public ReadCacheSettings ReadCacheSettings = null; } /// <summary> /// Configuration settings for hybrid log /// </summary> public class ReadCacheSettings { /// <summary> /// Size of a segment (group of pages), in bits /// </summary> public int PageSizeBits = 25; /// <summary> /// Total size of in-memory part of log, in bits /// </summary> public int MemorySizeBits = 34; /// <summary> /// Fraction of log head (in memory) used for second chance /// copy to tail. 
This is (1 - MutableFraction) for the /// underlying log /// </summary> public double SecondChanceFraction = 0.1; } }
from datetime import datetime


def get_quote():
    """Print today's quote.

    Looks up today's date in the local cache file ``quotes.txt`` first
    (one line per day, formatted ``<date> <quote>``); on a miss, scrapes
    theysaidso.com, prints the quote, and appends it to the cache.

    Returns:
        None. Output is printed via print_quote().
    """
    date = str(datetime.now().date())

    # Check the local cache first. The original compared `line is not ""`
    # (identity, not equality) and crashed when the cache did not exist yet.
    try:
        with open("quotes.txt", 'r', encoding="ISO-8859-1") as quotes:
            for line in quotes:
                if line.split(' ', 1)[0] == date:
                    print_quote(line.split(' ', 1)[1])
                    return None
    except FileNotFoundError:
        pass  # no cache yet; fall through to the web fetch

    print("Getting todays quote. press Ctrl+c to cancel...")
    # Imported lazily so the cache-hit path needs no network libraries.
    import requests
    from bs4 import BeautifulSoup
    try:
        page = requests.get("https://theysaidso.com/quote-of-the-day/", timeout=5)
        soup = BeautifulSoup(page.content, 'html.parser')
        # The quote of the day lives in the first "lead" div of #myCarousel:
        # first <span> is the text, second is the author.
        carousel = soup.find_all(id="myCarousel")
        lead = carousel[0].find_all('div', class_="lead")
        text = lead[0].find('span').get_text()
        author = lead[0].find_all('span')[1].get_text()
        quotation = text + " -" + author
        print_quote(quotation)
        try:
            # Cache the quote so later runs today skip the network.
            with open("quotes.txt", 'a') as quotes:
                quotes.write(date + ' ' + text + ' -' + author + "\n")
        except Exception as e:
            print(e)
    except Exception:
        # `except Exception` (not a bare `except:`) so Ctrl+C actually
        # cancels as the prompt above promises.
        print("Cannot fetch todays quote. Please check your network connection.")


def print_quote(quotation):
    """Print a quotation with the standard banner. Returns None."""
    print("\n>>>Todays quote : \n " + quotation + "\n")


if __name__ == "__main__":
    get_quote()
# frozen_string_literal: true module Resolvers module ErrorTracking class SentryErrorsResolver < BaseResolver def resolve(**args) args[:cursor] = args.delete(:after) project = object.project result = ::ErrorTracking::ListIssuesService.new( project, context[:current_user], args ).execute next_cursor = result[:pagination]&.dig('next', 'cursor') previous_cursor = result[:pagination]&.dig('previous', 'cursor') issues = result[:issues] # ReactiveCache is still fetching data return if issues.nil? Gitlab::Graphql::ExternallyPaginatedArray.new(previous_cursor, next_cursor, *issues) end end end end
// ///////////////////////////////////////////////////////////////////
//
// class: seg
//
// ///////////////////////////////////////////////////////////////////

#ifndef pniseg_h
#define pniseg_h

// ///////////////////////////////////////////////////////////////////

#include "pnimath.h"
#include "pnivec3.h"

// ///////////////////////////////////////////////////////////////////

namespace pni {
namespace math {

// ///////////////////////////////////////////////////////////////////

/**
The seg is a representation of a line with a beginning and end point.
It is stored as the origin (vec3) of the segment, the normalized
direction (vec3) the segment points in, and the length (float).
@note This internal representation differs from another common
representation which has a non-normalized direction and a length which
can be treated as a parametric value ranging from 0 to 1.
*/

class PNIMATHAPI seg {
    public:
        // Scalar typedefs derived from the shared math Trait.
        typedef Trait TraitType;
        typedef TraitType::ValueType ValueType;
        typedef ValueType& RefType;
        typedef const ValueType& ConstRefType;
        typedef seg ThisType;

        // Tag constructor argument that skips member initialization.
        enum InitState { NoInit };

        seg ();
        seg ( InitState );
        seg ( const vec3& posIn, const vec3& dirIn, ValueType lengthIn );
        seg ( const vec3& posIn, const vec3& endIn );
        seg ( const vec3& posIn, ValueType headingIn, ValueType pitchIn, ValueType lengthIn );
        seg ( const seg& orig );
        ~seg ();

        // set methods
        seg& operator = ( const seg& orig );
        void set ( const vec3& posIn, const vec3& dirIn, ValueType lengthIn );
        void set ( const vec3& posIn, const vec3& endIn );
        void set ( const vec3& posIn, ValueType headingIn, ValueType pitchIn, ValueType lengthIn );

        // get methods
        void get ( vec3& posOut, vec3& dirOut, ValueType& lengthOut ) const;
        void get ( vec3& begOut, vec3& endOut ) const;
        void getPos ( vec3& posOut ) const;
        void getDir ( vec3& dirOut ) const;
        const vec3& getPos () const;
        const vec3& getDir () const;
        void setLength ( ValueType val );
        ValueType getLength () const;

        // equality methods
        bool operator == ( const ThisType& vecIn ) const;
        bool equal ( const ThisType& vecIn, ValueType fuzz = TraitType::fuzzVal ) const;

        // get the closest point to pt in the primitive
        bool closestPtIn ( vec3& dest, ValueType xval, ValueType yval, ValueType zval ) const;
        bool closestPtIn ( vec3& dest, const vec3& pt ) const;

        // Point contains tests.
        int contains ( ValueType x, ValueType y, ValueType z ) const;
        int contains ( const vec3& pt ) const;
        int contains ( const seg& segIn ) const;

        // extendBy ()
        // NOTE(review): third parameter is named 'c' — looks like it was
        // meant to be 'z'; confirm against the out-of-line definition.
        //mtcl: mtcl_begin_ignore
        bool extendBy ( ValueType x, ValueType y, ValueType c );
        bool extendBy ( const vec3& pt );
        //mtcl: mtcl_end_ignore

        /// Find intersection of two segs, or none.
        bool isect ( ThisType const& rhs, vec3& dst ) const;

        // simple point interp method
        // this interpolates along the segment given the range [0,1]
        vec3 lerp ( ValueType value ) const;
        void lerp ( vec3& dest, ValueType value ) const;

        // xform methods
        void xform ( const seg& seg, const matrix4& mat );
        void xform4 ( const seg& seg, const matrix4& mat );

    protected:
        vec3 pos;
        vec3 dir;         /// Always normalized
        ValueType length; /// Actual length, not 0 <= length <= 1

    private:
};

/////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////

// Default: zero-length segment (pos and dir default-construct).
PNIMATHINLINE
seg::
seg () :
    length ( TraitType::zeroVal )
{
}

// NoInit: leaves all members uninitialized for speed.
PNIMATHINLINE
seg::
seg ( InitState ) :
    pos ( vec3::NoInit ), dir ( vec3::NoInit )
{
}

PNIMATHINLINE
seg::
seg ( const vec3& posIn, const vec3& dirIn, ValueType lengthIn ) :
    pos ( posIn ), dir ( dirIn ), length ( lengthIn )
{
}

// From begin/end points; delegates to set() which derives dir and length.
PNIMATHINLINE
seg::
seg ( const vec3& posIn, const vec3& endIn )
{
    set ( posIn, endIn );
}

PNIMATHINLINE
seg::
seg ( const vec3& posIn, ValueType headingIn, ValueType pitchIn, ValueType lengthIn )
{
    set ( posIn, headingIn, pitchIn, lengthIn );
}

PNIMATHINLINE
seg::
seg ( const seg& orig ) :
    pos ( orig.pos ), dir ( orig.dir ), length ( orig.length )
{
}

PNIMATHINLINE
seg::
~seg ()
{
}

/////////////////////////////////////////////////////////////////////
// set methods

PNIMATHINLINE
seg&
seg::
operator = ( const seg& orig )
{
    pos = orig.pos;
    dir = orig.dir;
    length = orig.length;
    return *this;
}

// NOTE(review): dirIn is stored as-is; callers are expected to pass a
// normalized direction (see class invariant on `dir`).
PNIMATHINLINE
void
seg::
set ( const vec3& posIn, const vec3& dirIn, ValueType lengthIn )
{
    pos = posIn;
    dir = dirIn;
    length = lengthIn;
}

/////////////////////////////////////////////////////////////////////
// get methods

PNIMATHINLINE
void
seg::
get ( vec3& posOut, vec3& dirOut, ValueType& lengthOut ) const
{
    posOut = pos;
    dirOut = dir;
    lengthOut = length;
}

// Returns the two endpoints: beg = pos, end = pos + dir * length.
PNIMATHINLINE
void
seg::
get ( vec3& begOut, vec3& endOut ) const
{
    begOut = pos;
    endOut = dir;
    endOut *= length;
    endOut += begOut;
}

PNIMATHINLINE
void
seg::
getPos ( vec3& posOut ) const
{
    posOut = pos;
}

PNIMATHINLINE
void
seg::
getDir ( vec3& dirOut ) const
{
    dirOut = dir;
}

PNIMATHINLINE
const vec3&
seg::
getPos () const
{
    return pos;
}

PNIMATHINLINE
const vec3&
seg::
getDir () const
{
    return dir;
}

PNIMATHINLINE
void
seg::
setLength ( ValueType val )
{
    length = val;
}

PNIMATHINLINE
seg::ValueType
seg::
getLength () const
{
    return length;
}

/////////////////////////////////////////////////////////////////////

} // end namespace math
} // end namespace pni

/////////////////////////////////////////////////////////////////////

#endif // pniseg_h
#!/bin/bash
# Build, install and enable the WAF Apache module, restart Apache, and fix
# permissions on the module's working directory.
# NOTE(review): paths below are hard-coded to one dev machine — adjust before reuse.

# Compile mod_waf.c and install it into Apache's module dir (-i), activating
# it in the server config (-a).
sudo apxs -a -i -c mod_waf.c
sudo service apache2 restart
# Give the Apache worker user ownership of the WAF data directory.
sudo chown -R www-data.www-data /home/dexter/git_working/waf_apache_module/waf
sudo chmod -R 774 /home/dexter/git_working/waf_apache_module/waf
# NOTE(review): 777 on the shared object is overly permissive (world-writable
# code loaded by Apache) — confirm whether this is really required.
sudo chmod 777 /usr/lib/apache2/modules/mod_waf.so
; VL 2014 -- VL Verilog Toolkit, 2014 Edition ; Copyright (C) 2008-2015 Centaur Technology ; ; Contact: ; Centaur Technology Formal Verification Group ; 7600-C N. Capital of Texas Highway, Suite 300, Austin, TX 78731, USA. ; http://www.centtech.com/ ; ; License: (An MIT/X11-style license) ; ; Permission is hereby granted, free of charge, to any person obtaining a ; copy of this software and associated documentation files (the "Software"), ; to deal in the Software without restriction, including without limitation ; the rights to use, copy, modify, merge, publish, distribute, sublicense, ; and/or sell copies of the Software, and to permit persons to whom the ; Software is furnished to do so, subject to the following conditions: ; ; The above copyright notice and this permission notice shall be included in ; all copies or substantial portions of the Software. ; ; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING ; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER ; DEALINGS IN THE SOFTWARE. ; ; Original author: Jared Davis <jared@centtech.com> (in-package "VL2014") (include-book "std/strings/cat" :dir :system) (include-book "std/util/defval" :dir :system) (include-book "centaur/fty/fixequiv" :dir :system) (include-book "centaur/fty/basetypes" :dir :system) (local (include-book "misc/assert" :dir :system)) (local (include-book "arithmetic")) (local (std::add-default-post-define-hook :fix)) (defsection url-encoding :parents (utilities) :short "Functions for % encoding strings for use in URLs, as described in <a href='http://tools.ietf.org/html/rfc3986'>RFC 3986</a>." 
:long "<p>Per RFC 3986, the only unreserved characters are ALPHA, DIGIT, -, ., _, and ~. We implement some functions to percent-encode other characters in character lists and strings.</p>") (local (xdoc::set-default-parents url-encoding)) (define vl-url-encode-char ((x characterp)) :short "URL encode a single character. (slow, logically nice version)." :returns (encoding character-listp "Encoded version of X, in proper order.") :long "<p>See @(see vl-fast-url-encode-char) for an faster, array-lookup alternative.</p>" (let ((x (char-fix x))) (if (or (and (char<= #\A x) (char<= x #\Z)) (and (char<= #\a x) (char<= x #\z)) (and (char<= #\0 x) (char<= x #\9)) (member x '(#\- #\_ #\. #\~))) (list x) (let* ((hex-code (explode-atom (char-code x) 16)) (hex-code (if (eql (len hex-code) 1) (cons #\0 hex-code) hex-code))) (cons #\% hex-code)))) /// (local (progn (assert! (equal (implode (vl-url-encode-char #\a)) "a")) (assert! (equal (implode (vl-url-encode-char #\Space)) "%20")) (assert! (equal (implode (vl-url-encode-char (code-char 0))) "%00"))))) (define vl-make-url-encode-array ((n natp)) :parents (*vl-url-encode-array*) :guard (<= n 255) :hooks nil (if (zp n) (list (cons n (vl-url-encode-char (code-char n)))) (cons (cons n (vl-url-encode-char (code-char n))) (vl-make-url-encode-array (- n 1))))) (defval *vl-url-encode-array* :short "Array binding character codes to the pre-computed URL encodings." :showval t (compress1 'vl-url-encode-array (cons '(:header :dimensions (256) :maximum-length 257 :name vl-url-encode-array) (vl-make-url-encode-array 255)))) (define vl-fast-url-encode-char ((x :type character)) :short "URL encode a single character. 
(fast, array-based version)" :inline t :enabled t :verify-guards nil :hooks nil (mbe :logic (vl-url-encode-char x) :exec (aref1 'vl-url-encode-array *vl-url-encode-array* (char-code x))) /// (local (in-theory (disable aref1))) (local (defun test (n) (and (equal (aref1 'vl-url-encode-array *vl-url-encode-array* n) (vl-url-encode-char (code-char n))) (if (zp n) t (test (- n 1)))))) (local (defthm l0 (implies (and (test n) (natp n) (natp i) (<= i n)) (equal (aref1 'vl-url-encode-array *vl-url-encode-array* i) (vl-url-encode-char (code-char i)))))) (local (defthm l1 (implies (and (natp i) (<= i 255)) (equal (aref1 'vl-url-encode-array *vl-url-encode-array* i) (vl-url-encode-char (code-char i)))) :hints(("Goal" :use ((:instance l0 (n 255))))))) (local (defthm l2 (implies (characterp x) (equal (aref1 'vl-url-encode-array *vl-url-encode-array* (char-code x)) (vl-url-encode-char x))))) (verify-guards vl-fast-url-encode-char$inline)) (define vl-url-encode-chars-aux ((chars character-listp) acc) :short "URL encode a list of characters onto an accumulator in reverse order." :returns (encoded character-listp :hyp (character-listp acc)) :verbosep t (if (atom chars) acc (vl-url-encode-chars-aux (cdr chars) (revappend (vl-fast-url-encode-char (car chars)) acc))) /// (defthm true-listp-of-vl-url-encode-chars-aux (equal (true-listp (vl-url-encode-chars-aux x acc)) (true-listp acc)))) (define vl-url-encode-chars ((x character-listp)) :short "Simple way to URL encode a list of characters." :returns (encoded character-listp) :inline t ; This could be optimized with nreverse, but since the printer only uses the ; aux function anyway, I haven't bothered. (reverse (vl-url-encode-chars-aux x nil)) /// (defthm true-listp-of-vl-url-encode-chars (true-listp (vl-url-encode-chars x)) :rule-classes :type-prescription)) (define vl-url-encode-string-aux :short "Efficiently way to URL encode a string, in reverse order, without exploding it." 
((x stringp) (n natp) (xl (eql xl (length x))) acc) :guard (<= n xl) :long "<p>This has such a nice logical definition that we just leave it enabled.</p>" :enabled t ; Removed after v7-2 by Matt K. since logically, the definition is ; non-recursive: ; :measure (nfix (- (nfix xl) (nfix n))) :verify-guards nil :hooks nil (mbe :logic (vl-url-encode-chars-aux (nthcdr n (explode x)) acc) :exec (b* (((when (mbe :logic (zp (- (nfix xl) (nfix n))) :exec (eql n xl))) acc) (char (char x n)) (encoding (vl-fast-url-encode-char char)) (acc (revappend encoding acc))) (vl-url-encode-string-aux x (+ 1 (lnfix n)) xl acc))) /// (local (in-theory (enable vl-url-encode-string-aux vl-url-encode-chars-aux))) (verify-guards vl-url-encode-string-aux)) (define vl-url-encode-string :short "Simple way to URL encode a string." ((x stringp :type string)) :returns (encoded stringp :rule-classes :type-prescription) :split-types t :inline t (let ((x (mbe :logic (str-fix x) :exec x))) (str::rchars-to-string (vl-url-encode-string-aux x 0 (length x) nil))) /// (local (assert! (let ((x "foo123$%20 blah !==[]{}7&*^!@&*^&*)($")) (equal (vl-url-encode-string x) (implode (vl-url-encode-chars (explode x))))))))
<?php

namespace LAuth\Plugins\UEditor;

use Illuminate\Http\Request;

/**
 * UEditor upload handler for video files.
 *
 * Inherits the whole upload flow from Uploadfile and only overrides the
 * configuration with the video-specific settings from config/ueditor.php.
 */
class Uploadvideo extends Uploadfile implements Contracts\UEditor
{
    /**
     * @param Request $request Current HTTP request carrying the uploaded file.
     */
    public function __construct(Request $request)
    {
        parent::__construct($request);
        $this->config = [
            "path" => config('ueditor.videoPathFormat'),  // storage path pattern
            "size" => config('ueditor.videoMaxSize'),     // maximum upload size
            "allow" => config('ueditor.videoAllowFiles'), // allowed file extensions
            'name' => config('ueditor.videoFieldName'),   // upload form field name
        ];
    }
}
--- title: ComponentCounts searchTitle: Lua Spell ComponentCounts weight: 1 hidden: true menuTitle: ComponentCounts --- ## ComponentCounts ```lua Spell:ComponentCounts(number i); -- number ```
var browserify = require('browserify'),
    watchify = require('watchify'),
    gulp = require('gulp'),
    merge = require('merge-stream'),
    file = require('gulp-file'),
    jsonEditor = require("gulp-json-editor"),
    del = require('del'),
    source = require('vinyl-source-stream'),
    fs = require('fs'),
    sourceFile = './frame.js',
    destFolder = './build/',
    destFile = 'frame.bundle.js';

// Remove the previous build output (runs after 'version').
gulp.task('clean', ['version'], function () {
    return del(destFolder);
});

// Generate version.json and stamp manifest.json with the current version.
gulp.task('version', function () {
    // Read both files as text and trim so trailing newlines can't leak into
    // the generated JSON. The original read version-metadata.txt without an
    // encoding, yielding a Buffer rather than a string.
    let version = fs.readFileSync('../../../version.txt', 'utf8').trim();
    let versionMetadata = fs.readFileSync('../../../version-metadata.txt', 'utf8').trim();

    // Append metadata as a "-suffix" only when present.
    let fullVersion = versionMetadata ? version + '-' + versionMetadata : version;

    var versionJson = `{
  "base": "${version}",
  "metadata": "${versionMetadata}",
  "full": "${fullVersion}"
}`;

    let manifestPipe = gulp.src("./manifest.json")
        .pipe(jsonEditor({
            'version': version
        }))
        .pipe(gulp.dest('.'));

    let versionPipe = file('version.json', versionJson, {src: true})
        .pipe(gulp.dest('.'));

    return merge(manifestPipe, versionPipe);
});

// Copy static assets into the build folder.
gulp.task('copy', ['version', 'clean'], function () {
    return gulp.src([
        './*.json',
        './*.png',
        './*.js',
        '!gulpfile.js',
        './*.html',
        'semantic/dist/semantic.min.css'
    ]).pipe(gulp.dest(destFolder));
});

// Bundle frame.js into build/frame.bundle.js.
gulp.task('browserify', ['version', 'clean', 'copy'], function () {
    return browserify(sourceFile)
        .bundle()
        .pipe(source(destFile))
        .pipe(gulp.dest(destFolder));
});

gulp.task('default', ['version', 'clean', 'copy', 'browserify']);
๏ปฟusing System; using System.Collections.Generic; using System.Linq; using System.Net; using System.Text; using System.Threading.Tasks; using HtmlAgilityPack; namespace BRUParserTable { public class BRUParser { WebClient webClient = new WebClient(); string Return; public async Task<string> GetTable(int number) { string page = webClient.DownloadString($"http://vuz2.bru.by/rate/{number}/"); HtmlDocument doc = new HtmlDocument(); doc.LoadHtml(page); var query = from table in doc.DocumentNode.SelectNodes("//table").Cast<HtmlNode>() from row in table.SelectNodes("tr").Cast<HtmlNode>() from cell in row.SelectNodes("th|td").Cast<HtmlNode>() select new { Table = table.Id, CellText = cell.InnerText }; foreach (var cell in query) { Return += string.Format("{0}: {1}", cell.Table, cell.CellText); } return Return; } } }
--- order: 1 title: scala_ๅŸบ็ก€ date: 2021-02-08 09:57:38 permalink: /pages/b8ef2f/ categories: - ่ฏญ่จ€ - scala tags: - null comment: true --- # ็ฌฌไธ€็ซ  ๅŸบ็ก€ ## ๅธธ็”จ็ฑปๅž‹ scalaไธญๅธธ็”จ็ฑปๅž‹ๅฆ‚ไธ‹๏ผš - Byte - Char - Short - Int - Long - Float - Double - Booean ่ฟ™ไบ›็ฑปๅž‹้ƒฝๆ˜ฏ็ฑป๏ผŒๆ‰€ไปฅๅœจscalaไธญไธ้œ€่ฆๅŒ…่ฃ…็ฑปๅž‹๏ผŒๅœจๅŸบๆœฌ็ฑปๅž‹ๅ’ŒๅŒ…่ฃ…็ฑปๅž‹ไน‹้—ด็š„่ฝฌๆขๅทฅไฝœๆ˜ฏscala็ผ–่ฏ‘ๅ™จ็š„ไบ‹ใ€‚ ้™คๆญคไน‹ๅค–๏ผŒ่ฟ˜ๆœ‰ `Null` ็ฑปๅž‹๏ผŒๆŒ‡ไปฃ `null` ๆˆ–่€…็ฉบๅผ•็”จใ€‚ `Nothing` ๆ˜ฏๆ‰€ๆœ‰ๅ…ถๅฎƒ็ฑปๅž‹็š„ๅญ็ฑปๅž‹๏ผŒๅŒ…ๆ‹ฌ็ฉบๅ€ผใ€‚ `Any` ๆ˜ฏๅ…ถๅฎƒ็ฑปๅž‹็š„็ˆถ็ฑปๅž‹๏ผŒ `AnyRef` ๆ˜ฏๅ…ถๅฎƒๅผ•็”จ็ฑปๅž‹็š„็ˆถ็ฑปๅž‹ใ€‚ ๅœจscalaไธญ๏ผŒๆˆ‘ไปฌไฝฟ็”จๆ–นๆณ•่€Œไธๆ˜ฏๅผบๅˆถ็ฑปๅž‹่ฝฌๆข๏ผŒๆฅๅšๆ•ฐๅ€ผ็ฑปๅž‹ไน‹้—ด็š„่ฝฌๆขใ€‚ไพ‹ๅฆ‚๏ผš ```scala 99.44.toInt // 99 99.toChar // 'c' ``` ๅ’ŒJavaไธ€ๆ ท๏ผŒ `toString` ๅฐ†ไปปๆ„ๅฏน่ฑก่ฝฌๆขๆˆๅญ—็ฌฆไธฒ๏ผŒ่ฆๅฐ†ๅŒ…ๅซไบ†ๆ•ฐๅญ—็š„ๅญ—็ฌฆไธฒ่ฝฌๆขๆˆๆ•ฐๅญ—๏ผŒไฝฟ็”จ `toInt` ๆˆ–่€… `toDouble` ใ€‚ ## ็ฎ—ๆœฏๅ’Œๆ“ไฝœ็ฌฆ้‡่ฝฝ ๅ’ŒJava็›ธๆฏ”๏ผŒScalaๅนถๆฒกๆœ‰ๆไพ› `++` ๅ’Œ `--` ๆ“ไฝœ็ฌฆ๏ผŒๆˆ‘ไปฌ้œ€่ฆไฝฟ็”จ `+=1` ๆˆ–่€… `-=1` ๏ผ› ๅฏนไบŽๅธธ่ง„็š„ `BigInt` ๅ’Œ `BigDecimal` ๅฏน่ฑก๏ผŒๆˆ‘ไปฌๅฏไปฅไฝฟ็”จๅธธ่ง„็š„ๆ–นๆณ•ไฝฟ็”จ้‚ฃไบ›ๆ•ฐๅญฆๆ“ไฝœ็ฌฆ๏ผš ```scala val x:BigInt = 12121212 x*x*x ``` ๅœจjavaไธญ๏ผŒๆˆ‘ไปฌ้œ€่ฆไฝฟ็”จ `x.multiply(x).multiply(x)` ใ€‚ ## ่ฐƒ็”จๅ‡ฝๆ•ฐๅ’Œๆ–นๆณ• ็›ธๆฏ”Java๏ผŒๅœจscalaไธญไฝฟ็”จๆ•ฐๅญฆๅ‡ฝๆ•ฐๆ›ด็ฎ€ๅ•๏ผŒๆˆ‘ไปฌไธ้œ€่ฆไปŽๆŸไธช็ฑป็š„่ฐƒ็”จๅฎƒ็š„้™ๆ€ๆ–นๆณ•ใ€‚ ```scala import scala.math._ sqrt(2) pow(2,4) ``` ๅœจไฝฟ็”จไปฅscalaๅผ€ๅคด็š„ๅŒ…ๆ—ถ๏ผŒๆˆ‘ไปฌๅฏไปฅ็œ็•ฅscala็š„ๅ‰็ผ€ใ€‚ไพ‹ๅฆ‚ `import math._` ็ญ‰ไปทไบŽ `import scala.math._` ใ€‚ Scalaไธญๆฒกๆœ‰้™ๆ€ๆ–นๆณ•๏ผŒไฝ†ๆ˜ฏๆไพ›ไบ†ๅ•ไพ‹ๅฏน่ฑกใ€‚ ไธๅธฆๅ‚ๆ•ฐ็š„ๆ–นๆณ•้€šๅธธไธไฝฟ็”จๅœ†ๆ‹ฌๅท๏ผŒไธ€่ˆฌๆฅ่ฎฒ๏ผŒๆฒกๆœ‰ๅ‚ๆ•ฐๅนถไธๆ”นๅ˜ๅฝ“ๅ‰ๅฏน่ฑก็š„ๆ–นๆณ•้ƒฝไธๅธฆๅœ†ๆ‹ฌๅทใ€‚ scalaไธญๅ…่ฎธไฝฟ็”จๆ•ฐๅญ— `*` ๅญ—็ฌฆไธฒ๏ผŒไปŽ่€Œๅฎž็Žฐๅคๅˆถๅญ—็ฌฆไธฒ็š„ๅŠŸ่ƒฝใ€‚ ## applyๆ–นๆณ• ๅœจscalaไธญ๏ผŒๆˆ‘ไปฌ้€šๅธธไฝฟ็”จ็ฑปไผผๅ‡ฝๆ•ฐ่ฐƒ็”จ็š„่ฏญๆณ•ใ€‚ไพ‹ๅฆ‚๏ผš ```scala "Hello"(4) // o ``` 
ไฝ ๅฏไปฅๅฐ†่ฟ™็ง็”จๆณ•ๅฝ“ไฝœ `()` ๆ“ไฝœ็ฌฆ็š„้‡่ฝฝๅฝขๅผ๏ผŒๅฎƒ่ƒŒๅŽๅŽŸ็†ๆ˜ฏๅฎž็Žฐไธ€ไธชๅไธบ `apply` ็š„ๆ–นๆณ•ใ€‚ๆ‰€ไปฅ `"Hello"(4)` ็›ธๅฝ“ไบŽไปฅไธ‹ไปฃ็ ๏ผš ```scala "Hello".apply(4) ```
# GogoKit - viagogo API Client Library for PHP

[![Package Version](https://img.shields.io/packagist/v/viagogo/gogokit.svg?style=flat)][version]
[![Total Downloads](https://img.shields.io/packagist/dt/viagogo/gogokit.svg?style=flat)][downloads]
[![Code Climate](https://img.shields.io/codeclimate/github/viagogo/gogokit.php.svg?style=flat)][codeclimate]

[version]: https://packagist.org/packages/viagogo/gogokit
[downloads]: https://packagist.org/packages/viagogo/gogokit
[codeclimate]: https://codeclimate.com/github/viagogo/gogokit.php
[apidocs]: http://developer.viagogo.net

GogoKit is a lightweight, viagogo API client library for PHP. Our [developer site][apidocs] documents all of the viagogo APIs.

## Installation

[composer]: https://getcomposer.org

Install via [Composer][composer].

```
$ composer require viagogo/gogokit
```

## Usage

[apidocsgettingstarted]: http://developer.viagogo.net/#getting-started

See our [developer site][apidocsgettingstarted] for more examples.

```php
// All methods require authentication. To get your viagogo OAuth credentials,
// See TODO: docs url
$configuration = new Viagogo\Core\ViagogoConfiguration();
$configuration->clientId = 'CLIENT_ID';
$configuration->clientSecret = 'CLIENT_SECRET';

$viagogoClient = new Viagogo\ViagogoClient($configuration);
$viagogoClient->setToken($viagogoClient->getOAuthClient()->getClientAccessToken());

// Get an event by id
$event = $viagogoClient->getEventClient()->getEvent(676615);

// Get a list of results that match your search query
$searchResults = $viagogoClient->getSearchClient()->getSearch("FC Barcelona tickets");
```

## Supported Platforms

* PHP 5.5 or higher

## How to contribute

All submissions are welcome. Fork the repository, read the rest of this README file and make some changes. Once you're done with your changes, send a pull request. Thanks!

## Need Help? Found a bug?

[submitanissue]: https://github.com/viagogo/gogokit.php/issues

Just [submit an issue][submitanissue] if you need any help.
And, of course, feel free to submit pull requests with bug fixes or changes.
<html>
<head>
welcome to simplexue
</head>
<body>
<?php
// Simple login check against the `php` table.
if ($_POST['user'] && $_POST['pass']) {
    $conn = mysql_connect("********", "*****", "********");
    // mysql_connect() is a procedural API returning a resource/false; the
    // original object-style $conn->connect_error check could never work.
    if (!$conn) {
        die("Connection failed: " . mysql_error());
    }
    mysql_select_db("phpformysql") or die("Could not select database");

    // Quote the array keys (bare `user`/`pass` relied on deprecated constant
    // fallback) and escape the user-controlled value before interpolating it
    // into SQL — the original query was an injection vector.
    $user = mysql_real_escape_string($_POST['user'], $conn);
    $pass = md5($_POST['pass']);

    $sql = "select pw from php where user='$user'";
    $query = mysql_query($sql);
    if (!$query) {
        printf("Error: %s\n", mysql_error($conn));
        exit();
    }
    $row = mysql_fetch_array($query, MYSQL_ASSOC);
    //echo $row["pw"];
    // Case-insensitive compare of the stored hash with md5 of the input.
    if (($row['pw']) && (!strcasecmp($pass, $row['pw']))) {
        echo "<p>Logged in! Key:************** </p>";
    } else {
        echo("<p>Log in failure!</p>");
    }
}
?>
<form method=post action=index.php>
<input type=text name=user value="Username">
<input type=password name=pass value="Password">
<input type=submit>
</form>
</body>
<a href="index.txt">
</html>
import { CloudFrontToS3 } from "@aws-solutions-constructs/aws-cloudfront-s3";
import {
  aws_certificatemanager,
  aws_route53,
  aws_route53_targets,
  aws_s3_deployment,
} from "aws-cdk-lib";
import { Construct } from "constructs";

interface Props {
  hostedZone: aws_route53.IPublicHostedZone;
  certificate: aws_certificatemanager.ICertificate;
}

/**
 * Serves the built app from S3 behind CloudFront on `app.<zone name>`,
 * deploys the build artifacts, and creates an alias A record for it.
 */
export class AppDistributionConstruct extends Construct {
  readonly hostedZone: aws_route53.IPublicHostedZone;

  constructor(scope: Construct, id: string, props: Props) {
    super(scope, id);

    const { hostedZone, certificate } = props;
    // Fix: the readonly field was declared but never assigned.
    this.hostedZone = hostedZone;

    const appDistributionDomainName = `app.${hostedZone.zoneName}`;

    const { s3Bucket: appBucket, cloudFrontWebDistribution: appDistribution } =
      new CloudFrontToS3(this, "AppDistribution", {
        insertHttpSecurityHeaders: false,
        cloudFrontDistributionProps: {
          certificate,
          domainNames: [appDistributionDomainName],
        },
      });

    new aws_s3_deployment.BucketDeployment(this, "AppDeployment", {
      destinationBucket: appBucket!,
      distribution: appDistribution!,
      sources: [aws_s3_deployment.Source.asset("sources/app/build")],
    });

    new aws_route53.ARecord(this, "AppRecord", {
      zone: hostedZone,
      // Reuse the computed constant so the record cannot drift from the
      // distribution's configured domain (template was duplicated before).
      recordName: appDistributionDomainName,
      target: aws_route53.RecordTarget.fromAlias(
        new aws_route53_targets.CloudFrontTarget(appDistribution)
      ),
    });
  }
}
package uk.co.appsbystudio.geoshare.friends.manager

/**
 * MVP presenter contract for the friends manager screen.
 * Semantics below are inferred from the method names — confirm against the
 * implementing class.
 */
interface FriendsManagerPresenter {
    /** Loads/refreshes the friend list (presumably). */
    fun friends()

    /** Handles selection of the view-pager page at index [item]. */
    fun viewpagerItem(item: Int)

    /** Starts the friend search flow (presumably). */
    fun search()

    /** Called when the user's session is no longer valid. */
    fun invalidSession()

    /** Tear-down hook, called when the view stops. */
    fun stop()
}
<?php

namespace App\Http\Controllers;

use Illuminate\Http\Request;

/**
 * Runs the Python search script with user-supplied parameters and renders
 * the first line of the result it writes to b.txt.
 */
class ResultController extends Controller
{
    public function result(Request $request)
    {
        // Run from the public directory. The original issued a separate
        // exec('cd ...'), which had no effect: each exec() spawns its own
        // shell, so the working directory never persisted to the next call.
        // NOTE(review): the original path was app_path().'..\\public' —
        // public_path() is assumed equivalent; confirm on the target system.
        chdir(public_path());

        // Escape every user-controlled value: interpolating raw request
        // input into a shell command was a command-injection vector.
        $cmd = 'python script.py'
            . ' ' . escapeshellarg($request->input('dep'))
            . ' ' . escapeshellarg($request->input('arr'))
            . ' ' . escapeshellarg($request->input('debut'))
            . ' ' . escapeshellarg($request->input('fin'))
            . ' ' . escapeshellarg($request->input('sel'));
        exec($cmd, $output, $ret_code);

        // The script writes its answer to b.txt (relative to the cwd above).
        $f = fopen("b.txt", "r");
        $res = fgets($f);
        fclose($f); // the original leaked this handle

        return view('result', ['res' => $res]);
    }
}
//Autogenerated by SSDCPortal.EntityGenerator
using SSDCPortal.Constants;
using System;
using System.Collections.Generic;
using System.ComponentModel;

namespace SSDCPortal.Shared.DataInterfaces
{
    /// <summary>
    /// Data contract for a portal message record.
    /// </summary>
    public interface IMessage
    {
        /// <summary>Message identifier.</summary>
        Int32 Id { get; set; }
        /// <summary>Display name of the sending user.</summary>
        String UserName { get; set; }
        /// <summary>Message body.</summary>
        String Text { get; set; }
        /// <summary>When the message was sent.
        /// NOTE(review): time zone (UTC vs local) is not specified here —
        /// confirm against the code that writes it.</summary>
        DateTime When { get; set; }
        /// <summary>Id of the sending user.</summary>
        Guid UserID { get; set; }
        /// <summary>Navigation reference to the sending user.</summary>
        IApplicationUser Sender { get; set; }
    }
}
-- @testpoint: openGauss keyword command_function_code (non-reserved), used as a role name

-- Keyword without quotes: succeeds
drop role if exists command_function_code;
create role command_function_code with password 'gauss@123' valid until '2020-12-31';

-- Keyword in double quotes: succeeds
drop role if exists "command_function_code";
create role "command_function_code" with password 'gauss@123' valid until '2020-12-31';

-- Keyword in single quotes: expected to fail with a reasonable error
drop role if exists 'command_function_code';
create role 'command_function_code' with password 'gauss@123' valid until '2020-12-31';

-- Keyword in backquotes: expected to fail with a reasonable error
drop role if exists `command_function_code`;
create role `command_function_code` with password 'gauss@123' valid until '2020-12-31';
package ru.otus.otuskotlin.marketplace.backend.repo.dynamo

import ru.otus.otuskotlin.marketplace.backend.repo.test.*
import java.util.*

/*
Never do things the way this test does. It uses a production DynamoDB
database with test tables, so tables get created after every build. If they
are not cleaned up, the AWS DynamoDB bill can grow quite large.

Testing should instead be done against the local DynamoDB Docker image:
https://hub.docker.com/r/amazon/dynamodb-local
 */

//class RepoAdDynamoCreateTest: RepoAdCreateTest() {
//    override val repo = RepoAdDynamo(initObjects = initObjects, table = "test-${UUID.randomUUID()}")
//}
//
//class RepoAdDynamoReadTest: RepoAdReadTest() {
//    override val repo = RepoAdDynamo(initObjects = initObjects, table = "test-${UUID.randomUUID()}")
//}
//
//class RepoAdDynamoUpdateTest: RepoAdUpdateTest() {
//    override val repo = RepoAdDynamo(initObjects = initObjects, table = "test-${UUID.randomUUID()}")
//}
//
//class RepoAdDynamoDeleteTest: RepoAdDeleteTest() {
//    override val repo = RepoAdDynamo(initObjects = initObjects, table = "test-${UUID.randomUUID()}")
//}
//
//// Does not pass with global secondary indexes: the index does not update in time
//class RepoAdDynamoSearchTest: RepoAdSearchTest() {
//    override val repo = RepoAdDynamo(initObjects = initObjects, table = "test-${UUID.randomUUID()}")
//}
package com.foryouandme.data.repository.auth.network.request

import com.squareup.moshi.Json

/** Generic login envelope: the payload is serialized under the "user" key. */
data class LoginRequest<T>(@Json(name = "user") val user: T)

/** Credentials for phone-number + SMS verification-code login. */
data class PhoneLoginRequest(
    @Json(name = "phone_number") val phoneNumber: String,
    @Json(name = "verification_code") val verificationCode: String
)

/**
 * Credentials for PIN login.
 * NOTE(review): the PIN is serialized under the JSON key "email" — this looks
 * like a copy-paste from an email-based request. Confirm against the API
 * contract before changing, as renaming the key would break the wire format.
 */
data class PinLoginRequest(
    @Json(name = "email") val pin: String,
)
package br.charles.repository;

import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;

import br.charles.model.Contato;

/**
 * Paging repository for {@link Contato} entities keyed by a String id.
 */
@Repository
public interface ContatoRepository extends PagingAndSortingRepository<Contato, String> {

	/** Returns one page of all contacts. */
	public Page<Contato> findAll(Pageable pageable);

	/**
	 * Pages contacts whose name contains the given fragment.
	 * NOTE(review): the query lower-cases the column but not the parameter,
	 * so callers must pass {@code busca} already lower-cased for the match
	 * to be case-insensitive — confirm with call sites.
	 */
	@Query("SELECT p FROM Contato p " + "WHERE lower(nome) like %:busca% ")
	public Page<Contato> busca(@Param("busca") String busca, Pageable pageable);
}
import 'core/room.dart';

import 'options.dart';

/// Main entry point to connect to a room.
/// {@category Room}
class LiveKitClient {
  static const version = '1.0.0';

  /// Convenience helper that joins a LiveKit server in one call.
  ///
  /// Returns the connected [Room] on success. On failure the partially
  /// constructed room is disposed before the original error is rethrown, so
  /// no resources are leaked.
  /// Alternatively, instantiate [Room] and call [Room.connect] directly.
  static Future<Room> connect(
    String url,
    String token, {
    ConnectOptions? connectOptions,
    RoomOptions? roomOptions,
  }) async {
    final newRoom = Room();
    try {
      await newRoom.connect(
        url,
        token,
        connectOptions: connectOptions,
        roomOptions: roomOptions,
      );
    } catch (error) {
      // Never hand back (or leak) a half-initialized room.
      await newRoom.dispose();
      rethrow;
    }
    return newRoom;
  }
}
/*
 * Copyright 2018 Nazmul Idris. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/** Gradle plugin coordinates (buildscript classpath dependencies). */
object GradlePlugins {
    /** Pinned plugin versions; defaults double as the single source of truth. */
    data class Versions(val gradle: String = "3.3.0",
                        val kotlin: String = "1.3.20",
                        val junit5: String = "1.2.0.0")

    val versions = Versions()
    val gradle = "com.android.tools.build:gradle:${versions.gradle}"
    val kotlin = "org.jetbrains.kotlin:kotlin-gradle-plugin:${versions.kotlin}"
    val junit5 = "de.mannodermaus.gradle.plugins:android-junit5:${versions.junit5}"
}

/** Android SDK levels used by the modules' build files. */
object Versions {
    val compile_sdk = 28
    val target_sdk = 26
    val min_sdk = 16
}

/** Production (implementation/kapt) dependency coordinates. */
object Deps {
    /** Pinned library versions. */
    data class Versions(val arch_comp: String = "2.0.0",
                        val design: String = "1.0.0",
                        val gson: String = "2.8.5",
                        val gms: String = "16.0.0",
                        val places: String = "1.0.0",
                        val dagger2: String = "2.17",
                        val junit5: String = "5.2.0",
                        val crayon: String = "0.1.0")

    val versions = Versions()
    // Kotlin stdlib is versioned together with the Kotlin Gradle plugin.
    val kotlin_stdlib_jdk8 = "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${GradlePlugins.versions.kotlin}"
    val arch_comp = "androidx.lifecycle:lifecycle-extensions:${versions.arch_comp}"
    val arch_comp_annotation = "androidx.lifecycle:lifecycle-compiler:${versions.arch_comp}"
    val material_design = "com.google.android.material:material:${versions.design}"
    val vector_drawable = "androidx.vectordrawable:vectordrawable:${versions.design}"
    val recycler_view = "androidx.recyclerview:recyclerview:${versions.design}"
    val gms_places = "com.google.android.libraries.places:places-compat:${versions.places}"
    val gms_location = "com.google.android.gms:play-services-location:${versions.gms}"
    val gson = "com.google.code.gson:gson:${versions.gson}"
    val dagger2 = "com.google.dagger:dagger:${versions.dagger2}"
    val dagger2_annotation = "com.google.dagger:dagger-compiler:${versions.dagger2}"
    val crayon = "com.importre:crayon:${versions.crayon}"
}

/** Test-only dependency coordinates. */
object TestingDeps {
    /** Pinned test-library versions. */
    data class Versions(val assertj: String = "3.11.1",
                        val junit5: String = "5.2.0",
                        val mockk: String = "1.8.9",
                        val roboelectric: String = "3.8",
                        val junit4: String = "4.12")

    val versions = Versions()
    val junit5_jupiter = "org.junit.jupiter:junit-jupiter-api:${versions.junit5}"
    val junit5_jupiter_runtime = "org.junit.jupiter:junit-jupiter-engine:${versions.junit5}"
    val junit5_jupiter_params = "org.junit.jupiter:junit-jupiter-params:${versions.junit5}"
    val junit4_legacy = "junit:junit:${versions.junit4}"
    val junit5_vintage = "org.junit.vintage:junit-vintage-engine:${versions.junit5}"
    val assertj = "org.assertj:assertj-core:${versions.assertj}"
    val mockk = "io.mockk:mockk:${versions.mockk}"
    // NOTE(review): "roboelectric" is a misspelling of "robolectric"; left
    // as-is because renaming the properties would break build scripts that
    // reference them.
    val roboelectric = "org.robolectric:robolectric:${versions.roboelectric}"
}
# Data Access ๆ•ฐๆฎ่ฎฟ้—ฎ ๆœฌๅ•ๅ…ƒๅˆ›ๅปบไบŽ2015-10-22๏ผŒ็”จไบŽๅญ˜ๅ‚จC#ไธญไธŽๆ•ฐๆฎๆœ‰ๅ…ณ็š„็ซ ่Š‚ๅ†…ๅฎนใ€‚ ๅ…ถไธญๅฐ†ๆถ‰ๅŠไปฅไธ‹ๅ‡ ไธชๆ–น้ข็š„ๅ†…ๅฎน๏ผš * **ๆ–‡ไปถ็ณป็ปŸๆ•ฐๆฎ** * **XML** * **LINQ็ฎ€ไป‹** * **ๅบ”็”จLINQ**
package org.jim.common.cluster;

import java.util.UUID;

import org.jim.common.ImPacket;

/**
 * Cluster routing value object: the packet is delivered according to whichever
 * member field has a value (group, userid, ip); if toAll is true it is sent to
 * everyone.<br>
 * The packet field must never be null.
 * @author WChao
 * 2018-05-20 15:10:29
 */
public class ImClusterVo implements java.io.Serializable {
	private static final long serialVersionUID = 6978027913776155664L;

	/** Identifies this node within the cluster; generated once per process. */
	public static final String CLIENTID = UUID.randomUUID().toString();

	/** The payload to route; must not be null. */
	private ImPacket packet;
	/** Originating node id (defaults to this process's CLIENTID). */
	private String clientId = CLIENTID;
	/** Target group name, if routing to a group. */
	private String group;
	/** Target user id, if routing to a user. */
	private String userid;
	/** Target token, if routing by token. */
	private String token;
	/** Target ip, if routing by ip. */
	private String ip;
	/**
	 * ChannelContext's id
	 */
	private String channelId;
	/** When true the packet is broadcast to all nodes. */
	private boolean toAll = false;

	public ImPacket getPacket() {
		return packet;
	}

	public void setPacket(ImPacket packet) {
		this.packet = packet;
	}

	public String getGroup() {
		return group;
	}

	public void setGroup(String group) {
		this.group = group;
	}

	public String getUserid() {
		return userid;
	}

	public void setUserid(String userid) {
		this.userid = userid;
	}

	public String getIp() {
		return ip;
	}

	public void setIp(String ip) {
		this.ip = ip;
	}

	/**
	 *
	 * @author: WChao
	 */
	public ImClusterVo() {
	}

	public ImClusterVo(ImPacket packet) {
		this.packet = packet;
	}

	/**
	 * Unused scratch entry point; kept for compatibility.
	 * @param args
	 * @author: WChao
	 */
	public static void main(String[] args) {

	}

	public boolean isToAll() {
		return toAll;
	}

	public void setToAll(boolean toAll) {
		this.toAll = toAll;
	}

	public String getClientId() {
		return clientId;
	}

	public void setClientId(String clientId) {
		this.clientId = clientId;
	}

	public String getChannelId() {
		return channelId;
	}

	public void setChannelId(String channelId) {
		this.channelId = channelId;
	}

	public String getToken() {
		return token;
	}

	public void setToken(String token) {
		this.token = token;
	}
}
--- title: JPA Relation 2 author: Njade date: 2020-12-16 00:25:00 +0900 categories: [JPA] tags: [JPA] --- ์ด ๊ฒŒ์‹œ๊ธ€์€ ์ธํ”„๋Ÿฐ์˜ [๊น€์˜ํ•œ๋‹˜์˜ ๊ฐ•์˜](https://www.inflearn.com/course/ORM-JPA-Basic)๋ฅผ ๋ณด๊ณ  ์ •๋ฆฌํ•œ ๊ฒƒ์ž…๋‹ˆ๋‹ค. ์‹ค์ „ ์˜ˆ์ œ์˜ ํŒ ๋“ฑ์„ ์ œ์™ธํ•œ ์ฝ”๋“œ๋Š” ์ฒจ๋ถ€ํ•˜์ง€ ์•Š์Šต๋‹ˆ๋‹ค. ๊ฐ•์˜๋ฅผ ๋ด์ฃผ์„ธ์š”. --- ## ๋‹ค์–‘ํ•œ ์—ฐ๊ด€๊ด€๊ณ„ ๋งตํ•‘ --- * ์—ฐ๊ด€๊ด€๊ณ„ ๋งตํ•‘์‹œ ๊ณ ๋ ค์‚ฌํ•ญ 3๊ฐ€์ง€ * ๋‹ค๋Œ€์ผ [N:1] * ์ผ๋Œ€๋‹ค [1:N] * ์ผ๋Œ€์ผ [1:1] * ๋‹ค๋Œ€๋‹ค [N:N] --- ## ์—ฐ๊ด€๊ด€๊ณ„ ๋งตํ•‘์‹œ ๊ณ ๋ ค์‚ฌํ•ญ 3๊ฐ€์ง€ --- ### ๋‹ค์ค‘์„ฑ * ๋‹ค๋Œ€์ผ: @ManyToOne * ์ผ๋Œ€๋‹ค: @OneToMany * ์ผ๋Œ€์ผ: @OneToOne * ๋‹ค๋Œ€๋‹ค: @ManyToMany > ์‹ค๋ฌด์—์„œ ์‚ฌ์šฉํ•˜์ง€ ๋ง ๊ฒƒ ### ๋‹จ๋ฐฉํ–ฅ, ์–‘๋ฐฉํ–ฅ * ํ…Œ์ด๋ธ”: ์™ธ๋ž˜ ํ‚ค ํ•˜๋‚˜๋กœ ์–‘์ชฝ ์กฐ์ธ์ด ๊ฐ€๋Šฅํ•œ ๋ฐฉํ–ฅ์ด๋ผ๋Š” ๊ฐœ๋…์ด ์—†์Œ * ๊ฐ์ฒด: ์ฐธ์กฐ์šฉ ํ•„๋“œ๊ฐ€ ์žˆ๋Š” ์ชฝ๋งŒ ์ฐธ์กฐ ๊ฐ€๋Šฅ, ํ•œ์ชฝ๋งŒ ์ฐธ์กฐํ•˜๋ฉด ๋‹จ๋ฐฉํ–ฅ, ์–‘์ชฝ ์ฐธ์กฐ๋ฉด ์–‘๋ฐฉํ–ฅ ### ์—ฐ๊ด€๊ด€๊ณ„์˜ ์ฃผ์ธ * ํ…Œ์ด๋ธ”์€ ์™ธ๋ž˜ ํ‚ค๊ฐ€ ํ•˜๋‚˜ * ๊ฐ์ฒด๋Š” ์ฐธ์กฐ๊ฐ€ 2๊ตฐ๋ฐ * ๋‘ ๊ฐ์ฒด ์ค‘ ํ…Œ์ด๋ธ”์˜ ์™ธ๋ž˜ ํ‚ค๋ฅผ ๊ด€๋ฆฌํ•  ๊ณณ์„ ์ •ํ•ด์•ผ ํ•จ. * ์™ธ๋ž˜ ํ‚ค๋ฅผ ๊ด€๋ฆฌํ•˜๋Š” ์ฐธ์กฐ๊ฐ€ ์žˆ๋Š” ๊ณณ์ด ์ฃผ์ธ * ์ฃผ์ธ์ด ์•„๋‹Œ ๊ณณ์€ ์กฐํšŒ๋งŒ ๊ฐ€๋Šฅ * ๋‹ค๋Œ€์ผ, ์ผ๋Œ€๋‹ค ๋“ฑ์—์„œ ์•ž์— ๋‚˜์˜ค๋Š” ๊ฒƒ์ด ์ฃผ์ธ --- ## ๋‹ค๋Œ€์ผ [N:1] --- * ๋‹ค๋Œ€์ผ ๋‹จ๋ฐฉํ–ฅ์ƒํ™ฉ์—์„œ ๋‹ค๋Œ€์ผ ์–‘๋ฐฉํ–ฅ์œผ๋กœ์˜ ํ™•์žฅ์€ ํ…Œ์ด๋ธ”์— ์˜ํ–ฅ์„ ์ฃผ์ง€ ์•Š๊ณ  ์ฝ”๋“œ์ƒ์œผ๋กœ๋งŒ ์ถ”๊ฐ€๊ฐ€ ๊ฐ€๋Šฅ * ์™ธ๋ž˜ํ‚ค๊ฐ€ ์žˆ๋Š” ์ชฝ์ด ์—ฐ๊ด€๊ด€๊ณ„์˜ ์ฃผ์ธ --- ## ์ผ๋Œ€๋‹ค [1:N] --- * 1์ด ์ฃผ์ธ * ์ผ๋ฐ˜์ ์œผ๋กœ ๊ถŒ์žฅํ•˜์ง€ ์•Š์Œ. * ํ…Œ์ด๋ธ”์—์„œ ์ƒ๊ฐํ•˜๋ฉด N์ชฝ์— ๋ฌด์กฐ๊ฑด ์™ธ๋ž˜ํ‚ค๊ฐ€ ๋“ค์–ด๊ฐ. * ์ด ๊ฒฝ์šฐ 1์ชฝ ๊ฐ์ฒด๊ฐ€ ๋ฐ”๋€Œ๋ฉด ์ž์‹ ์˜ ํ…Œ์ด๋ธ”์ด ์•„๋‹Œ ๋‹ค๋ฅธ ํ…Œ์ด๋ธ”๋กœ sql์ด ์‹คํ–‰๋˜์–ด ์ฟผ๋ฆฌ๊ฐ€ ํ•œ ๋ฒˆ ๋” ๋‚˜๊ฐ. * ๊ฐ์ฒด์™€ ํ…Œ์ด๋ธ”์˜ ๊ด€๊ณ„๋ฅผ ๋ช…ํ™•ํ•˜๊ฒŒ ํŒŒ์•…ํ•˜์ง€ ์•Š์œผ๋ฉด ์ฝ”๋“œ์™€ sql์ด ๋งตํ•‘๋˜์ง€ ์•Š์•„ ํ•ด์„์ƒ์˜ ์–ด๋ ค์›€์ด ์ƒ๊ธธ ์ˆ˜ ์žˆ์Œ. 
* ๊ฐ์ฒด์ง€ํ–ฅ์ ์œผ๋กœ๋Š” ์‚ด์ง ๋ถ€์ ์ ˆํ•˜๋”๋ผ๋„ DB์„ค๊ณ„์— ๋งž์ถฐ ๋‹ค๋Œ€์ผ ๊ด€๊ณ„๋กœ ์„ค๊ณ„๋กœ ๋ฐ”๊พธ๋Š” ๊ฒƒ์ด ์ข‹์Œ. * @JoinColumn์„ ๊ผญ ์‚ฌ์šฉํ•˜์—ฌ์•ผ ํ•˜๋ฉฐ ์ด๋ฅผ ์‚ฌ์šฉํ•˜์ง€ ์•Š์œผ๋ฉด ์กฐ์ธ ํ…Œ์ด๋ธ”์„ ์‚ฌ์šฉํ•˜๊ฒŒ ๋จ. (ํ…Œ์ด๋ธ”์ด ํ•˜๋‚˜ ๋” ์ƒ๊น€.) * ์ผ๋Œ€๋‹ค ์–‘๋ฐฉํ–ฅ์€ ๊ณต์‹์ ์œผ๋กœ ์กด์žฌํ•˜์ง€ ์•Š์ง€๋งŒ ์‚ฌ์šฉ์€ ๊ฐ€๋Šฅํ•˜๋‹ค. ```java @ManyToOne @JoinColumn(insertable = false, updatable = false) ``` * ์œ„ ๋‘๊ฐœ์˜ ์–ด๋…ธํ…Œ์ด์…˜์„ ํ†ตํ•ด ์ฝ๊ธฐ ์ „์šฉ ํ•„๋“œ๋ฅผ ์‚ฌ์šฉํ•ด์„œ ์–‘๋ฐฉํ–ฅ์ฒ˜๋Ÿผ ์‚ฌ์šฉํ•˜๋Š” ๋ฐฉ๋ฒ•์ด๋‹ค. --- ## ์ผ๋Œ€์ผ [1:1] --- * ์ผ๋Œ€์ผ์€ ๋ฐ˜๋Œ€๋„ ์ผ๋Œ€์ผ * ์ฃผ ํ…Œ์ด๋ธ”์ด๋‚˜ ๋Œ€์ƒ ํ…Œ์ด๋ธ” ์ค‘์— ์™ธ๋ž˜ ํ‚ค ์„ ํƒ์ด ๊ฐ€๋Šฅ * ์™ธ๋ž˜ ํ‚ค์— DB์— ์œ ๋‹ˆํฌ ์ œ์•ฝ์กฐ๊ฑด์ด ์ถ”๊ฐ€๋˜์–ด์•ผ ํ•œ๋‹ค. * ๋‹ค๋Œ€์ผ๊ณผ ์œ ์‚ฌํ•จ. * ์™ธ๋ž˜ํ‚ค๊ฐ€ ์žˆ๋Š” ๊ณณ์ด ์—ฐ๊ด€๊ด€๊ณ„์˜ ์ฃผ์ธ * ๋ฐ˜๋Œ€ํŽธ์€ mappedBy ์ ์šฉ. * ์ผ๋Œ€์ผ์ด์ง€๋งŒ ๋Œ€์ƒ ํ…Œ์ด๋ธ”์— ์™ธ๋ž˜ํ‚ค๊ฐ€ ์žˆ๋Š” ๋‹จ๋ฐฉํ–ฅ์˜ ๊ฒฝ์šฐ JPA๊ฐ€ ์ง€์›ํ•ด์ฃผ์ง€ ์•Š์Œ. * ์–‘๋ฐฉํ–ฅ์ธ ๊ฒฝ์šฐ์—๋Š” ๋Œ€์ƒ ํ…Œ์ด๋ธ”์— ์™ธ๋ž˜ํ‚ค๊ฐ€ ์žˆ์œผ๋ฉด ๊ฐ€๋Šฅํ•˜์ง€๋งŒ ์‚ฌ์‹ค ์ผ๋Œ€์ผ ์ฃผ ํ…Œ์ด๋ธ”์˜ ์–‘๋ฐฉํ–ฅ๊ณผ ๋™์ผ. ### ์ฃผ ํ…Œ์ด๋ธ”์— ์™ธ๋ž˜ํ‚ค * ๊ฐœ๋ฐœ์ƒ์—์„œ๋Š” ์ฃผ ํ…Œ์ด๋ธ”์— ์™ธ๋ž˜ํ‚ค๋ฅผ ๊ฐ€์ง€๊ณ  ์žˆ๋Š” ๊ฒฝ์šฐ JPA ๋งตํ•‘์ด ํŽธ๋ฆฌํ•˜๊ณ  ์ถ”๊ฐ€ ์ฟผ๋ฆฌ๋ฅผ ์‹คํ–‰ํ•  ํ•„์š”๊ฐ€ ์—†์–ด ์ด์ ์ด ์žˆ์„ ์ˆ˜ ์žˆ๋‹ค. * ๊ฐ’์ด ์—†์œผ๋ฉด ์™ธ๋ž˜ํ‚ค์— null์ด ๋“ค์–ด๊ฐ€๋Š” ๋“ฑ์˜ ๋‹จ์ ์ด ์žˆ์„ ์ˆ˜ ์žˆ๋‹ค. ### ๋Œ€์ƒ ํ…Œ์ด๋ธ”์— ์™ธ๋ž˜ ํ‚ค * DB๊ด€์ ์—์„œ ์ผ๋Œ€๋‹ค๊ฐ€ ๋˜๋Š” ๊ฒฝ์šฐ ํ…Œ์ด๋ธ” ๊ตฌ์กฐ๊ฐ€ ์œ ์ง€๋  ์ˆ˜ ์žˆ๋‹ค. * ํ”„๋ก์‹œ ๊ธฐ๋Šฅ์˜ ํ•œ๊ณ„๋กœ ์ง€์—ฐ ๋กœ๋”ฉ์œผ๋กœ ์„ค์ •ํ•˜์—ฌ๋„ ์–ด์ฐจํ”ผ ์ฟผ๋ฆฌํ•ด๋ด์•ผ ๊ฒฐ๊ณผ๋ฅผ ์•Œ ์ˆ˜ ์žˆ๊ธฐ ๋•Œ๋ฌธ์— ํ•ญ์ƒ ์ฆ‰์‹œ ๋กœ๋”ฉ๋œ๋‹ค. --- ## ๋‹ค๋Œ€๋‹ค [N:M] --- * ์‹ค๋ฌด์—์„œ๋Š” ์‚ฌ์šฉํ•˜์ง€ ๋ง ๊ฒƒ. * RDB์—์„œ ์ •๊ทœํ™”๋œ ํ…Œ์ด๋ธ”์€ ๋‹ค๋Œ€๋‹ค๊ฐ€ ํ‘œํ˜„์ด ์•ˆ๋จ. * ์—ฐ๊ฒฐ ํ…Œ์ด๋ธ”์„ ์‚ฌ์šฉํ•ด์„œ ์ผ๋Œ€๋‹ค, ๋‹ค๋Œ€์ผ ๊ด€๊ณ„๋กœ ํ‘œํ˜„ํ•ด์•ผ ํ•จ. 
* ๊ฐ์ฒด๋Š” ์ปฌ๋ ‰์…˜์„ ์‚ฌ์šฉํ•ด์„œ ๊ฐ์ฒด 2๊ฐœ๋กœ ๋‹ค๋Œ€๋‹ค ๊ด€๊ณ„๊ฐ€ ๊ฐ€๋Šฅ. * @ManyToMany, @JoinTable๋กœ ์ง€์ •์ด ๊ฐ€๋Šฅํ•˜๋ฉฐ ๋‹จ๋ฐฉํ–ฅ, ์–‘๋ฐฉํ–ฅ์ด ๊ฐ€๋Šฅํ•˜๋‹ค. * ์–‘๋ฐฉํ–ฅ์€ ๋™์ผํ•˜๊ฒŒ mappedBy๋ฅผ ์‚ฌ์šฉํ•ด์•ผํ•จ. * ์‹ค๋ฌด์—์„œ๋Š” ์—ฐ๊ฒฐ ํ…Œ์ด๋ธ”์ด ์—ฐ๊ฒฐ๋งŒ ํ•˜๊ณ  ๋๋‚˜๋Š” ์ผ์ด ์—†๊ณ  ์ถ”๊ฐ€์ •๋ณด๊ฐ€ ๊ผญ ๋“ค์–ด๊ฐ€๊ธฐ ๋•Œ๋ฌธ์— ์‚ฌ์šฉํ•˜์ง€ ์•Š๋Š” ๊ฒƒ์ด ์ข‹์Œ. * ์—ฐ๊ฒฐ ํ…Œ์ด๋ธ”์šฉ ์—”ํ‹ฐํ‹ฐ๋ฅผ ๋งŒ๋“œ๋Š” ๊ฒƒ์ด ์ข‹๋‹ค. * ์—ฐ๊ฒฐ ํ…Œ์ด๋ธ”๋„ PK๋Š” ์˜๋ฏธ์—†๋Š” sequence๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๊ฒƒ์ด ์œ ์—ฐํ•ด์งˆ ์ˆ˜ ์žˆ๋‹ค. --- ## ์‹ค์ „ ์˜ˆ์ œ --- * JPA๋Š” parent์™€ ๊ฐ™์€ ํ˜•ํƒœ์˜ ์…€ํ”„ ๋งตํ•‘๋„ ๊ฐ€๋Šฅํ•จ. * ์‹ค๋ฌด์—์„œ ์ค‘๊ฐ„ ํ…Œ์ด๋ธ”์€ ๋‹จ์ˆœํ•˜์ง€ ์•Š์œผ๋ฏ€๋กœ @ManyToMany๋ฅผ ์‚ฌ์šฉํ•˜์ง€ ๋ง ๊ฒƒ. * @JoinColumn์€ ์™ธ๋ž˜ํ‚ค๋ฅผ ๋งตํ•‘ํ•  ๋•Œ ์‚ฌ์šฉ * name: ๋งคํ•‘ํ•  ์™ธ๋ž˜ ํ‚ค ์ด๋ฆ„ * referencedColumnName: ์™ธ๋ž˜ ํ‚ค๊ฐ€ ์ฐธ์กฐํ•˜๋Š” ๋Œ€์ƒ ํ…Œ์ด๋ธ”์˜ ์ปฌ๋Ÿผ๋ช… * foreignKey(DDL): ์™ธ๋ž˜ํ‚ค ์ œ์•ฝ์กฐ๊ฑด์„ ์ง์ ‘ ์ง€์ • * ์ด์™ธ์—๋Š” @Column๊ณผ ๋™์ผ * @ManyToOne * optional: ๊ธฐ๋ณธ๊ฐ’ true * fetch: ๊ธฐ๋ณธ๊ฐ’ EAGER * cascaed: ์˜์†์„ฑ ์ „์ด ๊ธฐ๋Šฅ * targetEntity: ์—ฐ๊ด€๋œ ์—”ํ‹ฐํ‹ฐ์˜ ํƒ€์ž… ์ •๋ณด๋ฅผ ์„ค์ •, ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ. ์ปฌ๋ ‰์…˜ ์ œ๋„ค๋ฆญ์œผ๋กœ ํƒ€์ž… ์ •๋ณด ์ถ”๋ก . * ์ŠคํŽ™์ƒ mappedBy๊ฐ€ ์—†์Œ. > ๋ฌด์กฐ๊ฑด ์ฃผ์ธ์ด ๋˜์–ด์•ผ ํ•จ. * @OneToMany * mappedBy: ์ฃผ์ธ ํ•„๋“œ ์„ ํƒ * fetch: ๊ธฐ๋ณธ๊ฐ’ LAZY * cascaed: ์˜์†์„ฑ ์ „์ด ๊ธฐ๋Šฅ * targetEntity: ์—ฐ๊ด€๋œ ์—”ํ‹ฐํ‹ฐ์˜ ํƒ€์ž… ์ •๋ณด๋ฅผ ์„ค์ •, ๊ฑฐ์˜ ์‚ฌ์šฉํ•˜์ง€ ์•Š์Œ. ์ปฌ๋ ‰์…˜ ์ œ๋„ค๋ฆญ์œผ๋กœ ํƒ€์ž… ์ •๋ณด ์ถ”๋ก . --- # Reference --- - [์ธํ”„๋Ÿฐ](https://www.inflearn.com/course/ORM-JPA-Basic)
from pyspark.sql.functions import col


def join_dataframes(left, right, columns_left, columns_right, join_type='inner'):
    """Toy join function to showcase Spark functions.

    Joins two Spark DataFrames on pairwise equality of the given columns
    (``columns_left[i] == columns_right[i]``).

    Args:
        left: Left-hand DataFrame.
        right: Right-hand DataFrame.
        columns_left: Column names from ``left``, paired positionally with
            ``columns_right``.
        columns_right: Column names from ``right``.
        join_type: Spark join type passed through to ``DataFrame.join``
            (default ``'inner'``).

    Returns:
        The joined DataFrame.

    Raises:
        ValueError: If the column lists are empty or have different lengths.
    """
    # Guard clause first; the original raised the undefined name `Error`,
    # which produced a NameError instead of the intended exception.
    if not columns_left or len(columns_left) != len(columns_right):
        raise ValueError("Columns parameters don't match or empty")
    cond = [
        col(left_col) == col(right_col)
        for left_col, right_col in zip(columns_left, columns_right)
    ]
    return left.join(right, cond, join_type)
--- title: Good News date: 2017-03-11 14:45:00 Z tags: - shopify - e-commerce - design - fashion - clothing - apparel - shoes - trainers image: "/uploads/255-goodnews@2x.jpg" store-link: https://goodnews.london credit: Not credit-link: http://not-studio.com ---
#!ruby

# Print every integer in an inclusive range, one per line, formatted as
# 0xNNNN. The two bounds are read from ARGV as hexadecimal strings.
first, last = ARGV[0].hex, ARGV[1].hex

(first..last).each { |value| puts format('0x%04x', value) }
package xyz.gillall.demoapp.ui.pixabay.videogallery

import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.databinding.DataBindingUtil
import androidx.fragment.app.Fragment
import androidx.navigation.NavController
import androidx.navigation.fragment.NavHostFragment
import org.koin.androidx.viewmodel.ext.android.getViewModel

import xyz.gillall.demoapp.R
import xyz.gillall.demoapp.databinding.FragmentVideoGalleryBinding

/**
 * Fragment showing the Pixabay video gallery via a data-bound layout driven
 * by [VideoGalleryViewModel].
 */
class VideoGalleryFragment : Fragment() {

    private lateinit var binding: FragmentVideoGalleryBinding
    private lateinit var navController: NavController
    private lateinit var viewModel: VideoGalleryViewModel

    override fun onCreateView(
        inflater: LayoutInflater,
        container: ViewGroup?,
        savedInstanceState: Bundle?
    ): View {
        // View model is resolved through Koin.
        viewModel = getViewModel()

        binding = DataBindingUtil
            .inflate(inflater, R.layout.fragment_video_gallery, container, false)
        // Removed a no-op `binding.root.context` expression statement whose
        // result was discarded (dead code).
        binding.lifecycleOwner = this
        binding.viewModel = viewModel

        // On an "update" action, re-assign the view model so the binding
        // re-evaluates its expressions.
        viewModel.updateByViewModel.observe(viewLifecycleOwner, {
            when (it.action) {
                "update" -> binding.viewModel = viewModel
            }
        })

        navController = NavHostFragment.findNavController(this)

        return binding.root
    }
}
<?php

/**
 * from php.net.
 *
 * Demonstrates a C#-style delegate/event pattern built on PHP callables.
 */

/**
 * Encapsulates a closure (or any callable) so it can be passed around and
 * invoked like a first-class function object.
 */
final class Delegate
{
    private $_Closure;

    /**
     * construct.
     *
     * @param Callable $closure the callable to wrap; stored as-is, so plain
     *                          callables (strings/arrays) work too.
     */
    public function __construct($closure)
    {
        // $this->_Closure = \Closure::fromCallable($closure);
        // $this->_Closure = new \Closure::($closure);
        $this->_Closure = $closure;
    }

    /**
     * Allows to call the delegate object directly.
     *
     * @param list ...$args variable numbers of arguments.
     *
     * @return mixed whatever the wrapped callable returns.
     */
    public function __invoke(...$args)
    {
        return call_user_func_array($this->_Closure, $args);
    }
}

/**
 * defines a type for event arguments.
 */
class EventArgs
{
    protected $_Sender;

    /**
     * construct.
     *
     * @param mixed $sender the object that raised the event (may be null).
     */
    public function __construct($sender = null)
    {
        $this->_Sender = $sender;
    }

    /**
     * property-read.
     *
     * @return object should contain the event emitting object.
     */
    final public function Sender()
    {
        return $this->_Sender;
    }
}

/**
 * a basic event type for the delegate.
 */
class Event
{
    private $_Receivers = array();

    /**
     * Subscribes a delegate to this event.
     *
     * @param Delegate $delegate handler invoked when the event is triggered.
     *
     * @return Event $this, to allow fluent chaining of Add() calls.
     */
    final public function Add(Delegate $delegate)
    {
        $this->_Receivers[] = $delegate;
        return $this;
    }

    /**
     * fires the event.
     *
     * Handlers run synchronously, in subscription order.
     *
     * @param EventArgs $args passed to every subscribed delegate.
     *
     * @return void
     */
    final public function Trigger(EventArgs $args)
    {
        foreach ($this->_Receivers as $delegate) {
            $delegate($args);
        }
    }
}

// declare anonymous function as delegate.
$myDelegate = new Delegate(function(EventArgs $args) {
    echo 'anonymous function' . PHP_EOL;
});

// declare event, assign the delegate, trigger event.
$myEvent = new Event();
$myEvent->Add($myDelegate);

/**
 * Defines a simple type that can handle events.
 */
class DemoEventHandler
{
    public function onEvent(EventArgs $args)
    {
        echo 'class event handler' . PHP_EOL;
    }
}

// test event handler: an (object, method-name) array is also a valid callable.
$controller = new DemoEventHandler();
$myEvent->Add(new Delegate(array($controller, 'onEvent')));

$myEvent->Trigger(new EventArgs($myEvent));
# Renames album_space_objs.object_id to space_obj_id.
#
# NOTE(review): `object_id` collides with Ruby's built-in Object#object_id —
# presumably the motivation for this rename; confirm against the model code.
class ChangeSpaceObjCol < ActiveRecord::Migration[6.0]
  def change
    # rename_column is reversible, so `change` (rather than up/down) suffices.
    rename_column :album_space_objs, :object_id, :space_obj_id
  end
end
module OodJob
  # A class that handles the communication with a resource manager for
  # submitting/statusing/holding/deleting jobs
  # @abstract
  class Adapter
    # The root exception class that all {Adapter} exceptions inherit from
    class Error < StandardError; end

    # The cluster used in submitting, querying status, and controlling jobs
    # @return [OodCluster::Cluster] cluster to communicate with
    attr_reader :cluster

    # @param cluster [OodCluster::Cluster] cluster that job is submitted to
    # @note Any additional keyword arguments are accepted and ignored so that
    #   subclasses with richer constructors can share call sites.
    def initialize(cluster:, **_)
      @cluster = cluster
    end

    # Submit a job with the attributes defined in the job template instance
    # @abstract Subclass is expected to implement {#submit}
    # @raise [NotImplementedError] if subclass did not define {#submit}
    # @example Submit job template to cluster
    #   solver_id = OodJob::Job.submit(script: solver_script)
    #   #=> "1234.server"
    # @example Submit job that depends on previous job
    #   post_id = OodJob::Job.submit(
    #     script: post_script,
    #     afterok: solver_id
    #   )
    #   #=> "1235.server"
    # @param script [Script] script object that describes the script and
    #   attributes for the submitted job
    # @param after [#to_s, Array<#to_s>] this job may be scheduled for execution
    #   at any point after dependent jobs have started execution
    # @param afterok [#to_s, Array<#to_s>] this job may be scheduled for
    #   execution only after dependent jobs have terminated with no errors
    # @param afternotok [#to_s, Array<#to_s>] this job may be scheduled for
    #   execution only after dependent jobs have terminated with errors
    # @param afterany [#to_s, Array<#to_s>] this job may be scheduled for
    #   execution after dependent jobs have terminated
    # @return [String] the job id returned after successfully submitting a job
    def submit(script:, after: [], afterok: [], afternotok: [], afterany: [])
      raise NotImplementedError, "subclass did not define #submit"
    end

    # Retrieve job info from the resource manager
    # @abstract Subclass is expected to implement {#info}
    # @raise [NotImplementedError] if subclass did not define {#info}
    # @param id [#to_s] the id of the job, otherwise get list of all jobs
    #   running on cluster
    # @return [Info, Array<Info>] information describing submitted job
    def info(id: '')
      raise NotImplementedError, "subclass did not define #info"
    end

    # Retrieve job status from resource manager
    # @note Optimized slightly over retrieving complete job information from server
    # @abstract Subclass is expected to implement {#status}
    # @raise [NotImplementedError] if subclass did not define {#status}
    # @param id [#to_s] the id of the job
    # @return [Status] status of job
    def status(id:)
      raise NotImplementedError, "subclass did not define #status"
    end

    # Put the submitted job on hold
    # @abstract Subclass is expected to implement {#hold}
    # @raise [NotImplementedError] if subclass did not define {#hold}
    # @param id [#to_s] the id of the job
    # @return [void]
    def hold(id:)
      raise NotImplementedError, "subclass did not define #hold"
    end

    # Release the job that is on hold
    # @abstract Subclass is expected to implement {#release}
    # @raise [NotImplementedError] if subclass did not define {#release}
    # @param id [#to_s] the id of the job
    # @return [void]
    def release(id:)
      raise NotImplementedError, "subclass did not define #release"
    end

    # Delete the submitted job
    # @abstract Subclass is expected to implement {#delete}
    # @raise [NotImplementedError] if subclass did not define {#delete}
    # @param id [#to_s] the id of the job
    # @return [void]
    def delete(id:)
      raise NotImplementedError, "subclass did not define #delete"
    end

    private

    # Reduce an array to unique objects with count
    # ["a", "a", "b"] #=> {"a" => 2, "b" => 1}
    # (equivalent to Enumerable#tally, available from Ruby 2.7)
    def uniq_array(ary)
      ary.group_by {|v| v}.each_with_object({}) {|(k, v), h| h[k] = v.size}
    end
  end
end
#!/usr/bin/perl
# Copyright (c) 2021 Tom Hancocks
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

# Trim function
# Strips leading whitespace and removes all newlines; used to clean up the
# output captured from backtick shell commands.
sub trim {
    my $str = $_[0];
    $str =~ s/^\s+|\n+//g;
    return $str;
}

# We need to extract certain information about the binary and the application
# bundle that we are installing.
my ($bin_path) = @ARGV;
if (!defined $bin_path) {
    die("You must provide a mach-o binary.");
}

# Derive Contents/MacOS and Contents/Frameworks from the binary's location.
my $macos_path = trim(`dirname ${bin_path}`);
my $contents_path = trim(`dirname ${macos_path}`);
my $frameworks_path = trim("${contents_path}/Frameworks");

# Make sure the frameworks directory actually exists in the application bundle.
`mkdir -p ${frameworks_path}`;

# Setup a selection of functions that are responsible for moving files and
# altering linking.
# Rewrite one load command in `path` so it points at the bundled copy of the
# dylib (relative to the executable) instead of its original location.
sub install_name_tool {
    local ($path, $dylib_path) = ($_[0], $_[1]);
    local $dylib_name = trim(`basename ${dylib_path}`);
    local $dylib_link_path = "\@executable_path/../Frameworks/${dylib_name}";
    `install_name_tool -change "${dylib_path}" "${dylib_link_path}" "${path}"`;
}

# Map a dylib's source path to its destination inside Contents/Frameworks.
sub get_dylib_install_path {
    local ($dylib_name) = (trim(`basename $_[0]`));
    return "${frameworks_path}/${dylib_name}";
}

# Copy a dylib into the bundle and make it writable/executable.
# Fix: the original assigned the computed install path to $dylib_name but then
# interpolated $dylib_install_path, which only worked by accident through the
# caller's dynamically-scoped `local` variable. The install path is now
# computed and used under one name.
sub copy_dylib {
    local ($dylib_path) = ($_[0]);
    local $dylib_install_path = get_dylib_install_path($dylib_path);
    `cp -v ${dylib_path} ${dylib_install_path}`;
    `chmod 0755 ${dylib_install_path}`;
    return $dylib_install_path;
}

# Setup a subroutine to handle the actual DYLIB installation. This is
# unfortunately a recursive operation, as actual DYLIBs can reference other
# DYLIBs.
sub install_dylib {
    local $base = $_[0];
    local @result = split /\n/, `otool -L ${base}`;
    while (local $dylib = shift(@result)) {
        local $dylib_path = trim((split / /, $dylib)[0]);
        local $dylib_install_path = get_dylib_install_path($dylib_path);

        # A self-reference: repoint the already-installed copy at itself.
        if ($dylib_install_path eq $base) {
            install_name_tool($dylib_install_path, $dylib_path);
        }

        # Check if the DYLIB is a user one (located in a brew install location)
        elsif ((rindex $dylib_path, "/usr/local") == 0) {
            local $dylib_install_path = copy_dylib($dylib_path);
            print("Installing DYLIB to ${dylib_install_path}\n");
            install_name_tool($base, $dylib_path);
            # Recurse: the copied dylib may itself link other brew dylibs.
            install_dylib($dylib_install_path);
        }
    }
}

install_dylib($bin_path);
package m54tom55

import (
	"testing"

	"github.com/gogo/protobuf/proto"
	"github.com/pkg/errors"
	"github.com/stackrox/rox/generated/storage"
	"github.com/stackrox/rox/pkg/testutils"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	bolt "go.etcd.io/bbolt"
)

var (
	// Sections are not essential for the test
	// but are required for a policy to be valid.
	sections = []*storage.PolicySection{
		{
			PolicyGroups: []*storage.PolicyGroup{
				{
					FieldName: "CVSS",
					Values: []*storage.PolicyValue{
						{
							Value: ">= 7.000000",
						},
					},
				},
			},
		},
	}

	// Shared fixture used both as a Whitelists and an Exclusions value.
	exclusions = []*storage.Exclusion{
		{
			Name: "42",
		},
	}

	// Inputs covering every combination of old/new version and
	// whitelist/exclusion fields the migration must handle.
	originalPolicies = []*storage.Policy{
		{
			Id:             "0",
			Name:           "policy 0 with no whitelists",
			PolicyVersion:  oldVersion,
			PolicySections: sections,
		},
		{
			Id:             "1",
			Name:           "policy 1 with a whitelist",
			PolicyVersion:  oldVersion,
			PolicySections: sections,
			Whitelists:     exclusions,
		},
		{
			Id:             "2",
			Name:           "policy 2 with both a whitelist and an exclusion",
			PolicyVersion:  oldVersion,
			PolicySections: sections,
			Whitelists:     exclusions,
			Exclusions:     exclusions,
		},
		{
			Id:             "3",
			Name:           "policy 3 with an exclusion but the old version",
			PolicyVersion:  oldVersion,
			PolicySections: sections,
			Exclusions:     exclusions,
		},
		{
			Id:             "4",
			Name:           "policy 4 with an exclusion and the new version",
			PolicyVersion:  newVersion,
			PolicySections: sections,
			Exclusions:     exclusions,
		},
		{
			Id:             "5",
			Name:           "policy 5 with no exclusion and and the new version",
			PolicyVersion:  newVersion,
			PolicySections: sections,
		},
		{
			Id:             "6",
			Name:           "policy 6 with a whitelist and the new version",
			PolicyVersion:  newVersion,
			PolicySections: sections,
			Whitelists:     exclusions,
		},
	}

	// Expected outputs: all policies upgraded to newVersion with Whitelists
	// folded into Exclusions.
	expectedPolicies = []*storage.Policy{
		{
			Id:             "0",
			Name:           "policy 0 with no whitelists",
			PolicyVersion:  newVersion,
			PolicySections: sections,
		},
		{
			Id:             "1",
			Name:           "policy 1 with a whitelist",
			PolicyVersion:  newVersion,
			PolicySections: sections,
			Exclusions:     exclusions,
		},
		{
			Id:             "2",
			Name:           "policy 2 with both a whitelist and an exclusion",
			PolicyVersion:  newVersion,
			PolicySections: sections,
			// Whitelist entries are appended after the pre-existing exclusions.
			Exclusions: append(exclusions, exclusions...),
		},
		{
			Id:             "3",
			Name:           "policy 3 with an exclusion but the old version",
			PolicyVersion:  newVersion,
			PolicySections: sections,
			Exclusions:     exclusions,
		},
		{
			Id:             "4",
			Name:           "policy 4 with an exclusion and the new version",
			PolicyVersion:  newVersion,
			PolicySections: sections,
			Exclusions:     exclusions,
		},
		{
			Id:             "5",
			Name:           "policy 5 with no exclusion and and the new version",
			PolicyVersion:  newVersion,
			PolicySections: sections,
		},
		{
			Id:             "6",
			Name:           "policy 6 with a whitelist and the new version",
			PolicyVersion:  newVersion,
			PolicySections: sections,
			Exclusions:     exclusions,
		},
	}
)

// TestPolicyMigration seeds a bolt bucket with originalPolicies, runs the
// whitelists-to-exclusions migration, and verifies the stored policies match
// expectedPolicies (order-insensitively).
func TestPolicyMigration(t *testing.T) {
	db := testutils.DBForT(t)

	// Seed the policy bucket with the pre-migration fixtures.
	err := db.Update(func(tx *bolt.Tx) error {
		bucket, err := tx.CreateBucket(policyBucket)
		if err != nil {
			return err
		}
		for _, policy := range originalPolicies {
			bytes, err := proto.Marshal(policy)
			if err != nil {
				return err
			}
			if err := bucket.Put([]byte(policy.GetId()), bytes); err != nil {
				return err
			}
		}
		return nil
	})
	require.NoError(t, err, "Prepare test policy bucket")

	err = migrateWhitelistsToExclusions(db)
	require.NoError(t, err, "Run migration")

	// Read everything back out of the bucket after the migration.
	var migratedPolicies []*storage.Policy
	err = db.View(func(tx *bolt.Tx) error {
		bucket := tx.Bucket(policyBucket)
		if bucket == nil {
			return errors.Errorf("bucket %q not found", policyBucket)
		}
		return bucket.ForEach(func(_, obj []byte) error {
			policy := &storage.Policy{}
			if err := proto.Unmarshal(obj, policy); err != nil {
				return err
			}
			migratedPolicies = append(migratedPolicies, policy)
			return nil
		})
	})
	require.NoError(t, err, "Read migrated policies from the bucket")

	assert.ElementsMatch(t, expectedPolicies, migratedPolicies)
}
package activerecord

import (
	"context"
	"fmt"
	"strings"

	"github.com/activegraph/activegraph/activesupport"
)

// ErrUnknownPrimaryKey is returned when a declared primary key cannot be
// resolved against the relation's attributes.
type ErrUnknownPrimaryKey struct {
	PrimaryKey  string
	Description string
}

func (e *ErrUnknownPrimaryKey) Error() string {
	return fmt.Sprintf("Primary key is unknown, %s", e.Description)
}

// R is the model definition builder passed to the callback of New/Initialize;
// it accumulates attributes, associations and validators for a Relation.
type R struct {
	rel *Relation

	tableName  string
	primaryKey string

	attrs      attributesMap
	assocs     associationsMap
	validators validatorsMap

	reflection  *Reflection
	connections *connectionHandler
}

// TableName sets the table name explicitly.
//
//	Vertex := activerecord.New("vertex", func(r *activerecord.R) {
//		r.TableName("vertices")
//	})
func (r *R) TableName(name string) {
	r.tableName = name
}

// PrimaryKey declares which attribute acts as the primary key.
func (r *R) PrimaryKey(name string) {
	r.primaryKey = name
}

// DefineAttribute registers an attribute with its type; a type validator is
// always attached, plus any extra validators supplied.
func (r *R) DefineAttribute(name string, t Type, validators ...AttributeValidator) {
	r.attrs[name] = attr{Name: name, Type: t}
	r.validators.include(name, typeValidator{t})
	r.validators.include(name, validators...)
}

// Validates attaches a validator to the named attribute, initializing the
// validator first when it supports initialization.
func (r *R) Validates(name string, validator AttributeValidator) {
	if v, ok := validator.(activesupport.Initializer); ok {
		err := v.Initialize()
		if err != nil {
			panic(err)
		}
		// activesupport.Err(err).Unwrap()
	}
	r.validators.include(name, validator)
}

// ValidatesPresence requires each named attribute to be present.
func (r *R) ValidatesPresence(names ...string) {
	r.validators.extend(names, new(Presence))
}

// BelongsTo declares a belongs-to association; the foreign-key attribute is
// registered implicitly as a nullable int64.
func (r *R) BelongsTo(name string, init ...func(*BelongsTo)) {
	assoc := BelongsTo{targetName: name, owner: r.rel, reflection: r.reflection}
	switch len(init) {
	case 0:
	case 1:
		init[0](&assoc)
	default:
		panic(activesupport.ErrMultipleVariadicArguments{Name: "init"})
	}

	r.attrs[assoc.AssociationForeignKey()] = attr{
		Name: assoc.AssociationForeignKey(),
		Type: Nil{new(Int64)},
	}
	r.assocs[name] = &assoc
}

// HasMany declares a has-many association.
func (r *R) HasMany(name string) {
	// TODO: Define library methods to pluralize words.
	targetName := strings.TrimSuffix(name, "s")

	// Use plural name for the name of attribute, while target name
	// of the association should be in singular (to find a target relation
	// through the reflection).
	r.assocs[name] = &HasMany{
		targetName: targetName,
		owner:      r.rel,
		reflection: r.reflection,
	}
}

// HasOne declares a has-one association.
func (r *R) HasOne(name string) {
	r.assocs[name] = &HasOne{targetName: name, owner: r.rel, reflection: r.reflection}
}

// init seeds the builder's attributes from the table's column definitions:
// nullable columns are wrapped in Nil, and the primary-key column (if flagged
// by the driver) becomes the default primary key.
func (r *R) init(ctx context.Context, tableName string) error {
	conn, err := r.connections.RetrieveConnection(primaryConnectionName)
	if err != nil {
		return err
	}

	definitions, err := conn.ColumnDefinitions(ctx, tableName)
	if err != nil {
		return err
	}

	for _, column := range definitions {
		columnType := column.Type
		if !column.NotNull {
			columnType = Nil{columnType}
		}
		r.DefineAttribute(column.Name, columnType)

		if column.IsPrimaryKey {
			r.PrimaryKey(column.Name)
		}
	}
	return nil
}

// Relation is an immutable-by-convention query/model object; chainable methods
// (Where, Select, ...) return copies rather than mutating the receiver.
type Relation struct {
	name      string
	tableName string

	// TODO: add *Reflection property.
	// reflection *Reflection

	conn        Conn
	connections *connectionHandler

	scope *attributes
	query *QueryBuilder
	ctx   context.Context

	associations
	validations
	AttributeMethods
}

// New builds a Relation, panicking on error; see Initialize for the
// non-panicking variant.
func New(name string, init ...func(*R)) *Relation {
	var (
		rel *Relation
		err error
	)

	switch len(init) {
	case 0:
		rel, err = Initialize(name, nil)
	case 1:
		rel, err = Initialize(name, init[0])
	default:
		panic(&activesupport.ErrMultipleVariadicArguments{Name: "init"})
	}

	if err != nil {
		panic(err)
	}
	return rel
}

// Initialize builds a Relation named `name`, loading column definitions from
// the table `name+"s"`, applying the optional builder callback, and
// registering the result in the global reflection.
func Initialize(name string, init func(*R)) (*Relation, error) {
	rel := &Relation{name: name}

	r := R{
		rel:         rel,
		assocs:      make(associationsMap),
		attrs:       make(attributesMap),
		validators:  make(validatorsMap),
		reflection:  globalReflection,
		connections: globalConnectionHandler,
	}

	err := r.init(context.TODO(), name+"s")
	if err != nil {
		return nil, err
	}

	if init != nil {
		init(&r)
	}

	// When the primary key was assigned to record builder, mark it explicitly
	// wrapping with PrimaryKey structure. Otherwise, fallback to the default
	// primary key implementation.
	if r.primaryKey != "" {
		attr, ok := r.attrs[r.primaryKey]
		if !ok {
			return nil, &ErrUnknownPrimaryKey{r.primaryKey, "not in attributes"}
		}
		r.attrs[r.primaryKey] = PrimaryKey{Attribute: attr}
	}

	if r.tableName == "" {
		r.tableName = name + "s"
	}

	// The scope is empty by default.
	scope, err := newAttributes(name, r.attrs.copy(), nil)
	if err != nil {
		return nil, err
	}

	assocs := newAssociations(name, r.assocs.copy(), r.reflection)
	validations := newValidations(r.validators.copy())

	// Create the model schema, and register it within a reflection instance.
	rel.tableName = r.tableName
	rel.scope = scope
	rel.associations = *assocs
	rel.validations = *validations
	rel.connections = r.connections
	rel.query = &QueryBuilder{from: r.tableName}
	rel.AttributeMethods = scope

	r.reflection.AddReflection(name, rel)
	return rel, nil
}

func (rel *Relation) TableName() string {
	return rel.tableName
}

func (rel *Relation) Name() string {
	return rel.name
}

// Copy returns a deep-enough copy of the relation (scope, query, associations
// and validations are copied) so chained modifiers do not alias state.
func (rel *Relation) Copy() *Relation {
	scope := rel.scope.copy()

	return &Relation{
		name:             rel.name,
		tableName:        rel.tableName,
		conn:             rel.Connection(),
		connections:      rel.connections,
		scope:            rel.scope.copy(),
		query:            rel.query.copy(),
		ctx:              rel.ctx,
		associations:     *rel.associations.copy(),
		validations:      *rel.validations.copy(),
		AttributeMethods: scope,
	}
}

// empty clears the relation's scope in place and returns the receiver; used
// by modifiers when requested attributes do not exist.
func (rel *Relation) empty() *Relation {
	rel.scope, _ = newAttributes(rel.name, nil, nil)
	return rel
}

// IsEmpty returns true if there are no records.
func (rel *Relation) IsEmpty() bool {
	// TODO: implement the method.
	return false
}

// Context returns the relation's context, defaulting to context.Background().
func (rel *Relation) Context() context.Context {
	if rel.ctx == nil {
		return context.Background()
	}
	return rel.ctx
}

// WithContext returns a copy of the relation bound to ctx.
func (rel *Relation) WithContext(ctx context.Context) *Relation {
	newrel := rel.Copy()
	newrel.ctx = ctx
	return newrel
}

// Connect returns a copy of the relation pinned to the given connection.
func (rel *Relation) Connect(conn Conn) *Relation {
	newrel := rel.Copy()
	newrel.conn = conn
	return newrel
}

// Connection returns the pinned connection, or the primary connection; lookup
// failures surface lazily through an error-wrapping connection.
func (rel *Relation) Connection() Conn {
	if rel.conn != nil {
		return rel.conn
	}
	conn, err := rel.connections.RetrieveConnection(primaryConnectionName)
	if err != nil {
		return &errConn{err}
	}
	return conn
}

// New initializes an unsaved record, optionally assigning attributes.
func (rel *Relation) New(params ...map[string]interface{}) Result {
	switch len(params) {
	case 0:
		return Return(rel.Initialize(nil))
	case 1:
		return Return(rel.Initialize(params[0]))
	default:
		return Err(&activesupport.ErrMultipleVariadicArguments{Name: "params"})
	}
}

// Initialize builds an ActiveRecord for this relation with the given
// attribute values; the record is not persisted.
func (rel *Relation) Initialize(params map[string]interface{}) (*ActiveRecord, error) {
	attributes := rel.scope.clear()
	err := attributes.AssignAttributes(params)
	if err != nil {
		return nil, err
	}

	rec := &ActiveRecord{
		name:         rel.name,
		tableName:    rel.tableName,
		conn:         rel.Connection(),
		attributes:   attributes,
		associations: rel.associations.copy(),
		validations:  *rel.validations.copy(),
	}
	return rec.init(), nil
}

// Create initializes a record and immediately inserts it.
func (rel *Relation) Create(params map[string]interface{}) Result {
	return Return(rel.Initialize(params)).Insert()
}

// ExtractRecord deserializes one result row (column name -> raw value) into
// an ActiveRecord, mapping columns to attributes positionally.
func (rel *Relation) ExtractRecord(h activesupport.Hash) (*ActiveRecord, error) {
	var (
		attrNames   = rel.scope.AttributeNames()
		columnNames = rel.scope.ColumnNames()
	)

	params := make(activesupport.Hash, len(attrNames))
	for i, colName := range columnNames {
		attrName := attrNames[i]
		attr := rel.scope.AttributeForInspect(attrName)

		attrValue, err := attr.AttributeType().Deserialize(h[colName])
		if err != nil {
			return nil, err
		}
		params[attrName] = attrValue
	}

	return rel.Initialize(params)
}

// PrimaryKey returns the attribute name of the record's primary key.
func (rel *Relation) PrimaryKey() string {
	return rel.scope.PrimaryKey()
}

// All returns a lazy collection over every record of the relation.
func (rel *Relation) All() CollectionResult {
	return ReturnCollection(rel, nil)
}

// TODO: move to the Schema type all column-related methods.
func (rel *Relation) ColumnNames() []string {
	return rel.scope.ColumnNames()
}

// Each executes the relation's query and invokes fn for every extracted
// record; the first error (extraction, association assignment, or fn itself)
// stops iteration and is returned.
func (rel *Relation) Each(fn func(*ActiveRecord) error) error {
	q := rel.query.copy()
	q.Select(rel.ColumnNames()...)

	// Include all join dependencies into the query with fully-qualified column
	// names, so each part of the request can be extracted individually.
	for _, join := range rel.query.joinValues {
		q.Select(join.Relation.ColumnNames()...)
	}

	var lasterr error

	err := rel.Connection().ExecQuery(rel.Context(), q.Operation(), func(h activesupport.Hash) bool {
		rec, e := rel.ExtractRecord(h)
		if lasterr = e; e != nil {
			return false
		}

		for _, join := range rel.query.joinValues {
			arec, e := join.Relation.ExtractRecord(h)
			if lasterr = e; e != nil {
				return false
			}

			e = rec.AssignAssociation(join.Relation.Name(), arec)
			if lasterr = e; e != nil {
				return false
			}
		}

		if lasterr = fn(rec); lasterr != nil {
			return false
		}
		return true
	})

	if lasterr != nil {
		return lasterr
	}
	return err
}

// Where returns a copy of the relation with an additional predicate.
func (rel *Relation) Where(cond string, arg interface{}) *Relation {
	newrel := rel.Copy()

	// When the condition is a regular column, pass it through the regular
	// column comparison instead of query chain predicates.
	if newrel.scope.HasAttribute(cond) {
		// newrel.scope.AssignAttribute(cond, arg)
		newrel.query.Where(fmt.Sprintf("%s = ?", cond), arg)
	} else {
		newrel.query.Where(cond, arg)
	}
	return newrel
}

// Select allows to specify a subset of fields to return.
//
// Method returns a new relation, where a set of attributes is limited by the
// specified list.
//
//	Model.Select("field", "other_field")
//	// #<Model id: 1, field: "value", other_field: "value">
//
// Accessing attributes of a Record that do not have fields retrieved by a select
// except id will return nil.
//
//	model, _ := Model.Select("field").Find(1)
//	model.Attribute("other_field") // Returns nil
func (rel *Relation) Select(attrNames ...string) *Relation {
	newrel := rel.Copy()

	// Any unknown attribute empties the relation rather than erroring.
	if !newrel.scope.HasAttributes(attrNames...) {
		return newrel.empty()
	}

	attrMap := make(map[string]struct{}, len(attrNames))
	for _, attrName := range attrNames {
		attrMap[attrName] = struct{}{}
	}

	for _, attrName := range newrel.scope.AttributeNames() {
		if _, ok := attrMap[attrName]; !ok {
			newrel.scope.ExceptAttribute(attrName)
		}
	}
	return newrel
}

// Group adds a GROUP BY clause over the given attributes.
func (rel *Relation) Group(attrNames ...string) *Relation {
	newrel := rel.Copy()

	// When the attribute is not part of the scope, return an empty relation.
	if !newrel.scope.HasAttributes(attrNames...) {
		return newrel.empty()
	}

	newrel.query.Group(attrNames...)
	return newrel
}

// Limit specifies a limit for the number of records to retrieve.
//
//	User.Limit(10) // Generated SQL has 'LIMIT 10'
func (rel *Relation) Limit(num int) *Relation {
	newrel := rel.Copy()
	newrel.query.Limit(num)
	return newrel
}

// Joins adds JOIN clauses for the named associations; an unknown association
// empties the relation.
func (rel *Relation) Joins(assocNames ...string) *Relation {
	newrel := rel.Copy()

	for _, assocName := range assocNames {
		association := newrel.ReflectOnAssociation(assocName)
		if association == nil {
			return newrel.empty()
		}
		newrel.query.Join(association.Relation.Copy(), association.Association)
	}
	return newrel
}

// Find fetches the single record with the given primary-key value; returns
// ErrRecordNotFound unless exactly one row matches.
func (rel *Relation) Find(id interface{}) Result {
	var q QueryBuilder
	q.From(rel.TableName())
	q.Select(rel.scope.AttributeNames()...)
	// TODO: consider using unified approach.
	q.Where(fmt.Sprintf("%s = ?", rel.PrimaryKey()), id)

	var rows []activesupport.Hash
	if err := rel.Connection().ExecQuery(rel.Context(), q.Operation(), func(h activesupport.Hash) bool {
		rows = append(rows, h)
		return true
	}); err != nil {
		return Err(err)
	}

	if len(rows) != 1 {
		return Err(ErrRecordNotFound{PrimaryKey: rel.PrimaryKey(), ID: id})
	}
	return rel.New(rows[0])
}

// FindBy returns a record matching the specified condition.
// // person := Person.FindBy("name", "Bill") // // Ok(Some(#<Person id: 1, name: "Bill", occupation: "retired">)) // // person := Person.FindBy("salary > ?", 10000) // // Ok(Some(#<Person id: 3, name: "Jeff", occupation: "CEO">)) func (rel *Relation) FindBy(cond string, arg interface{}) Result { return rel.Where(cond, arg).First() } // First find returns the first record. func (rel *Relation) First() Result { records, err := rel.Limit(1).ToA() if err != nil { return Err(err) } switch len(records) { case 0: return Ok(None) default: return Ok(Some(records[0])) } } func (rel *Relation) InsertAll(params ...map[string]interface{}) ( rr []*ActiveRecord, err error, ) { rr = make([]*ActiveRecord, 0, len(params)) for _, h := range params { rec, err := rel.Initialize(h) if err != nil { return nil, err } rr = append(rr, rec) } if err = rel.connections.Transaction(rel.Context(), func() error { for i, rec := range rr { if rr[i], err = rec.Insert(); err != nil { return err } } return nil }); err != nil { return nil, err } return rr, nil } // ToA converts Relation to array. The method access database to retrieve objects. func (rel *Relation) ToA() (Array, error) { var rr Array if err := rel.Each(func(r *ActiveRecord) error { rr = append(rr, r) return nil }); err != nil { return nil, err } return rr, nil } // ToSQL returns sql statement for the relation. // // User.Where("name", "Oscar").ToSQL() // // SELECT * FROM "users" WHERE "name" = ? func (rel *Relation) ToSQL() string { return rel.query.String() } func (rel *Relation) String() string { var buf strings.Builder fmt.Fprintf(&buf, "%s(", strings.Title(rel.name)) attrs := rel.AttributesForInspect() for i, attr := range attrs { fmt.Fprintf(&buf, "%s: %s", attr.AttributeName(), attr.AttributeType()) if i < len(attrs)-1 { fmt.Fprint(&buf, ", ") } } fmt.Fprintf(&buf, ")") return buf.String() }
from compas.geometry.primitives.frame import Frame


class BeamStorage(object):
    """A Y/Z grid of storage slots for beams, anchored at ``frame``."""

    def __init__(self, frame=None, y_count=5, y_spacing=140, z_spacing=140):
        # type: (Frame, int, float, float) -> None
        """Frame should have X pointing along beam length and Z pointing to world Z"""
        self.frame = frame  # type: (Frame)  # Frame where the storage grid originates.
        self.y_count = y_count  # Number of slots per row along the frame's Y axis.
        self.y_spacing = y_spacing  # Spacing between neighbouring slots along Y.
        self.z_spacing = z_spacing  # Spacing between stacked rows along Z.

    def to_data(self):
        """Simplest way to get this class serialized.
        """
        return self.data

    @classmethod
    def from_data(cls, data):
        """Construct a BeamStorage from structured data.

        Subclass must add their property to the data property.
        """
        beamstorage = cls()
        beamstorage.data = data
        return beamstorage

    @property
    def data(self):
        # Plain-dict representation used for (de)serialization.
        data = {}
        data['frame'] = self.frame
        data['y_count'] = self.y_count
        data['y_spacing'] = self.y_spacing
        data['z_spacing'] = self.z_spacing
        return data

    @data.setter
    def data(self, data):
        # Missing keys fall back to the same defaults as __init__ (frame -> world XY).
        self.frame = data.get('frame', Frame.worldXY())
        self.y_count = data.get('y_count', 5)
        self.y_spacing = data.get('y_spacing', 140)
        self.z_spacing = data.get('z_spacing', 140)

    def get_storage_frame(self, beam_seq, total_beam_count=0):
        # type: (int, int) -> Frame
        """Get the storage frame of a particular beam based on the sequence number (zero start)

        The algorithm is a simple Y first and then Z.
        The returned frame has X pointing along the beam length and Z pointing to world up.
        You can align the grasp face's face_frame such that the beam is stored in the same
        orientation with the gripping direction, and optionally compensate the depth of the
        beam by moving the beam up.
        """
        # Reverse the order (since we pick from the top)
        if total_beam_count > 0:
            beam_seq = total_beam_count - beam_seq - 1
        # Fill slots along Y first, then start a new row along Z.
        y = (beam_seq % self.y_count)
        z = beam_seq // self.y_count
        y_offset = y * self.y_spacing
        z_offset = z * self.z_spacing
        # Offset from the grid origin, expressed with the frame's own axes.
        transform_vector = self.frame.yaxis.unitized().scaled(y_offset) + self.frame.zaxis.unitized().scaled(z_offset)
        return Frame(self.frame.point + transform_vector, self.frame.xaxis.copy(), self.frame.yaxis.copy())
# -*- coding: utf-8 -*-
import requests
from lxml import etree
import SaveData
import random


class Get_album_and_aongs:
    '''
    Fetches one album's metadata and its full song listing from NetEase
    Cloud Music by album id, and persists both via ``SaveData``.

    NOTE(review): the class name looks like a typo of "songs"; renaming
    would break existing callers, so it is left unchanged.
    '''
    def __init__(self, album_id, proxy_pool):
        # album_id: NetEase album id, as a string.
        # proxy_pool: list of HTTP proxy addresses; one is picked at random per request.
        self.album_id = album_id
        self.proxy_pool = proxy_pool

    def get_album_and_songs(self):
        '''
        Fetch the album info by album id, plus the info of every song the
        album contains, and save everything to the database.

        Returns 1 on success, -1 when the album page does not exist
        (HTTP 404), and None on any other failure (caller retries).
        '''
        url = 'http://music.163.com/album?id=' + self.album_id
        headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0'}
        ip = random.choice(self.proxy_pool)
        proxies = {'http': ip}
        try:
            r = requests.get(url, headers=headers, proxies=proxies, timeout=3)  # request the album's song-list page
            if r.status_code == 404:
                return -1
            else:
                r.raise_for_status()
            html = etree.HTML(r.text)
            '''Parse the album info: album id, album name, singer id, singer name, release time and release company'''
            album_info = {}
            album_info['album_id'] = self.album_id
            # NOTE(review): xpath() returns a *list* here (album_name/singer_name
            # are not indexed with [0] like singer_id) -- presumably SaveData
            # accepts lists; verify against SaveData.save_album_info.
            album_info['album_name'] = html.xpath("//h2[@class='f-ff2']/text()")
            album_info['singer_id'] = html.xpath("//p[@class='intr']//a/@href")[0].replace('/artist?id=', '')
            album_info['singer_name'] = html.xpath("//p[@class='intr']//a/text()")
            album_info['release_time'] = html.xpath("//p[@class='intr']/text()")[0]
            if len(html.xpath("//p[@class='intr']/text()")) > 1:
                album_info['release_company'] = html.xpath("//p[@class='intr']/text()")[1].strip()
            else:
                album_info['release_company'] = 'ๆ— '  # some albums state no publisher; mark those with 'ๆ— ' ("none")
            '''Parse the info of every song on this album'''
            songs_info = []
            for i in range(len(html.xpath("//ul[@class='f-hide']/li"))):
                '''Per-song info: song id, song name, owning album id and owning album name'''
                song_info = {}
                song_info['song_id'] = html.xpath("//ul[@class='f-hide']/li/a/@href")[i].replace('/song?id=', '')
                song_info['song_name'] = html.xpath("//ul[@class='f-hide']/li/a/text()")[i]
                song_info['album_id'] = album_info['album_id']
                song_info['album_name'] = album_info['album_name']
                songs_info.append(song_info)
            '''Persist the album info and the song info to the database'''
            SaveData.save_album_info(album_info)
            SaveData.save_songs_info(songs_info)
            print("ไธ“่พ‘idไธบ"+ self.album_id +"็š„ไฟกๆฏ่Žทๅ–ๅฎŒๆฏ•")
            return 1
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
        # consider `except Exception` -- left unchanged here.
        except:
            print("ไธ“่พ‘idไธบ"+ self.album_id +"็š„ไฟกๆฏ่Žทๅ–ๅคฑ่ดฅ")
            print("ๆญฃๅœจ้‡ๆ–ฐ่Žทๅ–")
            return None
/**
 * Copyright 2018 gd Author. All Rights Reserved.
 * Author: Chuck1024
 */

package discovery

import (
	"github.com/chuck1024/gd/service"
)

var (
	// defaultConf is the default configuration file path for discovery clients.
	defaultConf = "conf/conf.ini"
)

// DogDiscovery abstracts a service-discovery backend that tracks the set of
// nodes registered under service keys. (Method semantics below are inferred
// from names; implementations live elsewhere -- confirm against them.)
type DogDiscovery interface {
	// Start initializes the discovery client.
	Start() error
	// Close shuts the client down.
	Close()
	// Watch subscribes to changes of a single key at the given node path.
	Watch(key, node string) error
	// WatchMulti subscribes to several key -> node-path pairs at once.
	WatchMulti(nodes map[string]string) error
	// AddNode registers a node's info under key.
	AddNode(key string, info service.NodeInfo)
	// DelNode removes the node with the given address from key.
	DelNode(key string, addr string)
	// GetNodeInfo returns the nodes currently known for key.
	GetNodeInfo(key string) (nodesInfo []service.NodeInfo)
}
import { PublicKey } from '@solana/web3.js';
import { getPayer, getRpcUrl} from '../utils';
// NOTE(review): getPayer, NodeWallet and actions are imported but unused here.
import { Connection, NodeWallet, programs, actions } from '@metaplex/js';

// Loads the Metaplex vault account at `vaultAddress` and prints its authority.
async function getVaultInfo(vaultAddress) {
  const rpcUrl = await getRpcUrl();
  let connection = new Connection(rpcUrl, 'confirmed');
  const vault = await programs.vault.Vault.load(connection, vaultAddress);
  console.log(vault.data.authority);
}

// Inspect a hard-coded vault address.
getVaultInfo(new PublicKey("AvLtCwsoqXe2jr2rQ1wwvXF8LD6g9PcR8Qz8ygy5ARmF"))
package net.apptronic.test.commons_sample_compat_app

import android.content.Intent
import android.os.Bundle
import net.apptronic.test.commons_sample_compat_app.about.AboutActivity
import net.apptronic.test.commons_sample_compat_app.data.UserData
import net.apptronic.test.commons_sample_compat_app.fragments.dialog.SampleDialog
import net.apptronic.test.commons_sample_compat_app.fragments.enterdata.EnterDataFragment
import net.apptronic.test.commons_sample_compat_app.fragments.showdata.KEY_USER_DATA
import net.apptronic.test.commons_sample_compat_app.fragments.showdata.ShowDataFragment

/**
 * [Router] implementation that performs navigation on top of [mainActivity]:
 * starting activities, showing dialogs and swapping fragments.
 */
class RouterImpl(private val mainActivity: MainActivity) : Router {

    override fun openAbout() {
        // Launch the About screen as a separate activity.
        val intent = Intent(mainActivity, AboutActivity::class.java)
        mainActivity.startActivity(intent)
    }

    override fun openDialog() {
        // Attach the dialog fragment without a tag.
        val transaction = mainActivity.supportFragmentManager.beginTransaction()
        transaction.add(SampleDialog(), null)
        transaction.commit()
    }

    override fun goToEnterData() {
        mainActivity.replaceFragmentWithAddToBackStack(EnterDataFragment())
    }

    override fun goToShowUserData(data: UserData) {
        // Pass the user data to the fragment through its arguments bundle.
        val args = Bundle()
        args.putSerializable(KEY_USER_DATA, data)
        val fragment = ShowDataFragment()
        fragment.arguments = args
        mainActivity.replaceFragmentWithAddToBackStack(fragment)
    }
}
import json
from enum import Enum, auto

from . import profile, oauth


class OutputFormat(Enum):
    """Supported destinations/formats for emitting OAuth tokens."""
    json = auto()
    shell = auto()
    config = auto()


def output(fmt: OutputFormat, tokens: oauth.Tokens, **kwargs: str) -> None:
    """Emit ``tokens`` in the requested format.

    json   -- pretty-print the token fields as JSON to stdout.
    shell  -- print ``export`` lines suitable for ``eval`` in a shell.
    config -- persist the tokens under the profile named by kwargs["profile"].

    Raises:
        ValueError: for an unhandled OutputFormat member.
        KeyError: for ``config`` when no "profile" kwarg was supplied.
    """
    if fmt == OutputFormat.json:
        print(json.dumps(tokens._asdict(), indent=4))
    elif fmt == OutputFormat.shell:
        print(f"export ACCESS_TOKEN={tokens.access_token}")
        print(f"export ID_TOKEN={tokens.id_token}")
    elif fmt == OutputFormat.config:
        profile.set_credentials(profile_name=kwargs["profile"], credentials=tokens)
    else:
        # BUG FIX: the original f-string interpolated the *builtin* `format`
        # function instead of the `fmt` argument, producing a useless message
        # like "Output format <built-in function format> not implemented".
        raise ValueError(f"Output format {fmt} not implemented")
/*
 * Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
 */

package com.lightbend.lagom.javadsl.api

import org.scalatest.{ Inside, Matchers, WordSpec }

// Verifies that an eta-expanded method reference can be lifted into a
// ScalaServiceSupport.ScalaMethodCall that still knows its declaring class
// and its method name (both are read back via java.lang.reflect.Method).
class ScalaSupportSpec extends WordSpec with Matchers with Inside {

  "scala support" should {
    "resolve a function" in {
      // `testMethod _` eta-expands the method below; the implicit conversion
      // wraps it and captures the reflective Method handle.
      val method: ScalaServiceSupport.ScalaMethodCall[String] = testMethod _
      method.method.getDeclaringClass should ===(this.getClass)
      method.method.getName should ===("testMethod")
    }
  }

  // Plain identity method used as the reflection target above.
  def testMethod(s: String): String = s
}
---
layout: default
title: Ferramentas
description: Ferramentas utilizadas pela nossa empresa!
---

## Ferramentas

[Trello](https://trello.com)

[BugTrack](https://www.bugtrack.net)

[GitHub](https://github.com)

[BPMN.io](https://demo.bpmn.io/s/start)
// Copyright Luc Yriarte <luc.yriarte@thingagora.org> 2018
// License: Apache-2.0

package main

import (
	"github.com/hyperledger/fabric/core/chaincode/shim"
)

// Storable is implemented by objects that can persist themselves in the
// chaincode world state under a caller-supplied key.
type Storable interface {
	// Put writes the object's state to the ledger under key.
	Put(stub shim.ChaincodeStubInterface, key string) error
	// Get loads the object's state from the ledger entry stored under key.
	Get(stub shim.ChaincodeStubInterface, key string) error
}
#!/bin/bash
# Installs the native build dependencies via Homebrew (macOS) and records the
# Malmo schema directory in the user's ~/.bashrc.

# Work from the repository root (this script lives two levels deep).
cd ../../

brew install boost --with-python
brew install boost-python ffmpeg xerces-c mono
brew cask install java

# Point MALMO_XSD_PATH at the checked-out Schemas directory.
schemas="$(pwd)/Schemas"
echo "export MALMO_XSD_PATH=$schemas" >> ~/.bashrc
# NOTE(review): sourcing ~/.bashrc here only affects this script's own shell,
# not the user's interactive session -- confirm that is intentional.
source ~/.bashrc
package com.quickbirdstudios.surveykit.backend.presenter

import com.quickbirdstudios.surveykit.FinishReason
import com.quickbirdstudios.surveykit.result.StepResult

/** Navigation outcomes a survey step presenter can produce. */
sealed class NextAction {
    /** Advance to the next step, carrying the current step's [result]. */
    data class Next(val result: StepResult) : NextAction()

    /** Go back to the previous step, carrying the current step's [result]. */
    data class Back(val result: StepResult) : NextAction()

    /** Skip the current step; no result is produced. */
    object Skip : NextAction()

    /** End the survey with the final [result] and the [finishReason] for closing. */
    data class Close(val result: StepResult, val finishReason: FinishReason) : NextAction()
}
import 'dart:async';

import 'package:academy_app/data/repository/failures/firestore_failure.dart';
import 'package:academy_app/data/repository/user/i_user_repository.dart';
import 'package:academy_app/model/user_data/user_data.dart';
import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
import 'package:injectable/injectable.dart';

part 'get_users_state.dart';
part 'get_users_cubit.freezed.dart';

/// Cubit that subscribes to the user stream from [IUserRepo] and exposes
/// load success/failure states, plus client-side name filtering.
@injectable
class GetUsersCubit extends Cubit<GetUsersState> {
  final IUserRepo iUserRepo;

  GetUsersCubit(this.iUserRepo) : super(GetUsersState.initial());

  // Current search query, forwarded to the repository in [getUsers].
  String query = '';
  // Users matching the current query (written by both the stream and [listChanged]).
  List<UserData> filterUsers = [];
  // Subscription to the repository stream; cancelled in [close].
  // NOTE(review): calling [getUsers] twice reassigns this without cancelling
  // the first subscription -- confirm callers invoke it only once.
  late StreamSubscription streamSubscription;

  /// Starts listening to the user stream and emits a state per event.
  Future<void> getUsers() async {
    streamSubscription = iUserRepo.getUsers(query).listen(
      (failureOrSucces) {
        failureOrSucces.fold(
          (failure) => emit(
            GetUsersState.loadFailure(failure),
          ),
          (users) {
            filterUsers = users;
            return emit(
              GetUsersState.loadSuccess(users),
            );
          },
        );
      },
    );
  }

  /// Filters [users] by a case-insensitive username match against [queryChanged].
  /// NOTE(review): this updates [filterUsers] and [query] but emits no state --
  /// presumably the UI reads [filterUsers] directly; verify.
  void listChanged(List<UserData> users, String queryChanged) {
    query = queryChanged;
    filterUsers = users.where((user) {
      final userName = user.userName!.getOrCrash().toLowerCase();
      final search = queryChanged.toLowerCase();
      return userName.contains(search);
    }).toList();
  }

  @override
  Future<void> close() {
    // Stop listening before the cubit is disposed.
    streamSubscription.cancel();
    return super.close();
  }
}
# Converts a hex-grid direction path (the letters e, w, ne, nw, se, sw run
# together, e.g. "esenee") into axial coordinates [east, north-east].
def coord(path)
  e = 0
  ne = 0
  path = path.chars
  until path.empty? do
    s = path.shift
    case s
    when ?e
      e += 1
    when ?w
      e -= 1
    when ?n
      # 'n' is always the first half of "ne" or "nw"; consume the second letter.
      t = path.shift
      case t
      when ?e
        ne += 1
      when ?w
        # nw = ne + w in axial coordinates.
        e -= 1
        ne += 1
      end
    when ?s
      t = path.shift
      case t
      when ?e
        # se = e + sw in axial coordinates.
        e += 1
        ne -= 1
      when ?w
        ne -= 1
      end
    end
  end
  [e, ne]
end

# FIX: guard the script body so loading/requiring this file (e.g. from a test)
# no longer blocks reading STDIN; behavior when run directly is unchanged.
# Each input line flips the tile at its destination; count black tiles.
if __FILE__ == $PROGRAM_NAME
  blacks = []
  $stdin.readlines.map(&:strip).each do |path|
    c = coord(path)
    if blacks.include?(c)
      blacks.delete(c)
    else
      blacks.push(c)
    end
  end
  puts blacks.length
end
๏ปฟ//********************** //Hosting eDrawings control in Windows Forms //Copyright(C) 2019 www.codestack.net //License: https://github.com/codestack-net-dev/solidworks-api-examples/blob/master/LICENSE //Product URL: https://www.codestack.net/edrawings-api/gettings-started/winforms/ //********************** using System; using System.Windows.Forms; using eDrawings.Interop.EModelViewControl; namespace CodeStack.Examples.eDrawingsApi { public partial class EDrawingsUserControl : UserControl { public event Action<EModelViewControl> EDrawingsControlLoaded; public EDrawingsUserControl() { InitializeComponent(); } public void LoadEDrawings() { var host = new EDrawingsHost(); host.ControlLoaded += OnControlLoaded; this.Controls.Add(host); host.Dock = DockStyle.Fill; } private void OnControlLoaded(EModelViewControl ctrl) { EDrawingsControlLoaded?.Invoke(ctrl); } } }
/**
 * Lowercases every static segment of a route path while leaving parameter
 * segments (those starting with ':') untouched, e.g.
 * '/Users/:UserId/Posts' -> '/users/:UserId/posts'.
 */
export function lowercaseStaticParts(path: string): string {
  const segments = path.split('/');
  const normalized: string[] = [];
  for (const segment of segments) {
    if (segment.startsWith(':')) {
      // Route parameter: keep its original casing.
      normalized.push(segment);
    } else {
      normalized.push(segment.toLowerCase());
    }
  }
  return normalized.join('/');
}
package com.gowtham.letschat.fragments

import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import com.google.android.material.bottomsheet.BottomSheetDialogFragment
import com.gowtham.letschat.databinding.FAttachmentBinding
import com.gowtham.letschat.databinding.FImageSrcSheetBinding
import com.gowtham.letschat.utils.BottomSheetEvent
import org.greenrobot.eventbus.EventBus

/**
 * Bottom sheet offering attachment sources. Each button posts a
 * [BottomSheetEvent] with an integer code on the EventBus and dismisses
 * the sheet: 0 = camera image, 1 = gallery image, 2 = gallery video,
 * 3 = camera video (codes follow the clicked view's name).
 */
class FAttachment : BottomSheetDialogFragment() {

    private lateinit var binding: FAttachmentBinding

    companion object {
        /** Factory keeping the arguments bundle pattern in one place. */
        fun newInstance(bundle: Bundle): FAttachment {
            val fragment = FAttachment()
            fragment.arguments = bundle
            return fragment
        }
    }

    override fun onCreateView(
        inflater: LayoutInflater,
        container: ViewGroup?,
        savedInstanceState: Bundle?): View {
        binding = FAttachmentBinding.inflate(layoutInflater, container, false)
        return binding.root
    }

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        // Each source button broadcasts its selection code, then closes the sheet.
        binding.imgCamera.setOnClickListener {
            EventBus.getDefault().post(BottomSheetEvent(0))
            dismiss()
        }
        binding.imgGallery.setOnClickListener {
            EventBus.getDefault().post(BottomSheetEvent(1))
            dismiss()
        }
        binding.videoGallery.setOnClickListener {
            EventBus.getDefault().post(BottomSheetEvent(2))
            dismiss()
        }
        binding.videoCamera.setOnClickListener {
            EventBus.getDefault().post(BottomSheetEvent(3))
            dismiss()
        }
    }
}
# Conway's Game of Life A C implementation of Conway's Game of Life using ncurses. ### Getting Started ```sh # Substitute `gmake` if on *BSD $ make $ ./conway ``` Press 'q' or CTRL-C to exit. A full list of keybindings can be displayed by pressing '?'. `conway` can also read a starting position from a cells formatted text file. ```sh $ ./conway patterns/glider.cells ```
module ElasticsearchDslBuilder
  # @abstract Exceptions raised by ElasticsearchDslBuilder inherit from Error
  class Error < StandardError; end

  # Exception raised when Queries::Query.to_hash attempts to build invalid query
  class InvalidQuery < Error; end
end
require "alexa_string_tools/version" require "humanize" module AlexaStringTools # we'll lazy load this mapping. @@alexa_string_mapping = nil def email_from_alexa string = convert_from_alexa_string_to_email(self) string.strip end private def convert_from_alexa_string_to_email(string) alexa_string_mapping.each do |from,to| string = string.gsub(from, to) end string.gsub(' ', '') end def alexa_string_mapping return @@alexa_string_mapping if @@alexa_string_mapping @@alexa_string_mapping = {} # we donโ€™t have to pad these with spaces because the results are crunched together in the end. ('A'..'Z').to_a.each do |letter| @@alexa_string_mapping["#{letter}."] = letter @@alexa_string_mapping["#{letter.downcase}."] = letter.downcase end # itโ€™s okay to pad these with spaces, because they canโ€™t be at the beginning or end anyway. @@alexa_string_mapping.merge!({ ' at ' => '@', ' dot ' => '.', ' period ' => '.', ' underscore ' => '_', ' plus ' => '+', }) # spoken numbers are always converted into integers. (0..10000).to_a.reverse.each do |number| number_as_string = number.humanize number_as_string = number_as_string.gsub(/,/, '') number_as_string = number_as_string.gsub(/-/, ' ') @@alexa_string_mapping[number_as_string] = number.to_s end @@alexa_string_mapping end end String.class_eval do include AlexaStringTools end # trigger the loading of the string mapping automatically at load time. "".email_from_alexa
package com.xaron.equilinoxmodded.framework.blueprintgen.components;

import java.io.IOException;

import com.xaron.equilinoxmodded.framework.CsvWriter;
import com.xaron.equilinoxmodded.framework.blueprintgen.components.deathai.DeathAIGen;

import food.FoodSectionType;

/**
 * Generates the "FOOD" component entry of a blueprint CSV: a count followed
 * by one record per edible section.
 */
public class FoodComponentGen extends ComponentGen {

	/** One edible section of an entity. */
	public class FoodSection {

		// NOTE(review): `name` is an int identifier, presumably an index into a
		// text bank rather than a display string -- confirm.
		private int name;
		private int foodPoints;
		private FoodSectionType type;
		// Only meaningful for TO_SHARE sections; stays 0 otherwise.
		private int portions = 0;
		// Only written for WHOLE and ROOT_VEG sections (see writeComponent).
		private DeathAIGen deathAi;

		public FoodSection(int name, int foodPoints, FoodSectionType type, DeathAIGen deathAi) {
			this.name = name;
			this.foodPoints = foodPoints;
			this.type = type;
			this.deathAi = deathAi;
		}

		/** Convenience constructor for shared (TO_SHARE) food sections. */
		public FoodSection(int name, int foodPoints, int portions) {
			this.name = name;
			this.foodPoints = foodPoints;
			this.type = FoodSectionType.TO_SHARE;
			this.portions = portions;
		}
	}

	private FoodSection[] foodSections;

	public FoodComponentGen(FoodSection[] foodSections) {
		super("FOOD");
		this.foodSections = foodSections;
	}

	/**
	 * Writes the section count, then each section's name id, food points and
	 * type, followed by the type-specific payload (portions or death AI).
	 */
	@Override
	public void writeComponent(CsvWriter writer) throws IOException {
		super.writeComponent(writer);
		writer.writeInt(foodSections.length);
		for (int i = 0; i < foodSections.length; i++) {
			writer.writeInt(foodSections[i].name);
			writer.writeInt(foodSections[i].foodPoints);
			writer.writeString(foodSections[i].type.name());
			if (foodSections[i].type == FoodSectionType.TO_SHARE)
				writer.writeLabelInt("portions", foodSections[i].portions);
			else if (foodSections[i].type == FoodSectionType.WHOLE)
				foodSections[i].deathAi.write(writer);
			else if (foodSections[i].type == FoodSectionType.ROOT_VEG)
				foodSections[i].deathAi.write(writer);
		}
	}
}
// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

library which.test.candidate_paths;

import 'package:unittest/unittest.dart';

import 'util.dart';

// Tests for getCandidatePaths: the ordered list of absolute file paths the
// `which` lookup should probe for a given command name.
main() {
  group('getCandidatePaths', () {
    test('posix', () {
      // POSIX: no extensions; only PATH entries are probed, in order.
      var candidatePaths = getPosixCandidatePaths('z', '/x/y:/a/b/c', '/foo/bar');
      expect(candidatePaths, ['/x/y/z', '/a/b/c/z']);
    });
    test('windows', () {
      // Windows: the current directory is probed first, and each candidate is
      // expanded with every PATHEXT extension, in order.
      var candidatePaths = getWindowsCandidatePaths(
          'z', r'C:\x\y;C:\a\b\c', '.EXE;.BAT', r'C:\foo\bar');
      expect(candidatePaths, [
        r'C:\foo\bar\z.EXE',
        r'C:\foo\bar\z.BAT',
        r'C:\x\y\z.EXE',
        r'C:\x\y\z.BAT',
        r'C:\a\b\c\z.EXE',
        r'C:\a\b\c\z.BAT']);
    });
  });
}
package it.sephiroth.android.library.kotlin_extensions.io.reactivex

import io.reactivex.disposables.Disposable

/**
 * Registers this [Disposable] with [autoDisposable] and returns the receiver
 * unchanged, so the call can terminate a subscription chain.
 */
fun Disposable.addTo(autoDisposable: AutoDisposable): Disposable =
    also { autoDisposable.add(it) }
#pragma once
#ifndef UTILS_HPP
#define UTILS_HPP

namespace cave {

	/** Vector de tres componentes (three-component float vector). */
	struct caveVec3f {
		float x;
		float y;
		float z;

		// Member initializer list instead of assignments in the body.
		caveVec3f(float x, float y, float z) : x(x), y(y), z(z) {}

		caveVec3f() = default;
	};

	/** Cuaternion (quaternion: x, y, z imaginary parts, w scalar part). */
	struct caveQuat {
		float x;
		float y;
		float z;
		float w;

		caveQuat(float x, float y, float z, float w) : x(x), y(y), z(z), w(w) {}

		caveQuat() = default;
	};

	/** Color rgba; every channel defaults to 1.0f (opaque white). */
	struct caveColour {
		float r;
		float g;
		float b;
		float alpha;

		// BUG FIX: the original also declared `caveColour() = default;` which,
		// combined with this all-defaulted constructor, made any default
		// construction (`caveColour c;`) ambiguous and thus a compile error.
		// The all-defaulted constructor alone covers that case.
		caveColour(float r = 1.0f, float g = 1.0f, float b = 1.0f, float alpha = 1.0f)
			: r(r), g(g), b(b), alpha(alpha) {}
	};

}

#endif
## ์›น ์•ˆ๋…•ํ•˜์„ธ์š”. ! MONKEY.D ์ž…๋‹ˆ๋‹ค :-) ์›น ์Šคํฌ๋ž˜ํ•‘ ํ•˜๋Š” ํ”„๋ ˆ์ž„์„ ๊ฐ„๋‹จํ•˜๊ฒŒ ์ ์–ด๋ณผ๊ฑฐ์—์š”. ์š”๋ฒˆ์—๋Š” ์ €๋งŒ์ด ์ฐธ๊ณ ํ•˜๋Š” ์šฉ์œผ๋กœ ์ž‘์„ฑํ•  ๊ฑฐ์—ฌ์„œ ์–‘ํ•ด ๋ถ€ํƒ๋“œ๋ฆฝ๋‹ˆ๋‹ค. ```python #์…€๋ ˆ๋‹ˆ์›€ ๊ธฐ๋ณธ ํ”„๋ ˆ์ž„ from selenium import webdriver browser = webdriver.Chrome() browser.maximize_window() url = "url๋„ฃ์„ ์ฃผ์†Œ" browser.get(url) #์‚ฌ์ดํŠธ๋กœ ์ด๋™. ``` ```python driver.find_element_by_xpath('xpath์ฃผ์†Œ!').click() ``` ```python #๋ทฐํ‹ฐํ”Œ์ˆฉ ๊ธฐ๋ณธ ํ”„๋ ˆ์ž„ import requests from bs4 import BeautifulSoup url = "๊ฐ€์ ธ์˜ฌ url์ฃผ์†Œ" ๋ณ€์ˆ˜ = requests.get(url) ๋ณ€์ˆ˜.raise_for_status() #๋ณ€์ˆ˜.raise_for_status() ๋งŒ์•ฝ ํ™ˆํŽ˜์ด์ง€ ๋ณด์•ˆ์ƒ์˜ ์ด์œ ๋‚˜ ๋ชจ์ข…์˜ ์ด์œ ๋กœ ์Šคํฌ๋ž˜ํ•‘์ด ๋ถˆ๊ฐ€๋Šฅํ•œ ๊ฒฝ์šฐ์— ์˜ค๋ฅ˜๋ฅผ ๋‚ด๋Š” ํ•จ์ˆ˜์‹์ž…๋‹ˆ๋‹ค. soup=Beautifulsoup(๋ณ€์ˆ˜.text, "lxml") ``` ***์ •๊ทœ์‹*** ์šฐ๋ฆฌ๊ฐ€ ์–ด๋–ค ์ •๋ณด๋ฅผ ์ฐพ์„ ๋•Œ ๊ทธ๋ž˜๋„ ๋Œ€๋ถ€๋ถ„์˜ ํ˜•์‹์ด ์ •ํ•ด์ ธ์žˆ๊ฒ ์ฃ ? ๋‚™์—ฝ์„ ๊ธ๊ฐœ๋กœ ์“ธ์–ด์˜ฌ ๋•Œ ๋Œ•๊ฒจ์™€์„œ ๊ธ์ง€ ๋ง‰ ๋˜์ ธ์„œ ๊ทธ๊ฑธ ์ฃผ์›Œ์˜ค์ง€๋Š” ์•Š์ง€ ์•Š์Šต๋‹ˆ๊นŒ. ์ด๋Ÿฐ ์ •๊ทœ์‹๋“ค์„ ์•Œ์•„๋ณด๋„๋ก ํ•˜๊ฒ ์Šต๋‹ˆ๋‹ค. ๋จผ์ € ์ •๊ทœ์‹์„ ์“ฐ๊ธฐ ์œ„ํ•ด์„  ์ค€๋น„๊ฐ€ ๋˜์–ด์žˆ์–ด์•ผ๊ฒ ์ฃ ? ```python import re ๋ณ€์ˆ˜=re.compile("xx.xx") or ๋ณ€์ˆ˜=re.compile("^xxx") or ๋ณ€์ˆ˜=re.compile("xxxx$") ``` ์ด๋Ÿฐ์‹์œผ๋กœ ๋‚ด๊ฐ€ ๋จผ์ € ๋ฌธ์ž๋ฅผ ์–ด๋–ค ๋ฐฉ์‹์œผ๋กœ ์ฐพ์„ ๊ฒƒ์ธ์ง€๋ฅผ ์ •ํ•ด์ฃผ์–ด์–ด์•ผํ•ฉ๋‹ˆ๋‹ค. **"."**์ด ๋“ค์–ด๊ฐ€๋Š” ๊ฒฝ์šฐ์—๋Š” **"."**ํ•œ๊ธ€์ž๋ฅผ ์ œ์™ธํ•˜๊ณ  ์ผ์น˜ํ•˜๋Š” ๋ฌธ์ž๋ฅผ ๋‹ค ์ฐพ์Šต๋‹ˆ๋‹ค. ์˜ˆ๋ฅผ ๋“ค์–ด ๋ณ€์ˆ˜=re.compile("ca.e") ๋ผ๊ณ  ํ•œ๋‹ค๋ฉด "cafe","case","care"๋“ฑ๊ณผ ๊ฐ™์€ ๋‹จ์–ด๋“ค์„ ์š”๊ตฌํ•ฉ๋‹ˆ๋‹ค. **"^"**์ด ๋“ค์–ด๊ฐ€๋Š” ๊ฒฝ์šฐ์—๋Š” **' ~๋กœ ์‹œ์ž‘ํ•˜๋Š”'** ์˜๋ฏธ๋ฅผ ๊ฐ€์ง‘๋‹ˆ๋‹ค. ์˜ˆ๋ฅผ ๋“ค์–ด ๋ณ€์ˆ˜=re.compile("^de") ๋ผ๊ณ  ํ•œ๋‹ค๋ฉด "destination","deep"๋“ฑ๊ณผ ๊ฐ™์€ ๋‹จ์–ด๋“ค์„ ์š”๊ตฌํ•ฉ๋‹ˆ๋‹ค. **** **"$"**์ด ๋“ค์–ด๊ฐ€๋Š” ๊ฒฝ์šฐ์—๋Š” '~๋กœ ๋๋‚˜๋Š”' ์˜๋ฏธ๋ฅผ ๊ฐ€์ง‘๋‹ˆ๋‹ค. 
์˜ˆ๋ฅผ ๋“ค์–ด ๋ณ€์ˆ˜=re.compile("se$") ๋ผ๊ณ  ํ•œ๋‹ค๋ฉด "case","base"๋“ฑ๊ณผ ๊ฐ™์€ ๋‹จ์–ด๋“ค์„ ์š”๊ตฌํ•ฉ๋‹ˆ๋‹ค. ์ด๋ ‡๊ฒŒ ๋จผ์ € ์›ํ•˜๋Š” ๋ฌธ์ž์˜ ํ˜•ํƒœ๋ฅผ ์ •ํ•ด์ค€ ๋‹ค์Œ์— ์ฃผ์–ด์ง„ ๋ฌธ์ž๋ฅผ ์ž…๋ ฅํ•ด์„œ ์š”๊ตฌํ•œ ๋‹จ์–ด์™€ ์ผ์น˜ํ•˜๋Š”์ง€๋ฅผ ํ™•์ธํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค. ๊ทธ ๋ฌธ์ž๋“ค์„ ์ž…๋ ฅํ•˜๋Š” ํ˜•ํƒœ๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์Šต๋‹ˆ๋‹ค. ```python m=p.match("๋น„๊ตํ•  ๋ฌธ์ž์—ด") or m=p.search("๋น„๊ตํ•  ๋ฌธ์ž์—ด") or m=p.findall("๋น„๊ตํ•  ๋ฌธ์ž์—ด") ``` **m=p.match("๋น„๊ตํ•  ๋ฌธ์ž์—ด")** : ์ฃผ์–ด์ง„ ๋ฌธ์ž์—ด์˜ ์ฒ˜์Œ๋ถ€ํ„ฐ ์ผ์น˜ํ•˜๋Š”์ง€ ํ™•์ธํ•ฉ๋‹ˆ๋‹ค. ๊ทธ๋Ÿฐ๋ฐ matchํ•จ์ˆ˜๋Š” ๋น„๊ตํ•  ๋ฌธ์ž์—ด์˜ ์ฒ˜์Œ๋ถ€ํ„ฐ ์ผ์น˜ํ•˜๋Š”์ง€๋ฅผ ํ™•์ธํ•˜๊ธฐ ๋•Œ๋ฌธ์— ์ฒซ๋ถ€๋ถ„๋งŒ ์ผ์น˜ํ•˜๋ฉด ๋’ค์˜ ์–ด๋–ค๋ง์ด ์™€๋„ ๋งž๋‹ค๊ณ  ํŒ๋‹จํ•ฉ๋‹ˆ๋‹ค. **m=p.search("๋น„๊ตํ•  ๋ฌธ์ž์—ด")** : ์ฃผ์–ด์ง„ ๋ฌธ์ž์—ด ์ค‘์— ์ผ์น˜ํ•˜๋Š” ๊ฒŒ ์žˆ๋Š”์ง€ ํ™•์ธ, ์ฆ‰ ์ค‘๊ฐ„์— ๋‹จ์–ด๊ฐ€ ๊ปด์žˆ์–ด๋„ ์žˆ๊ธฐ๋งŒ ํ•œ๋‹ค๋ฉด ์š”๊ตฌํ•˜๋Š” ๋‹จ์–ด์— ๋งž๋‹ค๊ณ  ํŒ๋‹จํ•ฉ๋‹ˆ๋‹ค. **m=p.findall("๋น„๊ตํ•  ๋ฌธ์ž์—ด")** : ์ผ์น˜ํ•˜๋Š” ๋ชจ๋“  ๊ฒƒ์„ "๋ฆฌ์ŠคํŠธ" ํ˜•ํƒœ๋กœ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. ๋ง ๊ทธ๋Œ€๋กœ ๋ฆฌ์ŠคํŠธ๋กœ ๋ฐ”๊ฟ”์ฃผ๊ธฐ ๋•Œ๋ฌธ์— ๋ณดํ†ต์€ lst๋ณ€์ˆ˜๋ฅผ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค. ์ฐธ์กฐ ๋ธ”๋กœ๊ทธ : https://blog.naver.com/paragonyun/222205019430
/*global Raphael, d3, $, define */ /*! * Diff็š„ๅ…ผๅฎนๆ€งๅฎšไน‰ */ ;(function (name, definition) { if (typeof define === 'function') { // Module define(definition); } else { // Assign to common namespaces or simply the global object (window) this[name] = definition(function (id) { return this[id]; }); } })('Diff', function (require) { var DataV = require('DataV'); /** * ๆž„้€ ๅ‡ฝๆ•ฐ * @param {Object} node ่กจ็คบๅœจhtml็š„ๅ“ชไธชๅฎนๅ™จไธญ็ป˜ๅˆถ่ฏฅ็ป„ไปถ * @param {Object} options ไธบ็”จๆˆท่‡ชๅฎšไน‰็š„็ป„ไปถ็š„ๅฑžๆ€ง๏ผŒๆฏ”ๅฆ‚็”ปๅธƒๅคงๅฐ */ var Diff = DataV.extend(DataV.Chart, { type: "Diff", initialize: function (node, options) { this.node = this.checkContainer(node); //ๅ›พ็š„ๅคงๅฐ่ฎพ็ฝฎ this.defaults.width = 900; this.defaults.height = 800; //่ฎพ็ฝฎ็”จๆˆทๆŒ‡ๅฎš็š„ๅฑžๆ€ง this.setOptions(options); //ๅˆ›ๅปบ็”ปๅธƒ this.createCanvas(); } }); /** * ๅˆ›ๅปบ็”ปๅธƒ */ Diff.prototype.createCanvas = function () { this.canvas = new Raphael(this.node, this.defaults.width, this.defaults.height); }; /** * ็ป˜ๅˆถๅผฆๅ›พ */ Diff.prototype.render = function () { this.layout(); }; // ่ฎก็ฎ—้กบๅบ็š„็›ธไผผๅบฆ var diffMap = function (list1, list2) { var map = []; var hit = 0; var lastIndex = -1; for (var i = 0; i < list1.length; i++) { var index = _.indexOf(list2, list1[i]); if (index === -1) { continue; } else { if (index > lastIndex) { lastIndex = index; map.push([i, index]); } hit++; } } console.log(map); console.log(map.length / list1.length); console.log(hit / list1.length); return map; }; /** *ๅฏนๅŽŸๅง‹ๆ•ฐๆฎ่ฟ›่กŒๅค„็† * @param {Array} table ๅฐ†่ฆ่ขซ็ป˜ๅˆถๆˆ้ฅผๅ›พ็š„ไบŒ็ปด่กจๆ•ฐๆฎ */ Diff.prototype.setSource = function (table1, table2) { this.rawData = [table1, table2]; this.diffMap = diffMap(table1, table2); }; /** *ๅˆ›ๅปบchordๅธƒๅฑ€ */ Diff.prototype.layout = function () { var that = this; var canvas = that.canvas; var paddingLeft = 10; var paddingTop = 10; var height = 20; var distance = 50; var width = (this.defaults.width - 2 * paddingLeft - distance) / 2; for (var j = 0, 
k = this.rawData.length; j < k; j++) { var maped = _.pluck(this.diffMap, j); for (var i = 0, l = this.rawData[j].length; i < l; i++) { canvas.rect(paddingLeft + j * (width + distance), paddingTop + height * i, width, height).attr({fill: _.indexOf(maped, i) !== -1 ? "#00ff00" : "#ff0000"}); canvas.text(paddingLeft + j * (width + distance), paddingTop + height * i + height / 2, this.rawData[j][i]).attr({'text-anchor': 'start'}); } } for (var i = 0, l = this.diffMap.length; i < l; i++) { var line = this.diffMap[i]; canvas.path("M" + (paddingLeft + width) + ' ' + (paddingTop + height * line[0] + height / 2) + "L" + (paddingLeft + width + distance) + " " + (paddingTop + height * line[1] + height / 2)).attr({stroke: '#00ff00'}); } }; return Diff; });
// Builds an ApexCharts configuration for a minimal "sparkline" chart: no
// axes, grid, legend, labels or toolbar -- just the series itself.
//   type    - chart type string (e.g. "area")
//   height  - chart height (e.g. '51px')
//   numbers - data points for the single series
//   color   - fill and stroke colour
var options = function(type, height, numbers , color){
    return {
        chart: {
            height: height,
            width: '100%',
            type: type,
            // Sparkline mode hides all chart chrome.
            sparkline: {
                enabled: true
            },
            toolbar: {
                show: false,
            },
        },
        grid: {
            show: false,
            padding: {
                top: 0,
                right: 0,
                bottom: 0,
                left: 0
            }
        },
        dataLabels: {
            enabled: false
        },
        legend: {
            show: false,
        },
        series: [
            {
                name: "serie1",
                data: numbers
            }
        ],
        fill: {
            colors: [color],
        },
        stroke:{
            colors: [color],
            width: 3
        },
        yaxis: {
            show: false,
        },
        xaxis: {
            show: false,
            labels: {
                show: false,
            },
            axisBorder: {
                show: false,
            },
            tooltip: {
                enabled: false,
            }
        },
    };
}

// Render two sparkline charts into the first two `.analytics_1` containers.
// NOTE(review): `numArr` is defined elsewhere (presumably returns an array of
// numbers in the given range) -- confirm against the including page.
var analytics_1 = document.getElementsByClassName("analytics_1");
if (analytics_1 != null && typeof(analytics_1) != 'undefined') {
    var chart = new ApexCharts(analytics_1[0], options("area" , '51px' , numArr(10,99) , '#4fd1c5'));
    var chart_1 = new ApexCharts(analytics_1[1], options("area" , '51px' , numArr(10,99) , '#4c51bf'));
    chart.render();
    chart_1.render();
}
using Random
using StaticArrays
using Test
using CLIMA
using CLIMA.Atmos
using CLIMA.GenericCallbacks
using CLIMA.ODESolvers
using CLIMA.Mesh.Filters
using CLIMA.MoistThermodynamics
using CLIMA.PlanetParameters
using CLIMA.VariableTemplates

# ------------------------ Description ------------------------- #
# 1) Dry Rising Bubble (circular potential temperature perturbation)
# 2) Boundaries - `All Walls` : NoFluxBC (Impermeable Walls)
#                 Laterally periodic
# 3) Domain - 2500m[horizontal] x 2500m[horizontal] x 2500m[vertical]
# 4) Timeend - 1000s
# 5) Mesh Aspect Ratio (Effective resolution) 1:1
# 7) Overrides defaults for
#               `forcecpu`
#               `solver_type`
#               `sources`
#               `C_smag`
# 8) Default settings can be found in `src/Driver/Configurations.jl`
# ------------------------ Description ------------------------- #

# Initial condition: a dry hydrostatically-balanced background with a
# spherical potential-temperature perturbation (the "bubble") of up to 5 K
# centred at (xc, yc, zc), decaying as cospi(r/rc/2) out to radius rc.
function init_risingbubble!(bl, state, aux, (x,y,z), t)
  FT = eltype(state)
  # Dry-air thermodynamic constants (from CLIMA.PlanetParameters)
  R_gas::FT = R_d
  c_p::FT = cp_d
  c_v::FT = cv_d
  ฮณ::FT = c_p / c_v
  p0::FT = MSLP
  # Bubble centre and radius
  xc::FT = 1250
  yc::FT = 1250
  zc::FT = 1000
  r = sqrt((x-xc)^2+(y-yc)^2+(z-zc)^2)
  rc::FT = 500
  ฮธ_ref::FT = 300
  ฮ”ฮธ::FT = 0
  # Apply the perturbation only inside the bubble radius
  if r <= rc
    ฮ”ฮธ = FT(5) * cospi(r/rc/2)
  end
  #Perturbed state:
  ฮธ = ฮธ_ref + ฮ”ฮธ # potential temperature
  ฯ€_exner = FT(1) - grav / (c_p * ฮธ) * z # exner pressure
  ฯ = p0 / (R_gas * ฮธ) * (ฯ€_exner)^ (c_v / R_gas) # density
  P = p0 * (R_gas * (ฯ * ฮธ) / p0) ^(c_p/c_v) # pressure (absolute)
  T = P / (ฯ * R_gas) # temperature
  ฯu = SVector(FT(0),FT(0),FT(0)) # initially at rest
  #State (prognostic) variable assignment
  e_kin = FT(0)
  e_pot = grav * z
  ฯe_tot = ฯ * total_energy(e_kin, e_pot, T)
  state.ฯ = ฯ
  state.ฯu = ฯu
  state.ฯe = ฯe_tot
  state.moisture.ฯq_tot = FT(0) # dry case: no moisture
end

# Build the LES driver configuration for the rising-bubble problem.
function config_risingbubble(FT, N, resolution, xmax, ymax, zmax)
  # Boundary conditions
  # NOTE(review): `bc` is constructed but never passed to the model or the
  # configuration below — confirm whether NoFluxBC is applied elsewhere.
  bc = NoFluxBC()
  # Choose explicit solver
  ode_solver = CLIMA.ExplicitSolverType(solver_method=LSRK144NiegemannDiehlBusch)
  # Set up the model with Smagorinsky-Lilly turbulence closure
  C_smag = FT(0.23)
  model = AtmosModel{FT}(AtmosLESConfiguration; turbulence=SmagorinskyLilly{FT}(C_smag), source=(Gravity(),), init_state=init_risingbubble!)
  # Problem configuration
  config = CLIMA.Atmos_LES_Configuration("DryRisingBubble", N, resolution, xmax, ymax, zmax, init_risingbubble!, solver_type=ode_solver, model=model)
  return config
end

# Entry point: set up the solver, attach the positivity-preserving filter
# callback, run, and check the result against the expected value.
function main()
  CLIMA.init()
  # Working precision
  FT = Float64
  # DG polynomial order
  N = 4
  # Domain resolution and size
  ฮ”h = FT(50)
  ฮ”v = FT(50)
  resolution = (ฮ”h, ฮ”h, ฮ”v)
  # Domain extents
  xmax = 2500
  ymax = 2500
  zmax = 2500
  # Simulation time
  t0 = FT(0)
  timeend = FT(1000)
  # Courant number
  CFL = FT(0.8)

  driver_config = config_risingbubble(FT, N, resolution, xmax, ymax, zmax)
  solver_config = CLIMA.setup_solver(t0, timeend, driver_config, forcecpu=true, Courant_number=CFL)

  # User defined filter (TMAR positivity preserving filter), applied to the
  # moisture variable (index 6) after every step.
  cbtmarfilter = GenericCallbacks.EveryXSimulationSteps(1) do (init=false)
    Filters.apply!(solver_config.Q, 6, solver_config.dg.grid, TMARFilter())
    nothing
  end

  # Invoke solver (calls solve! function for time-integrator)
  result = CLIMA.invoke!(solver_config;
                         user_callbacks=(cbtmarfilter,),
                         check_euclidean_distance=true)

  @test isapprox(result,FT(1); atol=1.5e-3)
end

main()
๏ปฟnamespace Evolution.Textkernel { using Evolution.Textkernel.Models; using System.Threading.Tasks; /// <summary>Interface for Textkernel's CV Extract parser.</summary> public interface ITextkernelParser { /// <summary>Send the file bytes to the service and get back the deserialised result.</summary> /// <param name="file">The CV file to parse.</param> /// <param name="filename">Optional name of the file.</param> /// <returns>The deserialised result.</returns> Task<Profile> Parse(byte[] file, string filename = null); } }
(ns metro.components.web.health
  (:require [clojure.data.json :as json]
            [metro.components.server.pedestal :as server]
            [metro.components.db.postgres :as postgres]))

;; Ring-style health-check handler: reports the current server and database
;; status atoms as a JSON payload. The `request` argument is accepted for the
;; handler contract but unused.
(defn health
  [request]
  (let [payload {:server   @server/status
                 :database @postgres/status}]
    {:status  200
     :headers {"Content-Type" "application/json"}
     :body    (json/write-str payload)}))
import React, { Component } from 'react'
import ConcertList from '../components/ConcertList'
import DeleteArtist from '../components/DeleteArtist'

/**
 * Shows the currently selected artist (name, image, delete control) next to
 * the artist's concert list. Fetches concerts on mount and refetches when
 * the selected artist changes.
 */
class CurrentArtist extends Component {
  componentDidMount() {
    this.props.getConcerts(this.props.currentArtist.id)
  }

  componentDidUpdate(prevProps) {
    // Refetch only when the selected artist actually changed.
    if (this.props.currentArtist !== prevProps.currentArtist) {
      this.props.getConcerts(this.props.currentArtist.id)
    }
  }

  render() {
    // BUG FIX: JSX requires `className` — the HTML `class` attribute is
    // ignored (with a warning) by React, so the Bootstrap styles were never
    // applied to these elements. Also removed the unused local `view` and
    // added an `alt` attribute for accessibility.
    return (
      <div className='container'>
        <br/><br/>
        <div className='row'>
          <div className='col-lg'>
            <h2>{this.props.currentArtist.name}</h2><br/>
            <img
              className='img-fluid'
              src={this.props.currentArtist.image_url}
              alt={this.props.currentArtist.name}
            /><br/><br/>
            <DeleteArtist id={this.props.currentArtist.id} deleteArtist={this.props.deleteArtist}/><br/>
          </div>
          <div className='col-lg'>
            <ConcertList
              concerts={this.props.concerts}
              artistId={this.props.currentArtist.id}
              addConcert={this.props.add}/>
          </div>
        </div>
      </div>
    )
  }
}

export default CurrentArtist
<?php

namespace estoque\Http\Controllers;

use Illuminate\Http\Request;
use estoque\Http\Requests;
use Illuminate\Support\Facades\DB;
use estoque\Produto;
use estoque\Http\Requests\ProdutoRequest;

/**
 * CRUD controller for Produto (product) records. All actions require an
 * authenticated user (see the constructor middleware).
 */
class ProdutoController extends Controller {

    public function __construct(Request $request) {
        //$this->middleware('auth', ['except' => ['/home', '/lala']]);
        $this->middleware('auth');
    }

    // List all products.
    public function lista() {
        /*$produtos = DB::select('SELECT * FROM produtos');
        if(view()->exists('produto.listagem')) {
            return view('produto/listagem')->with('produtos', $produtos);
            return view('listagem', ['produtos' => $produtos]);
            return view('listagem')->withProdutos($produtos);//magic methods
        } else {
            return view('welcome');
        }*/
        $produtos = Produto::all();
        return view('produto/listagem')->with('produtos', $produtos);
    }

    // Show a single product, selected by query string: ?id=1
    public function mostra(Request $request) {
        /*$id = $request->route('id');
        $produto = DB::select('SELECT * FROM produtos WHERE id = ?', [$id]);*/
        if($request->has('id')) {// check whether the id parameter was provided
            $id = $request->input('id');
            //$produto = DB::select('SELECT * FROM produtos WHERE id = ?', [$id]);
            $produto = Produto::find($id);
        } else {
            return "Informe um id.";
        }
        if(empty($produto)) {
            return "Este produto nรฃo existe.";
        }
        return view('produto/detalhes')->with('p', $produto);
    }

    // Show the "new product" form.
    public function novo() {
        return view('produto/formulario');
    }

    // Create a product from a validated form request, then redirect to the list.
    public function adiciona(ProdutoRequest $req) {
        /* $nome = $req->input('nome');
        $desc = $req->input('descricao');
        $valor = $req->input('valor');
        $qtd = $req->input('quantidade');
        DB::insert('INSERT INTO produtos (nome, quantidade, valor, descricao) VALUES (?, ?, ?, ?)', array($nome, $qtd, $valor, $desc));
        //return redirect('/produtos')->withInput();//envia tudo
        //return redirect('/produtos')->withInput($req->only('nome'));
        return redirect()->action('ProdutoController@lista')->withInput();*/
        //outra forma de fazer
        /*$params = $req->all();
        $produto = new Produto($params);
        $produto->save();*/
        //mais uma forma de fazer
        // Mass assignment via the model (requires $fillable on Produto).
        Produto::create($req->all());
        return redirect()->action('ProdutoController@lista')->withInput();
    }

    // Delete a product; id comes from the route segment: /1
    public function remove($id) {
        $produto = Produto::find($id);
        // NOTE(review): Produto::find() returns null for a missing id, which
        // would make ->delete() fatal — TODO confirm ids are always valid here.
        $produto->delete();
        return redirect()->action('ProdutoController@lista');
    }

    // Show the edit form for a product. (The mostra() style could be reused;
    // this is kept as an example of the route-parameter approach.)
    public function editar($id) {
        $produto = Produto::find($id);
        return view('produto/atualiza')->with('p', $produto);
    }

    // Update a product from the submitted form and redirect to the list.
    public function atualiza($id, Request $req) {
        $produto = Produto::findOrFail($id);
        $params = $req->all();
        $produto->fill($params)->save();
        return redirect()->action('ProdutoController@lista');
    }
}

/* exemplo
public function __construct(Request $request) {
    $this->request = $request;
}
public function checkText() {
    $txt = $this->request->has('txt');
    return $txt;
}
*/
package org.zstack.header.identity;

import org.zstack.header.query.ExpandedQueries;
import org.zstack.header.query.ExpandedQuery;
import org.zstack.header.search.Inventory;

import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * Inventory (API view) of the user/group membership reference, mapped from
 * {@link UserGroupUserRefVO}. Expanded queries allow joining out to the
 * referenced user and group inventories by uuid.
 */
@Inventory(mappingVOClass = UserGroupUserRefVO.class)
@ExpandedQueries({
        @ExpandedQuery(expandedField = "user", inventoryClass = UserInventory.class,
                foreignKey = "userUuid", expandedInventoryKey = "uuid"),
        @ExpandedQuery(expandedField = "group", inventoryClass = UserGroupInventory.class,
                foreignKey = "groupUuid", expandedInventoryKey = "uuid")
})
public class UserGroupUserRefInventory {
    private String userUuid;
    private String groupUuid;
    private Timestamp createDate;
    private Timestamp lastOpDate;

    /** Converts a single VO into its inventory representation. */
    public static UserGroupUserRefInventory valueOf(UserGroupUserRefVO vo) {
        UserGroupUserRefInventory inventory = new UserGroupUserRefInventory();
        inventory.setUserUuid(vo.getUserUuid());
        inventory.setGroupUuid(vo.getGroupUuid());
        inventory.setCreateDate(vo.getCreateDate());
        inventory.setLastOpDate(vo.getLastOpDate());
        return inventory;
    }

    /** Converts a collection of VOs, preserving iteration order. */
    public static List<UserGroupUserRefInventory> valueOf(Collection<UserGroupUserRefVO> vos) {
        List<UserGroupUserRefInventory> inventories = new ArrayList<>(vos.size());
        for (UserGroupUserRefVO vo : vos) {
            inventories.add(valueOf(vo));
        }
        return inventories;
    }

    public String getUserUuid() {
        return userUuid;
    }

    public void setUserUuid(String userUuid) {
        this.userUuid = userUuid;
    }

    public String getGroupUuid() {
        return groupUuid;
    }

    public void setGroupUuid(String groupUuid) {
        this.groupUuid = groupUuid;
    }

    public Timestamp getCreateDate() {
        return createDate;
    }

    public void setCreateDate(Timestamp createDate) {
        this.createDate = createDate;
    }

    public Timestamp getLastOpDate() {
        return lastOpDate;
    }

    public void setLastOpDate(Timestamp lastOpDate) {
        this.lastOpDate = lastOpDate;
    }
}
rootProject.name = "PortalClosers"

// Register a module and point its project directory at the given path.
fun module(name: String, path: String) {
    include(name)
    project(name).projectDir = File(path)
}

// Game modules all live under game/.
module(":core", "game/core")
module(":android", "game/android")
module(":desktop", "game/desktop")
module(":headless", "game/headless")

// Composite build providing the project's custom Gradle plugins.
includeBuild("engine/gradle-plugins")
package v1 import ( "github.com/Gavazn/Gavazn/internal/category" "github.com/Gavazn/Gavazn/internal/comment" "github.com/Gavazn/Gavazn/internal/post" "github.com/Gavazn/Gavazn/internal/user" "github.com/labstack/echo" "go.mongodb.org/mongo-driver/bson" ) /** * @api {get} /api/v2/dashboard get statistics * @apiVersion 1.0.0 * @apiName getStatistics * @apiGroup Dashboard * * @apiSuccess {Number} total_posts * @apiSuccess {Number} total_categories * @apiSuccess {Number} total_comments * @apiSuccess {Number} total_users * * @apiError {String} error api error message */ func getStatistics(ctx echo.Context)error{ return ctx.JSON(200, echo.Map{ "total_posts": post.Count(bson.M{}), "total_categories": category.Count(bson.M{}), "total_comments": comment.Count(bson.M{}), "total_users": user.Count(bson.M{}), }) }
// Bootstrap the Angular app on #divWrap and register a list for the given
// service name/data file. The ready callback configures and initialises the
// list once its data has loaded.
function addServicesList(name, file) {
    angular.bootstrap(document.getElementById("divWrap"), ['useApp']);
    var list = RepeatObj.addList(name, file, function () {
        showService(name);
        function showService(name) {
            console.log(name + ' initialize(); complete');
            RepeatObj.useList.title = name;
            RepeatObj.useList.setActions({ entry: { Action: 'Done' } });
            RepeatObj.useList.initialize(name, true);
        }
    });
    return (list);
}

// Wrap a ListDataObj with a one-shot addReferences hook: the hook runs once
// after the first processData call and is then deleted.
function ToolbarObj (tempid, key) {
    var listobj = ListDataObj(tempid);
    listobj.DataKey = key;
    listobj.addReferences = addReferences;
    // Keep the original processData and chain it from processDatax.
    listobj.listobj_processData = listobj.processData;
    listobj.processDatax = function (data) {
        listobj.listobj_processData(data);
        if ( typeof (listobj.addReferences) === 'undefined') {
            // NOTE(review): empty branch — hook already consumed; nothing to do.
        } else {
            listobj.addReferences();
            delete (listobj.addReferences);
        }
    }
    return (listobj);
}

// Create a ToolbarObj for the given template id / data file and register it
// with RepeatObj.
// NOTE(review): ToolbarObj never returns null as written, so the error
// branch below is effectively dead — TODO confirm intended behaviour.
function addListObj(tempid, jsonfilename, readyfunc) {
    var funcname = 'RepeatObj.addList()';
    var listobj = null;
    if ((listobj = ToolbarObj(tempid, RepeatObj.getDataKey(jsonfilename))) == null) {
        console.log(funcname + '(); Error in ' + funcname + '; unable to create ListObj');
    } else {
        console.log(funcname + '(); NEW [' + tempid + '] ListObj[' + listobj.DataKey + ']');
    }
    RepeatObj.addListObj(listobj, tempid, jsonfilename, readyfunc);
    return (listobj);
}

// Convenience entry point: register the "Service" list from its JSON file.
function addServices() {
    //restoreTemp(addServicesList);
    return (addServicesList('Service', '/data/Service.json'));
}
import React from 'react'
import { graphql } from 'gatsby'
import Layout from '../components/Layout'
import { useSiteMetadata } from '../hooks'
import WebmentionReplies from '../components/Webmention/WebmentionFeed'
import { Helmet } from 'react-helmet/es/Helmet'

/**
 * Template for static markdown pages: renders the page body plus JSON-LD
 * metadata, a comment form, a tweet action and webmention replies.
 */
const PageTemplate = ({ data }) => {
  const { title: siteTitle } = useSiteMetadata()
  const { html: pageBody, frontmatter, fields } = data.markdownRemark
  // Canonical URL for this page, shared by the JSON-LD block, the comment
  // form, the tweet intent and the webmention feed.
  // BUG FIX: the tweet link previously pointed at https://hbish.com while
  // every other reference used https://arisemyson.com — now consistent.
  const pageUrl = 'https://arisemyson.com' + fields.slug

  return (
    <Layout
      title={`${frontmatter.title} | ${siteTitle}`}
      description={frontmatter.description}
    >
      <Helmet>
        <script type="application/ld+json">
          {`{
            "@context": "http://schema.org",
            "@type": "WebPage",
            "name": "${frontmatter.title} | ${siteTitle}",
            "url": "${pageUrl}",
            "description": "${frontmatter.description}"
          }`}
        </script>
      </Helmet>
      <div className={'content'}>
        <div className={'section-title'}>{frontmatter.title}</div>
        <article dangerouslySetInnerHTML={{ __html: pageBody }} />
        <hr />
        <div className={'socialize'}>
          <form
            id="comment-form"
            method="get"
            action="https://quill.p3k.io/"
            target="_blank"
          >
            <input type="hidden" name="dontask" value="1" />
            <input type="hidden" name="me" value="https://commentpara.de/" />
            <input type="hidden" name="reply" value={pageUrl} />
          </form>
          {/* SECURITY: rel="noopener noreferrer" prevents the opened tab
              from accessing window.opener. */}
          <a
            className={'button button-outline button-small'}
            target="_blank"
            rel="noopener noreferrer"
            href={`https://twitter.com/intent/tweet/?text=My%20thoughts%20on%20${pageUrl}`}
          >
            Tweet this post{' '}
          </a>
          <input
            form="comment-form"
            className={'button button-outline button-small'}
            type="submit"
            value="Write a comment"
          />{' '}
        </div>
        <WebmentionReplies target={pageUrl} />
      </div>
    </Layout>
  )
}

export default PageTemplate

export const pageQuery = graphql`
  query PageBySlug($slug: String!) {
    site {
      siteMetadata {
        title
        author {
          name
        }
      }
    }
    markdownRemark(fields: { slug: { eq: $slug } }) {
      id
      excerpt
      html
      fields {
        slug
      }
      frontmatter {
        title
        date(formatString: "MMMM DD, YYYY")
        categories
        tags
        description
      }
    }
  }
`
from rest_framework.permissions import BasePermission


class HasValidToken(BasePermission):
    """Permission that passes when the request carries any truthy token.

    Access is granted based solely on ``request.auth`` being present and
    truthy, regardless of whether the user is authenticated.
    """

    def has_permission(self, request, view):
        # A missing attribute and a falsy token (None, "", 0) both deny access.
        auth = getattr(request, "auth", False)
        return bool(auth)
package org.example.quiz.service

import cats.effect.{ContextShift, IO}
import org.example.quiz.dao.QuestionAnswerDao
import org.example.quiz.entities._

/**
 * Builds quizzes from randomly picked questions of a category and scores
 * submitted answers against the correct ones.
 */
class QuizService(dao: QuestionAnswerDao, categoryService: CategoryService)
                 (implicit cs: ContextShift[IO]) {

  // Fixed quiz length.
  private val numberOfQuestions = 10

  /**
   * Generate a quiz for the given category, or None when the category
   * does not exist.
   */
  def generate(categoryId: Long): IO[Option[QuizEntity]] =
    categoryService.get(categoryId).flatMap {
      case Some(category) =>
        pickQuestions(category, numberOfQuestions).map(qs => Some(QuizEntity(qs)))
      case None => IO(None)
    }

  // Pick up to n random question/answer records for the category and map
  // them to domain entities.
  private def pickQuestions(category: CategoryEntity, n: Int): IO[List[QuestionEntity]] = {
    val randomQAs = IO.fromFuture(IO(dao.pickByCategoryId(category.id, n = n)))
    randomQAs.map { qas =>
      qas.map { case (q, as) => QuestionEntity.fromRecord(q, as) }.toList
    }
  }

  /**
   * Score a list of given answers: an answer is good iff a matching
   * (questionId, answerId) pair exists among the correct answers.
   * The score is the fraction of good answers.
   *
   * NOTE(review): when `givenAnswers` is empty the score is 0/0 = NaN —
   * confirm callers never pass an empty list, or guard here.
   */
  def score(givenAnswers: List[GivenAnswerEntity]): IO[ScoreEntity] = {
    val questionIds = givenAnswers.map(_.questionId)
    IO.fromFuture(IO(dao.getCorrectQuestionAnswers(questionIds))).map { correctAnswers =>
      val goodAnswers = givenAnswers.filter { answer =>
        correctAnswers.exists { case (q, a) =>
          q == answer.questionId && a == answer.answerId
        }
      }
      val badAnswers = givenAnswers.diff(goodAnswers)
      val score = 1.0 * goodAnswers.size / givenAnswers.size
      ScoreEntity(score, correct = goodAnswers, wrong = badAnswers)
    }
  }
}
package aws

import (
	"fmt"
	"net/url"
	"regexp"
	"strings"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/awserr"
	"github.com/aws/aws-sdk-go/service/iam"
	"github.com/hashicorp/terraform/helper/schema"
)

// resourceAwsIamRolePolicy defines the aws_iam_role_policy resource: an
// inline IAM policy attached to a role. The id is "<role name>:<policy name>".
func resourceAwsIamRolePolicy() *schema.Resource {
	return &schema.Resource{
		// PutRolePolicy API is idempotent, so these can be the same.
		Create: resourceAwsIamRolePolicyPut,
		Update: resourceAwsIamRolePolicyPut,

		Read:   resourceAwsIamRolePolicyRead,
		Delete: resourceAwsIamRolePolicyDelete,
		Importer: &schema.ResourceImporter{
			State: schema.ImportStatePassthrough,
		},

		Schema: map[string]*schema.Schema{
			"policy": &schema.Schema{
				Type:     schema.TypeString,
				Required: true,
			},
			"name": &schema.Schema{
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
				ValidateFunc: func(v interface{}, k string) (ws []string, errors []error) {
					// https://github.com/boto/botocore/blob/2485f5c/botocore/data/iam/2010-05-08/service-2.json#L8291-L8296
					value := v.(string)
					if len(value) > 128 {
						errors = append(errors, fmt.Errorf(
							"%q cannot be longer than 128 characters", k))
					}
					if !regexp.MustCompile("^[\\w+=,.@-]+$").MatchString(value) {
						errors = append(errors, fmt.Errorf(
							"%q must match [\\w+=,.@-]", k))
					}
					return
				},
			},
			"role": &schema.Schema{
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
			},
		},
	}
}

// resourceAwsIamRolePolicyPut creates or updates the inline policy and sets
// the composite resource id.
func resourceAwsIamRolePolicyPut(d *schema.ResourceData, meta interface{}) error {
	iamconn := meta.(*AWSClient).iamconn

	request := &iam.PutRolePolicyInput{
		RoleName:       aws.String(d.Get("role").(string)),
		PolicyName:     aws.String(d.Get("name").(string)),
		PolicyDocument: aws.String(d.Get("policy").(string)),
	}

	if _, err := iamconn.PutRolePolicy(request); err != nil {
		return fmt.Errorf("Error putting IAM role policy %s: %s", *request.PolicyName, err)
	}

	d.SetId(fmt.Sprintf("%s:%s", *request.RoleName, *request.PolicyName))
	return nil
}

// resourceAwsIamRolePolicyRead refreshes state from the IAM API; a missing
// policy clears the id so Terraform plans a re-create.
func resourceAwsIamRolePolicyRead(d *schema.ResourceData, meta interface{}) error {
	iamconn := meta.(*AWSClient).iamconn

	role, name, err := resourceAwsIamRolePolicyParseId(d.Id())
	if err != nil {
		return err
	}

	request := &iam.GetRolePolicyInput{
		PolicyName: aws.String(name),
		RoleName:   aws.String(role),
	}

	getResp, err := iamconn.GetRolePolicy(request)
	if err != nil {
		if iamerr, ok := err.(awserr.Error); ok && iamerr.Code() == "NoSuchEntity" { // XXX test me
			d.SetId("")
			return nil
		}
		return fmt.Errorf("Error reading IAM policy %s from role %s: %s", name, role, err)
	}

	if getResp.PolicyDocument == nil {
		return fmt.Errorf("GetRolePolicy returned a nil policy document")
	}

	// The API returns the document URL-encoded.
	policy, err := url.QueryUnescape(*getResp.PolicyDocument)
	if err != nil {
		return err
	}
	if err := d.Set("policy", policy); err != nil {
		return err
	}
	if err := d.Set("name", name); err != nil {
		return err
	}
	return d.Set("role", role)
}

// resourceAwsIamRolePolicyDelete removes the inline policy from the role.
func resourceAwsIamRolePolicyDelete(d *schema.ResourceData, meta interface{}) error {
	iamconn := meta.(*AWSClient).iamconn

	role, name, err := resourceAwsIamRolePolicyParseId(d.Id())
	if err != nil {
		return err
	}

	request := &iam.DeleteRolePolicyInput{
		PolicyName: aws.String(name),
		RoleName:   aws.String(role),
	}

	if _, err := iamconn.DeleteRolePolicy(request); err != nil {
		return fmt.Errorf("Error deleting IAM role policy %s: %s", d.Id(), err)
	}
	return nil
}

// resourceAwsIamRolePolicyParseId splits a "<role name>:<policy name>" id.
func resourceAwsIamRolePolicyParseId(id string) (roleName, policyName string, err error) {
	parts := strings.SplitN(id, ":", 2)
	if len(parts) != 2 {
		// BUG FIX: error message typo — "of the for" -> "of the form".
		err = fmt.Errorf("role_policy id must be of the form <role name>:<policy name>")
		return
	}

	roleName = parts[0]
	policyName = parts[1]
	return
}
# Invokes Train<TDataSet, TPredictor>() on an ML.NET LearningPipeline via
# reflection, closing the generic method over the supplied types.
function Invoke-AmoebaMLPipeline {
    [CmdletBinding()]
    param(
        # Reference to the LearningPipeline instance to train.
        [Parameter(Mandatory=$True,ValueFromPipeline=$True)]
        [ref]$Pipeline,
        # Generic argument: the data set row type.
        [Parameter(Mandatory)]
        [Type]$DataSet,
        # Generic argument: the prediction output type.
        [Parameter(Mandatory)]
        [Type]$Predictor
    )
    Write-Verbose ("{0}: {1}" -f $MyInvocation.MyCommand, "Training Data");
    # Close the open generic Train<,> method over the requested types.
    $Method = [Microsoft.ML.LearningPipeline].GetMethod("Train").MakeGenericMethod([Type]$DataSet,[Type]$Predictor)
    try {
        return $Method.Invoke($Pipeline.Value, $null)
    }
    catch {
        # Dump the full InnerException chain for diagnosis; reflection wraps
        # the real failure in TargetInvocationException layers.
        # NOTE(review): deeper InnerException accesses are $null when the
        # chain is shorter — harmless in non-strict mode, but noisy output.
        write-host (
            @(
                $_
                $_.exception.InnerException.StackTrace
                $_.exception
                $_.exception.InnerException
                $_.exception.InnerException.InnerException
                $_.exception.InnerException.InnerException.InnerException
                $_.exception.InnerException.InnerException.InnerException.InnerException
            ) | out-string
        )
    }
}
namespace :doc do

  desc "Generate the documentation"
  task :yard do
    puts "Generating YARD documentation"
    system(File.join("..", "build", "run"), "doc:yardoc")
  end

  desc "Create the API.md file"
  task :api do
    require 'erb'
    require 'sinatra'
    require 'jsonmodel'

    # Pull in just enough of the backend to enumerate REST endpoints.
    require_relative '../backend/app/lib/rest.rb'
    require_relative '../backend/app/lib/username.rb'
    require_relative '../backend/app/model/backend_enum_source.rb'
    require_relative '../backend/app/lib/logging.rb'
    require_relative '../backend/app/lib/streaming_import.rb'
    require_relative '../backend/app/lib/component_transfer.rb'
    require_relative '../backend/app/lib/reports/report_helper.rb'

    # Minimal stand-in for the real service class so controllers can register
    # their endpoints without booting the app.
    class ArchivesSpaceService < Sinatra::Base
      def self.helpers
        nil
      end
      include RESTHelpers
    end

    @time = Time.new

    JSONModel::init(:enum_source => BackendEnumSource)
    require_relative '../backend/app/lib/export'
    # Load every controller (except system ones) so Endpoint.all is populated.
    Dir.glob(File.dirname(__FILE__) + '/../backend/app/controllers/*.rb') {|file| require file unless file =~ /system/}

    @endpoints = ArchivesSpaceService::Endpoint.all.sort{|a,b| a[:uri] <=> b[:uri]}
    @examples = JSON.parse( IO.read File.dirname(__FILE__) + "/../endpoint_examples.json" )

    # Render the endpoint list through the ERB template into ../API.md.
    erb = ERB.new(File.read('API.erb'), nil, '<>')
    File.open('../API.md', 'w') do |f|
      f.write erb.result(binding)
    end
  end

  desc 'Rename the YARD index file to avoid problems with Jekyll'
  task :rename_index do
    Dir.chdir('../') do
      # Rewrite references to _index.html (Jekyll ignores underscore-prefixed
      # files), then rename the file itself.
      files = Dir.glob('doc/**/*')
      files.each do |f|
        if File::file?(f)
          content = File.read(f)
          content.gsub!('"_index.html"', '"alpha_index.html"')
          content.gsub!('/_index.html', '/alpha_index.html')
          File.open(f, "w") do |io|
            io.write content
          end
        end
      end
      `mv doc/_index.html doc/alpha_index.html`
    end
  end

  desc 'This generates all documentation and publishes it to the doc folder'
  task :gen do
    Rake::Task["doc:api"].invoke
    Rake::Task["doc:yard"].invoke
    # Rake::Task["doc:yard-txt"].invoke
    Rake::Task["doc:rename_index"].invoke
  end

end
//index.js //่Žทๅ–ๅบ”็”จๅฎžไพ‹ Page({ onShareAppMessage(): object { return { title: 'Oak Weui Avatar ๅคดๅƒ', path: '/pages/avatar/avatar', } }, })
! Builds a 1-D hydrostatic atmosphere model (isothermal or isentropic)
! on a uniform vertical grid, with ghost zones for boundary conditions.
module model_module

  use amrex_fort_module, only : rt => amrex_real

  implicit none

contains

  ! Return the index bounds of the model array for a domain [ymin, ymax]
  ! with spacing dy. Four ghost cells are added on each side; by design,
  ! the base of the model will be at zone 0.
  subroutine get_model_size(ymin, ymax, dy, lo, hi)

    use amrex_fort_module, only : rt => amrex_real
    real(rt)         , intent(in) :: ymin, ymax, dy
    integer, intent(out) :: lo, hi

    integer :: npts

    ! number of points in the domain
    npts = (ymax - ymin)/dy + 1

    ! we'll do some ghost cells, for the boundary conditions
    ! by design, the base of the model will be at zone 0
    lo = -4
    hi = npts + 4

  end subroutine get_model_size

  ! Fill the model arrays (radius, density, pressure, temperature, internal
  ! energy) by hydrostatic integration upward and downward from the base
  ! (zone 0), then evaluate the EOS at every zone.
  subroutine get_model(ymin, ymax, dy, &
                       pres_base, dens_base, do_isentropic, &
                       xn_model, &
                       r_model, rho_model, T_model, e_model, p_model, &
                       lo, hi)

    use eos_module, only : eos
    use eos_type_module, only : eos_t, eos_input_rp
    use network, only : nspec
    use meth_params_module, only: const_grav

    use amrex_fort_module, only : rt => amrex_real
    integer, intent(in) :: lo, hi
    real(rt)         , intent(in) :: ymin, ymax, dy
    real(rt)         , intent(in) :: pres_base, dens_base
    logical, intent(in) :: do_isentropic
    real(rt)         , intent(in) :: xn_model(nspec)
    real(rt)         , intent(out) :: r_model(lo:hi)
    real(rt)         , intent(out) :: rho_model(lo:hi)
    real(rt)         , intent(out) :: T_model(lo:hi)
    real(rt)         , intent(out) :: e_model(lo:hi)
    real(rt)         , intent(out) :: p_model(lo:hi)

    real(rt)         :: H, gamma_const
    integer :: j

    type (eos_t) :: eos_state

    ! compute the pressure scale height (for an isothermal, ideal-gas
    ! atmosphere)
    H = pres_base / dens_base / abs(const_grav)

    ! create the constant if we are isentropic
    eos_state % rho = dens_base
    eos_state % p = pres_base
    eos_state % xn(:) = xn_model(:)

    ! initial guess
    eos_state % T = 1000.0e0_rt

    call eos(eos_input_rp, eos_state)

    ! effective gamma from the EOS at base conditions
    gamma_const = pres_base/(dens_base * eos_state % e) + 1.0e0_rt

    ! base zone: prescribed density and pressure, cell-centred radius
    rho_model(0) = dens_base
    p_model(0) = pres_base
    r_model(0) = ymin + 0.5e0_rt*dy

    ! integrate up from the base
    do j = 1, hi
       r_model(j) = ymin + (dble(j)+0.5e0_rt)*dy

       if (do_isentropic) then
          ! analytic isentropic profile
          rho_model(j) = dens_base*(const_grav*dens_base*(gamma_const - 1.0)* &
               (r_model(j)-r_model(0))/ &
               (gamma_const*pres_base) + 1.e0_rt)**(1.e0_rt/(gamma_const - 1.e0_rt))
       else
          ! isothermal: exponential falloff with scale height H
          rho_model(j) = dens_base * exp(-(r_model(j)-r_model(0))/H)
       endif

       ! trapezoid-style hydrostatic update for pressure
       p_model(j) = p_model(j-1) - &
            dy * 0.5e0_rt * (rho_model(j)+rho_model(j-1)) * abs(const_grav)

    enddo

    ! integrate down from the base (mirror of the upward sweep)
    do j = -1, lo, -1
       r_model(j) = ymin + (dble(j)+0.5e0_rt)*dy

       if (do_isentropic) then
          rho_model(j) = dens_base*(const_grav*dens_base*(gamma_const - 1.0)* &
               (r_model(j)-r_model(0))/ &
               (gamma_const*pres_base) + 1.e0_rt)**(1.e0_rt/(gamma_const - 1.e0_rt))
       else
          rho_model(j) = dens_base * exp(-(r_model(j)-r_model(0))/H)
       endif

       p_model(j) = p_model(j+1) + &
            dy * 0.5e0_rt * (rho_model(j)+rho_model(j+1)) * abs(const_grav)

    enddo

    ! thermodynamics: evaluate T and e from (rho, p) at every zone
    do j = lo, hi
       eos_state % rho = rho_model(j)
       eos_state % p = p_model(j)
       eos_state % xn(:) = xn_model(:)

       ! initial guess
       eos_state % T = 1000.0e0_rt

       call eos(eos_input_rp, eos_state)

       e_model(j) = eos_state % e
       T_model(j) = eos_state % T

    end do

  end subroutine get_model

end module model_module
// Dark colour theme for the dialog/control component: backgrounds, button
// fills and label colours.
export default {
  control: {
    opacity: 1,
    backgroundColor: "#242424" // overall control background
  },
  button: {
    backgroundColor: "#332A00" // default action button fill
  },
  buttonLabel: {
    color: "#FBD89B" // default action button text
  },
  disabledButtonLabel: {
    color: "#4F493D",
    fontSize: 20
  },
  cancelButton: {
    backgroundColor: "#5B0000", // destructive/cancel button fill
  },
  cancelButtonLabel: {
    color: "#A49A8C"
  },
  title: {
    color: "#BDC5CE"
  },
  message: {
    color: "#7F7F7F"
  }
};
// JVM_TARGET: 1.8
// KOTLIN_CONFIGURATION_FLAGS: +JVM.JVM8_TARGET_WITH_DEFAULTS
// Compiler test data: checks bytecode flags for interface methods when
// JVM 8 default methods are enabled — a body yields a plain public method,
// no body yields public abstract. The directive comments below drive the test.

interface Test {
    fun test(): String {
        return "OK"
    }

    fun testAbstract(): String
}

// TESTED_OBJECT_KIND: function
// TESTED_OBJECTS: Test, test
// FLAGS: ACC_PUBLIC

// TESTED_OBJECT_KIND: function
// TESTED_OBJECTS: Test, testAbstract
// FLAGS: ACC_PUBLIC, ACC_ABSTRACT
require "active_support/dependencies"

module ActiveMailer

  # Our host application root path
  # We set this when the engine is initialized
  mattr_accessor :app_root

  # Yield self on setup for nice config blocks
  def self.setup
    yield self
  end

end

require "active_mailer/engine"

# Extend schema statements with a helper that creates a table pre-populated
# with the columns every ActiveMailer table needs (sender, sent_at, subject),
# then yields to the caller's block for additional columns.
# NOTE(review): `block.call(t)` raises if no block is given — confirm all
# call sites pass a block, or guard with `block&.call(t)`.
ActiveRecord::ConnectionAdapters::SchemaStatements.module_eval do
  def create_active_mailer_table(*args, &block)
    block_with_default_columns = Proc.new do |t|
      t.integer :sender_id
      t.timestamp :sent_at
      t.string :subject
      block.call(t)
    end

    create_table(*args, &block_with_default_columns)
  end
end
<?php
/**
 * Category
 */
namespace app\Models;

/**
 * Plain data model for a category. Instances start with the default id 0
 * and name 'default-name' until populated.
 */
class Category
{
    /** @var int $id unique category identifier (0 = not yet persisted) */
    public $id = 0;

    /** @var string $name display name of the category */
    public $name = 'default-name';
}
<?php

namespace App\Http\Controllers\Admin;

use App\Http\Model\Real;
use App\Http\Model\User;
use Validator;
use Illuminate\Support\Facades\Crypt;
//use \Illuminate\Validation\Validator;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\DB;
use Illuminate\Support\Facades\Input;

/**
 * Admin dashboard pages plus the password-change action.
 */
class IndexController extends CommonController
{
    public function index(){
        // $pdo= DB::connection()->getPdo();
        // dd($pdo);
        return view('admin.index');
    }//

    public function info(){
        return view('admin.info');
    }

    public function element(){
        return view('admin.element');
    }

    // public function map()
    // {
    //     $loca=Real::all();
    ////        dd($loca);
    //
    //     return view('admin.map')->with('data',$loca);
    // }

    /**
     * Change the current user's password. GET shows the form; POST validates
     * the new password, checks the old one, and saves the encrypted value.
     */
    public function pass(){
        if ($input = Input::all()){
            $rules=[
                'password'=>'required|between:6,20|confirmed',
            ];
            $message=[
                'password.required'=>'ๆ–ฐๅฏ†็ ไธ่ƒฝไธบ็ฉบ๏ผ',
                'password.between'=>'ๆ–ฐๅฏ†็ ้•ฟๅบฆๅฟ…้กปๅœจ6ๅˆฐ20ไฝไน‹้—ด๏ผ',
                'password.confirmed'=>'ๆ–ฐๅฏ†็ ไธŽ็กฎ่ฎคๅฏ†็ ไธๅŒน้…๏ผ'
            ];
            $validator= Validator::make($input,$rules,$message);
            if ($validator->passes()){
                $user=User::where('user_name','=',session('user.user_name'))->first();
                $_password = Crypt::decrypt($user->user_password);
                if ($input['password_o']==$_password){
                    $user->user_password = Crypt::encrypt($input['password']);
                    $user->update();
                    // BUG FIX: the success path previously fell through with
                    // no return (blank response); restore the intended
                    // success feedback that was left commented out.
                    return back()->withErrors(['errors'=>'ๅฏ†็ ไฟฎๆ”นๆˆๅŠŸๅ•ฆ!']);
                }
                else{
                    return back()->withErrors(['errors'=>'ๅŽŸๅฏ†็ ้”™่ฏฏ!']);
                }
            }
            else{
                return back()
                    ->withErrors($validator);
            }
        }
        else{
            return view('admin.pass');
        }
    }
}
// IDE test data: <caret> marks the editor caret position for the test runner.
package my.sample

class A

// Extension under test; intentionally empty body.
fun A.check() {}

fun test() {
    val a = A()
    a.check<caret>()
    A().check()
}
# Octree search tests: load sphere centres/radii, build an octree, and check
# that searching the (+,+,+) octant finds exactly the spheres that can
# intersect it.

fn = normpath(joinpath(dirname(@__FILE__),"center_sizes.jld2"))
d = JLD2.jldopen(fn,"r")
tmp = d["ctrs"]
ctrs = [SVector(q...) for q in tmp]
rads = d["rads"]
tree = CD.Octree(ctrs, rads)

# extract all the triangles that (potentially) intersect octant (+,+,+)
pred(i) = all(ctrs[i].+rads[i] .> 0)
bb = SVector(0.5, 0.5, 0.5), 0.5
ids = collect(CD.searchtree(pred, tree, bb))
@test length(ids) == 178

# Second fixture: N spheres read from CSV assets.
N = 100

using DelimitedFiles
buf = readdlm(joinpath(@__DIR__,"assets","ctrs.csv"))
ctrs = vec(collect(reinterpret(SVector{3,Float64}, buf')))
rads = vec(readdlm(joinpath(@__DIR__,"assets","rads.csv")))
tree = CD.Octree(ctrs, rads)

pred(i) = all(ctrs[i] .+ rads[i] .> 0)
bb = @SVector[0.5, 0.5, 0.5], 0.5
ids = collect(CD.searchtree(pred, tree, bb))
# NOTE(review): @show is debug output — consider removing from the test.
@show ids

# Cross-check against a brute-force scan over all N spheres: same set of
# ids (order-insensitive), and the exact expected traversal order.
ids2 = findall(i -> all(ctrs[i].+rads[i] .> 0), 1:N)
@test length(ids2) == length(ids)
@test sort(ids2) == sort(ids)
@test ids == [26, 46, 54, 93, 34, 94, 75, 23, 86, 57, 44, 40, 67, 73, 77, 80]