text
stringlengths
27
775k
using System; using NSaga; using NSaga.AzureTables; using NSaga.SimpleInjector; using SimpleInjector;

namespace Samples
{
    /// <summary>
    /// Demonstrates wiring NSaga to Azure Table Storage, first through NSaga's
    /// internal container and then through SimpleInjector.
    /// </summary>
    public class AzureTableStorageSample
    {
        private ISagaMediator sagaMediator;

        /// <summary>Runs the sample using NSaga's internal container.</summary>
        public void Run()
        {
            try
            {
                // Requires the Azure Storage emulator to be running locally.
                var storageConnection = "UseDevelopmentStorage=true";

                var wireup = Wireup.UseInternalContainer()
                                   .UseRepository<AzureTablesSagaRepository>()
                                   .Register(typeof(ITableClientFactory), new TableClientFactory(storageConnection));
                sagaMediator = wireup.ResolveMediator();

                var sagaId = Guid.NewGuid();
                var verification = new PersonalDetailsVerification(sagaId)
                {
                    FirstName = "James",
                    LastName = "Bond",
                };

                sagaMediator.Consume(verification);
            }
            catch (Exception)
            {
                Console.WriteLine("Quite likely you don't have Azure Storage Emulator running, so this sample can't be executed");
            }
        }

        /// <summary>Runs the same scenario, resolving everything through SimpleInjector.</summary>
        public void WithSimpleInjector()
        {
            try
            {
                var container = new Container();
                container.RegisterNSagaComponents();
                container.UseSagaRepository<AzureTablesSagaRepository>();

                // Requires the Azure Storage emulator to be running locally.
                var storageConnection = "UseDevelopmentStorage=true";
                container.Register<ITableClientFactory>(() => new TableClientFactory(storageConnection), Lifestyle.Singleton);

                sagaMediator = container.GetInstance<ISagaMediator>();

                var sagaId = Guid.NewGuid();
                var verification = new PersonalDetailsVerification(sagaId)
                {
                    FirstName = "James",
                    LastName = "Bond",
                };

                sagaMediator.Consume(verification);
            }
            catch (Exception)
            {
                Console.WriteLine("Quite likely you don't have Azure Storage Emulator running, so this sample can't be executed");
            }
        }
    }
}
# Caesar-ciphers +string+ over the lowercase alphabet.
#
# string - the text to encrypt
# shift  - positions each letter is rotated; defaults to 1, preserving the
#          original call signature (caesar_cipher(str) behaves as before)
#
# Returns an Array of single-character strings.
#
# Fix: characters outside 'a'..'z' (spaces, punctuation, capitals) used to be
# flattened to a single space via fetch(c, " "), destroying the message
# layout; they are now passed through unchanged.
def caesar_cipher(string, shift = 1)
  alphabet = Array('a'..'z')
  encrypter = Hash[alphabet.zip(alphabet.rotate(shift))]
  string.chars.map { |c| encrypter.fetch(c, c) }
end

p 'Enter the word you want to be encrypted:'
p caesar_cipher(gets.chomp).join
class RegularEventUpdateJob < ApplicationJob
  queue_as :default

  # Refreshes every regular-event schedule by running each update job
  # synchronously, in declaration order.
  def perform(*args)
    [
      RegularEvents::Daily1UpdateJob,
      RegularEvents::Daily2UpdateJob,
      RegularEvents::Weekly1UpdateJob,
      RegularEvents::Weekly2UpdateJob,
      RegularEvents::MonthlyUpdateJob
    ].each(&:perform_now)
  end
end
# minikube [![asciicast](minikube-setup.gif)](https://asciinema.org/a/7JhlsED9rIJaZaE5wvOxSbpY9?autoplay=1)
-- CREATE VIEW: exercises the full lifecycle of a view definition.

create table tab1 (i1 integer, i2 integer);

-- Define v1 over the first column, then redefine it in place over the second;
-- CREATE OR REPLACE must succeed without an intervening DROP.
create view v1 as select i1 from tab1;
create or replace view v1 as select i2 from tab1;

-- Clean up: the view must be dropped before its base table.
drop view v1;
drop table tab1;
// Plain data-transfer objects mirroring the JSON response of the CDQ
// business-partner fetch API; used only for deserialization, hence the
// lower-camel-case property names matching the wire format.
// NOTE(review): this chunk appears whitespace-collapsed by extraction and is
// kept byte-for-byte as found.
namespace CatenaX.NetworkServices.Registration.Service.CDQ.Model { public class FetchBusinessPartnerDto { public string cdqId { get; set; } public string dataSource { get; set; } public Businesspartner businessPartner { get; set; } } public class Businesspartner { public Name[] names { get; set; } public Legalform legalForm { get; set; } public Identifier[] identifiers { get; set; } public object[] categories { get; set; } public Address[] addresses { get; set; } public string externalId { get; set; } public Formattedsaprecord formattedSapRecord { get; set; } public object[] types { get; set; } } public class Legalform { public string name { get; set; } } public class Formattedsaprecord { public string name1 { get; set; } public string legalEntity { get; set; } public string legalForm { get; set; } public string narp { get; set; } public string stceg { get; set; } public string country { get; set; } public string countryCode { get; set; } public string region { get; set; } public string regionCodeSap { get; set; } public string regionCode { get; set; } public string county { get; set; } public string countyCode { get; set; } public string city { get; set; } public string district { get; set; } public string street1 { get; set; } public string houseNum { get; set; } public string latitude { get; set; } public string longitude { get; set; } public string postalCode { get; set; } } public class Name { public Type type { get; set; } public string value { get; set; } } public class Type { public string url { get; set; } public string name { get; set; } public string technicalKey { get; set; } } public class Identifier { public Type type { get; set; } public string value { get; set; } public Status status { get; set; } } public class Status { public string technicalKey { get; set; } } public class Address { public Country country { get; set; } public Administrativearea[] administrativeAreas { get; set; } public Postcode[] postCodes { get; set; } public Locality[] 
localities { get; set; } public Thoroughfare[] thoroughfares { get; set; } public object[] premises { get; set; } public Geographiccoordinates geographicCoordinates { get; set; } public Type[] types { get; set; } public Formattedaddress formattedAddress { get; set; } } public class Country { public string shortName { get; set; } public string value { get; set; } } public class Geographiccoordinates { public float latitude { get; set; } public float longitude { get; set; } } public class Formattedaddress { public string country { get; set; } public string administrativeArea { get; set; } public string region { get; set; } public string regionCode { get; set; } public string locality { get; set; } public string district { get; set; } public string postalCode { get; set; } public string thoroughfare { get; set; } } public class Administrativearea { public string value { get; set; } public string shortName { get; set; } } public class Postcode { public string value { get; set; } public Type type { get; set; } } public class Locality { public string value { get; set; } } public class Thoroughfare { public Type type { get; set; } public string number { get; set; } public string value { get; set; } } }
//
// Numeric identifiers for punctual light types, passed to shaders as ints.
// based on: https://github.com/KhronosGroup/glTF-Sample-Viewer/blob/master/src/shaders/punctual.glsl#L20
//
export enum LightType {
    Directional = 0, // infinitely distant light, parallel rays
    Point = 1,       // omni-directional light at a position
    Spot = 2,        // cone-restricted point light
}
package opennlp.scalabha.ccg

/** A lexical entry: a word paired with the category assigned to it. */
case class LexicalEntry (word: String, cat: Cat)

/**
 * Builds a Map from words to the sets of categories associated with them,
 * given a flat input lexicon (one entry per line; blank lines and lines
 * starting with '#' are ignored).
 */
object Lexicon {
  lazy val catParser = new CatParser

  def apply (entries: List[String]) = {
    val parsed =
      for {
        line <- entries
        if line != "" && !line.startsWith("#")
      } yield catParser.parseLexEntry(line)
    parsed.groupBy(_.word).mapValues(_.map(_.cat).toSet)
  }
}

/** Raised when a word has no entry; stack trace suppressed for speed. */
class MissingLexicalEntryException (msg: String) extends Throwable(msg) {
  override def fillInStackTrace = this
}
// Container for the Auto Clicker machine GUI: one machine item slot plus the
// standard 3x9 player inventory and 9-slot hotbar. handleButtonClick relays
// GUI buttons to the tile (0-8 speed index, 9 toggles sneaking, 10/11 select
// left/right click), and detectAndSendChanges pushes a one-time state sync
// packet to the viewing player.
// NOTE(review): this chunk appears whitespace-collapsed by extraction and is
// kept byte-for-byte as found.
package shadows.click.block.gui; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.inventory.Container; import net.minecraft.inventory.Slot; import net.minecraft.item.ItemStack; import net.minecraftforge.items.SlotItemHandler; import shadows.click.ClickMachine; import shadows.click.block.TileAutoClick; import shadows.click.net.MessageUpdateGui; import shadows.click.util.VanillaPacketDispatcher; public class ContainerAutoClick extends Container { TileAutoClick tile; EntityPlayer player; public ContainerAutoClick(TileAutoClick tile, EntityPlayer player) { this.tile = tile; this.player = player; this.addSlotToContainer(new SlotItemHandler(tile.getHandler(), 0, 8, 35)); for (int i1 = 0; i1 < 3; ++i1) { for (int k1 = 0; k1 < 9; ++k1) { this.addSlotToContainer(new Slot(player.inventory, k1 + i1 * 9 + 9, 8 + k1 * 18, 84 + i1 * 18)); } } for (int j1 = 0; j1 < 9; ++j1) { this.addSlotToContainer(new Slot(player.inventory, j1, 8 + j1 * 18, 142)); } } @Override public boolean canInteractWith(EntityPlayer player) { return true; } public void handleButtonClick(int button) { if (button < 12) { if (button < 9) tile.setSpeedIndex(button); else if (button == 9) tile.setSneaking(!tile.isSneaking()); else tile.setRightClicking(button == 11); } } @Override public ItemStack transferStackInSlot(EntityPlayer player, int slotIndex) { ItemStack transferred = ItemStack.EMPTY; Slot slot = this.inventorySlots.get(slotIndex); int otherSlots = this.inventorySlots.size() - 36; if (slot != null && slot.getHasStack()) { ItemStack current = slot.getStack(); transferred = current.copy(); if (slotIndex < otherSlots) { if (!this.mergeItemStack(current, otherSlots, this.inventorySlots.size(), true)) { return ItemStack.EMPTY; } } else if (!this.mergeItemStack(current, 0, otherSlots, false)) { return ItemStack.EMPTY; } slot.onSlotChanged(); } return transferred; } @Override public void onContainerClosed(EntityPlayer player) { if 
(!player.world.isRemote) VanillaPacketDispatcher.dispatchTEToPlayer(tile, player); } boolean sent = false; @Override public void detectAndSendChanges() { super.detectAndSendChanges(); if (!sent && tile.hasWorld() && !tile.getWorld().isRemote) { sent = true; ClickMachine.NETWORK.sendTo(new MessageUpdateGui(tile), (EntityPlayerMP) player); } } }
// Robolectric smoke test for MainActivity under Hilt: the Hilt rule (order 0)
// injects dependencies before the activity-scenario rule (order 1) launches
// the activity; the single test simply drives the launch ("happy path").
// NOTE(review): this chunk appears whitespace-collapsed by extraction and is
// kept byte-for-byte as found.
package com.ctrlaccess.moviebuff.ui import androidx.test.ext.junit.rules.ActivityScenarioRule import dagger.hilt.android.testing.HiltAndroidRule import dagger.hilt.android.testing.HiltAndroidTest import dagger.hilt.android.testing.HiltTestApplication import org.junit.After import org.junit.Before import org.junit.Rule import org.junit.Test import org.junit.runner.RunWith import org.robolectric.RobolectricTestRunner import org.robolectric.annotation.Config import org.robolectric.annotation.LooperMode @HiltAndroidTest @RunWith(RobolectricTestRunner::class) @LooperMode(LooperMode.Mode.PAUSED) @Config(application = HiltTestApplication::class) class RoboMainActivityTest { @get:Rule(order = 0) var hiltRule = HiltAndroidRule(this) @get:Rule(order = 1) var activityScenarioRule: ActivityScenarioRule<MainActivity> = ActivityScenarioRule(MainActivity::class.java) @Before fun setUp() { hiltRule.inject() } @After fun tearDown() { } @Test fun `happy path`() { activityScenarioRule.scenario } }
<?php
// OAuth-style consent form model: exposes the public/read/write permission
// scopes an app may request, and narrows them to a requested subset via
// withSpecifiedScope(). Keys suffixed :read/:write are current; bare keys are
// kept only for backward compatibility and labelled DEPRECATED.
//
// NOTE(review): rules() lists 'scope' as required, but no $scope property is
// declared on this class (only publicScope/readScope/writeScope) — confirm
// whether validation of this model ever passes, or whether the rule should
// name the real attributes.
// NOTE(review): readScope() defines the key 'materials:read' twice (the later
// entry silently overwrites the earlier, identical one) — the duplicate
// should be removed.
// NOTE(review): this chunk appears whitespace-collapsed by extraction and is
// otherwise kept byte-for-byte as found.
namespace frontend\models; use Yii; use yii\base\Model; use common\models\User; use frontend\models\WenetApp; class AuthorisationForm extends Model { public $appId; public $publicScope = []; public $readScope = []; public $writeScope = []; public $userId; public $allowedPublicScope; public $allowedWriteScope; public $allowedReadScope; /** * {@inheritdoc} */ public function rules() { return [ [['appId', 'scope', 'userId'], 'required'], [['allowedPublicScope', 'allowedWriteScope', 'allowedReadScope'], 'safe'] ]; } public static function scope() { return [ 'public' => self::publicScope(), 'read' => self::readScope(), 'write' => self::writeScope(), ]; } public static function publicScope() { return [ 'id:read' => Yii::t('scope', 'ID'), 'first_name:read' => Yii::t('scope', 'First name'), 'last_name:read' => Yii::t('scope', 'Last name'), 'id' => Yii::t('scope', 'ID: DEPRECATED'), 'first_name' => Yii::t('scope', 'First name: DEPRECATED'), 'last_name' => Yii::t('scope', 'Last name: DEPRECATED'), 'conversations' => Yii::t('scope', 'Conversation logging: DEPRECATED'), ]; } public static function readScope() { return [ 'middle_name:read' => Yii::t('scope', 'Middle name'), 'prefix_name:read' => Yii::t('scope', 'Prefix name'), 'suffix_name:read' => Yii::t('scope', 'Suffix name'), 'birth_date:read' => Yii::t('scope', 'Birthdate'), 'gender:read' => Yii::t('scope', 'Gender'), 'email:read' => Yii::t('scope', 'Email'), 'phone_number:read' => Yii::t('scope', 'Phone number'), 'locale:read' => Yii::t('scope', 'Locale'), 'avatar:read' => Yii::t('scope', 'Avatar'), 'nationality:read' => Yii::t('scope', 'Nationality'), 'occupation:read' => Yii::t('scope', 'Occupation'), 'norms:read' => Yii::t('scope', 'Norms'), 'activities:read' => Yii::t('scope', 'Activities'), 'locations:read' => Yii::t('scope', 'Locations'), 'relationships:read' => Yii::t('scope', 'Relationships'), 'behaviours:read' => Yii::t('scope', 'Behaviours'), 'materials:read' => Yii::t('scope', 'Materials'), 
'competences:read' => Yii::t('scope', 'Competences'), 'materials:read' => Yii::t('scope', 'Materials'), 'meanings:read' => Yii::t('scope', 'Meanings'), 'middle_name' => Yii::t('scope', 'Middle name: DEPRECATED'), 'prefix_name' => Yii::t('scope', 'Prefix name: DEPRECATED'), 'suffix_name' => Yii::t('scope', 'Suffix name: DEPRECATED'), 'birthdate' => Yii::t('scope', 'Birthdate: DEPRECATED'), 'gender' => Yii::t('scope', 'Gender: DEPRECATED'), 'nationality' => Yii::t('scope', 'Nationality: DEPRECATED'), 'locale' => Yii::t('scope', 'Language: DEPRECATED'), 'phone_number' => Yii::t('scope', 'Phone number: DEPRECATED'), ]; } public static function writeScope() { return [ 'first_name:write' => Yii::t('scope', 'First name'), 'last_name:write' => Yii::t('scope', 'Last name'), 'middle_name:write' => Yii::t('scope', 'Middle name'), 'prefix_name:write' => Yii::t('scope', 'Prefix name'), 'suffix_name:write' => Yii::t('scope', 'Suffix name'), 'birth_date:write' => Yii::t('scope', 'Birthdate'), 'gender:write' => Yii::t('scope', 'Gender'), 'email:write' => Yii::t('scope', 'Email'), 'phone_number:write' => Yii::t('scope', 'Phone number'), 'locale:write' => Yii::t('scope', 'Locale'), 'avatar:write' => Yii::t('scope', 'Avatar'), 'nationality:write' => Yii::t('scope', 'Nationality'), 'occupation:write' => Yii::t('scope', 'Occupation'), 'norms:write' => Yii::t('scope', 'Norms'), 'activities:write' => Yii::t('scope', 'Activities'), 'locations:write' => Yii::t('scope', 'Locations'), 'relationships:write' => Yii::t('scope', 'Relationships'), 'behaviours:write' => Yii::t('scope', 'Behaviours'), 'materials:write' => Yii::t('scope', 'Materials'), 'competences:write' => Yii::t('scope', 'Competences'), 'meanings:write' => Yii::t('scope', 'Meanings'), 'conversation:write' => Yii::t('scope', 'Conversation'), 'data:write' => Yii::t('scope', 'Data'), 'write_feed' => Yii::t('scope', 'Write data feed: DEPRECATED'), ]; } public function withCompleteScope() { $this->publicScope = self::publicScope(); 
$this->readScope = self::readScope(); $this->writeScope = self::writeScope(); return $this; } public function withSpecifiedScope($requestedScope) { $this->publicScope = self::publicScope(); foreach (self::readScope() as $permission => $label) { if (in_array($permission, $requestedScope)) { $this->readScope[$permission] = $label; } } foreach (self::writeScope() as $permission => $label) { if (in_array($permission, $requestedScope)) { $this->writeScope[$permission] = $label; } } return $this; } public function user() { return User::findOne($this->userId); } public function app() { return WenetApp::findOne($this->appId); } }
-- | Hashing of a Haskell buffer's header (imports + LANGUAGE pragmas) so a
-- completion cache can be invalidated when the header changes: 'imports'
-- groups the import section's lines, 'parseImports' parses each via the GHC
-- API (returning Nothing on failure), and 'calcHash' folds the normalized
-- pragmas + imports into an Int32.
-- NOTE(review): "listLANGAUGE" and the menu label "LANGAUGE" misspell
-- LANGUAGE; callers elsewhere may depend on the name, so it is left as-is.
-- NOTE(review): this chunk appears whitespace-collapsed by extraction and is
-- kept byte-for-byte as found.
{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE OverloadedStrings #-} module Hassistant.Header where import qualified GHC import qualified Exception import qualified DynFlags import qualified Util import qualified Outputable import qualified GHC.Paths import Control.Applicative import qualified Data.Text as T import qualified Data.Attoparsec.Text as A import Data.Maybe(catMaybes) import Data.List(sort,nub) import Data.Either (rights) import Data.Int(Int32) import Hassistant.Common import Hassistant.Parser imports :: T.Text -> [String] imports = go [] . dropWhile (not . importLine). rights . map (A.parseOnly dropCommentP) . T.lines where importLine ('i':'m':'p':'o':'r':'t':o) = null o || head o == ' ' importLine _ = False go a [] = [unlines $ reverse a] go a (l:ls) | null l = go a ls | ' ' == head l = go (l:a) ls | importLine l = (unlines $ reverse a) : go [l] ls | otherwise = [unlines $ reverse a] parseImports :: String -> GHC.Ghc (Maybe (GHC.ImportDecl GHC.RdrName)) parseImports i = (Just <$> GHC.parseImportDecl i) `Exception.gcatch` handler where handler (_::Exception.SomeException) = return Nothing calcHash :: T.Text -> IO Int32 calcHash cont = GHC.runGhc (Just GHC.Paths.libdir) $ do dyn <- GHC.getSessionDynFlags imps <- sort . map (Outputable.showPpr dyn) . catMaybes <$> mapM parseImports (imports cont) let langs = map T.unpack . nub . sort $ languages cont return . Util.hashString $ unlines (langs ++ imps) listLANGAUGE :: [Candidate] listLANGAUGE = let obj s = (candidate $ T.pack s) { menu = Just "LANGAUGE" } in concatMap (\(s,_,_) -> [obj s, obj $ "No" ++ s]) DynFlags.xFlags languages :: T.Text -> [T.Text] languages = concat . rights . map (A.parseOnly languageP) . T.lines
// Unit test for goalg.NewHeap: builds a min-heap of ints via a custom
// less-func, then walks Push/Pop/Top/Len through a full drain, including
// interleaved pushes and the empty-heap case where Top/Pop return nil.
// NOTE(review): this chunk appears whitespace-collapsed by extraction and is
// kept byte-for-byte as found.
package goalg_test import ( "testing" "github.com/ericpai/goalg" "github.com/stretchr/testify/assert" ) func TestHeap(t *testing.T) { h := goalg.NewHeap([]interface{}{5, 4, 3, 2, 1}, func(i, j interface{}) bool { return i.(int) < j.(int) }) assert.Equal(t, 5, h.Len()) assert.Equal(t, 1, h.Top()) assert.Equal(t, 1, h.Pop()) assert.Equal(t, 4, h.Len()) assert.Equal(t, 2, h.Top()) assert.Equal(t, 2, h.Pop()) h.Push(1) assert.Equal(t, 4, h.Len()) assert.Equal(t, 1, h.Top()) h.Push(6) assert.Equal(t, 5, h.Len()) assert.Equal(t, 1, h.Top()) assert.Equal(t, 1, h.Pop()) assert.Equal(t, 4, h.Len()) assert.Equal(t, 3, h.Top()) assert.Equal(t, 3, h.Pop()) assert.Equal(t, 3, h.Len()) assert.Equal(t, 4, h.Top()) assert.Equal(t, 4, h.Pop()) assert.Equal(t, 2, h.Len()) assert.Equal(t, 5, h.Top()) assert.Equal(t, 5, h.Pop()) assert.Equal(t, 1, h.Len()) assert.Equal(t, 6, h.Top()) assert.Equal(t, 6, h.Pop()) assert.Equal(t, 0, h.Len()) assert.Equal(t, nil, h.Top()) assert.Equal(t, nil, h.Pop()) }
# NOTE(review): this module is written in very old (Python 1.x-era) syntax —
# backquote repr (`x`), print statements, dict.has_key(), multi-argument
# list.append(), string-interface idioms. It will not parse under Python 3
# and is kept byte-for-byte pending a deliberate port.
# NOTE(review): the chunk also appears whitespace-collapsed by extraction:
# several comment/code seams are joined onto single physical lines (e.g. the
# "# Function dump(x) prints..." comment running into the dumpobject code),
# so the line breaks below do not reflect the original layout and the text
# as-is is not runnable. Do not attempt mechanical fixes without the
# original line structure.
# persist.py # # Implement limited persistence. # # Simple interface: # persist.save() save __main__ module on file (overwrite) # persist.load() load __main__ module from file (merge) # # These use the filename persist.defaultfile, initialized to 'wsrestore.py'. # # A raw interface also exists: # persist.writedict(dict, fp) save dictionary to open file # persist.readdict(dict, fp) read (merge) dictionary from open file # # Internally, the function dump() and a whole bunch of support of functions # traverse a graph of objects and print them in a restorable form # (which happens to be a Python module). # # XXX Limitations: # - Volatile objects are dumped as strings: # - open files, windows etc. # - Other 'obscure' objects are dumped as strings: # - classes, instances and methods # - compiled regular expressions # - anything else reasonably obscure (e.g., capabilities) # - type objects for obscure objects # - It's slow when there are many of lists or dictionaries # (This could be fixed if there were a quick way to compute a hash # function of any object, even if recursive) defaultfile = 'wsrestore.py' def save(): import __main__ import os # XXX On SYSV, if len(defaultfile) >= 14, this is wrong! backup = defaultfile + '~' try: os.unlink(backup) except os.error: pass try: os.rename(defaultfile, backup) except os.error: pass fp = open(defaultfile, 'w') writedict(__main__.__dict__, fp) fp.close() def load(): import __main__ fp = open(defaultfile, 'r') readdict(__main__.__dict__, fp) def writedict(dict, fp): import sys savestdout = sys.stdout try: sys.stdout = fp dump(dict) # Writes to sys.stdout finally: sys.stdout = savestdout def readdict(dict, fp): contents = fp.read() globals = {} exec(contents, globals) top = globals['top'] for key in top.keys(): if dict.has_key(key): print 'warning:', key, 'not overwritten' else: dict[key] = top[key] # Function dump(x) prints (on sys.stdout!) 
a sequence of Python statements # that, when executed in an empty environment, will reconstruct the # contents of an arbitrary dictionary. import sys # Name used for objects dict on output. # FUNNYNAME = FN = 'A' # Top-level function. Call with the object you want to dump. # def dump(x): types = {} stack = [] # Used by test for recursive objects print FN, '= {}' topuid = dumpobject(x, types, stack) print 'top =', FN, '[', `topuid`, ']' # Generic function to dump any object. # dumpswitch = {} # def dumpobject(x, types, stack): typerepr = `type(x)` if not types.has_key(typerepr): types[typerepr] = {} typedict = types[typerepr] if dumpswitch.has_key(typerepr): return dumpswitch[typerepr](x, typedict, types, stack) else: return dumpbadvalue(x, typedict, types, stack) # Generic function to dump unknown values. # This assumes that the Python interpreter prints such values as # <foo object at xxxxxxxx>. # The object will be read back as a string: '<foo object at xxxxxxxx>'. # In some cases it may be possible to fix the dump manually; # to ease the editing, these cases are labeled with an XXX comment. 
# def dumpbadvalue(x, typedict, types, stack): xrepr = `x` if typedict.has_key(xrepr): return typedict[xrepr] uid = genuid() typedict[xrepr] = uid print FN, '[', `uid`, '] =', `xrepr`, '# XXX' return uid # Generic function to dump pure, simple values, except strings # def dumpvalue(x, typedict, types, stack): xrepr = `x` if typedict.has_key(xrepr): return typedict[xrepr] uid = genuid() typedict[xrepr] = uid print FN, '[', `uid`, '] =', `x` return uid # Functions to dump string objects # def dumpstring(x, typedict, types, stack): # XXX This can break if strings have embedded '\0' bytes # XXX because of a bug in the dictionary module if typedict.has_key(x): return typedict[x] uid = genuid() typedict[x] = uid print FN, '[', `uid`, '] =', `x` return uid # Function to dump type objects # typeswitch = {} class some_class: def method(self): pass some_instance = some_class() # def dumptype(x, typedict, types, stack): xrepr = `x` if typedict.has_key(xrepr): return typedict[xrepr] uid = genuid() typedict[xrepr] = uid if typeswitch.has_key(xrepr): print FN, '[', `uid`, '] =', typeswitch[xrepr] elif x == type(sys): print 'import sys' print FN, '[', `uid`, '] = type(sys)' elif x == type(sys.stderr): print 'import sys' print FN, '[', `uid`, '] = type(sys.stderr)' elif x == type(dumptype): print 'def some_function(): pass' print FN, '[', `uid`, '] = type(some_function)' elif x == type(some_class): print 'class some_class: pass' print FN, '[', `uid`, '] = type(some_class)' elif x == type(some_instance): print 'class another_class: pass' print 'some_instance = another_class()' print FN, '[', `uid`, '] = type(some_instance)' elif x == type(some_instance.method): print 'class yet_another_class:' print ' def method(): pass' print 'another_instance = yet_another_class()' print FN, '[', `uid`, '] = type(another_instance.method)' else: # Unknown type print FN, '[', `uid`, '] =', `xrepr`, '# XXX' return uid # Initialize the typeswitch # for x in None, 0, 0.0, '', (), [], {}: 
typeswitch[`type(x)`] = 'type(' + `x` + ')' for s in 'type(0)', 'abs', '[].append': typeswitch[`type(eval(s))`] = 'type(' + s + ')' # Dump a tuple object # def dumptuple(x, typedict, types, stack): item_uids = [] xrepr = '' for item in x: item_uid = dumpobject(item, types, stack) item_uids.append(item_uid) xrepr = xrepr + ' ' + item_uid del stack[-1:] if typedict.has_key(xrepr): return typedict[xrepr] uid = genuid() typedict[xrepr] = uid print FN, '[', `uid`, '] = (', for item_uid in item_uids: print FN, '[', `item_uid`, '],', print ')' return uid # Dump a list object # def dumplist(x, typedict, types, stack): # Check for recursion for x1, uid1 in stack: if x is x1: return uid1 # Check for occurrence elsewhere in the typedict for uid1 in typedict.keys(): if x is typedict[uid1]: return uid1 # This uses typedict differently! uid = genuid() typedict[uid] = x print FN, '[', `uid`, '] = []' stack.append(x, uid) item_uids = [] for item in x: item_uid = dumpobject(item, types, stack) item_uids.append(item_uid) del stack[-1:] for item_uid in item_uids: print FN, '[', `uid`, '].append(', FN, '[', `item_uid`, '])' return uid # Dump a dictionary object # def dumpdict(x, typedict, types, stack): # Check for recursion for x1, uid1 in stack: if x is x1: return uid1 # Check for occurrence elsewhere in the typedict for uid1 in typedict.keys(): if x is typedict[uid1]: return uid1 # This uses typedict differently! 
uid = genuid() typedict[uid] = x print FN, '[', `uid`, '] = {}' stack.append(x, uid) item_uids = [] for key in x.keys(): val_uid = dumpobject(x[key], types, stack) item_uids.append(key, val_uid) del stack[-1:] for key, val_uid in item_uids: print FN, '[', `uid`, '][', `key`, '] =', print FN, '[', `val_uid`, ']' return uid # Dump a module object # def dumpmodule(x, typedict, types, stack): xrepr = `x` if typedict.has_key(xrepr): return typedict[xrepr] from string import split # `x` has the form <module 'foo'> name = xrepr[9:-2] uid = genuid() typedict[xrepr] = uid print 'import', name print FN, '[', `uid`, '] =', name return uid # Initialize dumpswitch, a table of functions to dump various objects, # indexed by `type(x)`. # for x in None, 0, 0.0: dumpswitch[`type(x)`] = dumpvalue for x, f in ('', dumpstring), (type(0), dumptype), ((), dumptuple), \ ([], dumplist), ({}, dumpdict), (sys, dumpmodule): dumpswitch[`type(x)`] = f # Generate the next unique id; a string consisting of digits. # The seed is stored as seed[0]. # seed = [0] # def genuid(): x = seed[0] seed[0] = seed[0] + 1 return `x`
exports.up = function(knex) { return knex.schema.createTable('users', tbl => { tbl.increments("id"); tbl.string('username', 128).notNullable().unique(); tbl.string('password', 128).notNullable(); tbl.string('firstName', 128).notNullable(); tbl.string('lastName', 128).notNullable(); tbl.string('city', 255); tbl.string('state', 50); tbl.string('services', 255); tbl.string('rates', 255); tbl.string('Available', 255); tbl.boolean('canDrive', false); tbl.boolean('isNanny', true); }) .createTable('requests', tbl => { tbl.increments(); tbl .integer("requesterUserID") .references("id") .inTable("users") .onDelete("cascade") .onUpdate("cascade"); tbl .integer("nannyUserID") .references("id") .inTable("users") .onDelete("cascade") .onUpdate("cascade"); tbl .boolean("accepted").notNullable(); tbl.string('name', 128).notNullable(); tbl.string('city',128).notNullable(); tbl.string('state', 50).notNullable(); tbl.string('numberOfKids', 25).notNullable(); tbl.string('kidsAges', 128).notNullable(); tbl.string('timeNeeded', 128).notNullable(); }) }; exports.down = function(knex) { return knex.schema.dropTableIfExists('requests') .dropTableIfExists('users') };
module Chouette
  class LineNotice < Chouette::ActiveRecord
    before_validation :define_line_referential, on: :create

    has_metadata
    include LineReferentialSupport
    include ObjectidSupport

    belongs_to :line_provider, required: true

    # We will protect the notices that are used by vehicle_journeys
    scope :unprotected, -> {
      subquery = CrossReferentialIndexEntry.where(relation_name: :line_notices).select(:parent_id).distinct
      where.not("id in (#{subquery.to_sql})" )
    }

    # Case-insensitive title search for autocomplete widgets.
    #
    # Fixed: the query text was interpolated straight into the SQL string;
    # sanitize_sql_like only escapes the LIKE wildcards (% and _), not
    # quotes, so a crafted query could inject SQL. The value is now passed
    # as a bound placeholder instead.
    scope :autocomplete, ->(q) {
      if q.present?
        where("title ILIKE ?", "%#{sanitize_sql_like(q)}%")
      else
        all
      end
    }

    scope :by_provider, ->(line_provider) { where(line_provider_id: line_provider.id) }

    belongs_to :line_referential, inverse_of: :line_notices
    has_and_belongs_to_many :lines, :class_name => 'Chouette::Line', :join_table => "public.line_notices_lines"
    has_many_scattered :vehicle_journeys

    validates_presence_of :title
    alias_attribute :name, :title

    # Attributes that may legitimately be stored as NULL.
    def self.nullable_attributes
      [:content, :import_xml]
    end

    # A notice is protected while any vehicle journey still references it.
    def protected?
      vehicle_journeys.exists?
    end

    private

    def define_line_referential
      # TODO Improve performance ?
      self.line_referential ||= line_provider&.line_referential
    end
  end
end
// // main.cpp // basicCube // // Created by George Papagiannakis on 23/10/12. // Copyright (c) 2012 University Of Crete & FORTH. All rights reserved. // // basic STL streams #include <iostream> // GLEW lib // http://glew.sourceforge.net/basic.html #include <GL/glew.h> //Simple DirectMedia Layer is a cross-platform development library //designed to provide low level access to audio, keyboard, mouse, joystick, //and graphics hardware via OpenGL //Link : https://www.libsdl.org/ #include <SDL2/SDL.h> //SDL Wrapper for OpenGL #include <SDL2/SDL_opengl.h> // GLM lib // http://glm.g-truc.net/api/modules.html #define GLM_SWIZZLE #define GLM_FORCE_INLINE #include <glm/glm.hpp> #include <glm/gtx/string_cast.hpp> #include <glm/gtc/matrix_transform.hpp> #include <glm/gtc/quaternion.hpp> #include <glm/gtc/type_ptr.hpp> #include <glm/gtc/random.hpp> //local #include "glGA/glGAHelper.h" // global variables int windowWidth = 1024, windowHeight = 768; GLuint program; GLuint vao; GLuint buffer; bool wireFrame = false; typedef glm::vec4 color4; typedef glm::vec4 point4; int Index = 0; const int NumVertices = 36; //(6 faces)(2 triangles/face)(3 vertices/triangle) SDL_Window* gWindow = NULL; SDL_GLContext gContext; // Modelling arrays point4 points[NumVertices]; color4 colors[NumVertices]; glm::vec3 normals[NumVertices]; glm::mat4 translate(1.0); GLuint TranslateMat; // Our function prototypes bool init(); void close(); void initCube(); void displayCube(); void colorcube(); void quad(int a, int b, int c, int d); // Vertices of a unit cube centered at origin, sides aligned with axes point4 vertices[8] = { point4(-0.5, -0.5, 0.5, 1.0), point4(-0.5, 0.5, 0.5, 1.0), point4(0.5, 0.5, 0.5, 1.0), point4(0.5, -0.5, 0.5, 1.0), point4(-0.5, -0.5, -0.5, 1.0), point4(-0.5, 0.5, -0.5, 1.0), point4(0.5, 0.5, -0.5, 1.0), point4(0.5, -0.5, -0.5, 1.0) }; // RGBA olors color4 vertex_colors[8] = { color4(0.0, 0.0, 0.0, 1.0), // black color4(1.0, 0.0, 0.0, 1.0), // red color4(1.0, 1.0, 0.0, 1.0), // 
yellow color4(0.0, 1.0, 0.0, 1.0), // green color4(0.0, 0.0, 1.0, 1.0), // blue color4(1.0, 0.0, 1.0, 1.0), // magenta color4(1.0, 1.0, 1.0, 1.0), // white color4(0.0, 1.0, 1.0, 1.0) // cyan }; bool init() { //Init flag bool success = true; //Basic Setup if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER) != 0) { std::cout << "SDL could not initialize! SDL Error: " << SDL_GetError() << std::endl; success = false; } else { std::cout << std::endl << "Yay! Initialized SDL succesfully!" << std::endl; //Use OpenGL Core 3.2 SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS, SDL_GL_CONTEXT_FORWARD_COMPATIBLE_FLAG); SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE); SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1); //SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16); //SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 8); SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3); SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2); //Create Window SDL_DisplayMode current; SDL_GetCurrentDisplayMode(0, &current); gWindow = SDL_CreateWindow("basicCube", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, windowWidth, windowHeight, SDL_WINDOW_OPENGL); if (gWindow == NULL) { std::cout << "Window could not be created! SDL Error: " << SDL_GetError() << std::endl; success = false; } else { std::cout << std::endl << "Yay! Created window sucessfully!" << std::endl << std::endl; //Create context gContext = SDL_GL_CreateContext(gWindow); if (gContext == NULL) { std::cout << "OpenGL context could not be created! SDL Error: " << SDL_GetError() << std::endl; success = false; } else { //Initialize GLEW glewExperimental = GL_TRUE; GLenum glewError = glewInit(); if (glewError != GLEW_OK) { std::cout << "Error initializing GLEW! " << glewGetErrorString(glewError) << std::endl; } //Use Vsync if (SDL_GL_SetSwapInterval(1) < 0) { std::cout << "Warning: Unable to set Vsync! 
SDL Error: " << SDL_GetError << std::endl; } } } } return success; } void close() { // Clean up SDL_GL_DeleteContext(gContext); SDL_DestroyWindow(gWindow); SDL_Quit(); } // quad generates two triangles for each face and assigns colors // to the vertices void quad(int a, int b, int c, int d) { colors[Index] = vertex_colors[a]; points[Index] = vertices[a]; Index++; colors[Index] = vertex_colors[b]; points[Index] = vertices[b]; Index++; colors[Index] = vertex_colors[c]; points[Index] = vertices[c]; Index++; colors[Index] = vertex_colors[a]; points[Index] = vertices[a]; Index++; colors[Index] = vertex_colors[c]; points[Index] = vertices[c]; Index++; colors[Index] = vertex_colors[d]; points[Index] = vertices[d]; Index++; } // generate 12 triangles: 36 vertices and 36 colors void colorcube() { quad(1, 0, 3, 2); quad(2, 3, 7, 6); quad(3, 0, 4, 7); quad(6, 5, 1, 2); quad(4, 5, 6, 7); quad(5, 4, 0, 1); } void initCube() { //generate and bind a VAO for the 3D axes glGenVertexArrays(1, &vao); glBindVertexArray(vao); colorcube(); // Load shaders and use the resulting shader program program = LoadShaders("vshaderCube.vert", "fshaderCube.frag"); glUseProgram(program); // Create and initialize a buffer object on the server side (GPU) //GLuint buffer; glGenBuffers(1, &buffer); glBindBuffer(GL_ARRAY_BUFFER, buffer); glBufferData(GL_ARRAY_BUFFER, sizeof(points) + sizeof(colors), NULL, GL_STATIC_DRAW); glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(points), points); glBufferSubData(GL_ARRAY_BUFFER, sizeof(points), sizeof(colors), colors); // set up vertex arrays GLuint vPosition = glGetAttribLocation(program, "vPosition"); glEnableVertexAttribArray(vPosition); glVertexAttribPointer(vPosition, 4, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0)); GLuint vColor = glGetAttribLocation(program, "vColor"); glEnableVertexAttribArray(vColor); glVertexAttribPointer(vColor, 4, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(sizeof(points))); TranslateMat = glGetUniformLocation(program, "translate"); 
glUniformMatrix4fv(TranslateMat, 1, GL_FALSE, glm::value_ptr(translate)); glEnable(GL_DEPTH_TEST); glClearColor(0.0, 0.0, 0.0, 1.0); // only one VAO can be bound at a time, so disable it to avoid altering it accidentally glBindVertexArray(0); } void displayCube() { glUseProgram(program); glBindVertexArray(vao); glDisable(GL_CULL_FACE); glPushAttrib(GL_ALL_ATTRIB_BITS); if (wireFrame) glPolygonMode(GL_FRONT_AND_BACK, GL_LINE); else glPolygonMode(GL_FRONT_AND_BACK, GL_FILL); translate = glm::translate(glm::mat4(1.0), glm::vec3(-0.5, 0.0, 0.0)); glUniformMatrix4fv(TranslateMat, 1, GL_FALSE, glm::value_ptr(translate)); glDrawArrays(GL_TRIANGLES, 0, NumVertices); glPopAttrib(); glBindVertexArray(0); } int main(int, char**) { // test a simple GLM vector glm::vec4 origin(0.0f, 0.0f, 0.0f, 1.0f); // test a simple GLM matrix glm::mat4 mat(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0); int running = GL_TRUE; if (!init()) { std::cout << "Error Initializing! Sorry " << std::endl; exit(EXIT_FAILURE); } //Sets mouse position SDL_WarpMouseInWindow(gWindow, windowWidth / 2, windowHeight / 2); // Enable depth test glEnable(GL_DEPTH_TEST); // Accept fragment if it closer to the camera than the former one glDepthFunc(GL_LESS); // init Scene initCube(); //GLFW main loop while (running) { SDL_Event event; while (SDL_PollEvent(&event)) { if (event.type == SDL_KEYDOWN && event.key.keysym.sym == SDLK_ESCAPE) { running = GL_FALSE; } if (event.type == SDL_QUIT) { running = GL_FALSE; } if (event.type == SDL_KEYDOWN) { if (event.key.keysym.sym == SDLK_w) { if (wireFrame) { wireFrame = false; } else { wireFrame = true; } } } } glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); glClearColor(0.0, 0.0, 0.3, 1.0); //black color // call function to render our scene displayCube(); SDL_GL_SwapWindow(gWindow); } //close OpenGL window and terminate GLFW close(); std::cout << "Hello, GLFW, GLEW, GLM Graphics World!\n"; exit(EXIT_SUCCESS); }
from django.test import Client, TestCase
from django.urls import reverse

from ..models import MoviePanel, MovieGenre, Movie


def _build_catalogue():
    """Create the panel/genre/movie trio shared by every test class.

    Returns a ``(panel, genre, movie)`` tuple; the movie is attached to
    the genre and both belong to the panel.
    """
    panel = MoviePanel.objects.create(name='test panel')
    genre = MovieGenre.objects.create(name='test genre', moviepanel=panel)
    movie = Movie.objects.create(name='test movie',
                                 description='test description',
                                 moviepanel=panel)
    movie.moviegenre.add(genre)
    return panel, genre, movie


class MoviePanelView(TestCase):
    """Exercise the movie-panel list and detail endpoints."""

    def setUp(self):
        self.client = Client()
        self.moviepanel, self.moviegenre, self.movie = _build_catalogue()
        self.response_list = self.client.get(reverse('moviepanel:moviepanel-list'))
        self.response = self.client.get(
            reverse('moviepanel:moviepanel-detail',
                    kwargs={'slug': self.moviepanel.slug}))

    def _responses(self):
        # Both endpoints are asserted identically; iterate over the pair.
        return (self.response_list, self.response)

    def test_moviepanel_response(self):
        for response in self._responses():
            self.assertEqual(response.status_code, 200)

    def test_moviepanel_response_invalid(self):
        for response in self._responses():
            self.assertNotEqual(response.status_code, 404)

    def test_moviepanel_content(self):
        for response in self._responses():
            body = response.content.decode('utf-8')
            self.assertIn(self.moviepanel.name, body)
            self.assertIn('moviegenres', body)
            self.assertIn('moviegenre', body)
            self.assertIn('movies', body)

    def test_moviepanel_content_invalid(self):
        for response in self._responses():
            self.assertNotEqual('{}', response.content.decode('utf-8'))


class MovieGenreView(TestCase):
    """Exercise the movie-genre list and detail endpoints."""

    def setUp(self):
        self.client = Client()
        self.moviepanel, self.moviegenre, self.movie = _build_catalogue()
        self.response_list = self.client.get(reverse('moviepanel:moviegenre-list'))
        self.response = self.client.get(
            reverse('moviepanel:moviegenre-detail',
                    kwargs={'slug': self.moviegenre.slug}))

    def _responses(self):
        return (self.response_list, self.response)

    def test_moviegenre_response(self):
        for response in self._responses():
            self.assertEqual(response.status_code, 200)

    def test_moviegenre_response_invalid(self):
        for response in self._responses():
            self.assertNotEqual(response.status_code, 404)

    def test_moviegenre_content(self):
        for response in self._responses():
            body = response.content.decode('utf-8')
            self.assertIn(self.moviegenre.name, body)
            self.assertIn('moviepanel', body)
            self.assertIn('movies', body)

    def test_moviegenre_content_invalid(self):
        for response in self._responses():
            self.assertNotEqual('{}', response.content.decode('utf-8'))


class MovieView(TestCase):
    """Exercise the movie list and detail endpoints."""

    def setUp(self):
        self.client = Client()
        self.moviepanel, self.moviegenre, self.movie = _build_catalogue()
        self.response_list = self.client.get(reverse('moviepanel:movie-list'))
        self.response = self.client.get(
            reverse('moviepanel:movie-detail',
                    kwargs={'pk': self.movie.pk}))

    def _responses(self):
        return (self.response_list, self.response)

    def test_movie_response(self):
        for response in self._responses():
            self.assertEqual(response.status_code, 200)

    def test_movie_response_invalid(self):
        for response in self._responses():
            self.assertNotEqual(response.status_code, 404)

    def test_movie_content(self):
        for response in self._responses():
            body = response.content.decode('utf-8')
            self.assertIn(self.movie.name, body)
            self.assertIn('moviepanel', body)
            self.assertIn('moviegenre', body)

    def test_movie_content_invalid(self):
        for response in self._responses():
            self.assertNotEqual('{}', response.content.decode('utf-8'))
/** ****************************************************************************** * @file ADPD105.c * @brief Source file for ADPD105 photometric front end. * @version V0.1 * @author ADI * @date April 2017 * @par Revision History: * - V0.1, April 2017: initial version. * ******************************************************************************* * Copyright 2017(c) Analog Devices, Inc. * * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * - Neither the name of Analog Devices, Inc. nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * - The use of this software may or may not infringe the patent rights * of one or more patent holders. This license does not release you * from the requirement that you obtain separate licenses from these * patent holders to use this software. * - Use of the software either in source or binary form, must be run * on or directly connected to an Analog Devices Inc. component. * * THIS SOFTWARE IS PROVIDED BY ANALOG DEVICES "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, NON-INFRINGEMENT, MERCHANTABILITY * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
* IN NO EVENT SHALL ANALOG DEVICES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * INTELLECTUAL PROPERTY RIGHTS, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * ******************************************************************************* **/ /***************************** Include Files **********************************/ #include <stdio.h> #include <ADuCM360.h> #include "ADPD105.h" #include "Communication.h" #include "Timer.h" uint16_t au16DataSlotA[4] = {0,0,0,0}; uint16_t au16DataSlotB[4] = {0,0,0,0}; /************************* Functions Definitions ******************************/ /** @brief Writes a register to the ADPD105 via I2C. @param u8Address - register address @param u16value - value to be written @return none **/ void ADPD105_WriteReg(uint8_t u8Address, uint16_t u16Value) { I2C_Write(ADPD105_ADDRESS, u8Address, (u16Value >> 8), (u16Value & 0xFF), I2C_WRITE_TWO_REG); } /** @brief Reads a register from ADPD105 via I2C. @param u8Address - register address @return uint16_t **/ uint16_t ADPD105_ReadReg(uint8_t u8Address) { return I2C_Read(ADPD105_ADDRESS, u8Address, I2C_READ_TWO_REG); } /** @brief Reads the DEVID register from ADPD105 via I2C. @return uint16_t **/ uint16_t ADPD105_GetDevId(void) { return ADPD105_ReadReg(ADPD105_DEVID); } /** @brief Reads the Status register from ADPD105 via I2C. @return uint16_t **/ uint16_t ADPD105_GetStatus(void) { return ADPD105_ReadReg(ADPD105_STATUS); } /** @brief Reads the Status register from ADPD105 via I2C. 
@param enMode - ADPD105 mode of operation @return uint16_t **/ void ADPD105_SetOperationMode(enum ADPD105_OperationMode enMode) { ADPD105_WriteReg(ADPD105_MODE, enMode); } /** @brief Configure Time Slot switch register from ADPD105 via I2C. @param u8SlotASelect - SlotA inputs @param u8SlotBSelect - SlotB inputs @return uint16_t **/ void ADPD105_SetTimeSlotSwitch(enum ADPD105_TimeSlotPD u8SlotASelect, enum ADPD105_TimeSlotPD u8SlotBSelect) { uint16_t u16Value; u16Value = ADPD105_ReadReg(ADPD105_PD_LED_SELECT); /* reset before settings for PD Time slot */ u16Value &= 0xF00F; /* set time slot values PD */ u16Value |= ((uint16_t)u8SlotASelect << 4) | (uint16_t)(u8SlotBSelect << 8); ADPD105_WriteReg(ADPD105_PD_LED_SELECT, u16Value); } /** @brief Enable the internal 32kHz internal clock from ADPD105 via I2C. @param u8SlotBSelect - u8Enable - enables/disables 32kHz clock @return void **/ void ADPD105_SetCLK32K(uint8_t u8Enable) { uint16_t u16Value; u16Value = ADPD105_ReadReg(ADPD105_SAMPLE_CLK); ADPD105_WriteReg(ADPD105_SAMPLE_CLK, u16Value | (u8Enable << 7)); /* enable 32kHz internal clock */ } /** @brief Setup FIFO for data reading from ADPD105. @return void **/ void ADPD105_SetFIFO(void) { ADPD105_SetOperationMode(PROGRAM); /* Slot A enable and 32 bit extend sample data to fifo */ ADPD105_WriteReg(ADPD105_SLOT_EN, 0x3131); /* set FIFO threshold to 8 words data */ ADPD105_WriteReg(ADPD105_FIFO_THRESH, 0x1F00); /* Set FIFO interrupt */ ADPD105_WriteReg(ADPD105_INT_MASK, 0x0FF); ADPD105_WriteReg(ADPD105_GPIO_DRV, 0x05); /* set GPIO */ ADPD105_SetOperationMode(NORMAL_OPERATION); } /** @brief Select what led to be used inside a time slot. 
@param enLEDNumber - the led number, see ADPD105_LED @param enSlot - time slot ( SlotA or SlotB) @return void **/ void ADPD105_SelectLED(enum ADPD105_LED enLEDNumber, enum ADPD105_TimeSlot enSlot) { uint16_t u16Value; u16Value = ADPD105_ReadReg(ADPD105_PD_LED_SELECT); if (enSlot == SLOTA) { u16Value &= 0xFFFC; u16Value |= enLEDNumber; /* configure LED for SLOTA */ } else { u16Value &= 0xFFF3; u16Value |= (enLEDNumber << 2); /* configure LED for SLOTB */ } ADPD105_WriteReg(ADPD105_PD_LED_SELECT, u16Value); } /** @brief Deselect the leds for each time slot . @return void **/ void ADPD105_DeselectLEDs(void) { uint16_t u16Value; u16Value = ADPD105_ReadReg(ADPD105_PD_LED_SELECT); u16Value &= 0xFFF0; ADPD105_WriteReg(ADPD105_PD_LED_SELECT, u16Value); } /** @brief Read the data register for each time slot @return void **/ void ADPD105_ReadDataRegs(uint16_t *data_Slot_A, uint16_t *data_Slot_B, uint8_t count) { uint8_t i; timer_sleep(30); /* Set data hold - disable data update */ ADPD105_WriteReg(ADPD105_DATA_ACCESS_CTL, 0x07); /* Read data registers */ for (i = 0; i < count; i++) data_Slot_A[i] = ADPD105_ReadReg(ADPD105_SLOTA_PD1_16BIT + i); for (i = 0; i < count; i++) data_Slot_B[i] = ADPD105_ReadReg(ADPD105_SLOTB_PD1_16BIT + i); /* disable data hold */ ADPD105_WriteReg(ADPD105_DATA_ACCESS_CTL, 0x01); } /** @brief Display values of registers @return void **/ void ADPD105_DisplayRegValues(uint16_t *data_slot_A, uint16_t *data_slot_B, uint8_t data_count) { uint8_t i; AppPrintf("SlotA Values\n\r"); for ( i = 0; i < data_count; i++) AppPrintf("PD%d = %d\n\r", i, data_slot_A[i]); AppPrintf("SlotB Values\n\r"); for ( i = 0; i < data_count; i++) AppPrintf("PD%d = %d\n\r", i, data_slot_B[i]); } /** @brief Set the width and offset for led pulse. 
@param enSlot - time slot (SlotA or SlotB) @param u8Width - the width of the led pulse (1us step) @param u8Offset - the offset of the led pulse (1us step) @return void **/ void ADPD105_SetLEDWidthOffset(enum ADPD105_TimeSlot enSlot, uint8_t u8Width, uint8_t u8Offset) { uint16_t u16Value; if (enSlot == SLOTA) { u16Value = u8Offset + (uint16_t)((u8Width & 0x1F) << 8); ADPD105_WriteReg(ADPD105_SLOTA_LED_PULSE, u16Value); } else { u16Value = u8Offset + (uint16_t)((u8Width & 0x1F) << 8); ADPD105_WriteReg(ADPD105_SLOTB_LED_PULSE, u16Value); } } /** @brief Set the width and offset for AFE integration. @param enSlot - time slot (SlotA or SlotB) @param u8Width - the width of the AFE integration window (1us step) @param u8Offset - the offset of the AFE integration window (1us step) @param u8FineOffset - the fine offset of the AFE integration window (31.25 ns step) @return void **/ void ADPD105_SetAFEWidthOffset(enum ADPD105_TimeSlot enSlot, uint8_t u8Width, uint8_t u8Offset, uint8_t u8FineOffset) { uint16_t u16Value = 0; if (enSlot == SLOTA) { u16Value = (uint16_t)(u8FineOffset & 0x1F) + (uint16_t)((u8Offset & 0x3F) << 5) + (uint16_t)((u8Width & 0x1F) << 11); ADPD105_WriteReg(ADPD105_SLOTA_AFE_WINDOW, u16Value); } else { u16Value = (uint16_t)(u8FineOffset & 0x1F) + (uint16_t)((u8Offset & 0x3F) << 5) + (uint16_t)((u8Width & 0x1F) << 11); ADPD105_WriteReg(ADPD105_SLOTB_AFE_WINDOW, u16Value); } } /** @brief Set the transimpendance amplifier gain. 
@param enSlot - time slot (SlotA or SlotB) @param enTIAGain - TIA gain value @return void **/ void ADPD105_SetTIAGain(enum ADPD105_TimeSlot enSlot, enum ADPD105_TIAGain enTIAGain) { uint16_t u16Value; if (enSlot == SLOTA) { u16Value = ADPD105_ReadReg(ADPD105_SLOTA_TIA_CFG); u16Value = 0x1C34 | enTIAGain; ADPD105_WriteReg(ADPD105_SLOTA_TIA_CFG, u16Value); } else { u16Value = ADPD105_ReadReg(ADPD105_SLOTB_TIA_CFG); u16Value = 0x1C34 | enTIAGain; ADPD105_WriteReg(ADPD105_SLOTB_TIA_CFG, u16Value); } } /** @brief Set the sampling frequency value. @param u16Frequency - sampling frequency value @return void **/ void ADPD105_SetSamplingFrequency(uint16_t u16Frequency) { uint16_t u16FValue; /* calculate value to write in FSAMPLE */ u16FValue = 32000 / u16Frequency / 4; ADPD105_WriteReg(ADPD105_FSAMPLE, u16FValue); } /** @brief Set the value of the average factor N. @param enAverage - average factor value @return void **/ void ADPD105_SetAverageFactor(enum ADPD105_AverageN enAverage) { ADPD105_WriteReg(ADPD105_NUM_AVG, (enAverage << 4) + (enAverage << 8)); } /** @brief Set ADC clock speed. @param enADCClock - ADC clock speed value @return void **/ void ADPD105_SetADCClock(enum ADPD105_ADCClockSpeed enADCClock) { ADPD105_WriteReg(ADPD105_ADC_CLOCK, enADCClock); } /** @brief Enable digital clock @return void **/ void ADPD105_SetDigitalClock(void) { ADPD105_WriteReg(ADPD105_DATA_ACCESS_CTL, 1); } /** @brief Software reset for the ADPD105 @return void **/ void ADPD105_Reset(void) { ADPD105_WriteReg(ADPD105_SW_RESET, 1); /* software reset */ } /** @brief Set channel offset. 
@param enSlot - time slot (SlotA or SlotB) @param stOffset - offset value @return void **/ void ADPD105_SetOffset(enum ADPD105_TimeSlot enSlot, struct ADPD105_ChannelOffset stOffset) { if (enSlot == SLOTA) { ADPD105_WriteReg(ADPD105_SLOTA_CH1_OFFSET, stOffset.CH1Offset); ADPD105_WriteReg(ADPD105_SLOTA_CH2_OFFSET, stOffset.CH2Offset); ADPD105_WriteReg(ADPD105_SLOTA_CH3_OFFSET, stOffset.CH3Offset); ADPD105_WriteReg(ADPD105_SLOTA_CH4_OFFSET, stOffset.CH4Offset); } else { ADPD105_WriteReg(ADPD105_SLOTB_CH1_OFFSET, stOffset.CH1Offset); ADPD105_WriteReg(ADPD105_SLOTB_CH2_OFFSET, stOffset.CH2Offset); ADPD105_WriteReg(ADPD105_SLOTB_CH3_OFFSET, stOffset.CH3Offset); ADPD105_WriteReg(ADPD105_SLOTB_CH4_OFFSET, stOffset.CH4Offset); } } /** @brief Disable the leds. @return void **/ void ADPD105_DisableLed(void) { ADPD105_WriteReg(ADPD105_LED_DISABLE, 0x300); } /** @brief Enable the leds. @return void **/ void ADPD105_EnableLed(void) { ADPD105_WriteReg(ADPD105_LED_DISABLE, 0); } /** @brief Set the pulse number and period. @param enSlot - time slot (SlotA or SlotB) @param u8PulseCount - number of pulses in time slot @param u8PulsePeriod - period of the pulse @return void **/ void ADPD105_SetPulseNumberPeriod(enum ADPD105_TimeSlot enSlot, uint8_t u8PulseCount, uint8_t u8PulsePeriod) { uint16_t u16Value = ((uint16_t)u8PulseCount << 8) + u8PulsePeriod; if (enSlot == SLOTA) { ADPD105_WriteReg(ADPD105_SLOTA_NUMPULSES, u16Value); } else { ADPD105_WriteReg(ADPD105_SLOTB_NUMPULSES, u16Value); } }
exports.config = { environment: 'development', common: { database: { name: process.env.DB_NAME_DEV }, session: { secret: process.env.SESSION_SECRET, expTimeSeconds: parseInt(process.env.SESSION_EXP_TIME_SECONDS) }, jsonPlaceHolderApi: { baseUrl: process.env.JSON_PLACE_HOLDER_API_BASE_URL }, availableDomains: process.env.AVAILABLE_DOMAINS }, isDevelopment: true };
#
# $Id: file.pm,v 1.19 1999/04/23 17:54:02 gisle Exp $

# LWP protocol handler for file: URLs.  Serves regular files from the
# local filesystem and synthesizes HTML directory listings for
# directories.  Supports GET and HEAD only.
package LWP::Protocol::file;

require LWP::Protocol;
@ISA = qw(LWP::Protocol);

use strict;

require LWP::MediaTypes;
require HTTP::Request;
require HTTP::Response;
require HTTP::Status;
require HTTP::Date;
require URI::Escape;
require HTML::Entities;

# Handle a single request object and return an HTTP::Response.
# $proxy must be undef (the filesystem cannot be proxied); $arg/$size
# are passed through to LWP::Protocol's collect machinery.
sub request
{
    my($self, $request, $proxy, $arg, $size) = @_;

    LWP::Debug::trace('()');

    # default chunk size for reading regular files
    $size = 4096 unless defined $size and $size > 0;

    # check proxy
    if (defined $proxy)
    {
	return new HTTP::Response &HTTP::Status::RC_BAD_REQUEST,
				  'You can not proxy through the filesystem';
    }

    # check method
    my $method = $request->method;
    unless ($method eq 'GET' || $method eq 'HEAD') {
	return new HTTP::Response &HTTP::Status::RC_BAD_REQUEST,
				  'Library does not allow method ' .
				  "$method for 'file:' URLs";
    }

    # check url
    my $url = $request->url;

    my $scheme = $url->scheme;
    if ($scheme ne 'file') {
	return new HTTP::Response &HTTP::Status::RC_INTERNAL_SERVER_ERROR,
				  "LWP::file::request called for '$scheme'";
    }

    # URL OK, look at file
    my $path = $url->file;

    # test file exists and is readable.
    # NOTE: the -e test primes the "_" stat cache that the -r test and
    # the stat(_) call below rely on — do not reorder these checks.
    unless (-e $path) {
	return new HTTP::Response &HTTP::Status::RC_NOT_FOUND,
				  "File `$path' does not exist";
    }
    unless (-r _) {
	return new HTTP::Response &HTTP::Status::RC_FORBIDDEN,
				  'User does not have read permission';
    }

    # looks like file exists; reuse the cached stat buffer
    my($dev,$ino,$mode,$nlink,$uid,$gid,$rdev,$filesize,
       $atime,$mtime,$ctime,$blksize,$blocks)
	    = stat(_);

    # XXX should check Accept headers?

    # check if-modified-since and short-circuit with 304 when unchanged
    my $ims = $request->header('If-Modified-Since');
    if (defined $ims) {
	my $time = HTTP::Date::str2time($ims);
	if (defined $time and $time >= $mtime) {
	    return new HTTP::Response &HTTP::Status::RC_NOT_MODIFIED,
				      "$method $path";
	}
    }

    # Ok, should be an OK response by now...
    my $response = new HTTP::Response &HTTP::Status::RC_OK;

    # fill in response headers
    $response->header('Last-Modified', HTTP::Date::time2str($mtime));

    if (-d _) {         # If the path is a directory, process it
	# generate the HTML for directory
	opendir(D, $path) or
	   return new HTTP::Response &HTTP::Status::RC_INTERNAL_SERVER_ERROR,
				     "Cannot read directory '$path': $!";
	my(@files) = sort readdir(D);
	closedir(D);

	# Make directory listing: each entry becomes an escaped link,
	# with a trailing "/" appended to sub-directory names.
	for (@files) {
	    if($^O eq "MacOS") {
		$_ .= "/" if -d "$path:$_";
	    }
	    else {
		$_ .= "/" if -d "$path/$_";
	    }
	    my $furl = URI::Escape::uri_escape($_);
	    my $desc = HTML::Entities::encode($_);
	    $_ = qq{<LI><A HREF="$furl">$desc</A>};
	}
	# Ensure that the base URL is "/" terminated so relative links resolve
	my $base = $url->clone;
	unless ($base->epath =~ m|/$|) {
	    $base->epath($base->epath . "/");
	}
	my $html = join("\n",
			"<HTML>\n<HEAD>",
			"<TITLE>Directory $path</TITLE>",
			"<BASE HREF=\"$base\">",
			"</HEAD>\n<BODY>",
			"<H1>Directory listing of $path</H1>",
			"<UL>", @files, "</UL>",
			"</BODY>\n</HTML>\n");

	$response->header('Content-Type',   'text/html');
	$response->header('Content-Length', length $html);
	# a HEAD request gets headers only, no body
	$html = "" if $method eq "HEAD";

	return $self->collect_once($arg, $response, $html);
    }

    # path is a regular file
    $response->header('Content-Length', $filesize);
    LWP::MediaTypes::guess_media_type($path, $response);

    # read the file in $size-byte chunks via the collect callback
    if ($method ne "HEAD") {
	open(F, $path) or return new
	    HTTP::Response(&HTTP::Status::RC_INTERNAL_SERVER_ERROR,
			   "Cannot read file '$path': $!");
	binmode(F);
	$response =  $self->collect($arg, $response, sub {
	    my $content = "";
	    my $bytes = sysread(F, $content, $size);
	    return \$content if $bytes > 0;
	    return \ "";
	});
	close(F);
    }

    $response;
}

1;
-- Archive of every newsletter issue that has been published.
CREATE TABLE newsletter_issues (
   -- column-level PRIMARY KEY implies NOT NULL and uniqueness
   newsletter_issue_id uuid PRIMARY KEY,
   title               TEXT NOT NULL,
   text_content        TEXT NOT NULL,
   html_content        TEXT NOT NULL,
   -- NOTE(review): stored as TEXT rather than a timestamp type —
   -- presumably a serialized timestamp string; confirm against the writer.
   published_at        TEXT NOT NULL
);
-- Builds a signed JWT for the given user.
-- The payload carries the user's e-mail, an expiry computed from the
-- app-level 'jwt_lifetime' setting (seconds), and the fixed 'apiuser'
-- role.  Signing uses the pgjwt extension with the 'jwt_secret' setting.
-- Returns NULL if p_user_id does not match a row in public.users
-- (the SELECT ... INTO finds no row, leaving v_payload NULL).
CREATE OR REPLACE FUNCTION create_jwt(p_user_id uuid) RETURNS TEXT AS $$
DECLARE
  v_payload jsonb;
  v_token text;
BEGIN
  SELECT jsonb_build_object(
    'email', email,
    'exp', extract(epoch from now())::int + app.get_setting_text('jwt_lifetime')::int,
    'role', 'apiuser' -- apiuser is the PG role all PostgREST requests run under
  )
  FROM public.users
  WHERE id = p_user_id
  INTO v_payload;

  SELECT pgjwt.sign(v_payload, app.get_setting_text('jwt_secret')) INTO v_token;

  RETURN v_token;
END;
$$ STABLE LANGUAGE plpgsql;

-- Verifies a token's signature against the configured 'jwt_secret'.
-- Thin wrapper around pgjwt.verify; returns the decoded header and
-- payload plus a validity flag.
CREATE OR REPLACE FUNCTION verify_jwt(token text) RETURNS table(header jsonb, payload jsonb, valid boolean) AS $$
  SELECT * FROM pgjwt.verify(token, app.get_setting_text('jwt_secret'));
$$ STABLE LANGUAGE sql;
/** Visual defaults for the boxplot component. */
const boxplotDefaults = {
  sortIndicator: 'red',
  stroke: 'black',
  dotSize: 5,
  box: 'grey',
  outlier: 'black',
};

/** Default theme consumed by chart components; add per-chart keys here. */
export const defaultTheme = {
  boxplot: boxplotDefaults,
};

/** Structural type of a theme object, derived from the default theme. */
export type Theme = typeof defaultTheme;
!
! CRTM_GeometryInfo_Define
!
! Module defining the CRTM GeometryInfo container object.
! The GeometryInfo object wraps a user-supplied CRTM_Geometry object and
! holds quantities derived from it (radian angles, secants, etc.).
!
!
! CREATION HISTORY:
!       Written by:     Paul van Delst, 19-May-2004
!                       paul.vandelst@noaa.gov
!

MODULE CRTM_GeometryInfo_Define

  ! ------------------
  ! Environment set up
  ! ------------------
  ! Intrinsic modules
  USE ISO_Fortran_Env      , ONLY: OUTPUT_UNIT
  ! Module use
  USE Type_Kinds           , ONLY: fp
  USE Message_Handler      , ONLY: SUCCESS, FAILURE, WARNING, INFORMATION, Display_Message
  USE Compare_Float_Numbers, ONLY: DEFAULT_N_SIGFIG, &
                                   OPERATOR(.EqualTo.), &
                                   Compares_Within_Tolerance
  USE File_Utility         , ONLY: File_Open, File_Exists
  USE Binary_File_Utility  , ONLY: Open_Binary_File , &
                                   WriteGAtts_Binary_File, &
                                   ReadGAtts_Binary_File
  USE CRTM_Parameters      , ONLY: EARTH_RADIUS      , &
                                   SATELLITE_HEIGHT  , &
                                   DIFFUSIVITY_RADIAN, &
                                   SECANT_DIFFUSIVITY
  USE CRTM_Geometry_Define , ONLY: CRTM_Geometry_type, &
                                   OPERATOR(==), &
                                   OPERATOR(-) , &
                                   CRTM_Geometry_Destroy    , &
                                   CRTM_Geometry_SetValue   , &
                                   CRTM_Geometry_GetValue   , &
                                   CRTM_Geometry_IsValid    , &
                                   CRTM_Geometry_Inspect    , &
                                   CRTM_Geometry_ReadRecord , &
                                   CRTM_Geometry_WriteRecord
  ! Disable implicit typing
  IMPLICIT NONE

  ! ------------
  ! Visibilities
  ! ------------
  ! Everything private by default
  PRIVATE
  ! Operators
  PUBLIC :: OPERATOR(==)
  PUBLIC :: OPERATOR(-)
  ! Geometry entities (re-exported from CRTM_Geometry_Define)
  ! ...Structures
  PUBLIC :: CRTM_Geometry_type
  ! GeometryInfo entities
  ! ...Structures
  PUBLIC :: CRTM_GeometryInfo_type
  ! ...Procedures
  PUBLIC :: CRTM_GeometryInfo_Destroy
  PUBLIC :: CRTM_GeometryInfo_SetValue
  PUBLIC :: CRTM_GeometryInfo_GetValue
  PUBLIC :: CRTM_GeometryInfo_IsValid
  PUBLIC :: CRTM_GeometryInfo_Inspect
  PUBLIC :: CRTM_GeometryInfo_InquireFile
  PUBLIC :: CRTM_GeometryInfo_ReadFile
  PUBLIC :: CRTM_GeometryInfo_WriteFile

  ! ---------------------
  ! Procedure overloading
  ! ---------------------
  INTERFACE OPERATOR(==)
    MODULE PROCEDURE CRTM_GeometryInfo_Equal
  END INTERFACE OPERATOR(==)

  INTERFACE OPERATOR(-)
    MODULE PROCEDURE CRTM_GeometryInfo_Subtract
  END INTERFACE OPERATOR(-)

!
! -----------------
! Module parameters
! -----------------
! Literal constants
REAL(fp), PARAMETER :: ZERO = 0.0_fp
REAL(fp), PARAMETER :: ONE  = 1.0_fp
! Message string length
INTEGER, PARAMETER :: ML = 256
! File status on close after write error
CHARACTER(*), PARAMETER :: WRITE_ERROR_STATUS = 'DELETE'


! ---------------------------------
! GeometryInfo data type definition
! ---------------------------------
!:tdoc+:
TYPE :: CRTM_GeometryInfo_type
  ! Structure for user Input
  TYPE(CRTM_Geometry_type) :: user
  ! Derived from User Input
  ! ...Default distance ratio (Earth radius over orbit radius)
  REAL(fp) :: Distance_Ratio = EARTH_RADIUS/(EARTH_RADIUS + SATELLITE_HEIGHT)
  ! ...Sensor angle information (radian equivalents of the user-input degrees)
  REAL(fp) :: Sensor_Scan_Radian    = ZERO
  REAL(fp) :: Sensor_Zenith_Radian  = ZERO
  REAL(fp) :: Sensor_Azimuth_Radian = ZERO
  REAL(fp) :: Secant_Sensor_Zenith  = ZERO
  REAL(fp) :: Cosine_Sensor_Zenith  = ZERO
  ! ...Zenith angle used in the transmittance algorithms
  REAL(fp) :: Trans_Zenith_Radian = ZERO
  REAL(fp) :: Secant_Trans_Zenith = ZERO
  ! ...Source (e.g. solar) angle information
  REAL(fp) :: Source_Zenith_Radian  = ZERO
  REAL(fp) :: Source_Azimuth_Radian = ZERO
  REAL(fp) :: Secant_Source_Zenith  = ZERO
  ! ...Flux angle information (defaults to the diffusivity approximation)
  REAL(fp) :: Flux_Zenith_Radian = DIFFUSIVITY_RADIAN
  REAL(fp) :: Secant_Flux_Zenith = SECANT_DIFFUSIVITY
  ! ...Square of ratio between mean and actual sun-earth (AU) distances
  REAL(fp) :: AU_ratio2 = ONE
END TYPE CRTM_GeometryInfo_type
!:tdoc-:


CONTAINS


!##################################################################################
!##################################################################################
!##                                                                              ##
!##                         ## PUBLIC MODULE ROUTINES ##                         ##
!##                                                                              ##
!##################################################################################
!##################################################################################

!--------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
!       CRTM_GeometryInfo_Destroy
!
! PURPOSE:
!
Elemental subroutine to re-initialize a CRTM GeometryInfo object.
!
! CALLING SEQUENCE:
!       CALL CRTM_GeometryInfo_Destroy( gInfo )
!
! OBJECTS:
!       gInfo:        Re-initialized GeometryInfo structure.
!                     UNITS:      N/A
!                     TYPE:       CRTM_GeometryInfo_type
!                     DIMENSION:  Scalar or any rank
!                     ATTRIBUTES: INTENT(OUT)
!
!:sdoc-:
!--------------------------------------------------------------------------------

ELEMENTAL SUBROUTINE CRTM_GeometryInfo_Destroy( gInfo )
  TYPE(CRTM_GeometryInfo_type), INTENT(OUT) :: gInfo
  ! INTENT(OUT) default-initializes the derived components; explicitly
  ! destroy the embedded user Geometry object as well.
  CALL CRTM_Geometry_Destroy(gInfo%user)
END SUBROUTINE CRTM_GeometryInfo_Destroy


!--------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
!       CRTM_GeometryInfo_SetValue
!
! PURPOSE:
!       Elemental subroutine to set the values of CRTM GeometryInfo
!       object components.
!
! CALLING SEQUENCE:
!       CALL CRTM_GeometryInfo_SetValue( gInfo, &
!                                        Geometry              = Geometry             , &
!                                        iFOV                  = iFOV                 , &
!                                        Longitude             = Longitude            , &
!                                        Latitude              = Latitude             , &
!                                        Surface_Altitude      = Surface_Altitude     , &
!                                        Sensor_Scan_Angle     = Sensor_Scan_Angle    , &
!                                        Sensor_Zenith_Angle   = Sensor_Zenith_Angle  , &
!                                        Sensor_Azimuth_Angle  = Sensor_Azimuth_Angle , &
!                                        Source_Zenith_Angle   = Source_Zenith_Angle  , &
!                                        Source_Azimuth_Angle  = Source_Azimuth_Angle , &
!                                        Flux_Zenith_Angle     = Flux_Zenith_Angle    , &
!                                        Year                  = Year                 , &
!                                        Month                 = Month                , &
!                                        Day                   = Day                  , &
!                                        Distance_Ratio        = Distance_Ratio       , &
!                                        Sensor_Scan_Radian    = Sensor_Scan_Radian   , &
!                                        Sensor_Zenith_Radian  = Sensor_Zenith_Radian , &
!                                        Sensor_Azimuth_Radian = Sensor_Azimuth_Radian, &
!                                        Secant_Sensor_Zenith  = Secant_Sensor_Zenith , &
!                                        Cosine_Sensor_Zenith  = Cosine_Sensor_Zenith , &
!                                        Source_Zenith_Radian  = Source_Zenith_Radian , &
!                                        Source_Azimuth_Radian = Source_Azimuth_Radian, &
!                                        Secant_Source_Zenith  = Secant_Source_Zenith , &
!                                        Flux_Zenith_Radian    = Flux_Zenith_Radian   , &
!                                        Secant_Flux_Zenith    = Secant_Flux_Zenith   , &
!                                        Trans_Zenith_Radian   = Trans_Zenith_Radian  , &
!                                        Secant_Trans_Zenith   = Secant_Trans_Zenith  , &
!                                        AU_ratio2             = AU_ratio2              )
!
!
OBJECTS:
!       gInfo:                GeometryInfo object for which component values
!                             are to be set.
!                             UNITS:      N/A
!                             TYPE:       CRTM_Geometry_type
!                             DIMENSION:  Scalar or any rank
!                             ATTRIBUTES: INTENT(IN OUT)
!
! OPTIONAL INPUTS:
!       Geometry:             Geometry object.
!                             UNITS:      N/A
!                             TYPE:       CRTM_Geometry_type
!                             DIMENSION:  Scalar or same as gInfo input
!                             ATTRIBUTES: INTENT(IN)
!
!       All other gInfo components as listed in the calling sequence.
!       NOTE: If the Geometry argument as well as any of the arguments iFOV to
!             Flux_Zenith_Angle are specified, the latter values override any
!             contained in the passed Geometry object.
!
!:sdoc-:
!--------------------------------------------------------------------------------

ELEMENTAL SUBROUTINE CRTM_GeometryInfo_SetValue( &
  gInfo                , &  ! Input
  Geometry             , &  ! Optional input
  iFOV                 , &  ! Optional input
  Longitude            , &  ! Optional input
  Latitude             , &  ! Optional input
  Surface_Altitude     , &  ! Optional input
  Sensor_Scan_Angle    , &  ! Optional input
  Sensor_Zenith_Angle  , &  ! Optional input
  Sensor_Azimuth_Angle , &  ! Optional input
  Source_Zenith_Angle  , &  ! Optional input
  Source_Azimuth_Angle , &  ! Optional input
  Flux_Zenith_Angle    , &  ! Optional input
  Year                 , &  ! Optional input
  Month                , &  ! Optional input
  Day                  , &  ! Optional input
  Distance_Ratio       , &  ! Optional input
  Sensor_Scan_Radian   , &  ! Optional input
  Sensor_Zenith_Radian , &  ! Optional input
  Sensor_Azimuth_Radian, &  ! Optional input
  Secant_Sensor_Zenith , &  ! Optional input
  Cosine_Sensor_Zenith , &  ! Optional input
  Source_Zenith_Radian , &  ! Optional input
  Source_Azimuth_Radian, &  ! Optional input
  Secant_Source_Zenith , &  ! Optional input
  Flux_Zenith_Radian   , &  ! Optional input
  Secant_Flux_Zenith   , &  ! Optional input
  Trans_Zenith_Radian  , &  ! Optional input
  Secant_Trans_Zenith  , &  ! Optional input
  AU_ratio2              )  ! Optional input
  ! Arguments
  TYPE(CRTM_GeometryInfo_type),       INTENT(IN OUT) :: gInfo
  TYPE(CRTM_Geometry_type), OPTIONAL, INTENT(IN)     :: Geometry
  INTEGER ,                 OPTIONAL, INTENT(IN)     :: iFOV
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Longitude
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Latitude
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Surface_Altitude
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Sensor_Scan_Angle
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Sensor_Zenith_Angle
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Sensor_Azimuth_Angle
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Source_Zenith_Angle
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Source_Azimuth_Angle
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Flux_Zenith_Angle
  INTEGER,                  OPTIONAL, INTENT(IN)     :: Year
  INTEGER,                  OPTIONAL, INTENT(IN)     :: Month
  INTEGER,                  OPTIONAL, INTENT(IN)     :: Day
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Distance_Ratio
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Sensor_Scan_Radian
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Sensor_Zenith_Radian
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Sensor_Azimuth_Radian
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Secant_Sensor_Zenith
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Cosine_Sensor_Zenith
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Source_Zenith_Radian
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Source_Azimuth_Radian
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Secant_Source_Zenith
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Flux_Zenith_Radian
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Secant_Flux_Zenith
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Trans_Zenith_Radian
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: Secant_Trans_Zenith
  REAL(fp),                 OPTIONAL, INTENT(IN)     :: AU_ratio2

  ! Get values
  ! ...Copy the whole Geometry object first so individual user-input
  !    arguments below can override its contents.
  IF ( PRESENT(Geometry) ) gInfo%user = Geometry
  ! ...Delegate the user-input components to the Geometry setter
  CALL CRTM_Geometry_SetValue( gInfo%user, &
                               iFOV                 = iFOV                , &
                               Longitude            = Longitude           , &
                               Latitude             = Latitude            , &
                               Surface_Altitude     = Surface_Altitude    , &
                               Sensor_Scan_Angle    = Sensor_Scan_Angle   , &
                               Sensor_Zenith_Angle  = Sensor_Zenith_Angle , &
                               Sensor_Azimuth_Angle = Sensor_Azimuth_Angle, &
                               Source_Zenith_Angle  = Source_Zenith_Angle , &
                               Source_Azimuth_Angle = Source_Azimuth_Angle, &
                               Flux_Zenith_Angle    = Flux_Zenith_Angle   , &
                               Year                 = Year                , &
                               Month                = Month               , &
                               Day                  = Day                   )
  ! ...Derived components are assigned directly when present
  IF ( PRESENT(Distance_Ratio       ) ) gInfo%Distance_Ratio        = Distance_Ratio
  IF ( PRESENT(Sensor_Scan_Radian   ) ) gInfo%Sensor_Scan_Radian    = Sensor_Scan_Radian
  IF ( PRESENT(Sensor_Zenith_Radian ) ) gInfo%Sensor_Zenith_Radian  = Sensor_Zenith_Radian
  IF ( PRESENT(Sensor_Azimuth_Radian) ) gInfo%Sensor_Azimuth_Radian = Sensor_Azimuth_Radian
  IF ( PRESENT(Secant_Sensor_Zenith ) ) gInfo%Secant_Sensor_Zenith  = Secant_Sensor_Zenith
  IF ( PRESENT(Cosine_Sensor_Zenith ) ) gInfo%Cosine_Sensor_Zenith  = Cosine_Sensor_Zenith
  IF ( PRESENT(Source_Zenith_Radian ) ) gInfo%Source_Zenith_Radian  = Source_Zenith_Radian
  IF ( PRESENT(Source_Azimuth_Radian) ) gInfo%Source_Azimuth_Radian = Source_Azimuth_Radian
  IF ( PRESENT(Secant_Source_Zenith ) ) gInfo%Secant_Source_Zenith  = Secant_Source_Zenith
  IF ( PRESENT(Flux_Zenith_Radian   ) ) gInfo%Flux_Zenith_Radian    = Flux_Zenith_Radian
  IF ( PRESENT(Secant_Flux_Zenith   ) ) gInfo%Secant_Flux_Zenith    = Secant_Flux_Zenith
  IF ( PRESENT(Trans_Zenith_Radian  ) ) gInfo%Trans_Zenith_Radian   = Trans_Zenith_Radian
  IF ( PRESENT(Secant_Trans_Zenith  ) ) gInfo%Secant_Trans_Zenith   = Secant_Trans_Zenith
  IF ( PRESENT(AU_ratio2            ) ) gInfo%AU_ratio2             = AU_ratio2

END SUBROUTINE CRTM_GeometryInfo_SetValue


!--------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
!       CRTM_GeometryInfo_GetValue
!
! PURPOSE:
!       Elemental subroutine to get the values of CRTM GeometryInfo
!       object components.
!
! CALLING SEQUENCE:
!
!       CALL CRTM_GeometryInfo_GetValue( gInfo, &
!                                        Geometry  = Geometry , &
!                                        iFOV      = iFOV     , &
!                                        ...                  , &
!                                        AU_ratio2 = AU_ratio2  )
!
!   Any combination of the optional output arguments listed in the
!   interface below may be requested.
!
! OBJECTS:
!       gInfo:        GeometryInfo object from which component values
!                     are to be retrieved.
!                     UNITS:      N/A
!                     TYPE:       CRTM_GeometryInfo_type
!                     DIMENSION:  Scalar or any rank
!                     ATTRIBUTES: INTENT(IN)
!
! OPTIONAL OUTPUTS:
!       Geometry:     Copy of the embedded user Geometry object.
!                     UNITS:      N/A
!                     TYPE:       CRTM_Geometry_type
!                     DIMENSION:  Conformable with gInfo input
!                     ATTRIBUTES: INTENT(OUT), OPTIONAL
!
!       All other gInfo components as listed in the calling sequence.
!
!:sdoc-:
!--------------------------------------------------------------------------------

  ELEMENTAL SUBROUTINE CRTM_GeometryInfo_GetValue( &
    gInfo                , &  ! Input
    Geometry             , &  ! Optional output
    iFOV                 , &  ! Optional output
    Longitude            , &  ! Optional output
    Latitude             , &  ! Optional output
    Surface_Altitude     , &  ! Optional output
    Sensor_Scan_Angle    , &  ! Optional output
    Sensor_Zenith_Angle  , &  ! Optional output
    Sensor_Azimuth_Angle , &  ! Optional output
    Source_Zenith_Angle  , &  ! Optional output
    Source_Azimuth_Angle , &  ! Optional output
    Flux_Zenith_Angle    , &  ! Optional output
    Year                 , &  ! Optional output
    Month                , &  ! Optional output
    Day                  , &  ! Optional output
    Distance_Ratio       , &  ! Optional output
    Sensor_Scan_Radian   , &  ! Optional output
    Sensor_Zenith_Radian , &  ! Optional output
    Sensor_Azimuth_Radian, &  ! Optional output
    Secant_Sensor_Zenith , &  ! Optional output
    Cosine_Sensor_Zenith , &  ! Optional output
    Source_Zenith_Radian , &  ! Optional output
    Source_Azimuth_Radian, &  ! Optional output
    Secant_Source_Zenith , &  ! Optional output
    Flux_Zenith_Radian   , &  ! Optional output
    Secant_Flux_Zenith   , &  ! Optional output
    Trans_Zenith_Radian  , &  ! Optional output
    Secant_Trans_Zenith  , &  ! Optional output
    AU_ratio2              )  ! Optional output
    ! Arguments
    TYPE(CRTM_GeometryInfo_type),       INTENT(IN)  :: gInfo
    TYPE(CRTM_Geometry_type), OPTIONAL, INTENT(OUT) :: Geometry
    INTEGER ,                 OPTIONAL, INTENT(OUT) :: iFOV
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Longitude
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Latitude
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Surface_Altitude
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Sensor_Scan_Angle
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Sensor_Zenith_Angle
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Sensor_Azimuth_Angle
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Source_Zenith_Angle
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Source_Azimuth_Angle
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Flux_Zenith_Angle
    INTEGER,                  OPTIONAL, INTENT(OUT) :: Year
    INTEGER,                  OPTIONAL, INTENT(OUT) :: Month
    INTEGER,                  OPTIONAL, INTENT(OUT) :: Day
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Distance_Ratio
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Sensor_Scan_Radian
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Sensor_Zenith_Radian
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Sensor_Azimuth_Radian
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Secant_Sensor_Zenith
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Cosine_Sensor_Zenith
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Source_Zenith_Radian
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Source_Azimuth_Radian
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Secant_Source_Zenith
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Flux_Zenith_Radian
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Secant_Flux_Zenith
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Trans_Zenith_Radian
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: Secant_Trans_Zenith
    REAL(fp),                 OPTIONAL, INTENT(OUT) :: AU_ratio2

    ! Return a copy of the embedded user Geometry object on request
    IF ( PRESENT(Geometry) ) Geometry = gInfo%user

    ! User-specified components are retrieved from the contained Geometry object
    CALL CRTM_Geometry_GetValue( gInfo%user, &
                                 iFOV                 = iFOV                , &
                                 Longitude            = Longitude           , &
                                 Latitude             = Latitude            , &
                                 Surface_Altitude     = Surface_Altitude    , &
                                 Sensor_Scan_Angle    = Sensor_Scan_Angle   , &
                                 Sensor_Zenith_Angle  = Sensor_Zenith_Angle , &
                                 Sensor_Azimuth_Angle = Sensor_Azimuth_Angle, &
                                 Source_Zenith_Angle  = Source_Zenith_Angle , &
                                 Source_Azimuth_Angle = Source_Azimuth_Angle, &
                                 Flux_Zenith_Angle    = Flux_Zenith_Angle   , &
                                 Year                 = Year                , &
                                 Month                = Month               , &
                                 Day                  = Day                   )

    ! Derived components live directly in the container object
    IF ( PRESENT(Distance_Ratio       ) ) Distance_Ratio        = gInfo%Distance_Ratio
    IF ( PRESENT(Sensor_Scan_Radian   ) ) Sensor_Scan_Radian    = gInfo%Sensor_Scan_Radian
    IF ( PRESENT(Sensor_Zenith_Radian ) ) Sensor_Zenith_Radian  = gInfo%Sensor_Zenith_Radian
    IF ( PRESENT(Sensor_Azimuth_Radian) ) Sensor_Azimuth_Radian = gInfo%Sensor_Azimuth_Radian
    IF ( PRESENT(Secant_Sensor_Zenith ) ) Secant_Sensor_Zenith  = gInfo%Secant_Sensor_Zenith
    IF ( PRESENT(Cosine_Sensor_Zenith ) ) Cosine_Sensor_Zenith  = gInfo%Cosine_Sensor_Zenith
    IF ( PRESENT(Source_Zenith_Radian ) ) Source_Zenith_Radian  = gInfo%Source_Zenith_Radian
    IF ( PRESENT(Source_Azimuth_Radian) ) Source_Azimuth_Radian = gInfo%Source_Azimuth_Radian
    IF ( PRESENT(Secant_Source_Zenith ) ) Secant_Source_Zenith  = gInfo%Secant_Source_Zenith
    IF ( PRESENT(Flux_Zenith_Radian   ) ) Flux_Zenith_Radian    = gInfo%Flux_Zenith_Radian
    IF ( PRESENT(Secant_Flux_Zenith   ) ) Secant_Flux_Zenith    = gInfo%Secant_Flux_Zenith
    IF ( PRESENT(Trans_Zenith_Radian  ) ) Trans_Zenith_Radian   = gInfo%Trans_Zenith_Radian
    IF ( PRESENT(Secant_Trans_Zenith  ) ) Secant_Trans_Zenith   = gInfo%Secant_Trans_Zenith
    IF ( PRESENT(AU_ratio2            ) ) AU_ratio2             = gInfo%AU_ratio2

  END SUBROUTINE CRTM_GeometryInfo_GetValue


!--------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
!       CRTM_GeometryInfo_IsValid
!
! PURPOSE:
!       Non-pure function to perform some simple validity checks on a
!       CRTM GeometryInfo container object.
!
!       If invalid data is found, a message is printed to stdout.
!
! CALLING SEQUENCE:
!       result = CRTM_GeometryInfo_IsValid( gInfo )
!
!         or
!
!       IF ( CRTM_GeometryInfo_IsValid( gInfo ) ) THEN....
!
! OBJECTS:
!       gInfo:     CRTM GeometryInfo object which is to have its
!                  contents checked.
!                  UNITS:      N/A
!                  TYPE:       CRTM_GeometryInfo_type
!                  DIMENSION:  Scalar
!                  ATTRIBUTES: INTENT(IN)
!
! FUNCTION RESULT:
!       result:    Logical variable indicating whether or not the input
!                  passed the check.
!                  If == .FALSE., object is unused or contains invalid data;
!                     == .TRUE.,  object can be used in the CRTM.
!                  UNITS:      N/A
!                  TYPE:       LOGICAL
!                  DIMENSION:  Scalar
!
!:sdoc-:
!--------------------------------------------------------------------------------

  FUNCTION CRTM_GeometryInfo_IsValid( gInfo ) RESULT( IsValid )
    TYPE(CRTM_GeometryInfo_type), INTENT(IN) :: gInfo
    LOGICAL :: IsValid
    ! Only the user-supplied inputs are checked; the derived
    ! components are computed from them and carry no extra state.
    IsValid = CRTM_Geometry_IsValid( gInfo%user )
  END FUNCTION CRTM_GeometryInfo_IsValid


!--------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
!       CRTM_GeometryInfo_Inspect
!
! PURPOSE:
!       Subroutine to print the contents of a CRTM GeometryInfo container
!       object to stdout.
!
! CALLING SEQUENCE:
!       CALL CRTM_GeometryInfo_Inspect( gInfo, Unit=unit )
!
! INPUTS:
!       gInfo:   CRTM GeometryInfo object to display.
!                UNITS:      N/A
!                TYPE:       CRTM_GeometryInfo_type
!                DIMENSION:  Scalar
!                ATTRIBUTES: INTENT(IN)
!
! OPTIONAL INPUTS:
!       Unit:    Unit number for an already open file to which the output
!                will be written. If the argument is specified and the file
!                unit is not connected, the output goes to stdout.
!                UNITS:      N/A
!                TYPE:       INTEGER
!                DIMENSION:  Scalar
!                ATTRIBUTES: INTENT(IN), OPTIONAL
!
!:sdoc-:
!--------------------------------------------------------------------------------

  SUBROUTINE CRTM_GeometryInfo_Inspect( gInfo, Unit )
    ! Arguments
    TYPE(CRTM_GeometryInfo_type), INTENT(IN) :: gInfo
    INTEGER,            OPTIONAL, INTENT(IN) :: Unit
    ! Local parameters
    CHARACTER(*), PARAMETER :: RFMT = 'es22.15'
    ! Local variables
    INTEGER :: fid

    ! Setup: default to stdout; honour Unit only if it is connected
    fid = OUTPUT_UNIT
    IF ( PRESENT(Unit) ) THEN
      IF ( File_Open(Unit) ) fid = Unit
    END IF

    WRITE(fid, '(1x,"GeometryInfo OBJECT")')
    WRITE(fid, '(3x,"Distance ratio        :",1x,'//RFMT//')') gInfo%Distance_Ratio
    ! ...Sensor angle information
    WRITE(fid, '(3x,"Sensor scan radian    :",1x,'//RFMT//')') gInfo%Sensor_Scan_Radian
    WRITE(fid, '(3x,"Sensor zenith radian  :",1x,'//RFMT//')') gInfo%Sensor_Zenith_Radian
    WRITE(fid, '(3x,"Sensor azimuth radian :",1x,'//RFMT//')') gInfo%Sensor_Azimuth_Radian
    WRITE(fid, '(3x,"Secant sensor zenith  :",1x,'//RFMT//')') gInfo%Secant_Sensor_Zenith
    WRITE(fid, '(3x,"Cosine sensor zenith  :",1x,'//RFMT//')') gInfo%Cosine_Sensor_Zenith
    ! ...Transmittance algorithm sensor angle information
    WRITE(fid, '(3x,"Trans zenith radian   :",1x,'//RFMT//')') gInfo%Trans_Zenith_Radian
    WRITE(fid, '(3x,"Secant trans zenith   :",1x,'//RFMT//')') gInfo%Secant_Trans_Zenith
    ! ...Source angle information
    WRITE(fid, '(3x,"Source zenith radian  :",1x,'//RFMT//')') gInfo%Source_Zenith_Radian
    WRITE(fid, '(3x,"Source azimuth radian :",1x,'//RFMT//')') gInfo%Source_Azimuth_Radian
    WRITE(fid, '(3x,"Secant source zenith  :",1x,'//RFMT//')') gInfo%Secant_Source_Zenith
    ! ...Flux angle information
    WRITE(fid, '(3x,"Flux zenith radian    :",1x,'//RFMT//')') gInfo%Flux_Zenith_Radian
    WRITE(fid, '(3x,"Secant flux zenith    :",1x,'//RFMT//')') gInfo%Secant_Flux_Zenith
    ! ...AU ratio information
    WRITE(fid, '(3x,"AU ratio^2            :",1x,'//RFMT//')') gInfo%AU_ratio2
    ! The contained object
    CALL CRTM_Geometry_Inspect(gInfo%user, Unit=Unit)
  END SUBROUTINE CRTM_GeometryInfo_Inspect


!------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
!       CRTM_GeometryInfo_InquireFile
!
! PURPOSE:
!       Function to inquire CRTM GeometryInfo object files.
!
! CALLING SEQUENCE:
!       Error_Status = CRTM_GeometryInfo_InquireFile( &
!                        Filename               , &
!                        n_Profiles = n_Profiles  )
!
! INPUTS:
!       Filename:       Character string specifying the name of a
!                       CRTM GeometryInfo data file to read.
!                       UNITS:      N/A
!                       TYPE:       CHARACTER(*)
!                       DIMENSION:  Scalar
!                       ATTRIBUTES: INTENT(IN)
!
! OPTIONAL OUTPUTS:
!       n_Profiles:     The number of profiles for which there is geometry
!                       information in the data file.
!                       UNITS:      N/A
!                       TYPE:       INTEGER
!                       DIMENSION:  Scalar
!                       ATTRIBUTES: OPTIONAL, INTENT(OUT)
!
! FUNCTION RESULT:
!       Error_Status:   The return value is an integer defining the error
!                       status. The error codes are defined in the
!                       Message_Handler module.
!                       If == SUCCESS, the file inquire was successful
!                          == FAILURE, an unrecoverable error occurred.
!                       UNITS:      N/A
!                       TYPE:       INTEGER
!                       DIMENSION:  Scalar
!
!:sdoc-:
!------------------------------------------------------------------------------

  FUNCTION CRTM_GeometryInfo_InquireFile( &
    Filename  , &  ! Input
    n_Profiles) &  ! Optional output
  RESULT( err_stat )
    ! Arguments
    CHARACTER(*),           INTENT(IN)  :: Filename
    INTEGER ,     OPTIONAL, INTENT(OUT) :: n_Profiles
    ! Function result
    INTEGER :: err_stat
    ! Function parameters
    CHARACTER(*), PARAMETER :: ROUTINE_NAME = 'CRTM_GeometryInfo_InquireFile'
    ! Function variables
    CHARACTER(ML) :: msg
    CHARACTER(ML) :: io_msg
    INTEGER :: io_stat
    INTEGER :: fid
    INTEGER :: m

    ! Set up
    err_stat = SUCCESS
    ! ...Check that the file exists
    IF ( .NOT. File_Exists( TRIM(Filename) ) ) THEN
      msg = 'File '//TRIM(Filename)//' not found.'
      CALL Inquire_Cleanup(); RETURN
    END IF

    ! Open the file
    err_stat = Open_Binary_File( Filename, fid )
    IF ( err_stat /= SUCCESS ) THEN
      msg = 'Error opening '//TRIM(Filename)
      CALL Inquire_Cleanup(); RETURN
    END IF

    ! Read the number of profiles
    READ( fid,IOSTAT=io_stat,IOMSG=io_msg ) m
    IF ( io_stat /= 0 ) THEN
      msg = 'Error reading dimensions from '//TRIM(Filename)//' - '//TRIM(io_msg)
      CALL Inquire_Cleanup(); RETURN
    END IF

    ! Close the file
    CLOSE( fid,IOSTAT=io_stat,IOMSG=io_msg )
    IF ( io_stat /= 0 ) THEN
      msg = 'Error closing '//TRIM(Filename)//' - '//TRIM(io_msg)
      CALL Inquire_Cleanup(); RETURN
    END IF

    ! Set the return arguments
    IF ( PRESENT(n_Profiles) ) n_Profiles = m

  CONTAINS

    SUBROUTINE Inquire_CleanUp()
      IF ( File_Open(fid) ) THEN
        CLOSE( fid,IOSTAT=io_stat,IOMSG=io_msg )
        ! NOTE: IOSTAT uses the convention 0 == no error; comparing against the
        ! Message_Handler SUCCESS code here was a bug in the original.
        IF ( io_stat /= 0 ) &
          msg = TRIM(msg)//'; Error closing input file during error cleanup - '//TRIM(io_msg)
      END IF
      err_stat = FAILURE
      CALL Display_Message( ROUTINE_NAME, msg, err_stat )
    END SUBROUTINE Inquire_CleanUp

  END FUNCTION CRTM_GeometryInfo_InquireFile


!------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
!       CRTM_GeometryInfo_ReadFile
!
! PURPOSE:
!       Function to read CRTM GeometryInfo object files.
!
! CALLING SEQUENCE:
!       Error_Status = CRTM_GeometryInfo_ReadFile( &
!                        Filename               , &
!                        GeometryInfo           , &
!                        Quiet      = Quiet     , &
!                        n_Profiles = n_Profiles  )
!
! INPUTS:
!       Filename:     Character string specifying the name of a
!                     GeometryInfo data file to read.
!                     UNITS:      N/A
!                     TYPE:       CHARACTER(*)
!                     DIMENSION:  Scalar
!                     ATTRIBUTES: INTENT(IN)
!
! OUTPUTS:
!       GeometryInfo: CRTM GeometryInfo object array containing the
!                     data read from file.
!                     UNITS:      N/A
!                     TYPE:       CRTM_GeometryInfo_type
!                     DIMENSION:  Rank-1
!                     ATTRIBUTES: INTENT(OUT), ALLOCATABLE
!
! OPTIONAL INPUTS:
!       Quiet:        Set this logical argument to suppress INFORMATION
!                     messages being printed to stdout.
!                     If == .FALSE., INFORMATION messages are OUTPUT [DEFAULT];
!                        == .TRUE.,  INFORMATION messages are SUPPRESSED.
!                     If not specified, default is .FALSE.
!                     UNITS:      N/A
!                     TYPE:       LOGICAL
!                     DIMENSION:  Scalar
!                     ATTRIBUTES: INTENT(IN), OPTIONAL
!
! OPTIONAL OUTPUTS:
!       n_Profiles:   The number of profiles for which data was read.
!                     UNITS:      N/A
!                     TYPE:       INTEGER
!                     DIMENSION:  Scalar
!                     ATTRIBUTES: OPTIONAL, INTENT(OUT)
!
! FUNCTION RESULT:
!       Error_Status: The return value is an integer defining the error status.
!                     The error codes are defined in the Message_Handler module.
!                     If == SUCCESS, the file read was successful
!                        == FAILURE, an unrecoverable error occurred.
!                     UNITS:      N/A
!                     TYPE:       INTEGER
!                     DIMENSION:  Scalar
!
!:sdoc-:
!------------------------------------------------------------------------------

  FUNCTION CRTM_GeometryInfo_ReadFile( &
    Filename    , &  ! Input
    GeometryInfo, &  ! Output
    Quiet       , &  ! Optional input
    n_Profiles  , &  ! Optional output
    Debug       ) &  ! Optional input (Debug output control)
  RESULT( err_stat )
    ! Arguments
    CHARACTER(*),                              INTENT(IN)  :: Filename
    TYPE(CRTM_GeometryInfo_type), ALLOCATABLE, INTENT(OUT) :: GeometryInfo(:)
    LOGICAL,      OPTIONAL,                    INTENT(IN)  :: Quiet
    INTEGER,      OPTIONAL,                    INTENT(OUT) :: n_Profiles
    LOGICAL,      OPTIONAL,                    INTENT(IN)  :: Debug
    ! Function result
    INTEGER :: err_stat
    ! Function parameters
    ! NOTE: corrected from 'CRTM_Geometry_ReadFile' so error messages
    ! identify the right routine.
    CHARACTER(*), PARAMETER :: ROUTINE_NAME = 'CRTM_GeometryInfo_ReadFile'
    ! Function variables
    CHARACTER(ML) :: msg
    CHARACTER(ML) :: io_msg
    CHARACTER(ML) :: alloc_msg
    INTEGER :: io_stat
    INTEGER :: alloc_stat
    LOGICAL :: noisy
    INTEGER :: fid
    INTEGER :: m, n_input_profiles

    ! Set up
    err_stat = SUCCESS
    ! ...Blank the allocation message. The ERRMSG= specifier is disabled below
    !    (compiler compatibility), so without this the TRIM(alloc_msg) in the
    !    error messages would reference an undefined string.
    alloc_msg = ''
    ! ...Check Quiet argument
    noisy = .TRUE.
    IF ( PRESENT(Quiet) ) noisy = .NOT. Quiet
    ! ...Override Quiet settings if debug set.
    IF ( PRESENT(Debug) ) noisy = Debug

    ! Open the file
    err_stat = Open_Binary_File( Filename, fid )
    IF ( err_stat /= SUCCESS ) THEN
      msg = 'Error opening '//TRIM(Filename)
      CALL Read_Cleanup(); RETURN
    END IF

    ! Read the dimensions
    READ( fid,IOSTAT=io_stat,IOMSG=io_msg ) n_input_profiles
    IF ( io_stat /= 0 ) THEN
      msg = 'Error reading dimension from '//TRIM(Filename)//' - '//TRIM(io_msg)
      CALL Read_Cleanup(); RETURN
    END IF
    ! ...Allocate the return structure array
    !ALLOCATE(GeometryInfo(n_input_profiles), STAT=alloc_stat, ERRMSG=alloc_msg)
    ALLOCATE(GeometryInfo(n_input_profiles), STAT=alloc_stat)
    IF ( alloc_stat /= 0 ) THEN
      msg = 'Error allocating GeometryInfo array - '//TRIM(alloc_msg)
      CALL Read_Cleanup(); RETURN
    END IF

    ! Loop over all the profiles
    GeometryInfo_Loop: DO m = 1, n_input_profiles
      err_stat = Read_Record( fid, GeometryInfo(m) )
      IF ( err_stat /= SUCCESS ) THEN
        WRITE( msg,'("Error reading GeometryInfo element #",i0," from ",a)' ) m, TRIM(Filename)
        CALL Read_Cleanup(); RETURN
      END IF
    END DO GeometryInfo_Loop

    ! Close the file
    CLOSE( fid,IOSTAT=io_stat,IOMSG=io_msg )
    IF ( io_stat /= 0 ) THEN
      msg = 'Error closing '//TRIM(Filename)//' - '//TRIM(io_msg)
      CALL Read_Cleanup(); RETURN
    END IF

    ! Set the return values
    IF ( PRESENT(n_Profiles) ) n_Profiles = n_input_profiles

    ! Output an info message
    IF ( noisy ) THEN
      WRITE( msg,'("Number of profiles read from ",a,": ",i0)' ) &
             TRIM(Filename), n_input_profiles
      CALL Display_Message( ROUTINE_NAME, msg, INFORMATION )
    END IF

  CONTAINS

    SUBROUTINE Read_CleanUp()
      IF ( File_Open(fid) ) THEN
        CLOSE( fid,IOSTAT=io_stat,IOMSG=io_msg )
        IF ( io_stat /= 0 ) &
          msg = TRIM(msg)//'; Error closing input file during error cleanup - '//TRIM(io_msg)
      END IF
      IF ( ALLOCATED(GeometryInfo) ) THEN
        !DEALLOCATE(GeometryInfo, STAT=alloc_stat, ERRMSG=alloc_msg)
        DEALLOCATE(GeometryInfo, STAT=alloc_stat)
        IF ( alloc_stat /= 0 ) &
          msg = TRIM(msg)//'; Error deallocating GeometryInfo array during error cleanup - '//&
                TRIM(alloc_msg)
      END IF
      err_stat = FAILURE
      CALL Display_Message( ROUTINE_NAME, msg, err_stat )
    END SUBROUTINE Read_CleanUp

  END FUNCTION CRTM_GeometryInfo_ReadFile


!------------------------------------------------------------------------------
!:sdoc+:
!
! NAME:
!       CRTM_GeometryInfo_WriteFile
!
! PURPOSE:
!       Function to write CRTM GeometryInfo object files.
!
! CALLING SEQUENCE:
!       Error_Status = CRTM_GeometryInfo_WriteFile( &
!                        Filename     , &
!                        GeometryInfo , &
!                        Quiet = Quiet  )
!
! INPUTS:
!       Filename:     Character string specifying the name of the
!                     GeometryInfo format data file to write.
!                     UNITS:      N/A
!                     TYPE:       CHARACTER(*)
!                     DIMENSION:  Scalar
!                     ATTRIBUTES: INTENT(IN)
!
!       GeometryInfo: CRTM GeometryInfo object array containing the
!                     data to write.
!                     UNITS:      N/A
!                     TYPE:       CRTM_GeometryInfo_type
!                     DIMENSION:  Rank-1
!                     ATTRIBUTES: INTENT(IN)
!
! OPTIONAL INPUTS:
!       Quiet:        Set this logical argument to suppress INFORMATION
!                     messages being printed to stdout.
!                     If == .FALSE., INFORMATION messages are OUTPUT [DEFAULT];
!                        == .TRUE.,  INFORMATION messages are SUPPRESSED.
!                     If not specified, default is .FALSE.
!                     UNITS:      N/A
!                     TYPE:       LOGICAL
!                     DIMENSION:  Scalar
!                     ATTRIBUTES: INTENT(IN), OPTIONAL
!
! FUNCTION RESULT:
!       Error_Status: The return value is an integer defining the error status.
!                     The error codes are defined in the Message_Handler module.
!                     If == SUCCESS, the file write was successful
!                        == FAILURE, an unrecoverable error occurred.
!                     UNITS:      N/A
!                     TYPE:       INTEGER
!                     DIMENSION:  Scalar
!
! SIDE EFFECTS:
!       - If the output file already exists, it is overwritten.
!       - If an error occurs during *writing*, the output file is deleted
!         before returning to the calling routine.
!
!:sdoc-:
!------------------------------------------------------------------------------

  FUNCTION CRTM_GeometryInfo_WriteFile( &
    Filename    , &  ! Input
    GeometryInfo, &  ! Input
    Quiet       , &  ! Optional input
    Debug       ) &  ! Optional input (Debug output control)
  RESULT( err_stat )
    ! Arguments
    CHARACTER(*),                 INTENT(IN) :: Filename
    TYPE(CRTM_GeometryInfo_type), INTENT(IN) :: GeometryInfo(:)
    LOGICAL,      OPTIONAL,       INTENT(IN) :: Quiet
    LOGICAL,      OPTIONAL,       INTENT(IN) :: Debug
    ! Function result
    INTEGER :: err_stat
    ! Function parameters
    CHARACTER(*), PARAMETER :: ROUTINE_NAME = 'CRTM_GeometryInfo_WriteFile'
    ! Function variables
    CHARACTER(ML) :: msg
    CHARACTER(ML) :: io_msg
    LOGICAL :: noisy
    INTEGER :: io_stat
    INTEGER :: fid
    INTEGER :: m, n_profiles

    ! Set up
    err_stat = SUCCESS
    ! ...Check Quiet argument
    noisy = .TRUE.
    IF ( PRESENT(Quiet) ) noisy = .NOT. Quiet
    ! ...Override Quiet settings if debug set.
    IF ( PRESENT(Debug) ) noisy = Debug

    ! Open the file
    err_stat = Open_Binary_File( Filename, fid, For_Output = .TRUE. )
    IF ( err_stat /= SUCCESS ) THEN
      msg = 'Error opening '//TRIM(Filename)
      CALL Write_Cleanup(); RETURN
    END IF

    ! Write the dimensions
    n_profiles = SIZE(GeometryInfo)
    ! NOTE: IOMSG= added; the original omitted it but still interpolated
    ! io_msg (then undefined) into the error message below.
    WRITE( fid,IOSTAT=io_stat,IOMSG=io_msg ) n_profiles
    IF ( io_stat /= 0 ) THEN
      msg = 'Error writing data dimension to '//TRIM(Filename)//'- '//TRIM(io_msg)
      CALL Write_Cleanup(); RETURN
    END IF

    ! Write the data
    GeometryInfo_Loop: DO m = 1, n_profiles
      err_stat = Write_Record( fid, GeometryInfo(m) )
      IF ( err_stat /= SUCCESS ) THEN
        WRITE( msg,'("Error writing GeometryInfo element #",i0," to ",a)' ) m, TRIM(Filename)
        CALL Write_Cleanup(); RETURN
      END IF
    END DO GeometryInfo_Loop

    ! Close the file (if error, no delete)
    CLOSE( fid,STATUS='KEEP',IOSTAT=io_stat,IOMSG=io_msg )
    IF ( io_stat /= 0 ) THEN
      msg = 'Error closing '//TRIM(Filename)//'- '//TRIM(io_msg)
      CALL Write_Cleanup(); RETURN
    END IF

    ! Output an info message
    IF ( noisy ) THEN
      WRITE( msg,'("Number of profiles written to ",a,": ",i0)' ) TRIM(Filename), n_profiles
      CALL Display_Message( ROUTINE_NAME, msg, INFORMATION )
    END IF

  CONTAINS

    SUBROUTINE Write_CleanUp()
      IF ( File_Open(fid) ) THEN
        ! Delete the partially written file on error
        CLOSE( fid,STATUS=WRITE_ERROR_STATUS,IOSTAT=io_stat,IOMSG=io_msg )
        IF ( io_stat /= 0 ) &
          msg = TRIM(msg)//'; Error deleting output file during error cleanup - '//TRIM(io_msg)
      END IF
      err_stat = FAILURE
      CALL Display_Message( ROUTINE_NAME, msg, err_stat )
    END SUBROUTINE Write_CleanUp

  END FUNCTION CRTM_GeometryInfo_WriteFile


!##################################################################################
!##################################################################################
!##                                                                              ##
!##                          ## PRIVATE MODULE ROUTINES ##                       ##
!##                                                                              ##
!##################################################################################
!##################################################################################

!--------------------------------------------------------------------------------
!
! NAME:
!       CRTM_GeometryInfo_Equal
!
! PURPOSE:
!       Elemental function to test the equality of two CRTM_GeometryInfo
!       objects. Used in OPERATOR(==) interface block.
!
! CALLING SEQUENCE:
!       is_equal = CRTM_GeometryInfo_Equal( x, y )
!
!         or
!
!       IF ( x == y ) THEN
!         ...
!       END IF
!
! OBJECTS:
!       x, y:      Two CRTM GeometryInfo objects to be compared.
!                  UNITS:      N/A
!                  TYPE:       CRTM_GeometryInfo_type
!                  DIMENSION:  Scalar or any rank
!                  ATTRIBUTES: INTENT(IN)
!
! FUNCTION RESULT:
!       is_equal:  Logical value indicating whether the inputs are equal.
!                  UNITS:      N/A
!                  TYPE:       LOGICAL
!                  DIMENSION:  Same as inputs.
!
!--------------------------------------------------------------------------------

  ELEMENTAL FUNCTION CRTM_GeometryInfo_Equal( x, y ) RESULT( is_equal )
    TYPE(CRTM_GeometryInfo_type), INTENT(IN) :: x, y
    LOGICAL :: is_equal
    ! Equality requires the embedded user Geometry objects AND every
    ! derived component to match (floating point via .EqualTo.)
    is_equal = ( (x%user == y%user ) .AND. &
                 (x%Distance_Ratio        .EqualTo. y%Distance_Ratio       ) .AND. &
                 (x%Sensor_Scan_Radian    .EqualTo. y%Sensor_Scan_Radian   ) .AND. &
                 (x%Sensor_Zenith_Radian  .EqualTo. y%Sensor_Zenith_Radian ) .AND. &
                 (x%Sensor_Azimuth_Radian .EqualTo. y%Sensor_Azimuth_Radian) .AND. &
                 (x%Secant_Sensor_Zenith  .EqualTo. y%Secant_Sensor_Zenith ) .AND. &
                 (x%Trans_Zenith_Radian   .EqualTo. y%Trans_Zenith_Radian  ) .AND. &
                 (x%Secant_Trans_Zenith   .EqualTo. y%Secant_Trans_Zenith  ) .AND. &
                 (x%Cosine_Sensor_Zenith  .EqualTo. y%Cosine_Sensor_Zenith ) .AND. &
                 (x%Source_Zenith_Radian  .EqualTo. y%Source_Zenith_Radian ) .AND. &
                 (x%Source_Azimuth_Radian .EqualTo. y%Source_Azimuth_Radian) .AND. &
                 (x%Secant_Source_Zenith  .EqualTo. y%Secant_Source_Zenith ) .AND. &
                 (x%Flux_Zenith_Radian    .EqualTo. y%Flux_Zenith_Radian   ) .AND. &
                 (x%Secant_Flux_Zenith    .EqualTo. y%Secant_Flux_Zenith   ) .AND. &
                 (x%AU_ratio2             .EqualTo. y%AU_ratio2            ) )
  END FUNCTION CRTM_GeometryInfo_Equal


!--------------------------------------------------------------------------------
!
! NAME:
!       CRTM_GeometryInfo_Subtract
!
! PURPOSE:
!       Pure function to subtract two CRTM GeometryInfo objects.
!       Used in OPERATOR(-) interface block.
!
! CALLING SEQUENCE:
!       gidiff = CRTM_GeometryInfo_Subtract( gi1, gi2 )
!
!         or
!
!       gidiff = gi1 - gi2
!
! INPUTS:
!       gi1, gi2:  The GeometryInfo objects to difference.
!                  UNITS:      N/A
!                  TYPE:       CRTM_GeometryInfo_type
!                  DIMENSION:  Scalar
!                  ATTRIBUTES: INTENT(IN)
!
! RESULT:
!       gidiff:    GeometryInfo object containing the differenced components.
!                  UNITS:      N/A
!                  TYPE:       CRTM_GeometryInfo_type
!                  DIMENSION:  Scalar
!
!--------------------------------------------------------------------------------

  ELEMENTAL FUNCTION CRTM_GeometryInfo_Subtract( gi1, gi2 ) RESULT( gidiff )
    TYPE(CRTM_GeometryInfo_type), INTENT(IN) :: gi1, gi2
    TYPE(CRTM_GeometryInfo_type) :: gidiff

    ! Copy the first structure
    gidiff = gi1

    ! And subtract the second one's components from it
    ! ...Contained objects
    gidiff%user = gidiff%user - gi2%user
    ! ...Individual components
    gidiff%Distance_Ratio        = gidiff%Distance_Ratio        - gi2%Distance_Ratio
    gidiff%Sensor_Scan_Radian    = gidiff%Sensor_Scan_Radian    - gi2%Sensor_Scan_Radian
    gidiff%Sensor_Zenith_Radian  = gidiff%Sensor_Zenith_Radian  - gi2%Sensor_Zenith_Radian
    gidiff%Sensor_Azimuth_Radian = gidiff%Sensor_Azimuth_Radian - gi2%Sensor_Azimuth_Radian
    gidiff%Secant_Sensor_Zenith  = gidiff%Secant_Sensor_Zenith  - gi2%Secant_Sensor_Zenith
    gidiff%Cosine_Sensor_Zenith  = gidiff%Cosine_Sensor_Zenith  - gi2%Cosine_Sensor_Zenith
    gidiff%Trans_Zenith_Radian   = gidiff%Trans_Zenith_Radian   - gi2%Trans_Zenith_Radian
    gidiff%Secant_Trans_Zenith   = gidiff%Secant_Trans_Zenith   - gi2%Secant_Trans_Zenith
    gidiff%Source_Zenith_Radian  = gidiff%Source_Zenith_Radian  - gi2%Source_Zenith_Radian
    gidiff%Source_Azimuth_Radian = gidiff%Source_Azimuth_Radian - gi2%Source_Azimuth_Radian
    gidiff%Secant_Source_Zenith  = gidiff%Secant_Source_Zenith  - gi2%Secant_Source_Zenith
    gidiff%Flux_Zenith_Radian    = gidiff%Flux_Zenith_Radian    - gi2%Flux_Zenith_Radian
    gidiff%Secant_Flux_Zenith    = gidiff%Secant_Flux_Zenith    - gi2%Secant_Flux_Zenith
    gidiff%AU_ratio2             = gidiff%AU_ratio2             - gi2%AU_ratio2

  END FUNCTION CRTM_GeometryInfo_Subtract


!----------------------------------------------------------------------------------
!
! NAME:
!       Read_Record
!
! PURPOSE:
!       Utility function to read a single GeometryInfo data record.
!
! CALLING SEQUENCE:
!       Error_Status = Read_Record( FileID, GeometryInfo )
!
! INPUTS:
!       FileID:       Logical unit number from which to read data.
!                     UNITS:      N/A
!                     TYPE:       INTEGER
!                     DIMENSION:  Scalar
!                     ATTRIBUTES: INTENT(IN)
!
! OUTPUTS:
!       GeometryInfo: CRTM GeometryInfo object containing the data read in.
!                     UNITS:      N/A
!                     TYPE:       CRTM_GeometryInfo_type
!                     DIMENSION:  Scalar
!                     ATTRIBUTES: INTENT(OUT)
!
! FUNCTION RESULT:
!       Error_Status: The return value is an integer defining the error status.
!                     The error codes are defined in the Message_Handler module.
!                     If == SUCCESS, the read was successful
!                        == FAILURE, an unrecoverable error occurred.
!                     UNITS:      N/A
!                     TYPE:       INTEGER
!                     DIMENSION:  Scalar
!
!----------------------------------------------------------------------------------

  FUNCTION Read_Record( fid, ginfo ) RESULT( err_stat )
    ! Arguments
    INTEGER,                      INTENT(IN)  :: fid
    TYPE(CRTM_GeometryInfo_type), INTENT(OUT) :: ginfo
    ! Function result
    INTEGER :: err_stat
    ! Function parameters
    CHARACTER(*), PARAMETER :: ROUTINE_NAME = 'CRTM_GeometryInfo_ReadFile(Record)'
    ! Function variables
    CHARACTER(ML) :: msg
    CHARACTER(ML) :: io_msg
    INTEGER :: io_stat

    ! Set up
    err_stat = SUCCESS

    ! Read the embedded Geometry structure
    err_stat = CRTM_Geometry_ReadRecord( fid, ginfo%user )
    IF ( err_stat /= SUCCESS ) THEN
      msg = 'Error reading embedded Geometry data'
      CALL Read_Record_Cleanup(); RETURN
    END IF

    ! Read the data record.
    ! NOTE: component order must match Write_Record exactly.
    READ( fid, IOSTAT=io_stat,IOMSG=io_msg ) &
      ginfo%Distance_Ratio       , &
      ginfo%Sensor_Scan_Radian   , &
      ginfo%Sensor_Zenith_Radian , &
      ginfo%Sensor_Azimuth_Radian, &
      ginfo%Secant_Sensor_Zenith , &
      ginfo%Cosine_Sensor_Zenith , &
      ginfo%Trans_Zenith_Radian  , &
      ginfo%Secant_Trans_Zenith  , &
      ginfo%Source_Zenith_Radian , &
      ginfo%Source_Azimuth_Radian, &
      ginfo%Secant_Source_Zenith , &
      ginfo%Flux_Zenith_Radian   , &
      ginfo%Secant_Flux_Zenith   , &
      ginfo%AU_ratio2
    IF ( io_stat /= 0 ) THEN
      msg = 'Error reading GeometryInfo data - '//TRIM(io_msg)
      CALL Read_Record_Cleanup(); RETURN
    END IF

  CONTAINS

    SUBROUTINE Read_Record_Cleanup()
      CALL CRTM_GeometryInfo_Destroy( ginfo )
      CLOSE( fid,IOSTAT=io_stat,IOMSG=io_msg )
      ! NOTE: IOSTAT convention is 0 == no error; the original compared
      ! against the Message_Handler SUCCESS code.
      IF ( io_stat /= 0 ) &
        msg = TRIM(msg)//'; Error closing file during error cleanup - '//TRIM(io_msg)
      err_stat = FAILURE
      CALL Display_Message( ROUTINE_NAME, msg, err_stat )
    END SUBROUTINE Read_Record_Cleanup

  END FUNCTION Read_Record


!----------------------------------------------------------------------------------
!
! NAME:
!       Write_Record
!
! PURPOSE:
!       Function to write a single GeometryInfo data record.
!
! CALLING SEQUENCE:
!       Error_Status = Write_Record( FileID, GeometryInfo )
!
! INPUTS:
!       FileID:       Logical unit number to which data is written.
!                     UNITS:      N/A
!                     TYPE:       INTEGER
!                     DIMENSION:  Scalar
!                     ATTRIBUTES: INTENT(IN)
!
!       GeometryInfo: CRTM GeometryInfo object containing the data to write.
!                     UNITS:      N/A
!                     TYPE:       CRTM_GeometryInfo_type
!                     DIMENSION:  Scalar
!                     ATTRIBUTES: INTENT(IN)
!
! FUNCTION RESULT:
!       Error_Status: The return value is an integer defining the error status.
!                     The error codes are defined in the Message_Handler module.
!                     If == SUCCESS, the record write was successful
!                        == FAILURE, an unrecoverable error occurred.
!                     UNITS:      N/A
!                     TYPE:       INTEGER
!                     DIMENSION:  Scalar
!
!----------------------------------------------------------------------------------

  FUNCTION Write_Record( fid, ginfo ) RESULT( err_stat )
    ! Arguments
    INTEGER,                      INTENT(IN) :: fid
    TYPE(CRTM_GeometryInfo_type), INTENT(IN) :: ginfo
    ! Function result
    INTEGER :: err_stat
    ! Function parameters
    CHARACTER(*), PARAMETER :: ROUTINE_NAME = 'CRTM_GeometryInfo_WriteFile(Record)'
    ! Function variables
    CHARACTER(ML) :: msg
    CHARACTER(ML) :: io_msg
    INTEGER :: io_stat

    ! Set up
    err_stat = SUCCESS

    ! Write the embedded Geometry structure
    err_stat = CRTM_Geometry_WriteRecord( fid, ginfo%user )
    IF ( err_stat /= SUCCESS ) THEN
      msg = 'Error writing embedded Geometry data'
      CALL Write_Record_Cleanup(); RETURN
    END IF

    ! Write the data record.
    ! NOTE: component order must match Read_Record exactly.
    WRITE( fid,IOSTAT=io_stat,IOMSG=io_msg ) &
      ginfo%Distance_Ratio       , &
      ginfo%Sensor_Scan_Radian   , &
      ginfo%Sensor_Zenith_Radian , &
      ginfo%Sensor_Azimuth_Radian, &
      ginfo%Secant_Sensor_Zenith , &
      ginfo%Cosine_Sensor_Zenith , &
      ginfo%Trans_Zenith_Radian  , &
      ginfo%Secant_Trans_Zenith  , &
      ginfo%Source_Zenith_Radian , &
      ginfo%Source_Azimuth_Radian, &
      ginfo%Secant_Source_Zenith , &
      ginfo%Flux_Zenith_Radian   , &
      ginfo%Secant_Flux_Zenith   , &
      ginfo%AU_ratio2
    IF ( io_stat /= 0 ) THEN
      msg = 'Error writing GeometryInfo data - '//TRIM(io_msg)
      CALL Write_Record_Cleanup(); RETURN
    END IF

  CONTAINS

    SUBROUTINE Write_Record_Cleanup()
      ! Delete the partially written file on error
      CLOSE( fid,STATUS=WRITE_ERROR_STATUS,IOSTAT=io_stat,IOMSG=io_msg )
      ! NOTE: IOSTAT convention is 0 == no error; the original compared
      ! against the Message_Handler SUCCESS code.
      IF ( io_stat /= 0 ) &
        msg = TRIM(msg)//'; Error closing file during error cleanup'
      err_stat = FAILURE
      CALL Display_Message( ROUTINE_NAME, TRIM(msg), err_stat )
    END SUBROUTINE Write_Record_Cleanup

  END FUNCTION Write_Record

END MODULE CRTM_GeometryInfo_Define
<?php

namespace app\backend\model;

use think\Model;

/**
 * Item attribute key model.
 *
 * Each attribute key owns many attribute values.
 */
class ItemAttrKey extends Model
{
    /**
     * One-to-many relation to the attribute values, keyed by `attr_key_id`.
     *
     * @return \think\model\relation\HasMany
     */
    public function vals()
    {
        return $this->hasMany('ItemAttrVal', 'attr_key_id');
    }
}
# investment 1. 技术分析 短期交易策略 1. 基础分析 1. 公司金融 corporate finance 1. investment 七个问题 1. 如何配置财富 1. 什么是好的股票 risk/benefit 1. 股票定价是否合理 capm & apt模型 1. 什么样的基金经理 1. 主动 无效的市场中寻找被错误定价的资产 1. 被动 有效的市场 1. 如何评估一个基金的表现 portfolio performance evaluation ? 1. 股票和债券的定价 stock pricing/ bond pricing, YTM and term structure. 1. 金融衍生品 derivatives 股指期货 ## 资本市场 基本要素 1. what 作用 1. 资源配置 1. 价格发现 1. 消费择时 1. 风险分担 公司发行股票风险转移到股民 1. 实物资产(real asset) NPV >= 0 和 1. 金融资产 (financial asset) NPV = 0 定价方法区别 1. 股票 1. fixed income securities 政府、银行、大型企业 1. derivatives 1. forward, future, swap, option 1. foreign exchange 1. where 1. 直接市场 direct search market 1. 中介市场 broker 赚取服务费 1. IPO 一级市场 1. 二级市场 1. 二手市场 dealer 赚取价差 1. OTC 场外交易 灵活,从多个dealer 1. 拍卖市场 auction market 1. 纽交所 1. who 1. 机构投资 1. bank 1. insurance company 1. investment company 1. 个人投资者 1. hedger 最保守 1. speculator 投机者 1. arbitrageur 套利者 1. how 1. which 如何评价一个产品 1. 谁发行的 1. 结构 1. 怎样发行 1. 持有者 1. 风险水平 ## 收益与风险 总收益 R = (D + P) / P0,净收益 r = R - 1 1. 利息收益 D / P0 1. 资本收益 (P - P0) / P0 持有期资本利得 复利 1. 几何平均年化收益 r 1. 算数平均 r 整个市场的收益率 市场指标 1. price-weighted index 价格加权指数 道琼斯指数、日经225 价格平均方式计算(高价格股票权重大) 1. value-weighted index 价值加权指数 总市值比例 * 100 上证指数、深成指数 1. equally-weighted index 等权重指数 衡量平均收益率 风险 uncertainty, ambiguity 债券:无风险利率 股票:有风险利率 股权溢价: 股票额外回报的期望值 夏普比例 Sharpe Ratio 二次效用方程 风险厌恶系数 均值average 方差variance 偏度skewness 峰度kurtosis Value At Risk 在险价值
/*
 * By-Health Front-end Team (https://www.by-health.com/)
 *
 * Copyright © 2016-present By-Health Co Ltd. All rights reserved.
 */
import { Options } from 'http-proxy-middleware';

/** http-proxy-middleware options extended with the path(s) they apply to. */
declare interface ProxyOptions extends Options {
  context: string | string[];
}

// Configure proxy middleware
// https://github.com/chimurai/http-proxy-middleware
const proxies: ProxyOptions[] = [
  // Example entry:
  // {
  //   context: '/',
  //   target: process.env.PROXY_HOST,
  //   changeOrigin: true,
  // },
];

export default proxies;
# Haraka Haraka is a secure and efficient hash function, designed specifically to process short inputs and be very fast on modern platforms which support AES-NI. One of the main applications for such a design is the use in hash-based signature schemes like XMSS and SPHINCS. ## Features - Supports AES-NI - Low Latency - High performance (below 1 cycle/byte on Skylake) This repository provides a reference implementation and parts of the software used for the security analysis. For more information see our paper. ## Reference Haraka - Efficient Short-Input Hashing for Post-Quantum Applications Stefan Kölbl and Martin M. Lauridsen and Florian Mendel and Christian Rechberger https://eprint.iacr.org/2016/098
<?php

declare(strict_types=1);

namespace MsgPhp\Domain\Entity\Features;

use MsgPhp\Domain\Entity\Fields\EnabledField;
use MsgPhp\Domain\Event\{DisableEvent, EnableEvent};

/**
 * Adds enable/disable behaviour on top of the `enabled` field.
 *
 * @author Roland Franssen <franssen.roland@gmail.com>
 */
trait CanBeEnabled
{
    use EnabledField;

    public function enable(): void
    {
        $this->enabled = true;
    }

    public function disable(): void
    {
        $this->enabled = false;
    }

    /**
     * Applies an enable event; returns true only when the state changed.
     */
    private function handleEnableEvent(EnableEvent $event): bool
    {
        if ($this->enabled) {
            return false;
        }

        $this->enable();

        return true;
    }

    /**
     * Applies a disable event; returns true only when the state changed.
     */
    private function handleDisableEvent(DisableEvent $event): bool
    {
        if (!$this->enabled) {
            return false;
        }

        $this->disable();

        return true;
    }
}
#include <iostream> #include <cstring> // evolucao da string.h e diferente da string using namespace std; int main (int argc, char** argv){ /* funcoes: - strcpy(origem, destino); - strncpy(origem, destino, quantidade de char que quero copiar) - strcmp(str1, str2); // retorna 0 se forem iguais - strncmp(str1, str2, tamanho); - strlen (str); // retorna o tamanho da string - strcat(str1, str2); // concatena a str2 na str1 - strncat (str1, str2, quantidade); - strchr(str, char); // procura a primeira ocorrencia do char na string e retorna sua posicao - strcspn (str, chave); // pesquisa uma chave na string ex: char chave[] = "yt+w"; // ele vai procurar o primeiro desses char na string - strrchr (str, char); // retorna a ultima ocorrencia do char e retorna sua posicao - strtok(str, chave); // se ele encontrar um divisor ele divide a string ex: char chave[] = ",-"; int c = strtok(str, chave); while(c!=NULL){ cout << c << endl; c = strtok(NULL, chave); } */ return 0; }
#if defined(Hiro_HorizontalLayout)

// Layout that arranges its child sizables in a single horizontal row.
// Each appended child carries a requested size and the spacing that follows it.
struct mHorizontalLayout : mLayout {
  using type = mHorizontalLayout;
  using mLayout::append;
  using mLayout::remove;

  // Appends a child with the given size; `spacing` is the gap after this child.
  auto append(sSizable sizable, Size size, signed spacing = 5) -> type&;
  // Computed minimum size of the whole layout.
  auto minimumSize() const -> Size override;
  // Updates the size/spacing of an already-appended child.
  auto modify(sSizable sizable, Size size, signed spacing = 5) -> type&;
  auto remove(sSizable sizable) -> type& override;
  auto reset() -> type& override;
  // Cross-axis alignment factor in [0.0, 1.0]; default is centered (0.5).
  auto setAlignment(double alignment = 0.5) -> type&;
  auto setEnabled(bool enabled = true) -> type& override;
  auto setFont(const Font& font = {}) -> type& override;
  auto setGeometry(Geometry geometry) -> type& override;
  // Outer margin around the entire row.
  auto setMargin(signed margin = 0) -> type&;
  // Default gap between children.
  auto setSpacing(signed spacing = 5) -> type&;
  // NOTE(review): unlike the setters above this one is not marked `override` -- confirm intended.
  auto setVisible(bool visible = true) -> type&;

  // Layout-wide settings mirrored by the setters above.
  struct Settings {
    double alignment = 0.5;
    signed margin = 0;
    signed spacing = 5;
  } settings;

  // Per-child cached metrics, one entry per appended sizable.
  struct Property {
    signed width;
    signed height;
    signed spacing;
  };
  vector<Property> properties;
};

#endif
//! ## FTP transfer //! //! `ftp_transfer` is the module which provides the implementation for the FTP/FTPS file transfer /** * MIT License * * termscp - Copyright (c) 2021 Christian Visintin * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ use super::{ FileTransfer, FileTransferError, FileTransferErrorType, FileTransferResult, ProtocolParams, }; use crate::fs::{FsDirectory, FsEntry, FsFile, UnixPex}; use crate::utils::fmt::shadow_password; use crate::utils::path; // Includes use std::convert::TryFrom; use std::io::{Read, Write}; use std::path::{Path, PathBuf}; use std::time::UNIX_EPOCH; use suppaftp::native_tls::TlsConnector; use suppaftp::{ list::{File, PosixPexQuery}, status::FILE_UNAVAILABLE, types::{FileType, Response}, FtpError, FtpStream, }; /// ## FtpFileTransfer /// /// Ftp file transfer struct pub struct FtpFileTransfer { stream: Option<FtpStream>, ftps: bool, } impl FtpFileTransfer { /// ### new /// /// Instantiates a new `FtpFileTransfer` pub fn new(ftps: bool) -> FtpFileTransfer { FtpFileTransfer { stream: None, ftps } } /// ### resolve /// /// Fix provided path; on Windows fixes the backslashes, converting them to slashes /// While on POSIX does nothing #[cfg(target_os = "windows")] fn resolve(p: &Path) -> PathBuf { PathBuf::from(path_slash::PathExt::to_slash_lossy(p).as_str()) } #[cfg(target_family = "unix")] fn resolve(p: &Path) -> PathBuf { p.to_path_buf() } /// ### parse_list_lines /// /// Parse all lines of LIST command output and instantiates a vector of FsEntry from it. 
/// This function also converts from `suppaftp::list::File` to `FsEntry` fn parse_list_lines(&mut self, path: &Path, lines: Vec<String>) -> Vec<FsEntry> { // Iter and collect lines .into_iter() .map(File::try_from) // Try to convert to file .flatten() // Remove errors .map(|x| { let mut abs_path: PathBuf = path.to_path_buf(); abs_path.push(x.name()); match x.is_directory() { true => FsEntry::Directory(FsDirectory { name: x.name().to_string(), abs_path, last_access_time: x.modified(), last_change_time: x.modified(), creation_time: x.modified(), symlink: None, user: x.uid(), group: x.gid(), unix_pex: Some(Self::query_unix_pex(&x)), }), false => FsEntry::File(FsFile { name: x.name().to_string(), size: x.size(), ftype: abs_path .extension() .map(|ext| String::from(ext.to_str().unwrap_or(""))), last_access_time: x.modified(), last_change_time: x.modified(), creation_time: x.modified(), user: x.uid(), group: x.gid(), symlink: Self::get_symlink_entry(path, x.symlink()), abs_path, unix_pex: Some(Self::query_unix_pex(&x)), }), } }) .collect() } /// ### get_symlink_entry /// /// Get FsEntry from symlink fn get_symlink_entry(wrkdir: &Path, link: Option<&Path>) -> Option<Box<FsEntry>> { match link { None => None, Some(p) => { // Make abs path let abs_path: PathBuf = path::absolutize(wrkdir, p); Some(Box::new(FsEntry::File(FsFile { name: p .file_name() .map(|x| x.to_str().unwrap_or("").to_string()) .unwrap_or_default(), ftype: abs_path .extension() .map(|ext| String::from(ext.to_str().unwrap_or(""))), size: 0, last_access_time: UNIX_EPOCH, last_change_time: UNIX_EPOCH, creation_time: UNIX_EPOCH, user: None, group: None, symlink: None, unix_pex: None, abs_path, }))) } } } /// ### query_unix_pex /// /// Returns unix pex in tuple of values fn query_unix_pex(f: &File) -> (UnixPex, UnixPex, UnixPex) { ( UnixPex::new( f.can_read(PosixPexQuery::Owner), f.can_write(PosixPexQuery::Owner), f.can_execute(PosixPexQuery::Owner), ), UnixPex::new( f.can_read(PosixPexQuery::Group), 
f.can_write(PosixPexQuery::Group), f.can_execute(PosixPexQuery::Group), ), UnixPex::new( f.can_read(PosixPexQuery::Others), f.can_write(PosixPexQuery::Others), f.can_execute(PosixPexQuery::Others), ), ) } } impl FileTransfer for FtpFileTransfer { /// ### connect /// /// Connect to the remote server fn connect(&mut self, params: &ProtocolParams) -> FileTransferResult<Option<String>> { let params = match params.generic_params() { Some(params) => params, None => return Err(FileTransferError::new(FileTransferErrorType::BadAddress)), }; // Get stream info!("Connecting to {}:{}", params.address, params.port); let mut stream: FtpStream = match FtpStream::connect(format!("{}:{}", params.address, params.port)) { Ok(stream) => stream, Err(err) => { error!("Failed to connect: {}", err); return Err(FileTransferError::new_ex( FileTransferErrorType::ConnectionError, err.to_string(), )); } }; // If SSL, open secure session if self.ftps { info!("Setting up TLS stream..."); let ctx = match TlsConnector::builder() .danger_accept_invalid_certs(true) .danger_accept_invalid_hostnames(true) .build() { Ok(tls) => tls, Err(err) => { error!("Failed to setup TLS stream: {}", err); return Err(FileTransferError::new_ex( FileTransferErrorType::SslError, err.to_string(), )); } }; stream = match stream.into_secure(ctx, params.address.as_str()) { Ok(s) => s, Err(err) => { error!("Failed to setup TLS stream: {}", err); return Err(FileTransferError::new_ex( FileTransferErrorType::SslError, err.to_string(), )); } }; } // Login (use anonymous if credentials are unspecified) let username: String = match &params.username { Some(u) => u.to_string(), None => String::from("anonymous"), }; let password: String = match &params.password { Some(pwd) => pwd.to_string(), None => String::new(), }; info!( "Signin in with username: {}, password: {}", username, shadow_password(password.as_str()) ); if let Err(err) = stream.login(username.as_str(), password.as_str()) { error!("Login failed: {}", err); return 
Err(FileTransferError::new_ex( FileTransferErrorType::AuthenticationFailed, err.to_string(), )); } debug!("Setting transfer type to Binary"); // Initialize file type if let Err(err) = stream.transfer_type(FileType::Binary) { error!("Failed to set transfer type to binary: {}", err); return Err(FileTransferError::new_ex( FileTransferErrorType::ProtocolError, err.to_string(), )); } // Set stream self.stream = Some(stream); info!("Connection successfully established"); // Return OK Ok(self .stream .as_ref() .unwrap() .get_welcome_msg() .map(|x| x.to_string())) } /// ### disconnect /// /// Disconnect from the remote server fn disconnect(&mut self) -> FileTransferResult<()> { info!("Disconnecting from FTP server..."); match &mut self.stream { Some(stream) => match stream.quit() { Ok(_) => { self.stream = None; Ok(()) } Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::ConnectionError, err.to_string(), )), }, None => Err(FileTransferError::new( FileTransferErrorType::UninitializedSession, )), } } /// ### is_connected /// /// Indicates whether the client is connected to remote fn is_connected(&self) -> bool { self.stream.is_some() } /// ### pwd /// /// Print working directory fn pwd(&mut self) -> FileTransferResult<PathBuf> { info!("PWD"); match &mut self.stream { Some(stream) => match stream.pwd() { Ok(path) => Ok(PathBuf::from(path.as_str())), Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::ConnectionError, err.to_string(), )), }, None => Err(FileTransferError::new( FileTransferErrorType::UninitializedSession, )), } } /// ### change_dir /// /// Change working directory fn change_dir(&mut self, dir: &Path) -> FileTransferResult<PathBuf> { let dir: PathBuf = Self::resolve(dir); info!("Changing directory to {}", dir.display()); match &mut self.stream { Some(stream) => match stream.cwd(&dir.as_path().to_string_lossy()) { Ok(_) => Ok(dir), Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::ConnectionError, err.to_string(), )), 
}, None => Err(FileTransferError::new( FileTransferErrorType::UninitializedSession, )), } } /// ### copy /// /// Copy file to destination fn copy(&mut self, _src: &FsEntry, _dst: &Path) -> FileTransferResult<()> { // FTP doesn't support file copy debug!("COPY issues (will fail, since unsupported)"); Err(FileTransferError::new( FileTransferErrorType::UnsupportedFeature, )) } /// ### list_dir /// /// List directory entries fn list_dir(&mut self, path: &Path) -> FileTransferResult<Vec<FsEntry>> { let dir: PathBuf = Self::resolve(path); info!("LIST dir {}", dir.display()); match &mut self.stream { Some(stream) => match stream.list(Some(&dir.as_path().to_string_lossy())) { Ok(lines) => { debug!("Got {} lines in LIST result", lines.len()); // Iterate over entries Ok(self.parse_list_lines(path, lines)) } Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::DirStatFailed, err.to_string(), )), }, None => Err(FileTransferError::new( FileTransferErrorType::UninitializedSession, )), } } /// ### mkdir /// /// In case the directory already exists, it must return an Error of kind `FileTransferErrorType::DirectoryAlreadyExists` fn mkdir(&mut self, dir: &Path) -> FileTransferResult<()> { let dir: PathBuf = Self::resolve(dir); info!("MKDIR {}", dir.display()); match &mut self.stream { Some(stream) => match stream.mkdir(&dir.as_path().to_string_lossy()) { Ok(_) => Ok(()), Err(FtpError::UnexpectedResponse(Response { // Directory already exists code: FILE_UNAVAILABLE, body: _, })) => { error!("Directory {} already exists", dir.display()); Err(FileTransferError::new( FileTransferErrorType::DirectoryAlreadyExists, )) } Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::FileCreateDenied, err.to_string(), )), }, None => Err(FileTransferError::new( FileTransferErrorType::UninitializedSession, )), } } /// ### remove /// /// Remove a file or a directory fn remove(&mut self, fsentry: &FsEntry) -> FileTransferResult<()> { if self.stream.is_none() { return 
Err(FileTransferError::new( FileTransferErrorType::UninitializedSession, )); } info!("Removing entry {}", fsentry.get_abs_path().display()); let wrkdir: PathBuf = self.pwd()?; match fsentry { // Match fs entry... FsEntry::File(file) => { // Go to parent directory if let Some(parent_dir) = file.abs_path.parent() { debug!("Changing wrkdir to {}", parent_dir.display()); self.change_dir(parent_dir)?; } debug!("entry is a file; removing file {}", file.abs_path.display()); // Remove file directly let result = self .stream .as_mut() .unwrap() .rm(file.name.as_ref()) .map(|_| ()) .map_err(|e| { FileTransferError::new_ex(FileTransferErrorType::PexError, e.to_string()) }); // Go to source directory match self.change_dir(wrkdir.as_path()) { Err(err) => Err(err), Ok(_) => result, } } FsEntry::Directory(dir) => { // Get directory files debug!("Entry is a directory; iterating directory entries"); let result = match self.list_dir(dir.abs_path.as_path()) { Ok(files) => { // Remove recursively files debug!("Removing {} entries from directory...", files.len()); for file in files.iter() { if let Err(err) = self.remove(file) { return Err(FileTransferError::new_ex( FileTransferErrorType::PexError, err.to_string(), )); } } // Once all files in directory have been deleted, remove directory debug!("Finally removing directory {}...", dir.name); // Enter parent directory if let Some(parent_dir) = dir.abs_path.parent() { debug!( "Changing wrkdir to {} to delete directory {}", parent_dir.display(), dir.name ); self.change_dir(parent_dir)?; } match self.stream.as_mut().unwrap().rmdir(dir.name.as_str()) { Ok(_) => { debug!("Removed {}", dir.abs_path.display()); Ok(()) } Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::PexError, err.to_string(), )), } } Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::DirStatFailed, err.to_string(), )), }; // Restore directory match self.change_dir(wrkdir.as_path()) { Err(err) => Err(err), Ok(_) => result, } } } } /// ### rename 
/// /// Rename file or a directory fn rename(&mut self, file: &FsEntry, dst: &Path) -> FileTransferResult<()> { let dst: PathBuf = Self::resolve(dst); info!( "Renaming {} to {}", file.get_abs_path().display(), dst.display() ); match &mut self.stream { Some(stream) => { // Get name let src_name: String = match file { FsEntry::Directory(dir) => dir.name.clone(), FsEntry::File(file) => file.name.clone(), }; // Only names are supported match stream.rename(src_name.as_str(), &dst.as_path().to_string_lossy()) { Ok(_) => Ok(()), Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::FileCreateDenied, err.to_string(), )), } } None => Err(FileTransferError::new( FileTransferErrorType::UninitializedSession, )), } } /// ### stat /// /// Stat file and return FsEntry fn stat(&mut self, _path: &Path) -> FileTransferResult<FsEntry> { match &mut self.stream { Some(_) => Err(FileTransferError::new( FileTransferErrorType::UnsupportedFeature, )), None => Err(FileTransferError::new( FileTransferErrorType::UninitializedSession, )), } } /// ### exec /// /// Execute a command on remote host fn exec(&mut self, _cmd: &str) -> FileTransferResult<String> { Err(FileTransferError::new( FileTransferErrorType::UnsupportedFeature, )) } /// ### send_file /// /// Send file to remote /// File name is referred to the name of the file as it will be saved /// Data contains the file data /// Returns file and its size fn send_file( &mut self, _local: &FsFile, file_name: &Path, ) -> FileTransferResult<Box<dyn Write>> { let file_name: PathBuf = Self::resolve(file_name); info!("Sending file {}", file_name.display()); match &mut self.stream { Some(stream) => match stream.put_with_stream(&file_name.as_path().to_string_lossy()) { Ok(writer) => Ok(Box::new(writer)), // NOTE: don't use BufWriter here, since already returned by the library Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::FileCreateDenied, err.to_string(), )), }, None => Err(FileTransferError::new( 
FileTransferErrorType::UninitializedSession, )), } } /// ### recv_file /// /// Receive file from remote with provided name /// Returns file and its size fn recv_file(&mut self, file: &FsFile) -> FileTransferResult<Box<dyn Read>> { info!("Receiving file {}", file.abs_path.display()); match &mut self.stream { Some(stream) => match stream.retr_as_stream(&file.abs_path.as_path().to_string_lossy()) { Ok(reader) => Ok(Box::new(reader)), // NOTE: don't use BufReader here, since already returned by the library Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::NoSuchFileOrDirectory, err.to_string(), )), }, None => Err(FileTransferError::new( FileTransferErrorType::UninitializedSession, )), } } /// ### on_sent /// /// Finalize send method. /// This method must be implemented only if necessary; in case you don't need it, just return `Ok(())` /// The purpose of this method is to finalize the connection with the peer when writing data. /// This is necessary for some protocols such as FTP. /// You must call this method each time you want to finalize the write of the remote file. fn on_sent(&mut self, writable: Box<dyn Write>) -> FileTransferResult<()> { info!("Finalizing put stream"); match &mut self.stream { Some(stream) => match stream.finalize_put_stream(writable) { Ok(_) => Ok(()), Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::ProtocolError, err.to_string(), )), }, None => Err(FileTransferError::new( FileTransferErrorType::UninitializedSession, )), } } /// ### on_recv /// /// Finalize recv method. /// This method must be implemented only if necessary; in case you don't need it, just return `Ok(())` /// The purpose of this method is to finalize the connection with the peer when reading data. /// This mighe be necessary for some protocols. /// You must call this method each time you want to finalize the read of the remote file. 
fn on_recv(&mut self, readable: Box<dyn Read>) -> FileTransferResult<()> { info!("Finalizing get"); match &mut self.stream { Some(stream) => match stream.finalize_retr_stream(readable) { Ok(_) => Ok(()), Err(err) => Err(FileTransferError::new_ex( FileTransferErrorType::ProtocolError, err.to_string(), )), }, None => Err(FileTransferError::new( FileTransferErrorType::UninitializedSession, )), } } } #[cfg(test)] mod tests { use super::*; use crate::filetransfer::params::GenericProtocolParams; use crate::utils::file::open_file; #[cfg(feature = "with-containers")] use crate::utils::test_helpers::write_file; use crate::utils::test_helpers::{create_sample_file_entry, make_fsentry}; use pretty_assertions::assert_eq; use std::io::{Read, Write}; use std::time::Duration; #[test] fn test_filetransfer_ftp_new() { let ftp: FtpFileTransfer = FtpFileTransfer::new(false); assert_eq!(ftp.ftps, false); assert!(ftp.stream.is_none()); // FTPS let ftp: FtpFileTransfer = FtpFileTransfer::new(true); assert_eq!(ftp.ftps, true); assert!(ftp.stream.is_none()); } #[test] #[cfg(feature = "with-containers")] fn test_filetransfer_ftp_server() { let mut ftp: FtpFileTransfer = FtpFileTransfer::new(false); // Sample file let (entry, file): (FsFile, tempfile::NamedTempFile) = create_sample_file_entry(); // Connect let hostname: String = String::from("127.0.0.1"); assert!(ftp .connect(&ProtocolParams::Generic( GenericProtocolParams::default() .address(hostname) .port(10021) .username(Some("test")) .password(Some("test")) )) .is_ok()); assert_eq!(ftp.is_connected(), true); // Get pwd assert_eq!(ftp.pwd().unwrap(), PathBuf::from("/")); // List dir (dir is empty) assert_eq!(ftp.list_dir(&Path::new("/")).unwrap().len(), 0); // Make directory assert!(ftp.mkdir(PathBuf::from("/home").as_path()).is_ok()); // Remake directory (should report already exists) assert_eq!( ftp.mkdir(PathBuf::from("/home").as_path()) .err() .unwrap() .kind(), FileTransferErrorType::DirectoryAlreadyExists ); // Make directory (err) 
assert!(ftp.mkdir(PathBuf::from("/root/pommlar").as_path()).is_err()); // Change directory assert!(ftp.change_dir(PathBuf::from("/home").as_path()).is_ok()); // Change directory (err) assert!(ftp .change_dir(PathBuf::from("/tmp/oooo/aaaa/eee").as_path()) .is_err()); // Copy (not supported) assert!(ftp .copy(&FsEntry::File(entry.clone()), PathBuf::from("/").as_path()) .is_err()); // Exec (not supported) assert!(ftp.exec("echo 1;").is_err()); // Upload 2 files let mut writable = ftp .send_file(&entry, PathBuf::from("omar.txt").as_path()) .ok() .unwrap(); write_file(&file, &mut writable); assert!(ftp.on_sent(writable).is_ok()); let mut writable = ftp .send_file(&entry, PathBuf::from("README.md").as_path()) .ok() .unwrap(); write_file(&file, &mut writable); assert!(ftp.on_sent(writable).is_ok()); // Upload file (err) assert!(ftp .send_file(&entry, PathBuf::from("/ommlar/omarone").as_path()) .is_err()); // List dir let list: Vec<FsEntry> = ftp.list_dir(PathBuf::from("/home").as_path()).ok().unwrap(); assert_eq!(list.len(), 2); // Find assert!(ftp.change_dir(PathBuf::from("/").as_path()).is_ok()); assert_eq!(ftp.find("*.txt").ok().unwrap().len(), 1); assert_eq!(ftp.find("*.md").ok().unwrap().len(), 1); assert_eq!(ftp.find("*.jpeg").ok().unwrap().len(), 0); assert!(ftp.change_dir(PathBuf::from("/home").as_path()).is_ok()); // Rename assert!(ftp.mkdir(PathBuf::from("/uploads").as_path()).is_ok()); assert!(ftp .rename( list.get(0).unwrap(), PathBuf::from("/uploads/README.txt").as_path() ) .is_ok()); // Rename (err) assert!(ftp .rename(list.get(0).unwrap(), PathBuf::from("OMARONE").as_path()) .is_err()); let dummy: FsEntry = FsEntry::File(FsFile { name: String::from("cucumber.txt"), abs_path: PathBuf::from("/cucumber.txt"), last_change_time: UNIX_EPOCH, last_access_time: UNIX_EPOCH, creation_time: UNIX_EPOCH, size: 0, ftype: Some(String::from("txt")), // File type symlink: None, // UNIX only user: Some(0), // UNIX only group: Some(0), // UNIX only unix_pex: 
Some((UnixPex::from(6), UnixPex::from(4), UnixPex::from(4))), // UNIX only }); assert!(ftp .rename(&dummy, PathBuf::from("/a/b/c").as_path()) .is_err()); // Remove assert!(ftp.remove(list.get(1).unwrap()).is_ok()); assert!(ftp.remove(list.get(1).unwrap()).is_err()); // Receive file let mut writable = ftp .send_file(&entry, PathBuf::from("/uploads/README.txt").as_path()) .ok() .unwrap(); write_file(&file, &mut writable); assert!(ftp.on_sent(writable).is_ok()); let file: FsFile = ftp .list_dir(PathBuf::from("/uploads").as_path()) .ok() .unwrap() .get(0) .unwrap() .clone() .unwrap_file(); let mut readable = ftp.recv_file(&file).ok().unwrap(); let mut data: Vec<u8> = vec![0; 1024]; assert!(readable.read(&mut data).is_ok()); assert!(ftp.on_recv(readable).is_ok()); // Receive file (err) assert!(ftp.recv_file(&entry).is_err()); // Cleanup assert!(ftp.change_dir(PathBuf::from("/").as_path()).is_ok()); assert!(ftp .remove(&make_fsentry(PathBuf::from("/home"), true)) .is_ok()); assert!(ftp .remove(&make_fsentry(PathBuf::from("/uploads"), true)) .is_ok()); // Disconnect assert!(ftp.disconnect().is_ok()); assert_eq!(ftp.is_connected(), false); } #[test] #[cfg(feature = "with-containers")] fn test_filetransfer_ftp_server_bad_auth() { let mut ftp: FtpFileTransfer = FtpFileTransfer::new(false); // Connect assert!(ftp .connect(&ProtocolParams::Generic( GenericProtocolParams::default() .address("127.0.0.1") .port(10021) .username(Some("omar")) .password(Some("ommlar")) )) .is_err()); } #[test] #[cfg(feature = "with-containers")] fn test_filetransfer_ftp_no_credentials() { let mut ftp: FtpFileTransfer = FtpFileTransfer::new(false); assert!(ftp .connect(&ProtocolParams::Generic( GenericProtocolParams::default() .address("127.0.0.1") .port(10021) .username::<&str>(None) .password::<&str>(None) )) .is_err()); } #[test] fn test_filetransfer_ftp_server_bad_server() { let mut ftp: FtpFileTransfer = FtpFileTransfer::new(false); // Connect assert!(ftp .connect(&ProtocolParams::Generic( 
GenericProtocolParams::default() .address("mybad.veribad.server") .port(21) .username::<&str>(None) .password::<&str>(None) )) .is_err()); } #[test] fn test_filetransfer_ftp_parse_list_line_unix() { let mut ftp: FtpFileTransfer = FtpFileTransfer::new(false); // Simple file let file: FsFile = ftp .parse_list_lines( PathBuf::from("/tmp").as_path(), vec!["-rw-rw-r-- 1 root dialout 8192 Nov 5 2018 omar.txt".to_string()], ) .get(0) .unwrap() .clone() .unwrap_file(); assert_eq!(file.abs_path, PathBuf::from("/tmp/omar.txt")); assert_eq!(file.name, String::from("omar.txt")); assert_eq!(file.size, 8192); assert!(file.symlink.is_none()); assert_eq!(file.user, None); assert_eq!(file.group, None); assert_eq!( file.unix_pex.unwrap(), (UnixPex::from(6), UnixPex::from(6), UnixPex::from(4)) ); assert_eq!( file.last_access_time .duration_since(UNIX_EPOCH) .ok() .unwrap(), Duration::from_secs(1541376000) ); assert_eq!( file.last_change_time .duration_since(UNIX_EPOCH) .ok() .unwrap(), Duration::from_secs(1541376000) ); assert_eq!( file.creation_time.duration_since(UNIX_EPOCH).ok().unwrap(), Duration::from_secs(1541376000) ); } #[test] fn test_filetransfer_ftp_list_dir_dos_syntax() { let mut ftp: FtpFileTransfer = FtpFileTransfer::new(false); // Connect assert!(ftp .connect(&ProtocolParams::Generic( GenericProtocolParams::default() .address("test.rebex.net") .port(21) .username(Some("demo")) .password(Some("password")) )) .is_ok()); // Pwd assert_eq!(ftp.pwd().ok().unwrap(), PathBuf::from("/")); // List dir let files: Vec<FsEntry> = ftp.list_dir(PathBuf::from("/").as_path()).ok().unwrap(); // There should be at least 1 file assert!(files.len() > 0); // Disconnect assert!(ftp.disconnect().is_ok()); } #[test] fn test_filetransfer_ftp_uninitialized() { let file: FsFile = FsFile { name: String::from("omar.txt"), abs_path: PathBuf::from("/omar.txt"), last_change_time: UNIX_EPOCH, last_access_time: UNIX_EPOCH, creation_time: UNIX_EPOCH, size: 0, ftype: Some(String::from("txt")), // File 
type symlink: None, // UNIX only user: Some(0), // UNIX only group: Some(0), // UNIX only unix_pex: Some((UnixPex::from(6), UnixPex::from(4), UnixPex::from(4))), // UNIX only }; let mut ftp: FtpFileTransfer = FtpFileTransfer::new(false); assert!(ftp.change_dir(Path::new("/tmp")).is_err()); assert!(ftp.disconnect().is_err()); assert!(ftp.list_dir(Path::new("/tmp")).is_err()); assert!(ftp.mkdir(Path::new("/tmp")).is_err()); assert!(ftp .remove(&make_fsentry(PathBuf::from("/nowhere"), false)) .is_err()); assert!(ftp .rename( &make_fsentry(PathBuf::from("/nowhere"), false), PathBuf::from("/culonia").as_path() ) .is_err()); assert!(ftp.pwd().is_err()); assert!(ftp.stat(Path::new("/tmp")).is_err()); assert!(ftp.recv_file(&file).is_err()); assert!(ftp.send_file(&file, Path::new("/tmp/omar.txt")).is_err()); let (_, temp): (FsFile, tempfile::NamedTempFile) = create_sample_file_entry(); let readable: Box<dyn Read> = Box::new(std::fs::File::open(temp.path()).unwrap()); assert!(ftp.on_recv(readable).is_err()); let (_, temp): (FsFile, tempfile::NamedTempFile) = create_sample_file_entry(); let writable: Box<dyn Write> = Box::new(open_file(temp.path(), true, true, true).ok().unwrap()); assert!(ftp.on_sent(writable).is_err()); } }
/*
URLify: Write a method to replace all spaces in a string with '%20'. You may assume
that the string has sufficient space at the end of the string to hold the additional
characters, and that you are given the "true" length of the string.
(Note: Please use a character array so that you can perform this operation in place.)
EXAMPLE
Input: "Mr John Smith"
Output: "Mr%20John%20Smith"
*/

using System;
using System.Text;

namespace InterviewPreperationGuide.Core.CrackingTheCodingInterview.c1q3 {
    public class Solution {
        /// <summary>Demo entry point exercising both implementations on sample inputs.</summary>
        public static void Init (string[] args) {
            Console.WriteLine ("Empty: " + ReplaceSpaces_a (""));
            Console.WriteLine ("Null: " + ReplaceSpaces_a (null));
            Console.WriteLine ("Mr John Smith: " + ReplaceSpaces_a ("Mr John Smith"));
            Console.WriteLine (" b : " + ReplaceSpaces_a (" b "));
            Console.WriteLine (" : " + ReplaceSpaces_a (" "));
            Console.WriteLine (" : " + ReplaceSpaces_b (null, 0));
            // The buffer must hold the fully expanded string: "Mr John Smith" has
            // 2 spaces, so 13 + 2 * 2 = 17 characters are required. The previous
            // call passed a 14-char buffer, which made the backward in-place pass
            // walk off the front of the array and throw IndexOutOfRangeException.
            Console.WriteLine ("Mr John Smith: " + ReplaceSpaces_b ("Mr John Smith    ", 13));
        }

        /// <summary>
        /// Simple approach: builds a new string, copying characters and replacing
        /// each space with "%20". Returns "" for null/empty input.
        /// </summary>
        private static string ReplaceSpaces_a (string input) {
            StringBuilder result = new StringBuilder ();
            if (!string.IsNullOrEmpty (input)) {
                for (int i = 0; i < input.Length; i++) {
                    if (input[i] != ' ') { // char literal instead of Char.Parse(" ")
                        result.Append (input[i]);
                    } else {
                        result.Append ("%20");
                    }
                }
            }
            return result.ToString ();
        }

        /// <summary>
        /// In-place approach: scans backwards from the true <paramref name="length"/>,
        /// shifting characters toward the end of the (pre-sized) buffer and writing
        /// "%20" in place of each space. The caller must supply a buffer large enough
        /// for the expanded text: length + 2 * (number of spaces).
        /// </summary>
        private static string ReplaceSpaces_b (string input, int length) {
            if (string.IsNullOrEmpty (input) || length == 0) {
                return string.Empty;
            }
            char[] result = input.ToCharArray ();
            int counter = result.Length - 1; // write cursor, starts at the end of the buffer
            for (int i = length - 1; i >= 0; i--) {
                if (result[i] == ' ') {
                    // Write "%20" backwards: '0', '2', '%'.
                    result[counter] = '0';
                    result[counter - 1] = '2';
                    result[counter - 2] = '%';
                    counter = counter - 3;
                } else {
                    result[counter] = result[i];
                    counter--;
                }
            }
            return new string (result);
        }
    }
}
namespace Octokit.Webhooks.Events.DiscussionComment
{
    using JetBrains.Annotations;

    /// <summary>
    /// Enumeration-style record of the action values carried by the
    /// <c>discussion_comment</c> webhook event payload.
    /// </summary>
    [PublicAPI]
    public sealed record DiscussionCommentAction : WebhookEventAction
    {
        /// <summary>The <c>created</c> action value.</summary>
        public static readonly DiscussionCommentAction Created = new(DiscussionCommentActionValue.Created);

        /// <summary>The <c>deleted</c> action value.</summary>
        public static readonly DiscussionCommentAction Deleted = new(DiscussionCommentActionValue.Deleted);

        /// <summary>The <c>edited</c> action value.</summary>
        public static readonly DiscussionCommentAction Edited = new(DiscussionCommentActionValue.Edited);

        // Private: instances are limited to the predefined static values above.
        private DiscussionCommentAction(string value)
            : base(value)
        {
        }
    }
}
import 'package:payouts/src/pivot.dart' as pivot;

import 'constants.dart';

/// Signature shared by all per-key entry comparators.
typedef _EntryComparator = int Function(Map<String, dynamic> a, Map<String, dynamic> b);

/// Orders entries by invoice number using natural string ordering.
int _compareInvoiceNumber(Map<String, dynamic> a, Map<String, dynamic> b) =>
    (a[Keys.invoiceNumber] as String).compareTo(b[Keys.invoiceNumber] as String);

/// Orders entries by billing start date. Plain string comparison is
/// sufficient because dates are stored in YYYY-MM-DD format.
int _compareBillingStart(Map<String, dynamic> a, Map<String, dynamic> b) =>
    (a[Keys.billingStart] as String).compareTo(b[Keys.billingStart] as String);

/// Orders entries by billing period. Plain string comparison is
/// sufficient because dates are stored in YYYY-MM-DD format.
int _compareBillingPeriod(Map<String, dynamic> a, Map<String, dynamic> b) =>
    (a[Keys.billingPeriod] as String).compareTo(b[Keys.billingPeriod] as String);

/// Orders entries by submitted timestamp; unsubmitted (null) entries sort last.
int _compareSubmitted(Map<String, dynamic> a, Map<String, dynamic> b) {
  final int? first = a[Keys.submitted];
  final int? second = b[Keys.submitted];
  if (first == second) {
    return 0;
  }
  if (first == null) {
    return 1;
  }
  if (second == null) {
    return -1;
  }
  return first.compareTo(second);
}

/// Orders entries by resubmit flag; flagged (true) entries sort first.
int _compareResubmit(Map<String, dynamic> a, Map<String, dynamic> b) {
  final bool first = a[Keys.resubmit];
  final bool second = b[Keys.resubmit];
  if (first == second) {
    return 0;
  }
  return first ? -1 : 1;
}

/// Lookup table mapping each sortable entry key to its comparator.
const Map<String, _EntryComparator> _entryComparators = <String, _EntryComparator>{
  Keys.invoiceNumber: _compareInvoiceNumber,
  Keys.billingStart: _compareBillingStart,
  Keys.billingPeriod: _compareBillingPeriod,
  Keys.submitted: _compareSubmitted,
  Keys.resubmit: _compareResubmit,
};

/// Compares invoice entries on [key], honoring the sort [direction].
class EntryComparator {
  const EntryComparator({
    required this.key,
    this.direction = pivot.SortDirection.ascending,
  });

  /// The entry key being sorted on; must be present in the comparator table.
  final String key;

  /// Whether results are returned in ascending or descending order.
  final pivot.SortDirection direction;

  /// Compares [a] and [b], negating the base result for descending sorts.
  int compare(Map<String, dynamic> a, Map<String, dynamic> b) {
    final int raw = _entryComparators[key]!(a, b);
    return direction == pivot.SortDirection.descending ? -raw : raw;
  }
}
module Nessus
  # This class represents each of the /NessusClientData_v2/Report/ReportHost/ReportItem
  # elements in the Nessus XML document.
  #
  # It provides a convenient way to access the information scattered all over
  # the XML in attributes and nested tags.
  #
  # Instead of providing separate methods for each supported property we rely
  # on Ruby's #method_missing to do most of the work.
  class ReportItem
    # Accepts an XML node from Nokogiri::XML.
    def initialize(xml_node)
      @xml = xml_node
    end

    # List of supported tags. They can be attributes, simple descendants or
    # collections (e.g. <bid/>, <cve/>, <xref/>)
    def supported_tags
      [
        # attributes
        :port, :svc_name, :protocol, :severity, :plugin_id, :plugin_name, :plugin_family,
        # simple tags
        :solution, :risk_factor, :description, :plugin_publication_date,
        :metasploit_name, :cvss_vector, :cvss_temporal_vector, :synopsis,
        :exploit_available, :patch_publication_date, :plugin_modification_date,
        :cvss_temporal_score, :cvss_base_score, :plugin_output, :plugin_version,
        :exploitability_ease, :vuln_publication_date, :exploit_framework_canvas,
        :exploit_framework_metasploit, :exploit_framework_core,
        # multiple tags
        :bid_entries, :cve_entries, :see_also_entries, :xref_entries,
        # compliance tags
        :cm_actual_value, :cm_audit_file, :cm_check_id, :cm_check_name, :cm_info,
        :cm_output, :cm_policy_value, :cm_reference, :cm_result, :cm_see_also,
        :cm_solution
      ]
    end

    # This allows external callers (and specs) to check for implemented
    # properties
    def respond_to?(method, include_private=false)
      return true if supported_tags.include?(method.to_sym)
      super
    end

    # This method is invoked by Ruby when a method that is not defined in this
    # instance is called.
    #
    # In our case we inspect the @method@ parameter and try to find the
    # attribute, simple descendent or collection that it maps to in the XML
    # tree.
    def method_missing(method, *args)
      # We could remove this check and return nil for any non-recognized tag.
      # The problem would be that it would make tricky to debug problems with
      # typos. For instance: <>.potr would return nil instead of raising an
      # exception
      super unless supported_tags.include?(method)

      # first we try the attributes: port, svc_name, protocol, severity,
      # plugin_id, plugin_name, plugin_family
      translations_table = {
        # @port = xml.attributes["port"]
        # @svc_name = xml.attributes["svc_name"]
        # @protocol = xml.attributes["protocol"]
        # @severity = xml.attributes["severity"]
        :plugin_id => 'pluginID',
        :plugin_name => 'pluginName',
        :plugin_family => 'pluginFamily'
      }

      method_name = translations_table.fetch(method, method.to_s)
      return @xml.attributes[method_name].value if @xml.attributes.key?(method_name)

      # then we try the children tags: solution, risk_factor, description,
      # plugin_publication_date, metasploit_name, cvss_vector,
      # cvss_temporal_vector, synopsis, exploit_available,
      # patch_publication_date, plugin_modification_date, cvss_temporal_score,
      # cvss_base_score, plugin_output, plugin_version, exploitability_ease,
      # vuln_publication_date, exploit_framework_canvas,
      # exploit_framework_metasploit, exploit_framework_core
      tag = @xml.xpath("./#{method_name}").first
      if tag
        return tag.text
      end

      # then the custom XML tags (cm: namespace)
      # BUGFIX: plain Ruby strings only define #start_with?; #starts_with? is
      # an ActiveSupport extension and would raise NoMethodError here when the
      # gem is used outside of Rails.
      if method_name.start_with?('cm_')
        method_name = method_name.sub(/cm_/, 'cm:compliance-').gsub(/_/, '-')
        cm_value = @xml.at_xpath("./#{method_name}", { 'cm' => 'http://www.nessus.org/cm' })
        if cm_value
          return cm_value.text
        else
          return nil
        end
      end

      # finally the enumerations: bid_entries, cve_entries, xref_entries
      translations_table = {
        :bid_entries => 'bid',
        :cve_entries => 'cve',
        :see_also_entries => 'see_also',
        :xref_entries => 'xref'
      }

      method_name = translations_table.fetch(method, nil)
      if method_name
        @xml.xpath("./#{method_name}").collect(&:text)
      else
        # nothing found, the tag is valid but not present in this ReportItem
        return nil
      end
    end
  end
end
# This file is automatically required when twenv.rb starts. # Add and require other code you'd like to use here. # require_relative '../lib/foobar/foobar.rb' # require_relative 'foobar.rb' # etc..
# Define a bare test case to use with Capybara class ActiveSupport::IntegrationCase < ActiveSupport::TestCase include Capybara::DSL include Rails.application.routes.url_helpers end
# overeact (IN DEVELOPMENT - NOT READY FOR USE)

A component library intended to speed up React Native development.
package com.linecorp.kotlinjdsl.spring.data.reactive

import com.linecorp.kotlinjdsl.query.clause.select.SingleSelectClause
import com.linecorp.kotlinjdsl.query.creator.SubqueryCreatorImpl
import com.linecorp.kotlinjdsl.querydsl.expression.col
import com.linecorp.kotlinjdsl.querydsl.expression.column
import com.linecorp.kotlinjdsl.querydsl.from.associate
import com.linecorp.kotlinjdsl.querydsl.from.fetch
import com.linecorp.kotlinjdsl.querydsl.where.WhereDsl
import com.linecorp.kotlinjdsl.spring.data.reactive.query.*
import com.linecorp.kotlinjdsl.spring.reactive.listQuery
import com.linecorp.kotlinjdsl.spring.reactive.querydsl.SpringDataReactiveCriteriaQueryDsl
import com.linecorp.kotlinjdsl.spring.reactive.querydsl.SpringDataReactivePageableQueryDsl
import com.linecorp.kotlinjdsl.spring.reactive.singleQuery
import com.linecorp.kotlinjdsl.spring.reactive.updateQuery
import com.linecorp.kotlinjdsl.test.WithKotlinJdslAssertions
import com.linecorp.kotlinjdsl.test.entity.EntityDsl
import com.linecorp.kotlinjdsl.test.entity.order.Order
import com.linecorp.kotlinjdsl.test.entity.order.OrderGroup
import com.linecorp.kotlinjdsl.test.entity.order.OrderItem
import com.linecorp.kotlinjdsl.test.reactive.MutinySessionFactoryExtension
import com.linecorp.kotlinjdsl.test.reactive.runBlocking
import io.smallrye.mutiny.coroutines.awaitSuspending
import org.hibernate.reactive.mutiny.Mutiny
import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.extension.ExtendWith
import org.springframework.data.domain.Page
import org.springframework.data.domain.PageRequest
import org.springframework.data.domain.Sort

/**
 * Integration tests for [SpringDataHibernateMutinyReactiveQueryFactory]:
 * exercises single/list/page/update/delete/subquery execution and
 * transactional rollback against a Hibernate Reactive (Mutiny) session.
 */
@ExtendWith(MutinySessionFactoryExtension::class)
internal class SpringDataMutinyReactiveQueryFactoryIntegrationTest : EntityDsl, WithKotlinJdslAssertions {
    // Injected by MutinySessionFactoryExtension before each test.
    private lateinit var sessionFactory: Mutiny.SessionFactory
    private lateinit var queryFactory: SpringDataHibernateMutinyReactiveQueryFactory

    // Fixture: three orders for purchaser 1000 and one for purchaser 2000.
    private val order1 = order { purchaserId = 1000 }
    private val order2 = order { purchaserId = 1000 }
    private val order3 = order { purchaserId = 1000 }
    private val order4 = order { purchaserId = 2000 }

    @BeforeEach
    fun setUp() {
        queryFactory = SpringDataHibernateMutinyReactiveQueryFactory(
            sessionFactory = sessionFactory, subqueryCreator = SubqueryCreatorImpl()
        )

        // Persist the four fixture orders one by one, flushing after each.
        sequenceOf(order1, order2, order3, order4).forEach {
            runBlocking {
                sessionFactory.withSession { session -> session.persist(it).flatMap { session.flush() } }
                    .awaitSuspending()
            }
        }
    }

    @Test
    fun executeSessionWithFactory() = runBlocking {
        val order = order { purchaserId = 5000 }

        // withFactory exposes both the raw session (for persist/flush) and the query factory.
        val actual = queryFactory.withFactory { session, factory ->
            session.persist(order).awaitSuspending()
            session.flush().awaitSuspending()

            factory.singleQuery<Order> {
                select(entity(Order::class))
                from(entity(Order::class))
                where(col(Order::purchaserId).equal(5000))
            }
        }

        assertThat(actual.id).isEqualTo(order.id)
    }

    @Test
    fun executeWithFactory() = runBlocking {
        val actual = queryFactory.withFactory { factory ->
            factory.singleQuery<Order> {
                select(entity(Order::class))
                from(entity(Order::class))
                where(col(Order::purchaserId).equal(2000))
            }
        }

        assertThat(actual.id).isEqualTo(order4.id)
    }

    @Test
    fun singleQuery() = runBlocking {
        // when
        val actual = queryFactory.singleQuery<Order> {
            select(entity(Order::class))
            from(entity(Order::class))
            where(col(Order::purchaserId).equal(2000))
        }

        // then
        assertThat(actual.id).isEqualTo(order4.id)
    }

    @Test
    fun `singleQueryOrNull return null`() = runBlocking {
        // when: no order has purchaserId 3000, so the query yields null instead of throwing
        val actual = queryFactory.singleQueryOrNull<Order> {
            select(entity(Order::class))
            from(entity(Order::class))
            where(col(Order::purchaserId).equal(3000))
        }

        // then
        assertThat(actual).isNull()
    }

    @Test
    fun singleQueryOrNull() = runBlocking {
        // when
        val actual = queryFactory.singleQueryOrNull<Order> {
            select(entity(Order::class))
            from(entity(Order::class))
            where(col(Order::purchaserId).equal(2000))
        }!!

        // then
        assertThat(actual.id).isEqualTo(order4.id)
    }

    @Test
    fun listQuery() = runBlocking {
        // when: fetch joins pull groups, items and address eagerly
        val actual = queryFactory.listQuery<Order> {
            select(entity(Order::class))
            from(entity(Order::class))
            fetch(Order::groups)
            fetch(OrderGroup::items)
            fetch(OrderGroup::address)
        }

        // then
        assertThat(actual).containsExactlyInAnyOrder(order1, order2, order3, order4)
    }

    @Test
    fun subquery() = runBlocking {
        // Correlated subquery: counts orders sharing the outer row's purchaserId.
        val subquery = queryFactory.subquery<Long> {
            val order = entity(Order::class, "o")
            select(count(col(order, Order::id)))
            from(order)
            where(
                col(order, Order::purchaserId).equal(col(Order::purchaserId)),
            )
        }

        // then
        // NOTE(review): the expected values are entity ids, but the query selects
        // counts (3, 3, 3, 1). This only passes if the auto-generated ids happen
        // to coincide with those counts (ids 1..4 in insertion order) — fragile;
        // confirm whether literal counts were intended here.
        assertThat(queryFactory.listQuery<Long> {
            select(subquery)
            from(entity(Order::class))
            orderBy(col(Order::id).asc())
        }).isEqualTo(listOf(order3.id, order3.id, order3.id, order1.id))
    }

    @Test
    fun updateQuery() = runBlocking {
        // when: bulk update moves order4 from purchaser 2000 to 3000
        queryFactory.updateQuery<Order> {
            where(col(Order::purchaserId).equal(2000))
            set(col(Order::purchaserId), 3000)
        }

        // then
        val actual = queryFactory.singleQuery<Order> {
            select(entity(Order::class))
            from(entity(Order::class))
            where(col(Order::purchaserId).equal(3000))
        }

        assertThat(actual.id).isEqualTo(order4.id)
        assertThat(actual.purchaserId).isEqualTo(3000)
    }

    @Test
    fun transaction() = runBlocking {
        // when: the update inside the transaction is rolled back by the thrown exception
        try {
            queryFactory.transactionWithFactory { session, queryFactory ->
                session.merge(order1).awaitSuspending()

                val orders = queryFactory.listQuery<Order> {
                    select(entity(Order::class))
                    from(entity(Order::class))
                    fetch(Order::groups)
                    fetch(OrderGroup::items)
                    fetch(OrderGroup::address)
                }

                queryFactory.updateQuery<Order> {
                    where(col(Order::id).equal(orders.first().id))
                    set(col(Order::purchaserId), orders.first().purchaserId + 1)
                }.executeUpdate()

                // Force a rollback after the update succeeded.
                queryFactory.updateQuery<Order> {
                    throw IllegalStateException("transaction rollback")
                }.executeUpdate()
            }
        } catch (e: IllegalStateException) {
            assertThat(e).hasMessage("transaction rollback")
        }

        // then: order1 must be unchanged because the whole transaction rolled back
        assertThat(queryFactory.transactionWithFactory { queryFactory ->
            queryFactory.singleQuery<Order> {
                select(entity(Order::class))
                from(entity(Order::class))
                fetch(Order::groups)
                fetch(OrderGroup::items)
                fetch(OrderGroup::address)
                where(col(Order::id).equal(order1.id))
            }
        }).isEqualTo(order1)
    }

    @Test
    fun deleteQuery() = runBlocking {
        // when: delete the first item of order1's first group
        queryFactory.deleteQuery<OrderItem> {
            where(col(OrderItem::id).equal(order1.groups.first().items.first().id))
        }

        // then
        val actual = queryFactory.listQuery<OrderItem> {
            select(entity(OrderItem::class))
            from(entity(OrderItem::class))
            where(col(Order::id).equal(order1.id))
            associate(OrderItem::group)
            associate(OrderGroup::order)
        }

        assertThat(actual).isEmpty()
    }

    @Test
    fun pageQuery() = runBlocking {
        // when: purchaser 1000 has 3 orders; page index 1 with size 2 holds only the third
        val actual = queryFactory.pageQuery<Long>(PageRequest.of(1, 2, Sort.by("id"))) {
            select(col(Order::id))
            from(entity(Order::class))
            where(col(Order::purchaserId).equal(1000))
        }

        // then
        assertThat(actual.content).isEqualTo(listOf(order3.id))
        assertThat(actual.totalElements).isEqualTo(3)
        assertThat(actual.totalPages).isEqualTo(2)
        assertThat(actual.number).isEqualTo(1)
    }

    @Test
    fun pageExtractWhereQuery() = runBlocking {
        // given: the same where-spec is reused by both the pageable and criteria DSLs
        val pageable = PageRequest.of(0, 10)

        fun WhereDsl.equalValueSpec() = column(Order::purchaserId).equal(1000L)

        val dsl: SpringDataReactivePageableQueryDsl<Order>.() -> Unit = {
            select(entity(Order::class))
            from(entity(Order::class))
            where(equalValueSpec())
        }
        val dslCriteria: SpringDataReactiveCriteriaQueryDsl<Order>.() -> Unit = {
            select(entity(Order::class))
            from(entity(Order::class))
            where(equalValueSpec())
        }

        // when
        val actual: Page<Order> = queryFactory.pageQuery(pageable, dsl)
        val actualList: List<Order> = queryFactory.listQuery(dslCriteria)

        // then
        assertThat(actual.content.size).isEqualTo(3)
        assertThat(actual.map { it.id }).containsExactlyInAnyOrder(order1.id, order2.id, order3.id)
        assertThat(actualList.map { it.id }).containsExactlyInAnyOrder(order1.id, order2.id, order3.id)
    }

    @Test
    fun `pageQuery with countProjection`() = runBlocking {
        // given: a custom count projection replaces the default count(*) for totals
        val pageable = PageRequest.of(0, 1, Sort.by(Sort.Direction.DESC, Order::id.name))

        val dsl: SpringDataReactivePageableQueryDsl<Order>.() -> Unit = {
            select(entity(Order::class))
            from(entity(Order::class))
        }
        val countProjection: SpringDataReactivePageableQueryDsl<Long>.() -> SingleSelectClause<Long> = {
            select(count(column(Order::purchaserId)))
        }

        // when
        val actual: Page<Order> = queryFactory.pageQuery(pageable, dsl, countProjection)

        // then: descending id order means the last-inserted order comes first
        assertThat(actual).hasSize(1)
        assertThat(actual.content.first().id).isEqualTo(order4.id)
    }
}
using System; using System.Collections.Generic; namespace PierresPatisserie.Bread { public class BreadOrder { public int BreadQuantity { get; set; } public int BreadPrice { get; set; } public int BreadCost { get; set; } public BreadOrder(int breadQuantity, int breadPrice) { BreadQuantity = breadQuantity; BreadPrice = breadPrice; } public int CalculateBreadCost(int breadQuantity, int breadPrice) { if (breadQuantity % 3 == 0) { return ((breadQuantity * 2) / 3) * breadPrice; } else if (breadQuantity % 3 == 1 || breadQuantity % 3 == 2) { int remainder = breadQuantity % 3; return ((((breadQuantity - remainder) * 2) / 3) + remainder) * breadPrice; } else return breadQuantity * breadPrice; } } }
# ============================================================================== # Copyright 2018-2020 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """nGraph TensorFlow relu6 test """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import pytest import numpy as np import tensorflow as tf from common import NgraphTest class TestRelu6(NgraphTest): def test_relu6(self): x = tf.placeholder(tf.float32, shape=(2, 3)) y = tf.placeholder(tf.float32, shape=(2, 3)) z = tf.placeholder(tf.float32, shape=(2, 3)) a = x + y + z b = x + y + z c = a * b d = tf.nn.relu6(c) # input value and expected value x_np = np.full((2, 3), 1.0) y_np = np.full((2, 3), 1.0) z_np = np.full((2, 3), 1.0) a_np = x_np + y_np + z_np b_np = x_np + y_np + z_np c_np = a_np * b_np c_np = np.maximum(c_np, np.full(c_np.shape, 0.0)) expected = np.minimum(c_np, np.full(c_np.shape, 6.0)) sess_fn = lambda sess: sess.run((a, c, d), feed_dict={ x: x_np, y: y_np, z: z_np }) np.allclose(self.with_ngraph(sess_fn), self.without_ngraph(sess_fn))
import { Injectable } from '@nestjs/common'; import { students } from 'src/db'; import { FindStudentsResponseDto } from './dto/student.dto'; @Injectable() export class StudentService { students = students; getStudents(): FindStudentsResponseDto[] { return this.students; } getStudentById(id) { return this.students.find((student) => student.id === id); } }
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe PhisherPhinder::Mail do
  # Minimal keyword arguments needed to build a Mail instance; individual
  # examples merge in the headers/body they care about.
  let(:base_headers) do
    {
      original_email: '',
      original_headers: '',
      original_body: '',
      headers: {},
      tracing_headers: [],
      authentication_headers: [],
      body: ''
    }
  end

  describe 'reply_to_addresses' do
    it 'exposes all the email addresses that appear in the Reply-To headers' do
      mail = described_class.new(
        **base_headers.merge({
          headers: {
            reply_to: [
              "a@b.com",
              "c@d.com, <d@e.com >",
              "d@e.com, e@F.com",
              "G <g@h.com>, H <h@i.com>"
            ]
          }
        })
      )

      # Per the expectation below, addresses are de-duplicated (d@e.com appears
      # twice in the input), downcased (e@F.com -> e@f.com) and stripped of
      # display names / angle brackets / stray whitespace.
      expect(mail.reply_to_addresses.sort).to eql([
        'a@b.com', 'c@d.com', 'd@e.com', 'e@f.com', 'g@h.com', 'h@i.com'
      ])
    end

    it 'returns an empty collection if there is no `Reply-To` header' do
      mail = described_class.new(**base_headers)

      expect(mail.reply_to_addresses).to eql([])
    end
  end

  it 'returns a collection of hypertext_links found in the mail body' do
    html_body = '<html> <a href="http://foo">Click Me!</a> <a href="http://bar">No, click me!</a> </html>'
    mail = described_class.new(**base_headers.merge(body: {html: html_body, text: 'Foo'}))

    expect(mail.hypertext_links.length).to eql 2
    expect(mail.hypertext_links.first).to eq PhisherPhinder::BodyHyperlink.new('http://foo', 'Click Me!')
    expect(mail.hypertext_links.last).to eq PhisherPhinder::BodyHyperlink.new('http://bar', 'No, click me!')
  end

  it 'ignores hyperlinks that do not have an href' do
    # The middle anchor has no href attribute and must not appear in the output.
    html_body = '<html> <a href="http://foo">Click Me!</a> <a></a> <a href="http://bar">No, click me!</a> </html>'
    mail = described_class.new(**base_headers.merge(body: {html: html_body, text: 'Foo'}))

    expect(mail.hypertext_links.length).to eql 2
    expect(mail.hypertext_links.first).to eq PhisherPhinder::BodyHyperlink.new('http://foo', 'Click Me!')
    expect(mail.hypertext_links.last).to eq PhisherPhinder::BodyHyperlink.new('http://bar', 'No, click me!')
  end

  it 'returns an empty collection if there is no content that is classified as HTML' do
    mail = described_class.new(**base_headers.merge(body: {html: nil, text: 'Foo'}))

    expect(mail.hypertext_links).to eql []
  end

  it 'exposes the provided authentication headers' do
    mail = described_class.new(**base_headers.merge(authentication_headers: ['foo', 'bar']))

    expect(mail.authentication_headers).to eql ['foo', 'bar']
  end
end
# Monitoring

### Open Source

NetData

#### Réseau :

:round_pushpin: `SolarWinds Network Performance Monitor (NPM)`

Paessler PRTG Network Monitor

WhatsUp Gold

`Nagios`

#### [SIEM](https://en.wikipedia.org/wiki/Security_information_and_event_management):

:round_pushpin: `Dell EMC RSA Netwitness`

LogRhythm

`Splunk`

IBM QRadar

Darktrace

#### DNS:

:round_pushpin: `CIRA D-Zone`

Cisco OpenDNS Umbrella

BlueCat

CrowdStrike Falcon DNS

# Légendes

:round_pushpin: Utilisé au Collège

`Populaire`: Populaire

# Références:

https://www.softwaretestinghelp.com/cloud-monitoring-tools/

https://kalilinuxtutorials.com/netdata-performance-monitoring/
#!/bin/bash GIT_ROOT=$(git rev-parse --show-toplevel) cd $GIT_ROOT # The first line of the tests are # always empty if there are no linting errors has_errors=0 echo "Running flake8 on bentoml module.." output=$( flake8 --config=.flake8 bentoml ) firstline=`echo "${output}" | head -1` echo "$output" if ! [ -z "$firstline" ]; then has_errors=1 fi echo "Running flake8 on test module.." output=$( flake8 --config=.flake8 tests e2e_tests ) firstline=`echo "${output}" | head -1` echo "$output" if ! [ -z "$firstline" ]; then has_errors=1 fi echo "Running pylint on bentoml module.." output=$( pylint --rcfile="./pylintrc" bentoml ) firstline=`echo "${output}" | head -1` echo "$output" if ! [ -z "$firstline" ]; then has_errors=1 fi echo "Running pylint on test module.." output=$( pylint --rcfile="./pylintrc" tests e2e_tests ) firstline=`echo "${output}" | head -1` echo "$output" if ! [ -z "$firstline" ]; then has_errors=1 fi echo "Done" exit $has_errors
package com.example import java.util.Date import akka.actor.{Actor, ActorLogging, Props} import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ /** * Created by anand on 1/14/16. */ class ScheduleActor extends Actor with ActorLogging { import ScheduleActor._ def receive = { case Schedule(id, date) => log.info("Received message for id: {} and on: {}", id, date) context.system.scheduler.scheduleOnce(5 seconds, self, Schedule(id, new Date)) } } object ScheduleActor { val props = Props[ScheduleActor] case class Schedule(id: Long, time: Date) }
import { Story, Meta } from '@storybook/react/types-6-0'; import TagsGrid, { TagsGridProps } from '.'; export default { component: TagsGrid, title: 'Containers/TagsGrid', argTypes: {}, } as Meta; const Template: Story<TagsGridProps> = ({ ...rest }: TagsGridProps) => <TagsGrid {...rest} />; export const Default: Story<TagsGridProps> = Template.bind({}); Default.args = { rows: [ { title: 'Invests in', type: 'category', tags: [ { id: 'tourism-and-recreation', name: 'Tourism & Recreation' }, { id: 'non-timber-forest-production', name: 'Non-timber forest production' }, { id: 'sustainable-agrosystems', name: 'Sustainable agrosystems' }, { id: 'forestry-and-agroforestry', name: 'Forestry & agroforestry' }, ], }, { title: 'Ticket size', tags: ['US$50k', '$50k - $500k', '$500k - $1M'], }, { title: 'Instrument size', tags: ['Grand', 'Loan'], }, { title: 'Impact they invest on', tags: ['Biodiversity', 'Community'], }, ], };
#! /bin/bash -e # Check that valid parameters have been specified. if [ $# -ne 2 ] || ([ "$1" != "11" ] && [ "$1" != "12" ] && [ "$1" != "14" ] && [ "$1" != "15" ]) || ([ "$2" != "Debug" ] && [ "$2" != "Release" ]) then echo "Usage: build-win.sh {11|12|14|15} {Debug|Release}" exit fi # Check that msbuild is on the system path. ./require-msbuild.sh # Build the third-party libraries. cd libraries ./build-boost_1_58_0-win.sh "$1" ./build-glew-1.12.0-win.sh "$1" ./build-lodepng-20160501-win.sh "$1" #./build-opencv-3.1.0-win.sh "$1" ./build-SDL2-2.0.7-win.sh "$1" ./extract-Eigen-3.2.2.sh cd .. # Build spaint itself. echo "[spaint] Building spaint" if [ ! -d build ] then mkdir build cd build # Note: We need to configure twice to handle conditional building. echo "[spaint] ...Configuring using CMake..." CMAKE_GENERATOR=`../determine-cmakegenerator.sh $1` VS_TOOLSET_STRING=`../determine-vstoolsetstring.sh $1` cmake -G "$CMAKE_GENERATOR" $VS_TOOLSET_STRING .. cmake .. cd .. fi cd build echo "[spaint] ...Running build..." cmd //c "msbuild /p:Configuration=$2 spaint.sln" echo "[spaint] ...Installing..." cmd //c "msbuild /p:Configuration=$2 INSTALL.vcxproj" echo "[spaint] ...Finished building spaint."
../../train -q -s 0 -c 50 -e 0.000001 ../ML_HW4_train_ZSpace.txt ../../predict ../ML_HW4_test_ZSpace.txt ./ML_HW4_train_ZSpace.txt.model ../P19/P19.txt
#!/usr/bin/perl
# Generates a C/C++ header of JSON_* helper macros (JSON_VALUE_N,
# JSON_ACCESS_N, JSON_OBJECT_PAIR_N, JSON_OBJECT_ROOT_PAIR_N) for up to
# $num_pairs name/type pairs. Each N-ary macro is defined recursively in
# terms of the (N-1)-ary one. Output goes to stdout.

use strict;

# Maximum arity of the generated macro families.
my $num_pairs = 30;

print <<EOS;
//##### This file is generated by $0 #####

EOS

## JSON_VALUE* ##

# Base case: the 2-pair JSON_VALUE macro.
print <<EOS;
#define JSON_VALUE_2(Name_1_, Type_1_, Name_2_, Type_2_) \\
 JSON_VALUE(Name_1_, BCL_JOIN(Type_1_)) \\
 JSON_VALUE(Name_2_, BCL_JOIN(Type_2_))

EOS

# $a1 accumulates the parameter list; $a2 the tail forwarded to N-1.
my $a1 = 'Name_1_, Type_1_, Name_2_, Type_2_';
my $a2 = 'Name_2_, BCL_JOIN(Type_2_)';
for (my $i = 3; $i <= $num_pairs; $i++) {
  $a1 .= ", Name_${i}_, Type_${i}_";
  $a2 .= ", Name_${i}_, BCL_JOIN(Type_${i}_)";
  print <<EOS;
#define JSON_VALUE_${i}($a1) \\
 JSON_VALUE(Name_1_, BCL_JOIN(Type_1_)) \\
 JSON_VALUE_@{[$i-1]}($a2)

EOS
}

## JSON_ACCESS_* ##

# Base case: the 2-name JSON_ACCESS macro.
print <<EOS;
#define JSON_ACCESS_2(Object_, Name_1_, Name_2_) \\
 JSON_ACCESS(Object_, Name_1_) \\
 JSON_ACCESS(Object_, Name_2_)

EOS

$a1 = 'Name_2_';
for (my $i = 3; $i <= $num_pairs; $i++) {
  $a1 .= ", Name_${i}_";
  print <<EOS;
#define JSON_ACCESS_$i(Object_, Name_1_, $a1) \\
 JSON_ACCESS(Object_, Name_1_) \\
 JSON_ACCESS_@{[$i-1]}(Object_, $a1)

EOS
}

## JSON_OBJECT_PAIR_* ##

# Single-pair combinator: declares the value, the object entry and the accessor.
print <<EOS;
#define JSON_OBJECT_PAIR(Object_, Name_, Type_) \\
 JSON_VALUE(Name_, BCL_JOIN(Type_)) \\
 JSON_OBJECT(Object_, Name_) \\
 JSON_ACCESS(Object_, Name_)

EOS

# $a1: full (Object_, Name_i_, Type_i_...) list; $a2: names only; $a3: value pairs.
$a1 = 'Object_, Name_1_, Type_1_';
$a2 = 'Object_, Name_1_';
my $a3 = 'Name_1_, BCL_JOIN(Type_1_)';
for (my $i = 2; $i <= $num_pairs; $i++) {
  $a1 .= ", Name_${i}_, Type_${i}_";
  $a2 .= ", Name_${i}_";
  $a3 .= ", Name_${i}_, BCL_JOIN(Type_${i}_)";
  print <<EOS;
#define JSON_OBJECT_PAIR_$i($a1) \\
 JSON_VALUE_$i($a3) \\
 JSON_OBJECT($a2) \\
 JSON_ACCESS_$i($a2)

EOS
}

## JSON_OBJECT_ROOT_PAIR_* ##

# Root-object variant of JSON_OBJECT_PAIR.
print <<EOS;
#define JSON_OBJECT_ROOT_PAIR(Object_, Name_, Type_) \\
 JSON_VALUE(Name_, Type_) \\
 JSON_OBJECT_ROOT(Object_, Name_) \\
 JSON_ACCESS(Object_, Name_)

EOS

$a1 = 'Object_, Name_1_, Type_1_';
$a2 = 'Name_1_, BCL_JOIN(Type_1_)';
$a3 = 'Object_, Name_1_';
for (my $i = 2; $i <= $num_pairs; $i++) {
  $a1 .= ", Name_${i}_, Type_${i}_";
  $a2 .= ", Name_${i}_, BCL_JOIN(Type_${i}_)";
  $a3 .= ", Name_${i}_";
  print <<EOS;
#define JSON_OBJECT_ROOT_PAIR_$i($a1) \\
 JSON_VALUE_$i($a2) \\
 JSON_OBJECT_ROOT($a3) \\
 JSON_ACCESS_$i($a3)

EOS
}

print "\n";
--- title: 'Web Pick 6 - 3 rapid prototyping exercises to improve your UX skills' embedly_card_title: '3 rapid prototyping exercises to improve your UX skills' embedly_card_alignment: left embedly_card_url: 'https://uxdesign.cc/3-rapid-prototyping-exercises-to-improve-your-skills-in-ux-design-f2c8b2d690b3' published: true visible: true embedly_card_display: false date: '06-06-2018 00:00' cache_enable: false ---
using System.Collections; using System.Collections.Generic; using UnityEngine; public class LightFlicker : MonoBehaviour { Light myLight; Material mat; [SerializeField] float flickerThreshold = 0.6f; [SerializeField] float noiseSpeed = 0.01f; float noiseTime = 0f; float noiseOffset; [SerializeField] float flickerIntensity = 0.5f; float originalIntensity; Color originalColor; [SerializeField] AudioSource hum; [SerializeField] float humVolumeOriginal; [SerializeField] float humVolumeLow; [SerializeField] float humPitchOriginal; [SerializeField] float humPitchLow; [SerializeField] AudioSource flicker; [SerializeField] AudioClip[] flickerClips; void Start () { myLight = GetComponent<Light>(); mat = GetComponentInParent<MeshRenderer>().material; originalColor = mat.color; originalIntensity = myLight.intensity; noiseOffset = Random.Range(-10000f, 10000f); hum.Play(); } void Update () { noiseTime += noiseSpeed; float noise = Mathf.PerlinNoise(noiseTime+noiseOffset, 0); if (noise > flickerThreshold) { myLight.intensity = flickerIntensity + Random.Range(-0.2f, 0.2f); mat.SetColor("_EmissionColor", Color.gray); hum.volume = humVolumeLow; hum.pitch = humPitchLow; flicker.Stop(); flicker.clip = flickerClips[Random.Range(0, flickerClips.Length)]; flicker.Play(); } else { myLight.intensity = originalIntensity; mat.SetColor("_EmissionColor", originalColor); hum.volume = humVolumeOriginal; hum.pitch = humPitchOriginal; } } }
import { IDependency } from './IDependency'; export interface IDependencyChain extends IDependency { chain: string; }
import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context object Macros { def impl(c: Context) = { import c.universe._ def test(tree: Tree, mode: c.TypecheckMode): String = { try c.typecheck(tree, mode, silent = false).tpe.toString catch { case c.TypecheckException(_, msg) => msg } } q""" println(${test(q"List(1, 2)", c.TERMmode)}) println(${test(q"List", c.TERMmode)}) println(${test(q"RuntimeException", c.TERMmode)}) println(${test(tq"List[Int]", c.TYPEmode)}) println(${test(tq"List", c.TYPEmode)}) println(${test(q"List", c.TYPEmode)}) println(${test(q"List(1, 2)", c.TYPEmode)}) """ } def foo: Unit = macro impl }
#if !BESTHTTP_DISABLE_ALTERNATE_SSL && (!UNITY_WEBGL || UNITY_EDITOR) #pragma warning disable using System; using BestHTTP.SecureProtocol.Org.BouncyCastle.Asn1; using BestHTTP.SecureProtocol.Org.BouncyCastle.Asn1.X509; namespace BestHTTP.SecureProtocol.Org.BouncyCastle.X509 { /** * A holding class for constructing an X509 Key Usage extension. * * <pre> * id-ce-keyUsage OBJECT IDENTIFIER ::= { id-ce 15 } * * KeyUsage ::= BIT STRING { * digitalSignature (0), * nonRepudiation (1), * keyEncipherment (2), * dataEncipherment (3), * keyAgreement (4), * keyCertSign (5), * cRLSign (6), * encipherOnly (7), * decipherOnly (8) } * </pre> */ public class X509KeyUsage : Asn1Encodable { public const int DigitalSignature = 1 << 7; public const int NonRepudiation = 1 << 6; public const int KeyEncipherment = 1 << 5; public const int DataEncipherment = 1 << 4; public const int KeyAgreement = 1 << 3; public const int KeyCertSign = 1 << 2; public const int CrlSign = 1 << 1; public const int EncipherOnly = 1 << 0; public const int DecipherOnly = 1 << 15; private readonly int usage; /** * Basic constructor. * * @param usage - the bitwise OR of the Key Usage flags giving the * allowed uses for the key. * e.g. (X509KeyUsage.keyEncipherment | X509KeyUsage.dataEncipherment) */ public X509KeyUsage( int usage) { this.usage = usage; } public override Asn1Object ToAsn1Object() { return new KeyUsage(usage); } } } #pragma warning restore #endif
# 1. react-ssr-docs + 这个仓库是关于 react 服务端渲染的使用介绍,从 webpack 的基础配置到最后项目成型,都有非常完整详细的介绍,并附带有源代码 + 目前这只是一个 react 服务端渲染的学习文档,后期会加上一个简单的项目,用来实际体验 react 服务端渲染 + 注: 这个项目只是用来学习 react 的服务端渲染,而非安利大家一定要使用服务端渲染,因为 react 和 vue 的服务端渲染和普通的服务端渲染有很多的不一样,所以可以学习一下,提高一下自己的水平 # 2. 技术栈 + 基本上是完全使用了 react 全家桶,后端采用的是 Express + 关于版本,具体可以查看 package.json + react/react-dom 16.x + react-router-dom 5.x + redux 4.x + redux-thunk 2.x + react-redux 7.x + redux-logger 3.x + express 4.x + webpack 4.x + babel 7.x # 3. 项目目录 ``` ├── node_modules/ 第三方依赖包 |── build/ 服务端打包后生成的代码 | └── server.js ├── public/ 客户端打包后生成的代码 │ └── client.js ├── src │ ├── client/ 客户端源代码 │ ├── components/ React 组件 │ ├── containers/ React 容器组件 │ ├── server/ 服务端源代码 │ ├── store/ redux | └── routes.js 路由 ├── .babelrc babel 编译 ├── .gitignore git 忽略文件 ├── package.json ├── webpack.base.js webpack 基础配置 ├── webpack.client.js webpack 客户端配置 └── webpack.server.js webpack 服务端配置 ``` # 4. docs 文档链接 + [01-项目基础架构搭建](./docs/01-项目基础架构搭建.md) + [02-最简单的服务端渲染](./docs/02-最简单的服务端渲染.md) + [03-路由](./docs/03-路由.md) + [04-redux-01](./docs/04-redux-1.md) + [05-redux-02](./docs/05-redux-2.md) + [06-优化](./docs/06-优化.md) + [07-添加CSS样式](./docs/07-添加CSS样式.md) + [08-404和重定向](./docs/08-404和重定向.md) + [09-SEO优化](./docs/09-SEO优化.md)
package mybatis.demo.phase03.test; import java.io.InputStream; import java.util.List; import com.github.pagehelper.PageInfo; import mybatis.demo.phase03.mapper.UserMapper; import mybatis.demo.phase03.po.User; import mybatis.demo.phase03.po.UserExample; import org.apache.ibatis.io.Resources; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.SqlSessionFactory; import org.apache.ibatis.session.SqlSessionFactoryBuilder; import org.junit.Before; import org.junit.Test; import com.github.pagehelper.PageHelper; /** * 测试逆向工程代码和PageHelper分页插件案例 * @date: 2021/6/4 * @auther: liu */ public class Test3 { private SqlSessionFactory sqlSessionFactory; @Before public void init() throws Exception { // 加载全局配置文件(同时把映射文件也加载了) String resource = "phase03/SqlMapConfig.xml"; InputStream inputStream = Resources.getResourceAsStream(resource); // sqlsessionFactory需要通过sqlsessionFactoryBuilder读取全局配置文件信息之后 sqlSessionFactory = new SqlSessionFactoryBuilder().build(inputStream); } @Test public void test() { // 创建UserMapper对象 SqlSession sqlSession = sqlSessionFactory.openSession(); UserMapper mapper = sqlSession.getMapper(UserMapper.class); UserExample example = new UserExample(); List<User> list = mapper.selectByExample(example ); System.out.println(list); } @Test public void test3() { // 创建UserMapper对象 SqlSession sqlSession = sqlSessionFactory.openSession(); UserMapper mapper = sqlSession.getMapper(UserMapper.class); //拦截器 注入实现分页,把sql 拼接到语句上 //编写分页代码 PageHelper.startPage(1, 2); UserExample example = new UserExample(); // 此处返回的list实现类不再是ArrayList,而是PageHelper提供的Page对象 List<User> list = mapper.selectByExample(example); System.out.println(list); //转为 page 对象 PageInfo<User> pageInfo = new PageInfo<User>(list); System.out.println(pageInfo); } }
# react-broadcast [![Travis][build-badge]][build] [![npm package][npm-badge]][npm] [build-badge]: https://img.shields.io/travis/ReactTraining/react-broadcast/master.svg?style=flat-square [build]: https://travis-ci.org/ReactTraining/react-broadcast [npm-badge]: https://img.shields.io/npm/v/react-broadcast.svg?style=flat-square [npm]: https://www.npmjs.com/package/react-broadcast [`react-broadcast`](https://www.npmjs.com/package/react-broadcast) provides a reliable way for React components to propagate state changes to their descendants deep in the component hierarchy, bypassing intermediaries who `return false` from [`shouldComponentUpdate`](https://reactjs.org/docs/react-component.html#shouldcomponentupdate). It was originally built to solve issues that arose from using [`react-router`](https://www.npmjs.com/package/react-router) together with [`react-redux`](https://www.npmjs.com/package/react-redux). The router needed a safe way to communicate state changes to `<Link>`s deep in the component hierarchy, but `react-redux` relies on `shouldComponentUpdate` for performance. `react-broadcast` allows the router to work seamlessly with Redux and any other component that uses `shouldComponentUpdate`. **Please note:** As with anything that uses [context](https://reactjs.org/docs/context.html), this library is experimental. It may cease working in some future version of React. For now, it's a practical workaround for the router. If we discover some better way to do things in the future, rest assured we'll do our best to share what we learn. 
## Installation Using [yarn](https://yarnpkg.com/): $ yarn add react-broadcast Then, use as you would anything else: ```js // using ES6 modules import { Broadcast, Subscriber } from "react-broadcast" // using CommonJS modules var Broadcast = require("react-broadcast").Broadcast var Subscriber = require("react-broadcast").Subscriber ``` The UMD build is also available on [unpkg](https://unpkg.com): ```html <script src="https://unpkg.com/react-broadcast/umd/react-broadcast.min.js"></script> ``` You can find the library on `window.ReactBroadcast`. ## Usage The following is a totally contrived example, but illustrates the basic functionality we're after: ```js import React from "react" import { Broadcast, Subscriber } from "react-broadcast" const users = [{ name: "Michael Jackson" }, { name: "Ryan Florence" }] class UpdateBlocker extends React.Component { shouldComponentUpdate() { // This is how you indicate to React's reconciler that you don't // need to be updated. It's a great way to boost performance when // you're sure (based on your props and state) that your render // output will not change, but it makes it difficult for libraries // to communicate changes down the hierarchy that you don't really // know anything about. return false } render() { return this.props.children } } class App extends React.Component { state = { currentUser: users[0] } componentDidMount() { // Randomly change the current user every 2 seconds. 
setInterval(() => { const index = Math.floor(Math.random() * users.length) this.setState({ currentUser: users[index] }) }, 2000) } render() { return ( <Broadcast channel="currentUser" value={this.state.currentUser}> <UpdateBlocker> <Subscriber channel="currentUser"> {currentUser => <p>The current user is {currentUser.name}</p>} </Subscriber> </UpdateBlocker> </Broadcast> ) } } ``` You may prefer to wrap these components into channel-specific pairs to avoid typos and other problems with the indirection involved with the channel strings: ```js // Broadcasts.js import { Broadcast, Subscriber } from 'react-broadcast' const CurrentUserChannel = 'currentUser' export const CurrentUserBroadcast = (props) => <Broadcast {...props} channel={CurrentUserChannel} /> export const CurrentUserSubscriber = (props) => <Subscriber {...props} channel={CurrentUserChannel} /> // App.js import { CurrentUserBroadcast, CurrentUserSubscriber } from './Broadcasts' <CurrentUserBroadcast value={user}/> <CurrentUserSubscriber>{user => ...}</CurrentUserSubscriber> ``` Enjoy! ## About react-broadcast is developed and maintained by [React Training](https://reacttraining.com). If you're interested in learning more about what React can do for your company, please [get in touch](mailto:hello@reacttraining.com)!
#include "Terrain.h"

#include "../Physics/Ray.h"
#include "ChunkIndex.h"
#include "../SaveError.h"

#include <iostream>
#include <fstream>
#include <sys/stat.h>

using namespace std;

// Half-width (in chunks) of the cube of chunks kept loaded around the player.
const int RENDER_DIST = 5;

// Constructs the terrain entity: ensures the on-disk world folder exists and
// pre-allocates enough Chunk objects in the pool to cover the render cube.
// Throws SaveError if the world folder cannot be created.
Terrain::Terrain(Player* player)
    : Entity("Terrain")
    , player(player)
    , raycast_listener(ID, [this](auto e) { return this->handle_raycast_event(e); })
{
    // create world folder
    if (mkdir("world/", S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH) != 0) {
        if (errno != EEXIST) {
            throw SaveError("could not create world folder");
        }
    }

    // TODO: increase this to add some slack AFTER fixing the blinking bug
    // fill chunk pool: one chunk per cell of the (2*RENDER_DIST+1)^3 cube
    int diameter = 1 + RENDER_DIST * 2;
    int num_chunks = diameter * diameter * diameter;
    for (int i = 0; i < num_chunks; ++i) {
        chunk_pool.push(make_shared<Chunk>());
    }
}

// Per-frame bookkeeping: recycles chunks that moved out of render distance
// back into the pool, then activates pooled chunks for any in-range
// coordinates that are not yet loaded.
void Terrain::update() {
    ChunkCoords center(player->get_position());

    // remove chunks out of bounds while iterating through map
    for(auto it = begin(chunks); it != end(chunks); ) {
        int x_dist = abs(center.x - it->first.x);
        int y_dist = abs(center.y - it->first.y);
        int z_dist = abs(center.z - it->first.z);
        if (x_dist > RENDER_DIST || y_dist > RENDER_DIST || z_dist > RENDER_DIST) {
            auto chunk = it->second;
            // try_set_inactive may fail (e.g. chunk busy elsewhere); in that
            // case the chunk is retried on a later frame rather than dropped.
            if (chunk->try_set_inactive()) {
                it = chunks.erase(it);
                chunk_pool.push(chunk);
                continue;
            }
        }
        ++it;
    }

    // add chunks in bounds if not already in map
    for (int x_offset = -RENDER_DIST; x_offset <= RENDER_DIST; ++x_offset) {
        for (int y_offset = -RENDER_DIST; y_offset <= RENDER_DIST; ++y_offset) {
            for (int z_offset = -RENDER_DIST; z_offset <= RENDER_DIST; ++z_offset) {
                int x = center.x + x_offset;
                int y = center.y + y_offset;
                int z = center.z + z_offset;
                ChunkCoords coords(x, y, z);

                auto it = chunks.find(coords);
                if (it == chunks.end()) {
                    if (chunk_pool.empty()) {
                        // cout << "no chunks available right now" << endl;
                        continue;
                    }
                    auto chunk = chunk_pool.top();
                    chunk_pool.pop();
                    chunk->set_active(coords);
                    chunks.insert({coords, chunk});
                }
            }
        }
    }
}

// Renders the opaque pass of every loaded chunk.
// NOTE: iterate by const reference -- the previous by-value loop copied a
// pair<const ChunkCoords, shared_ptr<Chunk>> (refcount churn) per chunk per frame.
void Terrain::render_opaque(const Camera& camera) const {
    for (const auto& pair : chunks) {
        pair.second->render_opaque(camera);
    }
}

// Renders the transparent pass of every loaded chunk (same const-ref fix).
void Terrain::render_transparent(const Camera& camera) const {
    for (const auto& pair : chunks) {
        pair.second->render_transparent(camera);
    }
}

// Steps a ray from the event through the loaded chunks and acts on the first
// non-air block hit: left click breaks it, any other click places cobblestone
// in the previously visited (empty) cell. Returns true iff a block was
// broken or placed.
bool Terrain::handle_raycast_event(shared_ptr<RaycastEvent> event) {
    cout << "RAYCAST EVENT RECEIVED BY TERRAIN" << endl;
    Ray ray(event->get_ray());
    const float MAX_DISTANCE = 32.0f;   // maximum reach of the raycast, in world units
    const float STEP_DISTANCE = 0.5f;   // ray march step; < 1 block to avoid skipping cells

    // "p_" prefix = previous: the last (empty) cell visited, used as the
    // placement target when the ray finally hits something solid.
    BlockCoords p_block_coords(ray.get_end());
    ChunkIndex p_index(p_block_coords);
    cout << "raycast start: " << p_block_coords << endl;

    shared_ptr<Chunk> chunk;
    auto it = chunks.find(ChunkCoords(p_block_coords));
    if (it == end(chunks)) {
        cout << "raycast detected non-existent chunk!" << endl;
        return false;
    }
    chunk = it->second;

    // find the first block that isn't air and do something
    for (; ray.get_length() < MAX_DISTANCE; ray.step(STEP_DISTANCE)) {
        bool crossed_chunks = false;
        BlockCoords curr_block(ray.get_end());

        // don't check the same block twice
        if (curr_block == p_block_coords) continue;

        // When the ray crosses a chunk boundary, keep the previous chunk
        // around so a block can still be placed in it.
        shared_ptr<Chunk> p_chunk;
        if (auto curr_coords = ChunkCoords(curr_block); curr_coords != ChunkCoords(p_block_coords)) {
            crossed_chunks = true;
            auto it = chunks.find(curr_coords);
            if (it == end(chunks)) {
                cout << "raycast detected non-existent chunk!" << endl;
                return false;
            }
            p_chunk = chunk;
            chunk = it->second;
        }

        auto index = ChunkIndex(curr_block);
        BlockData block_data = chunk->get_block(index);
        cout << "Block: " << block_data.get_name() << endl;
        if (block_data.id != BlockID::AIR) {
            if (event->is_left_click()) {
                // break block
                chunk->set_block(index, {BlockID::AIR});
                // chunk->mesh_job();
            } else {
                // place block in the last empty cell, which may live in the
                // previous chunk if the boundary was just crossed
                if (crossed_chunks) {
                    p_chunk->set_block(p_index, {BlockID::COBBLESTONE});
                    // p_chunk->mesh_job();
                } else {
                    chunk->set_block(p_index, {BlockID::COBBLESTONE});
                    // chunk->mesh_job();
                }
            }
            return true;
        }
        p_index = index;
        p_block_coords = curr_block;
    }
    return false;
}
require "ablerc/version"
require "active_support"
require "active_support/core_ext"
require "rainbow"

# Top-level module for the Ablerc run-control (rc) file library.
# Holds the module-wide configuration (scheme, options, contexts) and the
# entry points for loading rc files and generating stub files.
module Ablerc
  autoload :Option, 'ablerc/option'
  autoload :DSL, 'ablerc/dsl'
  autoload :Context, 'ablerc/context'
  autoload :Configuration, 'ablerc/configuration'
  autoload :StubGenerator, 'ablerc/stub_generator'
  autoload :Errors, 'ablerc/errors'

  # Name of the bootstrap file evaluated by .load! to configure Ablerc itself.
  ABLE_RC_FILE = 'able.rc'

  # Ordered list of context names; rc files are loaded in this order.
  mattr_accessor :scheme
  self.scheme = []

  # Option definitions registered through the DSL.
  mattr_accessor :options
  self.options = []

  # File name of the rc file to look for in each context's path.
  mattr_accessor :rc_file_name
  self.rc_file_name = ''

  mattr_accessor :dsl
  self.dsl = Ablerc::DSL.new

  mattr_accessor :contexts
  self.contexts = Ablerc::Context

  # Extra options forwarded to StubGenerator by .stub.
  mattr_accessor :stub_options
  self.stub_options = {}

  class << self
    # Initializes Ablerc with values from DSL
    def setup(&block)
      Ablerc.dsl.instance_eval(&block)
      # Register default contexts for any scheme entries the DSL did not define.
      scheme.each { |c| dsl.context(c, Ablerc::Context::DEFAULTS[c]) unless contexts.exists? c}
    end

    # Exposes option values from parsed rc files.
    # Aliased as <tt>#config</tt>
    def configuration
      Ablerc::Configuration.instance
    end
    alias :config :configuration

    # Prepares a stub rcfile with defined options
    def stub
      Ablerc::StubGenerator.new({:options => options}.merge(stub_options))
    end

    # Loads the rc files in the order and locations specified by scheme
    # NOTE(review): RcFileMissing is referenced unqualified here — confirm it
    # is defined at top level (it may live under Ablerc::Errors).
    def load_scheme
      raise RcFileMissing, "You must provide a value to rc_file_name" if rc_file_name.blank?

      self.scheme.each do |scheme|
        configuration.load File.expand_path(File.join( contexts[scheme].path, rc_file_name))
      end
    end

    # Evaluates the able.rc bootstrap file found at +path+, then loads the scheme.
    def load!(path)
      load_able_rc! File.expand_path( File.join( path, ABLE_RC_FILE))
    end

    private

    # Root directory of the installed gem.
    def gem_root
      File.expand_path '../..', __FILE__
    end

    # Evaluates the bootstrap file in module context and then loads the
    # configured scheme; returns the Configuration singleton.
    def load_able_rc!(path)
      instance_eval(File.read( path ))
      load_scheme
      return configuration
    end
  end

  # Immediatly load options and rc file configurations
  # (left disabled: loading is explicit via Ablerc.load!)
  #load_able_rc!
end
<?php
declare (strict_types=1);

namespace App\NewsReview\Domain\Routing\Model;

/**
 * Contract for route models in the news-review routing domain.
 */
interface RouteInterface
{
    /**
     * Returns the hostname this route is bound to.
     */
    public function hostname(): string;

    /**
     * Returns an array representation of the route.
     */
    public function toArray(): array;
}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. // This module defines an abstract interface for iterating through pages in a // Parquet column chunk within a row group. It could be extended in the future // to iterate through all data pages in all chunks in a file. #ifndef PARQUET_COLUMN_PAGE_H #define PARQUET_COLUMN_PAGE_H #include <cstdint> #include <memory> #include <string> #include "parquet/statistics.h" #include "parquet/types.h" #include "parquet/util/memory.h" namespace parquet { // TODO: Parallel processing is not yet safe because of memory-ownership // semantics (the PageReader may or may not own the memory referenced by a // page) // // TODO(wesm): In the future Parquet implementations may store the crc code // in format::PageHeader. 
parquet-mr currently does not, so we also skip it
// here, both on the read and write path

// Base class for all page types: wraps the page's raw buffer together with
// its PageType tag. Accessors only; ownership of the buffer is shared.
class Page {
 public:
  Page(const std::shared_ptr<Buffer>& buffer, PageType::type type)
      : buffer_(buffer), type_(type) {}

  PageType::type type() const { return type_; }

  std::shared_ptr<Buffer> buffer() const { return buffer_; }

  // @returns: a pointer to the page's data
  const uint8_t* data() const { return buffer_->data(); }

  // @returns: the total size in bytes of the page's data buffer
  int32_t size() const { return static_cast<int32_t>(buffer_->size()); }

 private:
  std::shared_ptr<Buffer> buffer_;
  PageType::type type_;
};

// V1 data page: carries the value count, the value encoding, the encodings
// used for definition/repetition levels, and (optionally) encoded statistics.
class DataPage : public Page {
 public:
  DataPage(const std::shared_ptr<Buffer>& buffer, int32_t num_values,
           Encoding::type encoding, Encoding::type definition_level_encoding,
           Encoding::type repetition_level_encoding,
           const EncodedStatistics& statistics = EncodedStatistics())
      : Page(buffer, PageType::DATA_PAGE),
        num_values_(num_values),
        encoding_(encoding),
        definition_level_encoding_(definition_level_encoding),
        repetition_level_encoding_(repetition_level_encoding),
        statistics_(statistics) {}

  int32_t num_values() const { return num_values_; }
  Encoding::type encoding() const { return encoding_; }
  Encoding::type repetition_level_encoding() const { return repetition_level_encoding_; }
  Encoding::type definition_level_encoding() const { return definition_level_encoding_; }
  const EncodedStatistics& statistics() const { return statistics_; }

 private:
  int32_t num_values_;
  Encoding::type encoding_;
  Encoding::type definition_level_encoding_;
  Encoding::type repetition_level_encoding_;
  EncodedStatistics statistics_;
};

// A V1 data page whose buffer holds compressed bytes; additionally records
// the size the data will have once decompressed.
class CompressedDataPage : public DataPage {
 public:
  CompressedDataPage(const std::shared_ptr<Buffer>& buffer, int32_t num_values,
                     Encoding::type encoding, Encoding::type definition_level_encoding,
                     Encoding::type repetition_level_encoding, int64_t uncompressed_size,
                     const EncodedStatistics& statistics = EncodedStatistics())
      : DataPage(buffer, num_values, encoding, definition_level_encoding,
                 repetition_level_encoding, statistics),
        uncompressed_size_(uncompressed_size) {}

  int64_t uncompressed_size() const { return uncompressed_size_; }

 private:
  int64_t uncompressed_size_;
};

// V2 data page: levels are stored uncompressed ahead of the (optionally
// compressed) values, so their byte lengths are tracked explicitly.
class DataPageV2 : public Page {
 public:
  DataPageV2(const std::shared_ptr<Buffer>& buffer, int32_t num_values, int32_t num_nulls,
             int32_t num_rows, Encoding::type encoding,
             int32_t definition_levels_byte_length, int32_t repetition_levels_byte_length,
             bool is_compressed = false)
      : Page(buffer, PageType::DATA_PAGE_V2),
        num_values_(num_values),
        num_nulls_(num_nulls),
        num_rows_(num_rows),
        encoding_(encoding),
        definition_levels_byte_length_(definition_levels_byte_length),
        repetition_levels_byte_length_(repetition_levels_byte_length),
        is_compressed_(is_compressed) {}

  int32_t num_values() const { return num_values_; }

  int32_t num_nulls() const { return num_nulls_; }

  int32_t num_rows() const { return num_rows_; }

  Encoding::type encoding() const { return encoding_; }

  int32_t definition_levels_byte_length() const { return definition_levels_byte_length_; }

  int32_t repetition_levels_byte_length() const { return repetition_levels_byte_length_; }

  bool is_compressed() const { return is_compressed_; }

 private:
  int32_t num_values_;
  int32_t num_nulls_;
  int32_t num_rows_;
  Encoding::type encoding_;
  int32_t definition_levels_byte_length_;
  int32_t repetition_levels_byte_length_;
  bool is_compressed_;

  // TODO(wesm): format::DataPageHeaderV2.statistics
};

// Dictionary page for dictionary-encoded column chunks; flags whether the
// dictionary entries are sorted.
class DictionaryPage : public Page {
 public:
  DictionaryPage(const std::shared_ptr<Buffer>& buffer, int32_t num_values,
                 Encoding::type encoding, bool is_sorted = false)
      : Page(buffer, PageType::DICTIONARY_PAGE),
        num_values_(num_values),
        encoding_(encoding),
        is_sorted_(is_sorted) {}

  int32_t num_values() const { return num_values_; }

  Encoding::type encoding() const { return encoding_; }

  bool is_sorted() const { return is_sorted_; }

 private:
  int32_t num_values_;
  Encoding::type encoding_;
  bool is_sorted_;
};

// Abstract page iterator interface. This way, we can feed column pages to the
// ColumnReader through whatever mechanism we choose
class PageReader {
 public:
  virtual ~PageReader() {}

  // @returns: shared_ptr<Page>(nullptr) on EOS, std::shared_ptr<Page>
  // containing new Page otherwise
  virtual std::shared_ptr<Page> NextPage() = 0;
};

// Abstract sink for serialized pages, implemented per output format/stream.
class PageWriter {
 public:
  virtual ~PageWriter() {}

  // The Column Writer decides if dictionary encoding is used if set and
  // if the dictionary encoding has fallen back to default encoding on reaching dictionary
  // page limit
  virtual void Close(bool has_dictionary, bool fallback) = 0;

  virtual int64_t WriteDataPage(const CompressedDataPage& page) = 0;

  virtual int64_t WriteDictionaryPage(const DictionaryPage& page) = 0;

  virtual bool has_compressor() = 0;

  virtual void Compress(const Buffer& src_buffer, ResizableBuffer* dest_buffer) = 0;
};

}  // namespace parquet

#endif  // PARQUET_COLUMN_PAGE_H
--- layout: post title: VolgaCTF 2017 Teaser の write-up categories: [ctf] date: 2017-02-26 05:51:00 +0900 --- チーム Harekaze で [VolgaCTF 2017 Teaser](https://teaser.2017.volgactf.ru/) に参加しました。 最終的にチームで 110 点を獲得し、順位は 34 位 (得点 80 チーム中) でした。うち、私は 1 問を解いて 100 点を入れました。 以下、解いた問題の write-up です。 ## [Stegano 100] Universal Text UTF-16LE のテキストファイル…のはずなのですが、`00 46 00 4C 41 00 47 00` と途中からビッグエンディアンになってしまっているようです。 ```python s = open('message.txt', 'rb').read() open('result.txt', 'wb').write(b'\xff\xfe' + s[0x37:]) ``` で読めました。 ``` UNICODE_SOMETIMES_HURTS ``` ## 感想 この CTF が終わるまで起きているつもりでしたが、睡魔には勝てませんでした (´・ω・`)
# install these packages
library(tidyverse)
library(reshape2)
library(here)
library(ggthemes)
library(knitr)

source("functions.r") # sourcing all functions

########################################################################
#### Read in data ------------------------------------------------------
########################################################################

# Read in raw NHTS datasets
# (missing values are coded as "." or the negative sentinel codes -10..-1)
raw.person2001 <- read.table(here("data", "person2001.csv"), fill=TRUE, header=TRUE,
                             sep=",", na.strings = c(".", seq(-10,-1)))
raw.trip2001 <- read.table(here("data", "trip2001.csv"), fill=TRUE, header=TRUE,
                           sep=",", na.strings = c(".", seq(-10,-1)))
raw.person2009 <- read.table(here("data", "person2009.csv"), fill=TRUE, header=TRUE,
                             sep=",", na.strings = c(".", seq(-10,-1)))
raw.trip2009 <- read.table(here("data", "trip2009.csv"), fill=TRUE, header=TRUE,
                           sep=",", na.strings = c(".", seq(-10,-1)))

# Read in CDC Vital Stats files
# (file basenames; make_cdc_travel_df below resolves and cleans each one)
list_of_cdc_travel_files <- c("cdc_travel_2001_2010",
                              "cdc_travel_2001_2010_ped",
                              "cdc_travel_2001_2010_pveh")

# 2000 population distribution for age-standardized calculations
pop2000stand <- read.csv(here("pop2000stand.csv"))

########################################################################
#### Clean data and perform preliminary calculations--------------------
########################################################################

# clean all CDC data and bring them into the R environment
# (list2env promotes each cleaned data frame to a global of the same name)
cleaned.cdc.travel<-lapply(list_of_cdc_travel_files, make_cdc_travel_df)
names(cleaned.cdc.travel) <- list_of_cdc_travel_files
list2env(cleaned.cdc.travel, .GlobalEnv)

# clean NHTS data
person2001 <- clean_all_nhts(raw.person2001)
trip2001 <- clean_all_nhts(raw.trip2001)
person2009 <- clean_all_nhts(raw.person2009)
trip2009 <- clean_all_nhts(raw.trip2009)

# get national population represented by the data
popgroup2001 <- get_pop_bygroup(person2001)
popgroup2009 <- get_pop_bygroup(person2009)

# calculate risk and exposure for each survey year
# (all modes, pedestrian only, and passenger-vehicle only)
exp2001.all <- get_exposure_1year(year_in = 2001)
exp2001.ped <- get_exposure_1year(year_in = 2001, mode_in = "Walk")
exp2001.pveh <- get_exposure_1year(year_in = 2001, mode_in = "Pvehicle")

exp2009.all <- get_exposure_1year(year_in = 2009)
exp2009.ped <- get_exposure_1year(year_in = 2009, mode_in = "Walk")
exp2009.pveh <- get_exposure_1year(year_in = 2009, mode_in = "Pvehicle")

# calculate risk and exposure for 2001-2010 (averaged for the two survey years)
exp.all <- calc_exp_risk_2000(df_2001 = exp2001.all,
                              df_2009 = exp2009.all,
                              df_cdc = cdc_travel_2001_2010)

exp.ped <- calc_exp_risk_2000(df_2001 = exp2001.ped,
                              df_2009 = exp2009.ped,
                              df_cdc = cdc_travel_2001_2010_ped)

exp.pveh <- calc_exp_risk_2000(df_2001 = exp2001.pveh,
                               df_2009 = exp2009.pveh,
                               df_cdc = cdc_travel_2001_2010_pveh)

# calculate decompositions for each type of MV death
decomp.all <- decompose(df_in = exp.all) %>% mutate(type = "Total")
decomp.ped <- decompose(df_in = exp.ped) %>% mutate(type = "Pedestrian")
decomp.pveh <- decompose(df_in = exp.pveh) %>% mutate(type = "Passenger Vehicle")

# combine the previous decompositions into one df
# (factor levels fix the display order in downstream plots/tables)
decomp_combined <- rbind(decomp.all, decomp.ped, decomp.pveh)
decomp_combined$type <- as.factor(decomp_combined$type)
decomp_combined$type <- factor(decomp_combined$type,
                               levels = c("Total", "Passenger Vehicle", "Pedestrian"))

# age standardized rates for death, exposure, and risk
age_stand_all <- calculate_as_rates(exp.all)
age_stand_pveh <- calculate_as_rates(exp.pveh)
age_stand_ped <- calculate_as_rates(exp.ped)
#include "DQMServices/Core/interface/DQMStore.h"
#include "DQMServices/Core/interface/MonitorElement.h"
#include "FWCore/Framework/interface/ESHandle.h"
#include "FWCore/Framework/interface/Event.h"
#include "FWCore/Framework/interface/Frameworkfwd.h"
#include "FWCore/ServiceRegistry/interface/Service.h"
#include "Validation/DTRecHits/interface/utils.h"

#include <string>

#include "DT4DSegmentClients.h"
#include "Histograms.h"

using namespace std;
using namespace edm;

DT4DSegmentClients::DT4DSegmentClients(edm::ParameterSet const& pset) {
  doall_ = pset.getUntrackedParameter<bool>("doall", false);
}

DT4DSegmentClients::~DT4DSegmentClients() {
}

// Harvesting step: fit every booked 4D-segment residual and pull histogram
// with a Gaussian, then (optionally) build the efficiency harvest plots.
//
// The previous revision spelled out ~100 near-identical getter.get()/drawGFit
// statements (one per kind/scope/quantity combination) and dereferenced each
// MonitorElement without a null check; DQMStore::IGetter::get returns a null
// pointer when a histogram is missing, which would crash the harvesting job.
// The loops below generate the exact same paths in the exact same order.
void DT4DSegmentClients::dqmEndJob(DQMStore::IBooker & booker, DQMStore::IGetter & getter) {
  Tutils util;

  // Histograms are booked as "DT/4DSegments/<kind>/4D_<scope>_h<kind><quantity>",
  // e.g. "DT/4DSegments/Res/4D_All_hResAlpha" or "DT/4DSegments/Pull/4D_W2_hPullYRZ".
  static const char* const kinds[] = {"Res", "Pull"};
  static const char* const scopes[] = {"All", "W0", "W1", "W2"};
  static const char* const quantities[] = {"Alpha", "Beta", "X", "Y", "BetaRZ", "YRZ"};

  // Order matches the original code: all residuals first, then all pulls;
  // within a kind: All, W0, W1, W2; within a scope: the six quantities.
  for (const char* const kind : kinds) {
    for (const char* const scope : scopes) {
      for (const char* const quantity : quantities) {
        string path = string("DT/4DSegments/") + kind + "/4D_" + scope + "_h" + kind + quantity;
        MonitorElement* me = getter.get(path);
        if (me == nullptr) {
          continue;  // histogram not booked in this job; skip instead of crashing
        }
        // Gaussian fit in [-0.2, 0.2] with display range [-0.1, 0.1],
        // identical to the arguments used for every histogram before.
        util.drawGFit(me->getTH1(), -0.2, 0.2, -0.1, 0.1);
      }
    }
  }

  if (doall_) {
    // Constructors do the work: each harvests the efficiency plots for its scope.
    HEff4DHitHarvest hEff_S3RPhi("All", booker, getter);
    HEff4DHitHarvest hEff_S3RZ_W0("W0", booker, getter);
    HEff4DHitHarvest hEff_S3RZ_W1("W1", booker, getter);
    HEff4DHitHarvest hEff_S3RZ_W2("W2", booker, getter);
  }
}

// declare this as a framework plugin
#include "FWCore/Framework/interface/MakerMacros.h"
DEFINE_FWK_MODULE(DT4DSegmentClients);
<?
// English localization strings for Social Network log comment actions
// (Bitrix $MESS lang-file format; keys are referenced by the component code).
$MESS["SONET_LOG_COMMENT_EMPTY"] = "The message text is empty.";
$MESS["SONET_LOG_COMMENT_NO_PERMISSIONS"] = "You don't have permission to add comments.";
$MESS["SONET_LOG_CREATED_BY_ANONYMOUS"] = "Unauthorized Visitor";
?>
<?php
/**
 * Created by: Rafael Dourado
 * Date: 2020-10-28
 * Time: 10:44
 */

declare(strict_types=1);

namespace Mercatus\PaymentApi\Domain\Repositories\Getnet;

use Mercatus\PaymentApi\Domain\OrderInterface;
use Mercatus\PaymentApi\Domain\PaymentMethods\CreditCardInterface;
use Mercatus\PaymentApi\Domain\PaymentMethods\Getnet\GetnetCreditCardInterface;
use Mercatus\PaymentApi\Domain\ResponseInterface;
use Mercatus\PaymentApi\Domain\UserInterface;
use Mercatus\PaymentApi\Infrastructure\Repositories\Getnet\Exception\AuthenticationException;
use Mercatus\PaymentApi\Infrastructure\Repositories\Getnet\Exception\TokenizationException;

/**
 * Encapsulates access to the Getnet payment API behind this interface.
 * Interface GetnetRepositoryInterface
 * @package Mercatus\PaymentApi\Domain\Repositories\Getnet
 */
interface GetnetRepositoryInterface
{
    /**
     * Authenticates against the API and returns the generated access token.
     * More information at https://developers.getnet.com.br/api#tag/Autenticacao
     * @return string
     * @throws AuthenticationException
     */
    public function authenticate(): string;

    /**
     * Tokenizes the credit card and returns the token generated by the API.
     * More information at https://developers.getnet.com.br/api#tag/Tokenizacao
     * @param CreditCardInterface $card
     * @return string the credit card token
     * @throws TokenizationException|AuthenticationException
     */
    public function tokenizeCard(CreditCardInterface $card): string;

    /**
     * Lists the cards the user has stored in the vault.
     * More information at https://developers.getnet.com.br/api#tag/Cofre%2Fpaths%2F~1v1~1cards%2Fget
     * @param UserInterface $user Owner of the stored cards.
     * @param string $status Status filter for the returned cards. When no
     *                       filter is given, active and renewed cards are returned.
     * @return array
     */
    public function getCardsFrom(UserInterface $user, string $status): array;

    /**
     * Performs a credit card payment and returns a {@link ResponseInterface}.
     * @param GetnetCreditCardInterface $card
     * @param OrderInterface $order
     * @return ResponseInterface
     */
    public function payWithCreditCard(
        GetnetCreditCardInterface $card,
        OrderInterface $order
    ): ResponseInterface;
}
# Tutorial - How to Build a Connector !!! note "Important changes by release" This [page](https://www.notion.so/hummingbot/a26c8bcf30284535b0e5689d45a4fe88?v=869e73f78f0b426288476a2abda20f2c) lists all relevant updates to Hummingbot codebase aimed to help connector developers in making the requisite changes to their connectors. Each exchange connector is comprised of the following key functions: | Functions | Description | | --------------------------------- | --------------------------------------------------------------------------- | | **(1) Placing/Cancelling Orders** | Sending buy/sell/cancel instructions to the exchange. | | **(2) Order book tracking** | Tracking exchange's real-time order book data. | | **(3) Parsing order book data** | Formatting raw order book data into the standard format used by Hummingbot. | | **(4) Active order tracking** | Tracking orders placed by the bot on the exchange. | | **(5) User stream tracker** | Tracking user data specific to the current user of the bot. | ## Getting Started This guide will help you learn about the basic structure of a connector in Hummingbot. Included in this guide is the scope of creating/modifying the necessary components to implement an exchange connector. By the end of this guide, you should: - Have a general understanding of the base classes that serve as building blocks of a connector - Be able to integrate new connectors from scratch Implementing a new connector can generally be split into 3 major tasks: - **Task 1:** [OrderBookDataSource & OrderBookTracker](/developers/connectors/requirements/task1/) - **Task 2:** [UserStreamDataSource, UserStreamTracker & Auth](/developers/connectors/requirements/task2/) - **Task 3:** [Exchange Connector](/developers/connectors/requirements/task3/) ## Tasks and UML Diagram The following diagram displays the tasks and their relevant classes as a checklist to get started. 
![connector tutorial UML](/assets/img/connector-tutorial-uml.svg)

## Order Lifecycle and Market Events

Exchange connectors track status updates of all orders created in Hummingbot and emit events to the strategy modules when the status of their orders changes. Be careful when implementing a new exchange connector to ensure all the status updates and emitted events adhere to the semantics defined by Hummingbot.

### Order Tracking

Order tracking starts when `_create_order()` is called. It is called from within the `buy()` and `sell()` functions. An exchange connector should keep tracking the order's status and emit events for any change of states until the order is completed, cancelled, expired, or failed.

!!! note
    This is done by calling the `start_tracking_order()` method in the `Exchange` class. `start_tracking_order()` should be called before the API request for placing the order is executed.

### Order Lifecycle Flowchart

![Figure 1: Order lifecycle flowchart](/assets/img/connector-order-lifecycle.svg)

### Creating an Order

An order is created by invoking `buy()` or `sell()` in an exchange connector - usually by a strategy module. `buy()` and `sell()` would return immediately with a client-side order ID that Hummingbot uses to track the order's status. They would schedule the order to be submitted to the exchange as soon as possible but would not wait for the reply from the exchange before returning.

### Submitting an Order

In most of our built-in exchange connectors, order submission occurs in the `_create_order()` function - although it may be different for some decentralized exchange connectors. The `_create_order()` method is responsible for performing the necessary trading rule checks before submitting the order via the REST API. Upon receiving a successful response, a `BuyOrderCreatedEvent` or `SellOrderCreatedEvent` would be emitted. Otherwise, a `MarketOrderFailureEvent` would be emitted.
Note that despite the naming, `MarketOrderFailureEvent` is emitted even for limit orders. ### Order Being Filled Other market participants could fill an order over time once it's live on an exchange. Depending on the order types, i.e. limit or market, the order could be filled either immediately or after another market participant fulfils it. For every order fill on our orders, whether partially or entirely, the exchange connector must emit an `OrderFilledEvent`, to notify the strategy modules about the order's progress. ### Order Completion Once an order has been completely filled, the exchange connector must emit a `BuyOrderCompletedEvent` or `SellOrderCompletedEvent`. The exchange connector would stop tracking the order afterward. `BuyOrderCompletedEvent` or `SellOrderCompletedEvent` should always come **after** an `OrderFilledEvent` has been emitted. ### Order Cancellation or Expiry If an order is canceled or expired before it has been completely filled, an `OrderCancelledEvent` or an `OrderExpiredEvent` should be emitted. For centralized exchanges, order tracking should end after emitting an `OrderCancelledEvent` or `OrderExpiredEvent`. On decentralized exchanges - since it's possible for orders to be filled after cancellation or even expiry, due to block delays - the exchange connector may keep tracking the order for a certain amount of time afterwards. ### Order Failure If a failed order has been rejected for any reason other than cancellation or expiry, `MarketOrderFailureEvent` must be emitted. ## InFlightOrder Helper Hummingbot comes with a built-in helper class for exchange connectors to track their order status, the `InFlightOrderBase` class. ![Figure 2: InFlightOrderBase class](/assets/img/connector-in-flight-uml.svg) While developers are free to extend or modify from `InFlightOrderBase` to suit their logic. 
There are a few conventions within Hummingbot's built-in exchange connectors for extending `InFlightOrderBase`, and it is recommended that new exchange connectors stick with the same conventions. Below are some of the functions that are required to be implemented in the new exchange connector.

- `is_done: bool`
  This property indicates whether the order is done or not, whether it has been filled or failed, canceled or expired.

- `is_cancelled: bool`
  This property indicates whether the order has been canceled or not.

- `is_failure: bool`
  This property indicates whether the order has been terminated before completion or not. This includes all cases like order cancellation, expiry, or rejection.

- `base_asset: str`
  The base asset symbol.

- `quote_asset: str`
  The quote asset symbol.

- `update_exchange_order_id(str): void`
  This is called when the market connector has successfully submitted the order to the exchange and has got back an exchange-native order ID. This notifies any coroutines waiting on the `get_exchange_order_id()` function (detailed below).

- `async get_exchange_order_id(): str`
  Returns the exchange-native order ID for the order if the order has been submitted and the exchange-native order ID is known. Otherwise, it would wait until `update_exchange_order_id(str)` is called by the market connector.

- `to_limit_order(): LimitOrder`
  Converts the in-flight order data structure to a `LimitOrder` data object. This should only be used on limit orders.

- `to_json(): Dict[str, Any]`
  Convert the in-flight order data structure to a dictionary that can be serialized into JSON format.

- `from_json(Dict[str, Any]): InFlightOrder`
  Convert a dictionary object containing the relevant order details into an `InFlightOrder` data structure.
import scala.reflect.{ClassTag, classTag}

// For every primitive and special Scala type, checks that the implicitly
// materialized ClassTag is the exact cached instance exposed on the ClassTag
// companion (reference equality via `eq`), then prints the tag itself so the
// expected-output check file can verify its string form.
object Test extends App {
  println(implicitly[ClassTag[Byte]] eq ClassTag.Byte)
  println(implicitly[ClassTag[Byte]])
  println(implicitly[ClassTag[Short]] eq ClassTag.Short)
  println(implicitly[ClassTag[Short]])
  println(implicitly[ClassTag[Char]] eq ClassTag.Char)
  println(implicitly[ClassTag[Char]])
  println(implicitly[ClassTag[Int]] eq ClassTag.Int)
  println(implicitly[ClassTag[Int]])
  println(implicitly[ClassTag[Long]] eq ClassTag.Long)
  println(implicitly[ClassTag[Long]])
  println(implicitly[ClassTag[Float]] eq ClassTag.Float)
  println(implicitly[ClassTag[Float]])
  println(implicitly[ClassTag[Double]] eq ClassTag.Double)
  println(implicitly[ClassTag[Double]])
  println(implicitly[ClassTag[Boolean]] eq ClassTag.Boolean)
  println(implicitly[ClassTag[Boolean]])
  println(implicitly[ClassTag[Unit]] eq ClassTag.Unit)
  println(implicitly[ClassTag[Unit]])
  println(implicitly[ClassTag[Any]] eq ClassTag.Any)
  println(implicitly[ClassTag[Any]])
  println(implicitly[ClassTag[AnyVal]] eq ClassTag.AnyVal)
  println(implicitly[ClassTag[AnyVal]])
  println(implicitly[ClassTag[AnyRef]] eq ClassTag.AnyRef)
  println(implicitly[ClassTag[AnyRef]])
  println(implicitly[ClassTag[Object]] eq ClassTag.Object)
  println(implicitly[ClassTag[Object]])
  println(implicitly[ClassTag[Null]] eq ClassTag.Null)
  println(implicitly[ClassTag[Null]])
  println(implicitly[ClassTag[Nothing]] eq ClassTag.Nothing)
  println(implicitly[ClassTag[Nothing]])
}
#!/usr/bin/env ruby
# Reads lines from ARGF, percent-escapes unsafe characters, and prints the
# parsed URI for each line. Lines that do not parse as URIs are reported
# inline instead of aborting the run.
require 'uri'

ARGF.each_line do |line|
  stripped = line.strip
  begin
    # URI.escape was deprecated in Ruby 2.7 and removed in 3.0. With the old
    # code the resulting NoMethodError was swallowed by the bare `rescue`,
    # silently reporting every line as invalid. URI::DEFAULT_PARSER.escape is
    # the drop-in replacement for percent-escaping unsafe characters.
    print URI.parse(URI::DEFAULT_PARSER.escape(stripped))
  rescue URI::Error
    # Rescue only URI parse failures so that real bugs still surface.
    print "## invalid URI: '#{stripped}' ##"
  end
  print "\n" unless ARGF.eof?
end
<?php

namespace common\models;

use Yii;
use yii\behaviors\SluggableBehavior;
use yii\behaviors\TimestampBehavior;
use yii\behaviors\BlameableBehavior;
use \common\models\base\FlatPageLang as BaseFlatPageLang;

/**
 * This is the model class for table "flat_page_lang".
 */
class FlatPageLang extends BaseFlatPageLang
{
    /**
     * Before validating, drops the slug-generating behavior for existing
     * records so that an already-persisted page keeps its slug, then
     * delegates to the parent validation hook.
     *
     * @return bool whether validation should proceed
     */
    public function beforeValidate()
    {
        $isExistingRecord = !$this->isNewRecord;

        if ($isExistingRecord) {
            // Only new records get a freshly generated slug.
            $this->detachBehavior('sluggable');
        }

        return parent::beforeValidate();
    }
}
# Upgrades the Promethee data structure of every record of a given model by
# running each component hash through its type-specific upgrader class.
# Invoked from a rake task, e.g. `rake promethee:upgrade_structure[Page]`.
class Promethee::StructureUpgraderService
  # Maps a component's `type` value to the upgrader class that knows how to
  # rewrite that component's data hash.
  BASE_COMPONENTS = {
    aside: Promethee::StructureUpgrader::Components::Aside,
    blockquote: Promethee::StructureUpgrader::Components::Blockquote,
    collection: Promethee::StructureUpgrader::Components::Collection,
    collection_item: Promethee::StructureUpgrader::Components::CollectionItem,
    column: Promethee::StructureUpgrader::Components::Column,
    cover: Promethee::StructureUpgrader::Components::Cover,
    faq: Promethee::StructureUpgrader::Components::Faq,
    faq_item: Promethee::StructureUpgrader::Components::FaqItem,
    image: Promethee::StructureUpgrader::Components::Image,
    page: Promethee::StructureUpgrader::Components::Page,
    row: Promethee::StructureUpgrader::Components::Row,
    slider: Promethee::StructureUpgrader::Components::Slider,
    slider_item: Promethee::StructureUpgrader::Components::SliderItem,
    table: Promethee::StructureUpgrader::Components::Table,
    table_cell: Promethee::StructureUpgrader::Components::TableCell,
    text: Promethee::StructureUpgrader::Components::Text,
    video: Promethee::StructureUpgrader::Components::Video
  }

  attr_accessor :objects

  # model_name - String name of an ActiveRecord model; exits the process with
  # a usage message when the constant cannot be resolved.
  def initialize(model_name)
    begin
      model_class = model_name.constantize
      objects = model_class.all
    rescue
      puts 'Please provide a valid model name (e.g. `rake promethee:upgrade_structure[Page]`)'
      exit
    end
    @objects = objects
  end

  # Runs the upgrade over every record whose data looks processable and
  # reports progress on stdout.
  def start
    puts '= START STRUCTURE UPGRADE ='
    puts "Number of objects: #{objects.count}"
    i = 0
    objects.each do |object|
      next unless can_process?(object.data)
      i += 1
      process_object(object)
    end
    puts "Number of processed objects: #{i}"
    puts '====== END UPGRADER ========'
  end

  # Upgrades and saves a single record. A top-level 'components' key marks a
  # localization payload; otherwise the data is a plain component tree.
  def process_object(object)
    puts "Processing object ##{object.id}"
    object.data = object.data.has_key?('components') ? process_localization(object.data) : process_component(object.data)
    object.save
    puts "End processing object ##{object.id}"
  end

  # Upgrades each component of a localization and flattens children into the
  # top-level components list (mutates `data` in place via map!/compact!).
  def process_localization(data)
    data['components'].map! { |component| process_localization_component(component) }.compact!
    # We remove the possible children to concatenate them to the list
    children = []
    data['components'].each { |component| children.concat component.delete('children').to_a }
    data['components'].concat(children).compact!
    data
  end

  # Upgrades one localization component, keeping only its translatable
  # attributes. Returns nil when the component cannot be upgraded.
  def process_localization_component(component)
    upgraded_component = process_component(component)
    return nil if upgraded_component.nil?
    # We only keep the translatable attributes
    upgraded_component['attributes'].keep_if { |key, object_value| object_value['translatable'] }
    upgraded_component
  end

  # Recursively upgrades a component hash and all of its children via the
  # type-matched upgrader class. Returns nil for untyped data.
  def process_component(data)
    return nil unless data.has_key? 'type'
    component_type = data['type']
    component_upgrader = search_component(component_type).new(data)
    data = component_upgrader.upgraded_data
    data['children'] ||= []
    data['children'].map! { |child| process_component(child) }.compact!
    data
  end

  protected

  # A record's data is processable when it is a Hash shaped like either a
  # localization ('components') or a component tree ('children').
  def can_process?(data)
    data.is_a?(Hash) && (data.has_key?("components") || data.has_key?("children"))
  end

  # Looks up the upgrader class for a component type; logs when missing.
  # NOTE(review): a missing type returns nil and process_component will then
  # raise NoMethodError on `.new` — presumably intentional to abort loudly.
  def search_component(type)
    component = components_library[type.to_sym]
    puts "Component <#{type}> not found." if component.nil?
    component
  end

  # Base component map merged with any project-specific additions.
  def components_library
    @components_library ||= BASE_COMPONENTS.merge(custom_components)
  end

  def custom_components
    # Overridden in derived services to register extra component upgraders.
    {}
  end
end
# frozen_string_literal: true

module BunnyMock
  module Exchanges
    class Topic < BunnyMock::Exchange

      # @private
      # @return [String] Multiple subdomain wildcard
      MULTI_WILDCARD = '#'.freeze

      # @private
      # @return [String] Single subdomain wildcard
      SINGLE_WILDCARD = '*'.freeze

      #
      # API
      #

      ##
      # Deliver a message to route with keys matching wildcards
      #
      # @param [Object] payload Message content
      # @param [Hash] opts Message properties
      # @param [String] key Routing key
      #
      # @api public
      #
      def deliver(payload, opts, key)
        delivery_routes = @routes.dup.keep_if { |route, _| key =~ route_to_regex(route) }
        delivery_routes.values.flatten.each { |dest| dest.publish(payload, opts) }
      end

      private

      ##
      # Translate an AMQP topic binding key into an anchored Regexp that
      # matches the routing keys it binds.
      #
      # @param [String] key Binding key, possibly containing wildcards
      # @return [Regexp]
      #
      # @private
      def route_to_regex(key)
        key = key.gsub('.', '\.')
        # Per AMQP topic semantics, '*' substitutes for exactly one word, so
        # it must match at least one character. The previous '(?:\w*)' also
        # matched an empty word, e.g. binding '*' matched the routing key ''.
        key = key.gsub(SINGLE_WILDCARD, '(?:\w+)')
        # '#' substitutes for zero or more dot-separated words.
        # NOTE(review): a binding such as 'a.#' still requires the literal dot
        # after 'a', so it will not match the bare routing key 'a' — TODO
        # confirm whether that RabbitMQ edge case needs to be supported here.
        key = key.gsub(MULTI_WILDCARD, '[\w\.]*\.?')
        Regexp.new("^#{key}$")
      end
    end
  end
end
// Copyright 2021 Code Intelligence GmbH
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "jvm_tooling.h"

#include "coverage_tracker.h"
#include "fuzz_target_runner.h"
#include "gflags/gflags.h"
#include "gtest/gtest.h"

DECLARE_string(cp);
DECLARE_string(jvm_args);
DECLARE_string(target_class);
DECLARE_string(target_args);
DECLARE_string(agent_path);
DECLARE_string(instrumentation_excludes);

namespace jazzer {

// Forward declaration of the helper under test (implemented outside this
// file).
std::vector<std::string> splitOnSpace(const std::string &s);

TEST(SpaceSplit, SpaceSplitSimple) {
  // Escaped spaces ("\\ ") must not act as separators.
  ASSERT_EQ((std::vector<std::string>{"first", "se\\ cond", "third"}),
            splitOnSpace("first se\\ cond third"));
}

class JvmToolingTest : public ::testing::Test {
 protected:
  // After DestroyJavaVM() no new JVM instance can be created in the same
  // process, so we set up a single JVM instance for this test binary which gets
  // destroyed after all tests in this test suite have finished.
  static void SetUpTestCase() {
    FLAGS_jvm_args = "-Denv1=val1;-Denv2=val2";
    FLAGS_instrumentation_excludes = "**";
    jvm_ = std::make_unique<JVM>("test_executable");
    CoverageTracker::Setup(jvm_->GetEnv());
  }

  static void TearDownTestCase() { jvm_.reset(nullptr); }

  static std::unique_ptr<JVM> jvm_;
};

// Definition of the fixture-wide static JVM instance.
std::unique_ptr<JVM> JvmToolingTest::jvm_ = nullptr;

TEST_F(JvmToolingTest, ClassNotFound) {
  ASSERT_THROW(jvm_->FindClass(""), std::runtime_error);
  ASSERT_THROW(jvm_->FindClass("test.NonExistingClass"), std::runtime_error);
  ASSERT_THROW(jvm_->FindClass("test/NonExistingClass"), std::runtime_error);
}

TEST_F(JvmToolingTest, ClassInClassPath) {
  // Both dot- and slash-separated class names must resolve.
  ASSERT_NE(nullptr, jvm_->FindClass("test.PropertyPrinter"));
  ASSERT_NE(nullptr, jvm_->FindClass("test/PropertyPrinter"));
}

TEST_F(JvmToolingTest, JniProperties) {
  // Verifies that the -D system properties passed via FLAGS_jvm_args in
  // SetUpTestCase() are visible inside the JVM.
  auto property_printer_class = jvm_->FindClass("test.PropertyPrinter");
  ASSERT_NE(nullptr, property_printer_class);
  auto method_id =
      jvm_->GetStaticMethodID(property_printer_class, "printProperty",
                              "(Ljava/lang/String;)Ljava/lang/String;");
  ASSERT_NE(nullptr, method_id);
  auto &env = jvm_->GetEnv();
  for (const auto &el : std::vector<std::pair<std::string, std::string>>{
           {"not set property", ""}, {"env1", "val1"}, {"env2", "val2"}}) {
    jstring str = env.NewStringUTF(el.first.c_str());
    auto ret = (jstring)env.CallStaticObjectMethod(property_printer_class,
                                                   method_id, str);
    ASSERT_FALSE(env.ExceptionCheck());
    if (el.second.empty()) {
      ASSERT_EQ(nullptr, ret);
    } else {
      ASSERT_NE(nullptr, ret);
      jboolean is_copy;
      ASSERT_EQ(el.second, jvm_->GetEnv().GetStringUTFChars(ret, &is_copy));
    }
  }
}

TEST_F(JvmToolingTest, SimpleFuzzTarget) {
  // see testdata/test/SimpleFuzzTarget.java for the implementation of the fuzz
  // target
  FLAGS_target_class = "test/SimpleFuzzTarget";
  FLAGS_target_args = "";
  FuzzTargetRunner fuzz_target_runner(*jvm_);

  // normal case: fuzzerTestOneInput returns false
  std::string input("random");
  ASSERT_EQ(RunResult::kOk, fuzz_target_runner.Run(
                                (const uint8_t *)input.c_str(), input.size()));

  // exception is thrown in fuzzerTestOneInput
  input = "crash";
  ASSERT_EQ(
      RunResult::kException,
      fuzz_target_runner.Run((const uint8_t *)input.c_str(), input.size()));
}

// Exposes ExceptionPrinter::getStackTrace() for testing by raising and
// capturing a real JVM exception.
class ExceptionPrinterTest : public ExceptionPrinter {
 public:
  ExceptionPrinterTest(JVM &jvm) : ExceptionPrinter(jvm), jvm_(jvm) {}

  std::string TriggerJvmException() {
    jclass illegal_argument_exception =
        jvm_.FindClass("java.lang.IllegalArgumentException");
    jvm_.GetEnv().ThrowNew(illegal_argument_exception, "Test");
    jthrowable exception = jvm_.GetEnv().ExceptionOccurred();
    // Clear the pending exception so subsequent JNI calls are legal.
    jvm_.GetEnv().ExceptionClear();
    return getStackTrace(exception);
  }

 private:
  const JVM &jvm_;
};

TEST_F(JvmToolingTest, ExceptionPrinter) {
  ExceptionPrinterTest exception_printer(*jvm_);
  // a.k.a std::string.startsWith(java.lang...)
  ASSERT_TRUE(exception_printer.TriggerJvmException().rfind(
                  "java.lang.IllegalArgumentException", 0) == 0);
}

TEST_F(JvmToolingTest, FuzzTargetWithInit) {
  // see testdata/test/FuzzTargetWithInit.java for the implementation of the
  // fuzz target. All string arguments provided in fuzzerInitialize(String[])
  // will cause a crash if input in fuzzerTestOneInput(byte[]).
  FLAGS_target_class = "test/FuzzTargetWithInit";
  FLAGS_target_args = "crash_now crash_harder";
  FuzzTargetRunner fuzz_target_runner(*jvm_);

  // normal case: fuzzerTestOneInput returns false
  std::string input("random");
  ASSERT_EQ(RunResult::kOk, fuzz_target_runner.Run(
                                (const uint8_t *)input.c_str(), input.size()));

  input = "crash_now";
  ASSERT_EQ(
      RunResult::kException,
      fuzz_target_runner.Run((const uint8_t *)input.c_str(), input.size()));

  input = "this is harmless";
  ASSERT_EQ(RunResult::kOk, fuzz_target_runner.Run(
                                (const uint8_t *)input.c_str(), input.size()));

  input = "crash_harder";
  ASSERT_EQ(
      RunResult::kException,
      fuzz_target_runner.Run((const uint8_t *)input.c_str(), input.size()));
}

TEST_F(JvmToolingTest, TestCoverageMap) {
  CoverageTracker::Clear();
  // check that after the initial clear the first coverage counter is 0
  auto coverage_counters_array = CoverageTracker::GetCoverageCounters();
  ASSERT_EQ(0, coverage_counters_array[0]);

  FLAGS_target_class = "test/FuzzTargetWithCoverage";
  FLAGS_target_args = "";
  FuzzTargetRunner fuzz_target_runner(*jvm_);
  // run a fuzz target input which will cause the first coverage counter to
  // increase
  fuzz_target_runner.Run(nullptr, 0);
  ASSERT_EQ(1, coverage_counters_array[0]);

  CoverageTracker::Clear();
  // back to initial state
  ASSERT_EQ(0, coverage_counters_array[0]);

  // calling the fuzz target twice
  fuzz_target_runner.Run(nullptr, 0);
  fuzz_target_runner.Run(nullptr, 0);
  ASSERT_EQ(2, coverage_counters_array[0]);
}
}  // namespace jazzer
package hm.binkley.labs.skratch.math.matrix

import java.util.Objects.hash

/** A type exposing a single element `a` (the 1,1 entry of a 1x1 matrix). */
interface HasA<N, Norm : GeneralNumber<Norm, Norm>, M>
    where N : GeneralNumber<N, Norm>,
          M : SquareMatrix<N, Norm, M> {
    val a: N
}

/**
 * A 1x1 square matrix over the general number type [N]. Most matrix
 * predicates (diagonal, symmetric, triangular) are trivially true for the
 * 1x1 case, and the determinant, transpose and adjugate collapse to the
 * single element [a].
 */
abstract class Matrix1x1<N, Norm : GeneralNumber<Norm, Norm>, M>(
    a: N,
) : SquareMatrix<N, Norm, M>(1, listOf(a)),
    HasA<N, Norm, M>
    where N : GeneralNumber<N, Norm>,
          M : Matrix1x1<N, Norm, M> {
    constructor(m: Holder<N, Norm>) : this(m.a)

    /** Value holder used by the secondary constructor. */
    data class Holder<N, Norm : GeneralNumber<Norm, Norm>>(
        val a: N,
    ) where N : GeneralNumber<N, Norm>

    override val a: N get() = this[1, 1]
    // For a 1x1 matrix these all reduce to (functions of) the lone element.
    override val det get() = a
    override val conj: M get() = matrixCtor(a.conj)
    override val T get() = matrixCtor(a)
    override val adj: M get() = matrixCtor(a)

    /** Creates an element of type [N] from a raw [Long]. */
    protected abstract fun elementCtor(n: Long): N

    /** Creates the concrete 1x1 matrix type from a single element. */
    protected abstract fun matrixCtor(a: N): M

    override fun matrixCtor(values: List<N>) = matrixCtor(values[0])

    override operator fun unaryMinus() = matrixCtor(-a)
    override operator fun plus(other: M) = matrixCtor(a + other.a)
    override operator fun times(other: Long) = this * elementCtor(other)
    override operator fun div(other: M): M =
        if (other.isSingular()) throw ArithmeticException("Divisor is singular")
        else this * other.multInv

    override operator fun div(other: Long) = this / elementCtor(other)

    override fun isDiagonal() = true
    override fun isSymmetric() = true
    override fun isZero() = isDiagonal() && a.isZero()
    override fun isUnit() = isDiagonal() && a.isUnit()
    override fun isUpperTriangular() = true
    override fun isLowerTriangular() = true

    override fun symmetricPart() = (this + T) / elementCtor(2L)
    override fun antisymmetricPart() = (this - T) / elementCtor(2L)

    @Suppress("UNCHECKED_CAST")
    override fun equals(other: Any?) = this === other ||
        javaClass == other?.javaClass &&
        equivalent(other as M)

    override fun hashCode() = hash(a)

    // "I" denotes the identity matrix.
    override fun toString() = if (isUnit()) "I" else "[$a]"
}
/// ISO 8601 week/ordinal-date helpers for [DateTime].
extension DateWeekExtensions on DateTime {
  /// The ISO 8601 week of year [1..53].
  ///
  /// Algorithm from https://en.wikipedia.org/wiki/ISO_week_date#Algorithms
  int get weekOfYear {
    // Add 3 to always compare with January 4th, which is always in week 1
    // Add 7 to index weeks starting with 1 instead of 0
    final woy = ((ordinalDate - weekday + 10) ~/ 7);

    // If the week number equals zero, it means that the given date belongs to the preceding (week-based) year.
    if (woy == 0) {
      // The 28th of December is always in the last week of the year
      return DateTime(year - 1, 12, 28).weekOfYear;
    }

    // If the week number equals 53, one must check that the date is not actually in week 1 of the following year
    // (a year has 53 ISO weeks only when it starts and/or ends on a Thursday).
    if (woy == 53 &&
        DateTime(year, 1, 1).weekday != DateTime.thursday &&
        DateTime(year, 12, 31).weekday != DateTime.thursday) {
      return 1;
    }

    return woy;
  }

  /// The ordinal date, the number of days since December 31st the previous year.
  ///
  /// January 1st has the ordinal date 1
  ///
  /// December 31st has the ordinal date 365, or 366 in leap years
  int get ordinalDate {
    // Cumulative day counts at the start of each month (non-leap year).
    const offsets = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334];
    return offsets[month - 1] + day + (isLeapYear && month > 2 ? 1 : 0);
  }

  /// True if this date is on a leap year.
  bool get isLeapYear {
    return year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
  }
}
--- title: Test date: 2019-07-06T20:13:19.000+00:00 background_color: "#B4CEC8" contact_info: twitter_handle: forestryio github_handle: forestryio email: les.turner@me.com twitter_url: https://twitter.com/forestryio github_url: https://github.com/forestryio type: '' --- ## This is a test
require 'nokogiri'
require 'fileutils'
require 'csv'
require 'net/http'

# Facade for sitemap generation utilities. The actual implementations are
# autoloaded on first use from the sitemap_gen/ subdirectory.
module SitemapGen
  # Directories skipped while walking a site tree.
  IGNORE_DIRS_REGEX = /img|cgi-bin|images|css|js/i

  # NOTE(review): SitemapGen::CSV shadows the stdlib CSV constant inside this
  # namespace — presumably intentional; verify call sites resolve the right one.
  autoload :CSV, 'sitemap_gen/csv'
  autoload :Fixer, 'sitemap_gen/fixer'
  autoload :XMLCrawler, 'sitemap_gen/xml_crawler'

  # Generates a sitemap for the files under +dir_path+, rooted at +base_url+.
  def self.generate(dir_path, base_url, save_path = nil, checking_url = false)
    CSV.new(dir_path, base_url, save_path, checking_url).execute
  end

  # Runs the fixer over an existing sitemap directory.
  def self.fix(dir_path)
    Fixer.new(dir_path).execute
  end

  # Crawls an XML sitemap at +xml_path+ and writes results to +save_path+.
  def self.crawl_xml(xml_path, save_path)
    XMLCrawler.execute(xml_path, save_path)
  end
end
import { Record, Set } from 'immutable';
import { Dmca } from './dmca/dmca';
import { Cp } from './cp';

/**
 * Immutable record describing a takedown request. Identity (equality and
 * hashing) is based on the persisted `id` when one is set, falling back to
 * structural Record semantics for unsaved instances.
 */
export class Takedown extends Record(
	{
		id: undefined,
		reporterId: undefined,
		involvedIds: [],
		// We must send usernames becasue of T168571
		// @link https://phabricator.wikimedia.org/T168571
		involvedNames: [],
		created: undefined,
		status: 'clean',
		error: undefined,
		siteId: undefined,
		type: undefined,
		pageIds: new Set(),
		metadataIds: new Set(),
		dmca: new Dmca(),
		cp: new Cp()
	},
	'Takedown'
) {
	/**
	 * Wraps incoming plain data in the proper immutable sub-structures
	 * (Sets and nested Records) before handing it to the base Record.
	 *
	 * @param {Object} data plain object of field values
	 */
	constructor( data = {} ) {
		data = {
			...data,
			pageIds: new Set( data.pageIds || [] ),
			metadataIds: new Set( data.metadataIds ? data.metadataIds : [] ),
			dmca: new Dmca( data.dmca ? data.dmca : {} ),
			cp: new Cp( data.cp ? data.cp : {} )
		};

		super( data );
	}

	/**
	 * Equality by persisted id when both sides have one; otherwise falls
	 * back to Immutable's structural equality.
	 *
	 * @param {*} other
	 * @return {boolean}
	 */
	equals( other ) {
		if ( !( other instanceof Takedown ) ) {
			return super.equals( other );
		}

		if ( typeof other.id === 'undefined' || typeof this.id === 'undefined' ) {
			return super.equals( other );
		}

		return ( other.id === this.id );
	}

	/**
	 * Hash consistent with equals(): the id when set, structural otherwise.
	 *
	 * @return {number}
	 */
	hashCode() {
		if ( typeof this.id === 'undefined' ) {
			return super.hashCode();
		}

		return this.id;
	}
}
require "spec_helper" RSpec.describe Array do let(:empty_array) {["","","","","","",""]} let(:test_connected_four) {["RED","RED","RED","RED","YELLOW","YELLOW","YELLOW"]} let(:test_unconnected_four) {["RED", "YELLOW", "RED", "RED", "RED", "", "YELLOW"]} describe "#all_empty?" do context "checks for empty elements in array" do it "returns true if all elements are empty strings" do expect(empty_array.all_empty?).to be true end end end describe "#four_connected?" do context "checks whether four adjacent elements in array are same" do it "returns true if the condition is met" do expect(test_connected_four.four_connected?).to be true end it "returns false if the array is empty" do expect(empty_array.four_connected?).to be false end it "returns false if there are four same elements but they are not connected" do expect(test_unconnected_four.four_connected?).to be false end end end describe "#any_empty?" do context "checks whether any cells are empty" do it "returns true for an empty array" do expect(empty_array.any_empty?).to be true end it "returns true for an array with one empty element" do expect(test_unconnected_four.any_empty?).to be true end it "returns false if all cells are populated" do expect(test_connected_four.any_empty?).to be false end end end describe "#none_empty?" do context "checks whether all cells are full" do it "returns false for an empty array" do expect(empty_array.none_empty?).to be false end it "returns false for an array with one empty cell" do expect(test_unconnected_four.none_empty?).to be false end it "returns true for a fully populated array" do expect(test_connected_four.none_empty?).to be true end end end let(:grid) { [[1,2,3],[1,2,3],[1,2,3]] } describe "#diagonals" do context "collects all possible diagonals of a 2d array as arrays" do it "returns a 2d array of diagonals" do expect(grid.diagonals).to eq [[1, 2], [1, 2, 3], [2, 3], [3, 2], [3, 2, 1], [2, 1]] end end end end
import 'package:cartesian_graph/coordinates.dart';
import 'package:cartesian_graph/graph_bounds.dart';
import 'package:flutter_test/flutter_test.dart';

// Tests for GraphBounds: constructor argument validation and containment
// checks for Coordinates and individual x/y values (bounds are inclusive).
void main() {
  group('Input validation', () {
    test('should mandate larger max x than min x', () {
      expect(() => GraphBounds(2, 1, -2, 2), throwsAssertionError);
    });

    test('should mandate larger max y than min y', () {
      expect(() => GraphBounds(0, 1, 2, 1), throwsAssertionError);
    });
  });

  group('Checks if contains coordinates',(){
    test('when coordinates are contained',(){
      expect(GraphBounds(-1,1,-1,1).isWithin(Coordinates(0,0)), true);
    });

    test('when coordinates are contained on boundary',(){
      expect(GraphBounds(-1,1,-1,1).isWithin(Coordinates(1,1)), true);
    });

    test('when coordinates are outside x',(){
      expect(GraphBounds(-1,1,-1,1).isWithin(Coordinates(2,1)), false);
    });

    test('when coordinates are outside y',(){
      expect(GraphBounds(-1,1,-1,1).isWithin(Coordinates(1,2)), false);
    });
  });

  group('Checks if contains values',(){
    group('x values',(){
      test('when x value is within bounds',(){
        expect(GraphBounds(-1,1,-1,1).isXWithin(0), true);
      });

      test('when x value is beneath bounds',(){
        expect(GraphBounds(-1,1,-1,1).isXWithin(-2), false);
      });

      test('when x value above bounds',(){
        expect(GraphBounds(-1,1,-1,1).isXWithin(2), false);
      });
    });

    group('y values',(){
      test('when y value is within bounds',(){
        expect(GraphBounds(-1,1,-1,1).isYWithin(0), true);
      });

      test('when y value is beneath bounds',(){
        expect(GraphBounds(-1,1,-1,1).isYWithin(-2), false);
      });

      test('when y value above bounds',(){
        expect(GraphBounds(-1,1,-1,1).isYWithin(2), false);
      });
    });
  });
}
#include "FileReader.h" namespace FileReader { std::string readFileAsString(std::string filePath) { std::ifstream t(filePath); std::stringstream buffer; buffer << t.rdbuf(); return buffer.str(); } } // namespace FileReader
package software.orpington.rozkladmpk.routeDetails

import software.orpington.rozkladmpk.BaseView
import software.orpington.rozkladmpk.data.model.RouteDirections
import software.orpington.rozkladmpk.data.model.RouteInfo
import software.orpington.rozkladmpk.data.model.Timeline
import software.orpington.rozkladmpk.data.source.IDataSource

/**
 * MVP contract for the route-details screen. One presenter drives four
 * sub-views (info, directions, timetable, timeline), each of which attaches
 * and detaches itself independently.
 */
interface RouteDetailsContract {

    /** Presenter side: loads data and reacts to user interaction. */
    interface Presenter {
        fun attachInfoView(view: InfoView)
        fun detachInfoView()

        fun attachDirectionsView(view: DirectionsView)
        fun detachDirectionsView()

        fun attachTimetableView(view: TimetableView)
        fun detachTimetableView()

        fun attachTimelineView(view: TimelineView)
        fun detachTimelineView()

        fun setRouteID(id: String)
        fun setStopName(name: String)
        fun setDirection(direction: String)

        fun loadRouteInfo()

        fun loadRouteDirections()
        fun onDirectionClicked(directionIdx: Int)
        fun onDirectionFavouriteClicked(directionIdx: Int)

        fun loadTimeTable()
        fun onTimeClicked(time: String)

        fun loadTimeline()
        fun setTimelinePosition(position: Int)

        fun mapClicked()

        // State save/restore across configuration changes.
        fun getState(): RouteDetailsState
        fun setState(state: RouteDetailsState)

        fun setDepartureTime(departureTime: String, tripID: Int)
    }

    /** Header view showing general route information and tab switching. */
    interface InfoView : BaseView {
        fun showRouteInfo(routeInfo: RouteInfo)
        fun switchToTimetableTab()
        fun switchToTimelineTab()
    }

    /** List of route directions with favourite handling. */
    interface DirectionsView : BaseView {
        fun attachPresenter(newPresenter: Presenter)
        fun showRouteDirections(routeDirections: List<String>,
                                favouriteDirections: Set<Int>,
                                idxToHighlight: Int = -1)
        fun highlightDirection(directionIdx: Int)

        fun getFavouriteDirections(
            routeID: String,
            stopName: String,
            isBus: Boolean
        ): Set<String>

        fun setFavouriteDirections(routeID: String,
                                   stopName: String,
                                   isBus: Boolean,
                                   favourites: Set<String>,
                                   favouritesIndices: Set<Int>)
    }

    /** Timetable grid for the selected direction. */
    interface TimetableView : BaseView {
        fun attachPresenter(newPresenter: Presenter)
        fun showTimeTable(
            items: List<TimetableViewHelper.ViewItem>,
            timeToHighlight: String = "",
            hourToScrollTo: HourCoordinates? = null
        )
        fun highlightTime(tag: String)
        fun unhighlightTime(tag: String)

        fun navigateToMap(
            routeID: String,
            direction: String,
            stopName: String
        )
    }

    /** Stop-by-stop timeline for a selected departure. */
    interface TimelineView: BaseView {
        fun attachPresenter(newPresenter: Presenter)
        fun showTimeline(
            timeline: Timeline,
            itemToHighlight: Int = -1,
            itemToScrollTo: Int = -1
        )
    }
}
import { TemplateRef } from '@angular/core';
import { Observable, Subscription } from 'rxjs';
import { ESCAPE } from '@angular/cdk/keycodes';
import { HorizontalConnectionPos, VerticalConnectionPos } from '@angular/cdk/overlay';

/** Strategy applied to the overlay when the page scrolls behind the popover. */
export type LuPopoverScrollStrategy = 'reposition' | 'block' | 'close';

/** Contract between a popover panel and the trigger that opens/closes it. */
export declare interface ILuPopoverPanel {
	scrollStrategy: LuPopoverScrollStrategy;
	closeOnClick: boolean;
	panelId?: string;
	triggerId?: string;
	templateRef?: TemplateRef<any>;
	/** will emit when the panel wants to close */
	close: Observable<void>;
	/** will emit when the panel wants to open */
	open: Observable<void>;
	/** will emit when the panel is hovered */
	hovered: Observable<boolean>;
	/** classes to apply to the panel, uses ' ' for separating values */
	panelClasses: string;
	/** class to apply to the panel content, uses ' ' for separating values */
	contentClasses: string;
	/** keyboard events forwarded by the overlay; used to close on Escape */
	keydownEvents$: Observable<KeyboardEvent>;
	setPositionClasses: (posX: HorizontalConnectionPos, posY: VerticalConnectionPos) => void;
	/** method called by the trigger when it opens the popover */
	onOpen(): void;
	/** method called by the trigger when it closes the popover */
	onClose(): void;
}

/**
 * abstract class for basic implementation of a popover panel
 *
 * Holds all panel state (open flag, CSS class maps, template, scroll
 * strategy); subclasses only provide the concrete event emitters
 * (_emitCloseEvent / _emitOpenEvent / _emitHoveredEvent).
 */
export abstract class ALuPopoverPanel implements ILuPopoverPanel {
	panelId: string;
	triggerId: string;
	protected _isOpen: boolean;
	get isOpen() { return this._isOpen; }
	protected _closeOnClick = false;
	get closeOnClick() { return this._closeOnClick; }
	set closeOnClick(coc: boolean) { this._closeOnClick = coc; }
	protected _trapFocus = false;
	get trapFocus() { return this._trapFocus; }
	set trapFocus(tf: boolean) { this._trapFocus = tf; }
	protected _scrollStrategy: LuPopoverScrollStrategy = 'reposition';
	get scrollStrategy() { return this._scrollStrategy; }
	set scrollStrategy(ss: LuPopoverScrollStrategy) { this._scrollStrategy = ss; }
	protected _templateRef: TemplateRef<any>;
	get templateRef() { return this._templateRef; }
	set templateRef(tr: TemplateRef<any>) { this._templateRef = tr; }
	// Position classes (is-before/after/above/below) maintained by
	// setPositionClasses and merged into panelClassesMap.
	protected _positionClassesMap: any = {};
	protected _panelClasses = '';
	get panelClasses() { return this._panelClasses; }
	set panelClasses(cl: string) { this._panelClasses = cl; }
	// ngClass-style map built from the space-separated panelClasses string.
	get panelClassesMap() {
		const map = this._panelClasses
			.split(' ')
			.filter(c => !!c)
			.reduce((obj: any, className: string) => {
				obj[className] = true;
				return obj;
			}, {});
		// also add position classes
		return { ...map, ...this._positionClassesMap };
	}
	protected _contentClasses = '';
	get contentClasses() { return this._contentClasses; }
	set contentClasses(cl: string) { this._contentClasses = cl; }
	get contentClassesMap() {
		return this._contentClasses
			.split(' ')
			.reduce((obj: any, className: string) => {
				obj[className] = true;
				return obj;
			}, {});
	}
	/** Classes to be passed into the popover's overlay */
	protected _overlayPaneClass: string | string[];
	public get overlayPaneClass() { return this._overlayPaneClass; }
	public set overlayPaneClass(opc) { this._overlayPaneClass = opc; }
	// /** Config object to be passed into the popover's content ngStyle */
	protected _keydownEventsSub: Subscription;
	// Subscribe once; subsequent assignments are ignored.
	// NOTE(review): the subscription is never torn down here — presumably
	// a subclass or the overlay disposal handles cleanup; verify.
	set keydownEvents$(evt$: Observable<KeyboardEvent>) {
		if (!this._keydownEventsSub) {
			this._keydownEventsSub = evt$.subscribe(e => this._handleKeydown(e));
		}
	}
	close: Observable<void>;
	open: Observable<void>;
	hovered: Observable<boolean>;
	abstract _emitCloseEvent(): void;
	abstract _emitOpenEvent(): void;
	abstract _emitHoveredEvent(hovered: boolean): void;
	// Translate the overlay's connection position into CSS state classes.
	setPositionClasses(posX: HorizontalConnectionPos, posY: VerticalConnectionPos): void {
		this._positionClassesMap['is-before'] = posX === 'end';
		this._positionClassesMap['is-after'] = posX === 'start';
		this._positionClassesMap['is-above'] = posY === 'bottom';
		this._positionClassesMap['is-below'] = posY === 'top';
	}
	// Clicking inside the panel closes it only when closeOnClick is set.
	onClick() {
		if (this.closeOnClick) {
			this._emitCloseEvent();
		}
	}
	onOpen() {
		this._isOpen = true;
	}
	onClose() {
		this._isOpen = false;
	}
	/**
	 * TODO: Refactor when @angular/cdk includes feature I mentioned on github see link below.
	 * https://github.com/angular/material2/pull/5493#issuecomment-313085323
	 */
	/** Disables close of popover when leaving trigger element and mouse over the popover */
	onMouseOver() {
		this._emitHoveredEvent(true);
	}
	/** Enables close of popover when mouse leaving popover element */
	onMouseLeave() {
		this._emitHoveredEvent(false);
	}
	/** does nothing but must be overridable */
	onMouseDown($event) {}
	// Escape closes the panel; all other keys are ignored.
	_handleKeydown(event: KeyboardEvent) {
		switch (event.keyCode) {
			case ESCAPE:
				this._emitCloseEvent();
				return;
		}
	}
}
import React from 'react'; import List from 'grommet/components/List'; import PageTypeListItem from './listItem'; export default function PageTypeList({ pageTypes, onMenuItemClick }) { if (!pageTypes || pageTypes && !pageTypes.length) { return null; } return ( <List style={{ maxWidth: '100vw' }}> {pageTypes.map((item, i) => <PageTypeListItem key={item.title} maxOrder={Math.max(...pageTypes.map(j => j.sortOrder))} minOrder={Math.min(...pageTypes.map(k => k.sortOrder))} onMenuItemClick={type => onMenuItemClick(type, i)} {...item} /> )} </List> ); }
/*=== extensible: true extensible: false extensible: false undefined bar ===*/ function basicTest() { function printObj(o) { print('extensible: ' + Object.isExtensible(o)); } var proto = {}; var obj = Object.create(proto); printObj(obj); Object.preventExtensions(obj); printObj(obj); Object.preventExtensions(obj); printObj(obj); try { obj.foo = 'bar'; } catch (e) { print(e.name); } print(obj.foo); // ancestor can still be extended try { proto.foo = 'bar'; } catch (e) { print(e.name); } print(obj.foo); } try { basicTest(); } catch (e) { print(e); } /*=== isExtensible 0 false isExtensible 1 false isExtensible 2 false isExtensible 3 false isExtensible 4 false isExtensible 5 false isExtensible 6 true isExtensible 7 true preventExtensions 0 undefined preventExtensions 1 null preventExtensions 2 true preventExtensions 3 false preventExtensions 4 123 preventExtensions 5 foo preventExtensions 6 1,2,3 preventExtensions 7 [object Object] ===*/ function coercionTest() { // Note: ES5 behavior was to throw a TypeError for non-object values. ES2015 // changes this to treat them as already non-extensible objects instead. // This goes for undefined and null too, even though they are not normally // object coercible! var values = [ undefined, null, true, false, 123, 'foo', [1,2,3], { foo: 1, bar: 1 } ]; for (i = 0; i < values.length; i++) { print('isExtensible', i); print(Object.isExtensible(values[i])); } for (i = 0; i < values.length; i++) { print('preventExtensions', i); print(Object.preventExtensions(values[i])); } } try { coercionTest(); } catch (e) { print(e); }
package fr.eisti.recuit; /** * Users: Lucie Anglade, Cécile Riquart * Date: 12/5/14 * Time: 3:29 PM * To change this template use File | Settings | File Templates. */ public class AffichageMatriceCarreeDouble { /** * afficherMatriceDouble : affiche une matrice de double * @param matriceAffiche : la matrice à afficher * @param prompt : petit commentaire associé à l'affichage de la matrice */ public static void afficherMatriceDouble(double[][] matriceAffiche, String prompt) { int i,j; int tailleI, tailleJ; try { tailleI = matriceAffiche.length; tailleJ = matriceAffiche[0].length; //À modifier pour matrices non carrées System.out.println("############ "+prompt+" ############"); System.out.println(); for (i=0 ; i<tailleI ; i++) { for (j=0 ; j<tailleJ ; j++) { if (j==0) { System.out.print("| "+matriceAffiche[i][j]+" | "); } else { System.out.print(matriceAffiche[i][j]+" | "); } } System.out.println(); } System.out.println(); } catch (NullPointerException | ArrayIndexOutOfBoundsException e) { System.err.println("Erreur lors de l'affichage : matrice vide ou dépassement : "+e.getMessage()); } } }
'use strict'; var fs = require('fs'); var path = require('path'); var rimraf = require('rimraf'); exports.getCacheDir = getCacheDir; function getCacheDir(index, name) { name = name || 'api'; var tmpPath = path.resolve(__dirname, '../../../tmp', name); try { fs.mkdirSync(tmpPath); } catch (err) {} var dir = path.resolve(tmpPath, 'test' + index); rimraf.sync(dir); return dir; }
# Basic API for node and mongodb

### Prerequisites

```
- node js ^8.x.x
- mongodb
```

### Installing

Clona el repositorio

```
git clone https://github.com/josuedor/api-node-basic.git
```

Instala las dependencias

```
npm install
```

Configura las variables de entorno de la aplicación, crea tu archivo .env en la raíz del proyecto.

```
IP=127.0.0.1
PORT=3000
DBURL=mongodb://localhost/database
ORIGIN=http://localhost:8080
CREDENTIALS=true
```

Ejecuta el proyecto

```
npm start
```

## Documentation

[Documentación v1.0](https://josuedor.github.io/api-node-basic/)

## Client

[Api-node-basic-cli.](https://github.com/josuedor/api-node-basic-cli)

## License

This project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details
sub data_do_nothing { } __DATA__ sub data_another_fn { } =head1 CAPTURE ME in DATA =cut
{- "Devuelve True/False si un elemento está dentro de una lista" Nota: Un string es una lista de caracteres "ABC" == ['A', 'B', 'C'] > elem 3 [1,2,3,4] True > elem 3 [1,2,4] False > elem 'a' "abc" True > elem 'a' "bc" False -}
package util import ( "os" "github.com/Cray-HPE/yapl/model" "gopkg.in/yaml.v2" ) func getCacheDir() string { res := "/etc/cray/yapl/.cache" if mp := os.Getenv("CACHE_DIR"); mp != "" { res = mp } return res } func PushToCache(genericYAML model.GenericYAML) error { if err := os.MkdirAll(getCacheDir(), os.ModePerm); err != nil { return err } f, err := os.Create(getCacheDir() + "/" + string(genericYAML.Metadata.Id)) if err != nil { return err } defer f.Close() out, _ := yaml.Marshal(genericYAML) if _, err = f.Write(out); err != nil { return err } return nil } func PopFromCache(id string) (model.GenericYAML, error) { ret, err := ReadYAML(getCacheDir() + "/" + id) return ret, err } func HasRunAlready(id string) bool { genericYAML, _ := PopFromCache(id) return genericYAML.Metadata.Completed } func ClearCache() error { return os.RemoveAll(getCacheDir()) } func IsCached(id string) bool { if _, err := os.Stat(getCacheDir() + "/" + id); os.IsNotExist(err) { return false } return true }
// Copyright (c) Johnny Z. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

namespace DotNetty.NetUV.Tests.Handles
{
    using System;
    using DotNetty.NetUV.Handles;
    using DotNetty.NetUV.Native;
    using Xunit;

    // Verifies Pipe.GetSocketName/GetPeerName semantics: both throw EBADF
    // before bind/connect, the bound side reports the pipe name as its socket
    // name, and the connecting side reports it as its peer name.
    public sealed class PipeGetSockNameTests : IDisposable
    {
        Loop loop;
        Pipe listener;
        int connectionError;   // server-side accept failures
        int connectedCount;    // client connections that saw the expected names
        int closeCount;        // closed handles (listener + client => expect 2)

        [Fact]
        public void Run()
        {
            this.loop = new Loop();
            Pipe server = this.loop.CreatePipe();

            // Unbound pipe: both name queries must fail with EBADF.
            var error = Assert.Throws<OperationException>(() => server.GetSocketName());
            Assert.Equal(ErrorCode.EBADF, error.ErrorCode);
            error = Assert.Throws<OperationException>(() => server.GetPeerName());
            Assert.Equal(ErrorCode.EBADF, error.ErrorCode);

            string pipeName = GetPipeName();
            server.Bind(pipeName);
            string name = server.GetSocketName();
            Assert.Equal(pipeName, name);

            // Bound but not connected: peer name is still unavailable.
            error = Assert.Throws<OperationException>(() => server.GetPeerName());
            Assert.Equal(ErrorCode.ENOTCONN, error.ErrorCode);

            this.listener = server.Listen(this.OnConnection);

            Pipe client = this.loop
                .CreatePipe()
                .ConnectTo(pipeName, this.OnConnected);

            // Client side: no local name, peer name is the server's pipe name.
            name = client.GetSocketName();
            Assert.True(string.IsNullOrEmpty(name));
            name = client.GetPeerName();
            Assert.Equal(pipeName, name);

            // Run the loop until both handles are closed by the callbacks.
            this.loop.RunDefault();
            Assert.Equal(0, this.connectionError);
            Assert.Equal(1, this.connectedCount);
            Assert.Equal(2, this.closeCount);
        }

        // Client connect callback: re-checks names, then closes both handles.
        void OnConnected(Pipe pipe, Exception exception)
        {
            if (exception == null)
            {
                string peerName = pipe.GetPeerName();
                string sockName = pipe.GetSocketName();

                if (peerName == GetPipeName() 
                    && string.IsNullOrEmpty(sockName))
                {
                    this.connectedCount++;
                }
            }

            this.listener.CloseHandle(this.OnClose);
            pipe.CloseHandle(this.OnClose);
        }

        void OnClose(StreamHandle handle)
        {
            handle.Dispose();
            this.closeCount++;
        }

        void OnConnection(Pipe pipe, Exception exception)
        {
            // This function *may* be called, depending on whether accept or the
            // connection callback is called first.
            if (exception != null)
            {
                this.connectionError++;
            }
        }

        // Platform-specific pipe endpoint (named pipe on Windows, unix socket
        // path elsewhere).
        static string GetPipeName() => Platform.IsWindows
            ? "\\\\?\\pipe\\uv-test2"
            : "/tmp/uv-test2-sock";

        public void Dispose()
        {
            this.listener?.Dispose();
            this.listener = null;

            this.loop?.Dispose();
            this.loop = null;
        }
    }
}
FecMall用户取消订单 ============== > 订单创建后,用户进行订单取消的操作 ### 订单取消操作 用户可以在`账户中心`, 订单管理功能页面,点击`取消`按钮 ![xx](images/order-4.png) 1.`订单直接取消`:对于`未订单审核`,`未发货`的订单,当用户进行`订单取消`操作 , 提交后,`订单直接取消`,不需要经销商审核,如果订单已经支付,那么会发生 订单`退款`,需要在`平台商`进行`订单取消`退款(目前退款为`线下退款`,也就是通过支付渠道退款,然后在商城中更改状态) 2.`订单取消需要审核` 如果订单`用户`支付后,`经销商`审核订单操作通过后,如果这个时候用户进行订单的`取消操作`, 那么需要经销商审核,订单是否可以`取消` 2.1`订单取消`请求发起后,如果用户想撤回该`请求`,用户可以点击订单`撤销取消`请求, 来撤回该请求 ![xx](images/order-5.png) 2.2订单取消请求发起后,经销商在后台可以看到该请求 , 查看该订单是否已经发货,如果已经发货,那么订单不能被取消,如果订单未发货,那么经销商可以进行`订单取消`操作 ![xx](images/order-6.png) 经销商勾选处理的`订单取消`请求,选择`订单取消通过`还是`订单取消拒绝` 如果点击的是`订单取消通过`,那么订单将会被`取消`, 进而发生`订单退款` 如果点击的是`订单取消拒绝`,那么`订单取消`请求将会被驳回, 订单将继续按照后续的处理流程继续处理。 3.订单发货后,用户将不能发起`订单取消请求`,如果用户坚持不要商品,只能等收到商品,确认收货后,进行退货操作(根据退款条约) `订单取消请求`还没有审核的时候,订单无法进行其他处理( 也就是无法进行后续的订单发货操作),只能等 `订单取消审核`处理完成。 4.订单取消后 产品将会返还`库存`, 订单取消之后,代表订单终结,`订单取消`操作成功后,订单不可以进行其他的操作。 5.订单取消后,如果订单是在线支付(收款方为平台), 那么平台需要进行`订单退款`操作。 ![xx](images/order-7.png) 线下退款完成后,平台商在后台更改退款状态。 ### 订单取消类型 `订单取消`,指的是用户下单后,进行订单取消的操作 ,下面是用户(customer)在哪些订单状态下,可以进行订单取消的操作: ``` Yii::$service->order->payment_status_pending, // 订单创建状态(未支付) Yii::$service->order->payment_status_processing, // 订单支付中状态 Yii::$service->order->payment_status_canceled, // 订单支付取消状态 Yii::$service->order->payment_no_need_status_confirmed, // 订单-货到付款支付方式,确认 Yii::$service->order->payment_status_confirmed, // 订单支付完成 Yii::$service->order->status_audit_fail, // 订单内容审核失败 Yii::$service->order->status_processing // 订单内容审核通过,备货中状态 ``` 根据订单的状态,有的状态的订单可以直接取消,不需要经销商审核,有的需要经销商审核。 1.用户直接发起订单取消请求后,不需要经销商确认, `直接订单取消`成功的情况: 当订单创建,支付,以及审核失败等状态,详细参看代码: ``` Yii::$service->order->info->orderStatusRedirectCancelArr = [ Yii::$service->order->payment_status_pending, // 订单创建状态(未支付) Yii::$service->order->payment_status_processing, // 订单支付中状态 Yii::$service->order->payment_status_canceled, // 订单支付取消状态 Yii::$service->order->payment_no_need_status_confirmed, // 订单-货到付款支付方式,确认 Yii::$service->order->payment_status_confirmed, // 订单支付完成 Yii::$service->order->status_audit_fail, // 订单内容审核失败 ] ``` 
当订单状态在上面的状态范围中,那么,当用户发起订单取消操作后,订单会被直接取消, 进行产品库存的返还,如果订单已经被在线支付,那么会进行退款处理(平台进行收款,因此由平台进行退款)。 详细代码参看: ``` Yii::$service->order->process->redirectCancel($orderModel) ``` 2.订单取消发起后,需要`经销商`确认 用户发起`订单取消`请求,需要 经销商审核的情况 ``` $this->orderStatusRequestCancelArr = [ Yii::$service->order->status_processing, // 订单审核通过,备货中 ]; ``` 当订单审核通过后,订单进入备货状态中,如果在该状态,用户发起`订单取消`操作, 那么需要经销商后台审核,如果进行`订单取消通过`操作,那么订单将会被取消, 如果进行`订单取消拒绝`操作(譬如已经发货了,无法取消),那么`订单取消请求`将会被拒绝,经销商可以对订单进行正常 流程的发货操作 当订单发货后,如果存在其他的问题,可以`确认收货`后,在售后部分进行`退货处理` ### 脚本取消订单 对于用户发起的订单,在一段时间内没有支付,系统脚本将会将这部分订单取消掉 详细参看: [订单取消脚本](fecmall-console-order-cancel.md)
../sratool*/bin/prefetch SRR11445486 ../sratool*/bin/prefetch SRR11445485 ../sratool*/bin/prefetch SRR11547279
import random import pygame import math from dataclasses import dataclass SIZE = (400, 400) MAX_LINE_LENGTH = 80 SPEED_MU = 20 MAX_SIZE = 3 WHITE = (255, 255, 255, 255) @dataclass class Star: pos: pygame.Vector2 speed: pygame.Vector2 size: int def update(self, screen, elapsed): pygame.draw.circle(screen, WHITE, self.pos, self.size) self.pos += elapsed * self.speed return int(self.pos.x) in range(SIZE[0]) and int(self.pos.y) in range(SIZE[1]) def __sub__(self, other): return (self.pos - other.pos).length() def map_distance_to_color(distance): if distance < MAX_LINE_LENGTH: g = int(255 * (1 - distance / MAX_LINE_LENGTH)) return (g, g, g, g) return None class StarField: def __init__(self, count=100): self._count = count self._stars = [] self._respawn() def _respawn(self): for _ in range(self._count - len(self._stars)): star = Star( pos=pygame.Vector2( random.randint(0, SIZE[0] - 1), random.randint(0, SIZE[1] - 1), ), speed=pygame.Vector2( random.gauss(0, SPEED_MU), random.gauss(0, SPEED_MU), ), size=random.randint(1, MAX_SIZE), ) self._stars.append(star) def update(self, screen, elapsed): remove = [] lines = [] for a in self._stars: for b in self._stars: if a is not b: distance = a - b color = map_distance_to_color(distance) if color is not None: lines.append((color, a.pos, b.pos)) for color, a_pos, b_pos in sorted(lines, key=lambda line: line[0]): pygame.draw.line(screen, color, a_pos, b_pos) for star in self._stars: alive = star.update(screen, elapsed) if not alive: remove.append(star) for star in remove: self._stars.remove(star) self._respawn() def main(): pygame.init() pygame.display.set_caption("Star Geometry") screen = pygame.display.set_mode(SIZE) clock = pygame.time.Clock() star_field = StarField() running = True elapsed = 0 while running: for event in pygame.event.get(): if event.type == pygame.QUIT: running = False screen.fill((0, 0, 0)) star_field.update(screen, elapsed / 1000) pygame.display.flip() elapsed = clock.tick(60) if __name__ == "__main__": main()
import { Pipe, PipeTransform } from '@angular/core';
import { IMqttMessage } from 'ngx-mqtt';

// Pipes that project raw MQTT messages (JSON payloads) into view-friendly
// values. All message pipes are pure:false because the same message object
// can carry changing payload bytes; each one swallows JSON/shape errors and
// falls back to a safe default.

/** Extracts the numeric `val` field from the payload, or null on parse error. */
@Pipe({ name: 'toVal', pure: false })
export class ToVal implements PipeTransform {
  transform(message: IMqttMessage): number {
    try {
      let payload: any = JSON.parse(message.payload.toString());
      return payload.val;
    } catch(e) {
      //console.error(e);
    }
    return null;
  }
}

/** True when payload.val == 1; false otherwise (including parse errors). */
@Pipe({ name: 'toBoolean', pure: false })
export class ToBoolean implements PipeTransform {
  transform(message: IMqttMessage): boolean {
    try {
      let payload: any = JSON.parse(message.payload.toString());
      return (payload.val == 1);
    } catch(e) {
      //console.error(e);
    }
    return false;
  }
}

/** 'ON' when payload.val == 1, otherwise 'OFF' (also on parse errors). */
@Pipe({ name: 'toBooleanString', pure: false })
export class ToBooleanString implements PipeTransform {
  transform(message: IMqttMessage): String {
    try {
      let payload: any = JSON.parse(message.payload.toString());
      return (payload.val == 1) ? 'ON' : 'OFF';
    } catch(e) {
      //console.error(e);
    }
    return 'OFF';
  }
}

/** Theme color for a value: 'success' (1), 'danger' (other), 'warning' (error). */
@Pipe({ name: 'toColor', pure: false })
export class ToColor implements PipeTransform {
  transform(message: IMqttMessage): string {
    try {
      let payload: any = JSON.parse(message.payload.toString());
      return (payload.val == 1 ? 'success' : 'danger');
    } catch(e) {
      //console.error(e);
    }
    return 'warning';
  }
}

/** Maps payload.session (online/timeout/offline) to a display label. */
@Pipe({ name: 'statusToString', pure: false })
export class StatusToString implements PipeTransform {
  transform(message: IMqttMessage): string {
    try {
      let payload: any = JSON.parse(message.payload.toString());
      if (payload.session == 'online') return 'ONLINE';
      if (payload.session == 'timeout') return 'OFFLINE';
      if (payload.session == 'offline') return 'SHUTDOWN';
    } catch(e) {
      // console.error(e);
    }
    return 'Unknown';
  }
}

/** Theme color for a session status; unknown statuses map to 'warning'. */
@Pipe({ name: 'statusToColor', pure: false })
export class StatusToColor implements PipeTransform {
  transform(message: IMqttMessage): string {
    try {
      let payload: any = JSON.parse(message.payload.toString());
      if (payload.session == 'online') return 'success';
      if (payload.session == 'timeout') return 'danger';
      if (payload.session == 'offline') return 'danger';
    } catch(e) {
      // console.error(e);
    }
    return 'warning';
  }
}

/** Converts the payload's `tms` unix timestamp (seconds) to a Date, or null. */
@Pipe({ name: 'statusToUptime', pure: false })
export class StatusToUptime implements PipeTransform {
  transform(message: IMqttMessage): Date {
    try {
      let payload: any = JSON.parse(message.payload.toString());
      return new Date(payload.tms * 1000);
    } catch(e) {
      // console.error(e);
    }
    return null;
  }
}

// Event pipes below read payload.ev[0] (the first event) or an `ack` field.

/** Display text for the first event, or a command-ack description. */
@Pipe({ name: 'eventToText', pure: false })
export class EventToText implements PipeTransform {
  transform(message: IMqttMessage): string {
    try {
      let payload: any = JSON.parse(message.payload.toString());
      if (payload['ev']) {
        let event = payload.ev[0];
        if (event.msg == null || event.msg == '') {
          return '[NO TEXT]';
        } else {
          return event.msg;
        }
      }
      if (payload['ack']) {
        return 'Command Number ' + payload['ack'];
      }
    } catch(e) {
      // console.error(e);
    }
    return 'Unknown';
  }
}

/** Locale-formatted time of the first event; empty for acks. */
@Pipe({ name: 'eventToTime', pure: false })
export class EventToTime implements PipeTransform {
  transform(message: IMqttMessage): string {
    try {
      let payload: any = JSON.parse(message.payload.toString());
      if (payload['ev']) {
        let event = payload.ev[0];
        return (new Date(event.tms * 1000)).toLocaleString();
      }
      if (payload['ack']) {
        return '';
      }
    } catch(e) {
      // console.error(e);
    }
    return 'Unknown';
  }
}

/** "(id N)" label for the first event; empty for acks. */
@Pipe({ name: 'eventToId', pure: false })
export class EventToId implements PipeTransform {
  transform(message: IMqttMessage): string {
    try {
      let payload: any = JSON.parse(message.payload.toString());
      if (payload['ev']) {
        let event = payload.ev[0];
        return '(id ' + event.id + ')';
      }
      if (payload['ack']) {
        return '';
      }
    } catch(e) {
      // console.error(e);
    }
    return 'Unknown';
  }
}

/** Maps the numeric event source code (event.src) to its type name. */
@Pipe({ name: 'eventToType', pure: false })
export class EventToType implements PipeTransform {
  // Source-code constants for event.src (protocol-defined numeric ids).
  private ACK = 1;
  private STATUS = 2;
  private ALERT = 3;
  private ERROR = 4;
  private WARNING = 5;
  private NOTE = 6;
  private MESSAGE = 7;
  private INFO = 8;
  private DEBUG = 9;

  transform(message: IMqttMessage): string {
    try {
      let payload: any = JSON.parse(message.payload.toString());
      if (payload['ev']) {
        let event = payload.ev[0];
        if (event.src == this.ACK) return 'ACK';
        if (event.src == this.STATUS) return 'STATUS';
        if (event.src == this.ALERT) return 'ALERT';
        if (event.src == this.ERROR) return 'ERROR';
        if (event.src == this.WARNING) return 'WARNING';
        if (event.src == this.NOTE) return 'NOTE';
        if (event.src == this.MESSAGE) return 'MESSAGE';
        if (event.src == this.INFO) return 'INFO';
        if (event.src == this.DEBUG) return 'DEBUG';
      }
      if (payload['ack']) {
        return 'ACK';
      }
    } catch(e) {
      // console.error(e);
    }
    return 'Unknown';
  }
}

/** MQTT connection state (0..2) to label. NOTE(review): out-of-range states
 *  yield the string 'undefined'. */
@Pipe({ name: 'stateToString' })
export class StateToStringPipe implements PipeTransform {
  private states = [ 'CLOSED', 'CONNECTING', 'CONNECTED' ];
  transform(state: number): string {
    return `${this.states[state]}`;
  }
}

/** MQTT connection state (0..2) to theme class. */
@Pipe({ name: 'stateToClass' })
export class StateToClassPipe implements PipeTransform {
  private states = [ 'danger', 'warn', 'success' ];
  transform(state: number): string {
    return `${this.states[state]}`;
  }
}
package com.geecommerce.mediaassets.converter; import java.util.Iterator; import org.apache.poi.ss.usermodel.Cell; import org.apache.poi.ss.usermodel.Row; import com.itextpdf.text.Phrase; import com.itextpdf.text.pdf.PdfPCell; import com.itextpdf.text.pdf.PdfPTable; public class SpreadShitToPdfConverter { protected void printPdf(Iterator<Row> rowIterator, PdfPTable my_table) { PdfPCell table_cell; while (rowIterator.hasNext()) { Row row = rowIterator.next(); Iterator<Cell> cellIterator = row.cellIterator(); while (cellIterator.hasNext()) { Cell cell = cellIterator.next(); // Fetch CELL switch (cell.getCellType()) { // Identify CELL type // you need to add more code here based on // your requirement / transformations case Cell.CELL_TYPE_STRING: // Push the data from Excel to PDF Cell table_cell = new PdfPCell(new Phrase(cell.getStringCellValue())); // feel free to move the code below to suit to your needs my_table.addCell(table_cell); break; } // next line } } } }