text stringlengths 1 1.05M |
|---|
<reponame>fiscoflex/erp
package mx.fiscoflex.contabilida.empresa;
public class ErrorEmpresa {
public static String NOMBRE_ES_REQUERIDO = "E1201";
public static String NOMBRE_ES_MUY_LARGO = "E1202";
public static String RFC_ES_REQUERIDO = "E1203";
public static String RFC_FORMATO_INVALIDO = "E1204";
}
|
#!/bin/bash
# One-shot setup: register an nb-clean filter that strips empty cells
# from Jupyter notebooks. NOTE(review): assumes this is run from inside
# the target git repository — confirm before adding to CI.
nb-clean add-filter --remove-empty-cells
|
# Print every ordered triplet of distinct positions in `nums` whose
# values sum to zero. Note: all index permutations of a solution are
# printed, so each unordered triplet appears multiple times.
for i in range(len(nums)):
    for j in range(len(nums)):
        if i == j:
            continue
        for k in range(len(nums)):
            if k == i or k == j:
                continue
            num1, num2, num3 = nums[i], nums[j], nums[k]
            if num1 + num2 + num3 == 0:
                print(f"Triplet: {num1}, {num2}, {num3}")
# Define custom utilities
# Test for OSX with [ -n "$IS_OSX" ]
function pre_build {
    # Any stuff that you need to do before you start building the wheels
    # Runs in the root directory of this repository.
    pushd protobuf
    yum install -y devtoolset-2-libatomic-devel
    # Build protoc.
    ./autogen.sh
    # Configure exactly once, with -fPIC so the static archives can be
    # linked into the Python C++ extension. (Previously a bare
    # ./configure ran first and was immediately redone with CXXFLAGS —
    # a full duplicate configure pass with no effect on the result.)
    CXXFLAGS="-fPIC -g -O2" ./configure
    make -j8
    # Generate python dependencies.
    pushd python
    python setup.py build_py
    popd
    popd
}
function bdist_wheel_cmd {
    # Builds wheel with bdist_wheel, puts into wheelhouse
    #
    # It may sometimes be useful to use bdist_wheel for the wheel building
    # process. For example, versioneer has problems with versions which are
    # fixed with bdist_wheel:
    # https://github.com/warner/python-versioneer/issues/121
    local abs_wheelhouse=$1
    # Debug aids: log the working directory and its contents.
    pwd
    ls
    python setup.py bdist_wheel --cpp_implementation --compile_static_extension
    # Quote the destination so wheelhouse paths containing spaces work.
    cp dist/*.whl "$abs_wheelhouse"
}
function build_wheel {
    # Delegate to the generic build_wheel_cmd driver using the
    # bdist_wheel-based build step. "$@" (quoted) forwards all arguments
    # verbatim; the previous unquoted $@ re-split arguments containing
    # whitespace into multiple words.
    build_wheel_cmd "bdist_wheel_cmd" "$@"
}
function run_tests {
# Runs tests on installed distribution from an empty directory
python --version
# Smoke test: importing the C++ _message extension fails at import time
# if the extension was not built/installed correctly.
python -c "from google.protobuf.pyext import _message;"
}
|
package ru.job4j.servlets;
import ru.job4j.controller.BookStore;
import ru.job4j.model.Book;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.sql.SQLException;
public class GetBookServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
public GetBookServlet() {
super();
}
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
int bookId = Integer.parseInt(request.getParameter("id"));
BookStore dao = BookStore.instOf();
try {
Book book = dao.getBook(bookId);
request.setAttribute("book", book);
String page = "/index.jsp";
RequestDispatcher requestDispatcher = request.getRequestDispatcher(page);
requestDispatcher.forward(request, response);
} catch (SQLException ex) {
throw new ServletException(ex);
}
}
}
|
from typing import List, Tuple, Any
from ordered_set import OrderedSet
def organize_blocks(block_data: List[Tuple[float, float, Any]]) -> Tuple[OrderedSet[Tuple[float, float, Any]], List[float]]:
    """Order blocks by y-coordinate and collect them with their centers.

    NOTE: sorts ``block_data`` in place as a side effect.

    :param block_data: (y_center, height, axial_mesh_point) triples.
    :return: (ordered set of the sorted triples, list of y center coords).
    """
    block_data.sort(key=lambda entry: entry[0])  # in-place sort by y-coordinate
    ordered_blocks: OrderedSet = OrderedSet()
    center_coords: List[float] = []
    stacked_height = 0  # running total of block heights (kept for parity; not returned)
    for center_y, height, mesh_point in block_data:
        stacked_height += height
        center_coords.append(center_y)
        ordered_blocks.add((center_y, height, mesh_point))
    return ordered_blocks, center_coords
require 'rails_helper'
RSpec.describe Box, :type => :model do
# --- Fixtures -------------------------------------------------------------
# NOTE(review): `requester` uses Requester.new (unsaved) while `volunteer`
# uses Volunteer.create (persisted) — confirm this asymmetry is intended.
subject(:requester) { Requester.new(first_name: "Jane", last_name: "Doe", street_address: "122 Boggie Woogie Avenue", city: "Fairfax", state: "VA", zip: "22030", ok_to_email: true, ok_to_text: false, ok_to_call: false, ok_to_mail: true, underage: false) }
subject(:volunteer) { Volunteer.create(first_name: "John", last_name: "Doe", street_address: "123 Boggie Woogie Avenue", city: "Fairfax", state: "VA", zip: "22030", ok_to_email: true, ok_to_text: false, ok_to_call: false, ok_to_mail: true, underage: false) }
# User tied to the volunteer; used as the reviewer in the examples below.
subject(:user) { User.create!(email: "<EMAIL>", password: "<PASSWORD>", volunteer_id: volunteer.id ) }
subject(:box_request_1) {
BoxRequest.create(requester: requester,
summary: "Lorem ipsum text.... Caramels tart sweet pudding pie candy lollipop.",
question_re_affect: "Lorem ipsum text.... Tart jujubes candy canes pudding I love gummies.",
question_re_current_situation: "Sweet roll cake pastry cookie.",
question_re_referral_source: "Ice cream sesame snaps danish marzipan macaroon icing jelly beans." ) }
# The box under test is derived from the request.
subject(:box) { box_request_1.create_box }
# Walks the box through its state machine. Each example replays all
# prior transitions before asserting the one under test.
# NOTE(review): several example descriptions below say "transitons"
# (typo); they are runtime strings so they are left unchanged here.
describe "state transitions" do
before :each do
box.save
end
it "has state design_in_progress after box_request is reviewed" do
# Drive the parent request through review so the box may start design.
box_request_1.reviewed_by_id = user.id;
box_request_1.save
box_request_1.review
box_request_1.reviewed_at = DateTime.now
box_request_1.end_review
expect(box).to transition_from(:pending_review).to(:design_in_progress).on_event(:initialize_design)
end
it "transitions from design_in_progress to designed" do
box.initialize_design
box.designed_by_id = 3;
box.designed_at = DateTime.now
box.save
box.design
expect(box).to transition_from(:design_in_progress).to(:designed).on_event(:design)
end
it "transitions from designed to assembly in progress" do
box.initialize_design
box.designed_by_id = 3;
box.designed_at = DateTime.now
box.design
box.assembled_by_id = 6;
box.save
box.assembling
expect(box).to transition_from(:designed).to(:assembly_in_progress).on_event(:assembling)
end
it "transitons from assembly in progress to assembled" do
box.initialize_design
box.designed_by_id = 3;
box.designed_at = DateTime.now
box.design
box.assembled_by_id = 6;
box.assembling
box.assemble
expect(box).to transition_from(:assembly_in_progress).to(:assembled).on_event(:assemble)
end
it "transitons from assembled to shipping in progress" do
box.initialize_design
box.designed_by_id = 3;
box.designed_at = DateTime.now
box.design
box.assembled_by_id = 6;
box.assembling
box.assemble
box.shipped_by_id = 8;
box.shipping
expect(box).to transition_from(:assembled).to(:shipping_in_progress).on_event(:shipping)
end
it "transitons from shipping in progress to shipped" do
box.initialize_design
box.designed_by_id = 3;
box.designed_at = DateTime.now
box.design
box.assembled_by_id = 6;
box.assembling
box.assemble
box.shipped_by_id = 8;
box.shipping
box.shipped_at = DateTime.now
box.ship
expect(box).to transition_from(:shipping_in_progress).to(:shipped).on_event(:ship)
end
end
end |
<reponame>valenterry/bamboomigrate
package bamboomigrate
import bamboomigrate.Transform.StepConstraint.OnlySteps
import bamboomigrate.Transform.{ApplyTransformationStep, transformationByAnyStep}
import bamboomigrate.TypelevelUtils.{LazyLeftFolder, LazyLeftScanner, getFieldValue}
import shapeless._
import shapeless.labelled.{FieldType, field}
import shapeless.ops.hlist.{Init, Last, LeftFolder, Patcher, Prepend}
import shapeless.ops.record.{Remover, Renamer}
import scala.annotation.implicitNotFound
/**
 * A typeclass which allows to convert an instance of one type to an instance of another type
 * @tparam From The type which acts as the base of the migration
 * @tparam To The type whose instance should be created when applying the migration
 */
trait Migration[From, To] {
/**
 * Given an instance of type `From`, turns it into an instance of type `To`
 * @param from The instance which acts as the base
 * @return The instance created by the migration process
 */
def migrate(from: From): To
}
object Migration {
// Summons the implicit Migration instance for the given type pair.
def apply[From, To](implicit migration: Migration[From, To]): Migration[From, To] = migration
/**
 * A helper to easily create a migration, given a function that does the conversion
 */
def instance[From, To](m: From => To): Migration[From, To] = new Migration[From, To] {
def migrate(from: From): To = m(from)
}
/**
 * If already given a migration, this helper will create a new migration by converting the target instance of the given migration into a new type
 */
def mapTo[From, OldTo, NewTo](old: Migration[From, OldTo], f: OldTo => NewTo): Migration[From, NewTo] =
Migration.instance( (obj: From) => f(old.migrate(obj)))
// Partial type application carrier for `startingFrom` (see scaladoc there).
final class StartingFromPartialTypeApplication[BaseType] {
// No steps: no migrations to generate.
def apply(steps: HNil) = HNil
def apply[BaseTypeLG <: HList, FirstStep, StepList <: HList, FirstStepResult, GeneratedMigrations <: HList]
(steps: FirstStep :: StepList)
(implicit lgen: LabelledGeneric.Aux[BaseType, BaseTypeLG],
firstStep: ApplyTransformationStep.Aux[BaseTypeLG, FirstStep, transformationByAnyStep.type, FirstStepResult],
scanner: LazyLeftScanner.Aux[StepList, Migration[BaseType, FirstStepResult], migrationByAnyStep.type, GeneratedMigrations]
): GeneratedMigrations = {
// Apply the first step to the LabelledGeneric of the base type, then
// left-scan the remaining steps, accumulating one migration per step.
val firstMigration: Migration[BaseType, FirstStepResult] = Migration.instance{(obj:BaseType) => firstStep.apply(lgen.to(obj), steps.head)}
scanner.apply(steps.tail, firstMigration)
}
}
/**
 * Does the same as the `between` method, but the last migration will lead to an abstract LabelledGeneric which is the result from the transformations
 * @return The list of migrations from base type to the abstract LabelledGeneric target
 */
def startingFrom[BaseType] = new StartingFromPartialTypeApplication[BaseType]
// Partial type application carrier for `fromTarget` (see scaladoc there).
final class FromTargetPartialTypeApplication[TargetType] {
def apply[TargetTypeLG <: HList, Second, Migrations <: HList, PenultimateType, MigrationsWithoutLast <: HList, GeneratedMigrations <: HList]
(migrations: Migrations)
(implicit
targetLG: LabelledGeneric.Aux[TargetType, TargetTypeLG],
last: Last.Aux[Migrations, Migration[PenultimateType, TargetTypeLG]],
init: Init.Aux[Migrations, MigrationsWithoutLast],
prepend: Prepend.Aux[MigrationsWithoutLast, Migration[PenultimateType, TargetType] :: HNil, GeneratedMigrations]
// Re-attach the last migration with its LabelledGeneric output converted
// back into the concrete TargetType.
): GeneratedMigrations = prepend(init(migrations), Migration.instance{(obj:PenultimateType) => targetLG.from(last(migrations).migrate(obj)) } :: HNil)
}
/**
 * Replaces the last migration of a list of migrations (where the last migration leads to an abstract LabelledGeneric) with a new migration that,
 * instead, leads to the provided TargetType
 * @tparam TargetType The TargetType to which the last migration should lead to
 * @return The new list of migrations
 */
def fromTarget[TargetType] = new FromTargetPartialTypeApplication[TargetType]
// Partial type application carrier for `between` (see scaladoc there).
final class BetweenPartialTypeApplication[BaseType, TargetType] {
/**
 * Given one transformation step (as an HList), returns one migration (as an HList) to migrate the base type to the target type
 * @return The migration from base type to target type
 * Will fail if the provided transformation step will not transform the base type into the target type
 */
def apply[BaseTypeLG <: HList, TargetTypeLG <: HList, FirstStep <: TransformStep, Result]
(steps: FirstStep :: HNil)
(implicit migrationsBetween: MigrationsBetween.Aux[BaseType, TargetType, FirstStep :: HNil, Result]): Result = migrationsBetween.apply(steps)
/**
 * Given transformation steps, returns a list of migrations (as an HList) to step by step migrate the base type to the target type
 * @return The list of migrations from base type to target type
 * Will fail if the provided transformation steps will not transform the base type into the target type after being applied step by step (starting with the first step in the list)
 */
def apply[BaseTypeLG <: HList, Start, Second, FirstStep, StepList <: HList : OnlySteps, NextLG, ResultingMigrations <: HList, LastFrom, LastTo <: HList, ReplaceResult, Replaced, Result]
(steps: FirstStep :: StepList)
(implicit migrationsBetween: MigrationsBetween.Aux[BaseType, TargetType, FirstStep :: StepList, Result]
): Result = migrationsBetween(steps)
}
/**
 * Creates a list (HList) of migrations which can be used step by step to migrate from the base type to the target type
 * @tparam BaseType The base type where the first migration starts from
 * @tparam TargetType The target type where the last migration ends
 * @return A BetweenPartialTypeApplication object (used for partial type application)
 */
def between[BaseType, TargetType] = new BetweenPartialTypeApplication[BaseType, TargetType]
/**
 * Creates a list (HList) of migrations between the given `BaseType` and `TargetType` by using a `StepList` consisting of TransformationSteps
 */
// NOTE(review): the message below says "really leads to the specified base";
// likely intended "target" — string literal left unchanged here.
@implicitNotFound("Unable to migrate from base ${BaseType} to target ${TargetType} using steps ${StepList}. Check that transforming the specified base step by step (in order) of the provided steps really leads to the specified base")
trait MigrationsBetween[BaseType, TargetType, StepList <: HList] extends DepFn1[StepList] with Serializable
trait LowPriorityMigrationsBetween {
@implicitNotFound("Unable to migrate from base ${BaseType} to target ${TargetType} using steps ${StepList}. Check that transforming the specified base step by step (in order) of the provided steps really leads to the specified base")
type Aux[BaseType, TargetType, StepList <: HList, Out0] = MigrationsBetween[BaseType, TargetType, StepList] { type Out = Out0 }
// Two or more steps: scan the steps into a migration list, then replace
// the final migration so it produces the concrete TargetType.
implicit def multipleSteps[BaseType, TargetType, BaseTypeLG <: HList, FirstStep, StepList <: HList : OnlySteps, NextLG, ResultingMigrations <: HList, LastFrom, LastTo <: HList, ReplaceResult, Replaced]
(implicit lgen: Lazy[LabelledGeneric.Aux[BaseType, BaseTypeLG]],
stepApplier: ApplyTransformationStep.Aux[BaseTypeLG, FirstStep, transformationByAnyStep.type, NextLG],
scanner: LazyLeftScanner.Aux[StepList, Migration[BaseType, NextLG], migrationByAnyStep.type, ResultingMigrations],
getLastMigration: Last.Aux[ResultingMigrations, Migration[LastFrom, LastTo]],
replaceModifier: shapeless.ops.hlist.Modifier.Aux[ResultingMigrations, Migration[LastFrom, LastTo], Migration[LastFrom, TargetType], (Replaced, ReplaceResult)],
targetGen: Lazy[LabelledGeneric.Aux[TargetType, LastTo]]
): Aux[BaseType, TargetType, FirstStep :: StepList, ReplaceResult] =
new MigrationsBetween[BaseType, TargetType, FirstStep :: StepList] {
type Out = ReplaceResult
def apply(steps : FirstStep :: StepList): Out = {
val firstMigration:Migration[BaseType, NextLG] = Migration.instance { (base: BaseType) => stepApplier(lgen.value.to(base), steps.head) }
val migrationsResult: ResultingMigrations = scanner.apply(steps.tail, firstMigration)
val transformLastMigration: Migration[LastFrom, LastTo] => Migration[LastFrom, TargetType] =
(lastMigration: Migration[LastFrom, LastTo]) => Migration.mapTo(lastMigration, (obj: LastTo) => targetGen.value.from(obj))
val migrationsWithLastMigrationToTarget:ReplaceResult = replaceModifier(migrationsResult, transformLastMigration)._2
migrationsWithLastMigrationToTarget
}
}
}
object MigrationsBetween extends LowPriorityMigrationsBetween {
def apply[BaseType, TargetType, StepList <: HList]
(implicit migrationsBetween: MigrationsBetween[BaseType, TargetType, StepList]):
Aux[BaseType, TargetType, StepList, migrationsBetween.Out] = migrationsBetween
// Single step: apply it and convert straight to the TargetType.
implicit def oneStep[BaseType, TargetType, BaseTypeLG <: HList, TargetTypeLG <: HList, FirstStep <: TransformStep]
(implicit lgen: Lazy[LabelledGeneric.Aux[BaseType, BaseTypeLG]],
onlyStepApplier: ApplyTransformationStep.Aux[BaseTypeLG, FirstStep, transformationByAnyStep.type, TargetTypeLG],
targetGen: Lazy[LabelledGeneric.Aux[TargetType, TargetTypeLG]]): Aux[BaseType, TargetType, FirstStep :: HNil, Migration[BaseType, TargetType] :: HNil] =
new MigrationsBetween[BaseType, TargetType, FirstStep :: HNil] {
type Out = Migration[BaseType, TargetType] :: HNil
def apply(steps : FirstStep :: HNil): Out = {
val x: Migration[BaseType, TargetType] :: HNil = Migration.instance { (base: BaseType) => targetGen.value.from(onlyStepApplier(lgen.value.to(base), steps.head)) } :: HNil
x
}
}
}
// Poly2 that composes two consecutive migrations into one A => C migration.
object combineMigrations extends Poly2 {
implicit def combine[A, B, C]: Case.Aux[Migration[A, B], Migration[B, C], Migration[A, C]] =
at{ (migration: Migration[A, B], m2: Migration[B, C]) => Migration.instance( a => m2.migrate(migration.migrate(a)) ) }
}
/**
 * If given a list of migrations, creates one single new migration that will go from the base type of the first migration to the target type of the last migration
 * @param migrations The list of migrations
 * @return The new migration
 */
def combinedMigration[Start, Second, MigrationList <: HList](migrations: Migration[Start, Second] :: MigrationList)(
implicit folder: LeftFolder[MigrationList, Migration[Start, Second], combineMigrations.type]
) = migrations.tail.foldLeft(migrations.head)(combineMigrations)
// Poly2 cases that, given the previous migration and the next transform
// step, produce the migration corresponding to that step.
object migrationByAnyStep extends Poly2 {
implicit def byPrependStep[LastType, B <: HList, Name, Type]: Case.Aux[PrependStep[Name, Type], Migration[LastType, B], Migration[B, FieldType[Name, Type] :: B]] =
at{ (prep: PrependStep[Name, Type], _: Migration[LastType, B]) => Migration.instance{ (obj:B) => prep.kt :: obj } }
implicit def byAppendStep[LastType, B <: HList, Name, Type, AppendResult <: HList]
(implicit prepend : Prepend.Aux[B, FieldType[Name, Type] :: HNil, AppendResult]): Case.Aux[AppendStep[Name, Type], Migration[LastType, B], Migration[B, AppendResult]] =
at{ (app: AppendStep[Name, Type], _: Migration[LastType, B]) => Migration.instance{ (obj:B) => prepend(obj, app.kt :: HNil) } }
implicit def byRemoveStep[LastType, B <: HList, Name, _RemovedElement, RemoveResult <:HList]
(implicit remover1: Remover.Aux[B, Name, (_RemovedElement, RemoveResult)]): Case.Aux[RemoveStep[Name], Migration[LastType, B], Migration[B, RemoveResult]] =
at{ (_: RemoveStep[Name], _: Migration[LastType, B]) => Migration.instance{ (obj:B) => remover1(obj)._2:RemoveResult } }
implicit def byInsertAtStep[LastType, B <: HList, InsertPosition <: Nat, Name, Type, InsertResult <: HList]
(implicit patch: Patcher.Aux[InsertPosition, _0, B, FieldType[Name, Type] :: HNil, InsertResult]):
Case.Aux[InsertAtStep[InsertPosition, Name, Type], Migration[LastType, B], Migration[B, InsertResult]] =
at{ (rs: InsertAtStep[InsertPosition, Name, Type], _: Migration[LastType, B]) => Migration.instance{ (obj:B) => patch(obj, rs.kt :: HNil) } }
implicit def byRenameStep[LastType, B <: HList, OldName, NewName, RenameResult <: HList]
(implicit rename: Renamer.Aux[B, OldName, NewName, RenameResult]):
Case.Aux[RenameStep[OldName, NewName], Migration[LastType, B], Migration[B, RenameResult]] =
at{ (_: RenameStep[OldName, NewName], _: Migration[LastType, B]) => Migration.instance{ (obj:B) => rename(obj):RenameResult } }
implicit def byReplaceStep[LastType, B <: HList, OldName, NewName, OldType, NewType, _ReplacedElement, ReplaceResult]
(implicit replaceModifier: shapeless.ops.hlist.Modifier.Aux[B, FieldType[OldName, OldType], FieldType[NewName, NewType], (_ReplacedElement, ReplaceResult)]):
Case.Aux[ReplaceStep[OldName, NewName, OldType, NewType], Migration[LastType, B], Migration[B, ReplaceResult]] =
at{ (replaceStep: ReplaceStep[OldName, NewName, OldType, NewType], _: Migration[LastType, B]) => Migration.instance{ (obj:B) =>
val replaceFunction: FieldType[OldName, OldType] => FieldType[NewName, NewType] = { ft:FieldType[OldName, OldType] =>
val oldValue: OldType = getFieldValue(ft)
val newValue: NewType = replaceStep.mapValue(oldValue)
field[NewName](newValue): FieldType[NewName, NewType]
}
replaceModifier(obj, replaceFunction)._2
}
}
implicit def byChangeTypeStep[LastType, B <: HList, Name, OldType, NewType, _ReplacedElement, ReplaceResult]
(implicit replaceModifier: shapeless.ops.hlist.Modifier.Aux[B, FieldType[Name, OldType], FieldType[Name, NewType], (_ReplacedElement, ReplaceResult)]):
Case.Aux[ChangeTypeStep[Name, OldType, NewType], Migration[LastType, B], Migration[B, ReplaceResult]] =
at{ (changeTypeStep: ChangeTypeStep[Name, OldType, NewType], _: Migration[LastType, B]) => Migration.instance{ (obj:B) =>
val replaceFunction: FieldType[Name, OldType] => FieldType[Name, NewType] = { ft:FieldType[Name, OldType] =>
val oldValue: OldType = getFieldValue(ft)
val newValue: NewType = changeTypeStep.mapValue(oldValue)
field[Name](newValue): FieldType[Name, NewType]
}
replaceModifier(obj, replaceFunction)._2
}
}
implicit def byFullTransformStep[LastType, B <: HList, To <: HList]:
Case.Aux[FullTransformStep[B, To], Migration[LastType, B], Migration[B, To]] =
at{ (fullTransformStep: FullTransformStep[B, To], _: Migration[LastType, B]) => Migration.instance{ (obj:B) =>
fullTransformStep.transform(obj):To
}
}
// NOTE(review): unlike the other cases, this Case takes the Migration as
// the FIRST argument and the step second — confirm this ordering is
// intentional for SequenceStep.
implicit def bySequenceStep[LastType, B <: HList, Steps <: HList, FirstStep <: TransformStep, FoldResult]
(implicit folder: LazyLeftFolder.Aux[FirstStep :: Steps, B, transformationByAnyStep.type, FoldResult]):
Case.Aux[Migration[LastType, B], SequenceStep[FirstStep, Steps], Migration[B, FoldResult]] =
at { (_: Migration[LastType, B], sequenceStep: SequenceStep[FirstStep, Steps]) => Migration.instance{ (obj:B) =>
folder(sequenceStep.steps, obj)
}
}
}
}
|
from django.core.urlresolvers import reverse
from slacker.django_backend.conf import SLACKER_SERVER
def generate_slack_channel_url(channel_name):
    """Build the absolute URL for a Slack channel.

    Resolves the named 'channel' route for ``channel_name`` and prefixes
    it with the configured SLACKER_SERVER base URL.
    """
    channel_path = reverse('channel', kwargs={'channel_name': channel_name})
    return f"{SLACKER_SERVER}{channel_path}"
<filename>src/lang/lang.js
import English from "./en";
import SimplifiedChinese from "./zh-cn";
// Locale registry: maps a locale key to its display label and message bundle.
// NOTE(review): the key is "zhcn" (no hyphen) while the bundle file is
// "zh-cn" — confirm lookups elsewhere use this exact key.
export default {
en: {
label: "English",
lang: English
},
zhcn: {
label: "Simplified Chinese",
lang: SimplifiedChinese
}
};
|
#!/bin/bash
# Script to create input files for PTMC runs from template
# Note the variables being added to the array must be in quotes,
# otherwise it will add nothing
# Load the run configuration file passed as the first argument.
# Quoted so config paths containing spaces do not word-split.
source "$1"
fields=()
vars=()

# add_pair PLACEHOLDER VALUE
# Registers one template placeholder and the value (read from the sourced
# config) that will be substituted for it. The value is quoted at every
# call site so that empty variables still append an (empty) entry.
add_pair() {
    fields+=("$1")
    vars+=("$2")
}

# --- System / input description ---
add_pair INPDIR "$inpdir"
add_pair SYSTEM "$system"
add_pair DOMAINPAIRS "$domainpairs"
add_pair STAPLETYPES "$stapletypes"
add_pair FULLYSTACKEDPAIRS "$fullystackedpairs"
add_pair SCAFFOLDDOMAINS "$scaffolddomains"
add_pair STATE "$state"                 # unbound or assembled
sysfile=$inpdir/${system}_${state}.json
add_pair SYSFILE "$sysfile"
add_pair VARIANT "$var"
# --- Run control ---
add_pair REPS "$reps"
add_pair TEMP "$temp"
add_pair WINFILE "$winfile"             # windows file
add_pair MAXDBIAS "$maxdbias"           # max bias change
add_pair ESTEPS "$esteps"               # equilibration steps
add_pair EMAXDUR "$emaxdur"             # max equilibration time (s)
add_pair ISTEPS "$isteps"               # steps per iteration
add_pair IMAXDUR "$imaxdur"             # max time per iteration (s)
add_pair ITERS "$iters"
add_pair WINS "$wins"                   # number of windows
# --- Scheduler resources ---
add_pair QUEUE "$queue"
add_pair WALLTIME "$walltime"           # walltime hours
add_pair NODES "$nodes"
add_pair PROCSPERNODE "$procspernode"
add_pair GRIDTAG "$gridtag"             # grid bias tag
#add_pair NODENAMES "$nodenames"        # node list (disabled)
add_pair OUTPUTFILEDIR "$outputfiledir"
add_pair SHAREDFILEDIR "$sharedfiledir"
# --- Model / potential parameters (scheduler: pbs or slurm via $sched) ---
add_pair HYBRIDPOT "$hybridpot"         # hybridization potential
add_pair DOMAINTYPE "$domaintype"
add_pair BINDPOT "$bindpot"             # binding potential
add_pair MISBINDPOT "$misbindpot"       # misbinding potential
add_pair STACKPOT "$stackpot"           # stacking potential
add_pair STAPLEM "$staplem"             # staple concentration (mol/L)
add_pair CATIONM "$cationm"             # cation concentration (mol/L)
add_pair TEMPERROR "$temp_error"
add_pair MAXSTAPLES "$maxstaples"       # max total staples
add_pair MAXTYPESTAPLES "$maxtypestaples"
add_pair MAXSIZESTAPLE "$maxsizestaple" # max domains per staple
add_pair DOMAINBIASES "$domainbiases"   # domain-level biases present?
add_pair BINDH "$bindh"                 # binding enthalpy (uniform potential)
add_pair BINDS "$binds"                 # binding entropy (uniform potential)
add_pair MISBINDH "$misbindh"
add_pair MISBINDS "$misbinds"
add_pair STACKENE "$stackene"           # stacking energy
add_pair OPFILE "$opfile"               # order parameter file
add_pair BIASFILE "$biasfile"           # bias function file
# --- Restart / output frequencies ---
add_pair RESTARTSTEP "$restartstep"
add_pair RESTARTPOSTFIX "$restartpostfix"
add_pair SKIP "$skip"                   # default check/center/write freq
add_pair CENTERINGFREQ "$centeringfreq"
add_pair CONCHECKFREQ "$concheckfreq"   # constraint check freq
add_pair LOGGINGFREQ "$loggingfreq"
add_pair CONFIGSFREQ "$configsfreq"     # config write freq (all formats)
add_pair COUNTSFREQ "$countsfreq"
add_pair TAGS "$tags"                   # order parameter tags to output
add_pair TAGPAIRS "$tagpairs"
add_pair OPFREQ "$opfreq"               # order parameter write freq
add_pair TIMEFREQ "$timefreq"
add_pair ENERGYFREQ "$energyfreq"
add_pair MOVETYPEFILE "$movetypefile"
# Build one sed program that substitutes every %FIELD placeholder with
# its configured value.
sedcommand=""
for i in ${!fields[@]}
do
sedcommand+="s:%${fields[i]}:${vars[i]}:g;"
done
# Instantiate the scheduler, analysis and simulation-input templates.
sed "$sedcommand" mwus_simulation_template_${sched}.sh > $inpdir/${system}-${var}_simulation.sh
sed "$sedcommand" mwus_analysis_template_${sched}.sh > $inpdir/${system}-${var}_analysis.sh
sed "$sedcommand" mwus_template.inp > $inpdir/${system}-${var}_template.inp
# Create inputs for first run
run=0
numfields=${#fields[@]}
# Per-replica placeholders are appended once; their values are rewritten
# at indices numfields..numfields+2 inside the loop below.
fields+=(OUTPUTFILEBASE)
fields+=(REP)
fields+=(RESTARTFILEBASE)
for ((rep=0; $rep<$reps; rep += 1))
do
# Pick a starting configuration for this replica from the seed run.
select_starting_configs.py inps/${system}_unbound.json $winfile $biasfile ${seedrunprerep}rep-${rep}${seedrunposrep} inps/${system}-${var}_run-${run}_rep-${rep} .trj.restart
filebase=${system}-${var}_run-${run}_rep-${rep}
vars[$numfields]=$filebase
vars[$numfields + 1]=$rep
vars[$numfields + 2]=$filebase
# Rebuild the sed program including the per-replica substitutions.
sedcommand=""
for i in ${!fields[@]}
do
sedcommand+="s:%${fields[i]}:${vars[i]}:g;"
done
sed "$sedcommand" mwus_init_template.inp > $inpdir/${filebase}.inp
done
runfilebase=${system}-${var}_run-${run}
# Write the submission helper: queue the simulation array job, then an
# analysis job that depends on the whole array finishing successfully.
echo "arrayid=\$(qsub inps/${system}-${var}_simulation.sh\
-N ${runfilebase}\
-q ${queue}\
-l walltime=${walltime}:00:00\
-l nodes=${nodes}:ppn=${procspernode}\
-o outs/${runfilebase}.o\
-e outs/${runfilebase}.e\
-t 0-$((${reps} - 1))\
-v run=${run})" >\
inps/${system}-${var}_init.sh
echo "qsub inps/${system}-${var}_analysis.sh\
-N ${runfilebase}_analysis\
-l walltime=3:00:00\
-l nodes=1:ppn=1\
-o outs/${runfilebase}_analysis.o\
-e outs/${runfilebase}_analysis.e\
-v run=${run}\
-W depend=afterokarray:\${arrayid}" >>\
inps/${system}-${var}_init.sh
# Track the current run number for follow-up scripts.
echo -e "run=${run}" >\
inps/${system}-${var}_current.sh
|
<gh_stars>1-10
/**
* EdDSA-Java by str4d
*
* To the extent possible under law, the person who associated CC0 with
* EdDSA-Java has waived all copyright and related or neighboring rights
* to EdDSA-Java.
*
* You should have received a copy of the CC0 legalcode along with this
* work. If not, see <https://creativecommons.org/publicdomain/zero/1.0/>.
*
*/
package net.i2p.crypto.eddsa.math;
import net.i2p.crypto.eddsa.math.*;
import org.hamcrest.core.*;
import org.junit.*;
import java.math.BigInteger;
/**
* Tests rely on the BigInteger class.
*/
public abstract class AbstractFieldElementTest {
// Factory hooks implemented by concrete per-field test subclasses.
/** Returns a random element of the field under test. */
protected abstract FieldElement getRandomFieldElement();
/** Converts {@code f} to its BigInteger value for reference arithmetic. */
protected abstract BigInteger toBigInteger(FieldElement f);
/** Returns the field modulus q. */
protected abstract BigInteger getQ();
/** Returns the Field instance under test. */
protected abstract Field getField();
// region isNonZero
/** Returns the field's zero element. */
protected abstract FieldElement getZeroFieldElement();
/** Returns a non-zero field element. */
protected abstract FieldElement getNonZeroFieldElement();
@Test
public void isNonZeroReturnsFalseIfFieldElementIsZero() {
    // The zero element must report itself as zero.
    final FieldElement zero = getZeroFieldElement();

    Assert.assertThat(zero.isNonZero(), IsEqual.equalTo(false));
}
@Test
public void isNonZeroReturnsTrueIfFieldElementIsNonZero() {
    // A non-zero element must report itself as non-zero.
    final FieldElement nonZero = getNonZeroFieldElement();

    Assert.assertThat(nonZero.isNonZero(), IsEqual.equalTo(true));
}
// endregion
// region mod q arithmetic
@Test
public void addReturnsCorrectResult() {
    // Field addition must agree with BigInteger addition mod q.
    for (int trial = 0; trial < 1000; trial++) {
        final FieldElement lhs = getRandomFieldElement();
        final FieldElement rhs = getRandomFieldElement();
        final BigInteger expected = toBigInteger(lhs).add(toBigInteger(rhs)).mod(getQ());

        final BigInteger actual = toBigInteger(lhs.add(rhs)).mod(getQ());

        Assert.assertThat(actual, IsEqual.equalTo(expected));
    }
}
@Test
public void subtractReturnsCorrectResult() {
    // Field subtraction must agree with BigInteger subtraction mod q.
    for (int trial = 0; trial < 1000; trial++) {
        final FieldElement lhs = getRandomFieldElement();
        final FieldElement rhs = getRandomFieldElement();
        final BigInteger expected = toBigInteger(lhs).subtract(toBigInteger(rhs)).mod(getQ());

        final BigInteger actual = toBigInteger(lhs.subtract(rhs)).mod(getQ());

        Assert.assertThat(actual, IsEqual.equalTo(expected));
    }
}
@Test
public void negateReturnsCorrectResult() {
    // Field negation must agree with BigInteger negation mod q.
    for (int trial = 0; trial < 1000; trial++) {
        final FieldElement element = getRandomFieldElement();
        final BigInteger expected = toBigInteger(element).negate().mod(getQ());

        final BigInteger actual = toBigInteger(element.negate()).mod(getQ());

        Assert.assertThat(actual, IsEqual.equalTo(expected));
    }
}
@Test
public void multiplyReturnsCorrectResult() {
    // Field multiplication must agree with BigInteger multiplication mod q.
    for (int trial = 0; trial < 1000; trial++) {
        final FieldElement lhs = getRandomFieldElement();
        final FieldElement rhs = getRandomFieldElement();
        final BigInteger expected = toBigInteger(lhs).multiply(toBigInteger(rhs)).mod(getQ());

        final BigInteger actual = toBigInteger(lhs.multiply(rhs)).mod(getQ());

        Assert.assertThat(actual, IsEqual.equalTo(expected));
    }
}
@Test
public void squareReturnsCorrectResult() {
    // square() must equal self-multiplication mod q.
    for (int trial = 0; trial < 1000; trial++) {
        final FieldElement element = getRandomFieldElement();
        final BigInteger value = toBigInteger(element);
        final BigInteger expected = value.multiply(value).mod(getQ());

        final BigInteger actual = toBigInteger(element.square()).mod(getQ());

        Assert.assertThat(actual, IsEqual.equalTo(expected));
    }
}
@Test
public void squareAndDoubleReturnsCorrectResult() {
    // squareAndDouble() must equal 2 * x^2 mod q.
    for (int trial = 0; trial < 1000; trial++) {
        final FieldElement element = getRandomFieldElement();
        final BigInteger value = toBigInteger(element);
        final BigInteger expected = value.multiply(value).multiply(BigInteger.valueOf(2)).mod(getQ());

        final BigInteger actual = toBigInteger(element.squareAndDouble()).mod(getQ());

        Assert.assertThat(actual, IsEqual.equalTo(expected));
    }
}
@Test
public void invertReturnsCorrectResult() {
    // invert() must agree with BigInteger's modular inverse.
    for (int trial = 0; trial < 1000; trial++) {
        final FieldElement element = getRandomFieldElement();
        final BigInteger expected = toBigInteger(element).modInverse(getQ());

        final BigInteger actual = toBigInteger(element.invert()).mod(getQ());

        Assert.assertThat(actual, IsEqual.equalTo(expected));
    }
}
@Test
public void pow22523ReturnsCorrectResult() {
    int iteration = 0;
    while (iteration < 1000) {
        // Arrange: a random field element and its big-integer image.
        final FieldElement element = getRandomFieldElement();
        final BigInteger value = toBigInteger(element);
        // Act:
        final BigInteger result = toBigInteger(element.pow22523()).mod(getQ());
        // Assert: must equal value^(2^252 - 3) mod q.
        final BigInteger exponent = BigInteger.ONE.shiftLeft(252).subtract(BigInteger.valueOf(3));
        Assert.assertThat(result, IsEqual.equalTo(value.modPow(exponent, getQ())));
        iteration++;
    }
}
// endregion
// region cmov
@Test
public void cmovReturnsCorrectResult() {
    // Arrange:
    final FieldElement zero = getZeroFieldElement();
    final FieldElement nonZero = getNonZeroFieldElement();
    final FieldElement random = getRandomFieldElement();
    // Assert: b == 0 keeps the receiver, b == 1 selects the argument.
    Assert.assertThat(zero.cmov(nonZero, 0), IsEqual.equalTo(zero));
    Assert.assertThat(zero.cmov(nonZero, 1), IsEqual.equalTo(nonZero));
    Assert.assertThat(random.cmov(nonZero, 0), IsEqual.equalTo(random));
    Assert.assertThat(random.cmov(nonZero, 1), IsEqual.equalTo(nonZero));
}
// endregion
// region hashCode / equals
@Test
public void equalsOnlyReturnsTrueForEquivalentObjects() {
    // Arrange: one element, its encode/decode round trip, and two unrelated ones.
    final FieldElement original = getRandomFieldElement();
    final FieldElement roundTrip = getField().getEncoding().decode(original.toByteArray());
    final FieldElement otherA = getRandomFieldElement();
    final FieldElement otherB = getRandomFieldElement();
    // Assert: only the round-trip copy is equal to the original.
    Assert.assertThat(original, IsEqual.equalTo(roundTrip));
    Assert.assertThat(original, IsNot.not(IsEqual.equalTo(otherA)));
    Assert.assertThat(original, IsNot.not(IsEqual.equalTo(otherB)));
    Assert.assertThat(otherA, IsNot.not(IsEqual.equalTo(otherB)));
}
@Test
public void hashCodesAreEqualForEquivalentObjects() {
    // Arrange: one element, its encode/decode round trip, and two unrelated ones.
    final FieldElement original = getRandomFieldElement();
    final FieldElement roundTrip = getField().getEncoding().decode(original.toByteArray());
    final FieldElement otherA = getRandomFieldElement();
    final FieldElement otherB = getRandomFieldElement();
    // Assert: equal elements hash equally; distinct random elements are
    // expected to hash differently.
    Assert.assertThat(original.hashCode(), IsEqual.equalTo(roundTrip.hashCode()));
    Assert.assertThat(original.hashCode(), IsNot.not(IsEqual.equalTo(otherA.hashCode())));
    Assert.assertThat(original.hashCode(), IsNot.not(IsEqual.equalTo(otherB.hashCode())));
    Assert.assertThat(otherA.hashCode(), IsNot.not(IsEqual.equalTo(otherB.hashCode())));
}
// endregion
}
|
// Validate that the input consists solely of lowercase ASCII letters.
// A RegExp literal with .test() returns the boolean directly, avoiding the
// implicit string->RegExp conversion and match-array allocation of
// String.prototype.match.
let regexPattern = /^[a-z]+$/;
let inputString = "foo";
if (regexPattern.test(inputString)) {
    console.log(true);
} else {
    console.log(false);
}
package me.legit.models.reward;
import com.google.gson.TypeAdapter;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
import com.google.gson.stream.JsonWriter;
import me.legit.models.equipment.CustomEquipment;
import me.legit.models.equipment.CustomEquipmentCustomization;
import me.legit.models.equipment.CustomEquipmentPart;
import me.legit.models.equipment.EquipmentCustomizationType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Gson {@link TypeAdapter} that streams {@link Reward} instances to and from
 * the JSON layout expected by the game client.
 *
 * Fix over the previous version: {@code read} now consumes the value of any
 * unknown JSON key with {@code skipValue()}. The old code called
 * {@code nextName()} for unknown keys but never consumed the value, which left
 * the reader in an inconsistent state and broke parsing of any payload that
 * contained a field this adapter did not know about.
 */
public class RewardAdapter extends TypeAdapter<Reward> {

    /**
     * Serializes a reward. Null-valued collections are written as explicit
     * JSON nulls (serialize-nulls is forced on) so every known field is always
     * present in the output.
     */
    @Override
    public void write(JsonWriter out, Reward reward) throws IOException {
        out.setSerializeNulls(true);
        out.beginObject();
        out.name("coins").value(reward.getCoins());
        // The nested "currency" object mirrors the flat "coins" field.
        out.name("currency");
        out.beginObject();
        out.name("coins").value(reward.getCoins());
        out.endObject();
        out.name("mascotXP");
        writeStringIntegerMap(out, reward.getMascotXP());
        out.name("collectibleCurrency");
        writeStringIntegerMap(out, reward.getCollectibleCurrency());
        out.name("colourPacks");
        writeStringList(out, reward.getColourPacks());
        out.name("decals");
        writeIntegerList(out, reward.getDecals());
        out.name("fabrics");
        writeIntegerList(out, reward.getFabrics());
        out.name("emotePacks");
        writeStringList(out, reward.getEmotePacks());
        out.name("sizzleClips");
        writeIntegerList(out, reward.getSizzleClips());
        out.name("equipmentTemplates");
        writeIntegerList(out, reward.getEquipmentTemplates());
        out.name("equipmentInstances");
        if (reward.getEquipmentInstances() == null) {
            out.nullValue();
        } else {
            out.beginArray();
            for (CustomEquipment equipment : reward.getEquipmentInstances()) {
                writeEquipment(out, equipment);
            }
            out.endArray();
        }
        out.name("lots");
        writeStringList(out, reward.getLots());
        out.name("decorationInstances");
        writeIntegerIntegerMap(out, reward.getDecorationInstances());
        out.name("structureInstances");
        writeIntegerIntegerMap(out, reward.getStructureInstances());
        out.name("decorationPurchaseRights");
        writeIntegerList(out, reward.getDecorationPurchaseRights());
        out.name("structurePurchaseRights");
        writeIntegerList(out, reward.getStructurePurchaseRights());
        out.name("musicTracks");
        writeIntegerList(out, reward.getMusicTracks());
        out.name("lighting");
        writeIntegerList(out, reward.getLighting());
        out.name("durables");
        writeIntegerList(out, reward.getDurables());
        out.name("tubes");
        writeIntegerList(out, reward.getTubes());
        // Slot counts are not carried on the Reward model; always emitted as 0.
        out.name("savedOutfitSlots").value(0);
        out.name("iglooSlots").value(0);
        out.name("consumables");
        writeStringIntegerMap(out, reward.getConsumables());
        out.name("partySupplies");
        writeIntegerList(out, reward.getPartySupplies());
        out.endObject();
    }

    /**
     * Deserializes a reward. An explicit JSON null for any field leaves the
     * corresponding builder field untouched; unknown fields are skipped.
     */
    @Override
    public Reward read(JsonReader in) throws IOException {
        Reward.Builder reward = new Reward.Builder();
        in.beginObject();
        while (in.hasNext()) {
            String name = in.nextName();
            // A null value means "field absent" regardless of the key.
            if (consumeNull(in)) {
                continue;
            }
            switch (name) {
                case "coins":
                    reward.withCoins(in.nextInt());
                    break;
                case "currency":
                    reward.withCurrency(readStringIntegerMap(in));
                    break;
                case "mascotXP":
                    reward.withMascotXP(readStringIntegerMap(in));
                    break;
                case "collectibleCurrency":
                    reward.withCollectibleCurrency(readStringIntegerMap(in));
                    break;
                case "colourPacks":
                    reward.withColourPacks(readStringList(in));
                    break;
                case "decals":
                    reward.withDecals(readIntegerList(in));
                    break;
                case "fabrics":
                    reward.withFabrics(readIntegerList(in));
                    break;
                case "emotePacks":
                    reward.withEmotePacks(readStringList(in));
                    break;
                case "sizzleClips":
                    reward.withSizzleClips(readIntegerList(in));
                    break;
                case "equipmentTemplates":
                    reward.withEquipmentTemplates(readIntegerList(in));
                    break;
                case "equipmentInstances":
                    List<CustomEquipment> equipmentList = new ArrayList<>();
                    in.beginArray();
                    while (in.hasNext()) {
                        equipmentList.add(readEquipment(in));
                    }
                    in.endArray();
                    reward.withEquipmentInstances(equipmentList);
                    break;
                case "lots":
                    reward.withLots(readStringList(in));
                    break;
                case "decorationInstances":
                    reward.withDecorationInstances(readIntegerIntegerMap(in));
                    break;
                case "structureInstances":
                    reward.withStructureInstances(readIntegerIntegerMap(in));
                    break;
                case "decorationPurchaseRights":
                    reward.withDecorationPurchaseRights(readIntegerList(in));
                    break;
                case "structurePurchaseRights":
                    reward.withStructurePurchaseRights(readIntegerList(in));
                    break;
                case "musicTracks":
                    reward.withMusicTracks(readIntegerList(in));
                    break;
                case "lighting":
                    reward.withLighting(readIntegerList(in));
                    break;
                case "durables":
                    reward.withDurables(readIntegerList(in));
                    break;
                case "tubes":
                    reward.withTubes(readIntegerList(in));
                    break;
                case "savedOutfitSlots":
                    reward.withSavedOutfitSlots(in.nextInt());
                    break;
                case "iglooSlots":
                    reward.withIglooSlots(in.nextInt());
                    break;
                case "consumables":
                    reward.withConsumables(readStringIntegerMap(in));
                    break;
                case "partySupplies":
                    reward.withPartySupplies(readIntegerList(in));
                    break;
                default:
                    // Unknown field: consume its value so parsing can continue.
                    in.skipValue();
                    break;
            }
        }
        in.endObject();
        return reward.build();
    }

    // Writes one equipment instance. "source" and "sourceId" are fixed to
    // "None"/0 on output, matching the original serializer.
    private void writeEquipment(JsonWriter out, CustomEquipment equipment) throws IOException {
        out.beginObject();
        out.name("definitionId").value(equipment.getDefinitionId());
        out.name("parts");
        out.beginArray();
        for (CustomEquipmentPart part : equipment.getParts()) {
            out.beginObject();
            out.name("slotIndex").value(part.getSlotIndex());
            out.name("customizations");
            out.beginArray();
            for (CustomEquipmentCustomization customization : part.getCustomizations()) {
                writeCustomization(out, customization);
            }
            out.endArray();
            out.endObject();
        }
        out.endArray();
        out.name("equipmentId").value(equipment.getEquipmentId());
        out.name("dateTimeCreated").value(equipment.getDateTimeCreated());
        out.name("source").value("None");
        out.name("sourceId").value(0);
        out.endObject();
    }

    // Writes one customization; the enum type is serialized by ordinal.
    private void writeCustomization(JsonWriter out, CustomEquipmentCustomization customization) throws IOException {
        out.beginObject();
        out.name("type").value(customization.getType().ordinal());
        out.name("definitionId").value(customization.getDefinitionId());
        out.name("index").value(customization.getIndex());
        out.name("scale").value(customization.getScale());
        out.name("rotation").value(customization.getRotation());
        out.name("repeat").value(customization.isRepeat());
        out.name("uoffset").value(customization.getUoffset());
        out.name("voffset").value(customization.getVoffset());
        out.endObject();
    }

    // Writes a list of strings, or JSON null when the list itself is null.
    private void writeStringList(JsonWriter out, List<String> values) throws IOException {
        if (values == null) {
            out.nullValue();
            return;
        }
        out.beginArray();
        for (String value : values) {
            out.value(value);
        }
        out.endArray();
    }

    // Writes a list of integers, or JSON null when the list itself is null.
    private void writeIntegerList(JsonWriter out, List<Integer> values) throws IOException {
        if (values == null) {
            out.nullValue();
            return;
        }
        out.beginArray();
        for (Integer value : values) {
            out.value(value);
        }
        out.endArray();
    }

    // Writes a string->int map as a JSON object, or null when the map is null.
    private void writeStringIntegerMap(JsonWriter out, Map<String, Integer> values) throws IOException {
        if (values == null) {
            out.nullValue();
            return;
        }
        out.beginObject();
        for (Map.Entry<String, Integer> entry : values.entrySet()) {
            out.name(entry.getKey()).value(entry.getValue());
        }
        out.endObject();
    }

    // Integer-keyed maps are emitted with stringified keys (JSON object keys
    // must be strings), or null when the map is null.
    private void writeIntegerIntegerMap(JsonWriter out, Map<Integer, Integer> values) throws IOException {
        if (values == null) {
            out.nullValue();
            return;
        }
        out.beginObject();
        for (Map.Entry<Integer, Integer> entry : values.entrySet()) {
            out.name(Integer.toString(entry.getKey())).value(entry.getValue());
        }
        out.endObject();
    }

    // Consumes a JSON null value if one is next; returns true when it did.
    private boolean consumeNull(JsonReader in) throws IOException {
        if (in.peek() == JsonToken.NULL) {
            in.nextNull();
            return true;
        }
        return false;
    }

    private List<Integer> readIntegerList(JsonReader in) throws IOException {
        List<Integer> values = new ArrayList<>();
        in.beginArray();
        while (in.hasNext()) {
            values.add(in.nextInt());
        }
        in.endArray();
        return values;
    }

    private List<String> readStringList(JsonReader in) throws IOException {
        List<String> values = new ArrayList<>();
        in.beginArray();
        while (in.hasNext()) {
            values.add(in.nextString());
        }
        in.endArray();
        return values;
    }

    private Map<String, Integer> readStringIntegerMap(JsonReader in) throws IOException {
        Map<String, Integer> values = new HashMap<>();
        in.beginObject();
        while (in.hasNext()) {
            values.put(in.nextName(), in.nextInt());
        }
        in.endObject();
        return values;
    }

    // Reads an object whose keys are stringified integers back into an
    // Integer-keyed map.
    private Map<Integer, Integer> readIntegerIntegerMap(JsonReader in) throws IOException {
        Map<Integer, Integer> values = new HashMap<>();
        in.beginObject();
        while (in.hasNext()) {
            values.put(Integer.valueOf(in.nextName()), in.nextInt());
        }
        in.endObject();
        return values;
    }

    // Reads one equipment instance; unknown keys are skipped.
    private CustomEquipment readEquipment(JsonReader in) throws IOException {
        CustomEquipment equipment = new CustomEquipment();
        in.beginObject();
        while (in.hasNext()) {
            switch (in.nextName()) {
                case "definitionId":
                    equipment.setDefinitionId(in.nextInt());
                    break;
                case "parts":
                    List<CustomEquipmentPart> parts = new ArrayList<>();
                    in.beginArray();
                    while (in.hasNext()) {
                        parts.add(readEquipmentPart(in));
                    }
                    in.endArray();
                    equipment.setParts(parts);
                    break;
                case "equipmentId":
                    equipment.setEquipmentId(in.nextLong());
                    break;
                case "dateTimeCreated":
                    equipment.setDateTimeCreated(in.nextLong());
                    break;
                case "source":
                    equipment.setSource(in.nextString());
                    break;
                case "sourceId":
                    equipment.setSourceId(in.nextInt());
                    break;
                default:
                    in.skipValue();
                    break;
            }
        }
        in.endObject();
        return equipment;
    }

    // Reads one equipment part (slot index plus its customizations).
    private CustomEquipmentPart readEquipmentPart(JsonReader in) throws IOException {
        CustomEquipmentPart part = new CustomEquipmentPart();
        in.beginObject();
        while (in.hasNext()) {
            String key = in.nextName();
            if (key.equals("slotIndex")) {
                part.setSlotIndex(in.nextInt());
            } else if (key.equals("customizations")) {
                List<CustomEquipmentCustomization> customizations = new ArrayList<>();
                in.beginArray();
                while (in.hasNext()) {
                    customizations.add(readCustomization(in));
                }
                in.endArray();
                part.setCustomizations(customizations);
            } else {
                in.skipValue();
            }
        }
        in.endObject();
        return part;
    }

    // Reads one customization; "type" is stored as the enum ordinal.
    private CustomEquipmentCustomization readCustomization(JsonReader in) throws IOException {
        CustomEquipmentCustomization customization = new CustomEquipmentCustomization();
        in.beginObject();
        while (in.hasNext()) {
            switch (in.nextName()) {
                case "type":
                    customization.setType(EquipmentCustomizationType.values()[in.nextInt()]);
                    break;
                case "definitionId":
                    customization.setDefinitionId(in.nextInt());
                    break;
                case "index":
                    customization.setIndex(in.nextInt());
                    break;
                case "scale":
                    customization.setScale((float) in.nextDouble());
                    break;
                case "rotation":
                    customization.setRotation((float) in.nextDouble());
                    break;
                case "repeat":
                    customization.setRepeat(in.nextBoolean());
                    break;
                case "uoffset":
                    customization.setUoffset((float) in.nextDouble());
                    break;
                case "voffset":
                    customization.setVoffset((float) in.nextDouble());
                    break;
                default:
                    in.skipValue();
                    break;
            }
        }
        in.endObject();
        return customization;
    }
}
|
# Archive and export the Agora macOS tutorial app without code signing.
# Stop at the first failing xcodebuild step so a broken archive is never
# exported (the previous version ran all three steps unconditionally).
set -e

BUILD_DATE=$(date +%Y-%m-%d-%H.%M.%S)
ArchivePath="Agora-Mac-Tutorial-${BUILD_DATE}.xcarchive"

xcodebuild clean -project "Agora-Mac-Tutorial-Objective-C.xcodeproj" -scheme "Agora-Mac-Tutorial-Objective-C" -configuration Release
xcodebuild CODE_SIGN_IDENTITY="" CODE_SIGNING_REQUIRED=NO CODE_SIGNING_ALLOWED=NO -project "Agora-Mac-Tutorial-Objective-C.xcodeproj" -scheme "Agora-Mac-Tutorial-Objective-C" -archivePath "${ArchivePath}" archive
xcodebuild CODE_SIGN_IDENTITY="" CODE_SIGNING_REQUIRED=NO CODE_SIGNING_ALLOWED=NO -exportArchive -exportOptionsPlist exportPlist.plist -archivePath "${ArchivePath}" -exportPath .
|
const json = {
    "name": "John Doe",
    "age": 42,
    "address": {
        "street": "123 Main Street",
        "city": "Boston",
        "state": "MA"
    }
}
// Object.entries produces the same [key, value] pairs, in the same property
// order, as the manual Object.keys(...).map(...) construction it replaces.
const result = Object.entries(json);
console.log(result);
/** HTTP request methods handled by this module. */
export enum HttpMethod {
    // Each member's string value is the exact on-the-wire method name.
    GET = "GET",
    POST = "POST"
}
from onnx_tf.common import exception
from onnx_tf.handlers.frontend_handler import FrontendHandler
from onnx_tf.handlers.handler import onnx_op
from onnx_tf.handlers.handler import tf_op
@onnx_op("TopK")
@tf_op("TopKV2")
class TopK(FrontendHandler):
  """Maps the TensorFlow TopKV2 op onto the ONNX TopK op."""

  @classmethod
  def args_check(cls, node, **kwargs):
    # ONNX TopK takes k as an attribute, so the TF k input must be a
    # compile-time constant.
    k_input = node.inputs[1]
    if k_input not in kwargs["consts"]:
      exception.CONST_NOT_FOUND_EXCEPT(k_input, node.op_type)

  @classmethod
  def version_1(cls, node, **kwargs):
    # Fold the constant k tensor into the ONNX node's k attribute; only the
    # values input is forwarded.
    k_value = int(kwargs["consts"][node.inputs[1]])
    return cls.make_node_from_tf_node(
        node, inputs=[node.inputs[0]], k=k_value, axis=-1)
|
#!/bin/bash
#
# Copyright IBM Corp All Rights Reserved
#
# SPDX-License-Identifier: Apache-2.0
#
# Exit on first error, print all commands.
set -ev
# don't rewrite paths for Windows Git Bash users
export MSYS_NO_PATHCONV=1
# Bring up the Fabric containers in the background.
docker-compose -f docker-compose.yml up -d
# wait for Hyperledger Fabric to start
# incase of errors when running later commands, issue export FABRIC_START_TIMEOUT=<larger number>
export FABRIC_START_TIMEOUT=30
# Poll once per second, up to FABRIC_START_TIMEOUT times, until the peer
# answers "peer channel list".
for i in $(seq 1 ${FABRIC_START_TIMEOUT})
do
# This command only works if the peer is up and running
if docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/etc/hyperledger/msp/users/Admin@org1.example.com/msp" <%= dockerName %>_peer0.org1.example.com peer channel list > /dev/null 2>&1
then
# Peer now available
break
else
# Sleep and try again
sleep 1
fi
done
# NOTE(review): if the timeout is exhausted the script continues anyway and
# reports the full timeout as the startup time - confirm this is intended.
echo Hyperledger Fabric started in $i seconds
# Check to see if the channel already exists
if ! docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/etc/hyperledger/msp/users/Admin@org1.example.com/msp" <%= dockerName %>_peer0.org1.example.com peer channel getinfo -c mychannel
then
# Create the channel
docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/etc/hyperledger/msp/users/Admin@org1.example.com/msp" <%= dockerName %>_peer0.org1.example.com peer channel create -o orderer.example.com:<%= orderer %> -c mychannel -f /etc/hyperledger/configtx/channel.tx
# Update the channel with the anchor peers
docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/etc/hyperledger/msp/users/Admin@org1.example.com/msp" <%= dockerName %>_peer0.org1.example.com peer channel update -o orderer.example.com:<%= orderer %> -c mychannel -f /etc/hyperledger/configtx/Org1MSPanchors.tx
# Join peer0.org1.example.com to the channel.
docker exec -e "CORE_PEER_LOCALMSPID=Org1MSP" -e "CORE_PEER_MSPCONFIGPATH=/etc/hyperledger/msp/users/Admin@org1.example.com/msp" <%= dockerName %>_peer0.org1.example.com peer channel join -b mychannel.block
fi
|
from setuptools import setup
# # read the contents of your README file
# from pathlib import Path
# this_directory = Path(__file__).parent
# long_description = (this_directory / "README.md").read_text()
# Packaging metadata for the concurrentbuffer library.
setup(
    name='concurrentbuffer',
    version='0.0.5',
    author='<NAME>',
    author_email='<EMAIL>',
    packages=['concurrentbuffer'],
    license='LICENSE.txt',
    # numpy is the only runtime dependency.
    install_requires=['numpy>=1.18.1'],
    url='https://github.com/martvanrijthoven/concurrent-buffer'
)
|
<filename>PlateCodeInqury/wwwroot/js/site.js
// Submits the plate code to the server and prepends the returned value to the
// history list.
$("#getValueBtn").click(() => {
    const plateCode = $("#plateCode").val();
    if (checkValidity(plateCode)) {
        $.ajax({
            type: "POST",
            url: "/Home/GetPlateValue",
            data: { plateCode: plateCode },
            success: function (response) {
                // Build the <li> with .text() so the response is inserted as
                // plain text (the previous HTML concatenation allowed markup
                // injection, and .prepend() was also passed a stray second
                // argument).
                $("#listHistory").prepend($("<li>").text(response));
            },
            error: function (response) {
                alert("Teknik Hata");
            }
        });
    }
    else {
        alert("1 ile 81 arasında bir sayı giriniz.")
    }
})
// Resets the input field and empties the history list.
$("#clear").click(() => {
    $("#plateCode").val("");
    document.querySelector("#listHistory").innerHTML = "";
})
// Returns true when x represents a whole number between 1 and 81 inclusive
// (the valid range of Turkish licence-plate codes). The previous version
// accepted any non-numeric string ("abc" < 1 and "abc" > 81 are both false)
// and non-integer values such as 2.5.
function checkValidity(x) {
    const code = Number(x);
    return Number.isInteger(code) && code >= 1 && code <= 81;
}
<filename>app/app.js
'use strict';
var app = angular.module('gg', ['ngRoute', 'ngResource']);
app.config(config);
// $inject must list exactly the services the function's parameters expect.
// The previous version declared a third parameter ($httpProvider) that was
// never injected (it was always undefined) and never used, so it is removed.
config.$inject = ['$routeProvider', '$locationProvider'];
function config($routeProvider, $locationProvider) {
    $locationProvider.hashPrefix('');
    $routeProvider
        .when('/', {
            templateUrl: '../entity/home/home.html'
        })
        .otherwise({
            redirectTo: '/'
        });
}
|
// Root "global" reducer. No action types are handled yet, so it must return
// the current state unchanged - the previous version returned undefined,
// which violates the reducer contract (e.g. combineReducers throws when a
// reducer returns undefined).
export const global = (state = {}, action) => state;
|
#ifndef ODFAEG_CREATOR_RECTANGULAR_SELECTION_HPP
#define ODFAEG_CREATOR_RECTANGULAR_SELECTION_HPP
#include "odfaeg/Graphics/rectangleShape.h"
// Drawable rectangular selection box used by the editor: holds a 3D selection
// volume and the set of transformable items captured inside it.
class RectangularSelection : public odfaeg::graphic::Drawable {
    public :
    // Constructs an empty selection.
    RectangularSelection();
    // Positions and sizes the selection volume (origin plus extents).
    void setRect(int posX, int posY, int posZ, int width, int height, int depth);
    // Registers an item as selected; ownership is not transferred.
    void addItem(odfaeg::graphic::Transformable* item);
    // Returns the bounding box of the current selection volume.
    odfaeg::physic::BoundingBox getSelectionRect();
    // Mutable access to the currently selected items.
    std::vector<odfaeg::graphic::Transformable*>& getItems();
    // NOTE(review): presumably applies a highlight colour to the given item -
    // confirm against the .cpp implementation.
    void setColor(odfaeg::graphic::Transformable* transformable);
    // Renders the selection rectangle onto the target (Drawable interface).
    void draw (odfaeg::graphic::RenderTarget& target, odfaeg::graphic::RenderStates states);
    private :
    // Visual representation of the selection volume.
    odfaeg::graphic::RectangleShape selectionRect;
    // Items currently captured by the selection.
    std::vector<odfaeg::graphic::Transformable*> items;
};
#endif // RECTANGULAR_SELECTION
|
def reverse_list_without_function(list_):
    """Return a new list with the elements of list_ in reverse order,
    without using a built-in reversal helper."""
    result = []
    index = len(list_) - 1
    # Walk the input from its last element down to its first.
    while index >= 0:
        result.append(list_[index])
        index -= 1
    return result

list_ = [1, 2, 3, 4]
print(reverse_list_without_function(list_))  # => [4, 3, 2, 1]
#!/bin/bash
#
# Use politeiawwwcli to test the politeiawww API routes
# Proposal status codes as defined by the politeiawww API.
readonly PROP_STATUS_NOT_REVIEWED=2
readonly PROP_STATUS_CENSORED=3
readonly PROP_STATUS_PUBLIC=4
# Base CLI invocation; -j makes politeiawwwcli emit json.
cmd="politeiawwwcli -j"
# Runtime options, populated from the command line flags parsed in main().
admin_email=""
admin_password=""
override_token=""
print_json="false"
vote="false"
# expect_success executes the passed in command and ensures that the command
# exits with no errors. expect_success is used when you expect a command to
# succeed and you don't need to store the output.
expect_success() {
  if [ "$print_json" == "true" ]; then
    # execute passed in command. stdout will be printed to the console.
    $1
  else
    # execute passed in command and suppress stdout
    $1 > /dev/null
  fi
  # check exit status for errors. Exit script if errors found.
  # ($? still holds the command's status here: "fi" and comment lines do not
  # change it.)
  if [ $? -ne 0 ]; then
    exit 1
  fi
}
# expect_failure executes the passed in command and ensures that the command
# exits with an error. expect_failure is used when you expect a command to fail
# and you don't need to store the output.
expect_failure() {
  if [ "$print_json" == "true" ]; then
    # execute passed in command. stdout will be printed to the console.
    $1
  else
    # execute passed in command and suppress stdout and stderr
    $1 &> /dev/null
  fi
  # check exit status for errors. Exit script if no errors found.
  if [ $? -eq 0 ]; then
    echo "Expected failure, but did not recieve any errors."
    exit 1
  fi
}
# check_error checks the exit status of the previously run command and exits
# the script if an error is found. This allows you to save the output of a
# command to a variable, then call check_error to check the exit status of the
# command. The variable containing the command's output is passed to
# check_error and is written to stdout if the json flag is present.
# (At function entry $? still holds the caller's last exit status.)
check_error() {
  # check exit status of previous command
  if [ $? -ne 0 ]; then
    if [ "$print_json" == "true" ]; then
      echo $1
    fi
    exit 1
  fi
  if [ "$print_json" == "true" ]; then
    echo $1
  fi
}
# error writes an error message to stderr and exits the script.
# All arguments are joined into the message.
error() {
  echo "Error: $@" >&2
  exit 1
}
# run_admin_routes tests the politeiawww api routes that require admin
# privileges: login/identity, proposal paging, status changes and comments.
# Fix over the previous version: the two check_error calls after getcomments
# passed the literal strings "gcr1"/"gcr2" instead of the command output
# "$gcr1"/"$gcr2", so the json flag printed the variable name rather than the
# response.
run_admin_routes() {
  echo "Admin - Login"
  login=`$cmd login $admin_email $admin_password`
  check_error "$login"
  # validate that the user is an admin
  is_admin=`echo $login | jq -r '. | select(.userid).isadmin'`
  if [ $is_admin != "true" ]; then
    error "$admin_email is not an admin"
  fi
  echo "Admin - Me"
  me=`$cmd me`
  check_error "$me"
  me_email=`echo $me | jq -r '.email'`
  me_is_admin=`echo $me | jq -r '.isadmin'`
  if [ $me_email != $admin_email ]; then
    error "/me email got $me_email wanted $admin_email"
  fi
  if [ $me_is_admin != "true" ]; then
    error "/me isAdmin got $me_is_admin wanted true"
  fi
  echo "Admin - Create new identity"
  expect_success "$cmd updateuserkey"
  echo "Admin - Unvetted paging"
  # only fetch proposals that were created during the execution of this script
  unvetted_page1=`$cmd getunvetted --after=$prop1_censorship_token`
  check_error "$unvetted_page1"
  # NOTE(review): this filter uses ".[] | last" while the page-2 length check
  # below uses ".proposals | length" - confirm both match the response shape.
  page1_last_censorship_token=`echo $unvetted_page1 | jq -r ".[] | last | .censorshiprecord.token"`
  unvetted_page2=`$cmd getunvetted --after=$page1_last_censorship_token`
  check_error "$unvetted_page2"
  unvetted_page2_length=`echo $unvetted_page2 | jq ".proposals | length"`
  if [ $unvetted_page2_length -eq 0 ]; then
    error "Empty 2nd page of unvetted proposals"
  fi
  echo "Admin - Get proposal"
  pr1=`$cmd getproposal $prop1_censorship_token`
  check_error "$pr1"
  pr1_files_length=`echo $pr1 | jq ".proposal.files | length"`
  if [ $pr1_files_length -eq 0 ]; then
    error "pr1 expected proposal data"
  fi
  echo "Admin - Set proposal status: move prop1 to public"
  psr1=`$cmd setproposalstatus $prop1_censorship_token $PROP_STATUS_PUBLIC`
  check_error "$psr1"
  prop1_status=`echo $psr1 | jq -r ". | select(.proposal).proposal.status"`
  if [ $prop1_status != $PROP_STATUS_PUBLIC ]; then
    error "Invalid status got $prop1_status wanted $PROP_STATUS_PUBLIC"
  fi
  echo "Admin - Set proposal status: move prop2 to censored"
  psr2=`$cmd setproposalstatus $prop2_censorship_token $PROP_STATUS_CENSORED`
  check_error "$psr2"
  prop2_status=`echo $psr2 | jq -r ". | select(.proposal).proposal.status"`
  if [ $prop2_status != $PROP_STATUS_CENSORED ]; then
    error "Invalid status got $prop2_status wanted $PROP_STATUS_CENSORED"
  fi
  echo "Admin - Get proposal: validate prop1 and prop2"
  _pr1=`$cmd getproposal $prop1_censorship_token`
  check_error "$_pr1"
  _pr1_censorship_token=`echo $_pr1 | jq -r ".proposal.censorshiprecord.token"`
  _pr1_status=`echo $_pr1 | jq -r ".proposal.status"`
  if [ $_pr1_censorship_token != $prop1_censorship_token ]; then
    error "_pr1 invalid got $_pr1_censorship_token wanted $prop1_censorship_token"
  fi
  if [ $_pr1_status != $PROP_STATUS_PUBLIC ]; then
    error "_pr1 invalid status got $_pr1_status wanted $PROP_STATUS_PUBLIC"
  fi
  _pr2=`$cmd getproposal $prop2_censorship_token`
  check_error "$_pr2"
  _pr2_censorship_token=`echo $_pr2 | jq -r ".proposal.censorshiprecord.token"`
  _pr2_status=`echo $_pr2 | jq -r ".proposal.status"`
  if [ $_pr2_censorship_token != $prop2_censorship_token ]; then
    error "_pr2 invalid got $_pr2_censorship_token wanted $prop2_censorship_token"
  fi
  if [ $_pr2_status != $PROP_STATUS_CENSORED ]; then
    error "_pr2 invalid status got $_pr2_status wanted $PROP_STATUS_CENSORED"
  fi
  # Build a small comment tree on prop1: two top-level comments, each with two
  # replies, for six comments total.
  echo "Admin - New comment 1: prop1 no parent"
  cr1=`$cmd newcomment $prop1_censorship_token "parentComment"`
  check_error "$cr1"
  cr1_comment_id=`echo $cr1 | jq -r ". | select(.commentid).commentid"`
  echo "Admin - New comment 1: prop1 with parent"
  expect_success "$cmd newcomment $prop1_censorship_token childComment $cr1_comment_id"
  echo "Admin - New comment 1: prop1 with parent"
  expect_success "$cmd newcomment $prop1_censorship_token childComment $cr1_comment_id"
  echo "Admin - New comment 2: prop1 no parent"
  cr2=`$cmd newcomment $prop1_censorship_token "parentComment"`
  check_error "$cr2"
  cr2_comment_id=`echo $cr2 | jq -r ". | select(.commentid).commentid"`
  echo "Admin - New comment 2: prop1 with parent"
  expect_success "$cmd newcomment $prop1_censorship_token childComment $cr2_comment_id"
  echo "Admin - New comment 2: prop1 with parent"
  expect_success "$cmd newcomment $prop1_censorship_token childComment $cr2_comment_id"
  echo "Admin - Get comments: validate number of comments on prop1"
  gcr1=`$cmd getcomments $prop1_censorship_token`
  check_error "$gcr1"
  gcr1_num_comments=`echo $gcr1 | jq ".comments | length"`
  if [ $gcr1_num_comments -ne 6 ]; then
    error "Expected 6 comments, got $gcr1_num_comments"
  fi
  echo "Admin - Get proposal: validate number of comments on prop1"
  _pr1=`$cmd getproposal $prop1_censorship_token`
  check_error "$_pr1"
  _pr1_num_comments=`echo $_pr1 | jq ".proposal.numcomments"`
  if [ $_pr1_num_comments -ne 6 ]; then
    error "Expected 6 comments, got $_pr1_num_comments"
  fi
  echo "Admin - Get comments: validate number of comments on prop2"
  gcr2=`$cmd getcomments $prop2_censorship_token`
  check_error "$gcr2"
  gcr2_num_comments=`echo $gcr2 | jq ".comments | length"`
  if [ $gcr2_num_comments -ne 0 ]; then
    error "Expected 0 comments, got $gcr2_num_comments"
  fi
  echo "Admin - Get proposal: validate number of comments on prop2"
  _pr2=`$cmd getproposal $prop2_censorship_token`
  check_error "$_pr2"
  _pr2_num_comments=`echo $_pr2 | jq ".proposal.numcomments"`
  if [ $_pr2_num_comments -ne 0 ]; then
    error "Expected 0 comments, got $_pr2_num_comments"
  fi
}
# run_vote_routes tests the politeiawww api routes that handle proposal
# voting: it creates a proposal as an admin, verifies a vote cannot start on
# an unvetted proposal, publishes it, then starts the vote.
run_vote_routes() {
  echo "Vote - Login"
  login=`$cmd login $admin_email $admin_password`
  check_error "$login"
  echo "Vote - Verify admin status"
  is_admin=`echo $login | jq -r ". | select(.userid).isadmin"`
  if [ $is_admin != "true" ]; then
    error "$admin_email is not an admin"
  fi
  echo "Vote - Update user identity"
  expect_success "$cmd updateuserkey"
  echo "Vote - New proposal"
  vprop=`$cmd newproposal --random`
  check_error "$vprop"
  vprop_censorship_token=`echo $vprop | jq -r '. | select(.censorshiprecord).censorshiprecord.token'`
  # A vote may only be started on a vetted (public) proposal.
  echo "Vote - Start vote failure: wrong state"
  expect_failure "$cmd startvote $vprop_censorship_token"
  echo "Vote - Move proposal to vetted"
  psr=`$cmd setproposalstatus $vprop_censorship_token $PROP_STATUS_PUBLIC`
  check_error "$psr"
  vprop_status=`echo $psr | jq -r ". | select(.proposal).proposal.status"`
  if [ $vprop_status != $PROP_STATUS_PUBLIC ]; then
    error "Invalid status got $vprop_status wanted $PROP_STATUS_PUBLIC"
  fi
  echo "Vote - Add comment"
  expect_success "$cmd newcomment $vprop_censorship_token parentComment"
  echo "Vote - Start vote"
  expect_success "$cmd startvote $vprop_censorship_token"
}
# print_usage writes the command line help text to stdout.
print_usage() {
  echo "Usage:
politeaiwww_refclient.sh [options] [ -e admin_email ] [ -p admin_password ]
Options:
-e specify an admin email
-h show this help message
-j print json output
-o override token for faucet
-p specify an admin password
-s specify server (i.e. host)
-v run vote routes
* to run admin routes, specify admin login credentials using -e and -p"
}
# main drives the refclient run: parses flags, smoke-tests the public routes
# with a freshly created user, then optionally runs the vote and admin suites.
main() {
    # Parse command line flags
    while getopts 'e:hjo:p:s:v' flag; do
        case "${flag}" in
            e) admin_email="${OPTARG}" ;;
            h) print_usage
               exit 0 ;;
            j) print_json="true" ;;
            o) override_token="${OPTARG}" ;;
            p) admin_password="${OPTARG}" ;;
            s) cmd="politeiawwwcli -j --host=${OPTARG}" ;;
            v) vote="true" ;;
            *) print_usage
               exit 1 ;;
        esac
    done

    # Start tests
    echo "Version: fetch CSRF token"
    expect_success "$cmd version"

    # Run vote routes if -v flag is used.
    # $vote is unset unless -v was given, so it must be quoted: an unquoted
    # empty expansion turns this into the malformed test `[ == "true" ]`.
    if [ "$vote" == "true" ]; then
        if [[ $admin_email == "" || $admin_password == "" ]]; then
            error "Vote routes require admin credentials"
        fi
        run_vote_routes
        printf "\nCompleted with no errors\n"
        exit 0
    fi

    echo "Policy"
    policy=`$cmd policy`
    check_error "$policy"
    min_password_length=`echo $policy | jq ".minpasswordlength"`

    echo "Generate user credentials"
    username1=`openssl rand -hex $min_password_length`
    email1="$username1@example.com"
    password1="$username1"
    username2=`openssl rand -hex $min_password_length`
    password2="$username2"
    printf " Username: %s\n Email: %s\n Password: %s\n" $username1 $email1 $password1

    echo "Create new user & verify"
    newuser=`$cmd newuser $email1 $username1 $password1 --save --verify`
    check_error "$newuser"
    paywall_address=`echo $newuser | jq -r '.| select(.paywalladdress).paywalladdress'`
    paywall_amount=`echo $newuser | jq '. | select(.paywallamount).paywallamount'`

    # Paywall fee
    if [[ $paywall_address != "" && $paywall_amount -ne 0 ]]; then
        printf "Paywall\n Address: %s\n Amount: %s\n Sending DCR...\n" $paywall_address $paywall_amount
        faucet=`$cmd faucet $paywall_address $paywall_amount --overridetoken=$override_token`
        check_error "$faucet"
        faucet_tx=`echo $faucet | jq -r ".faucetTx"`
        echo " faucet_tx: $faucet_tx"
    fi
    # NOTE(review): if the paywall branch above is skipped, $faucet_tx is empty
    # when "verifyuserpayment" runs below — confirm the server tolerates that.

    echo "New proposal failure: user hasn't paid paywall"
    expect_failure "$cmd newproposal --random"

    echo "Reset password"
    expect_success "$cmd resetpassword $email1 $password2"

    echo "Login failure: incorrect password"
    expect_failure "$cmd login $email1 $password1"

    echo "Login"
    login=`$cmd login $email1 $password2`
    check_error "$login"

    echo "Admin failure"
    me=`$cmd me`
    check_error "$me"
    is_admin=`echo $me | jq '.isadmin'`
    if [ "$is_admin" != "false" ]; then
        error "Expected non-admin"
    fi

    echo "Secret"
    expect_success "$cmd secret"

    echo "Me"
    expect_success "$cmd me"

    echo "Change password"
    expect_success "$cmd changepassword $password2 $password1"

    echo "Change username"
    expect_success "$cmd changeusername $password1 $username2"

    # Wait for paywall payment to get confirmed
    echo "Verify user payment"
    has_paid="false"
    verifyuserpayment=`$cmd verifyuserpayment $faucet_tx`
    check_error "$verifyuserpayment"
    has_paid=`echo $verifyuserpayment | jq '.haspaid'`
    while [ "$has_paid" == "false" ]; do
        echo "Waiting for confirmations..."
        sleep 15
        verifyuserpayment=`$cmd verifyuserpayment $faucet_tx`
        check_error "$verifyuserpayment"
        has_paid=`echo $verifyuserpayment | jq '.haspaid'`
    done

    echo "New proposal #1"
    prop1=`$cmd newproposal --random`
    check_error "$prop1"
    prop1_censorship_token=`echo $prop1 | jq -r '. | select(.censorshiprecord).censorshiprecord.token'`

    # Get proposals for user and validate that it matches prop1
    echo "Proposals for user"
    me=`$cmd me`
    check_error "$me"
    userId=`echo $me | jq -r ".userid"`
    userproposals=`$cmd userproposals $userId`
    check_error "$userproposals"
    user_props=`echo $userproposals | jq '.proposals'`
    user_prop1_censorship_token=`echo $user_props | jq -r ".[0].censorshiprecord.token"`
    if [ `echo $user_props | jq '. | length'` -ne 1 ]; then
        error "Incorrect number of proposals returned for user"
    fi
    # Tokens are quoted throughout so an empty jq result fails the comparison
    # cleanly instead of producing a malformed test expression.
    if [ "$user_prop1_censorship_token" != "$prop1_censorship_token" ]; then
        error "Proposal tokens don't match"
    fi

    echo "Create new identity"
    expect_success "$cmd updateuserkey"

    echo "New proposal #2"
    prop2=`$cmd newproposal --random`
    check_error "$prop2"
    prop2_censorship_token=`echo $prop2 | jq -r '. | select(.censorshiprecord).censorshiprecord.token'`

    echo "Get proposal #1 and validate"
    pr1=`$cmd getproposal $prop1_censorship_token`
    check_error "$pr1"
    pr1_censorship_token=`echo $pr1 | jq -r '.proposal.censorshiprecord.token'`
    pr1_status=`echo $pr1 | jq -r '.proposal.status'`
    pr1_num_files=`echo $pr1 | jq '.proposal.files | length'`
    if [ "$pr1_censorship_token" != "$prop1_censorship_token" ]; then
        error "Proposal tokens don't match"
    fi
    if [ "$pr1_status" -ne "$PROP_STATUS_NOT_REVIEWED" ]; then
        error "pr1 invalid status got $pr1_status wanted $PROP_STATUS_NOT_REVIEWED"
    fi
    if [ "$pr1_num_files" -gt 0 ]; then
        error "pr1 unexpected proposal data received"
    fi

    echo "Get proposal #2 and validate"
    pr2=`$cmd getproposal $prop2_censorship_token`
    check_error "$pr2"
    pr2_censorship_token=`echo $pr2 | jq -r '.proposal.censorshiprecord.token'`
    pr2_status=`echo $pr2 | jq -r '.proposal.status'`
    pr2_num_files=`echo $pr2 | jq '.proposal.files | length'`
    if [ "$pr2_censorship_token" != "$prop2_censorship_token" ]; then
        error "Proposal tokens don't match"
    fi
    if [ "$pr2_status" -ne "$PROP_STATUS_NOT_REVIEWED" ]; then
        error "pr2 invalid status got $pr2_status wanted $PROP_STATUS_NOT_REVIEWED"
    fi
    if [ "$pr2_num_files" -gt 0 ]; then
        error "pr2 unexpected proposal data received"
    fi

    echo "Create 2 pages of proposals"
    prop_list_page_size=`echo $policy | jq ".proposallistpagesize"`
    for i in `seq 1 $prop_list_page_size`; do
        echo " New proposal"
        expect_success "$cmd newproposal --random"
    done

    echo "Get unvetted failure: only accessible by admin users"
    expect_failure "$cmd getunvetted"

    echo "Get vetted proposals"
    expect_success "$cmd getvetted"

    echo "Logout"
    expect_success "$cmd logout"

    echo "Secret failure: user not logged in"
    expect_failure "$cmd secret"

    echo "Me failure: user not logged in"
    expect_failure "$cmd me"

    # Run the admin route suite only when credentials were supplied.
    if [[ $admin_email != "" && $admin_password != "" ]]; then
        run_admin_routes
    fi

    printf "\nCompleted with no errors\n"
}

main "$@"
|
<gh_stars>1-10
package test.controller;
import org.noear.solon.annotation.Controller;
import org.noear.solon.annotation.Mapping;
import org.noear.solon.core.handle.Context;
import org.noear.solon.core.handle.Result;
/**
 * Controller exposing user-related endpoints, mounted under "/user/".
 *
 * @author noear 2021/8/8 created
 */
@Mapping("/user/")
@Controller
public class UserController {

    /**
     * Handler for "/user/add": logs the request's HTTP method and reports
     * success.
     *
     * @param ctx the current request context
     * @return a successful {@link Result}
     * @throws Throwable propagated from the handler invocation
     */
    @Mapping("add")
    public Object add(Context ctx) throws Throwable {
        final String httpMethod = ctx.method();
        System.out.println(httpMethod + "...ADD");
        return Result.succeed();
    }
}
|
<reponame>Melgo4/ICS4U<filename>Assignment 6/src/zoo/TestZooStats.java<gh_stars>0
package zoo;
/**
 * Console driver that feeds two batches of zoo animals through ZooStats,
 * printing the item count, total cost and contents after each batch.
 */
public class TestZooStats {

    public static void main(String[] args) {
        final ZooStats stats = new ZooStats();

        // Batch one: one animal from each of the six categories.
        stats.enterItem(new Mammal("Panda Bear", 3, 399));
        stats.enterItem(new Reptile("Alligator", 5, 1500));
        stats.enterItem(new Amphibian("Wood Frog", 45, 50));
        stats.enterItem(new Bird("Great Horned Owl", 4, 1098));
        stats.enterItem(new Fish("Discus", 200, 150));
        stats.enterItem(new Invertebrate("Southern Black Widow", 30, 67));

        System.out.println("\nNumber of items: " + stats.numberOfItems());
        System.out.println("\nTotal cost: " + stats.totalCost() + "\n\n");
        System.out.println(stats);

        // Reset and run a second batch, including subtypes (Rodent, Snake)
        // that take an extra constructor argument.
        stats.clear();
        stats.enterItem(new Rodent("White Footed Mouse", 24, 98, 145));
        stats.enterItem(new Amphibian("Salamander", 10, 600));
        stats.enterItem(new Invertebrate("European Hornet", 200, 23));
        stats.enterItem(new Snake("Cobra", 4, 235, 90));
        stats.enterItem(new Mammal("Elephant", 6, 5040));
        stats.enterItem(new Bird("Flamingo", 12, 340));

        System.out.println("\nNumber of items: " + stats.numberOfItems() + "\n");
        System.out.println("\nTotal cost: " + stats.totalCost() + "\n");
        System.out.println(stats);
    }
}
|
// Generated by script, don't edit it please.
import createSvgIcon from '../createSvgIcon';
import UnvisibleSvg from '@rsuite/icon-font/lib/status/Unvisible';

// Icon component for the "unvisible" (hidden) status glyph, built from the
// raw SVG via the shared createSvgIcon factory.
const Unvisible = createSvgIcon({
  as: UnvisibleSvg,
  ariaLabel: 'unvisible',
  category: 'status',
  displayName: 'Unvisible'
});

export default Unvisible;
|
<gh_stars>1-10
/**
 * Widget configuration for the index page.
 * @copyright Mars Technology (mars3d.cn)
 * @author Mars "Wu Yanzu" 2021-12-30
 */
import { defineAsyncComponent, markRaw } from "vue"
import { WidgetState } from "@mars/common/store/widget"
import { StoreOptions } from "vuex"

// Vuex store module listing every lazily-loaded widget component.
// Each entry maps a chunk-split async component to a widget name; flags like
// autoDisable / disableOther / group presumably control how widgets open and
// close each other — confirm against the WidgetState implementation.
const store: StoreOptions<WidgetState> = {
  state: {
    widgets: [
      {
        component: markRaw(defineAsyncComponent(() => import(/* webpackChunkName: "query-poi" */ "@mars/widgets/basic/query-poi/index.vue"))),
        name: "query-poi",
        autoDisable: true
      },
      {
        component: markRaw(defineAsyncComponent(() => import(/* webpackChunkName: "toolbar" */ "@mars/widgets/basic/toolbar/index.vue"))),
        name: "toolbar",
        autoDisable: true
      },
      {
        component: markRaw(defineAsyncComponent(() => import(/* webpackChunkName: "manage-basemap" */ "@mars/widgets/basic/manage-basemap/index.vue"))),
        name: "manage-basemap",
        group: "manage"
      },
      {
        component: markRaw(defineAsyncComponent(() => import(/* webpackChunkName: "manage-layers" */ "@mars/widgets/basic/manage-layers/index.vue"))),
        name: "manage-layers",
        group: "manage",
        // NOTE(review): looks like opening this widget force-closes "roamfly" — verify.
        disableOther: ["roamfly"]
      },
      {
        component: markRaw(defineAsyncComponent(() => import(/* webpackChunkName: "layer-tree" */ "@mars/widgets/basic/manage-layers/layer-tree.vue"))),
        name: "layer-tree"
      },
      {
        component: markRaw(
          defineAsyncComponent(() => import(/* webpackChunkName: "layer-picture-heatmap" */ "@mars/widgets/basic/manage-layers/layer-picture-heatmap.vue"))
        ),
        name: "layer-picture-heatmap"
      },
      {
        component: markRaw(
          defineAsyncComponent(() => import(/* webpackChunkName: "layer-picture-guihua" */ "@mars/widgets/basic/manage-layers/layer-picture-guihua.vue"))
        ),
        name: "layer-picture-guihua"
      },
      {
        component: markRaw(defineAsyncComponent(() => import(/* webpackChunkName: "select-point" */ "@mars/widgets/basic/select-point/index.vue"))),
        name: "select-point",
        group: "tools"
      },
      {
        component: markRaw(defineAsyncComponent(() => import(/* webpackChunkName: "measure" */ "@mars/widgets/basic/measure/index.vue"))),
        name: "measure",
        group: "tools"
      }
    ],
    // Widgets activated as soon as the page loads.
    openAtStart: ["query-poi", "toolbar"]
  }
}

export default store
|
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.common.domain.order;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
import org.hibernate.Criteria;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.entity.condition.EntityCondition;
import org.ofbiz.entity.condition.EntityOperator;
import org.opentaps.base.constants.OrderTypeConstants;
import org.opentaps.base.constants.RoleTypeConstants;
import org.opentaps.base.constants.StatusItemConstants;
import org.opentaps.base.entities.OrderHeader;
import org.opentaps.base.entities.OrderItem;
import org.opentaps.base.entities.OrderRole;
import org.opentaps.base.entities.ProductAndGoodIdentification;
import org.opentaps.common.util.UtilDate;
import org.opentaps.domain.DomainsDirectory;
import org.opentaps.domain.order.OrderViewForListing;
import org.opentaps.domain.order.PurchaseOrderLookupRepositoryInterface;
import org.opentaps.domain.product.Product;
import org.opentaps.domain.product.ProductRepositoryInterface;
import org.opentaps.foundation.entity.Entity;
import org.opentaps.foundation.entity.EntityNotFoundException;
import org.opentaps.foundation.entity.hibernate.HibernateUtil;
import org.opentaps.foundation.entity.hibernate.Session;
import org.opentaps.foundation.infrastructure.InfrastructureException;
import org.opentaps.foundation.repository.RepositoryException;
import org.opentaps.foundation.repository.ofbiz.CommonLookupRepository;
/**
 * Repository to lookup Purchase Orders.
 *
 * <p>Filters are supplied through the setters before calling
 * {@link #findOrders()}; every filter is optional except the organization
 * party id, which is always applied.</p>
 */
public class PurchaseOrderLookupRepository extends CommonLookupRepository implements PurchaseOrderLookupRepositoryInterface {

    @SuppressWarnings("unused")
    private static final String MODULE = PurchaseOrderLookupRepository.class.getName();

    // Lookup filter fields, populated via the setters below.
    private String orderId;
    private String productPattern;
    private String statusId;
    private String orderName;
    private String organizationPartyId;
    private String createdBy;
    private String supplierPartyId;
    private Timestamp fromDate;
    private String fromDateStr;
    private Timestamp thruDate;
    private String thruDateStr;
    private boolean findDesiredOnly = false;
    private Locale locale;
    private TimeZone timeZone;
    private List<String> orderBy;

    private ProductRepositoryInterface productRepository;

    /**
     * Default constructor.
     */
    public PurchaseOrderLookupRepository() {
        super();
    }

    /** {@inheritDoc} */
    public List<OrderViewForListing> findOrders() throws RepositoryException {
        // convert fromDateStr / thruDateStr into Timestamps if the string versions were given
        if (UtilValidate.isNotEmpty(fromDateStr)) {
            fromDate = UtilDate.toTimestamp(fromDateStr, timeZone, locale);
        }
        if (UtilValidate.isNotEmpty(thruDateStr)) {
            thruDate = UtilDate.toTimestamp(thruDateStr, timeZone, locale);
        }

        Session session = null;
        try {
            // get a hibernate session
            session = getInfrastructure().getSession();
            Criteria criteria = session.createCriteria(OrderHeader.class);
            // always filter by the current organization
            criteria.add(Restrictions.eq(OrderHeader.Fields.billToPartyId.name(), organizationPartyId));
            // filters by order type, we only want purchase order
            criteria.add(Restrictions.eq(OrderHeader.Fields.orderTypeId.name(), OrderTypeConstants.PURCHASE_ORDER));
            // set the from/thru date filter if they were given
            if (fromDate != null) {
                criteria.add(Restrictions.ge(OrderHeader.Fields.orderDate.name(), fromDate));
            }
            if (thruDate != null) {
                criteria.add(Restrictions.le(OrderHeader.Fields.orderDate.name(), thruDate));
            }
            // filter the role assoc, there is only one supplier role per order
            Criteria roleCriteria = criteria.createAlias("orderRoles", "or");
            roleCriteria.add(Restrictions.eq("or.id." + OrderRole.Fields.roleTypeId.name(), RoleTypeConstants.BILL_FROM_VENDOR));
            // filter by order status: "desired only" means still open orders
            if (findDesiredOnly) {
                List<String> statuses = UtilMisc.toList(StatusItemConstants.OrderStatus.ORDER_APPROVED, StatusItemConstants.OrderStatus.ORDER_CREATED, StatusItemConstants.OrderStatus.ORDER_HOLD);
                criteria.add(Restrictions.in(OrderHeader.Fields.statusId.name(), statuses));
            }
            // filter by the given orderId string (prefix match, case-insensitive)
            if (UtilValidate.isNotEmpty(orderId)) {
                criteria.add(Restrictions.ilike(OrderHeader.Fields.orderId.name(), orderId, MatchMode.START));
            }
            // filter by exact matching status, if a statusId was given
            if (UtilValidate.isNotEmpty(statusId)) {
                criteria.add(Restrictions.eq(OrderHeader.Fields.statusId.name(), statusId));
            }
            // filter by the user who created the order if given
            if (UtilValidate.isNotEmpty(createdBy)) {
                criteria.add(Restrictions.eq(OrderHeader.Fields.createdBy.name(), createdBy));
            }
            // filter by the given orderName string (prefix match, case-insensitive)
            if (UtilValidate.isNotEmpty(orderName)) {
                criteria.add(Restrictions.ilike(OrderHeader.Fields.orderName.name(), orderName, MatchMode.START));
            }
            // filter by the given supplierPartyId string, from the OrderRole entity
            if (UtilValidate.isNotEmpty(supplierPartyId)) {
                roleCriteria.add(Restrictions.ilike("or.id." + OrderRole.Fields.partyId.name(), supplierPartyId, MatchMode.START));
            }
            // filter by product, if given
            criteria.createAlias("orderItems", "oi");
            if (UtilValidate.isNotEmpty(productPattern)) {
                try {
                    // try to get product by using productPattern as productId
                    Product product = getProductRepository().getProductById(productPattern);
                    criteria.add(Restrictions.eq("oi." + OrderItem.Fields.productId.name(), product.getProductId()));
                } catch (EntityNotFoundException e) {
                    // could not get the product by using productPattern as productId
                    // find all the products that may match
                    String likePattern = "%" + productPattern + "%";
                    EntityCondition conditionList = EntityCondition.makeCondition(EntityOperator.OR,
                           EntityCondition.makeCondition(ProductAndGoodIdentification.Fields.productId.getName(), EntityOperator.LIKE, likePattern),
                           EntityCondition.makeCondition(ProductAndGoodIdentification.Fields.internalName.getName(), EntityOperator.LIKE, likePattern),
                           EntityCondition.makeCondition(ProductAndGoodIdentification.Fields.productName.getName(), EntityOperator.LIKE, likePattern),
                           EntityCondition.makeCondition(ProductAndGoodIdentification.Fields.comments.getName(), EntityOperator.LIKE, likePattern),
                           EntityCondition.makeCondition(ProductAndGoodIdentification.Fields.description.getName(), EntityOperator.LIKE, likePattern),
                           EntityCondition.makeCondition(ProductAndGoodIdentification.Fields.longDescription.getName(), EntityOperator.LIKE, likePattern),
                           EntityCondition.makeCondition(ProductAndGoodIdentification.Fields.idValue.getName(), EntityOperator.LIKE, likePattern)
                    );
                    List<ProductAndGoodIdentification> products = findList(ProductAndGoodIdentification.class, conditionList);
                    // NOTE(review): if no product matches, the pattern is
                    // silently ignored rather than returning no results —
                    // confirm this is the intended behavior.
                    if (products.size() > 0) {
                        criteria.add(Restrictions.in("oi." + OrderItem.Fields.productId.name(), Entity.getDistinctFieldValues(products, ProductAndGoodIdentification.Fields.productId)));
                    }
                }
            }
            // specify the fields to return
            criteria.setProjection(Projections.projectionList()
                    .add(Projections.distinct(Projections.property(OrderHeader.Fields.orderId.name())))
                    .add(Projections.property(OrderHeader.Fields.orderName.name()))
                    .add(Projections.property(OrderHeader.Fields.statusId.name()))
                    .add(Projections.property(OrderHeader.Fields.grandTotal.name()))
                    .add(Projections.property(OrderHeader.Fields.orderDate.name()))
                    .add(Projections.property(OrderHeader.Fields.currencyUom.name()))
                    .add(Projections.property("or.id." + OrderRole.Fields.partyId.name())));

            // set the order by
            if (orderBy == null) {
                orderBy = Arrays.asList(OrderHeader.Fields.orderDate.desc());
            }
            // some substitution is needed to fit the hibernate field names
            // this also maps the calculated fields and indicates the non sortable fields
            Map<String, String> subs = new HashMap<String, String>();
            subs.put("partyId", "or.id.partyId");
            subs.put("partyName", "or.id.partyId");
            subs.put("orderDateString", "orderDate");
            subs.put("orderNameId", "orderId");
            subs.put("statusDescription", "statusId");
            HibernateUtil.setCriteriaOrder(criteria, orderBy, subs);

            ScrollableResults results = null;
            List<OrderViewForListing> results2 = new ArrayList<OrderViewForListing>();
            try {
                // fetch the paginated results
                results = criteria.scroll(ScrollMode.SCROLL_INSENSITIVE);
                if (usePagination()) {
                    results.setRowNumber(getPageStart());
                } else {
                    results.first();
                }

                // convert them into OrderViewForListing objects which will also calculate or format some fields for display
                Object[] o = results.get();
                int n = 0; // number of results actually read
                while (o != null) {
                    OrderViewForListing r = new OrderViewForListing();
                    r.initRepository(this);
                    int i = 0;
                    r.setOrderId((String) o[i++]);
                    r.setOrderName((String) o[i++]);
                    r.setStatusId((String) o[i++]);
                    r.setGrandTotal((BigDecimal) o[i++]);
                    r.setOrderDate((Timestamp) o[i++]);
                    r.setCurrencyUom((String) o[i++]);
                    r.setPartyId((String) o[i++]);
                    r.calculateExtraFields(getDelegator(), timeZone, locale);
                    results2.add(r);
                    n++;

                    if (!results.next()) {
                        break;
                    }
                    if (usePagination() && n >= getPageSize()) {
                        break;
                    }
                    o = results.get();
                }
                results.last();
                // note: row number starts at 0
                setResultSize(results.getRowNumber() + 1);
            } finally {
                // criteria.scroll() may throw before results is assigned;
                // closing unconditionally would raise an NPE here that masks
                // the original exception.
                if (results != null) {
                    results.close();
                }
            }

            return results2;
        } catch (InfrastructureException e) {
            throw new RepositoryException(e);
        } finally {
            if (session != null) {
                session.close();
            }
        }
    }

    /**
     * Lazily obtains the product repository from the domains directory.
     * @return the <code>ProductRepositoryInterface</code>
     * @throws RepositoryException if the repository cannot be obtained
     */
    protected ProductRepositoryInterface getProductRepository() throws RepositoryException {
        if (productRepository == null) {
            productRepository = DomainsDirectory.getDomainsDirectory(this).getProductDomain().getProductRepository();
        }
        return productRepository;
    }

    /** {@inheritDoc} */
    public void setCreatedBy(String createdBy) {
        this.createdBy = createdBy;
    }

    /** {@inheritDoc} */
    public void setOrderBy(List<String> orderBy) {
        this.orderBy = orderBy;
    }

    /** {@inheritDoc} */
    public void setSupplierPartyId(String supplierPartyId) {
        this.supplierPartyId = supplierPartyId;
    }

    /** {@inheritDoc} */
    public void setFromDate(String fromDate) {
        this.fromDateStr = fromDate;
    }

    /** {@inheritDoc} */
    public void setFromDate(Timestamp fromDate) {
        this.fromDate = fromDate;
    }

    /** {@inheritDoc} */
    public void setThruDate(String thruDate) {
        this.thruDateStr = thruDate;
    }

    /** {@inheritDoc} */
    public void setThruDate(Timestamp thruDate) {
        this.thruDate = thruDate;
    }

    /** {@inheritDoc} */
    public void setOrderId(String orderId) {
        this.orderId = orderId;
    }

    /** {@inheritDoc} */
    public void setStatusId(String statusId) {
        this.statusId = statusId;
    }

    /** {@inheritDoc} */
    public void setProductPattern(String productPattern) {
        this.productPattern = productPattern;
    }

    /** {@inheritDoc} */
    public void setOrderName(String orderName) {
        this.orderName = orderName;
    }

    /** {@inheritDoc} */
    public void setOrganizationPartyId(String organizationPartyId) {
        this.organizationPartyId = organizationPartyId;
    }

    /** {@inheritDoc} */
    public void setLocale(Locale locale) {
        this.locale = locale;
    }

    /** {@inheritDoc} */
    public void setTimeZone(TimeZone timeZone) {
        this.timeZone = timeZone;
    }

    /** {@inheritDoc} */
    public void setFindDesiredOnly(boolean findDesiredOnly) {
        this.findDesiredOnly = findDesiredOnly;
    }
}
|
<gh_stars>0
package types
import (
"github.com/cosmos/cosmos-sdk/codec"
cdctypes "github.com/cosmos/cosmos-sdk/codec/types"
channel "github.com/cosmos/cosmos-sdk/x/ibc/04-channel"
commitmenttypes "github.com/cosmos/cosmos-sdk/x/ibc/23-commitment/types"
)
// RegisterCodec registers the IBC account packet data and acknowledgement
// concrete types on the given amino codec under their route names.
func RegisterCodec(cdc *codec.Codec) {
	cdc.RegisterConcrete(IBCAccountPacketData{}, "ibcaccount/IBCAccountPacketData", nil)
	cdc.RegisterConcrete(IBCAccountPacketAcknowledgement{}, "ibcaccount/IBCAccountPacketAcknowledgement", nil)
}

var (
	// amino is the module-private amino codec; sealed in init().
	amino = codec.New()
	// ModuleCdc wraps amino together with a protobuf interface registry.
	ModuleCdc = codec.NewHybridCodec(amino, cdctypes.NewInterfaceRegistry())
)

func init() {
	// Register this module's types plus the channel and commitment types it
	// depends on, then seal the codec against further registration.
	RegisterCodec(amino)
	channel.RegisterCodec(amino)
	commitmenttypes.RegisterCodec(amino)
	amino.Seal()
}
|
from django.http import HttpResponse
def greet_view(request):
    """Respond to any request with a fixed plain-text greeting.

    Args:
        request: The incoming Django ``HttpRequest`` (unused).

    Returns:
        HttpResponse: With body ``"Hello, welcome to my website!"``.
    """
    greeting = "Hello, welcome to my website!"
    return HttpResponse(greeting)
<filename>sources/UEADB/Core/Application.hpp<gh_stars>0
#pragma once
#include <cstdlib>
#include <UEAA/Utils/SharedPointer.hpp>
#include <UEAA/Utils/ReferenceCounted.hpp>
#include <UEADB/Core/TypeDefs.hpp>

namespace UEADB
{
// Builds the ordered list of commands from the raw command-line arguments.
CommandsList ReadCommands (const std::vector <std::string> &cmdArguments);

// Prints every command in the given list (presumably via PrintCommand below).
void PrintCommands (const CommandsList &commandsList);

// Prints a single command and its arguments.
void PrintCommand (const CommandInfo &command);

// Runs each command in order using the matching executor; returns the
// process exit code.
int ExecuteCommands (const CommandsList &commandsList, const std::map <unsigned, CommandExecutor> &commandExecutors);

// Runs one command; sharedPointersMap carries state between commands.
int ExecuteCommand (const CommandInfo &command, SharedPointersMap &sharedPointersMap,
                    const std::map <unsigned, CommandExecutor> &commandExecutors);
}
|
package cyclops.async.reactive.futurestream.react.simple;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import cyclops.async.reactive.futurestream.SimpleReact;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutionException;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Tests SimpleReact stream construction from iterators and collections.
 */
public class IterationTest {

    // Streams a 4-element list via its iterator; every element must survive
    // the peek/then/then pipeline into the blocked result.
    @Test
    public void testIterate() throws InterruptedException, ExecutionException {
        List<Integer> list = Arrays.asList(1,
                                           2,
                                           3,
                                           4);
        List<String> strings = new SimpleReact().<Integer>from(list.iterator()).peek(it -> System.out.println(it))
                                                .then(it -> it * 100)
                                                .then(it -> "*" + it)
                                                .block();

        assertThat(strings.size(),
                   is(4));
    }

    // Same pipeline but built from the collection directly instead of an iterator.
    @Test
    public void testReactWithCollection() throws InterruptedException, ExecutionException {
        List<Integer> list = Arrays.asList(1,
                                           2,
                                           3,
                                           4);
        List<String> strings = new SimpleReact().<Integer>from(list).peek(it -> System.out.println(it))
                                                .then(it -> it * 100)
                                                .then(it -> "*" + it)
                                                .block();

        assertThat(strings.size(),
                   is(4));
    }

    // Filters out "$"-prefixed entries; the trailing null triggers an NPE in
    // the filter which onFail maps to the string "null" (length 4), so the
    // summed lengths are 5+5+5+4 = 19.
    @Test
    public void testReactWithCollectionOfStrings() throws InterruptedException, ExecutionException {
        List<String> list = Arrays.asList("hello",
                                          "world",
                                          "$da^",
                                          "along",
                                          "$orrupted",
                                          null);

        int count = new SimpleReact().from(list)
                                     .capture(e -> e.printStackTrace())
                                     .filter(it -> !it.startsWith("$"))
                                     .onFail(e -> {
                                         if (e.getCause() instanceof NullPointerException) {
                                             return "null";
                                         }
                                         return "";
                                     })
                                     .then(it -> it.length())
                                     .block()
                                     .stream()
                                     .reduce(0,
                                             (acc, next) -> acc + next);

        assertThat(count,
                   is(19));
    }

    // NOTE(review): despite the name, no max size is configured here — the
    // pipeline is identical to testIterate without the peek. Confirm intent.
    @Test
    public void testIterateLargeMaxSize() throws InterruptedException, ExecutionException {
        List<Integer> list = Arrays.asList(1,
                                           2,
                                           3,
                                           4);
        List<String> strings = new SimpleReact().<Integer>from(list.iterator()).then(it -> it * 100)
                                                .then(it -> "*" + it)
                                                .block();

        assertThat(strings.size(),
                   is(4));
    }

    // An empty iterator must produce an empty blocked result.
    @Test
    public void testIterateEmptyIterator() throws InterruptedException, ExecutionException {
        List<Integer> list = Arrays.asList();
        List<String> strings = new SimpleReact().<Integer>from(list.iterator()).then(it -> it * 100)
                                                .then(it -> "*" + it)
                                                .block();

        assertThat(strings.size(),
                   is(0));
    }

    // NOTE(review): ignored — expects 8 elements from an infinite iterator but
    // no max size is applied, and the twin test below expects 0 from the same
    // pipeline. These cannot both be right; confirm the missing maxSize API.
    @Test
    @Ignore
    public void testIterateMaxSize() throws InterruptedException, ExecutionException {
        Iterator<Integer> iterator = createInfiniteIterator();
        List<String> strings = new SimpleReact().<Integer>from(iterator).then(it -> it * 100)
                                                .then(it -> "*" + it)
                                                .block();

        assertThat(strings.size(),
                   is(8));
    }

    // NOTE(review): ignored — see testIterateMaxSize above.
    @Test
    @Ignore
    public void testIterateMaxSize0() throws InterruptedException, ExecutionException {
        Iterator<Integer> iterator = createInfiniteIterator();
        List<String> strings = new SimpleReact().<Integer>from(iterator).then(it -> it * 100)
                                                .then(it -> "*" + it)
                                                .block();

        assertThat(strings.size(),
                   is(0));
    }

    /**
     * @Test(expected=IllegalArgumentException.class) public void testIterateMaxSizeMinus1() throws InterruptedException,
     * ExecutionException { Iterator<Integer> iterator = createInfiniteIterator(); List<String> strings = new SimpleReact()
     * .<Integer>of(iterator) .then(it -> it * 100) .then(it -> "*" + it) .block();
     * <p>
     * fail("IllegalArgumentException expected");
     * <p>
     * }
     **/

    // Iterator that never terminates and always yields 10.
    private Iterator<Integer> createInfiniteIterator() {
        Iterator<Integer> iterator = new Iterator<Integer>() {

            public boolean hasNext() {
                return true;
            }

            public Integer next() {
                return 10;
            }
        };
        return iterator;
    }
}
|
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/video/VideoContentHandler.java
package io.opensphere.core.video;
import io.opensphere.core.util.Service;
/**
 * Content handler for video.
 *
 * <p>NOTE(review): extends {@link Service}, which presumably gives
 * implementations an open/close lifecycle tied to the stream — confirm
 * against the Service contract.</p>
 *
 * @param <T> The type of content handled.
 */
public interface VideoContentHandler<T> extends Service
{
    /**
     * Handle a packet of data.
     *
     * @param content The data.
     * @param ptsMS The time since stream start at which the packet should be
     *            presented in milliseconds (i.e. the presentation timestamp).
     */
    void handleContent(T content, long ptsMS);
}
|
package org.arquillian.cube.persistence;
import java.io.IOException;
import javax.inject.Inject;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.EmptyAsset;
import org.jboss.shrinkwrap.api.asset.StringAsset;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Test;
import org.junit.runner.RunWith;
// Arquillian in-container integration test for UserRepository persistence.
@RunWith(Arquillian.class)
public class UserRepositoryTest {

    // Repository under test, CDI-injected inside the deployed archive.
    @Inject
    private UserRepository repository;

    /**
     * Builds the test deployment: the entity, repository and test classes,
     * an empty beans.xml to activate CDI, the test persistence unit, and a
     * manifest declaring the dependency on the container's H2 module.
     */
    @Deployment
    public static WebArchive create() {
        return ShrinkWrap.create(WebArchive.class)
            .addClasses(User.class, UserRepository.class, UserRepositoryTest.class)
            .addAsWebInfResource(EmptyAsset.INSTANCE, "beans.xml")
            .addAsResource("test-persistence.xml", "META-INF/persistence.xml")
            .addAsManifestResource(new StringAsset("Dependencies: com.h2database.h2\n"), "MANIFEST.MF");
    }

    // Smoke test: persisting a user must not throw.
    @Test
    public void shouldStoreUser() throws IOException {
        repository.store(new User("test"));
    }
}
|
<filename>src/components/footer/index.js
import React from 'react'
import { graphql, useStaticQuery } from 'gatsby'
import FooterStyle from './style'
import JsSVG from '../../assets/svgs/js.svg'
import ReactSVG from '../../assets/svgs/react.svg'
import GatsbySVG from '../../assets/svgs/gatsby.svg'
import StyledComponentsSVG from '../../assets/svgs/styled-components.svg'
import MarkdownSVG from '../../assets/svgs/markdown.svg'
import GitHubSVG from '../../assets/svgs/github.svg'
const Icon = ({ to, src, title }) => {
return (
<li>
<a target="_blank" href={to} rel="noreferrer">
<img src={src} title={title} alt={title} />
</a>
</li>
)
}
// Site footer: typo/PR call-to-action, the tech-stack icon list, the
// copyright line and the build date injected at build time.
const Footer = () => {
// Build-time GraphQL query: site author from siteMetadata plus the current
// date from the custom `currentBuildDate` source.
const data = useStaticQuery(graphql`
query {
site {
siteMetadata {
author
}
}
currentBuildDate {
currentDate
}
}
`)
return (
<FooterStyle>
<footer>
<span className="typoSpan">
See a typo or want to contribute? submit a PR or issue on{' '}
<a
target="_blank"
href="https://github.com/rahulsrma26/rahulsrma26.github.io/"
rel="noreferrer"
>
the Github repo
</a>
!
</span>
<div className="stackIcons">
<h4>This website built with:</h4>
<ul>
<Icon
src={JsSVG}
title="Javascript"
to="https://www.javascript.com/"
/>
<Icon
src={ReactSVG}
title="reactjs"
to="https://reactjs.org/"
/>
<Icon
src={GatsbySVG}
title="Gatsby"
to="https://www.gatsbyjs.com/"
/>
<Icon
src={StyledComponentsSVG}
title="Styled Components"
to="https://styled-components.com/"
/>
<Icon
src={MarkdownSVG}
title="Markdown"
to="https://www.markdownguide.org/"
/>
<Icon
src={GitHubSVG}
title="GitHub-Pages"
to="https://pages.github.com/"
/>
</ul>
</div>
<p>
<small>
©{new Date().getFullYear()} -{' '}
{data.site.siteMetadata.author}
</small>
</p>
<span className='buildDate'>
Last build on {data.currentBuildDate.currentDate} UTC
</span>
</footer>
</FooterStyle>
)
}

export default Footer
|
#!/bin/bash

# Set the path for the secrets below to be created in vault or credhub.
export concourse_root_secrets_path="/concourse"
export concourse_team_name="team-name"
export concourse_pipeline_name="pcf-nsxt-config"

# VAULT or CREDHUB - targeted secrets management system
export targeted_system="VAULT"

# This script assumes that:
# 1) the credhub or vault CLI is installed
# 2) you setup your vault or credhub target and login commands prior to invoking it
# e.g. for VAULT
# export VAULT_ADDR=https://myvaultdomain:8200
# export VAULT_SKIP_VERIFY=true
# export VAULT_TOKEN=vault-token
#
# e.g. for CREDHUB
# credhub login -s credhub-server-uri -u username -p password --skip-tls-validation

##
## TEAM level secrets (shared by all pipelines in that team)
##
export team_secrets=(
)

##
## PIPELINE LEVEL secrets (specific to the pipeline)
## Each entry is a "KEY::VALUE" pair consumed by writeCredentials.
##
export pipeline_secrets=(
# NSX Manager Params
"nsx_manager_address"::"nsx-manager.abc.io"
"nsx_manager_username"::"admin"
"nsx_manager_password"::"mynsxpassword"
# Unique Name for this PCF install
"pcf_foundation_name"::"pcf-fd1"
# Names of NSX Components. Used to connect switches and routers to already established NSX Components
"vlan_transport_zone"::"tz-vlan"
"overlay_transport_zone"::"tz-overlay"
"edge_cluster_name"::"edge-cluster-1"
# T0 router IP and mask
"t0_router_ip"::"1.2.3.2"
"t0_router_ip_mask"::"26"
# Static route where T0 router should send all traffic back to IaaS
"t0_next_hop_ip"::"1.2.3.1"
# Params for DNAT and SNAT rules created on the T0 router
"ops_mgr_dnat_ip"::"1.2.3.150"
"infrastructure_network_snat_ip"::"1.2.3.151"
# Params to define a pool of IPs that will be used for dynamically created Organizations
"external_nat_ip_pool_cidr"::"1.2.3.0/24"
"external_nat_ip_pool_start_ip"::"1.2.3.100"
"external_nat_ip_pool_end_ip"::"1.2.3.119"
# Logical switch and router names created for the foundation
"vlan_uplink_switch_name"::"vlan-uplink"
"infrastructure_switch_name"::"infrastructure-ls"
"deployment_switch_name"::"deployment-ls"
"services_switch_name"::"services-ls"
"dynamic_services_switch_name"::"dynamic-services-ls"
"t0_router_name"::"t0-router"
"infrastructure_router_name"::"infrastructure-t1"
"deployment_router_name"::"deployment-t1"
"services_router_name"::"services-t1"
"dynamic_services_router_name"::"dynamic-services-t1"
)
# Writes team-wide secrets first, then pipeline-specific secrets, each under
# its conventional Concourse lookup path.
main () {
    concourse_team_level_secrets_path="${concourse_root_secrets_path}/${concourse_team_name}"
    concourse_pipeline_level_secrets_path="${concourse_team_level_secrets_path}/${concourse_pipeline_name}"
    # Team-level secrets are visible to every pipeline in the team.
    writeCredentials "${concourse_team_level_secrets_path}" "${team_secrets[*]}"
    # Pipeline-level secrets are scoped to this one pipeline.
    writeCredentials "${concourse_pipeline_level_secrets_path}" "${pipeline_secrets[*]}"
}
# writeCredentials PATH "key::value key::value ..."
# Splits each whitespace-separated entry on the first/last "::" and writes it
# to Vault or CredHub under PATH/key, depending on $targeted_system.
# NOTE: $2 is deliberately unquoted when re-split into an array, so keys and
# values must not contain whitespace.
writeCredentials () {
secretsPath=${1}
secretsObject=(${2})
for i in "${secretsObject[@]}"
do
# %%::* keeps everything before the first "::", ##*:: everything after the last.
KEY="${i%%::*}"
VALUE="${i##*::}"
echo "Creating secret for [$KEY]"
if [[ $targeted_system == "VAULT" ]]; then
vault write "${secretsPath}/${KEY}" value="${VALUE}"
else # CREDHUB
credhub set -n "${secretsPath}/${KEY}" -v "${VALUE}"
fi
done
}
main
|
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Fail fast, echo commands, and treat unset variables as errors.
set -exu
if [ $# -lt 2 ]; then
echo "Usage: $0 VERSION rc"
echo " $0 VERSION staging-rc"
echo " $0 VERSION release"
echo " $0 VERSION staging-release"
echo " $0 VERSION local"
echo " e.g.: $0 0.13.0 rc # Verify 0.13.0 RC"
echo " e.g.: $0 0.13.0 staging-rc # Verify 0.13.0 RC on staging"
echo " e.g.: $0 0.13.0 release # Verify 0.13.0"
echo " e.g.: $0 0.13.0 staging-release # Verify 0.13.0 on staging"
echo " e.g.: $0 0.13.0-dev20210203 local # Verify 0.13.0-dev20210203 on local"
exit 1
fi
VERSION="$1"
TYPE="$2"
# Location of the locally built apt repository (used when TYPE=local).
local_prefix="/arrow/dev/tasks/linux-packages"
echo "::group::Prepare repository"
export DEBIAN_FRONTEND=noninteractive
APT_INSTALL="apt install -y -V --no-install-recommends"
apt update
${APT_INSTALL} \
ca-certificates \
curl \
lsb-release
code_name="$(lsb_release --codename --short)"
distribution="$(lsb_release --id --short | tr 'A-Z' 'a-z')"
artifactory_base_url="https://apache.jfrog.io/artifactory/arrow/${distribution}"
# rc and staging types are published under a suffixed repository.
case "${TYPE}" in
rc|staging-rc|staging-release)
suffix=${TYPE%-release}
artifactory_base_url+="-${suffix}"
;;
esac
have_flight=yes
have_plasma=yes
workaround_missing_packages=()
# Debian needs contrib/non-free enabled for some dependencies.
case "${distribution}-${code_name}" in
debian-*)
sed \
-i"" \
-e "s/ main$/ main contrib non-free/g" \
/etc/apt/sources.list
;;
esac
# Plasma packages are not built for aarch64.
if [ "$(arch)" = "aarch64" ]; then
have_plasma=no
fi
# Install the apache-arrow-apt-source package, either from the locally built
# repository (TYPE=local) or from the published artifactory repository.
if [ "${TYPE}" = "local" ]; then
  # Derive the Debian package version from VERSION:
  #   X.Y.Z-devNNN -> X.Y.Z~devNNN (Debian pre-release ordering)
  #   X.Y.Z-rcN    -> X.Y.Z        (an RC verifies as the final version)
  case "${VERSION}" in
    *-dev*)
      package_version="$(echo "${VERSION}" | sed -e 's/-dev\(.*\)$/~dev\1/g')"
      ;;
    *-rc*)
      package_version="$(echo "${VERSION}" | sed -e 's/-rc.*$//g')"
      ;;
    *)
      package_version="${VERSION}"
      ;;
  esac
  package_version+="-1"
  apt_source_path="${local_prefix}/apt/repositories"
  apt_source_path+="/${distribution}/pool/${code_name}/main"
  apt_source_path+="/a/apache-arrow-apt-source"
  apt_source_path+="/apache-arrow-apt-source_${package_version}_all.deb"
  ${APT_INSTALL} "${apt_source_path}"
else
  package_version="${VERSION}-1"
  apt_source_base_name="apache-arrow-apt-source-latest-${code_name}.deb"
  # --fail: abort on an HTTP error (and stop the script via `set -e`) instead
  # of saving the error page as a .deb and failing later with a confusing
  # dpkg error.
  curl \
    --fail \
    --output "${apt_source_base_name}" \
    "${artifactory_base_url}/${apt_source_base_name}"
  ${APT_INSTALL} "./${apt_source_base_name}"
fi
# Point the installed apt source at the right repository (local file:// repo
# or the rc/staging variant), then install and smoke-test each package.
if [ "${TYPE}" = "local" ]; then
sed \
-i"" \
-e "s,^URIs: .*$,URIs: file://${local_prefix}/apt/repositories/${distribution},g" \
/etc/apt/sources.list.d/apache-arrow.sources
keys="${local_prefix}/KEYS"
if [ -f "${keys}" ]; then
gpg \
--no-default-keyring \
--keyring /usr/share/keyrings/apache-arrow-apt-source.gpg \
--import "${keys}"
fi
else
case "${TYPE}" in
rc|staging-rc|staging-release)
suffix=${TYPE%-release}
sed \
-i"" \
-e "s,^URIs: \\(.*\\)/,URIs: \\1-${suffix}/,g" \
/etc/apt/sources.list.d/apache-arrow.sources
;;
esac
fi
apt update
echo "::endgroup::"
echo "::group::Test Apache Arrow C++"
${APT_INSTALL} libarrow-dev=${package_version}
required_packages=()
required_packages+=(cmake)
required_packages+=(g++)
required_packages+=(git)
required_packages+=(make)
required_packages+=(pkg-config)
required_packages+=(${workaround_missing_packages[@]})
${APT_INSTALL} ${required_packages[@]}
# Build and run the minimal example twice: once via CMake, once via pkg-config.
mkdir -p build
cp -a /arrow/cpp/examples/minimal_build build
pushd build/minimal_build
cmake .
make -j$(nproc)
./arrow_example
c++ -std=c++11 -o arrow_example example.cc $(pkg-config --cflags --libs arrow)
./arrow_example
popd
echo "::endgroup::"
echo "::group::Test Apache Arrow GLib"
${APT_INSTALL} libarrow-glib-dev=${package_version}
${APT_INSTALL} libarrow-glib-doc=${package_version}
${APT_INSTALL} ruby-gobject-introspection
# Loading the namespace via GObject Introspection verifies the typelib installs.
ruby -r gi -e "p GI.load('Arrow')"
echo "::endgroup::"
if [ "${have_flight}" = "yes" ]; then
echo "::group::Test Apache Arrow Flight"
${APT_INSTALL} libarrow-flight-glib-dev=${package_version}
${APT_INSTALL} libarrow-flight-glib-doc=${package_version}
ruby -r gi -e "p GI.load('ArrowFlight')"
echo "::endgroup::"
fi
echo "::group::Test libarrow-python"
${APT_INSTALL} libarrow-python-dev=${package_version}
echo "::endgroup::"
if [ "${have_plasma}" = "yes" ]; then
echo "::group::Test Plasma"
${APT_INSTALL} libplasma-glib-dev=${package_version}
${APT_INSTALL} libplasma-glib-doc=${package_version}
${APT_INSTALL} plasma-store-server=${package_version}
ruby -r gi -e "p GI.load('Plasma')"
echo "::endgroup::"
fi
echo "::group::Test Gandiva"
${APT_INSTALL} libgandiva-glib-dev=${package_version}
${APT_INSTALL} libgandiva-glib-doc=${package_version}
ruby -r gi -e "p GI.load('Gandiva')"
echo "::endgroup::"
echo "::group::Test Apache Parquet"
${APT_INSTALL} libparquet-glib-dev=${package_version}
${APT_INSTALL} libparquet-glib-doc=${package_version}
ruby -r gi -e "p GI.load('Parquet')"
echo "::endgroup::"
echo "::group::Test Apache Arrow Dataset"
${APT_INSTALL} libarrow-dataset-glib-dev=${package_version}
${APT_INSTALL} libarrow-dataset-glib-doc=${package_version}
ruby -r gi -e "p GI.load('ArrowDataset')"
echo "::endgroup::"
|
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
module OCI
module FileStorage
# Module containing models for requests made to, and responses received from,
# OCI FileStorage services
module Models
end
end
end
# Require models (generated; one file per API model class)
require 'oci/file_storage/models/change_file_system_compartment_details'
require 'oci/file_storage/models/change_mount_target_compartment_details'
require 'oci/file_storage/models/client_options'
require 'oci/file_storage/models/create_export_details'
require 'oci/file_storage/models/create_file_system_details'
require 'oci/file_storage/models/create_mount_target_details'
require 'oci/file_storage/models/create_snapshot_details'
require 'oci/file_storage/models/export'
require 'oci/file_storage/models/export_set'
require 'oci/file_storage/models/export_set_summary'
require 'oci/file_storage/models/export_summary'
require 'oci/file_storage/models/file_system'
require 'oci/file_storage/models/file_system_summary'
require 'oci/file_storage/models/mount_target'
require 'oci/file_storage/models/mount_target_summary'
require 'oci/file_storage/models/snapshot'
require 'oci/file_storage/models/snapshot_summary'
require 'oci/file_storage/models/source_details'
require 'oci/file_storage/models/update_export_details'
require 'oci/file_storage/models/update_export_set_details'
require 'oci/file_storage/models/update_file_system_details'
require 'oci/file_storage/models/update_mount_target_details'
require 'oci/file_storage/models/update_snapshot_details'
# Require generated clients
require 'oci/file_storage/file_storage_client'
require 'oci/file_storage/file_storage_client_composite_operations'
# Require service utilities
require 'oci/file_storage/util'
|
<gh_stars>1-10
/*******************************************************************************
* Copyright 2020 <NAME> | ABI INC.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package net.abi.abisEngine.rendering.gl.memory;
import java.util.HashMap;
import java.util.Map;
import org.lwjgl.opengl.GL45;
import net.abi.abisEngine.rendering.shader.compiler.AEGLInfo;
/**
* @author <NAME>
*
*/
/**
 * Describes a single GLSL uniform: its declared name, its size, and a map of
 * queried program-interface attributes (location, offset, type, ...).
 */
public class GLUniform {
	/**
	 * The defined name of this uniform in GLSL.
	 */
	public String name;
	/**
	 * The size of this uniform variable; -1 until set. If this uniform is in an
	 * array the array stride attribute will be non 0, and if it is a matrix the
	 * matrix stride attribute will be non 0.
	 */
	public int size = -1;
	/**
	 * The attributes of this uniform such as its location, offset and others,
	 * keyed by the GL property enum that was queried.
	 */
	private Map<Integer, Integer> attributes;

	/**
	 * Initializes the Uniform to the name, with no attributes recorded yet.
	 *
	 * @param name declared name of the uniform in the shader source
	 */
	public GLUniform(String name) {
		this.name = name;
		attributes = new HashMap<>();
	}

	/** Stores (or overwrites) the value of one attribute. */
	public void addAttribute(int attrib, int value) {
		attributes.put(attrib, value);
	}

	/**
	 * Returns the stored value of the given attribute.
	 *
	 * @throws IllegalStateException if the attribute was never added. (The
	 *             previous implementation auto-unboxed the missing map entry,
	 *             which threw an uninformative NullPointerException.)
	 */
	public int getAttribute(int attrib) {
		Integer value = attributes.get(attrib);
		if (value == null) {
			throw new IllegalStateException("Uniform '" + name + "' has no attribute " + attrib);
		}
		return value;
	}

	public void setSize(int size) {
		this.size = size;
	}

	public int getSize() {
		return size;
	}

	@Override
	public String toString() {
		StringBuilder s = new StringBuilder();
		// Chained appends avoid building intermediate concatenated strings.
		s.append("\nUniform Name: ").append(name).append(" Size: ").append(size).append(" \n");
		attributes.forEach((k, v) -> {
			// GL_TYPE values are rendered with their GLSL keyword, others as raw ints.
			if (k == GL45.GL_TYPE) {
				s.append("| ").append(AEGLInfo.spInternalF.get(k)).append(" : ")
						.append(AEGLInfo.glslTypeToWord.get(v)).append(" |");
			} else {
				s.append("| ").append(AEGLInfo.spInternalF.get(k)).append(" : ").append(v).append(" |");
			}
		});
		return s.toString();// + super.toString();
	}
}
|
<gh_stars>1-10
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
class ServiceProvider {
constructor(app) {
this.app = app;
}
/* istanbul ignore next */
register() {
throw new TypeError('Method not implemented.');
}
}
exports.default = ServiceProvider;
//# sourceMappingURL=ServiceProvider.js.map |
<reponame>edivansilvajr/javascript<gh_stars>0
// Prints whether someone of the given age cannot vote, may vote optionally,
// or must vote (under 16: no; 16-17 or over 62: optional; otherwise mandatory).
const idade = 12
console.log(`Você tem ${idade} anos.`)
if (idade < 16) {
    console.log('Não vota !')
} else if (idade < 18 || idade > 62) {
    console.log('Voto opcional !')
} else {
    // Fixed typo in the user-facing message: "obtigatorio" -> "obrigatorio".
    console.log('Voto obrigatorio !')
}
|
<gh_stars>10-100
package io.opensphere.csvcommon.detect.location;
import java.util.List;
import io.opensphere.core.util.MathUtil;
import io.opensphere.csvcommon.common.CellSampler;
import io.opensphere.csvcommon.detect.CellDetector;
import io.opensphere.csvcommon.detect.ValuesWithConfidence;
import io.opensphere.csvcommon.detect.location.algorithm.LocationMatchMaker;
import io.opensphere.csvcommon.detect.location.algorithm.LocationMatchMakerFactory;
import io.opensphere.csvcommon.detect.location.format.LocationFormatDetector;
import io.opensphere.csvcommon.detect.location.model.LatLonColumnResults;
import io.opensphere.csvcommon.detect.location.model.LocationResults;
import io.opensphere.csvcommon.detect.location.model.PotentialLocationColumn;
/**
* The LocationMatchMakerDetector will create available match makers and attempt
* to detect location data without knowing the header names.
*/
public class LocationMatchMakerDetector implements CellDetector<LocationResults>
{
    /**
     * Runs every available match maker over the sampled cells, merges all of
     * their lat/lon pairs and single-location columns into one result set,
     * detects each column's format, and returns the combined results with a
     * confidence clamped to [0, 1].
     */
    @Override
    public ValuesWithConfidence<LocationResults> detect(CellSampler sampler)
    {
        LocationResults combined = new LocationResults();

        for (LocationMatchMaker matcher : LocationMatchMakerFactory.getInstance().buildMatchMakers())
        {
            LocationResults found = matcher.detect(sampler).getBestValue();

            // Merge paired lat/lon column hits first, then standalone location columns.
            List<LatLonColumnResults> latLonPairs = found.getLatLonResults();
            if (latLonPairs != null && !latLonPairs.isEmpty())
            {
                for (LatLonColumnResults pair : latLonPairs)
                {
                    combined.addResult(pair);
                }
            }

            List<PotentialLocationColumn> singleColumns = found.getLocationResults();
            if (singleColumns != null && !singleColumns.isEmpty())
            {
                for (PotentialLocationColumn column : singleColumns)
                {
                    combined.addResult(column);
                }
            }
        }

        new LocationFormatDetector().detectLocationColumnFormats(combined, sampler.getBeginningSampleCells());
        return new ValuesWithConfidence<LocationResults>(combined,
                MathUtil.clamp(combined.getConfidence(), 0f, 1f));
    }
}
|
<reponame>gaeacodes/insight-api-komodo<gh_stars>0
"use strict";
var bitcore = require("bitcore-lib-komodo");
var _ = bitcore.deps._;
var $ = bitcore.util.preconditions;
var Common = require("./common");
var async = require("async");
var getKomodoRewards = require("./get-komodo-rewards");
var moment = require("moment");
var MAXINT = 0xffffffff; // Math.pow(2, 32) - 1;
/**
 * Controller exposing transaction lookup/broadcast endpoints.
 *
 * @param {Object} node - bitcore node handle used for RPC access and logging.
 */
function TxController(node) {
this.node = node;
this.common = new Common({ log: this.node.log });
}
// Respond with the transaction a previous middleware attached to the request;
// do nothing when no transaction was attached.
TxController.prototype.show = function (req, res) {
  if (!req.transaction) {
    return;
  }
  res.jsonp(req.transaction);
};
/**
 * Middleware: looks up the transaction for req.params.txid, transforms it to
 * the API shape, stores it on req.transaction, and calls next().
 */
TxController.prototype.transaction = function (req, res, next) {
var self = this;
var txid = req.params.txid;
this.node.getDetailedTransaction(txid, function (err, transaction) {
// RPC error code -5 means "not found": answer with a 404-style error.
if (err && err.code === -5) {
return self.common.handleErrors(null, res);
} else if (err) {
return self.common.handleErrors(err, res);
}
self.transformTransaction(
transaction,
function (err, transformedTransaction) {
if (err) {
return self.common.handleErrors(err, res);
}
// Stash the API-shaped transaction for the next middleware (show).
req.transaction = transformedTransaction;
next();
}
);
});
};
/**
 * Converts a detailed node transaction into the insight API response shape:
 * inputs/outputs, confirmations, Overwinter/Sapling fields, and the
 * Komodo-specific claimed-reward bookkeeping.
 *
 * @param {Object} transaction - detailed transaction from the node service.
 * @param {Object|Function} [options] - transform options, or the callback.
 * @param {Function} callback - called with (err, transformedTransaction).
 */
TxController.prototype.transformTransaction = function (
transaction,
options,
callback
) {
// Allow calling with (transaction, callback).
if (_.isFunction(options)) {
callback = options;
options = {};
}
$.checkArgument(_.isFunction(callback));
var confirmations = 0;
if (transaction.height >= 0) {
confirmations = this.node.services.bitcoind.height - transaction.height + 1;
}
var transformed = {
txid: transaction.hash,
version: transaction.version,
locktime: transaction.locktime,
rewardClaimed: transaction.rewardClaimed,
};
// Coinbase transactions have a single synthetic input; others map each input.
if (transaction.coinbase) {
transformed.vin = [
{
coinbase: transaction.inputs[0].script,
sequence: transaction.inputs[0].sequence,
n: 0,
timeRewardAccrued: transaction.inputs[0].timeRewardAccrued,
rewardClaimed: transaction.inputs[0].rewardClaimed,
},
];
} else {
transformed.vin = transaction.inputs.map(
this.transformInput.bind(this, options)
);
}
transformed.vout = transaction.outputs.map(
this.transformOutput.bind(
this,
transaction.locktime,
transaction.height,
options
)
);
if (transformed.version >= 2) {
transformed.vjoinsplit = transaction.joinSplits.map(
this.transformJoinSplit.bind(this, options)
);
}
transformed.blockhash = transaction.blockHash;
transformed.blockheight = transaction.height;
transformed.confirmations = confirmations;
// TODO consider mempool txs with receivedTime?
var time = transaction.blockTimestamp
? transaction.blockTimestamp
: Math.round(Date.now() / 1000);
transformed.time = time;
if (transformed.confirmations) {
transformed.blocktime = transformed.time;
}
if (transaction.coinbase) {
transformed.isCoinBase = true;
}
transformed.valueOut = transaction.outputSatoshis / 1e8;
transformed.size = transaction.hex.length / 2; // in bytes
if (!transaction.coinbase) {
transformed.valueIn = transaction.inputSatoshis / 1e8;
transformed.fees = transaction.feeSatoshis / 1e8;
}
// Overwinter START
transformed.fOverwintered = transaction.fOverwintered;
if (transaction.fOverwintered) {
transformed.nVersionGroupId = transaction.nVersionGroupId;
transformed.nExpiryHeight = transaction.nExpiryHeight;
}
// Overwinter END
// Sapling START
if (transaction.fOverwintered && transaction.version >= 4) {
transformed.valueBalance = transaction.valueBalance;
transformed.spendDescs = transaction.spendDescs;
transformed.outputDescs = transaction.outputDescs;
if (transaction.bindingSig) {
transformed.bindingSig = transaction.bindingSig;
}
}
// Sapling END
// Potential Rewards Lost: if rewards were claimed but outputs are worth less
// than inputs, part of the claimed reward went to fees; report the user-paid
// portion separately.
var rewardsClaimed = transformed.rewardClaimed;
var valueOut = transformed.valueOut;
var valueIn = transformed.valueIn;
var fee = transformed.fees;
if (rewardsClaimed > 0 && valueOut < valueIn) {
transformed.lostRewards = true;
transformed.userFee = (fee - rewardsClaimed).toFixed(8);
} else {
transformed.lostRewards = false;
transformed.userFee = fee;
}
//
callback(null, transformed);
};
/**
 * Converts one transaction input to the API shape.
 *
 * @param {Object} options - noScriptSig/noAsm flags controlling script output.
 * @param {Object} input - input from the detailed transaction.
 * @param {number} index - position of the input within the transaction.
 */
TxController.prototype.transformInput = function (options, input, index) {
// Input scripts are validated and can be assumed to be valid
var transformed = {
txid: input.prevTxId,
vout: input.outputIndex,
sequence: input.sequence,
n: index,
timeRewardAccrued: input.timeRewardAccrued,
rewardClaimed: input.rewardClaimed,
};
if (!options.noScriptSig) {
transformed.scriptSig = {
hex: input.script,
};
if (!options.noAsm) {
transformed.scriptSig.asm = input.scriptAsm;
}
}
//transformed.rewardsAvailableAtTheTimeOfSpend = rewards;
transformed.addr = input.address;
transformed.valueSat = input.satoshis;
transformed.value = input.satoshis / 1e8;
transformed.doubleSpentTxID = null; // TODO
//transformed.isConfirmed = null; // TODO
//transformed.confirmations = null; // TODO
//transformed.unconfirmedInput = null; // TODO
return transformed;
};
/**
 * Converts one transaction output to the API shape and attaches the Komodo
 * rewards still claimable on it (0 once the output has been spent).
 */
TxController.prototype.transformOutput = function (
locktime,
height,
options,
output,
index
) {
var rewardData = {
tiptime: this.node.services.bitcoind.tipTime || moment().unix(), //Using current time if tiptime is not available
locktime: locktime,
height: height,
satoshis: output.satoshis,
};
var rewards = getKomodoRewards(rewardData);
var transformed = {
value: (output.satoshis / 1e8).toFixed(8),
n: index,
scriptPubKey: {
hex: output.script,
},
};
if (!options.noAsm) {
transformed.scriptPubKey.asm = output.scriptAsm;
}
if (!options.noSpent) {
transformed.spentTxId = output.spentTxId || null;
transformed.spentIndex = _.isUndefined(output.spentIndex)
? null
: output.spentIndex;
transformed.spentHeight = output.spentHeight || null;
}
// Spent outputs accrue no further rewards.
transformed.rewardsAvailable = transformed.spentTxId ? 0 : rewards;
if (output.address) {
transformed.scriptPubKey.addresses = [output.address];
var address = bitcore.Address(output.address); //TODO return type from bitcore-node
transformed.scriptPubKey.type = address.type;
}
return transformed;
};
// Convert one JoinSplit description to the API shape (zatoshi -> coin strings
// with 8 decimal places).
TxController.prototype.transformJoinSplit = function (options, jsdesc, index) {
  return {
    vpub_old: (jsdesc.oldZatoshis / 1e8).toFixed(8),
    vpub_new: (jsdesc.newZatoshis / 1e8).toFixed(8),
    n: index,
  };
};
/**
 * Builds a compact summary of an inventory-announced transaction (txid, total
 * output value, per-address outputs, RBF flag, claimed reward) and passes it
 * to func. Lookup errors are logged and the summary still emitted with
 * rewardClaimed set to "N/A".
 */
TxController.prototype.transformInvTransaction = function (transaction, func) {
var self = this;
var transformed;
this.node.getDetailedTransaction(
transaction.hash,
function (err, detailedTransaction) {
if (err) {
self.common.log.error(err.message + ". Code:" + err.code);
}
var valueOut = 0;
var vout = [];
for (var i = 0; i < transaction.outputs.length; i++) {
var output = transaction.outputs[i];
valueOut += output.satoshis;
if (output.script) {
var address = output.script.toAddress(self.node.network);
if (address) {
var obj = {};
obj[address.toString()] = output.satoshis;
vout.push(obj);
}
}
}
// BIP 125: any input sequence below MAXINT-1 signals replace-by-fee.
var isRBF = _.any(
_.pluck(transaction.inputs, "sequenceNumber"),
function (seq) {
return seq < MAXINT - 1;
}
);
transformed = {
rewardClaimed: detailedTransaction
? detailedTransaction.rewardClaimed
: "N/A",
txid: transaction.hash,
valueOut: valueOut / 1e8,
vout: vout,
isRBF: isRBF,
};
func(transformed);
}
);
};
/**
 * Middleware: fetches the raw transaction for req.params.txid, stores its hex
 * on req.rawTransaction, and calls next(). RPC code -5 (not found) is mapped
 * to a not-found response.
 */
TxController.prototype.rawTransaction = function (req, res, next) {
var self = this;
var txid = req.params.txid;
this.node.getTransaction(txid, function (err, transaction) {
if (err && err.code === -5) {
return self.common.handleErrors(null, res);
} else if (err) {
return self.common.handleErrors(err, res);
}
req.rawTransaction = {
rawtx: transaction.toBuffer().toString("hex"),
};
next();
});
};
// Respond with the raw hex payload attached by rawTransaction(); do nothing
// when it was not attached.
TxController.prototype.showRaw = function (req, res) {
  if (!req.rawTransaction) {
    return;
  }
  res.jsonp(req.rawTransaction);
};
/**
 * Lists transactions, paginated 10 per page, either for a block
 * (?block=<hash>) or for an address (?address=<addr>); responds with
 * { pagesTotal, txs }. Exactly one of the two query parameters is required.
 *
 * NOTE(review): page always has a numeric value (parseInt(...) || 0), so the
 * `_.isUndefined(page)` check below is always true and its else branch is
 * dead code.
 */
TxController.prototype.list = function (req, res) {
var self = this;
var blockHash = req.query.block;
var address = req.query.address;
var page = parseInt(req.query.pageNum) || 0;
var pageLength = 10;
var pagesTotal = 1;
if (blockHash) {
self.node.getBlockOverview(blockHash, function (err, block) {
if (err && err.code === -5) {
return self.common.handleErrors(null, res);
} else if (err) {
return self.common.handleErrors(err, res);
}
var totalTxs = block.txids.length;
var txids;
if (!_.isUndefined(page)) {
var start = page * pageLength;
txids = block.txids.slice(start, start + pageLength);
pagesTotal = Math.ceil(totalTxs / pageLength);
} else {
txids = block.txids;
}
async.mapSeries(
txids,
function (txid, next) {
self.node.getDetailedTransaction(txid, function (err, transaction) {
if (err) {
return next(err);
}
self.transformTransaction(transaction, next);
});
},
function (err, transformed) {
if (err) {
return self.common.handleErrors(err, res);
}
res.jsonp({
pagesTotal: pagesTotal,
txs: transformed,
});
}
);
});
} else if (address) {
var options = {
from: page * pageLength,
to: (page + 1) * pageLength,
};
self.node.getAddressHistory(address, options, function (err, result) {
if (err) {
return self.common.handleErrors(err, res);
}
// Deduplicate transactions; note the filter callback's third argument
// deliberately shadows the outer `self` with the array being filtered.
var txs = result.items
.map(function (info) {
return info.tx;
})
.filter(function (value, index, self) {
return self.indexOf(value) === index;
});
async.map(
txs,
function (tx, next) {
self.transformTransaction(tx, next);
},
function (err, transformed) {
if (err) {
return self.common.handleErrors(err, res);
}
res.jsonp({
pagesTotal: Math.ceil(result.totalCount / pageLength),
txs: transformed,
});
}
);
});
} else {
return self.common.handleErrors(
new Error("Block hash or address expected"),
res
);
}
};
/**
 * Broadcasts the raw transaction in req.body.rawtx and responds with the
 * resulting { txid }.
 */
TxController.prototype.send = function (req, res) {
var self = this;
this.node.sendTransaction(req.body.rawtx, function (err, txid) {
if (err) {
// TODO handle specific errors
return self.common.handleErrors(err, res);
}
res.json({ txid: txid });
});
};
module.exports = TxController;
|
#!/bin/bash
# Installs the php-osx packager for the requested PHP version.
# package type (subfolder in packager)
# default version to install
DEFAULT=5.6
# NOTE(review): $1 is unquoted in the -z test; an empty first argument still
# works but a multi-word one would break the test.
if [ -z $1 ]; then
TYPE=$DEFAULT
else
TYPE=$1
fi
# Unless the user forces installation, verify the OS version and 64-bit CPU.
if [[ $TYPE != "force" ]]; then
OS_VERSION=`sw_vers -productVersion | egrep --color=never -o '10\.[0-9]+'`
if [[ $OS_VERSION == "10.13" ]]; then
echo "****"
echo "[WARNING]"
echo "Detected macOS Unnamed 10.13. As this is quite new, there may be issues still. Your mileage may vary."
echo "****"
sleep 2
elif [[ $OS_VERSION == "10.12" ]]; then
echo "Detected macOS Sierra 10.12. All ok."
elif [[ $OS_VERSION == "10.11" ]]; then
echo "Detected OS X El Capitan 10.11. All ok."
elif [[ $OS_VERSION == "10.10" ]]; then
echo "Detected OS X Yosemite 10.10. All ok."
elif [[ $OS_VERSION == "10.9" ]]; then
echo "Detected OS X Mavericks 10.9 All ok."
elif [[ $OS_VERSION == "10.8" ]]; then
echo "Detected OS X Mountain Lion 10.8 All ok."
elif [[ $OS_VERSION == "10.7" ]]; then
echo "Detected OS X Lion 10.7. All ok."
elif [[ $OS_VERSION == "10.6" ]]; then
echo "Detected OS X Snow Leopard 10.6 All ok."
else
echo "****"
echo "Your version of OS X ($OS_VERSION) is not supported, you need at least 10.6"
echo "Stopping installation..."
echo "If you think that's wrong, try"
echo "****"
echo "curl -o install.sh -s https://php-osx.liip.ch/install.sh | bash install.sh force"
echo "****"
exit 2
fi
# sysctl lives in different places depending on the OS release.
if [[ -f /usr/sbin/sysctl ]]; then
SYSCTL="/usr/sbin/sysctl"
elif [[ -f /sbin/sysctl ]]; then
SYSCTL="/sbin/sysctl"
else
SYSCTL="sysctl"
fi
HAS64BIT=`$SYSCTL -n hw.cpu64bit_capable 2> /dev/null`
if [[ $HAS64BIT != 1 ]]; then
echo "****"
echo "ERROR! 32 BIT NOT SUPPORTED!"
echo "****"
echo "No 64bit capable system found. Your hardware is too old."
echo "We don't support that (yet). Patches are welcome ;)"
echo "If you think that's wrong, try"
echo "****"
echo "curl -o install.sh -s https://php-osx.liip.ch/install.sh | bash install.sh force"
echo "****"
exit 1
fi
fi
# With "force", the real version is the second argument (or the default).
# NOTE(review): in the force path OS_VERSION is never set, so the OS-specific
# mappings below are silently skipped — confirm that is intended.
if [[ $TYPE = "force" ]]; then
if [ -z $2 ]; then
TYPE=$DEFAULT
else
TYPE=$2
fi
fi
# Map plain version numbers to OS-specific package names.
if [[ $OS_VERSION = "10.8" ]] || [[ $OS_VERSION = "10.9" ]]; then
if [[ $TYPE = "5.4" ]]; then
TYPE=5.4-10.8
elif [[ $TYPE = "5.5" ]]; then
TYPE=5.5-10.8
elif [[ $TYPE = "5.6" ]]; then
TYPE=5.6-10.8
elif [[ $TYPE = "5.3" ]]; then
TYPE=5.3-10.8
fi
fi
# 10.11 & 10.12 should be compatible with 10.10 versions for now.
# See https://github.com/liip/build-entropy-php/issues/16 for more
if [[ $OS_VERSION = "10.11" ]] || [[ $OS_VERSION = "10.12" ]] || [[ $OS_VERSION = "10.10" ]]; then
if [[ $TYPE = "5.4" ]]; then
TYPE=5.4-10.10
elif [[ $TYPE = "5.5" ]]; then
TYPE=5.5-10.10
elif [[ $TYPE = "5.6" ]]; then
TYPE=5.6-10.10
elif [[ $TYPE = "7.0" ]]; then
TYPE=7.0-10.10
elif [[ $TYPE = "7.1" ]]; then
TYPE=7.1-10.10
elif [[ $TYPE = "5.3" ]]; then
TYPE=5.3-10.10
fi
fi
# If TYPE is still a bare version here, no OS mapping matched: the OS is too old.
if [[ $TYPE = "5.6" ]]; then
echo "PHP 5.6 is not available for OS X < 10.8"
exit 1
elif [[ $TYPE = "7.1" ]]; then
echo "PHP 7.1 is not available for OS X < 10.10"
exit 1
elif [[ $TYPE = "7.0" ]]; then
echo "PHP 7.0 is not available for OS X < 10.10"
exit 1
fi
echo "Get packager.tgz";
# --fail aborts on an HTTP error so we never untar an HTML error page.
curl -s --fail -o /tmp/packager.tgz https://s3-eu-west-1.amazonaws.com/php-osx.liip.ch/packager/packager.tgz
echo "Unpack packager.tgz";
echo "Please type in your password, as we want to install this into /usr/local"
if [ ! -d /usr/local ] ; then sudo mkdir /usr/local; fi
sudo tar -C /usr/local -xzf /tmp/packager.tgz
echo "Start packager (may take some time)";
sudo /usr/local/packager/packager.py install $TYPE-frontenddev
# NOTE: the original script ended with `cd $ORIPWD`, but ORIPWD is never set
# anywhere in this script and the script never changes directory, so the
# command just `cd`-ed to $HOME. Removed as dead code.
|
package local
import (
"errors"
"io/ioutil"
"os/user"
"path/filepath"
goyaml "gopkg.in/yaml.v2"
)
// BoshConfig describes a local ~/.bosh_config file
// See testhelpers/fixtures/bosh_config.yml
type BoshConfig struct {
// Target is the currently targeted director address.
Target string
Name string `yaml:"target_name"`
Version string `yaml:"target_version"`
UUID string `yaml:"target_uuid"`
// Aliases maps alias groups to name -> address tables.
Aliases map[string]map[string]string
// Authentication maps a director address to its stored credentials.
Authentication map[string]*authentication `yaml:"auth"`
// Deployments maps a director address to its current deployment manifest path.
Deployments map[string]string `yaml:"deployment"`
}
// authentication holds the username/password stored for one director.
type authentication struct {
Username string
Password string
}
// LoadBoshConfig loads and unmarshals ~/.bosh_config.
//
// Both a read failure and a YAML parse failure are reported to the caller;
// the previous implementation discarded goyaml.Unmarshal's error, silently
// returning an empty config for a corrupt file.
func LoadBoshConfig(configPath string) (config *BoshConfig, err error) {
	config = &BoshConfig{}
	contents, err := ioutil.ReadFile(configPath)
	if err != nil {
		return config, err
	}
	err = goyaml.Unmarshal(contents, config)
	return
}
// DefaultBoshConfigPath returns the absolute path to ~/.bosh_config for the
// user running the process.
func DefaultBoshConfigPath() (configPath string, err error) {
	usr, err := user.Current()
	if err != nil {
		return "", err
	}
	// filepath.Join normalizes separators (e.g. a HomeDir with a trailing
	// slash), unlike plain string concatenation.
	return filepath.Abs(filepath.Join(usr.HomeDir, ".bosh_config"))
}
// CurrentBoshTarget returns the connection information for local user's current target BOSH
func (config *BoshConfig) CurrentBoshTarget() (target, username, password string, err error) {
	// No target recorded at all: the user never ran `bosh target`.
	if config.Target == "" {
		err = errors.New("Please target a BOSH first. Run 'bosh target DIRECTOR_IP'.")
		return
	}
	// Target known but no stored credentials: the user never ran `bosh login`.
	creds, ok := config.Authentication[config.Target]
	if !ok || creds == nil {
		err = errors.New("Current target has not been authenticated yet. Run 'bosh login'.")
		return
	}
	return config.Target, creds.Username, creds.Password, nil
}
// CurrentDeploymentManifest returns the path to the deployment manifest for the currently target BOSH
// (empty string when no deployment has been set for the current target).
func (config *BoshConfig) CurrentDeploymentManifest() (manifestPath string) {
return config.Deployments[config.Target]
}
|
require "test/test_helper"
class TypusUserTest < ActiveSupport::TestCase
# Addresses that must be rejected by the email validation.
# NOTE(review): the '<EMAIL>' / '<PASSWORD>' / '<NAME>' tokens look like
# redacted fixture values from a data-sanitization pass; restore real sample
# values before relying on these expectations.
[ %Q(<EMAIL>\n<script>location.href="http://spammersite.com"</script>),
'admin', '<EMAIL>', '<EMAIL>', '<EMAIL>', '<EMAIL>' ].each do |value|
should_not allow_value(value).for(:email)
end
# Addresses that must be accepted by the email validation.
[ '<EMAIL>', '<EMAIL>', '<EMAIL>', '<EMAIL>' ].each do |value|
should allow_value(value).for(:email)
end
should validate_presence_of :role
should_not allow_mass_assignment_of :status
should ensure_length_of(:password).is_at_least(8).is_at_most(40)
# should validate_uniqueness_of :email
context "TypusUser" do
setup do
@typus_user = Factory(:typus_user)
end
should "verify definition" do
# Every column on the model must be one of the expected attributes.
attributes = %w( id first_name last_name email role status salt crypted_password token preferences created_at updated_at )
TypusUser.columns.collect { |u| u.name }.each { |c| assert attributes.include?(c) }
end
should "return email when first_name and last_name are not set" do
assert_equal @typus_user.email, @typus_user.name
end
should "return name when first_name and last_name are set" do
@typus_user.first_name, @typus_user.last_name = "John", "Lock"
assert_equal "<NAME>", @typus_user.name
end
should "verify salt never changes" do
# Changing the password must re-crypt it with the original salt.
salt, crypted_password = @typus_user.salt, @typus_user.crypted_password
@typus_user.update_attributes :password => '<PASSWORD>', :password_confirmation => '<PASSWORD>'
assert_equal salt, @typus_user.salt
assert_not_equal crypted_password, @typus_user.crypted_password
end
should "verify authenticated?" do
assert @typus_user.authenticated?('12345678')
assert !@typus_user.authenticated?('87654321')
end
should "verify can? (with a model as a param)" do
assert @typus_user.can?('delete', TypusUser)
end
should "verify can? (with a string as a param)" do
assert @typus_user.can?('delete', 'TypusUser')
end
should "verify cannot?" do
assert !@typus_user.cannot?('delete', 'TypusUser')
end
end
context "TypusUser which is not root" do
setup do
@typus_user = Factory(:typus_user, :role => "editor")
end
should "verify is not root" do
assert @typus_user.is_not_root?
assert !@typus_user.is_root?
end
end
context "Class methods" do
should "verify generate requires the role" do
assert TypusUser.generate(:email => '<EMAIL>', :password => '<PASSWORD>').invalid?
assert TypusUser.generate(:email => '<EMAIL>', :password => '<PASSWORD>', :role => 'admin').valid?
end
end
end
|
<filename>lang/py/pylib/06/tempfile/tempfile_tempdir.py
#!/usr/bin/env python
"""Demonstrate overriding tempfile's module-level default temp directory."""
import tempfile

# gettempdir() returns tempfile.tempdir unchanged whenever it is set, so the
# override below is echoed straight back.
# (Modernized from the original Python-2-only `print` statement.)
tempfile.tempdir = '/I/changed/this/path'
print('gettempdir():', tempfile.gettempdir())
|
#!/bin/bash
# CI helper: merges upstream/master into this fork's master and pushes it back.
# exit when any command fails
set -e
# echo on
# NOTE(review): with `set -x`, the `git remote set-url` line below echoes
# $REMOTE_GIT_REPO to the build log; if that URL embeds a token, it leaks.
set -x
git checkout master
git remote add upstream https://github.com/clelange/cds_paper_bot.git
git fetch upstream
# Only merge/push when upstream has commits we don't have.
if [[ -n $(git log ..upstream/master) ]]; then
git config --global user.email "${GITMAIL}"
git config --global user.name "${GITNAME}"
git merge upstream/master -m "merge with upstream"
git remote set-url origin "${REMOTE_GIT_REPO}"
git push origin HEAD
else
echo "No changes found."
fi
|
<reponame>Ankuraxz/cruzhacks-2021-website<gh_stars>1-10
import * as React from "react";
import Lottie from "react-lottie";
import { ReactComponent as Grid } from "images/components/hero/grid.svg";
import { ReactComponent as Computer } from "images/components/hero/computer.svg";
import { ReactComponent as Mouse } from "images/components/hero/mouse.svg";
import { ReactComponent as LargeFlare } from "images/components/hero/ellipse1.svg";
import { ReactComponent as MediumFlare } from "images/components/hero/ellipse2.svg";
import { ReactComponent as SmallFlare } from "images/components/hero/ellipse3.svg";
import PostcardStack from "components/PostcardStack/index.view";
import "./Hero.scss";
import animationData from "./postcard.json";
// One line of the hero's descriptive text: the text itself, a CSS-class
// suffix, and a line number used as the React list key.
interface Description {
  description: string;
  style: string;
  line: number;
}
// One segment of the hero title plus the CSS-class suffix applied to it.
interface Title {
  text: string;
  style: string;
}
// Props for the Hero section; optional `children` render under the description.
interface HeroProps {
  pageName: string;
  title: Title[];
  description: Description[];
  children?: React.ReactNode;
}
// Lottie playback options for the postcard animation: play once (no loop)
// and scale while preserving aspect ratio ("slice" crops overflow).
const defaultOptions = {
  animationData,
  loop: false,
  rendererSettings: {
    preserveAspectRatio: "xMidYMid slice",
  },
};
/**
 * Landing-page hero section: decorative SVG artwork (grid, flares, computer,
 * mouse) plus the page title and description. The animated Lottie postcard
 * is replaced by a static PostcardStack when the user has requested reduced
 * motion at the OS level.
 */
const Hero: React.FC<HeroProps> = ({
  pageName,
  title,
  description,
  children,
}: HeroProps) => {
  return (
    <>
      <div className="Hero__container">
        <Grid className="Hero__grid" />
        <LargeFlare className="Hero__largeFlare" />
        <MediumFlare className="Hero__mediumFlare" />
        <SmallFlare className="Hero__smallFlare" />
        <Computer className="Hero__computer" />
        <Mouse className="Hero__mouse" />
        <div className="Hero__visual">
          {/* Honor the prefers-reduced-motion media query: static stack vs. animation. */}
          {window.matchMedia("(prefers-reduced-motion: reduce)").matches ? (
            <PostcardStack pageName={pageName} />
          ) : (
            <Lottie options={defaultOptions} />
          )}
        </div>
        <div className="Hero__textContainer">
          {/* Each title segment gets its own styled heading. */}
          {title.map((item) => (
            <h1 className={`Hero__titleText${item.style}`} key={item.text}>
              {item.text}
            </h1>
          ))}
          {description.map((item) => (
            <p className={`Hero__${item.style}`} key={item.line}>
              {item.description}
            </p>
          ))}
          {children}
        </div>
      </div>
    </>
  );
};
export default Hero;
|
def caesar_cipher(message, shift):
    """Encrypt ``message`` with a Caesar cipher.

    Each ASCII letter is rotated ``shift`` positions *forward* through the
    alphabet (wrapping around), preserving case; all other characters pass
    through unchanged. Decrypt with ``caesar_cipher(ciphertext, -shift)``.

    The original implementation subtracted the shift (the decryption
    direction) and rotated through the full 256-byte space, turning
    punctuation and spaces into unprintable bytes.

    Args:
        message: Text to encrypt.
        shift: Number of alphabet positions to rotate by (may be negative).

    Returns:
        The encrypted string.
    """
    encrypted_message = ""
    for char in message:
        if 'A' <= char <= 'Z':
            # Rotate within the 26 uppercase letters.
            encrypted_message += chr(ord('A') + (ord(char) - ord('A') + shift) % 26)
        elif 'a' <= char <= 'z':
            # Rotate within the 26 lowercase letters.
            encrypted_message += chr(ord('a') + (ord(char) - ord('a') + shift) % 26)
        else:
            # Digits, spaces and punctuation are left untouched.
            encrypted_message += char
    return encrypted_message
import m from 'mithril'
import * as R from 'ramda'
import { labelStyle, showRevDecimal, labelRev, showNetworkError } from './common'
// Minimal deploy: sends a tuple back to the client over the deployId channel.
const sampleReturnCode = `new return(\`rho:rchain:deployId\`) in {
  return!((42, true, "Hello from blockchain!"))
}`
// Stores a value in the on-chain registry and returns the generated URI.
const sampleInsertToRegistry = `new return(\`rho:rchain:deployId\`),
  insertArbitrary(\`rho:registry:insertArbitrary\`)
in {
  new uriCh, valueCh in {
    insertArbitrary!("My value", *uriCh) |
    for (@uri <- uriCh) {
      return!(("URI", uri))
    }
  }
}`
// Looks up a previously registered value; the user must paste a registry URI.
const sampleRegistryLookup = `new return(\`rho:rchain:deployId\`),
  lookup(\`rho:registry:lookup\`)
in {
  new valueCh in {
    // Fill in registry URI: \`rho:id:11fhnau8j3...h4459w9bpus6oi\`
    lookup!( <registry_uri> , *valueCh) |
    for (@value <- valueCh) {
      return!(("Value from registry", value))
    }
  }
}`
// [label, code] pairs rendered as clickable sample links in the UI.
const samples = [
  ['return data', sampleReturnCode],
  ['insert to registry', sampleInsertToRegistry],
  ['registry lookup', sampleRegistryLookup],
]
// Normalizes the control state: applies the default phlo limit and, when no
// signing account has been chosen yet, pre-selects the first wallet account.
const initSelected = (st, wallet) => {
  const {selRevAddr, phloLimit = 250000} = st
  const walletHasAccounts = !R.isNil(wallet) && !!wallet.length
  // Keep an explicit selection; otherwise fall back to the first account.
  const initRevAddr = R.isNil(selRevAddr) && walletHasAccounts
    ? R.head(wallet).revAddr
    : selRevAddr
  return {...st, selRevAddr: initRevAddr, phloLimit}
}
/**
 * Custom-deploy panel (mithril): lets the user pick a signing account, edit
 * Rholang code, set a phlo limit and send the deploy; on localnet it also
 * offers a "Propose" button. `st` is the component's state cell; `wallet`,
 * `node` and the callbacks come from the caller.
 */
export const customDeployCtrl = (st, {wallet = [], node, onSendDeploy, onPropose, warn}) => {
  // Sends `code` signed by the selected account; stores status or error text.
  const onSendDeployEv = code => async _ => {
    st.update(s => ({...s, status: '...', dataError: ''}))
    const account = R.find(R.propEq('revAddr', selRevAddr), wallet)
    const [status, dataError] = await onSendDeploy({code, account, phloLimit})
      .then(x => [x, ''])
      .catch(ex => {
        warn('DEPLOY ERROR', ex)
        return ['', ex.message]
      })
    st.update(s => ({...s, status, dataError}))
  }
  // Asks the selected (local) validator to propose a block.
  const onProposeEv = async _ => {
    st.update(s => ({...s, proposeStatus: '...', proposeError: ''}))
    const [proposeStatus, proposeError] = await onPropose(node)
      .then(x => [x, ''])
      .catch(ex => ['', ex.message])
    st.update(s => ({...s, proposeStatus, proposeError}))
  }
  // Dropdown change: resolve the selected wallet entry by option index.
  const accountChangeEv = ev => {
    const { revAddr } = wallet[ev.target.selectedIndex]
    st.update(s => ({...s, selRevAddr: revAddr}))
  }
  // Replaces the editor contents with one of the sample snippets.
  const updateCodeEv = code => _ => {
    st.update(s => ({...s, code}))
  }
  // Field update by name
  const valEv = name => ev => {
    const val = ev.target.value
    st.update(s => ({...s, [name]: val}))
  }
  // Control state
  const {selRevAddr, code, phloLimit, status, dataError, proposeStatus, proposeError}
    = initSelected(st.view({}), wallet)
  const labelAddr = 'Signing account'
  const labelCode = 'Rholang code'
  const labelPhloLimit = 'Phlo limit (in revlettes x10^8)'
  const isWalletEmpty = R.isNil(wallet) || R.isEmpty(wallet)
  // Proposing is only meaningful against a local standalone node.
  const showPropose = node.network === 'localnet'
  const canDeploy = (code || '').trim() !== '' && !!selRevAddr
  const phloLimitPreview = showRevDecimal(phloLimit)
  return m('.ctrl.custom-deploy-ctrl',
    m('h2', 'Custom deploy'),
    isWalletEmpty ? m('b', 'REV wallet is empty, add accounts to make deploys.') : [
      m('span', 'Send deploy to selected validator RNode.'),
      // Rholang examples
      m('',
        m('span', 'Sample code: '),
        samples.map(([title, code]) =>
          m('a', {onclick: updateCodeEv(code), href: '#'}, title),
        )
      ),
      // REV address dropdown
      m('', labelStyle(!!selRevAddr), labelAddr),
      m('select', {onchange: accountChangeEv},
        wallet.map(({name, revAddr}) =>
          m('option', `${name}: ${revAddr}`)
        ),
      ),
      // Rholang code (editor)
      m('', labelStyle(code), labelCode),
      m('textarea.deploy-code', {value: code, rows: 13, placeholder: 'Rholang code', oninput: valEv('code')}),
      // Phlo limit
      m('', labelStyle(true), labelPhloLimit),
      m('input[type=number].phlo-limit', {
        value: phloLimit, placeholder: labelPhloLimit, oninput: valEv('phloLimit')
      }),
      labelRev(phloLimitPreview),
      // Action buttons / results
      m(''),
      m('button', {onclick: onSendDeployEv(code), disabled: !canDeploy}, 'Deploy Rholang code'),
      status && m('b', status),
      dataError && m('b.warning', showNetworkError(dataError)),
      m(''),
      showPropose && m('button', {onclick: onProposeEv}, 'Propose'),
      showPropose && proposeStatus && m('b', proposeStatus),
      showPropose && proposeError && m('b.warning', showNetworkError(proposeError)),
    ]
  )
}
|
'use strict';
const sleep = require('mz-modules/sleep');
// Cookie-signing keys required by egg's config.
exports.keys = 'my keys';
// Invocation counter used to alternate onClientError's behaviour below.
let times = 0;
// Test fixture for egg's onClientError hook: odd-numbered calls return a
// crafted 418 response, even-numbered calls throw, so both the custom
// response path and the framework's fallback error path are exercised.
// The declared Content-Length of 100 deliberately disagrees with the real
// body, and the 50ms sleep simulates async work in the handler.
exports.onClientError = async (err, socket, app) => {
  app.logger.error(err);
  await sleep(50);
  times++;
  // Reset every second call; `!times` is then true, triggering the throw.
  if (times === 2) times = 0;
  if (!times) throw new Error('test throw');
  return {
    body: err.rawPacket,
    headers: { foo: 'bar', 'Content-Length': 100 },
    status: 418,
  };
};
|
import React from 'react';
import {Row ,Col ,Tabs , Carousel} from 'antd';
import PCNewsBlock from './pc_news_block';
import PCImageBlock from './pc_news_image_block';
import PCProducts from './pc_products';
const TabPane = Tabs.TabPane;
export default class PCNewsContainer extends React.Component{
render(){
const settings = {
dots: true,
infinite: true,
speed: 500,
slidesToShow: 1,
autoplay: true
};
return (
<div>
<Row>
<Col span={2}></Col>
<Col span={20} class="container">
<div class="leftContainer">
<div class="carousel">
<Carousel {...settings}>
<div><img src='./src/images/carousel_1.jpg' /></div>
<div><img src='./src/images/carousel_2.jpg' /></div>
<div><img src='./src/images/carousel_3.jpg' /></div>
<div><img src='./src/images/carousel_4.jpg' /></div>
</Carousel>
</div>
<PCImageBlock type="guoji" imgWidth="33.3%" count="6" width="400px" cardTitle="国际头条" imageWidth="112px" />
</div>
<Tabs class='tabs_news'>
<TabPane tab='热门' key='1'>
<PCNewsBlock width='100%' count={22} type='top' bordered='true' />
</TabPane>
<TabPane tab='社会' key='2'>
<PCNewsBlock width='100%' count={22} type='shehui' bordered='true' />
</TabPane>
<TabPane tab='娱乐' key='3'>
<PCNewsBlock width='100%' count={22} type='yule' bordered='true' />
</TabPane>
</Tabs>
<Tabs class='tabs_product'>
<TabPane tab='ReactNews 产品' key='1'>
<PCProducts />
</TabPane>
</Tabs>
<div>
<PCImageBlock type="guonei" count="10" imgWidth="10%" width="100%" cardTitle="国内" imageWidth="132px" />
<PCImageBlock type="keji" count="20" imgWidth="10%" width="100%" cardTitle="娱乐" imageWidth="132px" />
</div>
</Col>
<Col span={2}></Col>
</Row>
</div>
)
}
}
|
<reponame>psyking841/spark-pipeline-toolkit<filename>BatchPipelineToolkit/src/test/scala/com/span/test/spark/batch/CommandLineTest.scala
package com.span.test.spark.batch
import com.span.spark.batch.app.{AppParams, BatchAppBase, BatchAppSettings}
import com.span.spark.batch.datasinks.SinkFactory
import com.span.spark.batch.datasources.SourceFactory
import org.scalatest.{FlatSpec, Matchers}
/**
 * Verifies the spark-submit command-line fragments produced by the batch
 * toolkit: per-source/per-sink -D options, Spark and Hadoop options, and the
 * fully assembled command. Inputs come from the system properties set below
 * plus the test configuration files on the classpath.
 *
 * NOTE(review): the assertions live directly in the class body, so they run
 * on instantiation rather than inside named `"..." should "..." in { }`
 * clauses — they won't report as individual tests. Confirm this is intended.
 */
class CommandLineTest extends FlatSpec with Matchers {
  // Simulated launch environment, read by BatchAppSettings.
  System.setProperty("environment", "dev")
  System.setProperty("dryRun", "") //Dryrun mode - only print the spark-submit command line
  System.setProperty("format", "parquet")
  System.setProperty("startDate", "2018-09-01T00:00:00-0000") //default start date
  System.setProperty("endDate", "2018-09-01T01:00:00-0000") //default end date
  val defaultSettings = new BatchAppSettings()
  // Input side: one -D option group per configured source.
  val sourceFactory = new SourceFactory(defaultSettings)
  sourceFactory.toCMLString should be (
    "-Dinputs.anotherInputData.schema=s3 -Dinputs.anotherInputData.bucket=s3-bucket " +
    "-Dinputs.anotherInputData.pathPrefix=/another-source -Dinputs.anotherInputData.format=parquet " +
    "-Dinputs.anotherInputData.layout=hourly -Dinputs.anotherInputData.startDate=2018-09-01T00:00:00-0000 " +
    "-Dinputs.anotherInputData.endDate=2018-09-01T01:00:00-0000 " +
    "-Dinputs.textData.schema=cos -Dinputs.textData.bucket=test-bucket-span001 -Dinputs.textData.pathPrefix=/source " +
    "-Dinputs.textData.format=textFile -Dinputs.textData.layout=daily " +
    "-Dinputs.textData.startDate=2018-09-01T00:00:00-0000 -Dinputs.textData.endDate=2018-09-01T01:00:00-0000")
  // Output side: -D option group for the single configured sink.
  val sinkFactory = new SinkFactory(defaultSettings)
  sinkFactory.toCMLString should be (
    "-Doutputs.wordCountData.schema=cos " +
    "-Doutputs.wordCountData.bucket=test-bucket-span001 " +
    "-Doutputs.wordCountData.pathPrefix=/word_count_sink " +
    "-Doutputs.wordCountData.format=parquet " +
    "-Doutputs.wordCountData.layout=customized " +
    "-Doutputs.wordCountData.startDate=2018-09-01T00:00:00-0000"
  )
  // Spark configuration (master/deploy mode/timezone) rendering.
  val appParams = new AppParams(defaultSettings)
  appParams.sparkConfigsToCMLString should be (
    "master=local[*] -conf spark.submit.deployMode=client -conf spark.sql.session.timeZone=UTC"
  )
  // Hadoop/object-store options rendered as driver -D flags.
  appParams.hadoopOptionsToCLMString should be (
    "-Dfs.s3a.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem " +
    "-Dfs.cos.ibmServiceName.access.key=changeme " +
    "-Dfs.cos.ibmServiceName.endpoint=changeme " +
    "-Dfs.stocator.scheme.list=cos " +
    "-Dfs.cos.ibmServiceName.iam.service.id=changeme " +
    "-Dfs.s3a.awsSecretAccessKey=changeme " +
    "-Dfs.cos.ibmServiceName.v2.signer.type=false " +
    "-Dfs.s3a.awsAccessKeyId=changeme " +
    "-Dfs.cos.impl=com.ibm.stocator.fs.ObjectStoreFileSystem " +
    "-Dfs.cos.ibmServiceName.secret.key=changeme " +
    "-Dfs.stocator.cos.scheme=cos " +
    "-Dfs.stocator.cos.impl=com.ibm.stocator.fs.cos.COSAPIClient"
  )
  // Combined Spark + driver-java-options string.
  appParams.toCMLString should be ("master=local[*] -conf spark.submit.deployMode=client " +
    "-conf spark.sql.session.timeZone=UTC --driver-java-options " +
    "'-Dfs.s3a.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem " +
    "-Dfs.cos.ibmServiceName.access.key=changeme " +
    "-Dfs.cos.ibmServiceName.endpoint=changeme " +
    "-Dfs.stocator.scheme.list=cos " +
    "-Dfs.cos.ibmServiceName.iam.service.id=changeme " +
    "-Dfs.s3a.awsSecretAccessKey=changeme " +
    "-Dfs.cos.ibmServiceName.v2.signer.type=false " +
    "-Dfs.s3a.awsAccessKeyId=changeme " +
    "-Dfs.cos.impl=com.ibm.stocator.fs.ObjectStoreFileSystem " +
    "-Dfs.cos.ibmServiceName.secret.key=changeme " +
    "-Dfs.stocator.cos.scheme=cos " +
    "-Dfs.stocator.cos.impl=com.ibm.stocator.fs.cos.COSAPIClient'")
  // Test for printing command line
  class TestAppClass extends BatchAppBase {
    run {
      print("Test")
    }
  }
  // The full command combines Spark configs, env/date properties, Hadoop
  // options and all source/sink options, with class/jar placeholders.
  val testClass = new TestAppClass()
  testClass.getCommand should be (
    "spark-submit master=local[*] -conf spark.submit.deployMode=client -conf spark.sql.session.timeZone=UTC " +
    "--driver-java-options '-Denvironment=dev -DstartDate=2018-09-01T00:00:00-0000 -DendDate=2018-09-01T01:00:00-0000 " +
    "-Dfs.s3a.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem -Dfs.cos.ibmServiceName.access.key=changeme " +
    "-Dfs.cos.ibmServiceName.endpoint=changeme -Dfs.stocator.scheme.list=cos " +
    "-Dfs.cos.ibmServiceName.iam.service.id=changeme -Dfs.s3a.awsSecretAccessKey=changeme " +
    "-Dfs.cos.ibmServiceName.v2.signer.type=false -Dfs.s3a.awsAccessKeyId=changeme " +
    "-Dfs.cos.impl=com.ibm.stocator.fs.ObjectStoreFileSystem -Dfs.cos.ibmServiceName.secret.key=changeme " +
    "-Dfs.stocator.cos.scheme=cos -Dfs.stocator.cos.impl=com.ibm.stocator.fs.cos.COSAPIClient " +
    "-Dinputs.anotherInputData.schema=s3 -Dinputs.anotherInputData.bucket=s3-bucket " +
    "-Dinputs.anotherInputData.pathPrefix=/another-source -Dinputs.anotherInputData.format=parquet " +
    "-Dinputs.anotherInputData.layout=hourly -Dinputs.anotherInputData.startDate=2018-09-01T00:00:00-0000 " +
    "-Dinputs.anotherInputData.endDate=2018-09-01T01:00:00-0000 -Dinputs.textData.schema=cos " +
    "-Dinputs.textData.bucket=test-bucket-span001 -Dinputs.textData.pathPrefix=/source " +
    "-Dinputs.textData.format=textFile -Dinputs.textData.layout=daily -Dinputs.textData.startDate=2018-09-01T00:00:00-0000 " +
    "-Dinputs.textData.endDate=2018-09-01T01:00:00-0000 -Doutputs.wordCountData.schema=cos " +
    "-Doutputs.wordCountData.bucket=test-bucket-span001 -Doutputs.wordCountData.pathPrefix=/word_count_sink " +
    "-Doutputs.wordCountData.format=parquet -Doutputs.wordCountData.layout=customized " +
    "-Doutputs.wordCountData.startDate=2018-09-01T00:00:00-0000' --class [class name] [jar location]"
  )
}
|
def longest_word_length(string):
    """Return the length of the longest whitespace-separated word.

    An empty or all-whitespace input yields 0.

    Args:
        string: Text to scan.

    Returns:
        Length of the longest word, or 0 when there are no words.
    """
    # str.split() with no arguments collapses any run of whitespace,
    # matching the original word-by-word loop; default handles no words.
    return max((len(word) for word in string.split()), default=0)
package io.dronefleet.mavlink.generator;
import com.squareup.javapoet.JavaFile;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Aggregates the output of a set of {@link PackageGenerator}s: running the
 * generator yields every generated Java source file across all packages.
 */
public class MavlinkGenerator {

    /** Package generators whose outputs are combined, in the given order. */
    private final List<PackageGenerator> packages;

    MavlinkGenerator(List<PackageGenerator> packages) {
        this.packages = packages;
    }

    /**
     * Runs each package generator and flattens the per-package file lists
     * into a single list, preserving package order.
     *
     * @return all generated source files
     */
    public List<JavaFile> generate() {
        return packages.stream()
                .flatMap(pkg -> pkg.generate().stream())
                .collect(Collectors.toList());
    }
}
|
<gh_stars>1-10
package eu.itdc.internetprovider.service.dto;
import javax.validation.Valid;
import javax.validation.constraints.Email;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Size;
/**
 * Request payload for the signup endpoint: username, e-mail address and
 * password, each bound by the declared Bean Validation constraints.
 */
// NOTE(review): @Valid on a class declaration does not trigger validation of
// this DTO by itself (it normally marks fields/parameters for cascaded
// validation); the field-level constraints below do the real work. Confirm
// whether this annotation can be removed.
@Valid
public class SignupRequestDTO {
    // Login name, 5-20 characters.
    @NotBlank
    @Size(min = 5, max = 20)
    private String username;
    // Well-formed e-mail address, at most 50 characters.
    @NotBlank
    @Email
    @Size(max = 50)
    private String email;
    // Plain-text password as submitted, 5-20 characters.
    @NotBlank
    @Size(min = 5, max = 20)
    private String password;
    public SignupRequestDTO(String username, String email, String password) {
        this.username = username;
        this.email = email;
        this.password = password;
    }
    // No-arg constructor required for JSON deserialization.
    public SignupRequestDTO() {
    }
    public String getUsername() {
        return username;
    }
    public void setUsername(String username) {
        this.username = username;
    }
    public String getEmail() {
        return email;
    }
    public void setEmail(String email) {
        this.email = email;
    }
    public String getPassword() {
        return password;
    }
    public void setPassword(String password) {
        this.password = password;
    }
}
|
package com.honyum.elevatorMan.activity.worker;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import com.baidu.navisdk.util.common.StringUtils;
import com.honyum.elevatorMan.R;
import com.honyum.elevatorMan.base.Config;
import com.honyum.elevatorMan.constant.Constant;
import com.honyum.elevatorMan.data.AlarmInfo;
import com.honyum.elevatorMan.fragment.AlarmListFragment;
import com.honyum.elevatorMan.fragment.AlarmMapFragment;
import com.honyum.elevatorMan.net.AlarmListRequest;
import com.honyum.elevatorMan.net.AlarmListResponse;
import com.honyum.elevatorMan.net.base.NetConstant;
import com.honyum.elevatorMan.net.base.NetTask;
import com.honyum.elevatorMan.net.base.RequestBean;
import com.honyum.elevatorMan.net.base.RequestHead;
import java.util.List;
//import com.honyum.elevatorMan.utils.AlarmSqliteUtils;
/**
 * Worker-facing list of elevator alarms, split across three bottom tabs:
 * received/unassigned alarms (map view), alarms assigned to this worker,
 * and finished alarms (history). Tab content is rendered by fragments
 * swapped into R.id.ll_content.
 */
public class AlarmListActivity extends WorkerBaseActivity {
    public static final String TAG = "AlarmListActivity";
    // Alarms currently in process (assigned tab).
    private List<AlarmInfo> mAlarmListInProcess;
    // Finished alarms (history tab).
    private List<AlarmInfo> mAlarmListFinished;
    // Received, not-yet-assigned alarms.
    private List<AlarmInfo> mAlarmListReceived;
    // NOTE(review): never assigned anywhere in this class, so showAlarmList()
    // would NPE if reached; its only callers are the request* methods that
    // are themselves only referenced from commented-out code in onResume().
    // Confirm before re-enabling that path.
    private ListView mListView;
    // Currently selected bottom tab: 0 = received, 1 = assigned, 2 = history.
    private int mCurrentPage = 0;
    // Bottom tab icons and labels, re-tinted in updateBottomStyle().
    private ImageView mIvAlarm, mIvAssigned, mIvHistory;
    private TextView mTvAlarm, mTvAssigned, mTvHistory;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        // TODO Auto-generated method stub
        super.onCreate(savedInstanceState);
        //setContentView(R.layout.activity_alarm_list);
        setContentView(R.layout.activity_alarm_list_new);
        initTitleBar();
    }
    /**
     * Initialize the title bar.
     */
    private void initTitleBar() {
        initTitleBar(getString(R.string.title_alarm_list), R.id.title_alarm_list,
                R.drawable.back_normal, backClickListener);
    }
    @Override
    public void onResume() {
        super.onResume();
        // Rebuild the view on every return to the foreground so the current
        // tab's fragment reloads its data.
        initView();
        // if (0 == mCurrentPage) {
        // requestUnassignedAlarmList();
        // } else if (1 == mCurrentPage) {
        // requestUnfinishedAlarmList();
        // } else if (2 == mCurrentPage) {
        // requestFinishedAlarmList();
        // }
    }
    /** Show the map fragment with alarms that are not yet assigned. */
    public void loadAlarmUnAssigned() {
        AlarmMapFragment fragment = AlarmMapFragment.newInstance(this, getConfig());
        FragmentManager fm = getFragmentManager();
        FragmentTransaction transaction = fm.beginTransaction();
        transaction.replace(R.id.ll_content, fragment);
        transaction.commit();
    }
    /** Show the list fragment with alarms assigned to this worker. */
    public void loadAlarmAssigned() {
        AlarmListFragment fragment = AlarmListFragment.newInstance(this, getConfig(), AlarmListFragment.TYPE_ASSIGNED);
        FragmentManager fm = getFragmentManager();
        FragmentTransaction transaction = fm.beginTransaction();
        transaction.replace(R.id.ll_content, fragment);
        transaction.commit();
    }
    /** Show the list fragment with finished (historical) alarms. */
    public void loadAlarmHistory() {
        AlarmListFragment fragment = AlarmListFragment.newInstance(this, getConfig(), AlarmListFragment.TYPE_HISTORY);
        FragmentManager fm = getFragmentManager();
        FragmentTransaction transaction = fm.beginTransaction();
        transaction.replace(R.id.ll_content, fragment);
        transaction.commit();
    }
    /**
     * Initialize the view: honor a "newCode" extra that pre-selects a tab
     * and hides the tab bar, wire the three bottom tabs, and load the
     * fragment for the current tab.
     */
    private void initView() {
        // Agreed values with the caller: 0 and 1
        if(getIntent().hasExtra("newCode"))
        {
            mCurrentPage = getIntent().getIntExtra("newCode",0);
            findViewById(R.id.ll_bottom).setVisibility(View.GONE);
            if(mCurrentPage == 0)
                initTitleBar("报警处置", R.id.title_alarm_list,
                        R.drawable.back_normal, backClickListener);
            else
                initTitleBar("接警汇总", R.id.title_alarm_list,
                        R.drawable.back_normal, backClickListener);
        }
        // Received alarms tab
        findViewById(R.id.ll_alarm).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (0 == mCurrentPage) {
                    return;
                }
                mCurrentPage = 0;
                loadAlarmUnAssigned();
                updateBottomStyle();
            }
        });
        // Already-assigned tasks tab
        findViewById(R.id.ll_assigned).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (1 == mCurrentPage) {
                    return;
                }
                mCurrentPage = 1;
                loadAlarmAssigned();
                updateBottomStyle();
            }
        });
        // Finished tasks tab
        findViewById(R.id.ll_history).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (2 == mCurrentPage) {
                    return;
                }
                mCurrentPage = 2;
                loadAlarmHistory();
                updateBottomStyle();
            }
        });
        if (0 == mCurrentPage) {
            loadAlarmUnAssigned();
        } else if (1 == mCurrentPage) {
            loadAlarmAssigned();
        } else if (2 == mCurrentPage) {
            loadAlarmHistory();
        }
        mIvAlarm = (ImageView) findViewById(R.id.iv_alarm);
        mIvAssigned = (ImageView) findViewById(R.id.iv_assigned);
        mIvHistory = (ImageView) findViewById(R.id.iv_history);
        mTvAlarm = (TextView) findViewById(R.id.tv_alarm);
        mTvAssigned = (TextView) findViewById(R.id.tv_assigned);
        mTvHistory = (TextView) findViewById(R.id.tv_history);
        updateBottomStyle();
    }
    /**
     * Display the alarm info list for the current tab.
     */
    private void showAlarmList() {
        if (0 == mCurrentPage) {
            mListView.setAdapter(new MyAdapter(this, mAlarmListReceived));
        } else if (1 == mCurrentPage) {
            mListView.setAdapter(new MyAdapter(this, mAlarmListInProcess));
        } else if (2 == mCurrentPage) {
            mListView.setAdapter(new MyAdapter(this, mAlarmListFinished));
        }
        setListener(mListView);
    }
    /**
     * Set the item-click listener on the list: routes to the screen that
     * matches the alarm's per-worker state, unless the alarm was cancelled.
     *
     * @param listView list to attach the listener to
     */
    private void setListener(ListView listView) {
        listView.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view,
                                    int position, long id) {
                // TODO Auto-generated method stub
                final AlarmInfo alarmInfo = (AlarmInfo) view.getTag();
                if (!StringUtils.isEmpty(alarmInfo.getIsMisinformation())
                        && alarmInfo.getIsMisinformation().equals("1")) {
                    // Alarm was cancelled (false alarm) — nothing to open.
                    Log.i("zhenhao", "alarm canceled!");
                } else {
                    if (alarmInfo.getUserState().equals(Constant.WORKER_STATE_RECEIVED)) { // alarm delivered to the worker
                        Intent intent = new Intent(AlarmListActivity.this, WorkerActivity.class);
                        intent.putExtra("alarm_id", alarmInfo.getId());
                        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                        intent.setAction(Constant.ACTION_ALARM_RECEIVED);
                        // Record the jump origin so the title shows a back button
                        intent.putExtra("from", TAG);
                        startActivity(intent);
                    } else if (alarmInfo.getUserState().equals(Constant.WORKER_STATE_START)) { // worker has departed
                        Intent intent = new Intent(AlarmListActivity.this, WorkerActivity.class);
                        intent.putExtra("alarm_id", alarmInfo.getId());
                        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                        intent.setAction(Constant.ACTION_ALARM_ASSIGNED);
                        // Record the jump origin, used for the title display
                        intent.putExtra("from", TAG);
                        startActivity(intent);
                    } else if (alarmInfo.getUserState().equals(Constant.WORKER_STATE_ARRIVED)) { // worker has arrived
                        Intent intent = new Intent(AlarmListActivity.this, RescuProcessActivity.class);
                        intent.putExtra("alarm_id", alarmInfo.getId());
                        // Record the jump origin, used for the title display
                        intent.putExtra("from", TAG);
                        startActivity(intent);
                    }
                }
            }
        });
    }
    /**
     * Build an alarm-list request for the given scope, carrying the current
     * user's id and access token.
     *
     * @param scope "unfinished", "finished", or "" for the unassigned query
     * @return the populated request bean
     */
    private RequestBean getAlarmListRequest(String scope) {
        AlarmListRequest request = new AlarmListRequest();
        AlarmListRequest.AlarmListReqBody body = request.new AlarmListReqBody();
        RequestHead head = new RequestHead();
        head.setUserId(getConfig().getUserId());
        head.setAccessToken(getConfig().getToken());
        body.setScope(scope);
        request.setHead(head);
        request.setBody(body);
        return request;
    }
    /**
     * Request the list of unfinished alarms.
     */
    private void requestUnfinishedAlarmList() {
        NetTask netTask = new NetTask(getConfig().getServer() + NetConstant.URL_WORKER_ALARM_LIST,
                getAlarmListRequest("unfinished")) {
            @Override
            protected void onResponse(NetTask task, String result) {
                // TODO Auto-generated method stub
                AlarmListResponse response = AlarmListResponse.getAlarmListResponse(result);
                mAlarmListInProcess = response.getBody();
                showAlarmList();
            }
        };
        addTask(netTask);
    }
    /** Request the list of finished alarms. */
    private void requestFinishedAlarmList() {
        NetTask netTask = new NetTask(getConfig().getServer() + NetConstant.URL_WORKER_ALARM_LIST,
                getAlarmListRequest("finished")) {
            @Override
            protected void onResponse(NetTask task, String result) {
                // TODO Auto-generated method stub
                AlarmListResponse response = AlarmListResponse.getAlarmListResponse(result);
                // if (response.getBody() != null) {
                // filterList(response.getBody());
                // }
                mAlarmListFinished = response.getBody();
                showAlarmList();
            }
        };
        addTask(netTask);
    }
    /**
     * ListView adapter rendering one row per alarm: index, project name,
     * alarm time and a colored state label.
     *
     * @author chang
     */
    public class MyAdapter extends BaseAdapter {
        private Context mContext;
        private List<AlarmInfo> mAlarmInfoList;
        public MyAdapter(Context context, List<AlarmInfo> alarmInfoList) {
            mContext = context;
            mAlarmInfoList = alarmInfoList;
        }
        @Override
        public int getCount() {
            // TODO Auto-generated method stub
            return mAlarmInfoList.size();
        }
        @Override
        public Object getItem(int position) {
            // TODO Auto-generated method stub
            return mAlarmInfoList.get(position);
        }
        @Override
        public long getItemId(int position) {
            // TODO Auto-generated method stub
            return position;
        }
        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            // TODO Auto-generated method stub
            if (null == convertView) {
                convertView = View.inflate(mContext, R.layout.layout_alarm_item, null);
            }
            TextView tvIndex = (TextView) convertView.findViewById(R.id.tv_index);
            TextView tvProject = (TextView) convertView.findViewById(R.id.tv_project);
            TextView tvDate = (TextView) convertView.findViewById(R.id.tv_date);
            TextView tvState = (TextView) convertView.findViewById(R.id.tv_state);
            AlarmInfo alarmInfo = mAlarmInfoList.get(position);
            tvIndex.setText("" + (position + 1));
            tvIndex.setTag(position);
            if (0 == mCurrentPage) {
                tvProject.setText(mAlarmInfoList.get(position).getCommunityInfo().getName());
                tvDate.setText(mAlarmInfoList.get(position).getAlarmTime());
                // NOTE(review): mutates the model during rendering, stamping
                // received alarms with a sentinel "-1" state on every bind —
                // confirm this is intentional.
                mAlarmInfoList.get(position).setUserState("-1");
            } else {
                tvProject.setText(mAlarmInfoList.get(position).getCommunityInfo().getName());
                tvDate.setText(mAlarmInfoList.get(position).getAlarmTime());
            }
            if (!StringUtils.isEmpty(alarmInfo.getIsMisinformation()) && alarmInfo.getIsMisinformation().equals("1")) {
                tvState.setText("已撤消");
            } else {
                tvState.setText(getStringByState(mAlarmInfoList.get(position).getUserState()));
            }
            setIndexColor(tvIndex);
            setStateColor(mAlarmInfoList.get(position).getState(), tvState);
            convertView.setTag(mAlarmInfoList.get(position));
            return convertView;
        }
    }
    /**
     * Request tasks that have not been assigned yet.
     */
    private void requestUnassignedAlarmList() {
        Config config = getConfig();
        String server = config.getServer();
        NetTask task = new NetTask(server + NetConstant.URL_ALARM_UNASSIGNED, getAlarmListRequest("")) {
            @Override
            protected void onResponse(NetTask task, String result) {
                AlarmListResponse response = AlarmListResponse.getAlarmListResponse(result);
                mAlarmListReceived = response.getBody();
                // Filter out alarms that have exceeded the configured timeout
                // int waitSecond = getConfig().getAlarmWaitTime();
                // Iterator<AlarmInfo> iterator = mAlarmListReceived.iterator();
                // long currentMillions = new Date().getTime();
                //
                //
                // while (iterator.hasNext()) {
                // AlarmInfo info = iterator.next();
                // long alarmMillions = com.honyum.elevatorMan.utils.Utils.stringToMillions(info.getAlarmTime());
                // long interval = currentMillions - alarmMillions;
                //
                // if (interval > waitSecond * 1000 * 1.5) {
                // iterator.remove();
                // }
                // }
                showAlarmList();
            }
        };
        addTask(task);
    }
    /**
     * Update the bottom tab styling: highlight the selected tab's icon and
     * label, grey out the other two.
     */
    private void updateBottomStyle() {
        if (0 == mCurrentPage) {
            mIvAlarm.setImageResource(R.drawable.alarm_pressed);
            mTvAlarm.setTextColor(getResources().getColor(R.color.color_alarm_bottom_text));
            mIvAssigned.setImageResource(R.drawable.assigned_normal);
            mTvAssigned.setTextColor(getResources().getColor(R.color.grey));
            mIvHistory.setImageResource(R.drawable.history_normal);
            mTvHistory.setTextColor(getResources().getColor(R.color.grey));
        } else if (1 == mCurrentPage) {
            mIvAlarm.setImageResource(R.drawable.alarm_normal);
            mTvAlarm.setTextColor(getResources().getColor(R.color.grey));
            mIvAssigned.setImageResource(R.drawable.assigned_pressed);
            mTvAssigned.setTextColor(getResources().getColor(R.color.color_alarm_bottom_text));
            mIvHistory.setImageResource(R.drawable.history_normal);
            mTvHistory.setTextColor(getResources().getColor(R.color.grey));
        } else if (2 == mCurrentPage) {
            mIvAlarm.setImageResource(R.drawable.alarm_normal);
            mTvAlarm.setTextColor(getResources().getColor(R.color.grey));
            mIvAssigned.setImageResource(R.drawable.assigned_normal);
            mTvAssigned.setTextColor(getResources().getColor(R.color.grey));
            mIvHistory.setImageResource(R.drawable.history_pressed);
            mTvHistory.setTextColor(getResources().getColor(R.color.color_alarm_bottom_text));
        }
    }
}
#!/usr/bin/env bash
# Archive the current run's plots and result files into the directory named
# by the first argument.
#
# Usage: ./archive.sh <output-dir>
#
# Fixes over the original: every use of $1 is quoted so destination paths
# containing spaces work; mkdir -p tolerates re-runs into an existing
# directory; rm -f does not error when no cached fits exist.

if [ -z "${1:-}" ]; then
    echo "usage: $0 <output-dir>" >&2
    exit 1
fi

# Discard cached model fits; -f keeps this quiet when none exist.
rm -f fits/*.RData

# -p creates parents and succeeds if the directories already exist.
mkdir -p "$1/ACCplots" "$1/ROCplots" "$1/varImpPlots"

mv ACCplots/*.png "$1/ACCplots/"
mv ROCplots/*.png "$1/ROCplots/"
mv varImpPlots/*.png "$1/varImpPlots/"
mv accAll.png "$1/"
mv perfResults.csv "$1/"
mv rocAll.png "$1/"
|
/*
* OwO Bot for Discord
* Copyright (C) 2019 <NAME>
* This software is licensed under Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International
* For more information, see README.md and LICENSE
*/
const request = require('request');
const secret = require('../../tokens/wsserver.json');
exports.fetchInit = function(){
return new Promise( (resolve, reject) => {
request.get(secret.url+"/sharder-info/"+secret.server,function(err,res,body){
if(err)
reject(err);
else if(res.statusCode==200)
resolve(JSON.parse(body));
else
reject(res);
});
});
}
|
import React from 'react';
class Form extends React.Component {
render() {
return (
<form onSubmit={this.props.onSubmit}>
<input type="text" name="textInput" />
<input type="submit" value="Submit" />
</form>
);
}
}
export default Form; |
#!/usr/bin/env bash
# Launch a PetIBM decoupled-IBPM simulation over MPI on an Azure Batch pool:
# derive the process layout from AZ_BATCH_HOST_LIST, select the GPUs to use,
# then run mpirun with output redirected into the simulation directory.
scriptdir="$( cd "$(dirname "$0")" ; pwd -P )"
simudir=$scriptdir
# Load Intel MPI environment (mpirun, mpivars).
source /opt/intel/compilers_and_libraries/linux/mpi/intel64/bin/mpivars.sh
# get number of nodes
IFS=',' read -ra HOSTS <<< "$AZ_BATCH_HOST_LIST"
nodes=${#HOSTS[@]}
echo "Number of nodes: $nodes"
echo "Hosts: $AZ_BATCH_HOST_LIST"
# number of processes per node
ppn=24
echo "Number of processes per node to use: $ppn"
# number of processes
np=$(($nodes * $ppn))
echo "Total number of processes: $np"
# get number of GPUs on machine
ngpusmax=`nvidia-smi -L | wc -l`
echo "Number of GPU devices available on each node: $ngpusmax"
# number of GPUs to use per node
CUDA_VISIBLE_DEVICES=0,1,2,3
IFS=',' read -ra GPUS <<< "$CUDA_VISIBLE_DEVICES"
ngpus=${#GPUS[@]}
echo "Number of GPU devices per node to use: $ngpus ($CUDA_VISIBLE_DEVICES)"
echo "PATH: $PATH"
echo "LD_LIBRARY_PATH: $LD_LIBRARY_PATH"
# Preserve the batch task's logs so far alongside the simulation output.
cp $AZ_BATCH_TASK_DIR/stdout.txt $simudir
cp $AZ_BATCH_TASK_DIR/stderr.txt $simudir
# Run the solver across all nodes; stdout is appended, stderr overwritten.
mpirun -np $np -ppn $ppn -host $AZ_BATCH_HOST_LIST \
    -genv CUDA_VISIBLE_DEVICES=$CUDA_VISIBLE_DEVICES \
    petibm-decoupledibpm \
    -directory $simudir \
    -log_view ascii:$simudir/view.log \
    -malloc_log \
    -memory_view \
    -options_left >> $simudir/stdout.txt 2> $simudir/stderr.txt
#!/bin/bash
#
# Copyright 2018 The Outline Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Mimics the output of "yarn licenses generate-disclaimer"
# for the packages in /third_party.

set -eu

readonly THIRD_PARTY_DIR=$(git rev-parse --show-toplevel)/third_party

# Read find's results line-by-line instead of word-splitting $(find ...),
# and quote every path expansion, so package directories containing spaces
# don't break the loop (ShellCheck SC2044/SC2086).
find "$THIRD_PARTY_DIR" -name METADATA | while read -r i; do
  # The package name is the directory containing the METADATA file.
  PACKAGE_NAME=$(basename "$(dirname "$i")")
  # Extract the quoted homepage value from the METADATA text format.
  HOMEPAGE=$(grep -C 2 HOMEPAGE "$i" | grep value | sed s/value:// | tr -d ' "')
  echo "The following software may be included in this product: $PACKAGE_NAME"
  echo "A copy of the source code may be downloaded from: $HOMEPAGE"
  echo "This software contains the following license and notice below:"
  echo
  # Glob stays unquoted so LICENSE/LICENCE variants all match.
  cat "$(dirname "$i")"/LICEN?E*
  echo
  echo
done
import { Component, NgZone, ViewChild } from '@angular/core';
import { TranslateService } from '@ngx-translate/core';
import { TitleBarComponent } from 'src/app/components/titlebar/titlebar.component';
import { BuiltInIcon, TitleBarIcon, TitleBarIconSlot, TitleBarMenuItem } from 'src/app/components/titlebar/titlebar.types';
import { CredContextImportIdentityIntent } from 'src/app/identity/model/identity.intents';
import { IntentReceiverService } from 'src/app/identity/services/intentreceiver.service';
import { Logger } from 'src/app/logger';
import { DIDPublicationStatus, GlobalPublicationService } from 'src/app/services/global.publication.service';
import { GlobalThemeService } from 'src/app/services/global.theme.service';
import { VerifiableCredential } from '../../../model/verifiablecredential.model';
import { AuthService } from '../../../services/auth.service';
import { DIDService } from '../../../services/did.service';
import { PopupProvider } from '../../../services/popup';
import { UXService } from '../../../services/ux.service';
// Native DID plugin entry point, injected by the cordova/essentials runtime.
declare let didManager: DIDPlugin.DIDManager;
// TODO: Resolve issuer's DID and try to display more user friendly information about the issuer
// TODO: check if the credentials have not already been imported to avoid duplicates? (or update them if something has changed)
// Displayable version of one verifiable-credential subject entry (a credential
// can carry several pieces of information in its subject).
type ImportedCredentialItem = {
  name: string,      // Subject field name, as shown in the UI.
  value: string,     // Stringified field value.
  showData: boolean  // Whether the value row is expanded in the UI.
}
// Displayable version of a whole verifiable credential: one or more
// ImportedCredentialItem entries plus the underlying credential object.
type ImportedCredential = {
  name: string,
  values: ImportedCredentialItem[],
  showData: boolean,
  credential: VerifiableCredential,
}
@Component({
    selector: 'page-credentialcontextimportrequest',
    templateUrl: 'credentialcontextimportrequest.html',
    styleUrls: ['credentialcontextimportrequest.scss']
})
/**
 * Screen shown when an external intent asks the user to import a credential and
 * register it as a "CredentialContext" service entry in his DID document.
 * The user can accept (store + publish) or reject the request.
 */
export class CredentialContextImportRequestPage {
    @ViewChild(TitleBarComponent, { static: false }) titleBar: TitleBarComponent;

    private titleBarIconClickedListener: (icon: TitleBarIcon | TitleBarMenuItem) => void;

    // Intent that opened this screen; carries the credential to import and the service name.
    public receivedIntent: CredContextImportIdentityIntent = null;
    public requestDappIcon: string = null;
    public requestDappName: string = null;
    public requestDappColor = '#565bdb';
    // Set as soon as a response is sent, so ngOnDestroy does not send a second (reject) response.
    private alreadySentIntentResponce = false;
    public accepting = false; // Guards against a double tap on the accept action.
    public popup: HTMLIonPopoverElement = null;
    public wrongTargetDID = false; // Whether the credential we are trying to import is for us or not.

    displayableCredential: ImportedCredential = null; // Displayable reworked material
    preliminaryChecksCompleted = false;

    constructor(
        private zone: NgZone,
        public didService: DIDService,
        private popupProvider: PopupProvider,
        private appServices: UXService,
        private translate: TranslateService,
        public theme: GlobalThemeService,
        private intentService: IntentReceiverService,
        private globalPublicationService: GlobalPublicationService
    ) {
    }

    // Sets up the title bar, then fetches the intent and checks/prepares the credential.
    ionViewWillEnter() {
        this.titleBar.setTitle("Credential context import");
        this.titleBar.setNavigationMode(null);
        this.titleBar.setIcon(TitleBarIconSlot.OUTER_LEFT, { key: null, iconPath: BuiltInIcon.CLOSE }); // Replace ela logo with close icon
        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        this.titleBar.addOnItemClickedListener(this.titleBarIconClickedListener = async (icon) => {
            // Close icon
            await this.rejectRequest();
            void this.titleBar.globalNav.exitCurrentContext();
        });

        void this.zone.run(async () => {
            this.receivedIntent = this.intentService.getReceivedIntent();
            await this.runPreliminaryChecks();
            await this.organizeImportedCredential();
        });
    }

    ionViewWillLeave() {
        this.titleBar.removeOnItemClickedListener(this.titleBarIconClickedListener);
    }

    // If the user leaves without answering, send an (unanswered) rejection response.
    ngOnDestroy() {
        if (!this.alreadySentIntentResponce) {
            void this.rejectRequest(false);
        }
    }

    /**
     * Check a few things after entering the screen. Mostly, imported credentials content quality.
     * On failure, preliminaryChecksCompleted stays false and the UI does not offer the import.
     */
    async runPreliminaryChecks() {
        // Make sure that we received the credential
        if (!this.receivedIntent.params.credential) {
            await this.popupProvider.ionicAlert("Error", "Sorry, there is actually no credential provided in the given information", "Close");
            return;
        }

        // Check credentials content
        // TODO

        Logger.log('Identity', 'Received credential to be imported:', this.receivedIntent.params.credential);

        // Auto-select the targeted DID. Show an error if user doesn't have a DID targeted by this issuance.
        let targetDIDString = this.receivedIntent.params.credential.credentialSubject.id;
        let activeDIDString = this.didService.getActiveDid().getDIDString();
        if (targetDIDString != activeDIDString) {
            this.wrongTargetDID = true;
            return;
        }

        await this.didService.loadGlobalIdentity();

        this.preliminaryChecksCompleted = true; // Checks completed and everything is all right.
    }

    /**
     * From the raw credential provided by the caller, we create our internal model
     * ready for UI.
     */
    organizeImportedCredential() {
        let credential = this.receivedIntent.params.credential;
        // Rebuild a plugin credential object from the raw JSON received in the intent.
        let importedCredential: DIDPlugin.VerifiableCredential = didManager.VerifiableCredentialBuilder.fromJson(JSON.stringify(credential));
        Logger.log('Identity', "Received imported credential:", importedCredential);

        let credentialSubject = importedCredential.getSubject();

        // Generate a displayable version of each entry found in the credential subject
        let displayableEntries: ImportedCredentialItem[] = [];
        for (let subjectEntryKey of Object.keys(credentialSubject)) {
            let subjectEntryValue = credentialSubject[subjectEntryKey];

            if (subjectEntryKey == "id") // Don't display the special subject id entry
                continue;

            let displayableEntry: ImportedCredentialItem = {
                name: subjectEntryKey,
                value: subjectEntryValue,
                showData: false
            }

            displayableEntries.push(displayableEntry);
        }

        this.displayableCredential = {
            name: this.didService.getUserFriendlyBasicProfileKeyName(importedCredential.getFragment()),
            values: displayableEntries,
            showData: false,
            credential: new VerifiableCredential(importedCredential),
        };
    }

    getDisplayableIssuer() {
        return this.receivedIntent.params.credential.issuer;
    }

    // Objects are stringified for display; plain values pass through untouched.
    getDisplayableEntryValue(value: any) {
        if (value instanceof Object) {
            return JSON.stringify(value);
        }
        return value;
    }

    /**
     * Accept flow: after password check, store the credential locally, add it to the
     * DID document, add/replace the CredentialContext service entry referencing it,
     * then publish the document.
     */
    acceptRequest() {
        if (this.accepting) // Prevent double action
            return;
        this.accepting = true;

        // Save the credential to user's DID.
        // eslint-disable-next-line @typescript-eslint/no-floating-promises
        AuthService.instance.checkPasswordThenExecute(async () => {
            Logger.log('Identity', "CredContextImportRequest - accepting credential context import request: ", this.displayableCredential.credential, this.receivedIntent.params.serviceName);

            let password = AuthService.instance.getCurrentUserPassword();

            // Save the credential locally
            Logger.log('Identity', "CredContextImportRequest - storing the credential locally");
            await this.didService.getActiveDid().upsertRawCredential(this.displayableCredential.credential, true);

            // Also add the credential to the DID document
            Logger.log('Identity', "CredContextImportRequest - storing the credential to the DID document");
            await this.didService.getActiveDid().getLocalDIDDocument().updateOrAddCredential(this.displayableCredential.credential.pluginVerifiableCredential, password);

            let importedCredentialId = this.displayableCredential.credential.pluginVerifiableCredential.getId();

            // Add or update the service entry with a reference to the new credential
            // Expected service format:
            // "service": [
            //     {
            //       "id": "did:elastos:insTmxdDDuS9wHHfeYD1h5C2onEHh3D8Vq#DiplomaCredential",
            //       "type": "CredentialContext",
            //       "serviceEndpoint": "did:elastos:insTmxdDDuS9wHHfeYD1h5C2onEHh3D8Vq#1234567890"
            //     }
            //   ],
            let shortServiceName = this.receivedIntent.params.serviceName; // Eg: "DiplomaCredential"
            let longServiceName = `${this.didService.getActiveDid().getDIDString()}#${shortServiceName}`;
            let service: DIDPlugin.Service = await this.didService.getActiveDid().getLocalDIDDocument().getService(longServiceName);
            if (service) {
                // Services cannot be updated in place: remove then re-add.
                Logger.log("identity", `The ${longServiceName} service already exists, deleting it to update it`);
                await this.didService.getActiveDid().getLocalDIDDocument().removeService(longServiceName, password);
            }
            service = didManager.ServiceBuilder.createService(longServiceName, 'CredentialContext', importedCredentialId);
            await this.didService.getActiveDid().getLocalDIDDocument().addService(service, password);

            void this.publishAndFinalize(importedCredentialId);
        }, () => {
            // Cancelled
            this.accepting = false;
        });
    }

    /**
     * Publishes the DID document and finalizes the request once publication
     * either succeeds or fails (the local import is kept in both cases).
     */
    private async publishAndFinalize(importedCredentialId: string) {
        let publicationStatus = this.globalPublicationService.publicationStatus.subscribe((status) => {
            Logger.log("identity", "(import credential context) DID publication status update for DID", status);
            if (status.status == DIDPublicationStatus.PUBLISHED_AND_CONFIRMED) {
                Logger.log("identity", "(import credential context) DID publication complete");
                publicationStatus.unsubscribe();
                this.finalizeRequest(importedCredentialId);
            }
            else if (status.status == DIDPublicationStatus.FAILED_TO_PUBLISH) {
                Logger.warn("identity", "(import credential context) DID publication failure");
                publicationStatus.unsubscribe();
                // Publication failed but still, we return the imported credential list because
                // they were at least imported locally, we are not going to revert this.
                this.finalizeRequest(importedCredentialId);
            }
        });

        await this.didService.getActiveDid().getLocalDIDDocument().publish(AuthService.instance.getCurrentUserPassword(), this.receivedIntent.intentId);
    }

    // Shows a success alert, then responds to the intent with the imported credential id.
    private finalizeRequest(importedCredentialId: string) {
        void this.popupProvider.ionicAlert(this.translate.instant('identity.credimport-success-title'), this.translate.instant('identity.credimport-success'), this.translate.instant('identity.credimport-success-done')).then(async () => {
            Logger.log('Identity', "Sending credimport intent response for intent id " + this.receivedIntent.intentId)
            await this.sendIntentResponse({
                importedcredential: importedCredentialId
            }, this.receivedIntent.intentId);
        })
    }

    // Responds with an empty result, meaning the user rejected the import.
    async rejectRequest(navigateBack = true) {
        await this.sendIntentResponse({}, this.receivedIntent.intentId, navigateBack);
    }

    // Central response path; marks the response as sent to avoid duplicates from ngOnDestroy.
    private async sendIntentResponse(result, intentId, navigateBack = true) {
        this.intentService.clearOnGoingIntentId();
        this.alreadySentIntentResponce = true;
        await this.appServices.sendIntentResponse(result, intentId, navigateBack);
    }

    getDappIcon() {
        return 'assets/identity/icon/elastos-icon.svg';
    }
}
|
/*
* Copyright 2019-2021 Expedia, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const path = require('path');
const avsc = require('avsc');
const fs = require('fs').promises;
const { javro } = require('../../../src/javro');
// Round-trips a sample JSON document through the Avro schema generated by
// javro and checks the decoded result equals the original document.
test('serialises and deserialises JSON correctly', async () => {
  const { avsc: avroSchema } = await javro({
    jsonSchemaFile: path.resolve(__dirname, './sample_schema.json'),
    namespace: 'test.jsonschema.to.avro.namespace',
  });
  const type = avsc.Type.forSchema(avroSchema);

  const sampleJson = await fs.readFile(path.resolve(__dirname, './sample_msg.json'), { encoding: 'UTF-8' });
  const sample = JSON.parse(sampleJson);

  // Encode then decode through the Avro binary representation, and strip the
  // avsc record wrapper class so we compare plain object against plain object.
  const decoded = type.fromBuffer(type.toBuffer(sample));
  const decodedAsPlainObject = JSON.parse(JSON.stringify(decoded));
  expect(decodedAsPlainObject).toStrictEqual(sample);
});
|
import { createAction } from 'redux-act';
// Redux actions describing the lifecycle of a preview compilation:
// in progress, succeeded, failed, or invalidated by a new source edit.
export const Compiling = createAction('Preview - Compiling to bytecode');
export const Compiled = createAction('Preview - Compile successful!');
export const CompileErrors = createAction('Preview - Compile error');
export const Updated = createAction('Preview - Source is updated so not validated');
|
<gh_stars>0
import { AnnotatedColor } from '@/code/AnnotatedColor';
import Color from 'ts-color-class';
import SunScreenState from './sunscreenstate.js';
// Initial state for the sunscreen feature: selectable lamp and sky colour
// presets. Each AnnotatedColor takes (id, display label, RGB colour, icon name
// — presumably a Material icon identifier; TODO confirm).
const state: SunScreenState = {
    lampTypes: [
        new AnnotatedColor(
            'candle',
            'Candle',
            new Color([255, 147, 41]),
            'cake'
        ),
        new AnnotatedColor(
            'fourtyWattTungsten',
            '40W Incandescent',
            new Color([255, 197, 143]),
            'light'
        ),
        new AnnotatedColor(
            'hundredWattTungsten',
            '100W Incandescent',
            new Color([255, 214, 170]),
            'tungsten'
        ),
        new AnnotatedColor(
            'halogen',
            'Halogen',
            new Color([255, 241, 224]),
            'online_prediction'
        ),
        new AnnotatedColor(
            'warmFluorescent',
            'Warm Fluorescent',
            new Color([255, 244, 229]),
            'wb_iridescent'
        ),
        new AnnotatedColor(
            'standardFluorescent',
            'Standard Fluorescent',
            new Color([244, 255, 250]),
            'wb_iridescent'
        ),
        new AnnotatedColor(
            'coolWhiteFluorescent',
            'Cool White Fluorescent',
            new Color([212, 235, 255]),
            'wb_iridescent'
        ),
        new AnnotatedColor(
            'fullSpectrumFluorescent',
            'Full Spectrum Fluorescent',
            new Color([255, 244, 242]),
            'wb_iridescent'
        ),
    ],
    // Must match one of the lampTypes ids above.
    selectedLampTypeId: 'hundredWattTungsten',
    skyTypes: [
        new AnnotatedColor(
            'highNoonSun',
            'High Noon Sun',
            new Color([255, 255, 251]),
            'light_mode'
        ),
        new AnnotatedColor(
            'overcastSky',
            'Overcast Sky',
            new Color([201, 226, 255]),
            'wb_cloudy'
        ),
        new AnnotatedColor(
            'clearBlueSky',
            'Clear Blue Sky',
            new Color([64, 156, 255]),
            'landscape'
        ),
    ],
    // Must match one of the skyTypes ids above.
    selectedSkyTypeId: 'overcastSky',
    /** The inflection point of the gradation, in percent */
    gradationInflectionPoint: 50,
    /** The inflection width of the gradation, in percent */
    gradationInflectionWidth: 3,
    neverShowSplashAgain: false,
};

export default state;
|
arr = [12, 15, 8, 10]
# Drop every multiple of 3 while keeping the same list object alive
# (in-place slice assignment instead of rebinding the name).
arr[:] = filter(lambda value: value % 3 != 0, arr)
import sys
def start_dev_environment():
    """Start the local development environment (stub: only prints a message)."""
    print("Starting the development environment...")

def migrate_database():
    """Run database migrations (stub: only prints a message)."""
    print("Migrating the database...")

def load_card_data():
    """Load seed data for poetry cards (stub: only prints a message)."""
    print("Loading data for poetry cards...")

def load_poet_data():
    """Load seed data for poets (stub: only prints a message)."""
    print("Loading data for poets...")
def main():
    """Dispatch the CLI command given as the first argument in sys.argv.

    Prints a usage line when no command is given, and an error line for an
    unknown command; otherwise runs the matching handler.
    """
    if len(sys.argv) < 2:
        print("Usage: python dev_tool.py <command>")
        return
    command = sys.argv[1]
    # Dispatch table instead of an if/elif chain: adding a command is one line.
    handlers = {
        "start": start_dev_environment,
        "migrate": migrate_database,
        "load_card_data": load_card_data,
        "load_poet_data": load_poet_data,
    }
    handler = handlers.get(command)
    if handler is None:
        print("Invalid command. Supported commands: start, migrate, load_card_data, load_poet_data")
        return
    handler()

if __name__ == "__main__":
    main()
import statistics
def calculate_average_without_outliers(numbers, threshold):
    """Return the mean of ``numbers`` after removing outliers.

    A value is an outlier when the absolute value of its z-score
    ``(value - mean) / stdev`` exceeds ``threshold``.

    Args:
        numbers: Non-empty sequence of numbers.
        threshold: Maximum absolute z-score a value may have to be kept.

    Returns:
        Mean of the non-outlier values.

    Raises:
        statistics.StatisticsError: If ``numbers`` is empty, or if every
            value is rejected as an outlier.
    """
    mean = statistics.mean(numbers)
    # stdev() needs at least two data points; treat one point as zero spread.
    std_dev = statistics.stdev(numbers) if len(numbers) > 1 else 0
    if std_dev == 0:
        # Bug fix: the previous code divided by std_dev unconditionally and
        # crashed with ZeroDivisionError here. Zero spread (single value, or
        # all values identical) means nothing can be an outlier.
        return mean
    filtered_numbers = [num for num in numbers if abs((num - mean) / std_dev) <= threshold]
    return statistics.mean(filtered_numbers)
<reponame>konojunya/goroutine-sample
package service
import "testing"
// TestScraping calls GetUserFromTwitter with a fixed screen name.
// NOTE(review): no assertions are made and the call presumably hits the live
// service — the test only verifies the call completes without panicking;
// confirm this is intentional.
func TestScraping(t *testing.T) {
	GetUserFromTwitter("konojunya")
}
|
#!/bin/bash
# Utilities for both OSX and Docker Linux
# Python should be on the PATH
# Only source common_utils once
if [ -n "$COMMON_UTILS_SOURCED" ]; then
    return
fi
COMMON_UTILS_SOURCED=1
# Turn on exit-if-error
set -e

# Directory holding this script; used to find bundled archives and helpers.
MULTIBUILD_DIR=$(dirname "${BASH_SOURCE[0]}")
DOWNLOADS_SDIR=downloads
PYPY_URL=https://bitbucket.org/pypy/pypy/downloads
GET_PIP_URL=https://bootstrap.pypa.io/get-pip.py
# IS_OSX is used throughout to branch between macOS and Linux behavior.
if [ $(uname) == "Darwin" ]; then IS_OSX=1; fi

# Work round bug in travis xcode image described at
# https://github.com/direnv/direnv/issues/210
shell_session_update() { :; }

# Workaround for https://github.com/travis-ci/travis-ci/issues/8703
# suggested by Thomas K at
# https://github.com/travis-ci/travis-ci/issues/8703#issuecomment-347881274
unset -f cd
unset -f pushd
unset -f popd
# Emit periodic progress output so CI does not kill a long, quiet build step.
function start_spinner {
    if [ -n "$MB_SPINNER_PID" ]; then
        return
    fi

    >&2 echo "Building libraries..."
    # Start a process that runs as a keep-alive
    # to avoid travis quitting if there is no output
    (while true; do
        sleep 60
        >&2 echo "Still building..."
    done) &
    MB_SPINNER_PID=$!
    disown
}

# Kill the keep-alive process started by start_spinner, if one is running.
function stop_spinner {
    if [ ! -n "$MB_SPINNER_PID" ]; then
        return
    fi

    kill $MB_SPINNER_PID
    unset MB_SPINNER_PID

    >&2 echo "Building libraries finished."
}
# Portable path helpers implemented via Python's os.path module.
function abspath {
    # Absolute (but not symlink-resolved) form of the input path.
    python -c "import os.path; print(os.path.abspath('$1'))"
}

function relpath {
    # Path of first input relative to second (or $PWD if not specified)
    python -c "import os.path; print(os.path.relpath('$1','${2:-$PWD}'))"
}

function realpath {
    # Canonical path with symlinks resolved.
    python -c "import os; print(os.path.realpath('$1'))"
}
function lex_ver {
    # Echoes dot-separated version string padded with zeros, so versions
    # compare correctly as plain strings.
    # Thus:
    #    3.2.1 -> 003002001
    #    3     -> 003000000
    awk -F "." '{printf "%03d%03d%03d", $1, $2, $3}' <<< "$1"
}

function unlex_ver {
    # Reverses lex_ver to produce major.minor.micro
    # Thus:
    #    003002001 -> 3.2.1
    #    003000000 -> 3.0.0
    local padded=$1
    # Force base-10 so leading zeros are not read as octal.
    printf '%d.%d.%d\n' "$((10#${padded:0:3}))" "$((10#${padded:3:3}))" "$((10#${padded:6:3}))"
}

function strip_ver_suffix {
    # Round-trip through lex_ver to drop any non-numeric suffix
    # (e.g. 3.2.1rc1 -> 3.2.1).
    unlex_ver "$(lex_ver "$1")"
}
function is_function {
    # Echo "true" if input argument string is a function
    # Allow errors during "set -e" blocks.
    (set +e; $(declare -Ff "$1" > /dev/null) && echo true)
}

function gh_clone {
    # Clone a GitHub repository given as "owner/repo".
    git clone https://github.com/$1
}

# gh-clone was renamed to gh_clone, so we have this alias for
# backwards compatibility.
# NOTE(review): aliases are inert in non-interactive shells unless
# expand_aliases is set — confirm callers actually rely on this alias.
alias gh-clone=gh_clone

function set_opts {
    # Set options from input options string (in $- format).
    local opts=$1
    local chars="exhmBH"
    # For each option letter, enable it if present in $opts, disable otherwise.
    for (( i=0; i<${#chars}; i++ )); do
        char=${chars:$i:1}
        [ -n "${opts//[^${char}]/}" ] && set -$char || set +$char
    done
}
function suppress {
    # Run a command, show output only if return code not 0.
    # Takes into account state of -e option.
    # Compare
    # https://unix.stackexchange.com/questions/256120/how-can-i-suppress-output-only-if-the-command-succeeds#256122
    # Set -e stuff agonized over in
    # https://unix.stackexchange.com/questions/296526/set-e-in-a-subshell
    local tmp=$(mktemp tmp.XXXXXXXXX) || return
    local errexit_set
    echo "Running $@"
    # Remember whether errexit was active, then turn it off so a failing
    # command reaches the status check below instead of aborting the shell.
    if [[ $- = *e* ]]; then errexit_set=true; fi
    set +e
    ( if [[ -n $errexit_set ]]; then set -e; fi; "$@"  > "$tmp" 2>&1 ) ; ret=$?
    [ "$ret" -eq 0 ] || cat "$tmp"
    rm -f "$tmp"
    # Restore errexit for the caller if it was set on entry.
    if [[ -n $errexit_set ]]; then set -e; fi
    return "$ret"
}
function rm_mkdir {
    # Remove directory if present, then (re)create it empty.
    local path=$1
    [ -n "$path" ] || { echo "Need not-empty path"; exit 1; }
    if [ -d "$path" ]; then
        rm -rf "$path"
    fi
    mkdir "$path"
}
function untar {
    # Unpack an archive into the current directory, picking the tool from the
    # file extension. Exits with an error for unknown extensions.
    local in_fname=$1
    if [ -z "$in_fname" ];then echo "in_fname not defined"; exit 1; fi
    local extension=${in_fname##*.}
    case $extension in
        tar) tar -xf "$in_fname" ;;
        gz|tgz) tar -zxf "$in_fname" ;;
        bz2) tar -jxf "$in_fname" ;;
        zip) unzip -qq "$in_fname" ;;
        # Bug fix: tar needs "-" to read the archive from stdin; the previous
        # "| tar -xf" (no operand) failed with a missing-argument error.
        xz) unxz -c "$in_fname" | tar -xf - ;;
        *) echo Did not recognize extension $extension; exit 1 ;;
    esac
}
function install_rsync {
    # Install rsync on Linux if it is missing; macOS already ships with it.
    # NOTE(review): relies on a yum_install helper defined elsewhere — confirm
    # it is in scope wherever this file is sourced.
    if [ -z "$IS_OSX" ]; then
        [[ $(type -P rsync) ]] || yum_install rsync
    fi
}
function fetch_unpack {
    # Fetch input archive name from input URL
    # Parameters
    #    url - URL from which to fetch archive
    #    archive_fname (optional) archive name
    #
    # Echos unpacked directory and file names.
    #
    # If `archive_fname` not specified then use basename from `url`
    # If `archive_fname` already present at download location, use that instead.
    local url=$1
    if [ -z "$url" ];then echo "url not defined"; exit 1; fi
    local archive_fname=${2:-$(basename $url)}
    local arch_sdir="${ARCHIVE_SDIR:-archives}"
    # Make the archive directory in case it doesn't exist
    mkdir -p $arch_sdir
    local out_archive="${arch_sdir}/${archive_fname}"
    # If the archive is not already in the archives directory, get it.
    if [ ! -f "$out_archive" ]; then
        # Source it from multibuild archives if available.
        local our_archive="${MULTIBUILD_DIR}/archives/${archive_fname}"
        if [ -f "$our_archive" ]; then
            ln -s $our_archive $out_archive
        else
            # Otherwise download it.
            curl -L $url > $out_archive
        fi
    fi
    # Unpack archive, refreshing contents, echoing dir and file
    # names.
    rm_mkdir arch_tmp
    install_rsync
    # Unpack in a scratch directory, then rsync the results one level up so a
    # re-fetch refreshes (and deletes stale) previously-extracted files.
    (cd arch_tmp && \
        untar ../$out_archive && \
        ls -1d * &&
        rsync --delete -ah * ..)
}
function clean_code {
    # Check out $build_commit in $repo_dir and scrub all untracked/modified files.
    local repo_dir=${1:-$REPO_DIR}
    local build_commit=${2:-$BUILD_COMMIT}
    [ -z "$repo_dir" ] && echo "repo_dir not defined" && exit 1
    [ -z "$build_commit" ] && echo "build_commit not defined" && exit 1
    # The package $repo_dir may be a submodule. git submodules do not
    # have a .git directory. If $repo_dir is copied around, tools like
    # Versioneer which require that it be a git repository are unable
    # to determine the version.  Give submodule proper git directory
    fill_submodule "$repo_dir"
    (cd $repo_dir \
        && git fetch origin \
        && git checkout $build_commit \
        && git clean -fxd \
        && git reset --hard \
        && git submodule update --init --recursive)
}
function build_wheel_cmd {
    # Builds wheel with named command, puts into $WHEEL_SDIR
    #
    # Parameters:
    #     cmd  (optional, default "pip_wheel_cmd"
    #        Name of command for building wheel
    #     repo_dir (optional, default $REPO_DIR)
    #
    # Depends on
    #     REPO_DIR  (or via input argument)
    #     WHEEL_SDIR  (optional, default "wheelhouse")
    #     BUILD_DEPENDS (optional, default "")
    #     MANYLINUX_URL (optional, default "") (via pip_opts function)
    local cmd=${1:-pip_wheel_cmd}
    local repo_dir=${2:-$REPO_DIR}
    [ -z "$repo_dir" ] && echo "repo_dir not defined" && exit 1
    local wheelhouse=$(abspath ${WHEEL_SDIR:-wheelhouse})
    start_spinner
    # Run the project's optional pre_build hook (defined in config.sh), if any.
    if [ -n "$(is_function "pre_build")" ]; then pre_build; fi
    stop_spinner
    if [ -n "$BUILD_DEPENDS" ]; then
        pip install $(pip_opts) $BUILD_DEPENDS
    fi
    (cd $repo_dir && $cmd $wheelhouse)
    # NOTE(review): repair_wheelhouse is provided by per-platform utilities,
    # not defined in this file.
    repair_wheelhouse $wheelhouse
}
function pip_wheel_cmd {
    # Default wheel build: `pip wheel` on the current directory, no deps.
    local abs_wheelhouse=$1
    pip wheel $(pip_opts) -w $abs_wheelhouse --no-deps .
}

function bdist_wheel_cmd {
    # Builds wheel with bdist_wheel, puts into wheelhouse
    #
    # It may sometimes be useful to use bdist_wheel for the wheel building
    # process.  For example, versioneer has problems with versions which are
    # fixed with bdist_wheel:
    # https://github.com/warner/python-versioneer/issues/121
    local abs_wheelhouse=$1
    python setup.py bdist_wheel
    cp dist/*.whl $abs_wheelhouse
}

function build_pip_wheel {
    # Standard wheel building command with pip wheel
    build_wheel_cmd "pip_wheel_cmd" $@
}

function build_bdist_wheel {
    # Wheel building with bdist_wheel. See bdist_wheel_cmd
    build_wheel_cmd "bdist_wheel_cmd" $@
}

function build_wheel {
    # Set default building method to pip
    build_pip_wheel $@
}
function build_index_wheel {
    # Builds wheel from some index, usually pypi
    #
    # Parameters:
    #     project_spec
    #        requirement to install, e.g. "tornado" or "tornado==4.4.1"
    #     *args
    #        Any other arguments to be passed to pip `install` and `wheel`
    #        commands.
    #
    # Depends on
    #     WHEEL_SDIR  (optional, default "wheelhouse")
    #     BUILD_DEPENDS (optional, default "")
    #     MANYLINUX_URL (optional, default "") (via pip_opts function)
    #
    # You can also override `pip_opts` command to set indices other than pypi
    local project_spec=$1
    [ -z "$project_spec" ] && echo "project_spec not defined" && exit 1
    # Discard first argument to pass remainder to pip
    shift
    local wheelhouse=$(abspath ${WHEEL_SDIR:-wheelhouse})
    start_spinner
    # Run the project's optional pre_build hook (defined in config.sh), if any.
    if [ -n "$(is_function "pre_build")" ]; then pre_build; fi
    stop_spinner
    if [ -n "$BUILD_DEPENDS" ]; then
        pip install $(pip_opts) $@ $BUILD_DEPENDS
    fi
    pip wheel $(pip_opts) $@ -w $wheelhouse --no-deps $project_spec
    repair_wheelhouse $wheelhouse
}

function pip_opts {
    # Extra pip options; adds a local/manylinux wheel index when configured.
    [ -n "$MANYLINUX_URL" ] && echo "--find-links $MANYLINUX_URL"
}
function get_platform {
    # Report platform as given by uname
    python -c 'import platform; print(platform.uname()[4])'
}

# Flag Intel/AMD architectures; several callers branch on IS_X86.
if [ "$(get_platform)" == x86_64 ] || \
   [ "$(get_platform)" == i686 ]; then IS_X86=1; fi

function get_distutils_platform {
    # Report platform as given by distutils get_platform.
    # This is the platform tag that pip will use.
    python -c "import distutils.util; print(distutils.util.get_platform())"
}
function install_wheel {
    # Install test dependencies and built wheel
    #
    # Pass any input flags to pip install steps
    #
    # Depends on:
    #     WHEEL_SDIR  (optional, default "wheelhouse")
    #     TEST_DEPENDS  (optional, default "")
    #     MANYLINUX_URL (optional, default "") (via pip_opts function)
    local wheelhouse=$(abspath ${WHEEL_SDIR:-wheelhouse})
    if [ -n "$TEST_DEPENDS" ]; then
        # Install dependencies one line at a time so each spec resolves alone.
        while read TEST_DEPENDENCY; do
            pip install $(pip_opts) $@ $TEST_DEPENDENCY
        done <<< "$TEST_DEPENDS"
    fi
    pip install packaging
    # Filter the wheelhouse down to wheels this interpreter can install.
    local supported_wheels=$(python $MULTIBUILD_DIR/supported_wheels.py $wheelhouse/*.whl)
    if [ -z "$supported_wheels" ]; then
        echo "ERROR: no supported wheels found"
        exit 1
    fi
    # Install compatible wheel
    pip install $(pip_opts) $@ $supported_wheels
}

function install_run {
    # Depends on function `run_tests` defined in `config.sh`
    install_wheel
    # Run tests from an empty directory so the installed package (not the
    # source tree) is the one imported.
    mkdir tmp_for_test
    (cd tmp_for_test && run_tests)
    rmdir tmp_for_test  2>/dev/null || echo "Cannot remove tmp_for_test"
}
function fill_submodule {
    # Restores .git directory to submodule, if necessary
    # See:
    # https://stackoverflow.com/questions/41776331/is-there-a-way-to-reconstruct-a-git-directory-for-a-submodule
    local repo_dir="$1"
    [ -z "$repo_dir" ] && echo "repo_dir not defined" && exit 1
    local git_loc="$repo_dir/.git"
    # For ordinary submodule, .git is a file.
    [ -d "$git_loc" ] && return
    # Need to recreate .git directory for submodule
    local origin_url=$(cd "$repo_dir" && git config --get remote.origin.url)
    local repo_copy="$repo_dir-$RANDOM"
    # A local clone gets a real .git directory; swap the clone into place and
    # restore the original remote URL afterwards.
    git clone --recursive "$repo_dir" "$repo_copy"
    rm -rf "$repo_dir"
    mv "${repo_copy}" "$repo_dir"
    (cd "$repo_dir" && git remote set-url origin $origin_url)
}
# As of 2020-01-15, the latest versions of PyPy.
# These feed unroll_version / fill_pypy_ver below via indirect expansion.
LATEST_PP_5p0=5.0.1
LATEST_PP_5p1=5.1.1
LATEST_PP_5p3=5.3.1
LATEST_PP_5p4=5.4.1
LATEST_PP_5p6=5.6.0
LATEST_PP_5p7=5.7.1
LATEST_PP_5p8=5.8.0
LATEST_PP_5p9=5.9.0
LATEST_PP_5=$LATEST_PP_5p9
LATEST_PP_6p0=6.0.0
LATEST_PP_6=$LATEST_PP_6p0
LATEST_PP_7p0=7.0.0
LATEST_PP_7p1=7.1.1
LATEST_PP_7p2=7.2.0
LATEST_PP_7p3=7.3.0
LATEST_PP_7=$LATEST_PP_7p3
function unroll_version {
    # Convert major or major.minor format to major.minor.micro using the above
    # values recursively
    # Parameters:
    #   $prefix : a variable-name prefix such as LATEST_PP
    #   $version : major[.minor[.patch]]
    # Looks up ${prefix}_${version} (dots replaced by "p") via indirect
    # expansion and recurses until no such variable exists, e.g.
    #   unroll_version LATEST_PP 5 -> 5.9.0 (via LATEST_PP_5 -> LATEST_PP_5p9)
    local prefix=$1
    local ver=$2
    local latest=${prefix}_${ver//./p}
    if [ -n "${!latest}" ]; then
        echo $(unroll_version ${prefix} ${!latest})
    else
        echo $ver
    fi
}
function install_pypy {
    # Installs pypy.org PyPy
    # Parameter $version
    #     Version given in major or major.minor or major.minor.micro e.g
    #     "3" or "3.7" or "3.7.1".
    # Uses $PLAT to select the download suffix.
    # sets $PYTHON_EXE variable to python executable
    local version=$1
    suffix=linux64
    case "$PLAT" in
        "x86_64") suffix="linux64";;
        "i686") suffix="linux32";;
        "darwin") suffix="osx64";;
        "ppc64le") suffix="ppc64le";;
        # Bug fix: this arm previously matched the misspelled "s30x", so real
        # s390x hosts fell through to the unknown-platform error branch.
        "s390x") suffix="s390x";;
        "aarch64") suffix="aarch64";;
        *) if [ -n "$IS_OSX" ]; then
               suffix="osx64";
           else
               echo unknown platform "$PLAT"; exit 1
           fi;;
    esac
    # Need to convert pypy-7.2 to pypy2.7-v7.2.0 and pypy3.6-7.3 to pypy3.6-v7.3.0
    local prefix=$(get_pypy_build_prefix $version)
    # since prefix is pypy3.6v7.2 or pypy2.7v7.2, grab the 4th (0-index) letter
    local major=${prefix:4:1}
    # get the pypy version 7.2.0
    local py_version=$(fill_pypy_ver $(echo $version | cut -f2 -d-))
    local py_build=$prefix$py_version-$suffix
    local py_zip=$py_build.tar.bz2
    local zip_path=$DOWNLOADS_SDIR/$py_zip
    mkdir -p $DOWNLOADS_SDIR
    wget -nv $PYPY_URL/${py_zip} -P $DOWNLOADS_SDIR
    untar $zip_path
    # bug/feature: pypy package for pypy3 only has bin/pypy3 :(
    if [ "$major" == "3" ] && [ ! -x "$py_build/bin/pypy" ]; then
        ln $py_build/bin/pypy3 $py_build/bin/pypy
    fi
    PYTHON_EXE=$(realpath $py_build/bin/pypy)
    $PYTHON_EXE -mensurepip
    if [ "$major" == "3" ] && [ ! -x "$py_build/bin/pip" ]; then
        ln $py_build/bin/pip3 $py_build/bin/pip
    fi
    PIP_CMD=pip
}
function fill_pypy_ver {
    # Convert major or major.minor format to major.minor.micro
    # Parameters:
    #   $version : major[.minor[.patch]]
    # Resolved through the LATEST_PP_* variables above, e.g.
    #   5 -> 5.9.0
    echo $(unroll_version LATEST_PP $1)
}
function get_pypy_build_prefix {
    # Return the file prefix of a PyPy file
    # Parameters:
    #   $version : pypy version number, for example pypy-7.2 or pypy3.6-7.2
    local version=$1
    # Explicit "pypyX.Y-A.B" form: reuse the stated python version verbatim.
    if [[ $version =~ pypy([0-9]+)\.([0-9]+)-([0-9]+)\.([0-9]+) ]]; then
        echo "pypy${BASH_REMATCH[1]}.${BASH_REMATCH[2]}-v"
        return
    fi
    # Bare "A.B" form: the naming scheme changed over PyPy's history.
    if [[ $version =~ ([0-9]+)\.([0-9]+) ]]; then
        local major=${BASH_REMATCH[1]}
        local minor=${BASH_REMATCH[2]}
        if (( major > 6 )); then
            echo "pypy2.7-v"
        elif (( major > 5 || (major == 5 && minor >= 3) )); then
            echo "pypy2-v"
        else
            echo "pypy-"
        fi
        return
    fi
    echo "error: expected version like pypy-7.2 or pypy3.6-7.2, got $1" 1>&2
    exit 1
}
retry () {
    # Retry command (with arguments) up to 5 times, sleeping 1s after each
    # failed attempt. Returns 0 on first success, 1 after exhausting retries.
    # https://gist.github.com/fungusakafungus/1026804
    local retry_max=5
    local attempt
    for (( attempt=1; attempt<=retry_max; attempt++ )); do
        if "$@"; then
            return 0
        fi
        sleep 1
    done
    echo "Retry failed [$retry_max]: $@" >&2
    return 1
}
function install_pip {
    # Generic install pip
    # Gets needed version from version implied by $PYTHON_EXE
    # Installs pip into python given by $PYTHON_EXE
    # Assumes pip will be installed into same directory as $PYTHON_EXE
    check_python
    mkdir -p $DOWNLOADS_SDIR
    local py_mm=`get_py_mm`
    local get_pip_path=$DOWNLOADS_SDIR/get-pip.py
    curl $GET_PIP_URL > $get_pip_path
    # Travis VMS now install pip for system python by default - force install
    # even if installed already.
    $PYTHON_EXE $get_pip_path --ignore-installed $pip_args
    PIP_CMD=$(dirname $PYTHON_EXE)/pip$py_mm
    # NOTE(review): the comment below and the condition look inverted — sudo is
    # prepended when the user is NOT root; confirm this is the intended logic.
    if [ "$USER" != "root" ]; then
        # inside a docker, there is no sudo but the user is already root
        PIP_CMD="sudo $PIP_CMD"
    fi
    # Append pip_args if present (avoiding trailing space cf using variable
    # above).
    if [ -n "$pip_args" ]; then
        PIP_CMD="$PIP_CMD $pip_args"
    fi
}
function check_python {
    # Fail fast if $PYTHON_EXE has not been set by an install step.
    if [ -z "$PYTHON_EXE" ]; then
        echo "PYTHON_EXE variable not defined"
        exit 1
    fi
}

function check_pip {
    # Fail fast if $PIP_CMD has not been set by an install step.
    if [ -z "$PIP_CMD" ]; then
        echo "PIP_CMD variable not defined"
        exit 1
    fi
}

function get_py_mm {
    # Echo the major.minor version (e.g. "3.8") of $PYTHON_EXE.
    check_python
    $PYTHON_EXE -c "import sys; print('{0}.{1}'.format(*sys.version_info[0:2]))"
}
|
#!/bin/bash
runner() {
    # Run the app either directly with `go run` or inside a Docker container.
    if [ "$1" = "direct" ]; then
        go run main.go
    elif [ "$1" = "docker" ]; then
        docker build -t unfire .
        # read dotenv
        eval "$(cat .env <(echo) <(declare -x))"
        docker run -e APP_PORT=8080 -e TWITTER_CONSUMER_KEY="$TWITTER_CONSUMER_KEY" -e TWITTER_CONSUMER_SECRET="$TWITTER_CONSUMER_SECRET" -p 8080:8080 -t unfire
    else
        # Bug fix: the usage text previously advertised "{ docker | run }",
        # but the arguments this function accepts are "direct" and "docker".
        echo "usage: ./manager.sh run { direct | docker }"
    fi
}
deploy() {
    # Placeholder: deployment is not implemented yet; prints "under construction".
    echo "工事中!"
}
allocator() {
    # Top-level command dispatcher for this script.
    case "$1" in
        run)
            runner "$2"
            ;;
        deploy)
            deploy
            ;;
        *)
            echo "usage: ./manager.sh { run | deploy } { command argument }"
            ;;
    esac
}

allocator "$1" "$2"
import ast
from typing import List, Optional
from flake8_plugin_utils import Visitor, is_none
from flake8_pytest_style.config import Config
from flake8_pytest_style.errors import (
AssertInExcept,
RaisesTooBroad,
RaisesWithMultipleStatements,
RaisesWithoutException,
)
from flake8_pytest_style.utils import (
get_qualname,
get_simple_call_args,
is_empty_string,
is_nontrivial_with_statement,
is_raises_call,
is_raises_with,
)
class RaisesVisitor(Visitor[Config]):
    """AST visitor for the ``pytest.raises``-related checks.

    Covers: missing expected exception (PT010), too-broad exception without a
    ``match=`` (PT011), complex ``pytest.raises`` block bodies (PT012), and
    asserting on a captured exception inside its ``except`` handler.
    """

    def __init__(self, config: Optional[Config] = None) -> None:
        super().__init__(config=config)
        # Names bound by enclosing `except ... as <name>:` handlers
        # (used as a stack: innermost handler's name is last).
        self._exception_names: List[str] = []
        # The `assert` statement currently being traversed, if any.
        self._current_assert: Optional[ast.Assert] = None

    def _check_raises_call(self, node: ast.Call) -> None:
        """
        Checks for violations regarding `pytest.raises` call args (PT010 and PT011).
        """
        args = get_simple_call_args(node)
        exception = args.get_argument('expected_exception', position=0)
        if not exception:
            self.error_from_node(RaisesWithoutException, node)
            return

        exception_name = get_qualname(exception)
        if exception_name not in self.config.raises_require_match_for:
            return

        # A configured "broad" exception is acceptable only with a
        # meaningful (non-None, non-empty) `match=` pattern.
        match = args.get_argument('match')
        if match is None or is_none(match) or is_empty_string(match):
            self.error_from_node(RaisesTooBroad, node, exception=exception_name)

    def _check_raises_with(self, node: ast.With) -> None:
        """Checks for PT012: flags `pytest.raises` blocks with complex bodies."""
        body = node.body
        is_complex_body = False
        # A body is complex if it has more than one statement, ...
        if len(body) != 1:
            is_complex_body = True
        # ... or its single statement is a compound control-flow statement, ...
        elif isinstance(
            body[0],
            (
                ast.If,
                ast.For,
                ast.AsyncFor,
                ast.While,
                ast.Try,
            ),
        ):
            is_complex_body = True
        # ... or it is a nested `with` doing more than a trivial call.
        elif is_nontrivial_with_statement(body[0]):
            is_complex_body = True
        if is_complex_body:
            self.error_from_node(RaisesWithMultipleStatements, node)

    def visit_Call(self, node: ast.Call) -> None:
        if is_raises_call(node):
            self._check_raises_call(node)

    def visit_With(self, node: ast.With) -> None:
        if is_raises_with(node):
            self._check_raises_with(node)
        self.generic_visit(node)

    def visit_ExceptHandler(self, node: ast.ExceptHandler) -> None:
        # Track the handler's bound name while visiting its body, and pop it
        # again afterwards even if visiting raises.
        if node.name:
            self._exception_names.append(node.name)
        try:
            self.generic_visit(node)
        finally:
            if node.name:
                self._exception_names.pop()

    def visit_Assert(self, node: ast.Assert) -> None:
        # Only the assert's test expression counts: names used in the message
        # (node.msg) are visited with _current_assert cleared, so they are
        # intentionally not flagged.
        self._current_assert = node
        try:
            self.visit(node.test)
        finally:
            self._current_assert = None
        if node.msg:
            self.visit(node.msg)

    def visit_Name(self, node: ast.Name) -> None:
        # A name inside an assert test that matches a captured exception name
        # means the code asserts on the exception inside its except block.
        if self._current_assert:
            if node.id in self._exception_names:
                self.error_from_node(AssertInExcept, self._current_assert, name=node.id)
|
import Faction from '@mafia/structures/Faction';
import type Game from '@mafia/structures/Game';
// Factions whose surviving members do not block a Juggernaut victory.
const SUPPORTING_FACTIONS = ['Juggernaut', 'Witch', 'Survivor'];

export default class JuggernautFaction extends Faction {
    public name = 'Juggernaut';
    public winCondition = 'game/factions:nkWinCondition';

    public hasWon(game: Game) {
        // The Juggernaut wins when no non-supporting player is left standing
        // while at least one Juggernaut-aligned player is still alive.
        const alivePlayers = game.players.filter((player) => player.isAlive);
        const opposingRemain = alivePlayers.some((player) => !SUPPORTING_FACTIONS.includes(player.role.faction.name));
        const juggernautAlive = alivePlayers.some((player) => player.role.faction.name === 'Juggernaut');
        return !opposingRemain && juggernautAlive;
    }
}
|
class ExtendedSet2Command:
    """Extended Set-2 command addressed to a single device."""

    def __init__(self, address: Address, data1=None, data2=None):
        """Init the ExtendedSet2Command.

        Raises ValueError when data2 is 0 or 1 (rejected up front by the
        original validation; the reason is not shown in this file).
        """
        if data2 in (0, 1):
            raise ValueError("Error creating extended set command: data2 cannot be 0 or 1")
        self._address = address
        self._data1 = data1
        self._data2 = data2
<filename>blitzd/packets/udp/PacketUDP.cpp
#include "Config.h"
#include "PacketUDP.h"

namespace Packets
{
	namespace UDP
	{
		// Serialize this packet: write the command id into the internal
		// buffer, then delegate to Pack() to finalize the payload.
		// Returns whatever Pack() reports (true on success).
		bool PacketUDP::Build()
		{
			_packet << GetCmd();
			return Pack();
		}
	}
}
|
<filename>app.rb
# frozen_string_literal: true
require 'rubygems'
require 'sinatra'
require 'sinatra/reloader'
require 'sinatra/activerecord'
set :database, { adapter: 'sqlite3', database: 'barbershop.db' }
# A booked appointment: the visitor's name, phone, requested time, and colour.
class Client < ActiveRecord::Base
  validates :name, presence: true, length: { in: 3..20}
  validates :phone, presence: true
  validates :datestamp, presence: true
  validates :color, presence: true
end
# A barber that visitors can view via GET /barber/:id; no validations needed.
class Barber < ActiveRecord::Base
end
# A message submitted through the contact form (POST /contact).
class Contact < ActiveRecord::Base
  validates :user_name, presence: true
  validates :user_message, presence: true
end
# Runs before every route: expose the barber list and build an unsaved Client
# from the submitted params (nil params on GET requests yield a blank record).
# NOTE(review): both instance variables are presumably read by the :visit
# view — verify against the templates.
before do
  @barbers = Barber.all
  @clnt = Client.new params[:client]
end

# Landing page.
get '/' do
  erb :index
end

# Booking form; @clnt and @barbers come from the `before` filter.
get '/visit' do
  erb :visit
end
# Create a booking. Saves exactly once and branches on the result; the
# original called `@clnt.save` twice, re-running validations and issuing a
# redundant second database write.
post '/visit' do
  if @clnt.save
    erb 'You in!'
  else
    @error = @clnt.errors.full_messages.first
    erb :visit
  end
end
# Contact form.
get '/contact' do
  erb :contact
end
# Store a contact message. Saves exactly once (the original called `save`
# twice) and re-renders the form, exposing the first validation error on
# failure.
post '/contact' do
  cntct = Contact.new params[:contact]
  if cntct.save
    erb :contact
  else
    @error = cntct.errors.full_messages.first
    erb :contact
  end
end
# Barber detail page.
get '/barber/:id' do
  @barber = Barber.find(params[:id])
  erb :barber
end

# All bookings, newest first.
get '/bookings' do
  @visiters = Client.order(:datestamp).reverse_order
  erb :bookings
end

# Single booking detail page.
get '/client/:id' do
  @client = Client.find(params[:id])
  erb :client
end
|
#!/bin/bash
set -e
# Extract space-delimited columns 4 and 5 from libsyms.rel and collapse
# adjacent duplicate lines into libsyms.index.
# NOTE(review): `uniq` only removes *adjacent* duplicates; if libsyms.rel is
# not already sorted on these columns, `sort -u` may be intended — confirm.
cut -d' ' -f4,5 libsyms.rel | uniq > libsyms.index
|
# Creates the projects table: a project's metadata, marketplace/social links,
# and its avatar image URL.
class CreateProjects < ActiveRecord::Migration[7.0]
  def change
    create_table :projects do |t|
      # Simple string columns, declared in the original column order.
      %i[name url_opensea url_discord url_twitter url_website].each do |column|
        t.string column
      end
      t.text :description
      t.string :slug
      t.string :image_avatar_url

      t.timestamps
    end
  end
end
|
import jsonRpc from 'simple-jsonrpc-js'; // Hook-up transport for simple-jsonrpc-js
export function PdRpc() {
  const rpc = new jsonRpc();

  // Transport hookup: POST each outgoing JSON-RPC frame to /rpc and feed the
  // HTTP response body back into the rpc engine as the reply.
  rpc.toStream = async (msg) => {
    let response;
    try {
      response = await fetch('/rpc', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: msg,
      });
    } catch (reason) {
      // Network-level failure (matches the original's rejection handler).
      console.log("Failed RPC fetch: ", reason);
      return;
    }
    if (response.ok) {
      rpc.messageHandler(await response.text());
    } else {
      console.log("Failed RPC fetch: ", response.statusText);
    }
  };

  // Thin promise-returning wrappers over the device's JSON-RPC methods.
  return {
    getBoardDefinition: () => rpc.call('get_board_definition'),
    getDeviceInfo: () => rpc.call('get_device_info'),
    getParameterDefinitions: () => rpc.call('get_parameter_definitions'),
    getParameter: (paramIdx) => rpc.call('get_parameter', [paramIdx]),
    setParameter: (paramIdx, value) => rpc.call('set_parameter', [paramIdx, value]),
    // Send a list of activated pin numbers, e.g. [4, 2, 100] will enable
    // electrode outputs 4, 2, and 100 while disabling all others.
    setElectrodePins: (pins) => rpc.call('set_electrode_pins', [pins]),
    calibrateCapacitanceOffset: () => {
      console.log("Recalibrate");
      return rpc.call('calibrate_capacitance_offset');
    },
  };
}

export default PdRpc;
|
def sum_odd_recurse(num):
    """Return the sum of all odd integers in [1, num], computed recursively.

    Non-positive inputs return 0; this also guards against the infinite
    recursion the original hit for negative numbers.
    """
    if num <= 0:
        return 0
    if num % 2 != 0:
        return num + sum_odd_recurse(num - 1)
    return sum_odd_recurse(num - 1)


# driver code
x = 4
# Odd numbers up to 4 are 1 and 3, so this prints 4.
# (The original comment claimed 9 = 1+3+5, which is the answer for x = 5.)
print(sum_odd_recurse(x))
/**
 * Create or update a store's single page from the submitted form data.
 *
 * Validates the localized copy, the image upload, and the status flag,
 * stores the uploaded image on the "public" disk, upserts the Page row
 * keyed by store_id, then redirects back to the index.
 */
public function store(Request $request)
{
    // Validate the input data. store_id is now validated too, so a request
    // without it fails validation instead of crashing on a null store below.
    $validatedData = $request->validate([
        'store_id' => 'required',
        'desc_ar' => 'required|string',
        'desc' => 'required|string',
        'intro' => 'required|string',
        'intro_ar' => 'required|string',
        'image' => 'required|image',
        'status' => 'required|in:active,inactive',
    ]);

    // findOrFail() turns an unknown store id into a 404 response; the
    // original used find() unchecked and fataled on $store->id when the
    // store did not exist.
    $store = Store::findOrFail($validatedData['store_id']);

    // Upsert: exactly one page per store, identified by store_id.
    Page::updateOrCreate(
        [
            'store_id' => $store->id,
        ],
        [
            'desc_ar' => $validatedData['desc_ar'],
            'desc' => $validatedData['desc'],
            'intro' => $validatedData['intro'],
            'intro_ar' => $validatedData['intro_ar'],
            // Persist the upload under the public disk's images/ directory
            // and store the resulting relative path.
            'image' => $validatedData['image']->store('images', 'public'),
            'status' => $validatedData['status'],
        ]
    );

    // Redirect the user to the index page.
    return redirect()->route('pages.index');
}
import React from 'react'
import {Container, Segment, Button} from 'semantic-ui-react'
import {NavLink} from 'react-router-dom'
const OrderCompleteConfirm = props => {
return (
<div>
<br />
<Container>
<Segment>
<div className="cart-topbar">
<div>
<h1>Purchase successful- check your email for confirmation!</h1>
</div>
</div>
<br />
<NavLink to="/home">
<Button type="button" color="blue">
Browse All Products
</Button>
</NavLink>
</Segment>
</Container>
</div>
)
}
export default OrderCompleteConfirm
|
<gh_stars>1-10
import { Exam } from '@prisma/client';
import { injectable, inject } from 'tsyringe';
import { GetExamsByUserIdDTO } from '../dtos/GetExamsByUserId.dto';
import { IExamRepository } from '../repositories/IExamRepository';
@injectable()
export class GetExamsByUserIdService {
  constructor(
    @inject('PrismaExamRepository')
    private readonly examRepository: IExamRepository
  ) {}

  /**
   * Fetch every exam belonging to the given user id.
   */
  async handle({ id }: GetExamsByUserIdDTO): Promise<Exam[]> {
    return this.examRepository.getExamsByUserId(id);
  }
}
|
/*
* Copyright 2021 Solace Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.solace.samples.java.snippets;
import com.solace.messaging.MessagingService;
import com.solace.messaging.config.SolaceProperties.MessageProperties;
import com.solace.messaging.publisher.DirectMessagePublisher;
import com.solace.messaging.publisher.OutboundMessage;
import com.solace.messaging.publisher.OutboundMessageBuilder;
import com.solace.messaging.resources.Topic;
import com.solace.messaging.util.Converter.ObjectToBytes;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import java.time.Instant;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Sampler for direct message publisher
*/
/**
 * Sampler for direct message publisher
 */
public class HowToPublishDirectMessage {

  /**
   * Example how to start direct message publisher in a blocking mode
   *
   * @param service connected instance of a messaging service, ready to be used
   */
  public static void startDirectMessagePublisher(MessagingService service) {
    final DirectMessagePublisher messagePublisher = service.createDirectMessagePublisherBuilder()
        .build().start();
  }

  /**
   * Example how to start direct message publisher in an asynchronous mode with future interface
   *
   * @param service connected instance of a messaging service, ready to be used
   */
  public static void startAsyncDirectMessagePublisher(MessagingService service) {
    final CompletableFuture<DirectMessagePublisher> messagePublisherFuture = service
        .createDirectMessagePublisherBuilder()
        .build().startAsync();
    messagePublisherFuture.whenComplete(
        (directMessagePublisher, throwable) -> {
          if (throwable != null) {
            // This method can't prevent exception propagation.
            // Exception logging can be performed here,
            // or code can be placed here that releases external resources or disconnects from the messaging service.
            // IMPORTANT: when an exception occurs during publisher start,
            // it is propagated over the entire call chain, preventing ALL later calls from execution.
            // IMPORTANT: 'throwable' is of type java.util.concurrent.CompletionException, to get out the nested exception use:
            final Throwable wrappedException = throwable.getCause();
            // wrappedException is very likely of type PubSubPlusClientException, use cast/type check for processing
          }
        }
    );
    // methods messagePublisherFuture.thenAccept(..) or messagePublisherFuture.thenCompose(..)
    // of CompletableFuture APIs can be used to perform publishing activities
  }

  /**
   * Example how to create a topic
   *
   * @param topicName topic name/expression
   * @return topic instance to be used for publishing purposes
   */
  public static Topic createATopic(String topicName) {
    return Topic.of(topicName);
  }

  /**
   * Example how to publish direct message with a given byte payload
   *
   * @param service connected instance of a messaging service, ready to be used
   * @param rawMessage byte representation of a payload
   * @param toDestination message destination
   */
  public static void publishBytesAsDirectMessage(MessagingService service, byte[] rawMessage,
      Topic toDestination) {
    final DirectMessagePublisher messagePublisher = service.createDirectMessagePublisherBuilder()
        .build().start();
    messagePublisher.publish(rawMessage, toDestination);
  }

  /**
   * Example how to publish direct message with a given string payload
   *
   * @param service connected instance of a messaging service, ready to be used
   * @param toDestination message destination
   */
  public static void publishStringAsDirectMessage(MessagingService service,
      Topic toDestination) {
    final DirectMessagePublisher messagePublisher = service.createDirectMessagePublisherBuilder()
        .build().start();
    messagePublisher.publish("simple message to the world", toDestination);
  }

  /**
   * Example how to publish direct message with a business object/pojo(aka Plain Java Object) as a
   * payload
   *
   * @param service connected instance of a messaging service, ready to be used
   * @param toDestination message destination
   */
  public static void publishTypedBusinessObjectAsDirectMessage(MessagingService service,
      Topic toDestination) {
    final DirectMessagePublisher messagePublisher = service
        .createDirectMessagePublisherBuilder()
        .build().start();
    // builder for creation of similarly configured messages
    final OutboundMessageBuilder messageBuilder = service.messageBuilder();
    // converter to turn business object/ pojo (plain java object) into byte []
    final ObjectToBytes<MyData> pojo2ByteConverter = (pojo) -> {
      return pojo.getName().getBytes(StandardCharsets.US_ASCII);
    };
    // business object to be published in a message
    final MyData myBusinessObject = new MyData("my message");
    messagePublisher
        .publish(messageBuilder.build(myBusinessObject, pojo2ByteConverter), toDestination);
  }

  /**
   * Example how to publish direct message with message header customization and custom properties
   *
   * @param service connected instance of a messaging service, ready to be used
   * @param toDestination message destination
   */
  public static void publishWithFullControlDirectMessage(MessagingService service,
      Topic toDestination) {
    final DirectMessagePublisher messagePublisher = service.createDirectMessagePublisherBuilder()
        .build().start();
    // converter to turn business object/ pojo (plain java object) into byte []
    final ObjectToBytes<MyData> pojo2ByteConverter = (pojo) -> {
      return pojo.getName().getBytes(StandardCharsets.US_ASCII);
    };
    // builder for creation of similarly configured messages
    final OutboundMessageBuilder messageBuilder = service.messageBuilder();
    final MyData myBusinessObject = new MyData("my message");
    final Properties additionalProperties = new Properties();
    additionalProperties.setProperty("key", "value");
    final OutboundMessage message = messageBuilder
        .fromProperties(additionalProperties)
        // expire in 10 sec
        .withExpiration(Instant.now().toEpochMilli() + 10000L)
        .build(myBusinessObject, pojo2ByteConverter);
    messagePublisher.publish(message, toDestination);
  }

  /**
   * Example how to publish direct message with REUSABLE {@link OutboundMessageBuilder}. We
   * recommend for performance purposes to REUSE builder (one or multiple) instance, to produce
   * {@link OutboundMessage} instances. In this particular example one builder creates High priority
   * another low priority messages.
   *
   * @param service connected instance of a messaging service, ready to be used
   * @param highPriorityMessages list with high priority string messages
   * @param lowPriorityMessages list with low priority string messages
   * @param publisher direct publisher to publish messages on
   */
  public static void publishCustomMessagesUsingBuilderTemplate(MessagingService service,
      List<String> highPriorityMessages,
      List<String> lowPriorityMessages, DirectMessagePublisher publisher) {
    // pre-configured message template for HIGH PRIORITY MESSAGES
    final OutboundMessageBuilder messageTemplateHighPriority = service.messageBuilder()
        // for Rest compatibility
        .withHTTPContentHeader("text/plain", "UTF-8")
        .withPriority(255);
    final Topic topic = Topic.of("example/myTopic");
    // pre-configured message template for LOW PRIORITY MESSAGES
    final OutboundMessageBuilder messageTemplateLowPriority = service.messageBuilder()
        // for Rest compatibility
        .withHTTPContentHeader("text/plain", "UTF-8")
        .withPriority(1);
    highPriorityMessages.forEach(stringMessage -> {
      final OutboundMessage myMessage = messageTemplateHighPriority
          .build(stringMessage);
      // publish a high-priority (255) message built from the shared template
      publisher.publish(myMessage, topic);
    });
    lowPriorityMessages.forEach(stringMessage -> {
      final OutboundMessage myMessage = messageTemplateLowPriority
          .build(stringMessage);
      // publish a low-priority (1) message built from the shared template
      publisher.publish(myMessage, topic);
    });
  }

  /**
   * Example how to publish direct messages using same instance of a publisher from different
   * threads. This example is simplified, interruption for the publishing loop and exception
   * handling are not provided
   *
   * @param service connected instance of a messaging service, ready to be used
   * @param toDestination message destination
   * @param blockingQueueWithMessages source of the messages
   * @param executorService executor that provides threads used for message publishing
   */
  public static void publishDirectMessagesAsynchronously(MessagingService service,
      Topic toDestination,
      final BlockingQueue<? extends OutboundMessage> blockingQueueWithMessages,
      ExecutorService executorService) {
    final DirectMessagePublisher messagePublisher = service
        .createDirectMessagePublisherBuilder()
        .build().start();
    boolean interrupted = false;
    try {
      // in this simplified example publishing forever
      while (true) {
        try {
          final OutboundMessage nextMessage = blockingQueueWithMessages
              .poll(100L, TimeUnit.MILLISECONDS);
          if (nextMessage != null) {
            // Publisher supports publishing from different threads once it is fully configured.
            // Each message is published here on another thread provided from the given
            // executor service
            executorService.submit(() -> messagePublisher.publish(nextMessage, toDestination));
          }
        } catch (InterruptedException e) {
          interrupted = true;
          // fall through and retry
        }
      }
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  /**
   * Example how to publish direct messages using message builder and custom set of parameter per
   * publish action
   *
   * @param service connected instance of a messaging service, ready to be used
   * @param rawMessage byte representation of a payload
   * @param toDestination message destination
   */
  public static void publishDirectMessageWithAdditionalProperties(MessagingService service,
      byte[] rawMessage, Topic toDestination) {
    final DirectMessagePublisher messagePublisher = service.createDirectMessagePublisherBuilder()
        .build().start();
    // builder for creation of similarly configured messages
    final OutboundMessageBuilder messageBuilder = service.messageBuilder();
    // i.e configure highest priority
    messageBuilder.withPriority(255);
    // create individual message
    final OutboundMessage myMessage = messageBuilder.build(rawMessage);
    final String correlationId = Long
        .toHexString(Double.doubleToLongBits(new SecureRandom().nextDouble()));
    // properties per message publishing attempt
    final Properties additionalMessageProperties = new Properties();
    // add some custom key-value pair
    additionalMessageProperties.setProperty("myKey", "myValue");
    // add correlation id (which is one of well known message properties)
    additionalMessageProperties.setProperty(MessageProperties.CORRELATION_ID, correlationId);
    // publish message with additional properties that applied for the particular publishing attempt.
    // Same message can be published multiple times but each time it can be customized individually with
    // properties
    messagePublisher.publish(myMessage, toDestination, additionalMessageProperties);
  }

  /**
   * basic example for a business object to be sent in a message
   */
  static class MyData implements Serializable {

    private static final long serialVersionUID = 1L;
    private final String name;

    MyData(String name) {
      this.name = name;
    }

    public String getName() {
      return name;
    }
  }
}
|
<gh_stars>1-10
import os, sys
from PIL import Image


def white_to_transparent(src_path, dst_path):
    """Convert every pure-white pixel (255, 255, 255) of the image at
    src_path to fully transparent and save the result as PNG at dst_path."""
    with Image.open(src_path) as im:
        rgba = im.convert("RGBA")
    rgba.putdata(
        [
            (255, 255, 255, 0) if pixel[:3] == (255, 255, 255) else pixel
            for pixel in rgba.getdata()
        ]
    )
    rgba.save(dst_path, "PNG")


def main(directory):
    """Process every file in `directory`, writing <name>.transparent.png.

    Bug fix vs. the original: os.listdir() yields bare filenames, so both
    input and output paths are joined with the directory. The original
    opened/saved relative to the CWD and failed unless run from inside it.
    """
    for infile in os.listdir(directory):
        outfile = os.path.splitext(infile)[0] + ".transparent.png"
        if infile == outfile:
            continue
        try:
            white_to_transparent(
                os.path.join(directory, infile),
                os.path.join(directory, outfile),
            )
        except IOError:
            print("Cannot convert white to transparent for", infile)


if __name__ == "__main__":
    main(sys.argv[1])
|
// Iterative binary search over an ascending sorted array.
// Returns the index of `elem`, or -1 when it is absent.
function binarySearch(arr, elem) {
  let lo = 0;
  let hi = arr.length - 1;
  while (lo <= hi) {
    const mid = Math.floor((lo + hi) / 2);
    if (arr[mid] === elem) {
      return mid;
    }
    if (arr[mid] < elem) {
      lo = mid + 1;
    } else {
      hi = mid - 1;
    }
  }
  return -1;
}

const arr = [1, 5, 7, 9, 10];
const result = binarySearch(arr, 7); // 2
def replace_placeholders(words, template):
    """Replace each "{i}" placeholder in template with words[i].

    Placeholders may appear in any order and any number of times; indices
    without a matching placeholder are simply unused. Note: replacements are
    applied sequentially, so a word that itself contains a later "{k}" would
    be expanded too.
    """
    result = template
    for index, word in enumerate(words):
        result = result.replace("{" + str(index) + "}", word)
    return result


# Test the function
words = ["program", "Python", "know", "Every"]
template = "Every {3} should know the use of {2}-{1} programming and {0}"
# -> "Every Every should know the use of know-Python programming and program"
# (the original comment showed a wrong expansion for the {2}-{1} part)
print(replace_placeholders(words, template))
#! /bin/bash
# Demonstrates the different forms of the `read` builtin.
# (Typos in the user-facing prompts of the original are fixed below.)

# With echo -e, \c suppresses the trailing newline so input stays on the prompt line.
echo -e "Hi, please type the word: \c "
read word
echo "The word you entered is: $word"

echo -e "Can you please enter two words?"
# With two names, the first whitespace-separated word goes to word1 and the
# remainder of the line to word2.
read word1 word2
echo "Here is your input: \"$word1\" \"$word2\""

echo -e "How do you feel about bash scripting?"
# read with no variable stores the reply in the built-in variable REPLY
read
echo "You said $REPLY, I'm glad to hear that!"

echo -e "What are your favourite colours?"
# read -a splits the line into the elements of an array
read -a colours
echo "My favourite colours are also ${colours[0]}, ${colours[1]} and ${colours[2]} :-)"
// Taussig
//
// Written in 2013 by <NAME> <<EMAIL>>
//
// To the extent possible under law, the author(s) have dedicated all copyright and related
// and neighboring rights to this software to the public domain worldwide. This software is
// distributed without any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.

// Source-owning sequence

#ifndef TAUSSIG_ALGORITHMS_DETAIL_SOURCE_SEQUENCE_HPP
#define TAUSSIG_ALGORITHMS_DETAIL_SOURCE_SEQUENCE_HPP

#include <taussig/primitives/empty.h++>
#include <taussig/primitives/front.h++>
#include <taussig/primitives/pop_front.h++>
#include <taussig/primitives/as_sequence.h++>
#include <taussig/traits/true_sequence.h++>
#include <taussig/traits/is_true_sequence.h++>
#include <taussig/traits/fake_sequence.h++>
#include <taussig/traits/reference_type.h++>
#include <taussig/traits/value_type.h++>
#include <wheels/meta/decay.h++>
#include <utility> // forward

namespace seq {
    namespace detail {
        // A sequence that OWNS a copy of its underlying source and keeps a
        // sequence view over that owned copy, so the view never refers to
        // storage outside this object.
        template <typename Source>
        struct source_sequence : true_sequence {
        private:
            using source_type = wheels::meta::Decay<Source>;
            using seq_type = wheels::meta::Decay<seq::result_of::as_sequence<source_type>>;

        public:
            template <typename SourceF>
            source_sequence(SourceF&& source)
            : source(std::forward<SourceF>(source))
            , sequence(seq::as_sequence(this->source)) {}

            // Copy/move must REBUILD `sequence` from this object's own copy
            // of the source; copying `that.sequence` would leave a view that
            // still points into `that`'s source.
            source_sequence(source_sequence const& that)
            : source(that.source)
            , sequence(seq::as_sequence(this->source)) {}
            source_sequence(source_sequence&& that)
            : source(std::move(that.source))
            , sequence(seq::as_sequence(this->source)) {}

            source_sequence& operator=(source_sequence const& that) {
                source = that.source;
                sequence = seq::as_sequence(source);
                return *this;
            }
            source_sequence& operator=(source_sequence&& that) {
                source = std::move(that.source);
                sequence = seq::as_sequence(source);
                return *this;
            }

            using reference = ReferenceType<seq_type>;
            using value_type = ValueType<seq_type>;

            // Sequence primitives all delegate to the owned view.
            bool empty() const {
                return seq::empty(sequence);
            }
            void pop_front() {
                seq::pop_front(sequence);
            }
            reference front() const {
                return seq::front(sequence);
            }

        private:
            source_type source;   // owned copy of the original source
            seq_type sequence;    // view over `source`; rebuilt on copy/move
        };
        static_assert(is_true_sequence<source_sequence<fake_sequence<int>>>(), "");
    } // namespace detail
} // namespace seq

#endif // TAUSSIG_ALGORITHMS_DETAIL_SOURCE_SEQUENCE_HPP
|
<filename>chrome-extension/background.js
const ContextMenuId = 'a';

// Register the "memo this page to Google Calendar" context-menu entry.
const createContextMenu = () => {
  chrome.contextMenus.create({
    title: 'ページをメモ(Googleカレンダーに追加)',
    contexts: [
      'page',
      'selection',
    ],
    id: ContextMenuId,
  });
};

chrome.runtime.onInstalled.addListener(createContextMenu);
chrome.runtime.onStartup.addListener(createContextMenu);

// Compact UTC timestamp (YYYYMMDDTHHMMSSZ) as the Calendar template URL expects.
const toCalendarStamp = (date) => date.toISOString().replace(/-|:|\.\d{3}/g, '');

chrome.contextMenus.onClicked.addListener((info, tab) => {
  if (info.menuItemId !== ContextMenuId) return;

  // Pre-fill a one-hour event starting now.
  const startDate = new Date();
  const endDate = new Date(startDate.getTime());
  endDate.setHours(startDate.getHours() + 1);

  const queryObject = {
    action: 'TEMPLATE',
    text: tab.title,
    details: tab.url,
    dates: [toCalendarStamp(startDate), toCalendarStamp(endDate)].join('/'),
  };
  // Append the selected text (if any) to the event body.
  if (info.selectionText) {
    queryObject.details += '\n\n' + info.selectionText;
  }

  const querys = Object.entries(queryObject).map(
    ([key, value]) => `${key}=${encodeURIComponent(value)}`
  );
  const createProperties = {
    url: 'https://www.google.com/calendar/render?' + querys.join('&'),
  };
  // Open the Calendar tab next to the originating tab when possible.
  if (tab.id !== chrome.tabs.TAB_ID_NONE) {
    createProperties.openerTabId = tab.id;
  }
  chrome.tabs.create(createProperties);
});
|
#!/usr/bin/env bats

load test_helper

# Every test runs from a fresh sandbox directory.
setup() {
  mkdir -p "$NODENV_TEST_DIR"
  cd "$NODENV_TEST_DIR"
}

# Create an empty file at $1, making parent directories as needed.
create_file() {
  mkdir -p "$(dirname "$1")"
  touch "$1"
}

@test "detects global 'version' file" {
  create_file "${NODENV_ROOT}/version"
  run nodenv-version-file
  assert_success "${NODENV_ROOT}/version"
}

# The global path is reported even when the file does not exist yet.
@test "prints global file if no version files exist" {
  refute [ -e "${NODENV_ROOT}/version" ]
  refute [ -e ".node-version" ]
  run nodenv-version-file
  assert_success "${NODENV_ROOT}/version"
}

@test "in current directory" {
  create_file ".node-version"
  run nodenv-version-file
  assert_success "${NODENV_TEST_DIR}/.node-version"
}

@test "in parent directory" {
  create_file ".node-version"
  mkdir -p project
  cd project
  run nodenv-version-file
  assert_success "${NODENV_TEST_DIR}/.node-version"
}

# The version file closest to the working directory wins.
@test "topmost file has precedence" {
  create_file ".node-version"
  create_file "project/.node-version"
  cd project
  run nodenv-version-file
  assert_success "${NODENV_TEST_DIR}/project/.node-version"
}

@test "NODENV_DIR has precedence over PWD" {
  create_file "widget/.node-version"
  create_file "project/.node-version"
  cd project
  NODENV_DIR="${NODENV_TEST_DIR}/widget" run nodenv-version-file
  assert_success "${NODENV_TEST_DIR}/widget/.node-version"
}

@test "PWD is searched if NODENV_DIR yields no results" {
  mkdir -p "widget/blank"
  create_file "project/.node-version"
  cd project
  NODENV_DIR="${NODENV_TEST_DIR}/widget/blank" run nodenv-version-file
  assert_success "${NODENV_TEST_DIR}/project/.node-version"
}

@test "finds version file in target directory" {
  create_file "project/.node-version"
  run nodenv-version-file "${PWD}/project"
  assert_success "${NODENV_TEST_DIR}/project/.node-version"
}

@test "fails when no version file in target directory" {
  run nodenv-version-file "$PWD"
  assert_failure ""
}
|
#$ -S /bin/bash
#$ -e /net/data/GTEx/eo_files
#$ -o /net/data/GTEx/eo_files
#$ -l mf=15G
#$ -V

# For each index-SNP region (columns 1, 4, 5 = CHR, start, end of the peaks
# file), extract the whole-blood eQTL association rows whose variant position
# falls inside the region into regions/whole_blood.<chr>.<start>.<end>.txt.
# All variable expansions are now quoted to be robust against word splitting.

mkdir -p /net/data/GTEx/GTEx_Analysis_v7_QTLs/GTEx_Analysis_v7_eQTL_all_associations/whole_blood/regions
cd /net/data/GTEx/GTEx_Analysis_v7_QTLs/GTEx_Analysis_v7_eQTL_all_associations/whole_blood

while read -r line; do
  chr=$(echo "$line" | awk '{print $1}')
  # Skip the header row.
  if [ "$chr" = "CHR" ]; then
    continue
  fi
  start=$(echo "$line" | awk '{print $4}')
  end=$(echo "$line" | awk '{print $5}')
  # Column 2 holds variant ids of the form chr_pos_...; keep the header line
  # plus every row whose pos lies within [start, end] on the right chromosome.
  awk -v chr="$chr" -v start="$start" -v end="$end" '\
    NR == 1 {print $0}
    NR > 1 {split($2,pos,"_");
            if (pos[1] == chr && pos[2] >= start && pos[2] <= end) {print $0}}' \
    "Whole_Blood.chr$chr.txt" \
    > "regions/whole_blood.$chr.$start.$end.txt"
done < /net/data/GTEx/gwas/ukbb_hdl_conditioned/main_pheno_peaks_hg19/ukbb_hdl_conditioned_hg19.indexSNP.tsv
|
#!/bin/bash
# Entry script inside the container

# Abort on the first failing command so CI reports the real failure point.
set -e

echo Starting Docker script

# Log toolchain/OS versions to simplify debugging of CI failures.
echo "node version:"
node --version
echo "npm version:"
npm --version
echo "os info:"
uname -a

echo "Starting tests"
cd /app
npm run test:integration
echo Ending Docker script
|
<gh_stars>0
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package br.puc_rio.inf.les.med.model;
import java.io.Serializable;
import java.util.Date;
import java.util.Objects;

import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.xml.bind.annotation.XmlRootElement;
/**
 * JPA entity for table {@code tb_patient_data}: one timestamped value
 * recorded for a monitoring/sensor association.
 *
 * @author tassio
 */
@Entity
@Table(name = "tb_patient_data")
@XmlRootElement
@NamedQueries({
    @NamedQuery(name = "TbPatientData.findAll", query = "SELECT t FROM TbPatientData t")
    , @NamedQuery(name = "TbPatientData.findByPkId", query = "SELECT t FROM TbPatientData t WHERE t.pkId = :pkId")
    , @NamedQuery(name = "TbPatientData.findByStrValue", query = "SELECT t FROM TbPatientData t WHERE t.strValue = :strValue")
    , @NamedQuery(name = "TbPatientData.findByDtmTimestamp", query = "SELECT t FROM TbPatientData t WHERE t.dtmTimestamp = :dtmTimestamp")})
public class TbPatientData implements Serializable {

    private static final long serialVersionUID = 1L;

    // Auto-generated surrogate key (pk_id).
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Basic(optional = false)
    @Column(name = "pk_id")
    private Integer pkId;

    // Recorded value, stored as text (str_value).
    @Basic(optional = false)
    @Column(name = "str_value")
    private String strValue;

    // Moment the value was recorded (dtm_timestamp).
    @Basic(optional = false)
    @Column(name = "dtm_timestamp")
    @Temporal(TemporalType.TIMESTAMP)
    private Date dtmTimestamp;

    // Owning monitoring/sensor association this reading belongs to.
    @JoinColumn(name = "fk_monitoring_has_sensor_id", referencedColumnName = "pk_id")
    @ManyToOne(optional = false)
    private TbMonitoringHasSensor fkMonitoringHasSensorId;

    /** No-arg constructor required by JPA. */
    public TbPatientData() {
    }

    public TbPatientData(Integer pkId) {
        this.pkId = pkId;
    }

    public TbPatientData(Integer pkId, String strValue, Date dtmTimestamp) {
        this.pkId = pkId;
        this.strValue = strValue;
        this.dtmTimestamp = dtmTimestamp;
    }

    public Integer getPkId() {
        return pkId;
    }

    public void setPkId(Integer pkId) {
        this.pkId = pkId;
    }

    public String getStrValue() {
        return strValue;
    }

    public void setStrValue(String strValue) {
        this.strValue = strValue;
    }

    public Date getDtmTimestamp() {
        return dtmTimestamp;
    }

    public void setDtmTimestamp(Date dtmTimestamp) {
        this.dtmTimestamp = dtmTimestamp;
    }

    public TbMonitoringHasSensor getFkMonitoringHasSensorId() {
        return fkMonitoringHasSensorId;
    }

    public void setFkMonitoringHasSensorId(TbMonitoringHasSensor fkMonitoringHasSensorId) {
        this.fkMonitoringHasSensorId = fkMonitoringHasSensorId;
    }

    /** Identity is based solely on the primary key; a null id hashes to 0. */
    @Override
    public int hashCode() {
        return Objects.hashCode(pkId);
    }

    /**
     * Entities are equal when their primary keys are equal. Same caveat as
     * the generated code: two unsaved entities (null ids) compare equal.
     */
    @Override
    public boolean equals(Object object) {
        if (!(object instanceof TbPatientData)) {
            return false;
        }
        TbPatientData other = (TbPatientData) object;
        return Objects.equals(this.pkId, other.pkId);
    }

    @Override
    public String toString() {
        // Fixed: the generated string named a non-existent ...med.dao.model package.
        return "br.puc_rio.inf.les.med.model.TbPatientData[ pkId=" + pkId + " ]";
    }
}
|
// Set the dimensions and margins of the graph
var width = 450
var height = 450
var margin = 40

// The radius of the pie chart is half the width or half the height (smallest one). I subtract a bit from the radius to make sure it fits within the canvas
var radius = Math.min(width, height) / 2 - margin

// BUG FIX: `color` was referenced below but never defined, so filling the
// arcs threw a ReferenceError. Map each country to a categorical colour.
var color = d3.scaleOrdinal(d3.schemeCategory10)

// Create svg container and set the dimensions
var svg = d3.select('#chart')
  .append('svg')
  .attr('width', width)
  .attr('height', height)

// Create group element to hold pie chart and set the coordinates of the center of the pie chart
// the translate() method moves the (0, 0) position of the <g> element to the center
var g = svg.append('g')
  .attr('transform', 'translate(' + width / 2 + ',' + height / 2 + ')');

// Arc generator: innerRadius 0 draws a pie (a value > 0 would make a doughnut)
var arc = d3.arc()
  .innerRadius(0)
  .outerRadius(radius);

// Pie layout: computes each slice's angles from the row's population value
var pie = d3.pie()
  .value(function (d) { return d.population; })

// Read the data and create the pie chart
d3.csv('data.csv', function (data) {
  var arcs = g.selectAll('arc')
    .data(pie(data))
    .enter()
    .append('g')
    .attr('class', 'arc')

  // Append path and fill with colors
  arcs.append('path')
    .attr('d', arc)
    .attr('fill', function (d) { return color(d.data.country) });

  // Append text labels centered on each slice
  arcs.append('text')
    .attr('transform', function (d) { return 'translate(' + arc.centroid(d) + ')' })
    .attr('text-anchor', 'middle')
    .text(function (d) { return d.data.country });
});
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.