text stringlengths 1 1.05M |
|---|
def insertion_sort(nums):
    """Sort ``nums`` in place, ascending, using insertion sort.

    Stable, O(n^2) worst case, O(n) on already-sorted input. Returns None;
    the input list is mutated.
    """
    for index in range(1, len(nums)):
        value = nums[index]
        slot = index
        # Shift every larger element one slot right, then drop the saved
        # value into the gap that opens up.
        while slot and nums[slot - 1] > value:
            nums[slot] = nums[slot - 1]
            slot -= 1
        nums[slot] = value

nums = [8, 4, 7, 3, 6, 5]
insertion_sort(nums)
print(nums)  # outputs [3, 4, 5, 6, 7, 8]
<reponame>Jorch72/OpenTransport-old<filename>src/main/java/xyz/brassgoggledcoders/opentransport/wrappers/world/WorldWrapper.java
package xyz.brassgoggledcoders.opentransport.wrappers.world;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumParticleTypes;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.SoundEvent;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.Explosion;
import net.minecraft.world.World;
import net.minecraft.world.chunk.Chunk;
import net.minecraft.world.chunk.IChunkProvider;
import xyz.brassgoggledcoders.opentransport.api.blockwrappers.IBlockWrapper;
import xyz.brassgoggledcoders.opentransport.api.entities.IHolderEntity;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;
/**
 * A lightweight fake {@link World} wrapped around the single block carried by
 * an {@link IHolderEntity} (a block riding an entity such as a cart). Block
 * and tile-entity queries are answered from the held block at the fixed
 * origin position (0,0,0); every other position reads as air. Actions with
 * world-wide effects (sounds, particles, explosions, entity lookups) are
 * forwarded to the real world at the holder entity's position.
 */
public class WorldWrapper extends World {
    private IHolderEntity entity;
    // The held block is always addressed at (0, 0, 0) inside this wrapper.
    private BlockPos originPos = new BlockPos(0, 0, 0);

    public WorldWrapper(IHolderEntity entity) {
        this(entity.getEntity().worldObj, entity);
    }

    protected WorldWrapper(World world, IHolderEntity entity) {
        // Reuse the real world's save handler, info, provider and profiler so
        // vanilla code that inspects them keeps working.
        super(world.getSaveHandler(), world.getWorldInfo(), world.provider, world.theProfiler, world.isRemote);
        this.entity = entity;
    }

    @Override
    @Nonnull
    protected IChunkProvider createChunkProvider() {
        // No chunks of our own; hand back the inherited provider unchanged.
        return chunkProvider;
    }

    @Override
    protected boolean isChunkLoaded(int x, int z, boolean allowEmpty) {
        // Pretend every chunk is loaded so block queries never short-circuit.
        return true;
    }

    //Enderchest use this for open and close
    @Override
    public void addBlockEvent(@Nonnull BlockPos pos, Block blockIn, int eventID, int eventParam) {
        // Always routed to the held block's state; blockIn is ignored.
        this.getBlockWrapper().getBlockState().onBlockEventReceived(this, pos, eventID, eventParam);
    }

    @Override
    public boolean isSideSolid(@Nonnull BlockPos blockPos, @Nonnull EnumFacing blockSide) {
        return false;
    }

    @Override
    public TileEntity getTileEntity(@Nonnull BlockPos blockPos) {
        // Only the held block's tile entity exists in this wrapper.
        if (blockPos.equals(originPos)) {
            return this.getBlockWrapper().getTileEntity();
        }
        return null;
    }

    @Override
    public Entity getEntityByID(int id) {
        return this.getWorld().getEntityByID(id);
    }

    //Most Blocks use this.
    @Override
    @Nonnull
    public IBlockState getBlockState(@Nonnull BlockPos blockPos) {
        // The held block at the origin; air everywhere else.
        if (blockPos.equals(originPos)) {
            return this.getBlockWrapper().getBlockState();
        }
        return Blocks.AIR.getDefaultState();
    }

    //Enderchest Particles
    @Override
    public void spawnParticle(EnumParticleTypes enumParticleType, double posX, double posY, double posZ, double velX,
                              double velY, double velZ, @Nonnull int... what) {
        // Translate wrapper-local coordinates to the entity's world position.
        // NOTE(review): the trailing varargs are dropped in the forwarded
        // call -- confirm no caller relies on them.
        this.getWorld().spawnParticle(enumParticleType,posX + this.getPosX(), posY + this.getPosY(), posZ +
                this.getPosZ(), velX, velY, velZ);
    }

    //Infinitato tries to get Entities and add potion effects
    //TODO: Actually get the right AABB
    @Override
    @Nonnull
    public <T extends Entity> List<T> getEntitiesWithinAABB(@Nonnull Class<? extends T> entityClass,
                                                            @Nonnull AxisAlignedBB axisAlignedBB) {
        return this.getWorld().getEntitiesWithinAABB(entityClass, axisAlignedBB);
    }

    //Infinitato creates explosions when it lands
    @Override
    @Nonnull
    public Explosion createExplosion(Entity entity, double posX, double posY, double posZ, float size, boolean damage) {
        // NOTE(review): posX/posY/posZ arguments are ignored -- the explosion
        // is always placed at the holder entity; confirm that is intended.
        return this.getWorld().createExplosion(entity, this.getPosX(), this.getPosY(), this.getPosZ(), size, damage);
    }

    //Shia Labouef tiny potato screams "Just do it"
    @Override
    public void playSound(@Nullable EntityPlayer player, BlockPos pos, @Nonnull SoundEvent sound,
                          @Nonnull SoundCategory category, float volume, float pitch) {
        // Sound is emitted at the entity's position, not the requested pos.
        this.getWorld().playSound(player, this.getEntity().getPosition(), sound, category, volume, pitch);
    }

    @Override
    @Nonnull
    public Chunk getChunkFromChunkCoords(int chunkX, int chunkZ) {
        return this.getWorld().getChunkFromChunkCoords(chunkX, chunkZ);
    }

    @Override
    public void markChunkDirty(@Nonnull BlockPos pos, @Nonnull TileEntity tileEntity) {
        // Persistence goes through the block wrapper, not chunk data.
        this.getBlockWrapper().markDirty();
    }

    @Override
    public boolean isBlockPowered(BlockPos pos) {
        // Redstone power comes from the holder entity, not from neighbors.
        return this.getHolderEntity().getRedstonePower();
    }

    @Override
    public boolean setBlockToAir(@Nonnull BlockPos pos) {
        // "Breaking" the held block kills the holder and spawns its empty
        // counterpart back into the real world.
        this.getEntity().setDead();
        this.getWorld().spawnEntityInWorld(this.getHolderEntity().getEmptyEntity());
        return true;
    }

    /** @return the entity carrying the wrapped block */
    public Entity getEntity() {
        return this.getHolderEntity().getEntity();
    }

    public IHolderEntity getHolderEntity() {
        return this.entity;
    }

    public IBlockWrapper getBlockWrapper() {
        return this.getHolderEntity().getBlockWrapper();
    }

    public double getPosX() {
        return this.getEntity().posX;
    }

    public double getPosY() {
        return this.getEntity().posY;
    }

    public double getPosZ() {
        return this.getEntity().posZ;
    }

    /** @return the real world the holder entity lives in */
    public World getWorld() {
        return this.getEntity().worldObj;
    }

    /** Notify the held block that its (virtual) neighborhood changed. */
    public void notifyBlocks() {
        this.notifyBlockOfStateChange(BlockPos.ORIGIN, this.getBlockState(BlockPos.ORIGIN).getBlock());
    }
}
|
#!/bin/bash
# General m3u8 Live Stream Recorder
#
# Usage: $0 m3u8_url [loop]
#   With "loop" as the second argument, retry every 30 seconds after ffmpeg
#   exits (e.g. when the stream goes down); otherwise record exactly once.

# Idiomatic emptiness test (was: [[ ! -n "$1" ]]).
if [[ -z "$1" ]]; then
    echo "usage: $0 m3u8_url [loop]"
    exit 1
fi

while true; do
    # Record using MPEG-2 TS format to avoid broken file caused by interruption
    FNAME="stream_$(date +"%Y%m%d_%H%M%S").ts"
    ffmpeg -i "$1" -codec copy -f mpegts "$FNAME"

    # Single-shot mode: stop after the first recording attempt.
    [[ "$2" != "loop" ]] && break

    LOG_PREFIX=$(date +"[%Y-%m-%d %H:%M:%S]")
    echo "$LOG_PREFIX The stream is not available now."
    echo "$LOG_PREFIX Retry after 30 seconds..."
    sleep 30
done
|
var express = require('express') // web server
var sass = require('node-sass-middleware') // CSS compiler
var path = require('path') // (core)

// Export the app so tests can require() it without starting a second server.
var app = module.exports = express()

app.set('view engine', 'pug')
app.locals.pretty = true // don't minify HTML

// compile CSS from SASS on request, pulling in GOV.UK toolkit stylesheets
app.use(sass({
  src: path.join(__dirname, 'public'),
  includePaths: [
    path.join(__dirname, 'node_modules', 'govuk-elements-sass', 'public', 'sass'),
    path.join(__dirname, 'node_modules', 'govuk_frontend_toolkit', 'stylesheets')
  ]
}))

// serve it
app.use('/', express.static(path.join(__dirname, 'public')))
// serve icons too
app.use('/', express.static(path.join(__dirname, 'node_modules', 'govuk_frontend_toolkit', 'images')))

// serve routes :)
// NOTE(review): presumably ./routes attaches handlers via the exported app
// above rather than returning anything -- confirm against that module.
require('./routes') // is this portable?

if (app.get('env') === 'test') {
  // make it easier to develop and test at the same time
  app.listen(8080)
} else {
  app.listen(3000) // seems standard for node apps
}
|
#!/usr/bin/env bash

# Run a SQL statement as the "postgres" superuser and print bare tuples.
# $1: the SQL command to execute. ON_ERROR_STOP=1 aborts on the first error;
# unaligned, tuples-only output makes the result script-friendly.
# NOTE(review): the doubled "sudo sudo -u postgres" looks redundant -- a
# single "sudo -u postgres" would normally suffice; confirm before changing.
__db_pg_su_exec__()
{
    sudo sudo -u postgres psql --no-align --tuples-only --variable=ON_ERROR_STOP=1 --quiet --command="${1}"
    # Propagate psql's exit status to the caller.
    return "${?}"
}

# Service-control shortcuts for the local PostgreSQL instance.
alias db_pg_restart='sudo service postgresql restart'
alias db_pg_start='sudo service postgresql start'
#alias db_pg_status='sudo service postgresql status'
alias db_pg_stop='sudo service postgresql stop'
alias db_pg_su_exec=__db_pg_su_exec__
|
## Import data
# Load retrosheet-style .EVE event files into a single tagged DataFrame.
import os as os
import glob as glob
import pandas as pd

# File management
game_files = glob.glob(os.path.join(os.getcwd(), 'games', '*.EVE'))
# Sort file names
game_files.sort() # sorts in place. sorted(list) would create a new list

# create a loop to import data
game_frames = [] # create empty list
for game_file in game_files:
    # Event files have no header row; assign fixed column names.
    game_frame = pd.read_csv(game_file, names=['type', 'multi2', 'multi3', 'multi4', 'multi5', 'multi6', 'event'])
    game_frames.append(game_frame)

# concatenate dataframes
games = pd.concat(game_frames)

# replace values
games.loc[games['multi5'] =='??', 'multi5'] = ''

# extract feature and clean columns
# NOTE(review): the regex captures an id like "?LS<yyyy><nnnnn>" plus its
# 4-digit year group -- confirm against the actual .EVE id format.
identifiers = games['multi2'].str.extract(r'(.LS(\d{4})\d{5})')
# Forward-fill so every event row inherits the game id of its header row.
identifiers = identifiers.fillna(method='ffill')
identifiers.columns = ['game_id', 'year'] # rename columns
games = pd.concat([games, identifiers], axis=1, sort=False)

# Fill NaN values
games = games.fillna(' ')

# Reduce memory usage by assigning data type to pandas
games.loc[:, 'type'] = pd.Categorical(games.loc[:, 'type'])

# print dataframe
print(games.head())
|
#!/usr/bin/env bats

# Smoke tests for the cidb CLI dispatcher: help output and sub-command
# dispatch. Requires cidb on PATH and the shared bats_helper.
load bats_helper

@test "Displays help with no args" {
    run cidb
    [ "$status" -eq 0 ]
    [ "${lines[0]}" == 'USAGE:' ]
    [ "${lines[1]}" == ' cidb [COMMAND] [OPTIONS]' ]
}

@test "Dispatch to scan --help" {
    run cidb scan --help
    [ "$status" -eq 0 ]
    [ "${lines[1]}" == ' cidb-scan [OPTIONS] [PATH]' ]
}

# @test "Can scan for its own repo" {
# gh-repo-scan ls | grep github-scanner
# }
|
#!/usr/bin/env bash
# Twilight colors for Tmux
# Style: twilight_dark
# Upstream: https://github.com/jzone1366/twilight.nvim/raw/main/extra/dark/twilighttmux.tmux

# Copy-mode selection and message-line colors.
set -g mode-style "fg=#1c88ce,bg=#26374d"
set -g message-style "fg=#1c88ce,bg=#26374d"
set -g message-command-style "fg=#1c88ce,bg=#26374d"

# Pane border colors (active pane highlighted in blue).
set -g pane-border-style "fg=#26374d"
set -g pane-active-border-style "fg=#1c88ce"

# Status bar layout: session name on the left; prefix indicator, clock and
# hostname on the right.
set -g status "on"
set -g status-justify "left"
set -g status-style "fg=#1c88ce,bg=#0e1926"
set -g status-left-length "100"
set -g status-right-length "100"
set -g status-left-style NONE
set -g status-right-style NONE
set -g status-left "#[fg=#123059,bg=#1c88ce,bold] #S "
set -g status-right "#[fg=#1c88ce,bg=#0e1926] #{prefix_highlight} #[fg=#1c88ce,bg=#26374d] %Y-%m-%d | %I:%M %p #[fg=#123059,bg=#1c88ce,bold] #h "

# Window list styling; the current window is rendered bold on a darker blue.
setw -g window-status-activity-style "underscore,fg=#26374d,bg=#0e1926"
setw -g window-status-separator ""
setw -g window-status-style "NONE,fg=#26374d,bg=#0e1926"
setw -g window-status-format "#[default] #I | #W #F "
setw -g window-status-current-format "#[fg=#1c88ce,bg=#26374d,bold] #I | #W #F "
|
/*
* Copyright 2002 Sun Microsystems, Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any
* kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
* WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
* EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES
* SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR
* DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN
* OR ITS LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR
* FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR
* PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY OF
* LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that Software is not designed, licensed or intended
* for use in the design, construction, operation or maintenance of
* any nuclear facility.
*/
package com.sun.j2ee.blueprints.xmldocuments;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.Element;
/**
 * Value object describing an order status change, convertible to and from the
 * XML form {@code <Order><OrderId>...</OrderId><OrderStatus>...</OrderStatus></Order>}.
 */
public class ChangedOrder {
    public static final String XML_ORDER = "Order";
    public static final String XML_ORDERID = "OrderId";
    public static final String XML_ORDERSTATUS = "OrderStatus";
    private String orderId;
    private String orderStatus;

    public ChangedOrder(String id, String stat) {
        orderId = id;
        orderStatus = stat;
    }

    private ChangedOrder() {} // Used by the fromDOM() factory method

    public String getOrderId() {
        return(orderId);
    }

    public String getOrderStatus() {
        return orderStatus;
    }

    /**
     * Builds a new standalone Document whose root element is this order's
     * XML form.
     * @throws XMLDocumentException if the document cannot be created
     */
    public Document toDOM() throws XMLDocumentException {
        Document document = XMLDocumentUtils.createDocument();
        Element root = (Element) toDOM(document);
        document.appendChild(root);
        return document;
    }

    /**
     * Creates this order's {@code Order} element owned by the given document
     * (the element is returned unattached).
     */
    public Node toDOM(Document document) {
        Element root = document.createElement(XML_ORDER);
        XMLDocumentUtils.appendChild(document, root, XML_ORDERID, orderId);
        XMLDocumentUtils.appendChild(document, root, XML_ORDERSTATUS, orderStatus);
        return root;
    }

    /**
     * Parses a ChangedOrder out of an {@code Order} element.
     * @throws XMLDocumentException if node is not a well-formed Order element
     */
    public static ChangedOrder fromDOM(Node node) throws XMLDocumentException {
        Element element;
        if (node.getNodeType() == Node.ELEMENT_NODE && (element = ((Element) node)).getTagName().equals(XML_ORDER)) {
            Element child;
            ChangedOrder changedOrder = new ChangedOrder();
            // `child` keeps the last-read element so the second lookup can
            // continue from its following sibling.
            changedOrder.orderId = XMLDocumentUtils.getContentAsString(child = XMLDocumentUtils.getFirstChild(element, XML_ORDERID, false), false);
            changedOrder.orderStatus = XMLDocumentUtils.getContentAsString(child = XMLDocumentUtils.getNextSibling(child, XML_ORDERSTATUS, false), false);
            return changedOrder;
        }
        throw new XMLDocumentException(XML_ORDER + " element expected.");
    }
}
|
<reponame>stobias123/ruby-twitch-api
# frozen_string_literal: true
# Integration specs for Twitch::Client, driven by recorded HTTP cassettes
# (:vcr). Credentials come from environment variables so the cassettes can be
# re-recorded against the live API.
RSpec.describe Twitch::Client, :vcr do
  subject(:client) do
    described_class.new(
      client_id: client_id,
      client_secret: client_secret,
      ## Optional parameters below
      access_token: access_token,
      refresh_token: refresh_token,
      token_type: token_type,
      scopes: scopes,
      redirect_uri: redirect_uri
    )
  end

  let(:client_id) { ENV['TWITCH_CLIENT_ID'] }
  let(:client_secret) { ENV['TWITCH_CLIENT_SECRET'] }
  let(:access_token) { ENV['TWITCH_ACCESS_TOKEN'] }
  # NOTE(review): placeholder left by a secrets scrubber; any invalid token
  # value works here because the cassette drives the response.
  let(:outdated_access_token) { '<PASSWORD>' }
  let(:refresh_token) { ENV['TWITCH_REFRESH_TOKEN'] }
  let(:token_type) { :application }
  let(:scopes) { [] }
  let(:redirect_uri) { 'http://localhost' }

  describe '#get_bits_leaderboard' do
    subject(:body) { client.get_bits_leaderboard.body }

    # Bits leaderboard requires the bits:read scope.
    let(:scopes) { %w[bits:read] }

    context 'when `token_type` is `user`' do
      let(:token_type) { :user }

      let(:expected_result) do
        { 'data' => [], 'date_range' => { 'ended_at' => '', 'started_at' => '' }, 'total' => 0 }
      end

      context 'with `access_token`' do
        context 'when `access_token` is actual' do
          it { is_expected.to eq expected_result }
        end

        context 'when `access_token` is outdated' do
          let(:access_token) { outdated_access_token }

          # An expired token should be refreshed transparently when a
          # refresh token is available.
          context 'with `refresh_token`' do
            it { is_expected.to eq expected_result }
          end

          context 'without `refresh_token`' do
            let(:refresh_token) { nil }

            it { expect { body }.to raise_error TwitchOAuth2::Error, 'missing refresh token' }
          end
        end
      end

      context 'without tokens' do
        let(:access_token) { nil }
        let(:refresh_token) { nil }

        let(:redirect_params) do
          URI.encode_www_form_component URI.encode_www_form(
            client_id: client_id,
            redirect_uri: redirect_uri,
            response_type: :code,
            scope: scopes.join(' ')
          )
        end

        let(:expected_login_url) do
          "https://www.twitch.tv/login?client_id=#{client_id}&redirect_params=#{redirect_params}"
        end

        # With no tokens at all the client surfaces the login link through
        # the error's metadata.
        it do
          expect { body }.to raise_error an_instance_of(TwitchOAuth2::Error)
            .and having_attributes(
              message: 'Use `error.metadata[:link]` for getting new tokens',
              metadata: { link: expected_login_url }
            )
        end
      end
    end

    context 'when `token_type` is `application`' do
      let(:token_type) { :application }

      context 'without tokens' do
        let(:access_token) { nil }
        let(:refresh_token) { nil }

        it { expect { body }.to raise_error Twitch::APIError, 'Missing User OAUTH Token' }
      end
    end
  end

  describe '#get_clips' do
    subject { client.get_clips(id: 'ObliqueEncouragingHumanHumbleLife').data }

    let(:broadcaster_id_greekgodx) { 15_310_631 }

    it { is_expected.not_to be_empty }

    describe '#broadcaster_id' do
      subject { super().first.broadcaster_id }

      it { is_expected.to eq broadcaster_id_greekgodx.to_s }
    end
  end

  describe '#get_streams' do
    subject { client.get_streams(**kwargs).data }

    context 'with empty kwargs' do
      let(:kwargs) { {} }

      # The API's default page size is 20.
      describe 'data length' do
        subject { super().length }

        it { is_expected.to eq(20) }
      end
    end

    context 'with username' do
      let(:kwargs) { { user_login: 'SunsetClub' } }

      it { is_expected.not_to be_empty }

      describe 'viewer_count' do
        subject { super().first.viewer_count }

        it { is_expected.to be_an(Integer) }
      end
    end
  end

  describe '#get_users' do
    subject { client.get_users(id: 18_587_270).data }

    it { is_expected.not_to be_empty }

    describe 'login' do
      subject { super().first.login }

      it { is_expected.to eq 'day9tv' }
    end
  end

  describe '#get_games' do
    subject { client.get_games(name: games_names) }

    let(:games_names) { ['Heroes of the Storm', 'Super Mario Odyssey'] }

    describe 'data length' do
      subject { super().data.length }

      it { is_expected.to eq games_names.size }
    end
  end

  describe '#get_top_games' do
    subject { client.get_top_games(first: limit) }

    let(:limit) { 5 }

    describe 'data length' do
      subject { super().data.length }

      it { is_expected.to eq limit }
    end
  end

  describe '#get_videos' do
    subject { client.get_videos(user_id: 9_846_758) }

    describe 'data' do
      subject { super().data }

      it { is_expected.not_to be_empty }
    end

    describe 'pagination cursor' do
      subject { super().pagination['cursor'] }

      it { is_expected.not_to be_nil }
    end
  end
end
|
#!/bin/bash
# Build the project with sbt inside a Docker container, mapping local caches
# into the container so repeated builds stay fast.
set -x

# Pre-create the cache dirs so Docker does not create them root-owned.
mkdir -p ~/.ivy2 ~/.sbt ~/.m2 ~/.sbt_cache

# Quote command substitutions so paths/ids containing spaces do not
# word-split (was unquoted $(pwd) and $(id -u):$(id -g)). Run as the invoking
# user so build artifacts stay user-owned.
docker run --rm -v "$(pwd)":/app/build \
    --user "$(id -u)":"$(id -g)" \
    -v ~/.m2:/app/.m2 \
    -v ~/.ivy2:/app/.ivy2 \
    -v ~/.sbt:/app/.sbt \
    -v ~/.sbt_cache:/app/.cache \
    -w /app/build hseeberger/scala-sbt:8u282_1.5.2_2.11.12 \
    sbt -Duser.home=/app clean compile
<gh_stars>0
# encoding: UTF-8

# Migration: add a nullable boolean "public" flag to the surveys table.
class AddPublicToSurveys < ActiveRecord::Migration[5.2]
  def self.up
    add_column :surveys, :public, :boolean
  end

  def self.down
    remove_column :surveys, :public
  end
end
|
#!/bin/bash
# Run the CMIP5 analysis over the RCP4.5 monthly-ocean "zostoga" (global mean
# thermosteric sea level) datasets, one id per model, comma-separated as
# expected by analyse.py's -d option.
ds_ids=cmip5.output1.MOHC.HadGEM2-ES.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.MOHC.HadGEM2-CC.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.BCC.bcc-csm1-1.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.BCC.bcc-csm1-1-m.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.CCCma.CanCM4.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.CMCC.CMCC-CM.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.CMCC.CMCC-CMS.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.CSIRO-BOM.ACCESS1-0.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.CSIRO-BOM.ACCESS1-3.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.CSIRO-QCCCE.CSIRO-Mk3-6-0.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.INM.inmcm4.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.IPSL.IPSL-CM5A-LR.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.IPSL.IPSL-CM5A-MR.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.MIROC.MIROC-ESM-CHEM.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.MIROC.MIROC4h.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.MIROC.MIROC5.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.MPI-M.MPI-ESM-MR.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.MRI.MRI-CGCM3.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.NASA-GISS.GISS-E2-R.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.NASA-GISS.GISS-E2-R-CC.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.NCAR.CCSM4.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.NCC.NorESM1-M.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga,cmip5.output1.NCC.NorESM1-ME.rcp45.mon.ocean.Omon.r1i1p1.latest.zostoga
python analyse.py -d $ds_ids cmip5
|
# isomeasurer.py
# ALS 2017/06/01
import os
import astropy.units as u
from astropy.io import fits
import numpy as np
import astropy.table as at
import pickle
import scipy.ndimage as simg
from ..measurer import Measurer
from ... import tabtools
from . import polytools
from . import plottools
class isoMeasurer(Measurer):
    """Measurer subclass that performs isophotal measurements on maps."""

    def __init__(self, **kwargs):
        """
        child of Measurer
        do isophotal measurements
        """
        super(isoMeasurer, self).__init__(**kwargs)
        self.msrtype = 'iso'

    def get_fp_contours(self, imgtag='OIII5008_I', onlycenter=False, suffix=''):
        """Return the contour pickle path, e.g.
        msr_iso-OIII5008_I{suffix}_contours.pkl, or
        msr_iso-OIII5008_I{suffix}_contours-ctr.pkl when onlycenter is True.
        """
        if onlycenter:
            ctrtag = '-ctr'
        else:
            ctrtag = ''
        fp_root = self.get_fp_msrtagroot(imgtag=imgtag, suffix=suffix)
        return fp_root+'_contours{ctrtag}.pkl'.format(ctrtag=ctrtag)

    def make_measurements(self, imgtag='OIII5008_I', isocut=3.e-15*u.Unit('erg / (arcsec2 cm2 s)'), minarea=5, onlycenter=True, centerradius=5.*u.arcsec, plotsuffix='', savecontours=False, plotmsr=False, msrsuffix='', overwrite=False, append=False):
        """
        make measurements on a map and write to msr_iso.csv.
        if imgtag='OIII5008_I' then measure 'stamp-OIII5008_I.fits'

        Params
        ------
        self
        imgtag='OIII5008_I'
        isocut=3.e-15*u.Unit('erg / (arcsec2 cm2 s)'):
            isophote cut
        minarea=5:
            connected contour area (# pix) above which a contour is counted
            as part of the isophote measurement
        onlycenter=True:
            whether to consider only the center contours
        centerradius=5.*u.arcsec
        plotsuffix = '':
            suffix label attached to the end of the plot or contour file names
        savecontours=False
        plotmsr=False
        msrsuffix='':
            suffix label at the end of the measurement csv file:
            msr_iso_{msrsuffix}.csv
        overwrite=False
        append=False

        Return
        ------
        status (bool)

        Write Output
        ------------
        e.g., msr_iso.csv
        """
        fn = self.get_fp_msr(msrsuffix=msrsuffix)
        condi = {'imgtag': imgtag, 'isocut': isocut, 'minarea': minarea, 'onlycenter': onlycenter, 'centerradius': centerradius}
        if append or overwrite or (not tabtools.fn_has_row(fn, condi)):
            print("[isomeasurer] making measurement")
            img = self.get_stamp_img(imgtag=imgtag, wunit=True)
            xc, yc = self._get_xc_yc(img)
            # calc
            if np.all(~np.isnan(img)):
                contours = self._get_contours_from_img(img=img, isocut=isocut, xc=xc, yc=yc, minarea=minarea, onlycenter=onlycenter, centerradius=centerradius)
                tab_msr = self._get_tab_measurements_from_contours(contours=contours, xc=xc, yc=yc)
            else:
                # maps containing NaNs cannot be contoured -- record NaN rows
                contours = []
                tab_msr = self._get_tab_measurements_nan()
            tab_params = self._get_tab_params(imgtag=imgtag, isocut=isocut, minarea=minarea, onlycenter=onlycenter, centerradius=centerradius)
            tabout = at.hstack([tab_params, tab_msr])
            # output
            tabtools.write_row(fn=fn, row=tabout, condi=condi, overwrite=overwrite, append=append)
            # optional output
            if savecontours:
                fn_contours = self.get_fp_contours(imgtag=imgtag, onlycenter=onlycenter, suffix=plotsuffix)
                write_pickle(contours, fn_contours, overwrite=overwrite)
            if plotmsr:
                fn_plot = self.get_fp_msrplot(imgtag=imgtag, suffix=plotsuffix)
                plottools.make_plot_img_w_contours(fn_plot=fn_plot, img=img, contours=contours)
        else:
            print("[isomeasurer] skip making measurement as files exist")
        return os.path.isfile(fn)

    def make_visualpanel(self, fn=None, compo_bands='gri', imgtag='OIII5008_I', onlycenter=True, minarea=5, centerradius=5.*u.arcsec, tocolorbar=True, totitle=True, fontsize=12, overwrite=False):
        """
        make panel figure to visualize the composite and the iso measurements,
        saved to e.g., 'msr_iso-OIII5008_I_panel.pdf'

        Params
        ------
        fn = None: default: msr_iso_{imgtag}_panel.pdf
        compo_bands='gri', imgtag='OIII5008_I', overwrite=False

        Return
        ------
        status
        """
        if fn is None:
            fn = self.get_fp_msrplot(imgtag=imgtag, suffix='_panel')
        else:
            fn = self.dir_obj+fn
        if not os.path.isfile(fn) or overwrite:
            print("[isomeasurer] making visual panel")
            # get files ready
            self.make_colorimg(bands=compo_bands, img_type='stamp', overwrite=False)
            # access data
            # NOTE(review): scipy.ndimage.imread was removed in scipy >= 1.2;
            # this requires an older scipy (or a switch to imageio) -- confirm.
            img_compo = simg.imread(self.dir_obj+'color_stamp-{}.png'.format(compo_bands))
            img_map = self.get_stamp_img(imgtag=imgtag, wunit=False)

            # contours at the 3e-15 isophote; re-measure if the pickle is
            # missing. (Fixed: a stray trailing comma made this call a
            # one-element tuple statement.)
            suffix = '_3e-15'
            isocut = 3.e-15*u.Unit('erg / (arcsec2 cm2 s)')
            fn_contours3 = self.get_fp_contours(imgtag=imgtag, onlycenter=onlycenter, suffix=suffix)
            if not os.path.isfile(fn_contours3):
                print("[isomeasurer] re-doing measurements to make contours required for visual panel plots")
                self.make_measurements(imgtag=imgtag, isocut=isocut, plotsuffix=suffix, minarea=minarea, onlycenter=onlycenter, centerradius=centerradius, overwrite=True, savecontours=True, plotmsr=False)
            contours3 = read_pickle(fn_contours3)

            # contours at the 1e-15 isophote; same re-measure fallback
            suffix = '_1e-15'
            isocut = 1.e-15*u.Unit('erg / (arcsec2 cm2 s)')
            fn_contours1 = self.get_fp_contours(imgtag=imgtag, onlycenter=onlycenter, suffix=suffix)
            if not os.path.isfile(fn_contours1):
                print("[isomeasurer] re-doing measurements to make contours required for visual panel plots")
                self.make_measurements(imgtag=imgtag, isocut=isocut, plotsuffix=suffix, minarea=minarea, onlycenter=onlycenter, centerradius=centerradius, overwrite=True, savecontours=True, plotmsr=False)
            contours1 = read_pickle(fn_contours1)

            z = self.z
            pixsize = self.pixsize.to_value(u.arcsec)
            legend_suffix = ' at 3'
            name = self.obj.name[4:]

            # raw strings keep the LaTeX backslashes literal (previously
            # relied on invalid escape sequences like '\m')
            title_compo = '${}~{}~{}~$'.format(compo_bands[0], compo_bands[1], compo_bands[2])+r'$\mathrm{Composite}$'
            title_map = r'$\mathrm{[OIII]\lambda 5007~Intensity}$'
            label_cbar = r'$I~[10^{-15}~\mathrm{erg~s^{-1}~cm^{-2}~arcsec^{-2}}]$'

            plottools.make_iso_visual_panel(fn, img_compo, img_map, contours1, contours3, z, pixsize, legend_suffix, name, title_compo, title_map, label_cbar, tocolorbar=tocolorbar, totitle=totitle, fontsize=fontsize)
        else:
            print("[isomeasurer] skip making visual panel as files exist")
        return os.path.isfile(fn)

    def _get_tab_params(self, imgtag, isocut, minarea, onlycenter, centerradius):
        """
        return a one-row table of the measurement params
        """
        tab = at.Table([[imgtag], [str(isocut)], [minarea], [onlycenter], [str(centerradius)], ], names=['imgtag', 'isocut', 'minarea', 'onlycenter', 'centerradius', ])
        return tab

    def _get_tab_measurements_from_contours(self, contours, xc, yc):
        """
        calculate iso measurements from contours and return them as a one-row
        table carrying both pixel and physical (arcsec / proper kpc) units
        """
        tab = polytools.ShapeParamsTab_from_contours(contours, xc, yc)
        # unit conversion: pixel -> arcsec -> proper kpc
        area_ars = tab['area_pix'][0]*(self.pixsize/u.arcsec)**2
        dmax_ars = self._pix_to_theta(tab['dmax_pix'][0], wunit=False)
        rmax_ars = self._pix_to_theta(tab['rmax_pix'][0], wunit=False)
        dper_ars = self._pix_to_theta(tab['dper_pix'][0], wunit=False)
        kpc_per_arcsec = np.array(self._get_kpc_proper_per_arcsec())
        area_kpc = area_ars * kpc_per_arcsec**2
        dmax_kpc = dmax_ars * kpc_per_arcsec
        rmax_kpc = rmax_ars * kpc_per_arcsec
        dper_kpc = dper_ars * kpc_per_arcsec
        tab_converted = at.Table(names=['area_kpc', 'dmax_kpc', 'rmax_kpc', 'dper_kpc', 'area_ars', 'dmax_ars', 'rmax_ars', 'dper_ars', ])
        tab_converted.add_row([area_kpc, dmax_kpc, rmax_kpc, dper_kpc, area_ars, dmax_ars, rmax_ars, dper_ars, ])
        tabout = at.hstack([tab_converted, tab])
        return tabout

    def _get_tab_measurements_nan(self):
        """
        return a table like _get_tab_measurements_from_contours() but with
        all entries NaN (used when the map cannot be measured)
        """
        names = ['area_kpc', 'dmax_kpc', 'rmax_kpc', 'dper_kpc', 'area_ars', 'dmax_ars', 'rmax_ars', 'dper_ars', 'area_pix', 'dmax_pix', 'rmax_pix', 'dper_pix', 'theta_dmax', 'theta_rmax', 'theta_dper', 'aspectr']
        tabout = at.Table(names=names)
        tabout.add_row([np.nan for i in range(len(names))])
        return tabout

    def _get_contours_from_img(self, img, isocut, xc, yc, minarea=0., onlycenter=False, centerradius=2.*u.arcsec):
        """
        make contours of the image at the isocut level

        Params
        ------
        self
        img (array)
        isocut (float or quantity):
            has to be of the same type of unit as image
        minarea (float):
            minimum area (pix) to be considered as contour patch
        onlycenter (bool):
            whether to take only center patches (they still have to pass the
            minarea test)
        centerradius (angular quantity):
            if onlycenter is True, sets the radius of the center area; only
            patches overlapping that area are considered
        """
        # prep: normalize by the cut level so the contour threshold is 1
        # (narrowed from a bare `except:` -- the probe only tests whether the
        # image carries an astropy unit)
        try:
            img.unit
        except AttributeError:
            img_nparr = img/isocut
        else:
            img_nparr = np.array((img/isocut).to(u.dimensionless_unscaled))
        # find contours -- satisfy minarea
        contours = polytools.find_largecontours(img=img_nparr, threshold=1., minarea=minarea)
        if onlycenter:  # select only those at the center
            centerradius_pix = self._theta_to_pix(centerradius)
            contours = polytools.select_center_contours(contours, xc, yc, radius=centerradius_pix)
        return contours
def read_pickle(fn):
    """Load and return the object pickled in file ``fn``."""
    with open(fn, 'rb') as handle:
        return pickle.load(handle)
def write_pickle(result, fn, overwrite=False):
    """Pickle ``result`` to file ``fn``.

    An existing file is left untouched unless ``overwrite`` is True.
    """
    if overwrite or not os.path.isfile(fn):
        with open(fn, 'wb') as handle:
            pickle.dump(result, handle)
<reponame>viniciusvts/imcCalculo<filename>src/directives/directives.module.ts
import { NgModule } from '@angular/core';
import { CalculosDirective } from './calculos/calculos';
/**
 * Angular module bundling the IMC-calculation directives: declares
 * CalculosDirective and re-exports it so importing modules can use it in
 * their templates.
 */
@NgModule({
  declarations: [CalculosDirective],
  imports: [],
  exports: [CalculosDirective]
})
export class DirectivesModule {}
|
package net.yotvoo.asterd.app;
import javafx.application.Application;
import javafx.stage.Stage;
/**
 * @author <NAME>
 * Simple game based on classic Asteroids game
 * Made for fun and learning purposes
 */
@SuppressWarnings({"FieldCanBeLocal", "unused"})
public class AsterDroidsApp extends Application {
    private static GameLogic gameLogic;

    /** Convenience console logger used across the app. */
    static void log(String string){
        System.out.println(string);
    }

    /**
     * JavaFX entry point: wires together sound, view, input control and the
     * game logic, then hands the logic back to the view.
     */
    @Override
    public void start(Stage stage) throws Exception {
        Sound sound = new Sound();
        GameView gameView = new GameView(stage);
        Control control = new Control(gameView);
        gameLogic = new GameLogic(gameView, control, sound);
        gameView.setGameLogic(gameLogic);
    }

    public static void main(String[] args) {
        launch(args);
    }
}
|
// Column indexes into each code39Data row.
const CODE_39_CHAR = 0;         // the encodable character
const CODE_39_CHECKSUM_VAL = 1; // its mod-43 checksum value (NaN for '*')
const CODE_39_BITS = 2;         // 12-bit bar/space pattern (1 = bar, 0 = space)

// Code 39 symbol table. NOTE(review): the four extended chars ($ / + %) with
// checksum values 39-42 are not present in this table.
const code39Data = [
    ["1", 1, "110100101011"],
    ["2", 2, "101100101011"],
    ["3", 3, "110110010101"],
    ["4", 4, "101001101011"],
    ["5", 5, "110100110101"],
    ["6", 6, "101100110101"],
    ["7", 7, "101001011011"],
    ["8", 8, "110100101101"],
    ["9", 9, "101100101101"],
    ["0", 0, "101001101101"],
    ["A", 10, "110101001011"],
    ["B", 11, "101101001011"],
    ["C", 12, "110110100101"],
    ["D", 13, "101011001011"],
    ["E", 14, "110101100101"],
    ["F", 15, "101101100101"],
    ["G", 16, "101010011011"],
    ["H", 17, "110101001101"],
    ["I", 18, "101101001101"],
    ["J", 19, "101011001101"],
    ["K", 20, "110101010011"],
    ["L", 21, "101101010011"],
    ["M", 22, "110110101001"],
    ["N", 23, "101011010011"],
    ["O", 24, "110101101001"],
    ["P", 25, "101101101001"],
    ["Q", 26, "101010110011"],
    ["R", 27, "110101011001"],
    ["S", 28, "101101011001"],
    ["T", 29, "101011011001"],
    ["U", 30, "110010101011"],
    ["V", 31, "100110101011"],
    ["W", 32, "110011010101"],
    ["X", 33, "100101101011"],
    ["Y", 34, "110010110101"],
    ["Z", 35, "100110110101"],
    ["-", 36, "100101011011"],
    [".", 37, "110010101101"],
    [" ", 38, "100110101101"],
    ["*", NaN, "100101101101"], // start/stop delimiter, no checksum value
];
// Lazily-built map from character -> its code39Data row.
let code39Lookup = null;

// Build the char -> row lookup once; subsequent calls are no-ops.
const makeCode39Lookups = function() {
    if (code39Lookup) {
        return;
    }
    code39Lookup = { };
    code39Data.forEach(function(row) {
        const ch = row[CODE_39_CHAR];
        code39Lookup[ch] = row;
    });
};
// Encode text as a Code 39 barcode bit sequence.
// @param text {String} The payload to encode; invalid characters throw.
// @param [withChecksum] {Boolean} If true, then add the mod 43 checksum. Defaults to false.
// @returns {Object} { type: "bits", data: [...] } where each entry carries
//   the encoded char, its bit pattern, and a humanReadable flag (inter-char
//   gaps are single "0" bits, not human readable).
export default function encodeCode39(text, withChecksum) {
    makeCode39Lookups();
    withChecksum = withChecksum || false;
    if (!text) {
        text = "";
    }
    if (withChecksum) {
        // Mod 43 checksum (was an unimplemented @todo): sum the checksum
        // values of the payload characters and append the character whose
        // checksum value equals that sum mod 43.
        let sum = 0;
        for (const ch of text) {
            const row = code39Lookup[ch];
            if (!row) {
                throw new Error(`Cannot encode code 39 barcode: invalid char: ${ch}`);
            }
            sum += row[CODE_39_CHECKSUM_VAL];
        }
        const checkRow = code39Data.find(function(row) {
            return row[CODE_39_CHECKSUM_VAL] === sum % 43;
        });
        if (!checkRow) {
            // Values 39-42 belong to chars ($ / + %) absent from the table.
            throw new Error(`Cannot encode code 39 barcode: unsupported checksum value: ${sum % 43}`);
        }
        text += checkRow[CODE_39_CHAR];
    }
    // Wrap the payload in the mandatory start/stop delimiters.
    text = `*${text}*`;
    const outlist = [];
    for (let i = 0; i < text.length; i++) {
        if (i !== 0) {
            // Narrow inter-character gap.
            outlist.push({
                char: "",
                bits: "0",
                humanReadable: false,
            });
        }
        const ch = text[i];
        const row = code39Lookup[ch];
        if (!row) {
            throw new Error(`Cannot encode code 39 barcode: invalid char: ${ch}`);
        }
        const bits = row[CODE_39_BITS];
        outlist.push({
            char: ch,
            bits: bits,
            humanReadable: true,
        });
    }
    return {
        type: "bits",
        data: outlist,
    };
}
|
<gh_stars>0
# Migration: add an indexed subscription_frequency reference column to the
# spree_subscriptions table.
# NOTE(review): "SpreSubscriptions" in the class name looks like a typo for
# "SpreeSubscriptions", but it must keep matching the migration filename --
# confirm before renaming.
class AddSubscriptionFrequencyReferenceToSpreSubscriptions < SpreeExtension::Migration[4.2]
  def change
    add_reference :spree_subscriptions, :subscription_frequency, index: true
  end
end
|
<filename>Casks/taskpaper.rb
# Homebrew Cask for TaskPaper, a commercial plain-text to-do app for macOS.
cask 'taskpaper' do
  version '3.1'
  sha256 '91b28efa694ab81d2b7c12082c1225d6a3095c986a540bb98df1f4f619b85687'
  # amazonaws.com is the official download host per the vendor homepage
  url "https://taskpaper.s3.amazonaws.com/assets/app/TaskPaper-#{version}.dmg"
  appcast 'https://taskpaper.s3.amazonaws.com/TaskPaper.rss',
          checkpoint: '94e5c68e5a2b997da3086e2fb3b7dff9a09e1bd0602776a37c51b05ef6e63fe9'
  name 'TaskPaper'
  homepage 'http://www.hogbaysoftware.com/products/taskpaper'
  license :commercial
  app 'TaskPaper.app'
end
|
#!/bin/sh
# Install static HTTP error pages into nginx:
#  - ensure the error-pages directory exists,
#  - inject error_page directives after the placeholder comment in nginx.conf,
#  - delete the placeholder line itself.
[ -z "${HTTP_ERRORS_DIR}" ] && HTTP_ERRORS_DIR="/opt/whalesome/packages/nginx/fpm/errors"
mkdir -p "${HTTP_ERRORS_DIR}"
# Config fragment inserted after the placeholder line.
replace_content="
error_page 404 /404.html;
error_page 403 /403.html;
error_page 500 /500.html;
error_page 502 /502.html;
error_page 503 /503.html;
error_page 504 /504.html;
location ~ /(500|502|503|504|404|403).html$ {
    root ${HTTP_ERRORS_DIR};
}
"
tmp=$(mktemp)
echo "${replace_content}" > "${tmp}"
placeholder='\# litea.nginx.placeholders.static_http_errors'
# 'r' queues the fragment for output after the matching line; 'd' then removes
# the placeholder line itself (text queued by 'r' is still emitted).
# The delete address is spelled out explicitly instead of the empty-regex
# shorthand ("//d"), which only GNU sed reliably treats as "reuse the
# previous regex".
sed -i \
	-e "/${placeholder}/r ${tmp}" \
	-e "/${placeholder}/d" \
	/etc/nginx/nginx.conf
rm "${tmp}"
define(['mocha', 'chai'], function(mocha, chai) {
    "use strict";

    // Test bootstrap: configures Mocha for BDD and starts the run.
    return function() {
        this.initialize = function() {
            console.log("Log: initialize Mocha setup");
            mocha.setup('bdd');
            /**
             * Chai needs to be exposed as a global:
             * https://github.com/chaijs/chai/issues/448
             */
            window.chai = chai;
        };

        this.runMocha = function() {
            console.log("Log: Run Mocha");
            mocha.run();
        };

        // Run the Mocha setup immediately on construction.
        this.initialize();
    };
});
<filename>data_utils.py
# -*- coding: utf-8 -*-
# file: data_utils.py
# author: songyouwei <<EMAIL>>
# Copyright (C) 2018. All Rights Reserved.
import os
import pickle
import numpy as np
import torch
from torch.utils.data import Dataset
# from pytorch_transformers import BertTokenizer
from transformers import BertTokenizer
import nlpaug.augmenter.word as naw
# Pre-instantiated nlpaug augmenters used by ABSADataset's --aug modes.
SynonymAug = naw.SynonymAug()
ContextualWordEmbsAug = naw.ContextualWordEmbsAug()
# NOTE(review): despite its name this is a second ContextualWordEmbsAug
# instance; naw.WordEmbsAug (which requires an embedding-model path) was
# probably intended — confirm before changing, the 'word_eb' aug mode uses it.
WordEmbsAug= naw.ContextualWordEmbsAug()
import wikipedia
def insert(original, new, pos):
    """Return a copy of ``original`` with ``new`` inserted at index ``pos``."""
    head, tail = original[:pos], original[pos:]
    return head + new + tail
def build_tokenizer(fnames, max_seq_len, dat_fname):
    """Load a cached Tokenizer from ``dat_fname`` or fit and cache a new one.

    When no cache exists, every third line of each dataset file (the sentence
    template containing the "$T$" aspect placeholder) is combined with the
    aspect term on the following line; the tokenizer is fitted on the joined
    corpus and pickled to ``dat_fname``.
    """
    if os.path.exists(dat_fname):
        print('loading tokenizer:', dat_fname)
        return pickle.load(open(dat_fname, 'rb'))

    pieces = []
    for fname in fnames:
        with open(fname, 'r', encoding='utf-8', newline='\n', errors='ignore') as fin:
            lines = fin.readlines()
        # Samples come in 3-line groups: template, aspect, polarity.
        for i in range(0, len(lines), 3):
            text_left, _, text_right = [s.lower().strip() for s in lines[i].partition("$T$")]
            aspect = lines[i + 1].lower().strip()
            pieces.append(text_left + " " + aspect + " " + text_right + " ")
    tokenizer = Tokenizer(max_seq_len)
    tokenizer.fit_on_text(''.join(pieces))
    pickle.dump(tokenizer, open(dat_fname, 'wb'))
    return tokenizer
def _load_word_vec(path, word2idx=None):
fin = open(path, 'r', encoding='utf-8', newline='\n', errors='ignore')
word_vec = {}
for line in fin:
tokens = line.rstrip().split()
if word2idx is None or tokens[0] in word2idx.keys():
word_vec[tokens[0]] = np.asarray(tokens[1:], dtype='float32')
return word_vec
def build_embedding_matrix(word2idx, embed_dim, dat_fname):
    """Load a cached embedding matrix or build one from GloVe vectors.

    Row 0 and row len(word2idx)+1 remain all-zero (padding / OOV); every other
    row i holds the GloVe vector of the word mapped to index i, or stays zero
    when the word is missing from the GloVe vocabulary.
    """
    if os.path.exists(dat_fname):
        print('loading embedding_matrix:', dat_fname)
        return pickle.load(open(dat_fname, 'rb'))

    print('loading word vectors...')
    # idx 0 and len(word2idx)+1 are all-zeros
    embedding_matrix = np.zeros((len(word2idx) + 2, embed_dim))
    # 300-d vectors ship in the 42B corpus file; other sizes in the twitter set.
    if embed_dim == 300:
        fname = './glove.42B.300d.txt'
    else:
        fname = './glove.twitter.27B/glove.twitter.27B.' + str(embed_dim) + 'd.txt'
    word_vec = _load_word_vec(fname, word2idx=word2idx)
    print('building embedding_matrix:', dat_fname)
    for word, i in word2idx.items():
        vec = word_vec.get(word)
        if vec is not None:
            # words not found in embedding index will be all-zeros.
            embedding_matrix[i] = vec
    pickle.dump(embedding_matrix, open(dat_fname, 'wb'))
    return embedding_matrix
def pad_and_truncate(sequence, maxlen, dtype='int64', padding='post', truncating='post', value=0):
    """Truncate ``sequence`` to at most ``maxlen`` items, then pad with ``value``.

    :param sequence: iterable of numbers to fit into a fixed-length array
    :param maxlen: target length of the returned array
    :param dtype: numpy dtype of the result
    :param padding: 'post' pads on the right, anything else pads on the left
    :param truncating: 'pre' keeps the tail of the sequence, anything else the head
    :param value: fill value used for padding
    :return: numpy array of exactly ``maxlen`` elements
    """
    x = (np.ones(maxlen) * value).astype(dtype)
    if truncating == 'pre':
        trunc = sequence[-maxlen:]
    else:
        trunc = sequence[:maxlen]
    trunc = np.asarray(trunc, dtype=dtype)
    if len(trunc) == 0:
        # Guard: with an empty sequence, x[-0:] below would alias the whole
        # array and the broadcast assignment would raise ValueError.
        return x
    if padding == 'post':
        x[:len(trunc)] = trunc
    else:
        x[-len(trunc):] = trunc
    return x
def pad(a, maxlen):
    """Right-pad ``a`` with zeros up to the next multiple of ``maxlen``.

    Fixes the original ``maxlen - len(a) % maxlen`` arithmetic, which appended
    a full extra block of ``maxlen`` zeros whenever ``len(a)`` was already an
    exact multiple of ``maxlen`` (e.g. a 20-aspect sentence with maxlen=20
    produced a length-40 vector). For all other lengths the result is
    unchanged.
    """
    return np.pad(a, (0, (-len(a)) % maxlen), 'constant')
def pad_5(a,maxlen):
    # Pads 5 zeros in front of ``a`` and ``maxlen - len(a) % maxlen`` zeros
    # after it.
    # NOTE(review): when len(a) is an exact multiple of maxlen this appends a
    # full extra block of maxlen trailing zeros (same quirk as ``pad``), and
    # the fixed front pad of 5 means the result is never exactly maxlen long.
    # Intent unclear — confirm with callers before changing.
    B = np.pad(a, (5, maxlen - len(a)%maxlen), 'constant')
    return B
class Tokenizer(object):
    """Whitespace tokenizer with an incrementally built vocabulary.

    Index 0 is implicitly reserved for padding; unknown words map to
    ``len(word2idx) + 1`` at encoding time.
    """

    def __init__(self, max_seq_len, lower=True):
        self.lower = lower
        self.max_seq_len = max_seq_len
        self.word2idx = {}
        self.idx2word = {}
        self.idx = 1  # next free index; 0 is reserved for padding

    def fit_on_text(self, text):
        """Assign the next free index to every previously unseen word."""
        if self.lower:
            text = text.lower()
        for word in text.split():
            if word not in self.word2idx:
                self.word2idx[word] = self.idx
                self.idx2word[self.idx] = word
                self.idx += 1

    def text_to_sequence(self, text, reverse=False, padding='post', truncating='post'):
        """Map words to indices, optionally reverse, then pad/truncate to max_seq_len."""
        if self.lower:
            text = text.lower()
        unknownidx = len(self.word2idx) + 1
        sequence = [self.word2idx.get(w, unknownidx) for w in text.split()]
        if not sequence:
            sequence = [0]
        if reverse:
            sequence = sequence[::-1]
        return pad_and_truncate(sequence, self.max_seq_len, padding=padding, truncating=truncating)
class Tokenizer4Bert:
    """Wrapper around HuggingFace's BertTokenizer that emits fixed-length id arrays."""

    def __init__(self, max_seq_len, pretrained_bert_name):
        self.tokenizer = BertTokenizer.from_pretrained(pretrained_bert_name)
        self.max_seq_len = max_seq_len

    def text_to_sequence(self, text, reverse=False, padding='post', truncating='post'):
        """WordPiece-tokenize ``text`` and return padded/truncated token ids."""
        tokens = self.tokenizer.tokenize(text)
        sequence = self.tokenizer.convert_tokens_to_ids(tokens)
        if not sequence:
            sequence = [0]
        if reverse:
            sequence = sequence[::-1]
        return pad_and_truncate(sequence, self.max_seq_len, padding=padding, truncating=truncating)

    def add_tokens(self, params):
        """Register extra tokens (e.g. aspect marker tokens) with the tokenizer."""
        self.tokenizer.add_tokens(params)
class ABSADataset(Dataset):
    """Aspect-based sentiment analysis dataset.

    Either loads a preprocessed .npy sample list (when opt.resplit asks for
    it) or parses a raw 3-line-per-sample text file, optionally augments the
    aspect terms, annotates every sample with multi-aspect marker tokens, and
    caches the result to disk.
    """

    def __init__(self, fname, tokenizer,process,opt):
        # :param fname: raw dataset file (3 lines per sample: template with
        #   "$T$" placeholder, aspect term, polarity in {-1, 0, 1})
        # :param tokenizer: Tokenizer4Bert (or Tokenizer) used for encoding
        # :param process: split tag ('train'/'test'/'valid') used in cache paths
        # :param opt: options namespace; reads opt.resplit, opt.aug, opt.dataset
        all_data = []
        # load==1 only when resplit==1; the guard below then raises for
        # resplit values other than 0 and 1.
        if int(opt.resplit)==1:
            load = 1
        else:
            load = 0
        if int(opt.resplit)!=0 and load ==0:
            raise RuntimeError('pls use load to load the replit')
        if load:
            # Infer the split tag from the filename for cache lookup.
            if fname.lower().find('train')!=-1:
                process='train'
            if fname.lower().find('test')!=-1:
                process='test'
            if fname.lower().find('valid')!=-1:
                process='valid'
            # if int(opt.aug)==1:
            #     all_data=np.load('./datasets/aug/{}-{}.npy'.format(process, opt.dataset),allow_pickle=True).tolist()
            # else:
            if int(opt.resplit)==1:
                print('./datasets/resplit/{}-{}.npy'.format(process, opt.dataset))
                all_data=np.load('./datasets/resplit/{}-{}.npy'.format(process, opt.dataset),allow_pickle=True).tolist()
            # NOTE(review): the resplit==2 / ==3 / ==0 branches below look
            # unreachable — load is only 1 when resplit==1, and any other
            # non-zero resplit raises above. Confirm intent.
            if int(opt.resplit)==2:
                print('./datasets/resplit/{}-{}-lw.npy'.format(process, opt.dataset))
                all_data=np.load('./datasets/resplit/{}-{}-lw.npy'.format(process, opt.dataset),allow_pickle=True).tolist()
            if int(opt.resplit)==3:
                print('./datasets/remove/{}-{}.npy'.format(process, opt.dataset))
                all_data=np.load('./datasets/remove/{}-{}.npy'.format(process, opt.dataset),allow_pickle=True).tolist()
            if int(opt.resplit)==0:
                all_data=np.load('./datasets/processed/{}-{}.npy'.format(process, opt.dataset),allow_pickle=True).tolist()
            self.data = all_data
        else:
            # ---- Parse the raw 3-line-per-sample file ----
            fin = open(fname, 'r', encoding='utf-8', newline='\n', errors='ignore')
            lines = fin.readlines()
            fin.close()
            for i in range(0, len(lines), 3):
                text_left, _, text_right = [s.lower().strip() for s in lines[i].partition("$T$")]
                aspect = lines[i + 1].lower().strip()
                polarity = lines[i + 2].strip()
                # Several textual views of the same sample (plain, sentence-pair,
                # and aspect-marker variants).
                text_raw="[CLS] " + text_left + " " + aspect + " " + text_right + " [SEP]"
                text_spc='[CLS] ' + text_left + " " + aspect + " " + text_right + ' [SEP] ' + aspect + " [SEP]"
                text_target='[CLS] ' + text_left + ' [aspect_b] '+aspect + ' [aspect_e] '+ text_right + ' [SEP] '
                text_without_cls=text_left + " " + aspect + " " + text_right + ' [SEP] ' + aspect + " [SEP]"
                text_raw_indices = tokenizer.text_to_sequence(text_left + " " + aspect + " " + text_right)
                text_target_indices = tokenizer.text_to_sequence(text_target)
                text_target_segments_ids=np.asarray([0] * (np.sum(text_target_indices != 0)))
                text_target_segments_ids = pad_and_truncate(text_target_segments_ids, tokenizer.max_seq_len)
                text_raw_without_aspect_indices = tokenizer.text_to_sequence(text_left + " " + text_right)
                text_left_indices = tokenizer.text_to_sequence(text_left)
                text_left_with_aspect_indices = tokenizer.text_to_sequence(text_left + " " + aspect)
                text_right_indices = tokenizer.text_to_sequence(text_right, reverse=True)
                text_right_with_aspect_indices = tokenizer.text_to_sequence(" " + aspect + " " + text_right, reverse=True)
                aspect_indices = tokenizer.text_to_sequence(aspect)
                left_context_len = np.sum(text_left_indices != 0)
                aspect_len = np.sum(aspect_indices != 0)
                # +1 offsets account for the leading [CLS] token.
                aspect_pos = left_context_len+1
                target_begin=left_context_len+1
                aspect_in_text = torch.tensor([left_context_len.item(), (left_context_len + aspect_len - 1).item()])
                # aspect_range = torch.LongTensor(range(left_context_len.item()+1, (left_context_len + aspect_len).item()+1))# plus [cls]
                # Shift polarity from {-1,0,1} to class ids {0,1,2}.
                polarity = int(polarity) + 1
                text_bert_indices = tokenizer.text_to_sequence('[CLS] ' + text_left + " " + aspect + " " + text_right + ' [SEP] ' + aspect + " [SEP]")
                # text_bert_indices = tokenizer.text_to_sequence('[CLS] '+ text_left + " " + aspect + " " + text_right + ' [SEP] '+ aspect + " [SEP] ")
                bert_segments_ids = np.asarray([0] * (np.sum(text_raw_indices != 0)+2) + [1] * (aspect_len + 1))
                # bert_segments_ids = np.asarray([1] * (aspect_len + 1)+[0] * (np.sum(text_raw_indices != 0) + 2))
                bert_raw_segments_ids=np.asarray([0] * (np.sum(text_raw_indices != 0)+2))
                bert_segments_ids = pad_and_truncate(bert_segments_ids, tokenizer.max_seq_len)
                bert_raw_segments_ids = pad_and_truncate(bert_raw_segments_ids, tokenizer.max_seq_len)
                text_raw_bert_indices = tokenizer.text_to_sequence("[CLS] " + text_left + " " + aspect + " " + text_right + " [SEP]")
                aspect_bert_indices = tokenizer.text_to_sequence("[CLS] " + aspect + " [SEP]")
                input_mask=torch.tensor([1]*len(text_bert_indices))
                # print(aspect_indices)
                isaug=torch.tensor(0)  # 0 marks an original (non-augmented) sample
                data = {
                    'text_target_indices':text_target_indices,
                    'text_target_segments_ids':text_target_segments_ids,
                    'text_left':text_left,
                    'text_right':text_right,
                    'aspect_pos':aspect_pos,
                    'aspect_len':aspect_len,
                    'target_begin':target_begin,
                    'text_raw': text_raw,
                    'text_spc': text_spc,
                    'text_without_cls': text_without_cls,
                    'text_aspect':aspect,
                    'left_context_len': left_context_len,
                    'text_bert_indices': text_bert_indices,
                    'bert_segments_ids': bert_segments_ids,
                    'text_raw_bert_indices': text_raw_bert_indices,
                    'aspect_bert_indices': aspect_bert_indices,
                    'text_raw_indices': text_raw_indices,
                    'bert_raw_segments_ids':bert_raw_segments_ids,
                    'text_raw_without_aspect_indices': text_raw_without_aspect_indices,
                    'text_left_indices': text_left_indices,
                    'text_left_with_aspect_indices': text_left_with_aspect_indices,
                    'text_right_indices': text_right_indices,
                    'text_right_with_aspect_indices': text_right_with_aspect_indices,
                    'aspect_indices': aspect_indices,
                    'aspect_in_text': aspect_in_text,
                    'polarity': polarity,
                    'input_mask':input_mask,
                    'isaug':isaug,
                }
                all_data.append(data)
            l=len(all_data)
            # ---- Optional aspect-term augmentation: re-encode every original
            # sample with a replacement aspect and append it as a new sample ----
            if opt.aug!=str(0):
                idx=0
                while idx in range(l):
                    print(idx)
                    data=all_data[idx]
                    text_left=data['text_left']
                    text_right=data['text_right']
                    aspect=data['text_aspect']
                    polarity=data['polarity']
                    isaug=torch.tensor(1)  # 1 marks an augmented sample
                    ori_aspect=aspect
                    if opt.aug == 'synonyms':
                        augmented_aspect = SynonymAug.augment(aspect)
                    elif opt.aug == 'contextual':
                        augmented_aspect = ContextualWordEmbsAug.augment(aspect)
                        # Skip aspects the augmenter collapsed to ".".
                        if augmented_aspect=='.':
                            # print('gg')
                            idx=idx+1
                            continue
                    elif opt.aug == 'word_eb':
                        augmented_aspect = WordEmbsAug.augment(aspect)
                        if augmented_aspect=='.':
                            idx=idx+1
                            continue
                    elif opt.aug=='wiki':
                        augmented_aspect=wikipedia.search(aspect, results=10, suggestion=False)
                        # NOTE(review): `aug` is undefined here — this print raises
                        # NameError whenever the 'wiki' branch runs. Confirm and fix.
                        print(aspect,wikipedia.search(aspect, results=10, suggestion=False),aug.augment(aspect))
                        augmented_aspect=augmented_aspect[0]
                    aspect=augmented_aspect
                    print(ori_aspect,'---->',aspect)
                    # Re-derive every encoded view for the substituted aspect
                    # (mirrors the block above; polarity is kept as-is).
                    text_raw="[CLS] " + text_left + " " + aspect + " " + text_right + " [SEP]"
                    text_spc='[CLS] ' + text_left + " " + aspect + " " + text_right + ' [SEP] ' + aspect + " [SEP]"
                    text_target='[CLS] ' + text_left + ' [aspect_b] '+aspect + ' [aspect_e] '+ text_right + ' [SEP] '
                    text_without_cls=text_left + " " + aspect + " " + text_right + ' [SEP] ' + aspect + " [SEP]"
                    text_raw_indices = tokenizer.text_to_sequence(text_left + " " + aspect + " " + text_right)
                    text_target_indices = tokenizer.text_to_sequence(text_target)
                    text_target_segments_ids=np.asarray([0] * (np.sum(text_target_indices != 0)))
                    text_target_segments_ids = pad_and_truncate(text_target_segments_ids, tokenizer.max_seq_len)
                    text_raw_without_aspect_indices = tokenizer.text_to_sequence(text_left + " " + text_right)
                    text_left_indices = tokenizer.text_to_sequence(text_left)
                    text_left_with_aspect_indices = tokenizer.text_to_sequence(text_left + " " + aspect)
                    text_right_indices = tokenizer.text_to_sequence(text_right, reverse=True)
                    text_right_with_aspect_indices = tokenizer.text_to_sequence(" " + aspect + " " + text_right, reverse=True)
                    aspect_indices = tokenizer.text_to_sequence(aspect)
                    left_context_len = np.sum(text_left_indices != 0)
                    aspect_len = np.sum(aspect_indices != 0)
                    aspect_pos = left_context_len+1
                    target_begin=left_context_len+1
                    aspect_in_text = torch.tensor([left_context_len.item(), (left_context_len + aspect_len - 1).item()])
                    # aspect_range = torch.LongTensor(range(left_context_len.item()+1, (left_context_len + aspect_len).item()+1))# plus [cls]
                    # polarity = int(polarity) + 1
                    text_bert_indices = tokenizer.text_to_sequence('[CLS] ' + text_left + " " + aspect + " " + text_right + ' [SEP] ' + aspect + " [SEP]")
                    # text_bert_indices = tokenizer.text_to_sequence('[CLS] '+ text_left + " " + aspect + " " + text_right + ' [SEP] '+ aspect + " [SEP] ")
                    bert_segments_ids = np.asarray([0] * (np.sum(text_raw_indices != 0)+2) + [1] * (aspect_len + 1))
                    # bert_segments_ids = np.asarray([1] * (aspect_len + 1)+[0] * (np.sum(text_raw_indices != 0) + 2))
                    bert_raw_segments_ids=np.asarray([0] * (np.sum(text_raw_indices != 0)+2))
                    bert_segments_ids = pad_and_truncate(bert_segments_ids, tokenizer.max_seq_len)
                    bert_raw_segments_ids = pad_and_truncate(bert_raw_segments_ids, tokenizer.max_seq_len)
                    text_raw_bert_indices = tokenizer.text_to_sequence("[CLS] " + text_left + " " + aspect + " " + text_right + " [SEP]")
                    aspect_bert_indices = tokenizer.text_to_sequence("[CLS] " + aspect + " [SEP]")
                    input_mask=torch.tensor([1]*len(text_bert_indices))
                    # print(aspect_indices)
                    data = {
                        'text_target_indices':text_target_indices,
                        'text_target_segments_ids':text_target_segments_ids,
                        'text_left':text_left,
                        'text_right':text_right,
                        'aspect_pos':aspect_pos,
                        'aspect_len':aspect_len,
                        'target_begin':target_begin,
                        'text_raw': text_raw,
                        'text_spc': text_spc,
                        'text_without_cls': text_without_cls,
                        'text_aspect':aspect,
                        'left_context_len': left_context_len,
                        'text_bert_indices': text_bert_indices,
                        'bert_segments_ids': bert_segments_ids,
                        'text_raw_bert_indices': text_raw_bert_indices,
                        'aspect_bert_indices': aspect_bert_indices,
                        'text_raw_indices': text_raw_indices,
                        'bert_raw_segments_ids':bert_raw_segments_ids,
                        'text_raw_without_aspect_indices': text_raw_without_aspect_indices,
                        'text_left_indices': text_left_indices,
                        'text_left_with_aspect_indices': text_left_with_aspect_indices,
                        'text_right_indices': text_right_indices,
                        'text_right_with_aspect_indices': text_right_with_aspect_indices,
                        'aspect_indices': aspect_indices,
                        'aspect_in_text': aspect_in_text,
                        'polarity': polarity,
                        'input_mask':input_mask,
                        'isaug':isaug,
                    }
                    all_data.append(data)
                    idx=idx+1
            # ---- Multi-aspect annotation: group consecutive samples sharing the
            # same sentence, mark every aspect with per-slot [ib]/[ie] tokens ----
            target_b=tokenizer.text_to_sequence('[target_b]')[0]
            target_e=tokenizer.text_to_sequence('[target_e]')[0]
            # aspect_b=tokenizer.text_to_sequence('[aspect_b]')[0]
            # target_e=tokenizer.text_to_sequence('[aspect_e]')[0]
            # Per-slot marker token ids/strings: '[0b]'/'[0e]', '[1b]'/'[1e]', ...
            # (assumes at most 20 aspects per sentence).
            aspect_b=[]
            aspect_e=[]
            aspect_b_tokens=[]
            aspect_e_tokens=[]
            for i in range(20):
                b='['+str(i)+'b]'
                e='['+str(i)+'e]'
                aspect_b_tokens.append(b)
                aspect_e_tokens.append(e)
                aspect_b.append(tokenizer.text_to_sequence(b)[0])
                aspect_e.append(tokenizer.text_to_sequence(e)[0])
            # Group runs of samples whose text_raw is identical (same sentence,
            # different aspects) and share aspect/polarity lists across the run.
            idx=0
            while idx in range(len(all_data)):
                # print(idx)
                data=all_data[idx]
                text_raw=data['text_raw']
                flag = True
                count=0
                while flag:
                    count=count+1
                    if idx+count not in range(len(all_data)):
                        break
                    text_raw_next=all_data[idx+count]['text_raw']
                    if (text_raw_next!=text_raw):
                        flag=False
                aspect_list=[]
                polarity_list=[]
                for i in range(0,count):
                    text_aspect=all_data[idx+i]['text_aspect']
                    aspect_list.append(text_aspect)
                    polarity_list.append(all_data[idx+i]['polarity']+1)
                for i in range(0,count):
                    all_data[idx+i]['aspect_list']=aspect_list
                    all_data[idx+i]['polarity_list']=polarity_list
                idx=idx+count
            # First pass: splice the per-slot marker tokens around every aspect
            # occurrence in the raw text.
            for i in range(len(all_data)):
                all_data[i]['text_multi']=all_data[i]['text_raw']
                now=0
                # print(len(all_data[i]['aspect_list']),all_data[i]['aspect_list'])
                for aspect in all_data[i]['aspect_list']:
                    aspect_len=len(aspect)
                    text_multi=all_data[i]['text_multi']
                    if aspect == all_data[i]['text_aspect']:
                        aspect_b_now=aspect_b_tokens[now]
                        aspect_e_now=aspect_e_tokens[now]
                        # text_multi=insert(text_multi,' [target_b] ',text_multi.find(aspect))
                        # text_multi=insert(text_multi,' [target_e] ',text_multi.find(aspect)+len(aspect))
                        text_multi=insert(text_multi,' '+aspect_b_now+' ',text_multi.find(aspect))
                        text_multi=insert(text_multi,' '+aspect_e_now+' ',text_multi.find(aspect)+len(aspect))
                        # main_target=aspect_b[now]
                        now=now+1
                    else:
                        aspect_b_now=aspect_b_tokens[now]
                        aspect_e_now=aspect_e_tokens[now]
                        text_multi=insert(text_multi,' '+aspect_b_now+' ',text_multi.find(aspect))
                        text_multi=insert(text_multi,' '+aspect_e_now+' ',text_multi.find(aspect)+len(aspect))
                        now=now+1
                    # text_multi=insert(text_multi,' [aspect_b] ',text_multi.find(aspect))
                    # text_multi=insert(text_multi,' [aspect_e] ',text_multi.find(aspect)+len(aspect))
                    all_data[i]['text_multi']=text_multi
                multi_target_indices = tokenizer.text_to_sequence(all_data[i]['text_multi'])
                all_data[i]['multi_target_indices']=multi_target_indices
                # Second pass: locate each slot's begin-marker token id in the
                # encoded sequence; the sample's own aspect also records its end.
                now=0
                poss=[]
                for aspect in all_data[i]['aspect_list']:
                    aspect_len=len(aspect)
                    text_multi=all_data[i]['text_multi']
                    if aspect == all_data[i]['text_aspect']:
                        aspect_b_now=aspect_b[now]
                        aspect_e_now=aspect_e[now]
                        pos=np.argwhere(all_data[i]['multi_target_indices']==aspect_b_now)[0][0]
                        pos_end=np.argwhere(all_data[i]['multi_target_indices']==aspect_e_now)[0][0]
                        main_target_pos=pos
                        main_target_pos_end=pos_end
                        poss.append(pos)
                        now=now+1
                    else:
                        aspect_b_now=aspect_b[now]
                        aspect_e_now=aspect_e[now]
                        pos=np.argwhere(all_data[i]['multi_target_indices']==aspect_b_now)[0][0]
                        poss.append(pos)
                        now=now+1
                    # text_multi=insert(text_multi,' [aspect_b] ',text_multi.find(aspect))
                    # text_multi=insert(text_multi,' [aspect_e] ',text_multi.find(aspect)+len(aspect))
                    all_data[i]['text_multi']=text_multi
                multi_target_segments_ids=np.asarray([0] * (np.sum(multi_target_indices != 0)))
                multi_target_segments_ids = pad_and_truncate(multi_target_segments_ids, tokenizer.max_seq_len)
                all_data[i]['multi_target_segments_ids']=multi_target_segments_ids
                # pos=np.argwhere(all_data[i]['multi_target_indices']==target_b)[0]
                poss=pad(poss,maxlen=20)
                poss=torch.tensor(poss)
                # print(poss)
                polarity_list=all_data[i]['polarity_list']
                polarity_list=pad(polarity_list,maxlen=20)
                polarity_list=torch.tensor(polarity_list)
                # poss=np.argwhere(all_data[i]['multi_target_indices']==target_b)[0]
                # aspect_poss=np.argwhere(all_data[i]['multi_target_indices']==target_b)[0][0]
                # print(pos,all_data[i]['multi_target_indices'])
                # print(main_target_pos)
                # print(main_target_pos_end)
                all_data[i]['target_pos']=main_target_pos
                all_data[i]['target_pos_end']=main_target_pos_end
                all_data[i]['poss']=poss
                all_data[i]['polarity_list']=polarity_list
            # out_name='./datasets/processed/{}-{}.txt'.format(process, opt.dataset)
            # out = open(out_name,'w+')
            # for i in range(len(all_data)):
            #     out.write(all_data[i]['text_multi'])
            #     out.write('\t')
            #     out.write(str(all_data[i]['polarity']))
            #     out.write('\n')
            # out.close()
            # Cache the processed samples.
            # NOTE(review): opt.aug is compared to strings elsewhere
            # (str(0), 'synonyms', ...), so `opt.aug==1` is likely always
            # False and augmented data is saved under 'processed' — confirm.
            a=np.array(all_data)
            if opt.aug==1:
                np.save('./datasets/aug/{}-{}.npy'.format(process, opt.dataset),a)
            else:
                np.save('./datasets/processed/{}-{}.npy'.format(process, opt.dataset),a)
            self.data = all_data

    def __getitem__(self, index):
        # Return the prebuilt feature dict for one sample.
        return self.data[index]

    def __len__(self):
        # Number of samples (including any augmented ones).
        return len(self.data)
|
def printHierarchical(dirname):
    """Recursively print the names of all regular files under ``dirname``.

    Directory names themselves are not printed; only the files they contain.
    """
    # List the directory entries
    for entry in os.listdir(dirname):
        # os.path.join is portable across platforms, unlike the original
        # hard-coded Windows "\\" separator.
        path = os.path.join(dirname, entry)
        if os.path.isdir(path):
            # Recursively descend into subdirectories
            printHierarchical(path)
        else:
            # Print the file name
            print(entry)
<filename>src/main/java/com/xiaomaigou/code/service/GenerateCodeService.java
package com.xiaomaigou.code.service;
import com.xiaomaigou.code.dto.GenerateCodeTemplateDataDTO;
import com.xiaomaigou.code.dto.TemplateData;
import com.xiaomaigou.code.entity.ColumnEntity;
import com.xiaomaigou.code.entity.TableEntity;
import java.util.List;
import java.util.zip.ZipOutputStream;
/**
 * Code generation service.
 *
 * @author xiaomaiyun
 * @version 1.2.3
 * @date 2020/8/16 18:41
 */
public interface GenerateCodeService {

    /**
     * Generates code for the given database tables.
     *
     * @param tableNameList   names of the tables to generate code for
     * @param useTemplateName name of the template set to use
     * @return the generated code bytes (presumably a ZIP archive, given the
     *         ZipOutputStream overloads below — confirm with the implementation)
     */
    byte[] generateCode(List<String> tableNameList, String useTemplateName);

    /**
     * Generates code for a single table.
     *
     * @param tableEntity      table metadata
     * @param columnEntityList column metadata for the table
     * @param useTemplateName  name of the template set to use
     * @return the generated code bytes
     */
    byte[] generateCode(TableEntity tableEntity, List<ColumnEntity> columnEntityList, String useTemplateName);

    /**
     * Generates code from prepared template data.
     *
     * @param generateCodeTemplateDataDTO template data for code generation
     * @return the generated code bytes
     */
    byte[] generateCode(GenerateCodeTemplateDataDTO generateCodeTemplateDataDTO);

    /**
     * Generates code and writes it to the given ZIP output stream.
     *
     * @param templateData    template data
     * @param zipOutputStream output stream receiving the generated files
     */
    void generateCode(TemplateData templateData, ZipOutputStream zipOutputStream);

    /**
     * Renders a string template with the given template data.
     *
     * @param templateData   template data
     * @param stringTemplate template source as a string
     * @return the rendered code string
     */
    String generateStringCodeByStringTemplate(TemplateData templateData, String stringTemplate);

    /**
     * Generates the non-template (static) files and writes them to the given
     * ZIP output stream.
     *
     * @param templateData    template data
     * @param zipOutputStream output stream receiving the generated files
     */
    void generateNotTemplatesCode(TemplateData templateData, ZipOutputStream zipOutputStream);
}
|
class Calculator:
    """A running-total calculator holding a single mutable value."""

    def __init__(self):
        # Running total starts at zero.
        self.value = 0

    def add(self, val):
        """Increase the value by ``val``."""
        self.value = self.value + val

    def subtract(self, val):
        """Decrease the value by ``val``."""
        self.value = self.value - val

    def multiply(self, val):
        """Scale the value by ``val``."""
        self.value = self.value * val

    def divide(self, val):
        """Divide the value by ``val`` (true division)."""
        self.value = self.value / val

    def show_value(self):
        """Print the current value."""
        print("Value =", self.value)
def print_transformed_sizes(transformed_dataset):
    """Print the index and the ``image``/``labels`` tensor sizes of every sample."""
    n = len(transformed_dataset)
    for idx in range(n):
        sample = transformed_dataset[idx]
        print(idx, sample['image'].size(), sample['labels'].size())
package org.rockyang.blockchain.web.vo.res;
import org.rockyang.blockchain.account.Account;
/**
* account VO
* @author yangjian
* @since 18-7-14
*/
public class AccountVo extends Account {

	@Override
	public String toString() {
		// Keep the exact legacy format: AccountVo{address='...', priKey='...'}
		return new StringBuilder("AccountVo{")
				.append("address='").append(address).append('\'')
				.append(", priKey='").append(priKey).append('\'')
				.append('}')
				.toString();
	}
}
|
#!/bin/bash
# Provision a GKE cluster, install the Stackdriver Prometheus plugin, and
# deploy Spinnaker for the Kayenta canary-analysis tutorial.
# Requires: gcloud, kubectl, curl, and $GOOGLE_CLOUD_PROJECT set.
set -x
set -e
gcloud config set project $GOOGLE_CLOUD_PROJECT
gcloud config set compute/zone us-central1-f
## Create GKE cluster ##
gcloud beta container clusters create kayenta-tutorial \
    --machine-type=n1-standard-2 \
    --enable-stackdriver-kubernetes
gcloud container clusters get-credentials kayenta-tutorial
## Install Stackdriver Prometheus plugin ##
kubectl apply --as=admin --as-group=system:masters -f \
    https://storage.googleapis.com/stackdriver-prometheus-documentation/rbac-setup.yml
# Rewrite the upstream manifest's project/cluster/location placeholders
# before applying it.
curl -sS "https://storage.googleapis.com/stackdriver-prometheus-documentation/prometheus-service.yml" | \
    sed "s/_stackdriver_project_id:.*/_stackdriver_project_id: $GOOGLE_CLOUD_PROJECT/" | \
    sed "s/_kubernetes_cluster_name:.*/_kubernetes_cluster_name: kayenta-tutorial/" | \
    sed "s/_kubernetes_location:.*/_kubernetes_location: us-central1-f/" | \
    kubectl apply -f -
## Install Spinnaker ##
curl -sSL "https://www.spinnaker.io/downloads/kubernetes/quick-install.yml" | \
    sed 's/version:.*/version: 1.12.2/g' | kubectl apply -f -
# A successful Spinnaker install has 11 pods
# Timeout of 20minutes (1200s)
# Poll every 30s until all 11 pods report 1/1 ready or the timeout elapses.
set +x
timeout=0
while [ "$timeout" -lt 1200 -a $(kubectl -n spinnaker get pods | grep "1/1" | wc -l) -ne 11 ]; do
    n=$(kubectl -n spinnaker get pods | grep "1/1" | wc -l)
    echo "[${timeout}s|${n}/11] Spinnaker not yet ready, waiting 30s more."
    sleep 30
    timeout=$((timeout+30))
done
if [ "$timeout" -ge 1200 ]; then
    echo "Timeout installing Spinnaker"
    exit 1
fi
echo "Spinnaker ready!"
set -x
|
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
# Run the Jasper inference benchmark.
# Positional args (all optional except $5/CHECKPOINT):
#   1 DATA_DIR  2 DATASET  3 MODEL_CONFIG  4 RESULT_DIR  5 CHECKPOINT
#   6 CREATE_LOGFILE  7 CUDNN_BENCHMARK  8 PRECISION(fp16|fp32)
#   9 NUM_STEPS  10 MAX_DURATION  11 SEED  12 BATCH_SIZE
echo "Container nvidia build = " $NVIDIA_BUILD_ID
DATA_DIR=${1:-"/datasets/LibriSpeech"}
DATASET=${2:-"dev-clean"}
MODEL_CONFIG=${3:-"configs/jasper10x5dr_sp_offline_specaugment.toml"}
RESULT_DIR=${4:-"/results"}
CHECKPOINT=$5
CREATE_LOGFILE=${6:-"true"}
CUDNN_BENCHMARK=${7:-"true"}
PRECISION=${8:-"fp32"}
NUM_STEPS=${9:-"-1"}
MAX_DURATION=${10:-"36"}
SEED=${11:-0}
BATCH_SIZE=${12:-64}
# Map the precision name onto the benchmark's CLI flag.
PREC=""
if [ "$PRECISION" = "fp16" ] ; then
   PREC="--fp16"
elif [ "$PRECISION" = "fp32" ] ; then
   PREC=""
else
   echo "Unknown <precision> argument"
   # NOTE(review): "exit -2" is non-portable (bash reports it as 254);
   # consider a positive status code.
   exit -2
fi
# Only pass --steps when a positive step count was requested.
STEPS=""
if [ "$NUM_STEPS" -gt 0 ] ; then
   STEPS=" --steps $NUM_STEPS"
fi
if [ "$CUDNN_BENCHMARK" = "true" ] ; then
   CUDNN_BENCHMARK=" --cudnn_benchmark"
else
   CUDNN_BENCHMARK=""
fi
# Assemble the benchmark command line.
CMD=" python inference_benchmark.py"
CMD+=" --batch_size=$BATCH_SIZE"
CMD+=" --model_toml=$MODEL_CONFIG"
CMD+=" --seed=$SEED"
CMD+=" --dataset_dir=$DATA_DIR"
CMD+=" --val_manifest $DATA_DIR/librispeech-${DATASET}-wav.json "
CMD+=" --ckpt=$CHECKPOINT"
CMD+=" --max_duration=$MAX_DURATION"
CMD+=" --pad_to=-1"
CMD+=" $CUDNN_BENCHMARK"
CMD+=" $PREC"
CMD+=" $STEPS"
# Optionally tee output to a timestamped log file under RESULT_DIR.
if [ "$CREATE_LOGFILE" = "true" ] ; then
  export GBS=$(expr $BATCH_SIZE )
  printf -v TAG "jasper_inference_benchmark_%s_gbs%d" "$PRECISION" $GBS
  DATESTAMP=`date +'%y%m%d%H%M%S'`
  LOGFILE="${RESULT_DIR}/${TAG}.${DATESTAMP}.log"
  printf "Logs written to %s\n" "$LOGFILE"
fi
set -x
if [ -z "$LOGFILE" ] ; then
   $CMD
else
   (
     $CMD
   ) |& tee "$LOGFILE"
   # Surface the latency summary from the log.
   grep 'latency' "$LOGFILE"
fi
set +x
|
#!/bin/bash
#/*
# * This file is part of TangoMan Provisions package.
# *
# * Copyright (c) 2021 "Matthias Morin" <mat@tangoman.io>
# *
# * This source file is subject to the MIT license that is bundled
# * with this source code in the file LICENSE.
# */
##/**
# * recipe essentials
# *
# * Installs zsh with powerline fonts, oh-my-zsh and the tangoman theme,
# * then configures zsh and cleans up. Scripts run in dependency order.
# *
# * @license MIT
# * @author "Matthias Morin" <mat@tangoman.io>
# */
# Refresh package lists and upgrade installed packages first.
./update.sh
./upgrade.sh
# system
./system/install_zsh.sh
# Fonts must be present before the powerline-based theme is installed.
./fonts/install_fonts-powerline.sh
./themes/install_ohmyzsh.sh
./themes/install_tangoman-theme.sh
./system/config_zsh.sh
./clean.sh
|
// Upgrade every ".alert" element on the page to a dismissible Bootstrap alert.
const alertList = document.querySelectorAll(".alert");
for (const alertElement of alertList) {
    new bootstrap.Alert(alertElement);
}
|
def word_total(text):
    """Count the words in ``text``.

    Splits on any run of whitespace, so consecutive spaces, tabs, and
    newlines do not produce phantom words, and an empty or blank string
    counts as 0. (The original ``split(" ")`` returned 1 for "" and
    over-counted around repeated spaces.)
    """
    return len(text.split())
def is_compiled_language(language):
    """Return True when ``language`` is one of the known compiled languages."""
    compiled_languages = ('C++', 'Java', 'Go', 'Rust')
    return language in compiled_languages
#!/bin/bash
# Deploy the howto-k8s-retry-policy App Mesh sample: builds/pushes app images
# to ECR and applies the mesh manifest. Requires AWS_ACCOUNT_ID and
# AWS_DEFAULT_REGION; optional SKIP_IMAGES / REDEPLOY toggles.
set -eo pipefail
if [ -z $AWS_ACCOUNT_ID ]; then
    echo "AWS_ACCOUNT_ID environment variable is not set."
    exit 1
fi
if [ -z $AWS_DEFAULT_REGION ]; then
    echo "AWS_DEFAULT_REGION environment variable is not set."
    exit 1
fi
# Major version of the AWS CLI (login syntax differs between v1 and v2).
AWS_CLI_VERSION=$(aws --version 2>&1 | cut -d/ -f2 | cut -d. -f1)
# Absolute directory of this script.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"
PROJECT_NAME="howto-k8s-retry-policy"
APP_NAMESPACE=${PROJECT_NAME}
MESH_NAME=${PROJECT_NAME}
ECR_URL="${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_DEFAULT_REGION}.amazonaws.com"
ECR_IMAGE_PREFIX="${ECR_URL}/${PROJECT_NAME}"
FRONT_APP_IMAGE="${ECR_IMAGE_PREFIX}/feapp"
COLOR_APP_IMAGE="${ECR_IMAGE_PREFIX}/colorapp"
# Manifest schema version; controller requirements differ per version.
MANIFEST_VERSION="${1:-v1beta2}"

# Print an error message and abort.
error() {
    echo $1
    exit 1
}
# Verify the virtualrouters CRD (needed for retryPolicy with v1beta2) exists.
check_k8s_virtualrouter() {
    #check CRD
    crd=$(kubectl get crd virtualrouters.appmesh.k8s.aws -o json )
    if [ -z "$crd" ]; then
        error "$PROJECT_NAME requires virtualrouters.appmesh.k8s.aws CRD to support retryPolicy. See https://github.com/aws/aws-app-mesh-controller-for-k8s/blob/master/CHANGELOG.md"
    else
        echo "CRD check passed!"
    fi
}
# Verify the virtualservices CRD schema includes retryPolicy (v1beta1 path).
check_k8s_virtualservice() {
    #check CRD
    crd=$(kubectl get crd virtualservices.appmesh.k8s.aws -o json | jq -e '.. | .retryPolicy? | select(. != null)')
    if [ "$crd" = "" ]; then
        echo "$PROJECT_NAME requires CRD that supports retryPolicy. See https://github.com/aws/aws-app-mesh-controller-for-k8s/blob/master/CHANGELOG.md#v020"
        exit 1
    fi
}
# Ensure the installed aws-app-mesh-controller version matches the chosen
# manifest version, and run the matching CRD check.
check_appmesh_k8s() {
    #check aws-app-mesh-controller version
    if [ "$MANIFEST_VERSION" = "v1beta2" ]; then
        currentver=$(kubectl get deployment -n appmesh-system appmesh-controller -o json | jq -r ".spec.template.spec.containers[].image" | cut -f2 -d ':'|tail -n1)
        requiredver="v1.0.0"
        check_k8s_virtualrouter
    elif [ "$MANIFEST_VERSION" = "v1beta1" ]; then
        currentver=$(kubectl get deployment -n appmesh-system appmesh-controller -o json | jq -r ".spec.template.spec.containers[].image" | cut -f2 -d ':')
        requiredver="v0.3.0"
        check_k8s_virtualservice
    else
        error "$PROJECT_NAME unexpected manifest version input: $MANIFEST_VERSION. Should be v1beta2 or v1beta1 based on the AppMesh controller version. See https://github.com/aws/aws-app-mesh-controller-for-k8s/blob/master/CHANGELOG.md"
    fi
    # sort -V picks the smaller version; if that is requiredver, current >= required.
    if [ "$(printf '%s\n' "$requiredver" "$currentver" | sort -V | head -n1)" = "$requiredver" ]; then
        echo "aws-app-mesh-controller check passed! $currentver >= $requiredver"
    else
        error "$PROJECT_NAME requires aws-app-mesh-controller version >=$requiredver but found $currentver. See https://github.com/aws/aws-app-mesh-controller-for-k8s/blob/master/CHANGELOG.md"
    fi
}
# Authenticate docker against ECR; syntax differs between AWS CLI v1 and v2.
ecr_login() {
    if [ $AWS_CLI_VERSION -gt 1 ]; then
        aws ecr get-login-password --region ${AWS_DEFAULT_REGION} | \
            docker login --username AWS --password-stdin ${ECR_URL}
    else
        $(aws ecr get-login --no-include-email)
    fi
}
# Build and push the colorapp and feapp images, creating their ECR
# repositories on first run.
deploy_images() {
    ecr_login
    for app in colorapp feapp; do
        aws ecr describe-repositories --repository-name $PROJECT_NAME/$app >/dev/null 2>&1 || aws ecr create-repository --repository-name $PROJECT_NAME/$app >/dev/null
        docker build -t ${ECR_IMAGE_PREFIX}/${app} ${DIR}/${app}
        docker push ${ECR_IMAGE_PREFIX}/${app}
    done
}
# Expand the manifest template (shell variables are substituted via the
# eval'd heredoc) into _output/ and apply it to the cluster.
deploy_app() {
    EXAMPLES_OUT_DIR="${DIR}/_output/"
    mkdir -p ${EXAMPLES_OUT_DIR}
    eval "cat <<EOF
$(<${DIR}/${MANIFEST_VERSION}/manifest.yaml.template)
EOF
" >${EXAMPLES_OUT_DIR}/manifest.yaml
    kubectl apply -f ${EXAMPLES_OUT_DIR}/manifest.yaml
}
# Delete the previously generated manifest's resources, then deploy fresh.
redeploy_app() {
    EXAMPLES_OUT_DIR="${DIR}/_output/"
    kubectl delete -f ${EXAMPLES_OUT_DIR}/manifest.yaml
    deploy_app
}
# Entry point: verify the controller, optionally build/push images
# (skippable via SKIP_IMAGES), then deploy or redeploy (REDEPLOY=true).
main() {
    check_appmesh_k8s
    if [ -z $SKIP_IMAGES ]; then
        echo "deploy images..."
        deploy_images
    fi
    if [ "$REDEPLOY" = true ]; then
        echo "redeploying app..."
        redeploy_app
        exit 0
    fi
    deploy_app
}

main
|
#!/bin/bash
# base_model or sub_model_1 or sub_model_2 or so on
#model_type="$1"
# Model/output locations.
model_name="video_dcc_boosting_discardhopeless"
MODEL_DIR="../model/${model_name}"

vocab_file="resources/train.video_id.vocab"
default_freq_file="resources/train.video_id.freq"

# Start from a clean ensemble config; -f keeps this from printing an error
# on the first run, when the file does not exist yet.
rm -f "${MODEL_DIR}/ensemble.conf"

# Build the video-id vocabulary from the official label file on first run.
if [ ! -f "$vocab_file" ]; then
    cd resources
    wget http://us.data.yt8m.org/1/ground_truth_labels/train_labels.csv
    echo "OOV" > train.video_id.vocab
    cat train_labels.csv | cut -d ',' -f 1 >> train.video_id.vocab
    cd ..
fi

# Guard against a truncated or corrupted vocabulary download.
vocab_checksum=$(md5sum "$vocab_file" | cut -d ' ' -f 1)
if [ "$vocab_checksum" == "b74b8f2592cad5dd21bf614d1438db98" ]; then
    echo "$vocab_file is valid"
else
    echo "$vocab_file is corrupted"
    exit 1
fi

# Default sample frequency: uniform weight 1 for every video id.
if [ ! -f "$default_freq_file" ]; then
    cat "$vocab_file" | awk '{print 1}' > "$default_freq_file"
fi
base_model_dir="${MODEL_DIR}/base_model"

# base model (4 epochs)
# Two invocations of 2 epochs each; the second run resumes from the
# checkpoints the first one left in $base_model_dir.
if [ ! -d $base_model_dir ]; then
    mkdir -p $base_model_dir
    for j in {1..2}; do
        CUDA_VISIBLE_DEVICES=0 python train.py \
            --train_dir="$base_model_dir" \
            --train_data_pattern="/Youtube-8M/data/video/train/train*" \
            --frame_features=False \
            --feature_names="mean_rgb,mean_audio" \
            --feature_sizes="1024,128" \
            --model=DeepCombineChainModel \
            --moe_num_mixtures=4 \
            --deep_chain_relu_cells=256 \
            --deep_chain_layers=4 \
            --label_loss=MultiTaskCrossEntropyLoss \
            --multitask=True \
            --support_type="label,label,label,label" \
            --num_supports=18864 \
            --support_loss_percent=0.05 \
            --reweight=True \
            --sample_vocab_file="$vocab_file" \
            --sample_freq_file="$default_freq_file" \
            --keep_checkpoint_every_n_hour=8.0 \
            --keep_checkpoint_interval=6 \
            --base_learning_rate=0.01 \
            --data_augmenter=NoiseAugmenter \
            --input_noise_level=0.2 \
            --num_readers=2 \
            --num_epochs=2 \
            --batch_size=1024
    done
fi
# Boosting loop: train 8 sub-models, each re-weighting training samples by
# the previous round's per-video error. last_freq_file carries the sample
# weights from one round into the next.
last_freq_file=$default_freq_file
for i in {1..8}; do
    sub_model_dir="${MODEL_DIR}/sub_model_${i}"
    if [ ! -d $sub_model_dir ]; then
        # Warm-start every sub-model from the shared base model checkpoints.
        cp -r $base_model_dir $sub_model_dir
        echo "training model #$i, reweighting with $last_freq_file"
        # train N models with re-weighted samples
        CUDA_VISIBLE_DEVICES=0 python train.py \
            --train_dir="$sub_model_dir" \
            --train_data_pattern="/Youtube-8M/data/video/train/train*" \
            --frame_features=False \
            --feature_names="mean_rgb,mean_audio" \
            --feature_sizes="1024,128" \
            --model=DeepCombineChainModel \
            --moe_num_mixtures=4 \
            --deep_chain_relu_cells=256 \
            --deep_chain_layers=4 \
            --label_loss=MultiTaskCrossEntropyLoss \
            --multitask=True \
            --support_type="label,label,label,label" \
            --num_supports=18864 \
            --support_loss_percent=0.05 \
            --reweight=True \
            --sample_vocab_file="$vocab_file" \
            --sample_freq_file="$last_freq_file" \
            --keep_checkpoint_every_n_hour=8.0 \
            --base_learning_rate=0.01 \
            --data_augmenter=NoiseAugmenter \
            --input_noise_level=0.2 \
            --num_readers=2 \
            --num_epochs=2 \
            --batch_size=1024
    fi

    # inference-pre-ensemble: dump this sub-model's predictions for each split.
    for part in test ensemble_validate ensemble_train; do
        output_dir="/Youtube-8M/model_predictions/${part}/${model_name}/sub_model_$i"
        if [ ! -d $output_dir ]; then
            CUDA_VISIBLE_DEVICES=0 python inference-pre-ensemble.py \
                --output_dir="${output_dir}" \
                --train_dir="${sub_model_dir}" \
                --input_data_pattern="/Youtube-8M/data/video/${part}/*.tfrecord" \
                --frame_features=False \
                --feature_names="mean_rgb,mean_audio" \
                --feature_sizes="1024,128" \
                --model=DeepCombineChainModel \
                --moe_num_mixtures=4 \
                --deep_chain_relu_cells=256 \
                --deep_chain_layers=4 \
                --batch_size=128 \
                --file_size=4096
        fi
    done

    # get error mapping: per-video error of this sub-model on the train set.
    output_error_file="${sub_model_dir}/train.video_id.error"
    if [ ! -f $output_error_file ]; then
        echo "generating error mapping to $output_error_file"
        CUDA_VISIBLE_DEVICES=0 python inference-sample-error.py \
            --output_file="${output_error_file}" \
            --train_dir="${sub_model_dir}" \
            --input_data_pattern="/Youtube-8M/data/video/train/*.tfrecord" \
            --frame_features=False \
            --feature_names="mean_rgb,mean_audio" \
            --feature_sizes="1024,128" \
            --model=DeepCombineChainModel \
            --moe_num_mixtures=4 \
            --deep_chain_relu_cells=256 \
            --deep_chain_layers=4 \
            --batch_size=1024
    fi

    # generate resample freq file for the next round (presumably up-weights
    # badly-predicted samples and discards hopeless ones via --discard_weight
    # -- confirm against reweight_sample_freq.py).
    output_freq_file="${sub_model_dir}/train.video_id.next_freq"
    if [ ! -f $output_freq_file ]; then
        echo "generating reweight freq to $output_freq_file"
        python training_utils/reweight_sample_freq.py \
            --discard_weight=20.0 \
            --video_id_file="$vocab_file" \
            --input_freq_file="$last_freq_file" \
            --input_error_file="${sub_model_dir}/train.video_id.error" \
            --output_freq_file="${sub_model_dir}/train.video_id.next_freq"
    fi

    last_freq_file="${sub_model_dir}/train.video_id.next_freq"
    echo "${model_name}/sub_model_$i" >> ${MODEL_DIR}/ensemble.conf
done
cd ../youtube-8m-ensemble

# partly ensemble
# Train/evaluate matrix-model ensembles over the first 1, 2 and 4 sub-models
# to see how the score scales with ensemble size.
for i in 1 2 4; do
    part_conf="${MODEL_DIR}/ensemble${i}.conf"
    cat ${MODEL_DIR}/ensemble.conf | head -n $i > $part_conf
    bash ensemble_scripts/train-matrix_model.sh ${model_name}/ensemble${i}_matrix_model $part_conf
    bash ensemble_scripts/eval-matrix_model.sh ${model_name}/ensemble${i}_matrix_model $part_conf
done

# all ensemble
bash ensemble_scripts/train-matrix_model.sh ${model_name}/ensemble_matrix_model ${MODEL_DIR}/ensemble.conf
bash ensemble_scripts/eval-matrix_model.sh ${model_name}/ensemble_matrix_model ${MODEL_DIR}/ensemble.conf
#bash ensemble_scripts/infer-matrix_model.sh ${model_name}/ensemble_matrix_model ${MODEL_DIR}/ensemble.conf
|
<reponame>FranGarciaLopez/quizer-api
from lib.response_parser import Response_Parser
from lib.db import Db
class UserTests:
    """Data-access helpers for the ``user_tests`` join table (user_id <-> test_id).

    NOTE(review): all queries are built with ``str.format``, so every method
    here is vulnerable to SQL injection if ``user_id``/``test_id`` ever come
    from untrusted input -- switch to driver parameter binding
    (``execute(sql, params)``) before exposing these externally.
    """

    def __init__(self, conn):
        # conn exposes a SQLAlchemy-style .engine used to run raw SQL.
        self.conn = conn

    def post(self, user_id, test_id):
        """Insert a (user_id, test_id) association."""
        sql_statement = "INSERT INTO user_tests (user_id, test_id) VALUES ('{0}','{1}')".format(user_id, test_id)
        response = self.conn.engine.execute(sql_statement)
        return Response_Parser.post(response)

    def get_all(self, user_id):
        """Return every association for the given user."""
        sql_statement = "SELECT * FROM user_tests where user_id = '{0}'".format(user_id)
        response = self.conn.engine.execute(sql_statement)
        return Response_Parser.get(response)

    def get_one(self, user_id, test_id):
        """Return the single association matching both ids (if any)."""
        sql_statement = "SELECT * FROM user_tests WHERE user_id = '{0}' and test_id = '{1}'".format(user_id, test_id)
        response = self.conn.engine.execute(sql_statement)
        return Response_Parser.get(response)

    def put(self, user_id, test_id):
        """Update an association.

        Fixed: the table is ``user_tests`` everywhere else in this class;
        ``users_tests`` was a typo that made this statement fail.
        NOTE(review): there is still no WHERE clause, so this updates every
        row in the table -- confirm the intended target row.
        """
        sql_statement = "UPDATE user_tests SET user_id = '{0}', test_id = '{1}'".format(user_id, test_id)
        response = self.conn.engine.execute(sql_statement)
        return Response_Parser.put(response)

    def delete_all(self, table_name, id_name, id_value):
        """Delegate bulk deletion to the shared Db helper."""
        Db.delete_all_subelement(self, table_name, id_name, id_value)

    def delete(self, user_id, test_id):
        """Delete the association matching both ids."""
        sql_statement = "DELETE FROM user_tests WHERE user_id = '{0}' and test_id = '{1}'".format(user_id, test_id)
        response = self.conn.engine.execute(sql_statement)
        return Response_Parser.delete(response)
|
/* SysFileSystem.cpp */
//----------------------------------------------------------------------------------------
//
// Project: CCore 3.01
//
// Tag: Target/LIN64utf8
//
// License: Boost Software License - Version 1.0 - August 17th, 2003
//
// see http://www.boost.org/LICENSE_1_0.txt or the local copy
//
// Copyright (c) 2017 <NAME>. All rights reserved.
//
//----------------------------------------------------------------------------------------
#include <CCore/inc/sys/SysFileSystem.h>
#include <CCore/inc/sys/SysInternal.h>
#include <CCore/inc/Exception.h>
#include <CCore/inc/ElementPool.h>
#include <CCore/inc/Array.h>
#include <CCore/inc/CharProp.h>
#include <CCore/inc/Path.h>
#include <CCore/inc/CmdlineParser.h>
#include <limits.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>
#include <fcntl.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <dirent.h>
#ifndef _DIRENT_HAVE_D_TYPE
#error "Bad dirent"
#endif
namespace CCore {
namespace Sys {
/* namespace Private_SysFileSystem */
namespace Private_SysFileSystem {
/* functions */
// Suffix appended after a directory name: "." when the name already ends
// with a path separator (so the combined path stays valid), empty otherwise.
StrLen Dotcard(StrLen dir)
 {
  if( PathIsBase(dir) ) return "."_c ;

  return ""_c ;
 }
/* class EmptyDirEngine */
// Recursively empties a directory tree using a single shared path buffer:
// `buf` always holds the current path and methods extend it in place, so no
// heap allocation is needed during the traversal.
class EmptyDirEngine : NoCopy
 {
   char buf[MaxPathLen+1]; // scratch path, NUL-terminated on demand

  private:

   // Empties the directory named by buf[0..dir_len) and then removes it.
   FileError deleteDir(ulen dir_len)
    {
     if( FileError fe=emptyDir(dir_len) ) return fe;

     buf[dir_len]=0;

     if( rmdir(buf)==-1 ) return MakeError(FileError_OpFault);

     return FileError_Ok;
    }

   // Appends file_name after the first dir_len chars of buf;
   // false if the result would overflow the buffer.
   bool set(ulen dir_len,StrLen file_name)
    {
     if( file_name.len>MaxPathLen-dir_len ) return false;

     file_name.copyTo(buf+dir_len);

     buf[dir_len+file_name.len]=0;

     return true;
    }

   // Removes one directory entry; recurses via deleteDir() for
   // subdirectories. Expects buf[dir_len]=='/' written by emptyDir().
   FileError remove(ulen dir_len,StrLen file_name,bool is_dir)
    {
     if( !set(dir_len+1,file_name) ) return FileError_TooLongPath;

     if( is_dir )
       {
        return deleteDir(dir_len+1+file_name.len);
       }
     else
       {
        if( unlink(buf)==-1 ) return MakeError(FileError_OpFault);

        return FileError_Ok;
       }
    }

   // Removes every entry of the directory buf[0..dir_len), leaving the
   // (now empty) directory itself in place.
   FileError emptyDir(ulen dir_len)
    {
     if( dir_len>MaxPathLen-1 ) return FileError_TooLongPath;

     buf[dir_len]='/';
     buf[dir_len+1]=0;

     DIR *dir=opendir(buf);

     if( !dir ) return MakeError(FileError_OpFault);

     errno=0;

     while( dirent *result=readdir(dir) )
       {
        StrLen file_name(result->d_name);

        // skip "." and ".."
        if( !PathBase::IsSpecial(file_name) )
          {
           if( FileError fe=remove(dir_len,file_name,result->d_type==DT_DIR) )
             {
              closedir(dir);

              return fe;
             }
          }

        errno=0; // readdir() distinguishes end-of-dir from error only via errno
       }

     int error=errno;

     closedir(dir);

     if( error ) return MakeError(FileError_OpFault,error);

     return FileError_Ok;
    }

  public:

   EmptyDirEngine() {}

   // Entry point: empties dir_name; the directory itself is kept.
   FileError emptyDir(StrLen dir_name)
    {
     if( dir_name.len>MaxPathLen ) return FileError_TooLongPath;

     dir_name.copyTo(buf);

     return emptyDir(dir_name.len);
    }
 };
/* DeleteDirRecursive() */
// Removes dir_name and everything below it.
//
// NOTE(review): rmdir() is handed dir_name.ptr as a C string; this relies on
// the caller passing a buffer that is usable as a NUL-terminated path
// (FileSystem::deleteDir() passes a FileName buffer that may have a '.'
// appended after dir_name.len via Dotcard()) -- confirm the terminator
// before reusing this helper elsewhere.
FileError DeleteDirRecursive(StrLen dir_name)
 {
  EmptyDirEngine engine;

  if( FileError fe=engine.emptyDir(dir_name) ) return fe;

  if( rmdir(dir_name.ptr)==-1 ) return MakeError(FileError_OpFault);

  return FileError_Ok;
 }
/* class StrPool */
// Arena for NUL-terminated C strings: every string returned by add() stays
// valid until the pool itself is destroyed.
class StrPool : NoCopy
 {
   ElementPool pool;

  public:

   StrPool() : pool(4_KByte) {}

   ~StrPool() {}

   // Copies str into the pool and returns it as a NUL-terminated C string.
   char * add(StrLen str)
    {
     auto buf=pool.createArray_raw<char>(LenAdd(str.len,1));

     str.copyTo(buf.ptr);

     buf.back(1)=0;

     return buf.ptr;
    }

   // Renders builder into pool storage and returns a NUL-terminated C string.
   char * add(BuilderType<char> builder)
    {
     char *buf=pool.createArray_raw<char>(LenAdd(builder.getLen(),1)).ptr;

     StrLen str=builder(buf);

     buf[str.len]=0;

     return buf;
    }
 };
/* class StrList */
// argv-style, null-terminated list of C strings backed by a StrPool.
class StrList : NoCopy
 {
   StrPool &pool;

   DynArray<char *> list;

  public:

   explicit StrList(StrPool &pool_) : pool(pool_),list(DoReserve,100) {}

   ~StrList() {}

   void add(StrLen str)
    {
     list.append_copy(pool.add(str));
    }

   void add(BuilderType<char> builder)
    {
     list.append_copy(pool.add(builder));
    }

   // Appends the terminating null pointer and returns the argv array.
   // Call exactly once, after all add() calls.
   char ** complete()
    {
     list.append_copy(0);

     return list.getPtr();
    }

   void prepareArg(StrLen program,StrLen arg);
 };

// Fills the list with argv[0]=program followed by arg split into separate
// words by CmdlineParser.
void StrList::prepareArg(StrLen program,StrLen arg)
 {
  add(program);

  CmdlineParser parser(arg);

  for(;;)
    {
     auto builder=parser.next();

     if( !builder ) break;

     add(builder);
    }
 }
// Spawns `path` with arguments argv and environment envp, after changing the
// child's working directory to wdir. vfork() suspends the parent until the
// child execs or exits, so the child's writes to the volatile `error` below
// are visible once vfork() returns in the parent.
FileError Spawn(char *wdir,char *path,char **argv,char **envp)
 {
  volatile FileError error=FileError_Ok;

  switch( pid_t child_pid=vfork() )
    {
     case -1 : return MakeError(FileError_OpFault); // vfork() itself failed

     case 0 : // child
      {
       char temp[PATH_MAX+1];
       char *path1=path;

       // A relative path containing '/' must be resolved before chdir(),
       // otherwise it would be looked up relative to wdir.
       if( *path!='/' && strchr(path,'/') )
         {
          if( char *result=realpath(path,temp) )
            {
             path1=result;
            }
          else
            {
             error=MakeError(FileError_OpFault);

             _exit(124);
            }
         }

       if( chdir(wdir) )
         {
          error=MakeError(FileError_OpFault);

          _exit(125);
         }

       execvpe(path1,argv,envp);

       // exec failed if we reach this point
       error=MakeError(FileError_OpFault);

       // NOTE(review): `error` is a FileError here but is compared against
       // the raw errno constant ENOENT -- this looks like it was meant to
       // test errno before MakeError() consumed it; confirm.
       _exit( ( error==ENOENT )? 126 : 127 );
      }

     default: // parent
      {
       // NOTE(review): WNOHANG means the child is not actually awaited;
       // if it has not exited yet this leaves a zombie to be reaped later.
       waitpid(child_pid,0,WNOHANG);

       return error;
      }
    }
 }
} // namespace Private_SysFileSystem
using namespace Private_SysFileSystem;
/* struct FileSystem::DirCursor */
// Opens dir_name for iteration. On failure `dir` is left null and `error`
// records the reason; next() then returns false immediately.
void FileSystem::DirCursor::init(FileSystem *,StrLen dir_name) noexcept
 {
  if( !dir_name )
    {
     dir=0;
     error=FileError_BadName;

     return;
    }

  FileName path;

  if( path.set(dir_name) )
    {
     dir=opendir(path);

     if( dir )
       error=FileError_Ok;
     else
       error=MakeError(FileError_OpFault);
    }
  else
    {
     dir=0;
     error=FileError_TooLongPath;
    }
 }
void FileSystem::DirCursor::exit() noexcept
 {
  // Release the handle if one is still open; next() may already have closed
  // it and zeroed `dir`.
  if( !dir ) return;

  closedir(static_cast<DIR *>(dir));
 }
// Advances to the next directory entry. Returns true and fills
// file_name/len/type on success; returns false at end-of-directory or on
// error (inspect `error` to distinguish). The DIR handle is closed before
// any false return, so next() must not be called again afterwards.
bool FileSystem::DirCursor::next() noexcept
 {
  if( !dir ) return false;

  errno=0; // readdir() reports errors only through errno

  if( dirent *result=readdir(static_cast<DIR *>(dir)) )
    {
     StrLen name(result->d_name);

     if( name.len>MaxPathLen )
       {
        error=FileError_TooLongPath;

        closedir(static_cast<DIR *>(dir));

        dir=0;

        return false;
       }

     name.copyTo(file_name);

     len=name.len;

     // NOTE(review): anything that is not a directory (symlink, fifo, ...)
     // is reported as FileType_file.
     type=(result->d_type==DT_DIR)?FileType_dir:FileType_file;

     error=FileError_Ok;

     return true;
    }
  else
    {
     int error_=errno;

     if( error_ )
       {
        error=MakeError(FileError_OpFault,error_);
       }
     else
       {
        error=FileError_Ok; // clean end of directory
       }

     closedir(static_cast<DIR *>(dir));

     dir=0;

     return false;
    }
 }
/* struct FileSystem */
// No per-instance OS state is needed on this target; both calls are no-ops.

FileError FileSystem::init() noexcept
 {
  return FileError_Ok;
 }

FileError FileSystem::exit() noexcept
 {
  return FileError_Ok;
 }
// Classifies `path`: FileType_dir, FileType_file, or FileType_none with
// FileError_Ok when nothing exists there.
auto FileSystem::getFileType(StrLen path) noexcept -> TypeResult
 {
  FileName file_name;

  if( !file_name.set(path) ) return {FileType_none,FileError_TooLongPath};

  struct stat result;

  if( stat(file_name,&result)!=0 )
    {
     int error=errno;

     bool missing = ( error==ENOENT || error==ENOTDIR );

     if( missing ) return {FileType_none,FileError_Ok};

     return {FileType_none,MakeError(FileError_OpFault,error)};
    }

  FileType type = S_ISDIR(result.st_mode)? FileType_dir : FileType_file ;

  return {type,FileError_Ok};
 }
// Returns the last-modification time of `path` in nanoseconds, or 0 with
// FileError_Ok when the path does not exist. A real mtime of exactly 0 is
// nudged to 1 so that 0 can serve as the "no such file" marker.
auto FileSystem::getFileUpdateTime(StrLen path) noexcept -> CmpTimeResult
 {
  FileName file_name;

  if( !file_name.set(path) ) return {0,FileError_TooLongPath};

  struct stat result;

  if( stat(file_name,&result)==-1 )
    {
     int error=errno;

     if( error==ENOENT || error==ENOTDIR ) return {0,FileError_Ok};

     return {0,MakeError(FileError_OpFault,error)};
    }

  CmpFileTimeType time=TimeSpec(result.st_mtim).getNanoSec();

  if( !time ) time=1;

  return {time,FileError_Ok};
 }
// Creates an empty regular file; fails if it already exists (O_EXCL).
FileError FileSystem::createFile(StrLen file_name_) noexcept
 {
  FileName file_name;

  if( !file_name.set(file_name_) ) return FileError_TooLongPath;

  // read/write for user and group, exclusive creation
  int handle=open(file_name,O_RDWR|O_CREAT|O_EXCL,S_IRUSR|S_IWUSR|S_IRGRP|S_IWGRP);

  if( handle==-1 ) return MakeError(FileError_OpenFault);

  close(handle);

  return FileError_Ok;
 }
// Removes a regular file (not a directory).
FileError FileSystem::deleteFile(StrLen file_name_) noexcept
 {
  FileName file_name;

  if( !file_name.set(file_name_) ) return FileError_TooLongPath;

  int ret=unlink(file_name);

  return ( ret==-1 )? MakeError(FileError_OpFault) : FileError_Ok ;
 }
// Creates a directory with rwx permissions for user and group.
FileError FileSystem::createDir(StrLen dir_name) noexcept
 {
  FileName file_name;

  if( !file_name.set(dir_name) ) return FileError_TooLongPath;

  int mode=S_IRUSR|S_IWUSR|S_IXUSR|S_IRGRP|S_IWGRP|S_IXGRP;

  if( mkdir(file_name,mode)==-1 ) return MakeError(FileError_OpFault);

  return FileError_Ok;
 }
// Removes a directory; with recursive=true its whole subtree is deleted
// first. Dotcard() appends "." when dir_name ends in '/', keeping the
// buffer a valid path, while the recursive branch is handed only the
// original dir_name.len characters.
FileError FileSystem::deleteDir(StrLen dir_name,bool recursive) noexcept
 {
  if( !dir_name ) return FileError_BadName;

  FileName path;

  if( !path.set(dir_name,Dotcard(dir_name)) ) return FileError_TooLongPath;

  if( recursive ) return DeleteDirRecursive(StrLen(path,dir_name.len));

  if( rmdir(path)==-1 ) return MakeError(FileError_OpFault);

  return FileError_Ok;
 }
// Renames old_path_ to new_path_.
//
// Without allow_overwrite any existing destination is refused; with it, only
// an existing *directory* destination is refused (plain files are replaced
// by ::rename()). The two original branches duplicated the whole stat logic
// and differed only in that one test, so it is shared here.
FileError FileSystem::rename(StrLen old_path_,StrLen new_path_,bool allow_overwrite) noexcept
 {
  FileName old_path;

  if( !old_path.set(old_path_) ) return FileError_TooLongPath;

  FileName new_path;

  if( !new_path.set(new_path_) ) return FileError_TooLongPath;

  struct stat result;

  if( stat(new_path,&result)==-1 )
    {
     int error=errno;

     // "destination does not exist" is fine; anything else is a real failure
     if( error!=ENOENT && error!=ENOTDIR ) return MakeError(FileError_OpFault);
    }
  else
    {
     if( !allow_overwrite || S_ISDIR(result.st_mode) ) return FileError_FileExist;
    }

  if( ::rename(old_path,new_path)==-1 ) return MakeError(FileError_OpFault);

  return FileError_Ok;
 }
// Removes `path`, dispatching to rmdir() or unlink() depending on whether it
// names a directory. FileError_NoPath when nothing exists there.
FileError FileSystem::remove(StrLen path) noexcept
 {
  FileName file_name;

  if( !file_name.set(path) ) return FileError_TooLongPath;

  struct stat result;

  if( stat(file_name,&result)==-1 )
    {
     int error=errno;

     if( error==ENOENT || error==ENOTDIR ) return FileError_NoPath;

     return MakeError(FileError_OpFault,error);
    }

  bool is_dir=S_ISDIR(result.st_mode);

  int ret = is_dir? rmdir(file_name) : unlink(file_name) ;

  if( ret==-1 ) return MakeError(FileError_OpFault);

  return FileError_Ok;
 }
// Runs `program arg` with working directory `dir`.
// The pool/list construction below may throw on allocation failure, hence
// the try/catch boundary: any such failure is reported as
// FileError_SysOverload instead of escaping this noexcept function.
FileError FileSystem::exec(StrLen dir,StrLen program,StrLen arg) noexcept
 {
  SilentReportException report;

  try
    {
     char temp[MaxPathLen+1];

     auto result=pathOf(dir,temp); // resolve dir to an absolute path

     if( result.error ) return result.error;

     StrPool pool;
     StrList argc(pool);

     char *path=pool.add(program);

     argc.prepareArg(program,arg); // argv[0]=program, then arg split into words

     return Spawn(temp,path,argc.complete(),environ);
    }
  catch(CatchType)
    {
     return FileError_SysOverload;
    }
 }
// Resolves path_ to a canonical absolute path stored in buf.
auto FileSystem::pathOf(StrLen path_,char buf[MaxPathLen+1]) noexcept -> PathOfResult
 {
  FileName path;

  if( !path.set(path_) ) return PathOfResult{StrLen(),FileError_TooLongPath};

  char temp[PATH_MAX+1];

  char *result=realpath(path,temp);

  if( !result ) return PathOfResult{StrLen(),MakeError(FileError_OpFault)};

  StrLen src(result);

  if( src.len>MaxPathLen ) return PathOfResult{StrLen(),FileError_TooLongPath};

  src.copyTo(buf);

  return PathOfResult{StrLen(buf,src.len),FileError_Ok};
 }
} // namespace Sys
} // namespace CCore
|
<gh_stars>1-10
/**
 * Application-level error carrying an HTTP-style status code.
 * Defaults: statusCode 400, name "AppError".
 */
export class AppError {
  constructor(
    public readonly message: string,
    public readonly statusCode: number = 400,
    public readonly name: string = 'AppError',
  ) {}
}
|
<reponame>collosdeveloper/tt<gh_stars>0
package com.knyaz.testtask.utils;
import android.text.TextUtils;
import android.util.Patterns;
public class UserFieldsValidator {
private static UserFieldsValidator sInstance;
public static UserFieldsValidator getInstance() {
if (sInstance == null) {
sInstance = new UserFieldsValidator();
}
return sInstance;
}
private UserFieldsValidator() {
}
public boolean validateNotEmpty(String text) {
return !TextUtils.isEmpty(text);
}
public boolean validateEmail(String email) {
return !TextUtils.isEmpty(email) && Patterns.EMAIL_ADDRESS.matcher(email).matches();
}
public boolean validatePasswordLength(String password) {
return password.length() >= 6;
}
public boolean validateNoSpaces(String line) {
return !line.contains(" ");
}
} |
<filename>app/src/main/java/com/rxjava2/practice/activity/creating/CreatingActivity.java<gh_stars>0
package com.rxjava2.practice.activity.creating;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import com.rxjava2.practice.R;
/**
* Created by j on 2017/12/26.
*/
/** Menu screen: each button launches one of the RxJava "creating" demos. */
public class CreatingActivity extends AppCompatActivity {

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_creating);
    }

    /** Launches the given activity; shared by all the button handlers below. */
    private void start(Class<?> target) {
        startActivity(new Intent(CreatingActivity.this, target));
    }

    public void startCreateActivity(View view) {
        start(CreateActivity.class);
    }

    public void startDeferActivity(View view) {
        start(DeferActivity.class);
    }

    public void startFromActivity(View view) {
        start(FromActivity.class);
    }

    public void startJustActivity(View view) {
        start(JustActivity.class);
    }

    public void startStartActivity(View view) {
        start(StartActivity.class);
    }

    public void startRepeatActivity(View view) {
        start(RepeatActivity.class);
    }

    public void startRangeActivity(View view) {
        start(RangeActivity.class);
    }
}
|
<gh_stars>0
var express = require('express')
var path = require('path')
var app = express()

// Serve the project directory itself as static assets.
app.use(express.static(path.join(__dirname, '/')));

// Allow the port to be overridden via the environment; falls back to 3000.
var port = process.env.PORT || 3000;
app.listen(port);
console.log("Server running at " + port + " port");
// Barrel file: re-export the Captcha module's default export as this
// directory's default export.
export { default } from "./Captcha.js";
|
<gh_stars>10-100
const { HiddenRelationship } = require('./HiddenRelationship')
// Chains a new hook after an (optional) existing hook of the same shape.
// The original hook runs first; whatever it returns becomes the
// `resolvedData` passed on to the new hook.
const composeHook = (originalHook, newHook) => async params => {
  const resolvedData = originalHook ? await originalHook(params) : params.resolvedData
  return newHook({ ...params, resolvedData })
}
// Truthy only for Date instances holding a real (non-NaN) timestamp.
// Falsy inputs are returned as-is, matching the original short-circuit.
function isValidDate (date) {
  const isDateObject = Object.prototype.toString.call(date) === '[object Date]'
  return date && isDateObject && !isNaN(date)
}
// Public surface of this helper module.
module.exports = {
  HiddenRelationship,
  composeHook,
  isValidDate,
}
|
#pragma once
/**
* @file eases.h
* @author Group 7 - Informatica
*/
/**
* @namespace goliath::transitions::methods
* @brief Contains methods for transitioning a phase
*/
namespace goliath::transitions::methods {
    // Easing curves for transition phases. Each maps a phase progress value
    // to an eased value ("In" accelerates, "Out" decelerates, "InOut" both);
    // presumably `value` is normalized to [0, 1] -- confirm against the
    // implementations.

    // Quadratic
    double easeInQuad(double value);
    double easeOutQuad(double value);
    double easeInOutQuad(double value);

    // Cubic
    double easeInCubic(double value);
    double easeOutCubic(double value);
    double easeInOutCubic(double value);

    // Quartic
    double easeInQuart(double value);
    double easeOutQuart(double value);
    double easeInOutQuart(double value);

    // Quintic
    double easeInQuint(double value);
    double easeOutQuint(double value);
    double easeInOutQuint(double value);

    // Identity mapping (no easing)
    double linear(double value);
}
|
#! /bin/sh
# Copyright (C) 2001-2013 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Test to make sure the linker for a dist_*_SOURCES can override that for
# *_SOURCES
# Richard Boulton <richard@tartarus.org>
. test-init.sh

# Declare both C and C++ so automake has to choose a linker for the program.
cat >> configure.ac << 'END'
AC_PROG_CC
AC_PROG_CXX
END

# The program mixes a plain C source with a distributed C++ source; the
# dist_*_SOURCES C++ file must win the linker choice.
cat > Makefile.am << 'END'
bin_PROGRAMS = lavalamp
lavalamp_SOURCES = lava.c
dist_lavalamp_SOURCES = lamp.cxx
END

$ACLOCAL
$AUTOMAKE

# We should only see the C++ linker in the rules of 'Makefile.in'.
# Look for this macro not at the beginning of any line; that will have
# to be good enough for now.
grep '.\$(CXXLINK)' Makefile.in

# We should not see these patterns (grep success here means failure):
grep '.\$(FLINK)' Makefile.in && exit 1
grep '.\$(LINK)' Makefile.in && exit 1

exit 0
|
<filename>pairearch_WLY/RootTabBar/PrivateCenter/Controllers/PersonalCenterViewController.h<gh_stars>0
//
// PersonalCenterViewController.h
// WLY
//
// Created by Leo on 16/3/15.
// Copyright © 2016年 Leo. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Screen controller for the user's personal center; BaseViewController
/// supplies the app-wide shared behavior.
@interface PersonalCenterViewController : BaseViewController

@end
|
from rest_framework import serializers
from django_celery_beat.models import ClockedSchedule
from django_celery_beat.models import CrontabSchedule
from django_celery_beat.models import IntervalSchedule
from django_celery_beat.models import SolarSchedule
from django_celery_beat.models import PeriodicTask
from rest_framework.validators import UniqueTogetherValidator
class ClockedScheduleSerializer(serializers.ModelSerializer):
    """Serializer for one-shot (clocked) schedules."""

    class Meta:
        model = ClockedSchedule
        fields = ('id', 'clocked_time', 'enabled')
        # Reject duplicates of the same clocked_time instead of creating a
        # second identical schedule row.
        validators = [
            UniqueTogetherValidator(
                queryset=ClockedSchedule.objects.all(),
                fields=('clocked_time',),
                message="已存在,无需重复新增"
            )
        ]
class CrontabScheduleSerializer(serializers.ModelSerializer):
    """Serializer for crontab-style schedules."""

    # This field cannot be converted automatically, so it is declared
    # explicitly as plain text.
    timezone = serializers.CharField(max_length=63)

    class Meta:
        model = CrontabSchedule
        fields = ('id', 'minute', 'hour', 'day_of_week', 'day_of_month', 'month_of_year', 'timezone')
        # One row per unique crontab expression + timezone.
        validators = [
            UniqueTogetherValidator(
                queryset=CrontabSchedule.objects.all(),
                fields=('minute', 'hour', 'day_of_week', 'day_of_month', 'month_of_year', 'timezone'),
                message="已存在,无需重复新增"
            )
        ]
class IntervalScheduleSerializer(serializers.ModelSerializer):
    """Serializer for fixed-interval schedules (every N periods)."""

    class Meta:
        model = IntervalSchedule
        fields = ('id', 'every', 'period')
        # One row per unique (every, period) combination.
        validators = [
            UniqueTogetherValidator(
                queryset=IntervalSchedule.objects.all(),
                fields=('every', 'period',),
                message="已存在,无需重复新增"
            )
        ]
class SolarScheduleSerializer(serializers.ModelSerializer):
    """Serializer for solar-event schedules at a given location."""

    class Meta:
        model = SolarSchedule
        fields = ('id', 'event', 'latitude', 'longitude')
        # One row per unique (event, location) combination.
        validators = [
            UniqueTogetherValidator(
                queryset=SolarSchedule.objects.all(),
                fields=('event', 'latitude', 'longitude'),
                message="已存在,无需重复新增"
            )
        ]
class PeriodicTaskSerializer(serializers.ModelSerializer):
    """Serializer for periodic tasks with their nested schedule references.

    Each of the four schedule fields is writable as a nested object; on
    create/update the nested payload is swapped for the matching existing
    schedule row (see pop_data).
    """

    interval = IntervalScheduleSerializer(allow_null=True)
    crontab = CrontabScheduleSerializer(allow_null=True)
    solar = SolarScheduleSerializer(allow_null=True)
    clocked = ClockedScheduleSerializer(allow_null=True)

    # Nested field name -> model used to look up the referenced schedule row.
    SCHEDULE_MODELS = {
        'interval': IntervalSchedule,
        'crontab': CrontabSchedule,
        'solar': SolarSchedule,
        'clocked': ClockedSchedule,
    }

    class Meta:
        model = PeriodicTask
        fields = ('id', 'name', 'task', 'interval', 'crontab', 'solar', 'clocked', 'args', 'kwargs', 'queue',
                  'exchange', 'routing_key', 'headers', 'priority', 'expires', 'one_off', 'start_time', 'enabled',
                  'last_run_at', 'total_run_count', 'date_changed', 'description')

    def create(self, validated_data):
        validated_data, relation_data = self.pop_data(validated_data)
        return PeriodicTask.objects.create(**validated_data, **relation_data)

    def update(self, instance, validated_data):
        validated_data, relation_data = self.pop_data(validated_data)
        for attr, value in dict(validated_data, **relation_data).items():
            setattr(instance, attr, value)
        instance.save()
        return instance

    @classmethod
    def pop_data(cls, validated_data):
        """Split validated_data into plain fields and resolved schedule rows.

        Replaces the four copy-pasted if/else lookups of the original with a
        single table-driven loop. NOTE(review): like the original, this
        assumes the referenced schedule row already exists --
        ``Model.objects.get`` raises DoesNotExist otherwise.
        """
        relation_data = {}
        for field, model in cls.SCHEDULE_MODELS.items():
            data = validated_data.pop(field)
            relation_data[field] = model.objects.get(**data) if data else None
        return validated_data, relation_data
|
#!/usr/bin/env bash
# NOTE: DO NOT WRITE DISTRIBUTION-SPECIFIC COMMANDS HERE (e.g., apt, dnf, etc)
set -euo pipefail

# Show which C++ compiler will be used (defaults to g++).
${CXX:-g++} -v

(
    set -euo pipefail
    cd tools

    # To skip error: later steps source this Kaldi helper, so create an
    # empty stand-in instead of installing Kaldi itself.
    mkdir -p kaldi/egs/wsj/s5/utils && touch kaldi/egs/wsj/s5/utils/parse_options.sh

    if ${USE_CONDA}; then
        ./setup_anaconda.sh venv espnet ${ESPNET_PYTHON_VERSION}
    else
        ./setup_python.sh "$(command -v python3)" venv
    fi
    make TH_VERSION="${TH_VERSION}"

    # Extra tooling targets used by the recipes/tests.
    make nkf.done moses.done mwerSegmenter.done pesq pyopenjtalk.done py3mmseg.done

    rm -rf kaldi
)
. tools/activate_python.sh
python3 --version

pip3 install https://github.com/kpu/kenlm/archive/master.zip

# NOTE(kan-bayashi): Fix the error in black installation.
# See: https://github.com/psf/black/issues/1707
pip3 uninstall -y typing

# install espnet
pip3 install -e ".[test]"
pip3 install -e ".[doc]"

# log
pip3 freeze
|
/**
* Copyright (c) 2015-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/* eslint-disable */
/*
* THIS FILE IS MODIFIED FOR DOCUSAURUS
* the above copyright header must be preserved for license compliance.
*/
/*
Implementation based on comment: https://github.com/facebook/create-react-app/issues/9994#issuecomment-811289191
*/
/**
 * Inlines a module's source map as a base64 data URI.
 * `source` is any object whose `.map()` returns a JSON-serializable
 * source-map object.
 */
function base64SourceMap(source) {
  const json = JSON.stringify(source.map());
  const payload = Buffer.from(json, 'utf8').toString('base64');
  return `data:application/json;charset=utf-8;base64,${payload}`;
}
// modified for Docusaurus => remove webpack 5 deprecation warnings
// See https://github.com/facebook/create-react-app/issues/9994#issuecomment-811289191
// Looks up a compiled webpack module by its module id and returns its
// original source. Loose `==` is intentional: ids may be numbers or strings.
// Throws (TypeError) when no module matches, same as the original.
function getSourceById(server, id) {
  const { compilation } = server._stats;
  const match = [...compilation.modules].find(
    (m) => compilation.chunkGraph.getModuleId(m) == id,
  );
  return match.originalSource();
}
/*
* Middleware responsible for retrieving a generated source
* Receives a webpack internal url: "webpack-internal:///<module-id>"
* Returns a generated source: "<source-text><sourceMappingURL><sourceURL>"
*
* Based on EvalSourceMapDevToolModuleTemplatePlugin.js
*/
module.exports = function createEvalSourceMapMiddleware(server) {
return function handleWebpackInternalMiddleware(req, res, next) {
if (req.url.startsWith('/__get-internal-source')) {
const fileName = req.query.fileName;
const id = fileName.match(/webpack-internal:\/\/\/(.+)/)[1];
if (!id || !server._stats) {
next();
}
const source = getSourceById(server, id);
const sourceMapURL = `//# sourceMappingURL=${base64SourceMap(source)}`;
const sourceURL = `//# sourceURL=webpack-internal:///${module.id}`;
res.end(`${source.source()}\n${sourceMapURL}\n${sourceURL}`);
} else {
next();
}
};
};
|
# Make ../lib requireable from the specs without installing the gem.
$LOAD_PATH << File.join(File.dirname(__FILE__), '../lib')

RSpec.configure do |config|
  # When a filter (e.g. :focus) matches nothing, run the whole suite.
  config.run_all_when_everything_filtered = true
  # Allow focusing individual examples with `:focus`.
  config.filter_run :focus
  # Random order surfaces inter-test dependencies; reproduce with --seed.
  config.order = 'random'
end
|
#!/bin/bash
set -e
VERSION="1.15.2"

OS="$(uname -s)"
ARCH="$(uname -m)"

# Map `uname` output onto the platform identifiers used by the Go
# download archives.
case $OS in
    "Linux")
        case $ARCH in
        "x86_64")
            ARCH=amd64
            ;;
        "aarch64")
            ARCH=arm64
            ;;
        # `uname -m` reports e.g. "armv6l"/"armv8l"; the old literal
        # patterns "armv6"/"armv8" could never match, so use prefix globs.
        armv6*)
            ARCH=armv6l
            ;;
        armv8*)
            ARCH=arm64
            ;;
        # case patterns are globs, not regexes: the old ".*386.*" only
        # matched strings containing literal dots.
        *386*)
            ARCH=386
            ;;
        esac
        PLATFORM="linux-$ARCH"
        ;;
    "Darwin")
        PLATFORM="darwin-amd64"
        ;;
esac

PACKAGE_NAME="go$VERSION.$PLATFORM.tar.gz"
TEMP_DIRECTORY=$(mktemp -d)

echo "Downloading $PACKAGE_NAME ..."
# With `set -e` the old `if [ $? -ne 0 ]` check was dead code (the script
# had already exited on failure); test the command directly instead.
if ! wget -q --show-progress "https://storage.googleapis.com/golang/$PACKAGE_NAME" -O "$TEMP_DIRECTORY/go.tar.gz"; then
    echo "Download failed! Exiting."
    exit 1
fi

echo "Extracting File..."
sudo rm -rf /usr/local/go
sudo mkdir /usr/local/go
sudo tar -C /usr/local -xzf "$TEMP_DIRECTORY/go.tar.gz"
|
#!/bin/sh -e
set -o errexit
###
# Copyright (c) 2015-2018, Antoine Le Gonidec
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# This software is provided by the copyright holders and contributors "as is"
# and any express or implied warranties, including, but not limited to, the
# implied warranties of merchantability and fitness for a particular purpose
# are disclaimed. In no event shall the copyright holder or contributors be
# liable for any direct, indirect, incidental, special, exemplary, or
# consequential damages (including, but not limited to, procurement of
# substitute goods or services; loss of use, data, or profits; or business
# interruption) however caused and on any theory of liability, whether in
# contract, strict liability, or tort (including negligence or otherwise)
# arising in any way out of the use of this software, even if advised of the
# possibility of such damage.
###
###
# Melody’s Escape
# build native Linux packages from the original installers
# send your bug reports to vv221@dotslashplay.it
###
script_version=20180224.1

# Set game-specific variables

GAME_ID='melodys-escape'
GAME_NAME='Melody’s Escape'

ARCHIVES_LIST='ARCHIVE_HUMBLE'

# Humble Bundle build of the game; md5/size let the library verify it.
ARCHIVE_HUMBLE='Melodys_Escape_Linux.zip'
ARCHIVE_HUMBLE_URL='https://www.humblebundle.com/store/melodys-escape'
ARCHIVE_HUMBLE_MD5='4d463482418c2d9917c56df3bbde6eea'
ARCHIVE_HUMBLE_SIZE='60000'
ARCHIVE_HUMBLE_VERSION='1.0-humble160601'

# Optional icons archive shipped separately from the game archive.
ARCHIVE_ICONS_PACK='melodys-escape_icons.tar.gz'
ARCHIVE_ICONS_PACK_MD5='656fce13728d399e557fd72c3a6bc244'

# File lists: which paths from the archive go into which package.
ARCHIVE_DOC_PATH="Melody's Escape"
ARCHIVE_DOC_FILES='./Licenses ./README.txt'

ARCHIVE_GAME_BIN_PATH="Melody's Escape"
ARCHIVE_GAME_BIN_FILES='./MelodysEscape.bin.x86 ./lib ./*.dll ./FNA.dll.config ./*.so ./MelodysEscape.exe'

ARCHIVE_GAME_DATA_PATH="Melody's Escape"
ARCHIVE_GAME_DATA_FILES='./BassPlugins ./BundledMusic ./Calibration ./Content ./Mods ./mono'

ARCHIVE_ICONS_PATH='.'
ARCHIVE_ICONS_FILES='./16x16 ./32x32 ./48x48 ./64x64 ./128x128 ./256x256'

APP_MAIN_TYPE='native'
APP_MAIN_EXE='MelodysEscape.bin.x86'

# Split into an arch-independent data package and a 32-bit binary package.
PACKAGES_LIST='PKG_DATA PKG_BIN'

PKG_DATA_ID="${GAME_ID}-data"
PKG_DATA_DESCRIPTION='data'

PKG_BIN_ARCH='32'
PKG_BIN_DEPS_DEB="$PKG_DATA_ID, libc6, libstdc++6"
PKG_BIN_DEPS_ARCH="$PKG_DATA_ID lib32-glibc"

# Load common functions
# Locate libplayit2.sh either in the XDG data dir or next to this script.

target_version='2.5'

if [ -z "$PLAYIT_LIB2" ]; then
	[ -n "$XDG_DATA_HOME" ] || XDG_DATA_HOME="$HOME/.local/share"
	if [ -e "$XDG_DATA_HOME/play.it/libplayit2.sh" ]; then
		PLAYIT_LIB2="$XDG_DATA_HOME/play.it/libplayit2.sh"
	elif [ -e './libplayit2.sh' ]; then
		PLAYIT_LIB2='./libplayit2.sh'
	else
		printf '\n\033[1;31mError:\033[0m\n'
		printf 'libplayit2.sh not found.\n'
		exit 1
	fi
fi
. "$PLAYIT_LIB2"

# Try to load icons archive
# set_archive mutates $ARCHIVE, so save and restore the main handle around it.

ARCHIVE_MAIN="$ARCHIVE"
set_archive 'ARCHIVE_ICONS' 'ARCHIVE_ICONS_PACK'
ARCHIVE="$ARCHIVE_MAIN"

# Extract game data

extract_data_from "$SOURCE_ARCHIVE"

if [ "$ARCHIVE_ICONS" ]; then
	# Subshell keeps the temporary $ARCHIVE override local.
	(
		ARCHIVE='ARCHIVE_ICONS'
		extract_data_from "$ARCHIVE_ICONS"
	)
fi

PKG='PKG_BIN'
organize_data 'GAME_BIN' "$PATH_GAME"

PKG='PKG_DATA'
organize_data 'DOC' "$PATH_DOC"
organize_data 'GAME_DATA' "$PATH_GAME"

PKG='PKG_DATA'
if [ "$ARCHIVE_ICONS" ]; then
	organize_data 'ICONS' "$PATH_ICON_BASE"
fi

rm --recursive "$PLAYIT_WORKDIR/gamedata"

# Write launchers

PKG='PKG_BIN'
write_launcher 'APP_MAIN'

# Build package

write_metadata
build_pkg

# Clean up

rm --recursive "$PLAYIT_WORKDIR"

# print instructions

print_instructions

exit 0
|
# Ask the user for their name, then greet them.
user_name = input("Please enter your name: ")
print("Hello %s!" % user_name)
#!/bin/bash
# ==============================================================================
# FUNCTIONS - START
# ==============================================================================
# run_it [SRC] [DEST] [OPTIONS]
#   Compiles a Sass file to CSS.
#     $1 _src     - input .sass file       (default: xquisite.sass)
#     $2 _dest    - output .css path       (default: ./output/<src>.css beside this script)
#     $3 _options - extra sass CLI options (default: --sourcemap=none)
run_it()
{
  local _src="${1-xquisite.sass}"
  # Fix: quote "${_src}" inside basename so paths containing spaces survive.
  local _dest="${2-$(dirname "${BASH_SOURCE[0]}")/output/$(basename "${_src}").css}"
  local _options="${3---sourcemap=none}"
  # ----------------------------------------------------------------------------
  # Create the destination directory on demand.
  local _dest_dir="$(dirname "${_dest}")"
  if [ ! -d "${_dest_dir}" ]; then
    mkdir -p "${_dest_dir}"
  fi
  # ----------------------------------------------------------------------------
  # ${_options} is intentionally left unquoted so multiple options word-split.
  sass "${_src}" "${_dest}" ${_options}
}
# run_it()
# ==============================================================================
# FUNCTIONS - END
# ==============================================================================
# ==============================================================================
# EXECUTION - START
# ==============================================================================
# Fix: "$@" (quoted) forwards each argument intact, even with spaces;
# the original bare $@ re-split arguments.
run_it "$@"
# ==============================================================================
# EXECUTION - END
# ==============================================================================
#!/bin/bash
#==========================================
# Copyright (c) 2020 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#==========================================
# Set up the oneAPI environment (silencing its output), then compile and run
# the oneTBB Introduction "fwd-sub-solution" exercise with the DPC++ compiler.
source /opt/intel/inteloneapi/setvars.sh > /dev/null 2>&1
/bin/echo "##" $(whoami) is compiling oneTBB Introduction exercise fwd-sub-solution
dpcpp -std=c++17 -O2 solutions/fwd-sub-solution.cpp common/fwd_sub.cpp -o bin/fwd-sub-solution -tbb
bin/fwd-sub-solution
|
# Termux package recipe for google/woff2 (WOFF2 font compression reference code).
TERMUX_PKG_HOMEPAGE=https://www.w3.org/TR/WOFF2/
TERMUX_PKG_DESCRIPTION="font compression reference code"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=1.0.2
# Source tarball is fetched from the upstream GitHub tag for this version.
TERMUX_PKG_SRCURL=https://github.com/google/woff2/archive/v${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=add272bb09e6384a4833ffca4896350fdb16e0ca22df68c0384773c67a175594
TERMUX_PKG_DEPENDS="brotli"
|
package artifality.item;

import artifality.item.base.BaseItem;
import net.minecraft.item.ItemStack;

/**
 * Item that always renders with the enchantment glint, regardless of the
 * stack's contents.
 */
public class CrystalHeartItem extends BaseItem {

    public CrystalHeartItem(Settings settings) {
        super(settings);
    }

    /** Always show the enchantment glint for this item. */
    @Override
    public boolean hasGlint(ItemStack stack) {
        return true;
    }
}
|
/**
 * An immutable 2D integer data point.
 */
public class DataPoint {

    // final: a DataPoint never changes after construction (the original
    // fields were mutable for no reason).
    private final int x;
    private final int y;

    /**
     * @param x the x coordinate
     * @param y the y coordinate
     */
    public DataPoint(int x, int y) {
        this.x = x;
        this.y = y;
    }

    /** @return the x coordinate */
    public int getX() {
        return x;
    }

    /** @return the y coordinate */
    public int getY() {
        return y;
    }
}
def build_city_info_dictionary(city, location, population):
    """Map a city name to its (location, population) tuple.

    Returns a single-entry dict: ``{city: (location, population)}``.
    """
    return {city: (location, population)}
# Example usage: build and display the info dict for Dallas.
city = 'Dallas'
location = 'Texas, USA'
population = '1.3 million'
city_info_dict = build_city_info_dictionary(city, location, population)
print(city_info_dict)  # {'Dallas': ('Texas, USA', '1.3 million')}
<filename>Source/Core/Utils/FileStringUtils.cpp
#include "FileStringUtils.h"
#include "Core/Application/Application.h"
// --- To get usage of windows file dialogs ---
#include <commdlg.h>
// --- To attach file dialogs to the engine's window ---
#define GLFW_EXPOSE_NATIVE_WIN32 // If defined, we can get Win32 functionalities we need
#include <GLFW/glfw3native.h>
// ------------------------------------------------------------------------------
namespace FileUtils
{
	// ----- Files Standard Functions -----

	// Joins a directory and a filename with a forward slash.
	std::string FileUtils::MakePath(const std::string& dir, const std::string& filename)
	{
		return dir + "/" + filename;
	}

	// Returns the directory part of `path` (everything before the last '/' or '\'),
	// or the literal string "INVALID PATH!" when the path has no separator.
	std::string FileUtils::GetDirectory(const std::string& path)
	{
		size_t last_slash = path.find_last_of("/\\");
		if (last_slash != path.npos)
			return path.substr((size_t)0, path.find_last_of("/\\"));
		else
			return "INVALID PATH!";
	}

	// Last-write timestamp of `filepath` as an opaque 64-bit value:
	// raw FILETIME ticks on Windows, st_mtime seconds elsewhere (the two
	// encodings are NOT comparable with each other). Returns 0 on failure.
	uint64 FileUtils::GetFileLastWriteTimestamp(const char* filepath)
	{
#ifdef _WIN32
		// Reinterpret the FILETIME's two 32-bit words as one unsigned 64-bit value.
		union Filetime2u64
		{
			FILETIME filetime;
			unsigned long long int u64time;
		} conversor;

		WIN32_FILE_ATTRIBUTE_DATA Data;
		if (GetFileAttributesExA(filepath, GetFileExInfoStandard, &Data))
		{
			conversor.filetime = Data.ftLastWriteTime;
			return(conversor.u64time);
		}
#else
		// NOTE: This has not been tested in unix-like systems
		struct stat attrib;
		if (stat(filepath, &attrib) == 0)
			return attrib.st_mtime;
#endif
		return 0;
	}

	// ----- Files Dialogues Functions -----

	// Shows the native Win32 "Open File" dialog, parented to the engine window.
	// Returns the selected path, or an empty string if the user cancelled.
	std::string FileDialogs::OpenFile(const char* filter)
	{
		// -- Initialize OPENFILENAME to 0 (Common Dialog Box Structure) --
		OPENFILENAMEA open_file_name;
		ZeroMemory(&open_file_name, sizeof(OPENFILENAME));

		// -- Set OPENFILENAME --
		open_file_name.lStructSize = sizeof(OPENFILENAME); // Size
		open_file_name.hwndOwner = glfwGetWin32Window((GLFWwindow*)Application::Get().GetWindow().GetNativeWindow()); // Owner of dialog window to engine's window

		// -- Buffer File --
		CHAR file[260] = { 0 };
		open_file_name.lpstrFile = file;
		open_file_name.nMaxFile = sizeof(file);
		open_file_name.lpstrFilter = filter;
		open_file_name.nFilterIndex = 1;
		open_file_name.Flags = OFN_PATHMUSTEXIST | OFN_FILEMUSTEXIST | OFN_NOCHANGEDIR; // If this is not here, it will change the working directory

		// -- Check for current directory and set it if exists --
		CHAR directory[260] = { 0 };
		if (GetCurrentDirectoryA(256, directory))
			open_file_name.lpstrInitialDir = directory;

		// -- If file (Ascii) is open (exists), return it --
		if (GetOpenFileNameA(&open_file_name) == TRUE)
			return open_file_name.lpstrFile;

		return std::string();
	}

	// Shows the native Win32 "Save File" dialog, optionally pre-filled with
	// `filename`. Returns the chosen path, or an empty string on cancel.
	std::string FileDialogs::SaveFile(const char* filter, const char* filename)
	{
		// -- Initialize OPENFILENAME to 0 (Common Dialog Box Structure) --
		OPENFILENAMEA open_file_name;
		ZeroMemory(&open_file_name, sizeof(OPENFILENAME));

		// -- Set OPENFILENAME --
		open_file_name.lStructSize = sizeof(OPENFILENAME); // Size
		open_file_name.hwndOwner = glfwGetWin32Window((GLFWwindow*)Application::Get().GetWindow().GetNativeWindow()); // Owner of dialog window to engine's window

		// -- Buffer File --
		CHAR file[260] = { 0 };
		if (filename)
		{
			std::strncpy(file, filename, sizeof(file)); // Copy filename to file[]
			file[sizeof(file) - 1] = 0; // Add null-terminator at the end (in case filename is >= than the file[] max size)
		}

		open_file_name.lpstrFile = file;
		open_file_name.nMaxFile = sizeof(file);
		open_file_name.lpstrFilter = filter;
		open_file_name.nFilterIndex = 1;
		// Default extension: the text right after the first '\0' in the filter
		// string (i.e. the filter's pattern part).
		open_file_name.lpstrDefExt = strchr(filter, '\0') + 1; // Default extension (gets it from filter)
		open_file_name.Flags = OFN_PATHMUSTEXIST | OFN_OVERWRITEPROMPT | OFN_NOCHANGEDIR; // If this is not here, it will change the working directory

		// -- Check for current directory and set it if exists --
		CHAR directory[260] = { 0 };
		if (GetCurrentDirectoryA(256, directory))
			open_file_name.lpstrInitialDir = directory;

		// -- If file (Ascii) is open (exists), return it --
		if (GetSaveFileNameA(&open_file_name) == TRUE)
			return open_file_name.lpstrFile;

		return std::string();
	}
}
import etcd
import yaml
class EtcdSettingsLoader:
    """Reads settings from a YAML file and writes them into an etcd cluster."""

    def __init__(self, host="127.0.0.1", port=2379, protocol="https", encode=None):
        # Connection parameters for the etcd client.
        self.host = host
        self.port = port
        self.protocol = protocol
        # NOTE(review): `encode` is stored but never used in this class —
        # confirm whether callers rely on it.
        self.encode = encode
        self.client = etcd.Client(host=self.host, port=self.port, protocol=self.protocol)

    def load_settings(self, file_path):
        """Parse ``file_path`` as YAML and write each top-level key/value pair
        to etcd (no TTL)."""
        with open(file_path, 'r') as settings_file:
            settings = yaml.safe_load(settings_file)
        for key, value in settings.items():
            self.client.write(key, value, ttl=None)
package com.example.simpletodo;

import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.text.Editable;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;

/**
 * Screen for editing a single todo item. The launching Intent supplies the
 * item's text and list position; on Save, the edited text and the same
 * position are returned to the caller via setResult(RESULT_OK, ...).
 */
public class EditActivity extends AppCompatActivity {

    EditText editItem;
    Button buttonSave;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_edit);
        editItem = findViewById(R.id.editItem);
        buttonSave = findViewById(R.id.buttonSave);
        ActionBar actionBar = getSupportActionBar();
        // NOTE(review): getSupportActionBar() can return null under a
        // no-action-bar theme — confirm the activity theme guarantees one.
        actionBar.setTitle("Edit item");
        Intent intent = getIntent();
        buttonSave.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Package the edited text plus the original list position into
                // the result Intent so the caller knows which row to update.
                Intent theintent = new Intent();
                Editable result = editItem.getText();
                theintent.putExtra(MainActivity.THE_KEY_ITEM_TEXT, result.toString());
                Bundle bundle = getIntent().getExtras();
                theintent.putExtra(MainActivity.THE_KEY_ITEM_POSITION, bundle.getInt(MainActivity.THE_KEY_ITEM_POSITION));
                setResult(RESULT_OK, theintent);
                finish();
            }
        });
        // Pre-fill the field with the current item text passed by the caller.
        editItem.setText(intent.getStringExtra(MainActivity.THE_KEY_ITEM_TEXT));
    }
}
<reponame>DavidKatz-il/flight-simulator
package customExceptions;

/**
 * Thrown when a variable type is requested that this implementation does
 * not support.
 */
public class VarTypeNotSupportedException extends Exception{
    public VarTypeNotSupportedException(String varType){
        super("The varType: " + varType + " is not supported.");
    }
}
|
// BOJ 11727: "2xn tiling 2" (2020-09-21)
// Count the tilings of a 2xn board using 1x2, 2x1 and 2x2 tiles, modulo 10007.
// Dynamic programming: a width-i board ends in either a vertical domino
// (ways[i-1]) or a 2-wide piece — two horizontal dominoes or one 2x2 square —
// giving 2 * ways[i-2].
#include<iostream>
using namespace std;

// ways[i]: number of tilings of a 2xi board, taken modulo 10007.
int ways[1001];

int main()
{
    int n;
    cin >> n;

    // Base cases: one tiling for width 1, three tilings for width 2.
    ways[1] = 1;
    ways[2] = 3;

    for (int i = 3; i <= n; ++i)
        ways[i] = (ways[i - 1] + 2 * ways[i - 2]) % 10007;

    cout << ways[n] << endl;
    return 0;
}
|
#!/bin/sh

# Print an error message to stderr and abort.
die() {
	echo "" >&2
	echo "$*" >&2
	exit 1
}

# Start from a clean build directory. Fix: paths are quoted so a $HOME
# containing spaces cannot word-split, and a failed cd now aborts instead of
# silently running configure/make from the wrong directory.
rm -rf "${HOME}/Sources/toolchain/build/binutils"
mkdir -p "${HOME}/Sources/toolchain/build/binutils"
cd "${HOME}/Sources/toolchain/build/binutils" || die "Cannot enter build directory"

# light build: Assembler and Linker are not built, as provided with LLVM/Clang
"${HOME}/Sources/toolchain/src/binutils/configure" \
	--prefix=/usr/local/arm-none-eabi \
	--target=arm-none-eabi \
	--without-gnu-ld \
	--without-gnu-as \
	--disable-shared \
	--disable-nls \
	--with-gmp \
	--with-mpfr \
	--disable-cloog-version-check \
	--enable-multilibs \
	--enable-interwork \
	--enable-lto \
	--disable-werror \
	--disable-debug || die "Cannot configure binutils"

make || die "Cannot build binutils"
# Fix: installation failures are no longer ignored.
sudo make install || die "Cannot install binutils"
|
const express = require('express');
const app = express();
const bodyParser = require('body-parser');
const cors = require('cors');
const usersRouter = require('./users/router');

// Port comes from the environment in production, 4000 in development.
const port = process.env.PORT || 4000;

// Global middleware: CORS for cross-origin clients, JSON body parsing,
// then the users router (signup/signin endpoints).
app.use(cors());
app.use(bodyParser.json());
app.use(usersRouter);

app.listen(port, () => {
  console.log(`Listening on port ${port}...`);
});
// users/router.js
const { Router } = require('express');
const bcrypt = require('bcrypt');
const User = require('./model');

const router = new Router();

// POST /signup — create a user with a bcrypt-hashed password.
router.post('/signup', async (req, res) => {
  try {
    const { username, password } = req.body;
    // hash the password
    const saltRounds = 10;
    const hashedPassword = await bcrypt.hash(password, saltRounds);
    // create and save the user
    const user = {
      username,
      password: hashedPassword
    };
    const newUser = await User.create(user);
    // NOTE(review): this echoes the stored record back, including the bcrypt
    // hash — confirm clients need it; consider stripping `password`.
    return res.send({
      message: 'User created successfully!',
      user: newUser
    });
  } catch (error) {
    // Fix: without this catch, an awaited failure (e.g. duplicate username
    // or DB outage) became an unhandled rejection and the request hung.
    return res.status(500).send({ message: 'Something went wrong.' });
  }
});

// POST /signin — verify the supplied credentials.
router.post('/signin', async (req, res) => {
  try {
    const { username, password } = req.body;
    // check if user exists
    const user = await User.findOne({
      where: {
        username
      }
    });
    if (!user) {
      return res.status(404).send({
        message: 'User not found!'
      });
    }
    // check if the given password matches
    const validPassword = await bcrypt.compare(password, user.password);
    if (!validPassword) {
      return res.status(403).send({
        message: 'Invalid password!'
      });
    }
    return res.json({
      message: 'You are now signed in!'
    });
  } catch (error) {
    // Fix: surface unexpected failures as a 500 instead of hanging.
    return res.status(500).send({ message: 'Something went wrong.' });
  }
});

module.exports = router;
<gh_stars>0
package com.github.guitsilva.battleship.model.coordinates;
import java.util.Random;
import com.github.guitsilva.battleship.model.grid.Grid;
/**
 * An immutable pair of board coordinates (line, column), each constrained
 * to the range [0, Grid.SIZE).
 */
public class Coordinates {

    /** Letters used to label coordinates; index 0 maps to 'A'. */
    public final static String CHARS = "ABCDEFGHIJ";

    // Fix: the original getRandomCoordinates() created two fresh Random
    // instances per call — wasteful, and closely-seeded instances can yield
    // correlated values. A single shared instance is the standard idiom.
    private static final Random RANDOM = new Random();

    private final byte line;
    private final byte column;

    /**
     * Builds coordinates from a line letter (e.g. 'A') and a column index.
     * An unknown letter converts to -1 and is rejected by the delegated
     * constructor.
     */
    public Coordinates(char lineChar, byte column) {
        this(convertCharToIndex(lineChar), column);
    }

    /**
     * @throws InvalidCoordinateException if either index is outside
     *         [0, Grid.SIZE)
     */
    public Coordinates(byte line, byte column)
            throws InvalidCoordinateException {
        if (!isValidCoordinate(line)) {
            throw new InvalidCoordinateException("invalid line coordinate");
        }
        if (!isValidCoordinate(column)) {
            throw new InvalidCoordinateException("invalid column coordinate");
        }
        this.line = line;
        this.column = column;
    }

    public byte getLine() {
        return this.line;
    }

    /** @return the line in letter form, e.g. 0 -> 'A'. */
    public char getLineChar() {
        return convertIndexToChar(this.line);
    }

    public byte getColumn() {
        return this.column;
    }

    /** @return the column as a letter from {@link #CHARS}. */
    public char getColumnChar() {
        return convertIndexToChar(this.column);
    }

    public static char convertIndexToChar(byte coordinateIndex) {
        return CHARS.charAt(coordinateIndex);
    }

    /** @return the index of the letter within {@link #CHARS}, or -1 if unknown. */
    public static byte convertCharToIndex(char coordinateChar) {
        return (byte) CHARS.indexOf(coordinateChar);
    }

    public static boolean isValidCoordinateChar(char coordinateChar) {
        return isValidCoordinate(convertCharToIndex(coordinateChar));
    }

    /** @return true when {@code 0 <= coordinate < Grid.SIZE}. */
    public static boolean isValidCoordinate(byte coordinate) {
        return coordinate >= 0 && coordinate < Grid.SIZE;
    }

    public static boolean isValid(Coordinates coordinates) {
        return isValidCoordinate(coordinates.getLine())
                && isValidCoordinate(coordinates.getColumn());
    }

    /** @return uniformly random coordinates drawn from the shared RNG. */
    public static Coordinates getRandomCoordinates() {
        byte randomLine = (byte) RANDOM.nextInt(CHARS.length());
        byte randomColumn = (byte) RANDOM.nextInt(CHARS.length());
        return new Coordinates(randomLine, randomColumn);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.hadoop.rdf.io.input.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.jena.hadoop.rdf.io.input.util.TrackableInputStream;
import org.junit.Assert;
import org.junit.Test;
/**
 * Abstract tests for {@link TrackableInputStream} implementations.
 * <p>
 * Each test generates an in-memory stream of a known length, consumes it
 * through the trackable wrapper (byte-by-byte, buffered, or by skipping),
 * and checks that {@code getBytesRead()} matches the bytes actually consumed.
 */
public abstract class AbstractTrackableInputStreamTests {

    protected static final int KILO = 1024;
    // Sizes used to scale the tests from empty input up to one megabyte.
    protected static final int BYTES_PER_KB = KILO;
    protected static final int BYTES_PER_MB = BYTES_PER_KB * KILO;

    /**
     * Gets the instance to test using the given input as the stream to track
     *
     * @param input
     *            Input Stream
     * @return Trackable Input Stream
     */
    protected abstract TrackableInputStream getInstance(InputStream input);

    /**
     * Generates an input stream containing the given number of bytes
     * (every byte is the character 'b')
     *
     * @param length
     *            Number of bytes
     * @return Input stream
     */
    protected final InputStream generateData(int length) {
        ByteArrayOutputStream output = new ByteArrayOutputStream(length);
        byte b = (byte) 'b';
        for (int i = 0; i < length; i++) {
            output.write(b);
        }
        return new ByteArrayInputStream(output.toByteArray());
    }

    // Reads the stream one byte at a time; both the observed count and the
    // tracked count must equal the stream length.
    protected final void testSingleByteRead(int length) throws IOException {
        InputStream input = this.generateData(length);
        TrackableInputStream trackable = this.getInstance(input);
        long count = 0;
        while (trackable.read() >= 0) {
            count++;
        }
        Assert.assertEquals(length, count);
        Assert.assertEquals(length, trackable.getBytesRead());
        trackable.close();
    }

    /**
     * Test reading byte by byte
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_single_01() throws IOException {
        this.testSingleByteRead(0);
    }

    /**
     * Test reading byte by byte
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_single_02() throws IOException {
        this.testSingleByteRead(100);
    }

    /**
     * Test reading byte by byte
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_single_03() throws IOException {
        // 1KB
        this.testSingleByteRead(BYTES_PER_KB);
    }

    /**
     * Test reading byte by byte
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_single_04() throws IOException {
        // 1 MB
        this.testSingleByteRead(BYTES_PER_MB);
    }

    // Reads the stream through a fixed-size buffer via read(byte[]); the
    // summed read counts and the tracked count must equal the stream length.
    protected final void testMultiByteRead(int length, int bufferSize) throws IOException {
        if (bufferSize < 1)
            throw new IllegalArgumentException("bufferSize must be >= 1");
        InputStream input = this.generateData(length);
        TrackableInputStream trackable = this.getInstance(input);
        long count = 0;
        byte[] buffer = new byte[bufferSize];
        long read;
        do {
            read = trackable.read(buffer);
            if (read > 0)
                count += read;
        } while (read >= 0);
        Assert.assertEquals(length, count);
        Assert.assertEquals(length, trackable.getBytesRead());
        trackable.close();
    }

    /**
     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_01() throws IOException {
        this.testMultiByteRead(0, 1);
    }

    /**
     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_02() throws IOException {
        this.testMultiByteRead(0, 16);
    }

    /**
     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_03() throws IOException {
        this.testMultiByteRead(0, BYTES_PER_KB);
    }

    /**
     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_04() throws IOException {
        // 1KB
        this.testMultiByteRead(BYTES_PER_KB, 1);
    }

    /**
     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_05() throws IOException {
        // 1KB
        this.testMultiByteRead(BYTES_PER_KB, 16);
    }

    /**
     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_06() throws IOException {
        // 1KB
        this.testMultiByteRead(BYTES_PER_KB, BYTES_PER_KB);
    }

    /**
     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_07() throws IOException {
        // 1MB
        this.testMultiByteRead(BYTES_PER_MB, 1);
    }

    /**
     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_08() throws IOException {
        // 1MB
        this.testMultiByteRead(BYTES_PER_MB, 16);
    }

    /**
     * Test reading multiple bytes i.e. calling {@link InputStream#read(byte[])}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_09() throws IOException {
        // 1MB
        this.testMultiByteRead(BYTES_PER_MB, BYTES_PER_KB);
    }

    // Like testMultiByteRead(int,int), but requests only readSize bytes of the
    // buffer per call via read(byte[], int, int).
    protected final void testMultiByteRead(int length, int bufferSize, int readSize) throws IOException {
        if (bufferSize < 1)
            throw new IllegalArgumentException("bufferSize must be >= 1");
        if (readSize < 1 || readSize > bufferSize)
            throw new IllegalArgumentException("readSize must be >= 1 and <= bufferSize");
        InputStream input = this.generateData(length);
        TrackableInputStream trackable = this.getInstance(input);
        long count = 0;
        byte[] buffer = new byte[bufferSize];
        long read;
        do {
            read = trackable.read(buffer, 0, readSize);
            if (read > 0)
                count += read;
        } while (read >= 0);
        Assert.assertEquals(length, count);
        Assert.assertEquals(length, trackable.getBytesRead());
        trackable.close();
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_01() throws IOException {
        this.testMultiByteRead(0, 1, 1);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_02() throws IOException {
        this.testMultiByteRead(0, 16, 1);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_03() throws IOException {
        this.testMultiByteRead(0, 16, 16);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_04() throws IOException {
        this.testMultiByteRead(0, BYTES_PER_KB, 1);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_05() throws IOException {
        this.testMultiByteRead(0, BYTES_PER_KB, 16);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_06() throws IOException {
        this.testMultiByteRead(0, BYTES_PER_KB, BYTES_PER_KB);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_07() throws IOException {
        // 1KB
        this.testMultiByteRead(BYTES_PER_KB, 1, 1);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_08() throws IOException {
        // 1KB
        this.testMultiByteRead(BYTES_PER_KB, 16, 1);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_09() throws IOException {
        // 1KB
        this.testMultiByteRead(BYTES_PER_KB, 16, 16);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_10() throws IOException {
        // 1KB
        this.testMultiByteRead(BYTES_PER_KB, BYTES_PER_KB, 1);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_11() throws IOException {
        // 1KB
        this.testMultiByteRead(BYTES_PER_KB, BYTES_PER_KB, 16);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_12() throws IOException {
        // 1KB
        this.testMultiByteRead(BYTES_PER_KB, BYTES_PER_KB, BYTES_PER_KB);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_13() throws IOException {
        // 1MB
        this.testMultiByteRead(BYTES_PER_MB, 1, 1);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_14() throws IOException {
        // 1MB
        this.testMultiByteRead(BYTES_PER_MB, 16, 1);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_15() throws IOException {
        // 1MB
        this.testMultiByteRead(BYTES_PER_MB, 16, 16);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_16() throws IOException {
        // 1MB
        this.testMultiByteRead(BYTES_PER_MB, BYTES_PER_KB, 1);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_17() throws IOException {
        // 1MB
        this.testMultiByteRead(BYTES_PER_MB, BYTES_PER_KB, 16);
    }

    /**
     * Test reading multiple bytes while reading less than the buffer size bytes
     * i.e. calling {@link InputStream#read(byte[], int, int)}
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_read_multiple_partial_18() throws IOException {
        // 1MB
        this.testMultiByteRead(BYTES_PER_MB, BYTES_PER_KB, BYTES_PER_KB);
    }

    // Consumes the stream via skip() in skipSize-sized steps; total skipped
    // and the tracked count must equal the stream length.
    protected final void testSkip(int length, long skipSize) throws IOException {
        if (skipSize < 1)
            throw new IllegalArgumentException("skipSize must be >= 1");
        InputStream input = this.generateData(length);
        TrackableInputStream trackable = this.getInstance(input);
        long count = 0;
        long skipped;
        do {
            skipped = trackable.skip(skipSize);
            if (skipped > 0)
                count += skipped;
        } while (skipped > 0);
        Assert.assertEquals(length, count);
        Assert.assertEquals(length, trackable.getBytesRead());
        trackable.close();
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_01() throws IOException {
        this.testSkip(0, 1);
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_02() throws IOException {
        this.testSkip(100, 1);
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_03() throws IOException {
        this.testSkip(100, 16);
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_04() throws IOException {
        this.testSkip(100, BYTES_PER_KB);
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_05() throws IOException {
        // 1KB
        this.testSkip(BYTES_PER_KB, 1);
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_06() throws IOException {
        // 1KB
        this.testSkip(BYTES_PER_KB, 16);
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_07() throws IOException {
        // 1KB
        this.testSkip(BYTES_PER_KB, BYTES_PER_KB);
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_08() throws IOException {
        // 1KB
        this.testSkip(BYTES_PER_KB, BYTES_PER_MB);
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_09() throws IOException {
        // 1 MB
        this.testSkip(BYTES_PER_MB, 1);
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_10() throws IOException {
        // 1 MB
        this.testSkip(BYTES_PER_MB, 16);
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_11() throws IOException {
        // 1 MB
        this.testSkip(BYTES_PER_MB, BYTES_PER_KB);
    }

    /**
     * Test skipping
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_skip_single_12() throws IOException {
        // 1 MB
        this.testSkip(BYTES_PER_MB, BYTES_PER_MB);
    }

    /**
     * Tests behaviour after closing: read() reports end-of-stream
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_post_close_01() throws IOException {
        InputStream input = this.generateData(0);
        TrackableInputStream trackable = this.getInstance(input);
        trackable.close();
        Assert.assertEquals(-1, trackable.read());
    }

    /**
     * Tests behaviour after closing: a zero-length buffer read returns 0
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_post_close_02() throws IOException {
        InputStream input = this.generateData(0);
        TrackableInputStream trackable = this.getInstance(input);
        trackable.close();
        Assert.assertEquals(0, trackable.read(new byte[0]));
    }

    /**
     * Tests behaviour after closing: a buffered read reports end-of-stream
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_post_close_03() throws IOException {
        InputStream input = this.generateData(0);
        TrackableInputStream trackable = this.getInstance(input);
        trackable.close();
        Assert.assertEquals(-1, trackable.read(new byte[1]));
    }

    /**
     * Tests behaviour after closing: a zero-length ranged read returns 0
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_post_close_04() throws IOException {
        InputStream input = this.generateData(0);
        TrackableInputStream trackable = this.getInstance(input);
        trackable.close();
        Assert.assertEquals(0, trackable.read(new byte[16], 0, 0));
    }

    /**
     * Tests behaviour after closing: a ranged read reports end-of-stream
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_post_close_05() throws IOException {
        InputStream input = this.generateData(0);
        TrackableInputStream trackable = this.getInstance(input);
        trackable.close();
        Assert.assertEquals(0, trackable.skip(0));
    }

    /**
     * Tests behaviour after closing: skipping zero bytes returns 0
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_post_close_06() throws IOException {
        InputStream input = this.generateData(0);
        TrackableInputStream trackable = this.getInstance(input);
        trackable.close();
        Assert.assertEquals(0, trackable.skip(0));
    }

    /**
     * Tests behaviour after closing: skipping a positive count is expected to
     * report 0 bytes skipped rather than throw
     *
     * @throws IOException
     */
    @Test
    public final void trackable_input_post_close_07() throws IOException {
        InputStream input = this.generateData(0);
        TrackableInputStream trackable = this.getInstance(input);
        trackable.close();
        Assert.assertEquals(0, trackable.skip(1));
    }
}
|
#pragma once
#include <cstdint>
// Mixin that converts enum-class flag values into a bit field of type FieldT.
template<typename FieldT>
struct BitFieldBase {
    //casts an enum class as a bitfield
    template<typename T>
    FieldT constexpr bitfield(T const val) {
        //assert(sizeof(Rt)>=sizeof(T) || val>>sizeof(Rt) == 0);
        return (static_cast<FieldT>(val));
    }

    // Variadic overload: ORs any number of flags together (C++17 fold expression).
    template<typename... Args>
    FieldT constexpr bitfield(const Args... args) {
        return (... | bitfield(args));
    }
};
// Narrow a single flag (typically an enum class value) to its raw 8-bit form.
template<typename Flag>
uint8_t constexpr bitfield8(Flag const flag) {
    return static_cast<uint8_t>(flag);
}

// OR together any number of flags into one 8-bit field (C++17 fold expression).
template<typename... Flags>
uint8_t constexpr bitfield8(const Flags... flags) {
    return (... | bitfield8(flags));
}
|
<filename>socialmediasignup/src/main/java/com/ahmedadel/socialmediasignup/SocialMediaSignUpActivity.java
package com.ahmedadel.socialmediasignup;

import android.app.ProgressDialog;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.view.Window;

import com.ahmedadel.socialmediasignup.model.SocialMediaUser;
import com.ahmedadel.socialmediasignup.SocialMediaSignUp.SocialMediaType;

/**
 * Created by <NAME> on 2/16/18.
 * <p>
 * SocialMediaSignUpActivity is the base activity that our social media classes we integrated with will extend.
 * Subclasses supply their auth data via {@link #getAuthData()} and call the
 * handle* methods to report the outcome to the registered callback before
 * finishing.
 */
abstract class SocialMediaSignUpActivity extends AppCompatActivity {

    // Shown while the sign-up flow is in progress; falls back to a default
    // dialog when no custom one was configured.
    protected ProgressDialog loadingDialog;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        supportRequestWindowFeature(Window.FEATURE_NO_TITLE);
        super.onCreate(savedInstanceState);
        loadingDialog = SocialMediaSignUp.getInstance().getCustomLoadingDialog();
        if (loadingDialog == null)
            loadingDialog = SocialMediaSignUpUtils.createLoadingDialog(this);
    }

    // Reports a user-cancelled flow via onSignOut, then closes this activity.
    // NOTE(review): "handCancel" looks like a typo for "handleCancel" — cannot
    // be renamed here without checking subclasses.
    protected void handCancel(SocialMediaType socialMediaType) {
        stopLoading();
        getAuthData().getCallback().onSignOut(socialMediaType);
        finish();
    }

    // Reports a failure to the callback, then closes this activity.
    protected void handleError(Throwable error) {
        stopLoading();
        getAuthData().getCallback().onError(error);
        finish();
    }

    // Reports a successful sign-up with the resolved user, then closes.
    protected void handleSuccess(SocialMediaSignUp.SocialMediaType socialMediaType, SocialMediaUser socialMediaUser) {
        stopLoading();
        getAuthData().getCallback().onSuccess(socialMediaType, socialMediaUser);
        finish();
    }

    /** Supplies the per-provider auth data (including the result callback). */
    protected abstract SocialMediaData getAuthData();

    protected void startLoading() {
        if (loadingDialog != null)
            loadingDialog.show();
    }

    protected void stopLoading() {
        if (loadingDialog != null)
            loadingDialog.dismiss();
    }
}
|
from django.db import models
# Create your models here.
class MailingList(models.Model):
    # Subscriber address. unique=True rejects duplicate signups at the DB
    # level; db_index=True speeds up the per-email lookups.
    email = models.EmailField(db_index=True, unique=True)
#!/usr/bin/env bash
# ANSI color codes used by all output in this script.
YW=$(echo "\033[33m")
BL=$(echo "\033[36m")
RD=$(echo "\033[01;31m")
CM='\xE2\x9C\x94\033'
GN=$(echo "\033[1;92m")
CL=$(echo "\033[m")
APP="Motioneye"
# Container hostname: lowercased app name with spaces stripped.
HN=$(echo "${APP,,}" | tr -d ' ')
# Confirm before creating anything; -r stops read from eating backslashes.
while true; do
  read -rp "This will create a New ${APP} LXC. Proceed(y/n)?" yn
  case $yn in
    [Yy]* ) break;;
    [Nn]* ) exit;;
    * ) echo "Please answer yes or no.";;
  esac
done
clear
# Render the ASCII-art banner. Do not reflow or re-indent the art lines:
# they are literal output.
function header_info {
echo -e "${BL}
 __  __  ____ _______ _____ ____  _   _ ________     ________
| \/ |/ __ \__ __|_ _/ __ \| \ | | ____\ \ / / ____|
| \ / | | | | | | | || | | | \| | |__ \ \_/ /| |__
| |\/| | | | | | | | || | | | | __| \ / | __|
| | | | |__| | | | _| || |__| | |\ | |____ | | | |____
|_| |_|\____/ |_| |_____\____/|_| \_|______| |_| |______|
${CL}"
}
header_info
# Print the first menu (install method) and read the choice into $opt.
show_menu(){
  printf " ${YW} 1)${YW} Privileged ${CL}\n"
  printf " ${YW} 2)${GN} Unprivileged ${CL}\n"
  printf "Please choose a Install Method and hit enter or ${RD}x${CL} to exit."
  read -r opt
}
# Echo the chosen option; stores it in message1 so later menu passes can
# re-print earlier choices (this function is redefined after each menu).
option_picked(){
  message1=${@:-"${CL}Error: No message passed"}
  printf " ${YW}${message1}${CL}\n"
}
show_menu
# Loop until a valid install-method choice is made; sets IM
# (0 = privileged, 1 = unprivileged). $opt is quoted so an empty input
# does not collapse to an invalid [ ] expression.
while [ "$opt" != '' ]
do
  if [ "$opt" = '' ]; then
    exit;
  else
    case $opt in
      1) clear;
         header_info;
         option_picked "Using Privileged Install";
         IM=0
         break;
         ;;
      2) clear;
         header_info;
         option_picked "Using Unprivileged Install";
         IM=1
         break;
         ;;
      x)exit;
        ;;
      \n)exit;
        ;;
      *)clear;
        option_picked "Please choose a Install Method from the menu";
        show_menu;
        ;;
    esac
  fi
done
# Second menu: root console password behaviour.
show_menu2(){
  printf " ${YW} 1)${GN} Use Automatic Login ${CL}\n"
  printf " ${YW} 2)${GN} Use Password (changeme) ${CL}\n"
  printf "Please choose a Password Type and hit enter or ${RD}x${CL} to exit."
  read -r opt
}
# Deliberate redefinition: now prints the previous choice (message1) plus
# this menu's choice (message2).
option_picked(){
  message2=${@:-"${CL}Error: No message passed"}
  printf " ${YW}${message1}${CL}\n"
  printf " ${YW}${message2}${CL}\n"
}
show_menu2
# Loop until a valid password choice is made; sets PW (pct password flag).
# $opt quoted for the same empty-input reason as the first loop.
while [ "$opt" != '' ]
do
  if [ "$opt" = '' ]; then
    exit;
  else
    case $opt in
      1) clear;
         header_info;
         option_picked "Using Automatic Login";
         PW=" "
         break;
         ;;
      2) clear;
         header_info;
         option_picked "Using Password (changeme)";
         PW="-password changeme"
         break;
         ;;
      x)exit;
        ;;
      \n)exit;
        ;;
      *)clear;
        option_picked "Please choose a Password Type from the menu";
        show_menu2;
        ;;
    esac
  fi
done
# Third menu: automatic vs manual DHCP reservation.
show_menu3(){
  printf " ${YW} 1)${GN} Automatic DHCP ${CL}\n"
  printf " ${YW} 2)${GN} Manual DHCP ${CL}\n"
  printf "Please choose a DHCP Type and hit enter or ${RD}x${CL} to exit."
  read -r opt
}
# Deliberate redefinition: prints all three accumulated choices.
option_picked(){
  message3=${@:-"${CL}Error: No message passed"}
  printf " ${YW}${message1}${CL}\n"
  printf " ${YW}${message2}${CL}\n"
  printf " ${YW}${message3}${CL}\n"
}
show_menu3
# Loop until a valid DHCP choice is made; sets DHCP ("1" = manual).
while [ "$opt" != '' ]
do
  if [ "$opt" = '' ]; then
    exit;
  else
    case $opt in
      1) clear;
         header_info;
         option_picked "Using Automatic DHCP";
         DHCP=" "
         break;
         ;;
      2) clear;
         header_info;
         option_picked "Using Manual DHCP";
         DHCP="1"
         break;
         ;;
      x)exit;
        ;;
      \n)exit;
        ;;
      *)clear;
        option_picked "Please choose a DHCP Type from the menu";
        show_menu3;
        ;;
    esac
  fi
done
# Strict mode: abort on any error (including inside pipelines), treat
# unset variables as errors, and let the ERR trap fire inside functions.
set -o errexit
set -o errtrace
set -o nounset
set -o pipefail
# Aliases are not expanded in non-interactive shells unless enabled.
shopt -s expand_aliases
# Capture the failing exit status and line number, then run error_exit.
alias die='EXIT=$? LINE=$LINENO error_exit'
trap die ERR
trap cleanup EXIT
# ERR-trap handler: print a formatted error (exit code @ line number),
# tear down any partially created container, and exit with the original
# status. Expects EXIT and LINE to be set by the `die` alias.
function error_exit() {
  trap - ERR
  local DEFAULT='Unknown failure occurred.'
  local REASON="\e[97m${1:-$DEFAULT}\e[39m"
  local FLAG="\e[91m[ERROR] \e[93m$EXIT@$LINE"
  msg "$FLAG $REASON"
  # Only clean up if a container ID was already allocated; quoting keeps
  # the test well-formed when CTID is unset (nounset-safe expansion).
  [ -n "${CTID-}" ] && cleanup_ctid
  exit $EXIT
}
# Print a yellow [WARNING]-tagged message.
function warn() {
  msg "\e[93m[WARNING]\e[39m \e[97m$1\e[39m"
}
# Print a cyan [INFO]-tagged message.
function info() {
  msg "\e[36m[INFO]\e[39m $1"
}
# Emit one line with escape sequences interpreted.
function msg() {
  echo -e "$1"
}
# Destroy the container (stopping it first if running), or free its root
# volume if only the storage was allocated. STORAGE/ROOTFS are expected
# to be exported by the sourced create_lxc.sh — NOTE(review): confirm.
function cleanup_ctid() {
  # Run pct directly in the if: the original `if $(pct ...)` executed the
  # command's (empty) output instead of the command itself.
  if pct status "$CTID" &>/dev/null; then
    if [ "$(pct status "$CTID" | awk '{print $2}')" == "running" ]; then
      pct stop "$CTID"
    fi
    pct destroy "$CTID"
  elif [ "$(pvesm list "$STORAGE" --vmid "$CTID")" != "" ]; then
    pvesm free "$ROOTFS"
  fi
}
# EXIT trap: return to the original directory and remove the temporary
# workspace. TEMP_DIR is quoted so an unexpected value (spaces) cannot
# make rm -rf operate on the wrong paths.
function cleanup() {
  popd >/dev/null
  rm -rf "$TEMP_DIR"
}
# Unprivileged containers additionally need keyctl for systemd in the guest.
if [ "$IM" == "1" ]; then
  FEATURES="nesting=1,keyctl=1"
else
  FEATURES="nesting=1"
fi
TEMP_DIR=$(mktemp -d)
pushd "$TEMP_DIR" >/dev/null
# Next free VMID on the cluster becomes the container ID.
export CTID=$(pvesh get /cluster/nextid)
export PCT_OSTYPE=debian
export PCT_OSVERSION=11
export PCT_DISK_SIZE=8
export PCT_OPTIONS="
-features $FEATURES
-hostname $HN
-net0 name=eth0,bridge=vmbr0,ip=dhcp
-onboot 1
-cores 2
-memory 2048
-unprivileged ${IM}
${PW}
"
# Delegate the actual pct create to the shared helper script.
bash -c "$(wget -qLO - https://raw.githubusercontent.com/mossburg145/Proxmox/main/ct/create_lxc.sh)" || exit
STORAGE_TYPE=$(pvesm status -storage $(pct config $CTID | grep rootfs | awk -F ":" '{print $2}') | awk 'NR>1 {print $2}')
if [ "$STORAGE_TYPE" == "zfspool" ]; then
  warn "Some addons may not work due to ZFS not supporting 'fallocate'."
fi
LXC_CONFIG=/etc/pve/lxc/${CTID}.conf
cat <<EOF >> $LXC_CONFIG
lxc.cgroup2.devices.allow: a
lxc.cap.drop:
EOF
if [ "$DHCP" == "1" ]; then
  # BUGFIX: the original had a trailing backslash after the command
  # substitution, which made the echo part of the MAC= assignment — MAC
  # was passed to echo's environment and never set in this shell.
  MAC=$(pct config $CTID \
    | grep -i hwaddr \
    | awk '{print substr($2, 31, length($3) 17 ) }')
  # NOTE(review): the awk substr() arguments look garbled — verify the
  # offsets against real `pct config` hwaddr output.
  echo -e "MAC Address ${BL}$MAC${CL}"
  dhcp_reservation(){
    printf "Please set DHCP reservation and press Enter."
    read -r
  }
  dhcp_reservation
fi
echo -en "${GN} Starting LXC Container... "
pct start $CTID
echo -e "${CM}${CL} \r"
alias lxc-cmd="lxc-attach -n $CTID --"
# Run the app installer inside the freshly started container.
lxc-cmd bash -c "$(wget -qLO - https://raw.githubusercontent.com/mossburg145/Proxmox/main/setup/$HN-install.sh)" || exit
IP=$(pct exec $CTID ip a s dev eth0 | sed -n '/inet / s/\// /p' | awk '{print $2}')
echo -e "${GN}Successfully created ${APP} LXC to${CL} ${BL}$CTID${CL}.
${BL}${APP}${CL} should be reachable by going to the following URL.
${BL}http://${IP}:8765${CL} \n"
|
def format_family_tree(id, id_padre, id_madre):
    """Describe one person's parent links as a single sentence.

    A parent id of 0 means "unknown/absent"; with both parents absent the
    person is reported as the root of the tree.
    """
    has_father = id_padre != 0
    has_mother = id_madre != 0
    if has_father and has_mother:
        return f"Person with id {id} has father with id {id_padre} and mother with id {id_madre}."
    if has_father:
        return f"Person with id {id} has father with id {id_padre}."
    if has_mother:
        return f"Person with id {id} has mother with id {id_madre}."
    return f"Person with id {id} is the root of the family tree."
#!/bin/bash
# bash script to sync the local code to the remote sassauna servers
# (actually pulls the results directory FROM the remote host to this machine).
#echo -e "Choose local directory (directory you want to sync to sassauna servers)"
#read localdir
# Remote source: results directory on the sassauna login node. The trailing
# slash makes rsync copy the directory's contents, not the directory itself.
SOURCEDIR=/home/msc20f10/Python_Code/results/
#echo -e "Choose destination directory (keras or pytorch)"
#read destdir
#DESTDIR=/media/sf_Master_thesis/Python_Code/results
DESTDIR=/home/fabian/Documents/results
# -a preserves permissions/times/links, -v prints each transferred file.
rsync -av msc20f10@sassauna.ee.ethz.ch:$SOURCEDIR $DESTDIR
class Complex:
    """Minimal complex number supporting component-wise addition."""

    def __init__(self, re, im):
        """Store the real (re) and imaginary (im) parts."""
        self.re = re
        self.im = im

    def __add__(self, rhs):
        """Return a new Complex; rhs must expose .re and .im attributes."""
        return Complex(self.re + rhs.re, self.im + rhs.im)

    def __repr__(self):
        # Added for debuggability; does not change any existing behavior.
        return f"Complex({self.re}, {self.im})"
# Create instances of Complex class
c1 = Complex(3, 4)
c2 = Complex(1, 2)
# The + operator dispatches to Complex.__add__ (component-wise addition).
result = c1 + c2
print(result.re, result.im) # Output: 4 6
#!/bin/bash
# This script expects the following environment variables to be set:
# I2C_DEVICE : the I2C device to use for the ECC probe
# REGION_OVERRIDE/GW_REGION : without one of these two variables the region will default to US915;
# for an asserted device it should not matter.
# Any other variable overrides the gateway-rs settings file: gateway-rs allows us to
# override all settings in the gateway-rs settings file. Refer to the readme at
# https://github.com/helium/gateway-rs/tree/main for more details.
echo "Checking for I2C device"
# Bus number is the suffix of e.g. "i2c-1".
I2C_NUM=$(echo "${I2C_DEVICE}" | cut -d "-" -f2)
# NOTE:: not sure we even need to do this. We should set the right environment or
# get it from hm-pyhelper and it should be correct. We are doing this only to make
# sure that the gateway runs even when the I2C device is not present.
mapfile -t data < <( i2cdetect -y "${I2C_NUM}" )
# Start at 1 to skip i2cdetect's header row; upper bound fixed to the last
# valid index (the original iterated one past the end of the array).
for i in $(seq 1 $(( ${#data[@]} - 1 ))); do
  # shellcheck disable=SC2206
  line=(${data[$i]})
  # shellcheck disable=SC2068
  # Address 0x60 is the ECC crypto chip.
  if echo ${line[@]:1} | grep -q 60; then
    echo "ECC is present."
    ECC_CHIP=True
  fi
done
if [[ -v ECC_CHIP ]]
then
  echo "Using ECC for public key."
  export GW_KEYPAIR="ecc://i2c-${I2C_NUM}:96&slot=0"
elif [[ -v ALLOW_NON_ECC_KEY ]]
then
  echo "gateway-rs deb package provided key /etc/helium_gateway/gateway_key.bin will be used."
else
  echo "define ALLOW_NON_ECC_KEY environment variable to run gatewayrs without ecc."
  exit 1
fi
if [[ -v REGION_OVERRIDE ]]
then
  export GW_REGION="${REGION_OVERRIDE}"
fi
# NOTE:: this should ultimately move to pktfwd container.
# the local rpc should is capable of providing this information
/opt/nebra-gatewayrs/gen-region.sh &
# there is a systemd/sysv script for this service in the deb package
# it doesn't make much sense to use it in the container
/usr/bin/helium_gateway -c /etc/helium_gateway server
|
<filename>lib/bar.py
import progressbar
def BAR():
    """Build a preconfigured progressbar.ProgressBar.

    Layout: animated braille marker, percentage, a ■/□ bar, and an ETA;
    stdout writes are redirected so prints don't break the bar line.
    """
    widgets = [
        progressbar.AnimatedMarker(markers='⣯⣟⡿⢿⣻⣽⣾⣷'),
        ' [', progressbar.Percentage(), '] ',
        progressbar.Bar(marker='■', fill='□', left='[', right=']'),
        ' (', progressbar.ETA(), ') ',
    ]
    return progressbar.ProgressBar(widgets=widgets, redirect_stdout=True)
|
#!/bin/bash
# Run a single dieharder RNG test with a fixed seed for reproducibility.
# NOTE(review): flag meanings assumed from dieharder's CLI (-d test number,
# -g generator number, -S seed) — confirm with `dieharder -l` / `-g -1`.
dieharder -d 209 -g 53 -S 215460975
import json
from textblob import TextBlob
# create the API endpoint
@app.route('/sentiment_analysis', methods=['POST'])
def sentiment_analysis():
    """POST endpoint: annotate each posted tweet with a polarity score.

    Expects a JSON list of {"username", "tweet"} objects; returns the same
    list with a "sentiment" key added (TextBlob polarity, -1.0 to 1.0).
    """
    tweets = request.get_json()
    results = []
    for item in tweets:
        text = item['tweet']
        results.append({
            'username': item['username'],
            'tweet': text,
            'sentiment': TextBlob(text).sentiment.polarity,
        })
    return json.dumps(results)
def apply_destination_rule(context: str, namespace: str, yaml_manifest: str) -> None:
    """Apply a DestinationRule YAML manifest with kubectl.

    Args:
        context: kubeconfig context to target.
        namespace: namespace to apply the manifest in.
        yaml_manifest: manifest text, piped to kubectl via stdin (-f -).
    """
    import subprocess
    # Build argv directly instead of a shell=True command string: the
    # original interpolated context/namespace into a shell line, letting a
    # malicious value inject arbitrary commands.
    kubectl_command = [
        'kubectl', f'--context={context}', 'apply', '-n', namespace, '-f', '-',
    ]
    subprocess.run(kubectl_command, input=yaml_manifest, text=True)
#include<iostream>
using namespace std;
// Function to calculate the nth fibonacci number (0-indexed: fib(0)=0,
// fib(1)=1) using a constant-space two-element rolling window.
int fibonacci(int n)
{
    // Slot i%2 holds fib(i) once computed; initially fib(0), fib(1).
    int Fibonacci[2] = { 0, 1 };
    for (int i = 2; i <= n; i++)
    {
        // fib(i) = fib(i-1) + fib(i-2); writing into slot i%2 overwrites
        // fib(i-2), which is no longer needed. The original code had an
        // extra "swap" write here that clobbered fib(i-1) as well and
        // produced wrong results for n >= 4 (e.g. fibonacci(4) == 4).
        Fibonacci[i % 2] = Fibonacci[0] + Fibonacci[1];
    }
    // fib(n) lives in slot n%2 (also correct for n = 0 and n = 1).
    return Fibonacci[n % 2];
}
int main()
{
    // Position in the sequence to report.
    const int position = 10;
    // Print the computed value (no trailing newline, as before).
    cout << fibonacci(position);
    return 0;
}
<filename>test/lib/extensions/active_record/connection_adapters/sqlite3_adapter_test.rb
require 'test_helper'
require 'sqlite3_json_rails4/extensions/active_record/type/json'
require 'sqlite3_json_rails4/extensions/active_record/connection_adapters/sqlite3_adapter'
# Verifies that prepending SQLite3AdapterAdditions to an adapter-like class
# registers the custom JSON type in the adapter's type map.
class SQLite3AdapterAdditionsTest < Minitest::Test
  # Minimal stand-in for the real SQLite3 adapter: prepending the extension
  # lets its initialize_type_map hook wrap ours.
  class FakeClass
    attr_reader :map
    prepend Sqlite3JsonRails4::Extensions::ActiveRecord::ConnectionAdapters::SQLite3AdapterAdditions
    def initialize_type_map(map)
      @map = map
    end
  end
  def setup
    @instance = FakeClass.new
  end
  # The extension must call register_type with a /^json/i matcher and an
  # instance of the custom Json type.
  def test_json_is_properly_registered
    mock = Minitest::Mock.new
    mock.expect :register_type, nil, [/^json/i, Sqlite3JsonRails4::Extensions::ActiveRecord::Type::Json.new]
    @instance.initialize_type_map(mock)
    mock.verify
  end
end
|
/*
* Copyright (c) 2016-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the LICENSE
* file in the root directory of this source tree.
*
*/
#include <gtest/gtest.h>
#include <folly/json.h>
#include "mcrouter/lib/IOBufUtil.h"
#include "mcrouter/lib/carbon/CarbonMessageConversionUtils.h"
#include "mcrouter/lib/carbon/test/gen/CarbonTest.h"
// Populates every field of a TestRequest (scalars, strings, IOBufs, enums,
// containers, nested structs, a union) and checks convertToFollyDynamic
// against a hand-built folly::dynamic of the expected serialization.
TEST(CarbonMessageConversionUtils, toFollyDynamic_Complex) {
  carbon::test::TestRequest r;
  // Fields inherited through the Base mixin.
  r.baseInt64Member() = 1;
  r.int32Member() = -1;
  r.stringMember() = "testStrMbr";
  r.enumMember() = carbon::test2::util::SimpleEnum::One;
  r.vectorMember().emplace_back();
  r.vectorMember().back().member1() = 342;
  r.vectorMember().emplace_back();
  r.vectorMember().back().member1() = 123;
  r.key() = carbon::Keys<folly::IOBuf>("/test/key/");
  r.testEnum() = carbon::test2::util::SimpleEnum::Negative;
  r.testBool() = true;
  r.testChar() = 'a';
  r.testInt8() = -123;
  r.testInt16() = -7890;
  r.testInt32() = -123456789;
  r.testInt64() = -9876543210123ll;
  r.testUInt8() = 123;
  r.testUInt16() = 7890;
  r.testUInt32() = 123456789;
  r.testUInt64() = 9876543210123ll;
  r.testFloat() = 1.5;
  r.testDouble() = 5.6;
  r.testShortString() = "abcdef";
  r.testLongString() = "asdfghjkl;'eqtirgwuifhiivlzkhbvjkhc3978y42h97*&687gba";
  r.testIobuf() = folly::IOBuf(
      folly::IOBuf::CopyBufferOp(), folly::StringPiece("TestTheBuf"));
  r.testStruct().baseInt64Member() = 345;
  r.testStruct().stringMember() = "nestedSimpleStruct";
  r.testOptionalString() = "tstOptStr";
  r.testList() = std::vector<std::string>({"abc", "bce", "xyz"});
  r.testEnumVec() = std::vector<carbon::test2::util::SimpleEnum>(
      {carbon::test2::util::SimpleEnum::One,
       carbon::test2::util::SimpleEnum::Twenty});
  r.testUnion().emplace<2>(true);
  r.testNestedVec() = {{1, 1, 1}, {2, 2, 2}};
  r.testUMap() = std::unordered_map<std::string, std::string>(
      {{"key", "value"}, {"adele", "beyonce"}});
  r.testMap() = std::map<double, double>({{3.14, 2.7}, {0.577, 0.2}});
  r.testComplexMap() = std::map<std::string, std::vector<uint16_t>>(
      {{"hello", {1, 1, 1}}, {"world", {2, 2, 2}}});
  r.testUSet() = std::unordered_set<std::string>({"hello", "world"});
  r.testSet() = std::set<uint64_t>({123, 456});
  r.testIOBufList() =
      std::vector<folly::IOBuf>({folly::IOBuf(), folly::IOBuf()});
  // Expected serialization: mixins appear as nested "__Base"/"__BaseStruct"
  // objects, enums serialize as their numeric values.
  folly::dynamic expected = folly::dynamic::object(
      "__Base",
      folly::dynamic::object(
          "__BaseStruct", folly::dynamic::object("baseInt64Member", 1))(
          "int32Member", -1)("stringMember", "testStrMbr")("enumMember", 1)(
          "vectorMember",
          folly::dynamic::array(
              folly::dynamic::object("member1", 342),
              folly::dynamic::object("member1", 123))))("key", "/test/key/")(
      "testEnum", -92233)("testBool", true)("testChar", "a")("testInt8", -123)(
      "testInt16", -7890)("testInt32", -123456789)(
      "testInt64", -9876543210123ll)("testUInt8", 123)("testUInt16", 7890)(
      "testUInt32", 123456789)("testUInt64", 9876543210123ll)("testFloat", 1.5)(
      "testDouble", 5.6)("testShortString", "abcdef")(
      "testLongString",
      "asdfghjkl;'eqtirgwuifhiivlzkhbvjkhc3978y42h97*&687gba")(
      "testIobuf", "TestTheBuf")(
      "testStruct",
      folly::dynamic::object(
          "__BaseStruct", folly::dynamic::object("baseInt64Member", 345))(
          "enumMember", 20)("int32Member", 0)(
          "stringMember", "nestedSimpleStruct")(
          "vectorMember", folly::dynamic::array()))(
      "testOptionalString", "tstOptStr")(
      "testList", folly::dynamic::array("abc", "bce", "xyz"))(
      "testEnumVec", folly::dynamic::array(1, 20))(
      "testUnion", folly::dynamic::object("umember2", true))(
      "testNestedVec",
      folly::dynamic::array(
          folly::dynamic::array(1, 1, 1), folly::dynamic::array(2, 2, 2)))(
      "testUMap", folly::dynamic::object("key", "value")("adele", "beyonce"))(
      "testMap", folly::dynamic::object("3.14", 2.7)("0.577", 0.2))(
      "testComplexMap",
      folly::dynamic::object("hello", folly::dynamic::array(1, 1, 1))(
          "world", folly::dynamic::array(2, 2, 2)))(
      "testUSet", folly::dynamic::array("hello", "world"))(
      "testSet", folly::dynamic::array(123, 456))("testType", "(user type)")(
      "testOptionalVec", folly::dynamic::array())(
      "testIOBufList", folly::dynamic::array("", ""));
  auto dynamic = carbon::convertToFollyDynamic(r);
  // The unordered_set serializes in unspecified order; sort it so the
  // comparison with `expected` is deterministic.
  auto set = dynamic.at("testUSet");
  std::sort(set.begin(), set.end());
  dynamic.at("testUSet") = set;
  EXPECT_EQ(expected, dynamic);
}
// Checks the inlineMixins option: with it off, mixin fields nest under
// "__BaseStruct"; with it on, they flatten into the parent object.
TEST(CarbonMessageConversionUtils, toFollyDynamic_InlineMixins) {
  carbon::test::SimpleStruct s;
  s.baseInt64Member() = 123;
  s.stringMember() = "abcdef";
  folly::dynamic noInline = folly::dynamic::object(
      "__BaseStruct", folly::dynamic::object("baseInt64Member", 123))(
      "int32Member", 0)("stringMember", "abcdef")("enumMember", 20)(
      "vectorMember", folly::dynamic::array());
  EXPECT_EQ(noInline, carbon::convertToFollyDynamic(s));
  folly::dynamic withInline = folly::dynamic::object("baseInt64Member", 123)(
      "int32Member", 0)("stringMember", "abcdef")("enumMember", 20)(
      "vectorMember", folly::dynamic::array());
  carbon::FollyDynamicConversionOptions opts;
  opts.inlineMixins = true;
  EXPECT_EQ(withInline, carbon::convertToFollyDynamic(s, opts));
}
// With serializeFieldsWithDefaultValue off, default-valued fields (the
// explicitly-assigned 0 included) are omitted from the output.
TEST(CarbonMessageConversionUtils, toFollyDynamic_NoDefaultValues) {
  carbon::test::SimpleStruct s;
  s.baseInt64Member() = 0;
  s.stringMember() = "abcdef";
  const folly::dynamic expected =
      folly::dynamic::object("stringMember", "abcdef")("enumMember", 20);
  carbon::FollyDynamicConversionOptions opts;
  opts.serializeFieldsWithDefaultValue = false;
  EXPECT_EQ(expected, carbon::convertToFollyDynamic(s, opts));
}
// Same option on a complex request: a container counts as non-default when
// it is non-empty, even if its elements are themselves default values.
TEST(CarbonMessageConversionUtils, toFollyDynamic_NoDefaultValues_Complex) {
  carbon::test::TestRequest req;
  req.testList() = std::vector<std::string>({"", "bce", ""});
  req.testNestedVec() = {{0}, {2, 0, 2}, {}};
  req.testChar() = 'a';
  const folly::dynamic expected = folly::dynamic::object(
      "__Base", folly::dynamic::object("enumMember", 20))(
      "testList", folly::dynamic::array("", "bce", ""))("testChar", "a")(
      "testEnum", 20)("testStruct", folly::dynamic::object("enumMember", 20))(
      "testNestedVec",
      folly::dynamic::array(
          folly::dynamic::array(0),
          folly::dynamic::array(2, 0, 2),
          folly::dynamic::array()))("testType", "(user type)");
  carbon::FollyDynamicConversionOptions opts;
  opts.serializeFieldsWithDefaultValue = false;
  EXPECT_EQ(expected, carbon::convertToFollyDynamic(req, opts));
}
// Deserialization accepts mixin fields written flat (inline) in the JSON
// rather than nested under "__BaseStruct".
TEST(CarbonMessageConversionUtils, fromFollyDynamic_InlineMixins) {
  const std::string jsonStr = R"json(
  {
    "int32Member": 32,
    "stringMember": "This is a string",
    "baseInt64Member": 132
  }
  )json";
  carbon::test::SimpleStruct s;
  carbon::convertFromFollyDynamic(folly::parseJson(jsonStr), s);
  EXPECT_EQ(32, s.int32Member());
  EXPECT_EQ("This is a string", s.stringMember());
  EXPECT_EQ(132, s.baseInt64Member());
}
// Deserialization also accepts the canonical nested "__BaseStruct" form,
// yielding the same field values as the inline form above.
TEST(CarbonMessageConversionUtils, fromFollyDynamic) {
  const std::string jsonStr = R"json(
  {
    "int32Member": 32,
    "stringMember": "This is a string",
    "__BaseStruct": {
      "baseInt64Member": 132
    }
  }
  )json";
  carbon::test::SimpleStruct s;
  carbon::convertFromFollyDynamic(folly::parseJson(jsonStr), s);
  EXPECT_EQ(32, s.int32Member());
  EXPECT_EQ("This is a string", s.stringMember());
  EXPECT_EQ(132, s.baseInt64Member());
}
// Parses a JSON document covering most field kinds (including non-string
// map keys, which need allow_non_string_keys) and verifies every field of
// the resulting TestRequest; the error callback must never fire.
TEST(CarbonMessageConversionUtils, fromFollyDynamic_Complex) {
  const std::string jsonStr = R"json(
  {
    "key": "sampleKey",
    "int32Member": 32,
    "stringMember": "This is a string",
    "__BaseStruct": {
      "baseInt64Member": 132
    },
    "testEnum": -92233,
    "testBool": true,
    "testChar": "a",
    "testInt8": -8,
    "testInt16": -16,
    "testInt32": -32,
    "testInt64": -64,
    "testUInt8": 8,
    "testUInt16": 16,
    "testUInt32": 32,
    "testUInt64": 64,
    "testFloat": 12.356,
    "testDouble": 35.98765,
    "testLongString": "this is a very long and nice string in a json file 12",
    "testIobuf": "iobuf string here...",
    "testStruct": {
      "int32Member": 7,
      "stringMember": "I'm nested!",
      "baseInt64Member": 9
    },
    "testList": [
      "string 1",
      "s2"
    ],
    "testOptionalString": "I exist!",
    "testUnion": {
      "umember3": "abc def ghi"
    },
    "testNestedVec": [
      [ 17, 26 ],
      [],
      [ 32 ]
    ],
    "testMap": {
      10.7: 11.8,
      30.567: 31.789
    },
    "testComplexMap": {
      "v1": [ 10 ],
      "ve2": [ 20, 30 ],
      "vec03": [ 50, 70, 90 ]
    }
  }
  )json";
  // Count (and log) conversion errors; the test asserts zero below.
  size_t numErrors = 0;
  auto onError = [&numErrors](folly::StringPiece name, folly::StringPiece msg) {
    std::cerr << "ERROR: " << name << ": " << msg << std::endl;
    numErrors++;
  };
  // Needed for the numeric keys in "testMap" above.
  folly::json::serialization_opts jsonOpts;
  jsonOpts.allow_non_string_keys = true;
  folly::dynamic json = folly::parseJson(jsonStr, jsonOpts);
  carbon::test::TestRequest r;
  carbon::convertFromFollyDynamic(json, r, std::move(onError));
  EXPECT_EQ(0, numErrors);
  EXPECT_EQ("sampleKey", r.key().fullKey());
  // Simple struct
  EXPECT_EQ(32, r.int32Member());
  EXPECT_EQ("This is a string", r.stringMember());
  EXPECT_EQ(132, r.baseInt64Member());
  EXPECT_EQ(carbon::test2::util::SimpleEnum::Negative, r.testEnum());
  EXPECT_TRUE(r.testBool());
  EXPECT_EQ('a', r.testChar());
  EXPECT_EQ(-8, r.testInt8());
  EXPECT_EQ(-16, r.testInt16());
  EXPECT_EQ(-32, r.testInt32());
  EXPECT_EQ(-64, r.testInt64());
  EXPECT_EQ(8, r.testUInt8());
  EXPECT_EQ(16, r.testUInt16());
  EXPECT_EQ(32, r.testUInt32());
  EXPECT_EQ(64, r.testUInt64());
  EXPECT_FLOAT_EQ(12.356, r.testFloat());
  EXPECT_DOUBLE_EQ(35.98765, r.testDouble());
  EXPECT_EQ(
      "this is a very long and nice string in a json file 12",
      r.testLongString());
  const folly::IOBuf expectedIobuf(
      folly::IOBuf::CopyBufferOp(), folly::StringPiece("iobuf string here..."));
  EXPECT_TRUE(folly::IOBufEqual()(expectedIobuf, r.testIobuf()));
  EXPECT_EQ(7, r.testStruct().int32Member());
  EXPECT_EQ("I'm nested!", r.testStruct().stringMember());
  EXPECT_EQ(9, r.testStruct().baseInt64Member());
  ASSERT_EQ(2, r.testList().size());
  EXPECT_EQ("string 1", r.testList()[0]);
  EXPECT_EQ("s2", r.testList()[1]);
  ASSERT_TRUE(r.testOptionalString().hasValue());
  EXPECT_EQ("I exist!", r.testOptionalString().value());
  ASSERT_EQ(3, r.testUnion().which());
  EXPECT_EQ("abc def ghi", r.testUnion().umember3());
  ASSERT_EQ(3, r.testNestedVec().size());
  ASSERT_EQ(2, r.testNestedVec()[0].size());
  EXPECT_EQ(0, r.testNestedVec()[1].size());
  ASSERT_EQ(1, r.testNestedVec()[2].size());
  EXPECT_EQ(17, r.testNestedVec()[0][0]);
  EXPECT_EQ(26, r.testNestedVec()[0][1]);
  EXPECT_EQ(32, r.testNestedVec()[2][0]);
  ASSERT_EQ(2, r.testMap().size());
  EXPECT_EQ(11.8, r.testMap()[10.7]);
  EXPECT_EQ(31.789, r.testMap()[30.567]);
  ASSERT_EQ(3, r.testComplexMap().size());
  ASSERT_EQ(1, r.testComplexMap()["v1"].size());
  EXPECT_EQ(10, r.testComplexMap()["v1"][0]);
  ASSERT_EQ(2, r.testComplexMap()["ve2"].size());
  EXPECT_EQ(20, r.testComplexMap()["ve2"][0]);
  EXPECT_EQ(30, r.testComplexMap()["ve2"][1]);
  ASSERT_EQ(3, r.testComplexMap()["vec03"].size());
  EXPECT_EQ(50, r.testComplexMap()["vec03"][0]);
  EXPECT_EQ(70, r.testComplexMap()["vec03"][1]);
  EXPECT_EQ(90, r.testComplexMap()["vec03"][2]);
}
// Feeds type-mismatched JSON values and checks that each mismatch invokes
// the error callback (7 in total) while well-typed elements preceding a
// bad one are still kept (e.g. the first testList / testNestedVec items).
TEST(CarbonMessageConversionUtils, fromFollyDynamic_Errors) {
  const std::string jsonStr = R"json(
  {
    "key": 75,
    "int32Member": "abc",
    "testChar": "ab",
    "testStruct": {
      "__BaseStruct": {
        "baseInt64Member": "abc"
      }
    },
    "testList": [
      "string 1",
      7
    ],
    "testUnion": {
      "umember2": 17
    },
    "testNestedVec": [
      [],
      [ 18, "abc" ]
    ]
  }
  )json";
  size_t numErrors = 0;
  auto onError = [&numErrors](
      folly::StringPiece fieldName, folly::StringPiece msg) {
    numErrors++;
    std::cerr << fieldName << ": " << msg << std::endl;
  };
  carbon::test::TestRequest r;
  carbon::convertFromFollyDynamic(
      folly::parseJson(jsonStr), r, std::move(onError));
  EXPECT_EQ(7, numErrors);
  ASSERT_EQ(1, r.testList().size());
  EXPECT_EQ("string 1", r.testList()[0]);
  ASSERT_EQ(2, r.testNestedVec().size());
  EXPECT_EQ(0, r.testNestedVec()[0].size());
  ASSERT_EQ(1, r.testNestedVec()[1].size());
  EXPECT_EQ(18, r.testNestedVec()[1][0]);
}
|
#!/bin/bash
# CI helper: builds a webapps manifest from the SIDL services tree, starts
# the api-daemon, then runs the WebDriver test driver against it.
set -e
if [ -z ${CI_PROJECT_DIR+x} ];
then
echo "Please set CI_PROJECT_DIR to the path of your SIDL repository.";
exit 1;
fi
# Kill child processes on exit.
trap 'jobs -p | xargs kill' EXIT
# DONT_CREATE_WEBAPPS (when set) skips regenerating the webapps manifest.
if [ -z ${DONT_CREATE_WEBAPPS} ];
then
# Cleanup webapps directory.
rm -rf ${CI_PROJECT_DIR}/prebuilts/http_root/webapps
# Check for each service if a service/xyz/client/manifest.webmanifest file
# is present, and if so add it to the application list.
WEBAPPS_JSON=${CI_PROJECT_DIR}/tests/webapps/webapps.json
mkdir -p ${CI_PROJECT_DIR}/tests/webapps
rm -rf ${CI_PROJECT_DIR}/tests/webapps/*
echo "[" > ${WEBAPPS_JSON}
pushd ${CI_PROJECT_DIR}/services > /dev/null
for service in `ls -d *`; do
if [ -f ${service}/client/manifest.webmanifest ];
then
echo "Registering ${service} tests"
echo "{ \"name\": \"${service}\", \"manifest_url\": \"http://${service}.localhost:8081/manifest.webmanifest\" }," >> ${WEBAPPS_JSON}
# Symlink the service's client dir so the HTTP server can serve it.
ln -s `realpath ${service}/client/` ${CI_PROJECT_DIR}/tests/webapps/${service}
fi
done
popd > /dev/null
# Add an extra entry to make the file valid Json.
echo "{ \"name\": \"dummy\", \"manifest_url\": \"http://example.com\" }]" >> ${WEBAPPS_JSON}
fi
DAEMON_CONFIG=${DAEMON_CONFIG:-config-webdriver.toml}
export FIREFOX_BIN="${CI_PROJECT_DIR}/tests/b2g/b2g"
pushd ${CI_PROJECT_DIR}/daemon > /dev/null
# Start the daemon in the background; the EXIT trap above reaps it.
RUST_LOG=debug ${CI_PROJECT_DIR}/target/release/api-daemon ${CI_PROJECT_DIR}/daemon/${DAEMON_CONFIG} &
rm -rf ./tmp-profile
mkdir -p ./tmp-profile/webapps
export TEST_FIREFOX_PROFILE=${CI_PROJECT_DIR}/daemon/tmp-profile
export RUST_LOG=info
export RUST_BACKTRACE=1
# Forward this script's arguments to the driver; set -e aborts on failure.
${CI_PROJECT_DIR}/target/release/driver 2>/dev/null $@
rm -rf ./tmp-profile
popd > /dev/null
echo "==========================================================="
echo "WebDriver test successful for $@"
echo "==========================================================="
<reponame>knkgun/curve25519-voi
// Copyright (c) 2014 The Go Authors. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//go:build !amd64 || purego || !gc
// +build !amd64 purego !gc
package strobe
import "encoding/binary"
// rc stores the 24 round constants for use in the ι step — one per round
// of the Keccak-f[1600] permutation, XORed into lane (0,0) by keccakF1600.
var rc = [24]uint64{
	0x0000000000000001,
	0x0000000000008082,
	0x800000000000808A,
	0x8000000080008000,
	0x000000000000808B,
	0x0000000080000001,
	0x8000000080008081,
	0x8000000000008009,
	0x000000000000008A,
	0x0000000000000088,
	0x0000000080008009,
	0x000000008000000A,
	0x000000008000808B,
	0x800000000000008B,
	0x8000000000008089,
	0x8000000000008003,
	0x8000000000008002,
	0x8000000000000080,
	0x000000000000800A,
	0x800000008000000A,
	0x8000000080008081,
	0x8000000000008080,
	0x0000000080000001,
	0x8000000080008008,
}
// keccakF1600 applies the Keccak permutation to a 1600b-wide
// state represented as a slice of 25 uint64s.
func keccakF1600(a *[25]uint64) {
// Implementation translated from Keccak-inplace.c
// in the keccak reference code.
var t, bc0, bc1, bc2, bc3, bc4, d0, d1, d2, d3, d4 uint64
for i := 0; i < 24; i += 4 {
// Combines the 5 steps in each round into 2 steps.
// Unrolls 4 rounds per loop and spreads some steps across rounds.
// Round 1
bc0 = a[0] ^ a[5] ^ a[10] ^ a[15] ^ a[20]
bc1 = a[1] ^ a[6] ^ a[11] ^ a[16] ^ a[21]
bc2 = a[2] ^ a[7] ^ a[12] ^ a[17] ^ a[22]
bc3 = a[3] ^ a[8] ^ a[13] ^ a[18] ^ a[23]
bc4 = a[4] ^ a[9] ^ a[14] ^ a[19] ^ a[24]
d0 = bc4 ^ (bc1<<1 | bc1>>63)
d1 = bc0 ^ (bc2<<1 | bc2>>63)
d2 = bc1 ^ (bc3<<1 | bc3>>63)
d3 = bc2 ^ (bc4<<1 | bc4>>63)
d4 = bc3 ^ (bc0<<1 | bc0>>63)
bc0 = a[0] ^ d0
t = a[6] ^ d1
bc1 = t<<44 | t>>(64-44)
t = a[12] ^ d2
bc2 = t<<43 | t>>(64-43)
t = a[18] ^ d3
bc3 = t<<21 | t>>(64-21)
t = a[24] ^ d4
bc4 = t<<14 | t>>(64-14)
a[0] = bc0 ^ (bc2 &^ bc1) ^ rc[i]
a[6] = bc1 ^ (bc3 &^ bc2)
a[12] = bc2 ^ (bc4 &^ bc3)
a[18] = bc3 ^ (bc0 &^ bc4)
a[24] = bc4 ^ (bc1 &^ bc0)
t = a[10] ^ d0
bc2 = t<<3 | t>>(64-3)
t = a[16] ^ d1
bc3 = t<<45 | t>>(64-45)
t = a[22] ^ d2
bc4 = t<<61 | t>>(64-61)
t = a[3] ^ d3
bc0 = t<<28 | t>>(64-28)
t = a[9] ^ d4
bc1 = t<<20 | t>>(64-20)
a[10] = bc0 ^ (bc2 &^ bc1)
a[16] = bc1 ^ (bc3 &^ bc2)
a[22] = bc2 ^ (bc4 &^ bc3)
a[3] = bc3 ^ (bc0 &^ bc4)
a[9] = bc4 ^ (bc1 &^ bc0)
t = a[20] ^ d0
bc4 = t<<18 | t>>(64-18)
t = a[1] ^ d1
bc0 = t<<1 | t>>(64-1)
t = a[7] ^ d2
bc1 = t<<6 | t>>(64-6)
t = a[13] ^ d3
bc2 = t<<25 | t>>(64-25)
t = a[19] ^ d4
bc3 = t<<8 | t>>(64-8)
a[20] = bc0 ^ (bc2 &^ bc1)
a[1] = bc1 ^ (bc3 &^ bc2)
a[7] = bc2 ^ (bc4 &^ bc3)
a[13] = bc3 ^ (bc0 &^ bc4)
a[19] = bc4 ^ (bc1 &^ bc0)
t = a[5] ^ d0
bc1 = t<<36 | t>>(64-36)
t = a[11] ^ d1
bc2 = t<<10 | t>>(64-10)
t = a[17] ^ d2
bc3 = t<<15 | t>>(64-15)
t = a[23] ^ d3
bc4 = t<<56 | t>>(64-56)
t = a[4] ^ d4
bc0 = t<<27 | t>>(64-27)
a[5] = bc0 ^ (bc2 &^ bc1)
a[11] = bc1 ^ (bc3 &^ bc2)
a[17] = bc2 ^ (bc4 &^ bc3)
a[23] = bc3 ^ (bc0 &^ bc4)
a[4] = bc4 ^ (bc1 &^ bc0)
t = a[15] ^ d0
bc3 = t<<41 | t>>(64-41)
t = a[21] ^ d1
bc4 = t<<2 | t>>(64-2)
t = a[2] ^ d2
bc0 = t<<62 | t>>(64-62)
t = a[8] ^ d3
bc1 = t<<55 | t>>(64-55)
t = a[14] ^ d4
bc2 = t<<39 | t>>(64-39)
a[15] = bc0 ^ (bc2 &^ bc1)
a[21] = bc1 ^ (bc3 &^ bc2)
a[2] = bc2 ^ (bc4 &^ bc3)
a[8] = bc3 ^ (bc0 &^ bc4)
a[14] = bc4 ^ (bc1 &^ bc0)
// Round 2
bc0 = a[0] ^ a[5] ^ a[10] ^ a[15] ^ a[20]
bc1 = a[1] ^ a[6] ^ a[11] ^ a[16] ^ a[21]
bc2 = a[2] ^ a[7] ^ a[12] ^ a[17] ^ a[22]
bc3 = a[3] ^ a[8] ^ a[13] ^ a[18] ^ a[23]
bc4 = a[4] ^ a[9] ^ a[14] ^ a[19] ^ a[24]
d0 = bc4 ^ (bc1<<1 | bc1>>63)
d1 = bc0 ^ (bc2<<1 | bc2>>63)
d2 = bc1 ^ (bc3<<1 | bc3>>63)
d3 = bc2 ^ (bc4<<1 | bc4>>63)
d4 = bc3 ^ (bc0<<1 | bc0>>63)
bc0 = a[0] ^ d0
t = a[16] ^ d1
bc1 = t<<44 | t>>(64-44)
t = a[7] ^ d2
bc2 = t<<43 | t>>(64-43)
t = a[23] ^ d3
bc3 = t<<21 | t>>(64-21)
t = a[14] ^ d4
bc4 = t<<14 | t>>(64-14)
a[0] = bc0 ^ (bc2 &^ bc1) ^ rc[i+1]
a[16] = bc1 ^ (bc3 &^ bc2)
a[7] = bc2 ^ (bc4 &^ bc3)
a[23] = bc3 ^ (bc0 &^ bc4)
a[14] = bc4 ^ (bc1 &^ bc0)
t = a[20] ^ d0
bc2 = t<<3 | t>>(64-3)
t = a[11] ^ d1
bc3 = t<<45 | t>>(64-45)
t = a[2] ^ d2
bc4 = t<<61 | t>>(64-61)
t = a[18] ^ d3
bc0 = t<<28 | t>>(64-28)
t = a[9] ^ d4
bc1 = t<<20 | t>>(64-20)
a[20] = bc0 ^ (bc2 &^ bc1)
a[11] = bc1 ^ (bc3 &^ bc2)
a[2] = bc2 ^ (bc4 &^ bc3)
a[18] = bc3 ^ (bc0 &^ bc4)
a[9] = bc4 ^ (bc1 &^ bc0)
t = a[15] ^ d0
bc4 = t<<18 | t>>(64-18)
t = a[6] ^ d1
bc0 = t<<1 | t>>(64-1)
t = a[22] ^ d2
bc1 = t<<6 | t>>(64-6)
t = a[13] ^ d3
bc2 = t<<25 | t>>(64-25)
t = a[4] ^ d4
bc3 = t<<8 | t>>(64-8)
a[15] = bc0 ^ (bc2 &^ bc1)
a[6] = bc1 ^ (bc3 &^ bc2)
a[22] = bc2 ^ (bc4 &^ bc3)
a[13] = bc3 ^ (bc0 &^ bc4)
a[4] = bc4 ^ (bc1 &^ bc0)
t = a[10] ^ d0
bc1 = t<<36 | t>>(64-36)
t = a[1] ^ d1
bc2 = t<<10 | t>>(64-10)
t = a[17] ^ d2
bc3 = t<<15 | t>>(64-15)
t = a[8] ^ d3
bc4 = t<<56 | t>>(64-56)
t = a[24] ^ d4
bc0 = t<<27 | t>>(64-27)
a[10] = bc0 ^ (bc2 &^ bc1)
a[1] = bc1 ^ (bc3 &^ bc2)
a[17] = bc2 ^ (bc4 &^ bc3)
a[8] = bc3 ^ (bc0 &^ bc4)
a[24] = bc4 ^ (bc1 &^ bc0)
t = a[5] ^ d0
bc3 = t<<41 | t>>(64-41)
t = a[21] ^ d1
bc4 = t<<2 | t>>(64-2)
t = a[12] ^ d2
bc0 = t<<62 | t>>(64-62)
t = a[3] ^ d3
bc1 = t<<55 | t>>(64-55)
t = a[19] ^ d4
bc2 = t<<39 | t>>(64-39)
a[5] = bc0 ^ (bc2 &^ bc1)
a[21] = bc1 ^ (bc3 &^ bc2)
a[12] = bc2 ^ (bc4 &^ bc3)
a[3] = bc3 ^ (bc0 &^ bc4)
a[19] = bc4 ^ (bc1 &^ bc0)
// Round 3
bc0 = a[0] ^ a[5] ^ a[10] ^ a[15] ^ a[20]
bc1 = a[1] ^ a[6] ^ a[11] ^ a[16] ^ a[21]
bc2 = a[2] ^ a[7] ^ a[12] ^ a[17] ^ a[22]
bc3 = a[3] ^ a[8] ^ a[13] ^ a[18] ^ a[23]
bc4 = a[4] ^ a[9] ^ a[14] ^ a[19] ^ a[24]
d0 = bc4 ^ (bc1<<1 | bc1>>63)
d1 = bc0 ^ (bc2<<1 | bc2>>63)
d2 = bc1 ^ (bc3<<1 | bc3>>63)
d3 = bc2 ^ (bc4<<1 | bc4>>63)
d4 = bc3 ^ (bc0<<1 | bc0>>63)
bc0 = a[0] ^ d0
t = a[11] ^ d1
bc1 = t<<44 | t>>(64-44)
t = a[22] ^ d2
bc2 = t<<43 | t>>(64-43)
t = a[8] ^ d3
bc3 = t<<21 | t>>(64-21)
t = a[19] ^ d4
bc4 = t<<14 | t>>(64-14)
a[0] = bc0 ^ (bc2 &^ bc1) ^ rc[i+2]
a[11] = bc1 ^ (bc3 &^ bc2)
a[22] = bc2 ^ (bc4 &^ bc3)
a[8] = bc3 ^ (bc0 &^ bc4)
a[19] = bc4 ^ (bc1 &^ bc0)
t = a[15] ^ d0
bc2 = t<<3 | t>>(64-3)
t = a[1] ^ d1
bc3 = t<<45 | t>>(64-45)
t = a[12] ^ d2
bc4 = t<<61 | t>>(64-61)
t = a[23] ^ d3
bc0 = t<<28 | t>>(64-28)
t = a[9] ^ d4
bc1 = t<<20 | t>>(64-20)
a[15] = bc0 ^ (bc2 &^ bc1)
a[1] = bc1 ^ (bc3 &^ bc2)
a[12] = bc2 ^ (bc4 &^ bc3)
a[23] = bc3 ^ (bc0 &^ bc4)
a[9] = bc4 ^ (bc1 &^ bc0)
t = a[5] ^ d0
bc4 = t<<18 | t>>(64-18)
t = a[16] ^ d1
bc0 = t<<1 | t>>(64-1)
t = a[2] ^ d2
bc1 = t<<6 | t>>(64-6)
t = a[13] ^ d3
bc2 = t<<25 | t>>(64-25)
t = a[24] ^ d4
bc3 = t<<8 | t>>(64-8)
a[5] = bc0 ^ (bc2 &^ bc1)
a[16] = bc1 ^ (bc3 &^ bc2)
a[2] = bc2 ^ (bc4 &^ bc3)
a[13] = bc3 ^ (bc0 &^ bc4)
a[24] = bc4 ^ (bc1 &^ bc0)
t = a[20] ^ d0
bc1 = t<<36 | t>>(64-36)
t = a[6] ^ d1
bc2 = t<<10 | t>>(64-10)
t = a[17] ^ d2
bc3 = t<<15 | t>>(64-15)
t = a[3] ^ d3
bc4 = t<<56 | t>>(64-56)
t = a[14] ^ d4
bc0 = t<<27 | t>>(64-27)
a[20] = bc0 ^ (bc2 &^ bc1)
a[6] = bc1 ^ (bc3 &^ bc2)
a[17] = bc2 ^ (bc4 &^ bc3)
a[3] = bc3 ^ (bc0 &^ bc4)
a[14] = bc4 ^ (bc1 &^ bc0)
t = a[10] ^ d0
bc3 = t<<41 | t>>(64-41)
t = a[21] ^ d1
bc4 = t<<2 | t>>(64-2)
t = a[7] ^ d2
bc0 = t<<62 | t>>(64-62)
t = a[18] ^ d3
bc1 = t<<55 | t>>(64-55)
t = a[4] ^ d4
bc2 = t<<39 | t>>(64-39)
a[10] = bc0 ^ (bc2 &^ bc1)
a[21] = bc1 ^ (bc3 &^ bc2)
a[7] = bc2 ^ (bc4 &^ bc3)
a[18] = bc3 ^ (bc0 &^ bc4)
a[4] = bc4 ^ (bc1 &^ bc0)
// Round 4
bc0 = a[0] ^ a[5] ^ a[10] ^ a[15] ^ a[20]
bc1 = a[1] ^ a[6] ^ a[11] ^ a[16] ^ a[21]
bc2 = a[2] ^ a[7] ^ a[12] ^ a[17] ^ a[22]
bc3 = a[3] ^ a[8] ^ a[13] ^ a[18] ^ a[23]
bc4 = a[4] ^ a[9] ^ a[14] ^ a[19] ^ a[24]
d0 = bc4 ^ (bc1<<1 | bc1>>63)
d1 = bc0 ^ (bc2<<1 | bc2>>63)
d2 = bc1 ^ (bc3<<1 | bc3>>63)
d3 = bc2 ^ (bc4<<1 | bc4>>63)
d4 = bc3 ^ (bc0<<1 | bc0>>63)
bc0 = a[0] ^ d0
t = a[1] ^ d1
bc1 = t<<44 | t>>(64-44)
t = a[2] ^ d2
bc2 = t<<43 | t>>(64-43)
t = a[3] ^ d3
bc3 = t<<21 | t>>(64-21)
t = a[4] ^ d4
bc4 = t<<14 | t>>(64-14)
a[0] = bc0 ^ (bc2 &^ bc1) ^ rc[i+3]
a[1] = bc1 ^ (bc3 &^ bc2)
a[2] = bc2 ^ (bc4 &^ bc3)
a[3] = bc3 ^ (bc0 &^ bc4)
a[4] = bc4 ^ (bc1 &^ bc0)
t = a[5] ^ d0
bc2 = t<<3 | t>>(64-3)
t = a[6] ^ d1
bc3 = t<<45 | t>>(64-45)
t = a[7] ^ d2
bc4 = t<<61 | t>>(64-61)
t = a[8] ^ d3
bc0 = t<<28 | t>>(64-28)
t = a[9] ^ d4
bc1 = t<<20 | t>>(64-20)
a[5] = bc0 ^ (bc2 &^ bc1)
a[6] = bc1 ^ (bc3 &^ bc2)
a[7] = bc2 ^ (bc4 &^ bc3)
a[8] = bc3 ^ (bc0 &^ bc4)
a[9] = bc4 ^ (bc1 &^ bc0)
t = a[10] ^ d0
bc4 = t<<18 | t>>(64-18)
t = a[11] ^ d1
bc0 = t<<1 | t>>(64-1)
t = a[12] ^ d2
bc1 = t<<6 | t>>(64-6)
t = a[13] ^ d3
bc2 = t<<25 | t>>(64-25)
t = a[14] ^ d4
bc3 = t<<8 | t>>(64-8)
a[10] = bc0 ^ (bc2 &^ bc1)
a[11] = bc1 ^ (bc3 &^ bc2)
a[12] = bc2 ^ (bc4 &^ bc3)
a[13] = bc3 ^ (bc0 &^ bc4)
a[14] = bc4 ^ (bc1 &^ bc0)
t = a[15] ^ d0
bc1 = t<<36 | t>>(64-36)
t = a[16] ^ d1
bc2 = t<<10 | t>>(64-10)
t = a[17] ^ d2
bc3 = t<<15 | t>>(64-15)
t = a[18] ^ d3
bc4 = t<<56 | t>>(64-56)
t = a[19] ^ d4
bc0 = t<<27 | t>>(64-27)
a[15] = bc0 ^ (bc2 &^ bc1)
a[16] = bc1 ^ (bc3 &^ bc2)
a[17] = bc2 ^ (bc4 &^ bc3)
a[18] = bc3 ^ (bc0 &^ bc4)
a[19] = bc4 ^ (bc1 &^ bc0)
t = a[20] ^ d0
bc3 = t<<41 | t>>(64-41)
t = a[21] ^ d1
bc4 = t<<2 | t>>(64-2)
t = a[22] ^ d2
bc0 = t<<62 | t>>(64-62)
t = a[23] ^ d3
bc1 = t<<55 | t>>(64-55)
t = a[24] ^ d4
bc2 = t<<39 | t>>(64-39)
a[20] = bc0 ^ (bc2 &^ bc1)
a[21] = bc1 ^ (bc3 &^ bc2)
a[22] = bc2 ^ (bc4 &^ bc3)
a[23] = bc3 ^ (bc0 &^ bc4)
a[24] = bc4 ^ (bc1 &^ bc0)
}
}
// keccakF1600Bytes applies the Keccak-f[1600] permutation to a state stored
// as 200 little-endian bytes: it decodes the 25 uint64 lanes, permutes them,
// and writes them back in place.
//
// TODO/perf: splitting this into per-architecture files would let platforms
// that tolerate unaligned access skip the decode/encode overhead; PRs welcome
// for 386, ppc64, ppc64le or s390x.
func keccakF1600Bytes(s *[25 * 8]byte) {
	var lanes [25]uint64
	for i := range lanes {
		lanes[i] = binary.LittleEndian.Uint64(s[8*i:])
	}
	keccakF1600(&lanes)
	for i, lane := range lanes {
		binary.LittleEndian.PutUint64(s[8*i:], lane)
	}
}
|
# Package-level configuration bootstrap.
import os
# Config reads its settings from files located under a given directory.
from nudgebot.config import Config
# Shared Config instance rooted at this package's directory; values are
# loaded eagerly at import time via reload().
config = Config(os.path.dirname(__file__))
config.reload()
|
import IAPIParamTranslator from "../../API/interfaces/IAPIParamTranslator";
import IAPIParamTranslatorStatic from "../../API/interfaces/IAPIParamTranslatorStatic";
/**
 * Param translator for DAO API calls addressing a list of vos by numeric id.
 * URL shape: ':api_type_id/:ids' where ids are joined with '_'.
 */
export default class APIDAOParamsVO implements IAPIParamTranslator<APIDAOParamsVO> {

    public static URL: string = ':api_type_id/:ids';

    /**
     * Builds a VO from an Express-style request.
     * Returns null when the request carries no params.
     */
    public static fromREQ(req): APIDAOParamsVO {
        if (!(req && req.params)) {
            return null;
        }
        // map() over the split segments instead of the previous for...in loop:
        // for...in iterates string keys (and any inherited enumerable props),
        // and parseInt without an explicit radix is a classic footgun.
        const ids: number[] = req.params.ids.split('_').map((id: string) => parseInt(id, 10));
        return new APIDAOParamsVO(req.params.api_type_id, ids);
    }

    /** Builds a VO from already-typed parameters. */
    public static fromParams(API_TYPE_ID: string, ids: number[]): APIDAOParamsVO {
        return new APIDAOParamsVO(API_TYPE_ID, ids);
    }

    /** Flattens the VO into the positional argument list expected by the API. */
    public static getAPIParams(param: APIDAOParamsVO): any[] {
        return [param.API_TYPE_ID, param.ids];
    }

    public constructor(
        public API_TYPE_ID: string,
        public ids: number[]) {
    }

    /** Serializes to the URL form matching APIDAOParamsVO.URL. */
    public translateToURL(): string {
        return this.API_TYPE_ID + '/' + this.ids.join('_');
    }
}
export const APIDAOParamsVOStatic: IAPIParamTranslatorStatic<APIDAOParamsVO> = APIDAOParamsVO; |
<filename>TrafficFlowClassification/preprocess/test_splitTrian.py
'''
@Author: <NAME>
@Date: 2021-01-05 11:32:31
@Description: 测试 splitTrain.py 文件
@LastEditTime: 2021-01-05 12:53:17
'''
from TrafficFlowClassification.preprocess.splitTrain import get_file_path, get_train_test
def test_get_file_path():
    """Smoke test: get_file_path should index the preprocessed pcap folder."""
    # Raw string keeps the Windows backslashes literal. The original non-raw
    # literal relied on invalid escape sequences ('\T', '\p'), which emit
    # DeprecationWarning/SyntaxWarning on modern Python.
    pcap_dict = get_file_path(r'D:\Traffic-Classification\data\preprocess_data')
    print(pcap_dict)
def test_get_train_test():
    """Smoke test: get_train_test should split the dataset 90/10."""
    # Raw string avoids the invalid escape sequences of the original literal.
    train_dict, test_dict = get_train_test(r'D:\Traffic-Classification\data\preprocess_data', 0.9)
    print(train_dict, test_dict)
import { Router } from "express";
import { client } from "./gremlin"
const routes = Router();

/**
 * GET / — render the landing page.
 */
routes.get('/', (_req, res) => {
  res.render('index', { title: 'gremUIlle' });
});

/**
 * POST /api/query — execute a raw Gremlin query and return the raw result
 * as JSON. Execution errors are serialized into an { error } payload rather
 * than crashing the request.
 */
routes.post("/api/query", (request, response) => {
  const { query } = request.body;
  client().execute(query, (err, results) => {
    response.setHeader('Content-Type', 'application/json');
    if (err) {
      console.error(err);
      response.send(JSON.stringify({error: err.toString()}));
      return;
    }
    response.send(JSON.stringify(results));
  });
});

export default routes;
|
package com.fc.test.model.custom.email;
/**
 * Plain bean carrying the fields captured by the contact/mail form.
 * Pure data holder: no validation or formatting happens here.
 */
public class MailInfo {

    private String fullname; // sender's full name
    private String email;    // sender's e-mail address
    private String tel;      // sender's phone number
    private String company;  // sender's company
    private String title;    // message subject
    private String content;  // message body

    /** @return the sender's full name */
    public String getFullname() {
        return fullname;
    }

    /** @param value the sender's full name */
    public void setFullname(String value) {
        this.fullname = value;
    }

    /** @return the sender's e-mail address */
    public String getEmail() {
        return email;
    }

    /** @param value the sender's e-mail address */
    public void setEmail(String value) {
        this.email = value;
    }

    /** @return the sender's phone number */
    public String getTel() {
        return tel;
    }

    /** @param value the sender's phone number */
    public void setTel(String value) {
        this.tel = value;
    }

    /** @return the sender's company */
    public String getCompany() {
        return company;
    }

    /** @param value the sender's company */
    public void setCompany(String value) {
        this.company = value;
    }

    /** @return the message subject */
    public String getTitle() {
        return title;
    }

    /** @param value the message subject */
    public void setTitle(String value) {
        this.title = value;
    }

    /** @return the message body */
    public String getContent() {
        return content;
    }

    /** @param value the message body */
    public void setContent(String value) {
        this.content = value;
    }
}
|
import {
Options,
Series,
ChartOptionsUsingYAxis,
Axes,
ViewAxisLabel,
RotationLabelData,
InitAxisData,
Layout,
} from '@t/store/store';
import { LineTypeXAxisOptions, BulletChartOptions, AxisTitle, DateOption } from '@t/options';
import { Theme } from '@t/theme';
import { AxisType } from '@src/component/axis';
import {
divisors,
makeTickPixelPositions,
getTextHeight,
getTextWidth,
} from '@src/helpers/calculator';
import { range, isString, isUndefined, isNumber } from '@src/helpers/utils';
import {
ANGLE_CANDIDATES,
calculateRotatedWidth,
calculateRotatedHeight,
} from '@src/helpers/geometric';
import { getDateFormat, formatDate } from '@src/helpers/formatDate';
import { calculateDegreeToRadian } from '@src/helpers/sector';
import { DEFAULT_LABEL_TEXT } from '@src/brushes/label';
import { AxisDataParams } from '@src/store/axes';
// A candidate tick-interval arrangement produced while auto-adjusting how
// many axis labels fit into the available width.
interface IntervalInfo {
  blockCount: number; // number of whole blocks (tick gaps) that fit on the axis
  remainBlockCount: number; // blocks left over after filling whole intervals
  interval: number; // every n-th tick/label is kept
}
/**
 * Computes an adjusted interval arrangement for `blockCount` blocks rendered
 * on an axis of `axisWidth` pixels when each block needs `blockSize` pixels.
 * Returns null when no adjustment is required (interval would be <= 1).
 */
function makeAdjustingIntervalInfo(blockCount: number, axisWidth: number, blockSize: number) {
  let fitCount = Math.floor(axisWidth / blockSize);
  const interval = fitCount ? Math.floor(blockCount / fitCount) : blockCount;

  if (interval <= 1) {
    return null;
  }

  // Blocks left over after filling whole intervals:
  // | | | | | | | | | | | | - previous block interval
  // |   |   |   |           - new block interval
  // |*|*|                   - remaining blocks
  let leftover = blockCount - interval * fitCount;
  if (leftover >= interval) {
    fitCount += Math.floor(leftover / interval);
    leftover %= interval;
  }

  return {
    blockCount: fitCount,
    remainBlockCount: leftover,
    interval,
  };
}
/**
 * Picks a tick interval so labels stay readable on an axis of `axisWidth`
 * pixels holding `count` categories. Tries exact divisors of `count` first,
 * then falls back to a range of block sizes; returns 1 when everything fits.
 */
export function getAutoAdjustingInterval(count: number, axisWidth: number, categories?: string[]) {
  const MIN_WIDTH = 90;
  const MAX_WIDTH = 121;
  const STEP_SIZE = 5;
  const LABEL_MARGIN = 5;

  // If the first category already fits in its slot, render every label.
  if (categories?.[0]) {
    if (getTextWidth(categories[0]) < axisWidth / count - LABEL_MARGIN) {
      return 1;
    }
  }

  // Prefer divisor-based intervals whose resulting width is comfortable.
  let candidates: IntervalInfo[] = divisors(count)
    .filter((interval) => {
      const intervalWidth = (interval / count) * axisWidth;
      return intervalWidth >= MIN_WIDTH && intervalWidth <= MAX_WIDTH;
    })
    .map((interval) => ({ interval, blockCount: Math.floor(count / interval), remainBlockCount: 0 }));

  // Fall back to sweeping candidate block sizes when no divisor works.
  if (!candidates.length) {
    candidates = [];
    for (const blockSize of range(MIN_WIDTH, MAX_WIDTH, STEP_SIZE)) {
      const candidate = makeAdjustingIntervalInfo(count, axisWidth, blockSize);
      if (candidate) {
        candidates.push(candidate);
      }
    }
  }

  let tickInterval = 1;
  if (candidates.length) {
    // Choose the arrangement that keeps the most blocks (densest labels).
    tickInterval = candidates.reduce(
      (acc, cur) => (cur.blockCount > acc.blockCount ? cur : acc),
      { blockCount: 0, interval: 1 }
    ).interval;
  }

  return tickInterval;
}
/**
 * True when the label (category) axis runs along the y axis — i.e. for
 * horizontal bar / radialBar charts, or non-vertical bullet charts.
 */
export function isLabelAxisOnYAxis(series: Series, options?: Options) {
  if (series.bar || series.radialBar) {
    return true;
  }

  return Boolean(series.bullet) && !(options as BulletChartOptions)?.series?.vertical;
}
/** True when the chart contains any box-type series (column/bar/boxPlot/bullet). */
export function hasBoxTypeSeries(series: Series) {
  return Boolean(series.column || series.bar || series.boxPlot || series.bullet);
}
/**
 * Whether data points sit between ticks ("on column"): always true for
 * box-type series; for line/area charts it follows the xAxis.pointOnColumn
 * option; false otherwise.
 */
export function isPointOnColumn(series: Series, options: Options) {
  if (hasBoxTypeSeries(series)) {
    return true;
  }

  const isLineType = Boolean(series.line || series.area);

  return isLineType ? Boolean((options.xAxis as LineTypeXAxisOptions)?.pointOnColumn) : false;
}
/** True for chart types rendered on radial axes (radar, radialBar). */
export function isSeriesUsingRadialAxes(series: Series): boolean {
  return Boolean(series.radar) || Boolean(series.radialBar);
}
/** Radial-chart axis names, keyed by which axis carries the labels. */
function getAxisNameUsingRadialAxes(labelAxisOnYAxis: boolean) {
  const valueAxisName = labelAxisOnYAxis ? 'circularAxis' : 'verticalAxis';
  const labelAxisName = labelAxisOnYAxis ? 'verticalAxis' : 'circularAxis';

  return { valueAxisName, labelAxisName };
}
/**
 * Resolves the value/label axis names for the chart: radial names for
 * radial charts, cartesian x/y names otherwise.
 */
export function getAxisName(labelAxisOnYAxis: boolean, series: Series) {
  if (isSeriesUsingRadialAxes(series)) {
    return getAxisNameUsingRadialAxes(labelAxisOnYAxis);
  }

  return {
    valueAxisName: labelAxisOnYAxis ? 'xAxis' : 'yAxis',
    labelAxisName: labelAxisOnYAxis ? 'yAxis' : 'xAxis',
  };
}
/** Layout size keys for the value/label axes, keyed by label-axis position. */
export function getSizeKey(labelAxisOnYAxis: boolean) {
  const valueSizeKey = labelAxisOnYAxis ? 'width' : 'height';
  const labelSizeKey = labelAxisOnYAxis ? 'height' : 'width';

  return { valueSizeKey, labelSizeKey };
}
/** Numeric min/max of the given axis labels (labels parsed with Number). */
export function getLimitOnAxis(labels: string[]) {
  const values = labels.map(Number);

  return {
    min: Math.min(...values),
    max: Math.max(...values),
  };
}
/** True when the options declare two y axes (primary + secondary). */
export function hasSecondaryYAxis(options: ChartOptionsUsingYAxis) {
  const yAxisOptions = options?.yAxis;

  return Array.isArray(yAxisOptions) && yAxisOptions.length === 2;
}
/**
 * Splits the yAxis options into primary and secondary parts; secondaryYAxis
 * is null when only a single y axis is configured.
 */
export function getYAxisOption(options: ChartOptionsUsingYAxis) {
  const hasSecondary = hasSecondaryYAxis(options);
  const yAxis = hasSecondary ? options.yAxis![0] : options?.yAxis;
  const secondaryYAxis = hasSecondary ? options.yAxis![1] : null;

  return { yAxis, secondaryYAxis };
}
/**
 * Name of the value axis a series should use: 'secondaryYAxis' when the
 * secondary axis is bound to this series' chart type, otherwise the default.
 */
export function getValueAxisName(
  options: ChartOptionsUsingYAxis,
  seriesName: string,
  valueAxisName: string
) {
  const { secondaryYAxis } = getYAxisOption(options);

  if (secondaryYAxis?.chartType === seriesName) {
    return 'secondaryYAxis';
  }

  return valueAxisName;
}
/**
 * All value-axis names in play. With a secondary y axis (and a non-x value
 * axis), resolves one name per configured chart type; otherwise just the
 * given default.
 */
export function getValueAxisNames(options: ChartOptionsUsingYAxis, valueAxisName: string) {
  const { yAxis, secondaryYAxis } = getYAxisOption(options);

  if (valueAxisName === 'xAxis' || !secondaryYAxis) {
    return [valueAxisName];
  }

  const fallbackNames = ['yAxis', 'secondaryYAxis'];

  return [yAxis.chartType, secondaryYAxis.chartType].map((seriesName, index) =>
    seriesName ? getValueAxisName(options, seriesName, valueAxisName) : fallbackNames[index]
  );
}
/**
 * Theme for the named axis; when two y-axis themes exist, 'yAxis' maps to
 * the first entry and any other (secondary) name maps to the second.
 */
export function getAxisTheme(theme: Theme, name: string) {
  const { xAxis, yAxis } = theme;

  if (name === AxisType.X) {
    return xAxis;
  }
  if (Array.isArray(yAxis)) {
    return name === AxisType.Y ? yAxis[0] : yAxis[1];
  }

  return yAxis;
}
/**
 * Smallest candidate rotation angle whose rotated label width fits into
 * `distance`; 0 when the label already fits unrotated.
 */
function getRotationDegree(distance: number, labelWidth: number, labelHeight: number) {
  let degree = 0;

  // every() stops at the first angle whose rotated width fits.
  ANGLE_CANDIDATES.every((angle) => {
    degree = angle;
    return calculateRotatedWidth(angle, labelWidth, labelHeight) > distance;
  });

  return distance < labelWidth ? degree : 0;
}
/**
 * Whether the max label width of the given y-axis field differs between the
 * previous and current axes states. Both-absent counts as unchanged.
 */
function hasYAxisMaxLabelLengthChanged(
  previousAxes: Axes,
  currentAxes: Axes,
  field: 'yAxis' | 'secondaryYAxis'
) {
  const prev = previousAxes[field];
  const cur = currentAxes[field];
  const bothMissing = !prev && !cur;

  return bothMissing ? false : prev?.maxLabelWidth !== cur?.maxLabelWidth;
}
/** True when either y axis (primary or secondary) changed its max label width. */
function hasYAxisTypeMaxLabelChanged(previousAxes: Axes, currentAxes: Axes): boolean {
  const fields: ('yAxis' | 'secondaryYAxis')[] = ['yAxis', 'secondaryYAxis'];

  return fields.some((field) => hasYAxisMaxLabelLengthChanged(previousAxes, currentAxes, field));
}
/** True when the x axis' max height differs between states. */
function hasXAxisSizeChanged(previousAxes: Axes, currentAxes: Axes): boolean {
  return previousAxes.xAxis.maxHeight !== currentAxes.xAxis.maxHeight;
}
/** True when any axis dimension changed, requiring a layout recalculation. */
export function hasAxesLayoutChanged(previousAxes: Axes, currentAxes: Axes) {
  const yAxisChanged = hasYAxisTypeMaxLabelChanged(previousAxes, currentAxes);
  const xAxisChanged = hasXAxisSizeChanged(previousAxes, currentAxes);

  return yAxisChanged || xAxisChanged;
}
/** Whether x-axis labels may be rotated; defaults to true when unset. */
export function getRotatableOption(options: Options) {
  const rotatable = options?.xAxis?.label?.rotatable;

  return rotatable ?? true;
}
// Input bundle for getViewAxisLabels: the raw labels plus the tick/label
// spacing data computed for the axis.
type ViewAxisLabelParam = {
  labels: string[];
  pointOnColumn?: boolean; // whether labels sit between ticks (box-type charts)
  labelDistance?: number; // explicit distance between labels, when provided
  labelInterval: number; // render every n-th label
  tickDistance: number;
  tickInterval: number;
  tickCount: number;
};
/**
 * Computes the renderable axis labels with their pixel offsets, keeping only
 * every `labelInterval`-th label that still falls inside the axis.
 */
export function getViewAxisLabels(axisData: ViewAxisLabelParam, axisSize: number) {
  const {
    labels,
    pointOnColumn,
    labelDistance,
    tickDistance,
    labelInterval,
    tickInterval,
    tickCount,
  } = axisData;

  const positions = makeTickPixelPositions(axisSize, tickCount);
  const interval = labelInterval === tickInterval ? labelInterval : 1;
  // Shift labels half a slot when they sit between ticks.
  const adjustment = pointOnColumn ? (labelDistance ?? tickDistance * interval) / 2 : 0;

  const result: ViewAxisLabel[] = [];
  labels.forEach((text, index) => {
    const offsetPos = positions[index] + adjustment;
    if (index % labelInterval === 0 && offsetPos <= axisSize) {
      result.push({ offsetPos, text });
    }
  });

  return result;
}
/**
 * Normalizes an axis title option into { text, offsetX, offsetY } form;
 * passes undefined through untouched.
 */
export function makeTitleOption(title?: AxisTitle) {
  if (isUndefined(title)) {
    return title;
  }

  const base = { text: '', offsetX: 0, offsetY: 0 };

  if (isString(title)) {
    return { ...base, text: title };
  }

  return { ...base, ...title };
}
/**
 * Returns the categories formatted as dates when a date format applies,
 * otherwise a plain copy of the category list.
 */
export function makeFormattedCategory(categories: string[], date?: DateOption) {
  const format = getDateFormat(date);

  if (!format) {
    return categories.map((category) => category);
  }

  return categories.map((category) => formatDate(format, new Date(category)));
}
/**
 * Computes label rotation data (flag, radians, rotated height) for labels of
 * the given size packed into `distance` pixels. No rotation is applied when
 * disabled or unnecessary.
 */
export function makeRotationData(
  maxLabelWidth: number,
  maxLabelHeight: number,
  distance: number,
  rotatable: boolean
): Required<RotationLabelData> {
  const degree = getRotationDegree(distance, maxLabelWidth, maxLabelHeight);
  const noRotation = !rotatable || degree === 0;

  if (noRotation) {
    return { needRotateLabel: false, radian: 0, rotationHeight: maxLabelHeight };
  }

  return {
    needRotateLabel: degree > 0,
    radian: calculateDegreeToRadian(degree, 0),
    rotationHeight: calculateRotatedHeight(degree, maxLabelWidth, maxLabelHeight),
  };
}
/**
 * Pixel width/height of the longest label (by character count), with the
 * given x margin added to the width.
 */
export function getMaxLabelSize(labels: string[], xMargin: number, font = DEFAULT_LABEL_TEXT) {
  // Later labels win ties, matching the original reduce semantics.
  let longest = '';
  for (const label of labels) {
    if (label.length >= longest.length) {
      longest = label;
    }
  }

  return {
    maxLabelWidth: getTextWidth(longest, font) + xMargin,
    maxLabelHeight: getTextHeight(longest, font),
  };
}
export function getLabelXMargin(axisName: string, options: Options) {
if (axisName === 'xAxis') {
return 0;
}
const axisOptions = getYAxisOption(options);
return Math.abs(axisOptions?.[axisName]?.label?.margin ?? 0);
}
export function getInitAxisIntervalData(isLabelAxis: boolean, params: AxisDataParams) {
const { axis, categories, layout, isCoordinateTypeChart } = params;
const tickInterval = axis?.tick?.interval;
const labelInterval = axis?.label?.interval;
const existIntervalOptions = isNumber(tickInterval) || isNumber(labelInterval);
const needAdjustInterval =
isLabelAxis &&
!isNumber(axis?.scale?.stepSize) &&
!params.shift &&
!existIntervalOptions &&
!isCoordinateTypeChart;
const initTickInterval = needAdjustInterval ? getInitTickInterval(categories, layout) : 1;
const initLabelInterval = needAdjustInterval ? initTickInterval : 1;
const axisData: InitAxisData = {
tickInterval: tickInterval ?? initTickInterval,
labelInterval: labelInterval ?? initLabelInterval,
};
return axisData;
}
/** Auto-adjusted tick interval for the x axis; 1 when data is unavailable. */
function getInitTickInterval(categories?: string[], layout?: Layout) {
  if (!categories || !layout) {
    return 1;
  }

  return getAutoAdjustingInterval(categories.length, layout.xAxis.width, categories);
}
|
def to_dict(lst):
    """Map each element of ``lst`` to its square root.

    Returns a dict whose keys are the original elements and whose values
    are ``element ** 0.5``.
    """
    return {item: item ** 0.5 for item in lst}
<filename>bower_components/foundation/node_modules/karma-browserstack-launcher/node_modules/browserstacktunnel-wrapper/test/Support/BrowserStackConfig.example.js
// Example BrowserStack credentials file: copy alongside the real config and
// replace the placeholder with your account's access key.
module.exports = {
  key: 'YOUR_KEY'
};
|
<filename>controllers/vaultsecret_controller_test.go<gh_stars>10-100
//go:build integration
// +build integration
package controllers
import (
"context"
"os"
"reflect"
"regexp"
"strings"
"time"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
redhatcopv1alpha1 "github.com/redhat-cop/vault-config-operator/api/v1alpha1"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/types"
)
// Integration spec driving the full VaultSecret flow end-to-end:
// PasswordPolicy -> Vault Policies -> KubernetesAuthEngineRoles ->
// SecretEngineMount -> RandomSecrets -> VaultSecret, then verifying the
// resulting Kubernetes Secret. The repeated "wait until the resource has a
// ReconcileSuccess condition" polling is factored into hasReconcileSuccess.
var _ = Describe("VaultSecret controller", func() {
	timeout := time.Second * 120
	interval := time.Second * 2
	Context("When creating a VaultSecret from multiple secrets", func() {
		It("Should create a Secret when created", func() {
			By("By creating a new PasswordPolicy")
			ppInstance, err := decoder.GetPasswordPolicyInstance("../test/password-policy.yaml")
			Expect(err).To(BeNil())
			ppInstance.Namespace = vaultAdminNamespaceName
			Expect(k8sIntegrationClient.Create(ctx, ppInstance)).Should(Succeed())
			ppCreated := &redhatcopv1alpha1.PasswordPolicy{}
			Eventually(func() bool {
				if err := k8sIntegrationClient.Get(ctx, types.NamespacedName{Name: ppInstance.Name, Namespace: ppInstance.Namespace}, ppCreated); err != nil {
					return false
				}
				return hasReconcileSuccess(ppCreated.Status.Conditions)
			}, timeout, interval).Should(BeTrue())

			By("By creating new Policies")
			for _, path := range []string{
				"../test/kv-engine-admin-policy.yaml",
				"../test/secret-writer-policy.yaml",
				"../test/vaultsecret/policy-secret-reader.yaml",
			} {
				pInstance, err := decoder.GetPolicyInstance(path)
				Expect(err).To(BeNil())
				pInstance.Namespace = vaultAdminNamespaceName
				// Substitute the auth-mount accessor placeholder before applying.
				pInstance.Spec.Policy = strings.Replace(pInstance.Spec.Policy, "${accessor}", os.Getenv("ACCESSOR"), -1)
				Expect(k8sIntegrationClient.Create(ctx, pInstance)).Should(Succeed())
				pCreated := &redhatcopv1alpha1.Policy{}
				Eventually(func() bool {
					if err := k8sIntegrationClient.Get(ctx, types.NamespacedName{Name: pInstance.Name, Namespace: pInstance.Namespace}, pCreated); err != nil {
						return false
					}
					return hasReconcileSuccess(pCreated.Status.Conditions)
				}, timeout, interval).Should(BeTrue())
			}

			By("By creating new KubernetesAuthEngineRoles")
			for _, path := range []string{
				"../test/kv-engine-admin-role.yaml",
				"../test/secret-writer-role.yaml",
				"../test/vaultsecret/kubernetesauthenginerole-secret-reader.yaml",
			} {
				kaerInstance, err := decoder.GetKubernetesAuthEngineRoleInstance(path)
				Expect(err).To(BeNil())
				kaerInstance.Namespace = vaultAdminNamespaceName
				Expect(k8sIntegrationClient.Create(ctx, kaerInstance)).Should(Succeed())
				kaerCreated := &redhatcopv1alpha1.KubernetesAuthEngineRole{}
				Eventually(func() bool {
					if err := k8sIntegrationClient.Get(ctx, types.NamespacedName{Name: kaerInstance.Name, Namespace: kaerInstance.Namespace}, kaerCreated); err != nil {
						return false
					}
					return hasReconcileSuccess(kaerCreated.Status.Conditions)
				}, timeout, interval).Should(BeTrue())
			}

			By("By creating a new SecretEngineMount")
			semInstance, err := decoder.GetSecretEngineMountInstance("../test/kv-secret-engine.yaml")
			Expect(err).To(BeNil())
			semInstance.Namespace = vaultTestNamespaceName
			Expect(k8sIntegrationClient.Create(ctx, semInstance)).Should(Succeed())
			semCreated := &redhatcopv1alpha1.SecretEngineMount{}
			Eventually(func() bool {
				if err := k8sIntegrationClient.Get(ctx, types.NamespacedName{Name: semInstance.Name, Namespace: semInstance.Namespace}, semCreated); err != nil {
					return false
				}
				return hasReconcileSuccess(semCreated.Status.Conditions)
			}, timeout, interval).Should(BeTrue())

			By("By creating new RandomSecrets")
			for _, path := range []string{
				"../test/random-secret.yaml",
				"../test/vaultsecret/randomsecret-another-password.yaml",
			} {
				rsInstance, err := decoder.GetRandomSecretInstance(path)
				Expect(err).To(BeNil())
				rsInstance.Namespace = vaultTestNamespaceName
				Expect(k8sIntegrationClient.Create(ctx, rsInstance)).Should(Succeed())
				rsCreated := &redhatcopv1alpha1.RandomSecret{}
				Eventually(func() bool {
					if err := k8sIntegrationClient.Get(ctx, types.NamespacedName{Name: rsInstance.Name, Namespace: rsInstance.Namespace}, rsCreated); err != nil {
						return false
					}
					return hasReconcileSuccess(rsCreated.Status.Conditions)
				}, timeout, interval).Should(BeTrue())
			}

			By("By creating a new VaultSecret")
			ctx := context.Background() // NOTE: shadows the suite-level ctx used above, preserving the original behavior.
			instance, err := decoder.GetVaultSecretInstance("../test/vaultsecret/vaultsecret-randomsecret.yaml")
			Expect(err).To(BeNil())
			instance.Namespace = vaultTestNamespaceName
			Expect(k8sIntegrationClient.Create(ctx, instance)).Should(Succeed())
			created := &redhatcopv1alpha1.VaultSecret{}
			// Creation may not be observed immediately; poll until reconciled.
			Eventually(func() bool {
				if err := k8sIntegrationClient.Get(ctx, types.NamespacedName{Name: instance.Name, Namespace: instance.Namespace}, created); err != nil {
					return false
				}
				return hasReconcileSuccess(created.Status.Conditions)
			}, timeout, interval).Should(BeTrue())

			By("By checking the Secret Exists with proper Owner Reference")
			lookupKey := types.NamespacedName{Name: instance.Spec.TemplatizedK8sSecret.Name, Namespace: instance.Namespace}
			secret := &corev1.Secret{}
			Eventually(func() bool {
				return k8sIntegrationClient.Get(ctx, lookupKey, secret) == nil
			}, timeout, interval).Should(BeTrue())
			kind := reflect.TypeOf(redhatcopv1alpha1.VaultSecret{}).Name()
			Expect(secret.GetObjectMeta().GetOwnerReferences()[0].Kind).Should(Equal(kind))

			By("By checking the Secret Data matches expected pattern")
			var isLowerCaseLetter = regexp.MustCompile(`^[a-z]+$`).MatchString
			for k := range instance.Spec.TemplatizedK8sSecret.StringData {
				val, ok := secret.Data[k]
				Expect(ok).To(BeTrue())
				s := string(val)
				// Generated passwords must be exactly 20 lowercase ASCII letters.
				Expect(isLowerCaseLetter(s)).To(BeTrue())
				Expect(len(s)).To(Equal(20))
			}
		})
	})
})

// hasReconcileSuccess reports whether the given status-conditions slice
// contains a condition whose Type field equals "ReconcileSuccess". It uses
// reflection so the same helper works across every redhatcopv1alpha1 status
// type polled above, regardless of the concrete condition struct.
func hasReconcileSuccess(conditions interface{}) bool {
	v := reflect.ValueOf(conditions)
	if v.Kind() != reflect.Slice {
		return false
	}
	for i := 0; i < v.Len(); i++ {
		item := v.Index(i)
		if item.Kind() != reflect.Struct {
			continue
		}
		field := item.FieldByName("Type")
		if field.IsValid() && field.Kind() == reflect.String && field.String() == "ReconcileSuccess" {
			return true
		}
	}
	return false
}
|
<reponame>Lefhar/portfoliopublic
-- phpMyAdmin SQL Dump
-- version 4.9.7
-- https://www.phpmyadmin.net/
--
-- Hôte : localhost
-- Généré le : ven. 30 juil. 2021 à 18:26
-- Version du serveur : 10.1.26-MariaDB-0+deb9u1
-- Version de PHP : 7.0.33-52+0~20210701.57+debian9~1.gbp8e49b6
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Base de données : `c6lh`
--
-- --------------------------------------------------------
--
-- Structure de la table `contenu_cv`
--
CREATE TABLE `contenu_cv` (
  `id` int(11) NOT NULL,
  `title` char(52) NOT NULL,
  `content` longtext NOT NULL, -- HTML fragment for the section (see INSERT data below)
  `date` datetime DEFAULT CURRENT_TIMESTAMP,
  `emplacement` enum('left','right') NOT NULL DEFAULT 'left', -- presumably which page column the section renders in — confirm against the template
  `position` int(2) NOT NULL, -- presumably sort order within its column — confirm against the rendering query
  `public` int(1) NOT NULL DEFAULT '1' -- boolean-style flag; presumably 1 = visible — confirm
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Déchargement des données de la table `contenu_cv`
--
-- NOTE(review): the dump left this statement unterminated (last row ended with
-- "," instead of ";"), which breaks the import; terminated it below.
INSERT INTO `contenu_cv` (`id`, `title`, `content`, `date`, `emplacement`, `position`, `public`) VALUES
(1, 'CONNAISSANCES', '<ul>\r\n<li><img class=\"img-fluid\" src=\"assets/img/logo/html5-min.png\" alt=\"html\" width=\"30\" /> HTML</li>\r\n<li><img class=\"img-fluid\" src=\"assets/img/logo/css-min.png\" alt=\"css\" width=\"30\" /> CSS</li>\r\n<li><img class=\"img-fluid\" src=\"assets/img/logo/bootstrap.png\" alt=\"bootstrap\" width=\"30\" /> BOOTSTRAP</li>\r\n<li><img class=\"img-fluid\" src=\"assets/img/logo/javascript-min.png\" alt=\"javascript\" width=\"30\" /> Javascript</li>\r\n<li><img class=\"img-fluid\" src=\"assets/img/logo/php.png\" alt=\"php\" width=\"30\" /> PHP</li>\r\n<li><img class=\"img-fluid\" src=\"assets/img/logo/mysql.png\" alt=\"mysql\" width=\"30\" /> MySQL</li>\r\n<li><img class=\"img-fluid\" src=\"assets/img/logo/wordpress-min.png\" alt=\"wordpress\" width=\"30\" /> Wordpress</li>\r\n<li><img class=\"img-fluid\" src=\"assets/img/logo/codeigniter-min.png\" alt=\"codeigniter\" width=\"30\" /> CodeIgniter</li>\r\n<li><img class=\"img-fluid\" src=\"assets/img/logo/1200px-Octicons-mark-github.svg.png\" alt=\"Github\" width=\"30\" /> Github</li>\r\n<li><img class=\"img-fluid\" src=\"assets/img/logo/linux.png\" alt=\"linux\" width=\"30\" /> Linux Debian, Ubuntu, Centos</li>\r\n</ul>', '2021-07-25 15:22:17', 'left', 1, 1),
(3, 'COMPETENCES', '<p>Connaissances approfondies des langages MYSQL, PHP, HTML, JAVASCRIPT, CSS.</p>\r\n<p>Création de base de données, Maquetter une application, Framework CodeIgniter, Conception d\'application Web et Web Mobile.</p>\r\n<p>Analyser des problèmes techniques.</p>\r\n<p>Respecter les règles et consigne de sécurité.</p>', '2021-07-25 15:23:32', 'left', 2, 1),
(4, 'QUALITES', '<ul>\r\n<li>\r\nTravailleur\r\n</li>\r\n<li>\r\nCurieux\r\n</li>\r\n<li>\r\nSérieux\r\n</li>\r\n<li>Persévérant</li>\r\n</ul>', '2021-07-25 15:23:55', 'left', 3, 1),
(5, 'INTRODUCTION', '<p class=\"font-italic text-justify\">Passionné d’informatique depuis 2005, j’aime relever des défis techniques et résoudre les problèmes les plus complexes. J’ai commencé en autodidacte, principalement en procédural, et depuis 2 ans en POO. J’ai des compétences élevées en HTML, Javascript, PHP (procédural, PDO, POO) et MySQL. Je connais l’environnement serveur Debian, Ubuntu, centOS. Afin de professionnaliser toutes mes compétences, j\'ai suivi une formation à l’AFPA du 19 Octobre 2020 au 9 Juillet 2021.</p>', '2021-07-25 15:24:18', 'right', 2, 1),
(6, 'FORMATION', '<strong> DEVELOPPEUR WEB ET WEB MOBILE</strong>\r\n<p class=\"font-italic\">Afpa - Amiens 10/2020 - 07/2021</p>\r\n<strong>DYNAMIQUE VERS L’EMPLOI</strong>\r\n<p class=\"font-italic\">Centre relais Amiens 07/2020 – 10/2020</p>', '2021-07-25 15:25:22', 'right', 3, 1),
(7, 'EXPERIENCES PROFESSIONNELLES', '<strong>DEVELOPPEUR WEB</strong>\r\n<p class=\"font-italic\">Ma Prospection à Narbonne (stage en télétravail) 04/2021 - 07/2021</p>\r\n<strong>VENDEUR IMPRIMEUR</strong>\r\n<p class=\"font-italic\">01/09/2020 - 12/09/2020 (Top Office Amiens Sud)</p>\r\n<strong>CONTRAT D’AVENIR CITADELLE AMIENS</strong>\r\n<p class=\"font-italic\">01/2007 – 03/2007</p>\r\n<strong>INVENTORISTE</strong>\r\n<p class=\"font-italic\">Inventaire à Auchan à Dury (intérim Adecco) 06/2003 – 06/2006</p>\r\n<strong>AGENT DE PRODUCTION</strong>\r\n<p class=\"font-italic\">Prima France à Amiens 03/2003 - 09/2003</p>\r\n<strong>AGENT DE FABRICATION</strong>\r\n<p class=\"font-italic\">Friskies à Aubigny 12/2002 - 06/2003</p>', '2021-07-25 15:26:44', 'right', 4, 1);
-- --------------------------------------------------------
--
-- Structure de la table `demarcharge`
--
-- NOTE(review): table name looks like a misspelling of "demarchage"
-- (solicitation requests) — kept as-is since code elsewhere references it.
CREATE TABLE `demarcharge` (
  `id` int(11) NOT NULL COMMENT 'id aléatoire',
  `nom` varchar(150) NOT NULL COMMENT 'nom de société',
  `email` varchar(160) NOT NULL, -- unique per request (see UNIQUE KEY `email` below)
  `date` datetime NOT NULL COMMENT 'date d''envoi',
  `etat` enum('attente','refus','accepté') NOT NULL, -- workflow state: pending / refused / accepted
  `status` int(1) NOT NULL DEFAULT '0' COMMENT 'statut'
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- --------------------------------------------------------
--
-- Structure de la table `image`
--
CREATE TABLE `image` (
  `id` int(11) NOT NULL,
  `content` longtext NOT NULL, -- HTML snippet (img/anchor markup, see INSERT data below)
  `date` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
  -- UNIQUE KEY on `emplacement` (added below) means at most one row per
  -- slot: one 'profil' image block and one 'sociaux' links block.
  `emplacement` enum('profil','sociaux') NOT NULL DEFAULT 'profil'
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Déchargement des données de la table `image`
--
INSERT INTO `image` (`id`, `content`, `date`, `emplacement`) VALUES
(4, '<img class=\"rounded\" src=\"assets/file/personage_head_powerfit.jpg\" alt=\"\" width=\"150\" />', '2021-07-29 12:12:27', 'profil'),
(5, '<a title=\"github\" href=\"https://github.com/Lefhar\" target=\"_blank\" rel=\"noopener\"><img class=\"img-fluid\" src=\"assets/file/1200px-Octicons-mark-github.svg[1].png\" alt=\"\" width=\"30\" /></a><a title=\"linkedin\" href=\"https://www.linkedin.com/in/harold-lefebvre/\" target=\"_blank\" rel=\"noopener\"><img class=\"img-fluid\" src=\"assets/file/linkedin[1].png\" alt=\"\" width=\"37\" /></a>', '2021-07-29 12:16:57', 'sociaux');
-- --------------------------------------------------------
--
-- Structure de la table `mesprojets`
--
-- `mesprojets`: one row per portfolio project card.
CREATE TABLE `mesprojets` (
  `id` int(11) NOT NULL,
  `title` varchar(30) NOT NULL, -- unique (see UNIQUE KEY `title` below)
  `contenu` longtext NOT NULL, -- HTML snippet linking to the project (see INSERT data below)
  `date_create` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `lien_github` varchar(200) NOT NULL, -- may be empty when no repository exists (see row id 4)
  `lien_web` varchar(200) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Déchargement des données de la table `mesprojets`
--
-- NOTE(review): the `lien_web` of row 8 was corrupted in the dump
-- ("https://wazaaimmo.lefebv<EMAIL>/"); restored to the URL that the same
-- row's anchor href uses ("https://wazaaimmo.lefebvreharold.fr/").
INSERT INTO `mesprojets` (`id`, `title`, `contenu`, `date_create`, `lien_github`, `lien_web`) VALUES
(3, 'Jarditou avec CodeIgniter', '<a title=\"jarditou CodeIgniter\" href=\"https://jarditou.lefebvreharold.fr/\" target=\"_blank\" rel=\"noopener\"><img class=\"w-100\" src=\"assets/file/jarditou_ci-min2-min_1.dat\" alt=\"\" width=\"250\" /></a>', '2021-07-30 12:57:03', 'https://github.com/Lefhar/CodeIgniter_exercice.git', 'https://jarditou.lefebvreharold.fr/'),
(6, 'Jarditou wordpress', '<a title=\"Jarditou avec wordpress\" href=\"https://dev.amorce.org/lharold/wordpress/\" target=\"_blank\" rel=\"noopener\"><img class=\"w-100\" src=\"assets/file/jarditou-wordpress-min[1].dat\" alt=\"jarditou wordpress\" width=\"250\" /></a>', '2021-07-30 13:08:18', 'https://github.com/Lefhar/template_wp_jarditou.git', 'https://dev.amorce.org/lharold/wordpress/'),
(4, 'Pblv-scoop', '<a title=\"pblv-scoop\" href=\"https://pblv-scoop.com/\" target=\"_blank\" rel=\"noopener\"><img class=\"w-100\" src=\"assets/file/pblv-scoop-min2-min[1].dat\" alt=\"pblv-scoop\" width=\"250\" /></a>', '2021-07-30 13:02:03', '', 'https://pblv-scoop.com/'),
(9, 'Portfolio CodeIgniter 4', '<a title=\"Portfolio CodeIgniter 4\" href=\"https://lefebvreharold.fr/\" target=\"_blank\" rel=\"noopener\"><img class=\"w-100\" src=\"assets/file/portfolio.dat\" alt=\"portfolio CodeIgniter 4\" width=\"250\" /></a>', '2021-07-30 13:12:55', 'https://github.com/Lefhar/portfolio.git', 'https://lefebvreharold.fr/'),
(5, 'Travail de formation', '<a title=\"Travail de formation\" href=\"http://travaildeformation.tk/\" target=\"_blank\" rel=\"noopener\"><img class=\"w-100\" src=\"assets/file/travaildeformation-min2-min[1].dat\" alt=\"travail de formation\" width=\"250\" /></a>', '2021-07-30 13:06:41', 'https://github.com/Lefhar/travaildeformation.git', 'http://travaildeformation.tk/'),
(8, 'Wazaa immo', '<a title=\"wazaa immo\" href=\"https://wazaaimmo.lefebvreharold.fr/\" target=\"_blank\" rel=\"noopener\"><img class=\"w-100\" src=\"assets/file/wazaa_immo-min[1].dat\" alt=\"wazaa immo\" width=\"250\" /></a>', '2021-07-30 13:10:05', 'https://github.com/Lefhar/wazaaimmo.git', 'https://wazaaimmo.lefebvreharold.fr/');
-- --------------------------------------------------------
--
-- Structure de la table `servermail`
--
-- `servermail`: SMTP relay configuration used by the application.
-- NOTE(review): `password` is a plain varchar column — SMTP credentials appear
-- to be stored in cleartext. Consider encrypting at rest or moving them to
-- application configuration outside the database.
CREATE TABLE `servermail` (
  `username` varchar(150) NOT NULL,
  `password` varchar(150) NOT NULL,
  `smtp` varchar(100) NOT NULL, -- SMTP host
  `port` int(4) NOT NULL DEFAULT '25'
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- --------------------------------------------------------
--
-- Structure de la table `users`
--
CREATE TABLE `users` (
  `id` int(11) NOT NULL COMMENT 'id aléatoire',
  `role` int(1) NOT NULL DEFAULT '0', -- presumably 0 = regular user; higher = elevated role — confirm against auth code
  `email` varchar(250) NOT NULL,
  `date_create` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `date_connect` datetime NOT NULL, -- presumably last login timestamp — confirm
  `password` varchar(255) NOT NULL, -- stored alongside `salt`; presumably a salted hash — confirm against login code
  `salt` varchar(255) NOT NULL,
  `jeton` varchar(255) NOT NULL, -- "jeton" = token (fr)
  `mail_hash` varchar(255) NOT NULL -- presumably e-mail verification hash — confirm
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Déchargement des données de la table `users`
--
--
-- Index pour les tables déchargées
--
--
-- Index pour la table `contenu_cv`
--
ALTER TABLE `contenu_cv`
ADD PRIMARY KEY (`id`);
--
-- Index pour la table `demarcharge`
--
ALTER TABLE `demarcharge`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `email` (`email`);
--
-- Index pour la table `image`
--
ALTER TABLE `image`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `emplacement` (`emplacement`);
--
-- Index pour la table `mesprojets`
--
ALTER TABLE `mesprojets`
ADD UNIQUE KEY `title` (`title`),
ADD KEY `id` (`id`);
--
-- Index pour la table `servermail`
--
ALTER TABLE `servermail`
ADD UNIQUE KEY `username` (`username`,`password`);
--
-- Index pour la table `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT pour les tables déchargées
--
--
-- AUTO_INCREMENT pour la table `contenu_cv`
--
ALTER TABLE `contenu_cv`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=13;
--
-- AUTO_INCREMENT pour la table `demarcharge`
--
ALTER TABLE `demarcharge`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'id aléatoire', AUTO_INCREMENT=106;
--
-- AUTO_INCREMENT pour la table `image`
--
ALTER TABLE `image`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6;
--
-- AUTO_INCREMENT pour la table `mesprojets`
--
ALTER TABLE `mesprojets`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=10;
--
-- AUTO_INCREMENT pour la table `users`
--
ALTER TABLE `users`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'id aléatoire', AUTO_INCREMENT=19;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.brooklyn.camp.dto;
import io.brooklyn.camp.BasicCampPlatform;
import io.brooklyn.camp.CampServer;
import io.brooklyn.camp.commontypes.RepresentationSkew;
import io.brooklyn.camp.rest.util.CampRestGuavas;
import io.brooklyn.camp.spi.AbstractResource;
import java.io.IOException;
import java.util.Arrays;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * TestNG tests for {@code ResourceDto}: verifies that a DTO built from an
 * {@code AbstractResource} exposes the expected field values, and that it
 * survives a Jackson JSON serialization/deserialization round trip.
 *
 * <p>The class-level {@code @Test} annotation makes every public method of
 * this class a TestNG test, so {@code testSimpleSerializationAndDeserialization}
 * runs even without its own annotation.
 */
@Test
public class ResourceDtoTest {

    // private static final Logger log = LoggerFactory.getLogger(ResourceDtoTest.class);

    // Shared fixtures; rebuilt by initSimpleDto() at the start of each test.
    CampServer s;        // server whose DtoFactory produces the DTO under test
    AbstractResource rr; // source resource the DTO is created from
    ResourceDto r;       // DTO under test

    /**
     * Builds a {@code CampServer} backed by a {@code BasicCampPlatform},
     * registers an identity URI function for {@code AbstractResource}, then
     * creates the DTO under test from a fully populated resource
     * (name, description, tags, representation skew).
     */
    @SuppressWarnings("unchecked")
    protected void initSimpleDto() {
        s = new CampServer(new BasicCampPlatform(), "http://atest/");
        s.getDtoFactory().getUriFactory().registerIdentityFunction(AbstractResource.class, "basic", CampRestGuavas.IDENTITY_OF_REST_RESOURCE);
        rr = AbstractResource.builder().name("Name").description("a description").
                tags(Arrays.asList("tag1", "tag 2")).representationSkew(RepresentationSkew.NONE).build();
        r = ResourceDto.newInstance(s.getDtoFactory(), rr);
    }

    /** Checks that every field set on the source resource is reflected on the DTO. */
    @Test
    public void testSimpleCreation() throws IOException {
        initSimpleDto();
        Assert.assertNotNull(r.getCreatedAsString());
        Assert.assertEquals(r.getName(), "Name");
        Assert.assertEquals(r.getDescription(), "a description");
        Assert.assertEquals(r.getTags(), Arrays.asList("tag1", "tag 2"));
        Assert.assertEquals(r.getRepresentationSkew(), RepresentationSkew.NONE);
    }

    /**
     * Serializes the DTO to a JSON tree, reads it back with a fresh
     * ObjectMapper, and asserts the round-tripped instance equals the original.
     */
    public void testSimpleSerializationAndDeserialization() throws IOException {
        initSimpleDto();
        JsonNode t = BasicDtoTest.tree(r);
        // Assert.assertEquals(t.get("uri").asText(), r.getUri());
        ResourceDto r2 = new ObjectMapper().readValue(t.toString(), ResourceDto.class);
        Assert.assertNotNull(r2.getCreated());
        Assert.assertEquals(r, r2);
    }
}
|
<gh_stars>0
from datetime import datetime, timedelta
from typing import (
TYPE_CHECKING,
Any,
AsyncContextManager,
Callable,
Dict,
Generic,
List,
Protocol,
TypeVar,
Union,
runtime_checkable,
)
if TYPE_CHECKING:
from avilla.core.execution import Execution
from avilla.core.protocol import BaseProtocol
from avilla.core.relationship import Relationship
# Type variable bound to a protocol implementation (forward ref, resolved lazily).
TProtocol = TypeVar("TProtocol", bound="BaseProtocol")

# Middleware invoked with (relationship, execution); returns an async context
# manager that brackets the execution.
TExecutionMiddleware = Callable[["Relationship", "Execution"], AsyncContextManager[None]]

_T = TypeVar("_T", contravariant=True)


@runtime_checkable
class Ensureable(Protocol[_T]):
    """Structural type for objects exposing an ``ensure(interact)`` method.

    Idiom note: ``Protocol[_T]`` is the PEP 544 shorthand for the original
    ``(Protocol, Generic[_T])`` base list — behavior is identical.
    Because of ``@runtime_checkable``, ``isinstance`` checks only verify that
    an ``ensure`` attribute exists; the signature is not checked at runtime.
    """

    def ensure(self, interact: _T) -> Any:
        ...
|
<gh_stars>100-1000
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package consts
import (
"errors"
"github.com/shiningrush/droplet/data"
)
// Business error codes carried in data.BaseError responses (see the
// BaseError variables at the bottom of this file).
const (
	// ErrBadRequest is the code used for invalid or malformed requests.
	ErrBadRequest = 20001
	// ErrForbidden is the code used when access is denied.
	ErrForbidden = 20002
)

const (
	// IDNotFound is the format string used when an object cannot be found
	// in the cache by its id (args: resource name, id).
	IDNotFound = "%s id: %s not found"
)
var (
	// ErrUsernamePassword means the username or password is not correct.
	ErrUsernamePassword = errors.New("username or password error")
	// ErrIDUsername is returned when the input's id and username differ.
	ErrIDUsername = errors.New("consumer's id and username must be a same value")
	// ErrParameterID is returned when the parameter ID is empty.
	ErrParameterID = errors.New("Parameter IDs cannot be empty")
	// ErrRouteData is returned when the route data to export is empty.
	ErrRouteData = errors.New("Route data is empty, cannot be exported")
	// ErrImportFile is returned when an empty or invalid file is imported.
	ErrImportFile = errors.New("empty or invalid imported file")
	// ErrSSLCertificate means the certificate is invalid.
	ErrSSLCertificate = errors.New("invalid certificate")
	// ErrSSLCertificateResolution means the SSL certificate decode failed.
	ErrSSLCertificateResolution = errors.New("Certificate resolution failed")
	// ErrSSLKeyAndCert means the SSL key and SSL certificate don't match.
	ErrSSLKeyAndCert = errors.New("key and cert don't match")
)
// Structured errors built on droplet's data.BaseError; for base errors refer
// to github.com/shiningrush/droplet/data (e.g. data.ErrNotFound,
// data.ErrConflicted).
var (
	// ErrInvalidRequest means the request itself is invalid.
	ErrInvalidRequest = data.BaseError{Code: ErrBadRequest, Message: "invalid request"}
	// ErrSchemaValidateFailed means the payload failed JSONSchema validation.
	ErrSchemaValidateFailed = data.BaseError{Code: ErrBadRequest, Message: "JSONSchema validate failed"}
	// ErrIPNotAllow means the client IP address is not allowed.
	ErrIPNotAllow = data.BaseError{Code: ErrForbidden, Message: "IP address not allowed"}
)
|
#!/bin/bash
# Fetches the macOS VMware Tools images and installs them into the VMware
# isoimages directory. Must run as root; aborts on the first failing command.
set -e

# Banner
echo "Get macOS VMware Tools 3.0.2.1"
echo "==============================="
echo "(c) Dave Parsons 2015-18"

# Restrict PATH so only unmodified system commands are used
export PATH=/bin:/sbin:/usr/bin:/usr/sbin

# Root is required to write under /usr/lib/vmware
if (( EUID != 0 )); then
    echo "This script must be run as root" 1>&2
    exit 1
fi

echo "Getting VMware Tools..."
python gettools.py
cp ./tools/darwin*.* /usr/lib/vmware/isoimages/
echo "Finished!"
|
<reponame>QuaternionMark/COMP371-FinalProject
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2009-2014 DreamWorks Animation LLC.
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of DreamWorks Animation nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
#ifndef INCLUDED_IMF_DWA_COMRESSOR_H
#define INCLUDED_IMF_DWA_COMRESSOR_H
//------------------------------------------------------------------------------
//
// class DwaCompressor -- Store lossy RGB data by quantizing DCT components.
//
//------------------------------------------------------------------------------
#include <vector>
#include <half.h>
#include "ImfInt64.h"
#include "ImfZip.h"
#include "ImfChannelList.h"
#include "ImfCompressor.h"
#include "ImfNamespace.h"
OPENEXR_IMF_INTERNAL_NAMESPACE_HEADER_ENTER
// Compressor storing lossy RGB data by quantizing DCT components (see the
// file-level comment above). Declaration only; implementations live in the
// corresponding .cpp.
class DwaCompressor: public Compressor
{
  public:

    // Strategy used to compress the quantized AC DCT coefficients.
    enum AcCompression
    {
        STATIC_HUFFMAN,
        DEFLATE,
    };

    DwaCompressor (const Header &hdr,
                   int maxScanLineSize,
                   int numScanLines,       // ideally is a multiple of 8
                   AcCompression acCompression);

    virtual ~DwaCompressor ();

    virtual int numScanLines () const;

    virtual Imf::Compressor::Format format () const;

    // Scanline-oriented compress/uncompress entry points.
    virtual int compress (const char *inPtr,
                          int inSize,
                          int minY,
                          const char *&outPtr);

    // Tile-oriented variant; `range` is the tile's pixel bounding box.
    virtual int compressTile (const char *inPtr,
                              int inSize,
                              Imath::Box2i range,
                              const char *&outPtr);

    virtual int uncompress (const char *inPtr,
                            int inSize,
                            int minY,
                            const char *&outPtr);

    virtual int uncompressTile (const char *inPtr,
                                int inSize,
                                Imath::Box2i range,
                                const char *&outPtr);

    // One-time global initialization hook (static — no instance state).
    static void initializeFuncs ();

  private:

    // Opaque helper types; defined in the implementation file.
    struct ChannelData;
    struct CscChannelSet;
    struct Classifier;

    class LossyDctDecoderBase;
    class LossyDctDecoder;
    class LossyDctDecoderCsc;
    class LossyDctEncoderBase;
    class LossyDctEncoder;
    class LossyDctEncoderCsc;

    // Per-channel compression scheme chosen during classification.
    enum CompressorScheme
    {
        UNKNOWN = 0,
        LOSSY_DCT,
        RLE,

        NUM_COMPRESSOR_SCHEMES
    };

    //
    // Per-chunk compressed data sizes, one value per chunk
    //
    enum DataSizesSingle
    {
        VERSION = 0,                // Version number:
                                    //   0: classic
                                    //   1: adds "end of block" to the AC RLE

        UNKNOWN_UNCOMPRESSED_SIZE,  // Size of leftover data, uncompressed.
        UNKNOWN_COMPRESSED_SIZE,    // Size of leftover data, zlib compressed.

        AC_COMPRESSED_SIZE,         // AC RLE + Huffman size
        DC_COMPRESSED_SIZE,         // DC + Deflate size

        RLE_COMPRESSED_SIZE,        // RLE + Deflate data size
        RLE_UNCOMPRESSED_SIZE,      // RLE'd data size
        RLE_RAW_SIZE,               // Un-RLE'd data size

        AC_UNCOMPRESSED_COUNT,      // AC RLE number of elements
        DC_UNCOMPRESSED_COUNT,      // DC number of elements

        AC_COMPRESSION,             // AC compression strategy

        NUM_SIZES_SINGLE
    };

    AcCompression     _acCompression;

    int               _maxScanLineSize;
    int               _numScanLines;
    int               _min[2], _max[2];   // bounding box of the data being processed

    ChannelList                _channels;
    std::vector<ChannelData>   _channelData;
    std::vector<CscChannelSet> _cscSets;      // channel sets compressed via Y'CbCr
    std::vector<Classifier>    _channelRules;

    // Scratch buffers and their sizes (AC/DC coefficients, RLE data, output,
    // and per-scheme planar uncompressed data).
    char             *_packedAcBuffer;
    size_t            _packedAcBufferSize;

    char             *_packedDcBuffer;
    size_t            _packedDcBufferSize;

    char             *_rleBuffer;
    size_t            _rleBufferSize;

    char             *_outBuffer;
    size_t            _outBufferSize;

    char             *_planarUncBuffer[NUM_COMPRESSOR_SCHEMES];
    size_t            _planarUncBufferSize[NUM_COMPRESSOR_SCHEMES];

    Zip              *_zip;
    float             _dwaCompressionLevel;

    // Shared implementations behind the public scanline/tile entry points.
    int compress (const char *inPtr,
                  int inSize,
                  Imath::Box2i range,
                  const char *&outPtr);

    int uncompress (const char *inPtr,
                    int inSize,
                    Imath::Box2i range,
                    const char *&outPtr);

    void initializeBuffers (size_t&);
    void initializeDefaultChannelRules ();
    void initializeLegacyChannelRules ();

    void relevantChannelRules( std::vector<Classifier> &) const;

    //
    // Populate our cached version of the channel data with
    // data from the real channel list. We want to
    // copy over attributes, determine compression schemes
    // releveant for the channel type, and find sets of
    // channels to be compressed from Y'CbCr data instead
    // of R'G'B'.
    //
    void classifyChannels (ChannelList channels,
                           std::vector<ChannelData> &chanData,
                           std::vector<CscChannelSet> &cscData);

    //
    // Compute various buffer pointers for each channel
    //
    void setupChannelData (int minX, int minY, int maxX, int maxY);
};
OPENEXR_IMF_INTERNAL_NAMESPACE_HEADER_EXIT
#endif
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.