text
stringlengths 27
775k
|
|---|
import 'package:functional_error_handling_dart/functional_error_handling_dart.dart';
/// Demonstrates imperative (mutable-list) vs functional (Either-based)
/// collection of lookup results for a fixed set of item ids.
Future<void> main() async {
  const itemIds = [1, 2, 3, 4, 5];

  // Bad Code: imperative accumulation into a mutable list. Lookup failures
  // are silently dropped, so the caller cannot tell which ids were missing.
  var badItemList = <Item>[]; // mutable list
  for (var itemId in itemIds) {
    var targetItem = ItemService.find(itemId);
    if (targetItem.isEmpty) {
      // error handling
    } else {
      badItemList.add(targetItem.orNull()!);
    }
  }
  badItemList; // do something

  // better code? Each id maps to an Either, so both the failure message and
  // the found item are captured in a single immutable result list.
  // (Local renamed from the misspelled `beffetItemList`.)
  final betterItemList = itemIds.map<Either<String, Item>>((itemId) {
    final targetItem = ItemService.find(itemId);
    return targetItem.isEmpty
        ? Left('$itemId not included')
        : Right(targetItem.orNull()!);
  }).toList(); // immutable list
  betterItemList; // do something
}
/// Immutable value object pairing an id with a message.
class Item {
  final int itemid;
  final String message;
  const Item({required this.itemid, required this.message});
}

/// Fixture data used by [ItemService.find].
///
/// Note the intentional gap: there is no item with id 4, so lookups for 4
/// exercise the "not found" path in the demo above.
const itemList = [
  Item(itemid: 1, message: 'message1'),
  Item(itemid: 2, message: 'message2'),
  Item(itemid: 3, message: 'message3'),
  // Missing id: 4
  Item(itemid: 5, message: 'message5'),
  Item(itemid: 6, message: 'message6'),
];
/// Lookup service over the static [itemList] fixture.
class ItemService {
  /// Returns `Some(item)` when an item with [id] exists, `None()` otherwise.
  static Option<Item> find(int id) {
    try {
      final match = itemList.firstWhere((item) => item.itemid == id);
      return Some(match);
    } catch (_) {
      // firstWhere throws StateError when no element matches.
      return None();
    }
  }
}
|
require 'spec_helper'
require 'etl/audit_loader'
RSpec.describe ETL::AuditLoader do
  let(:audit_dimension) { instance_double(Dimensions::Audit) }

  subject { described_class.new(audit_dimension: audit_dimension) }

  # Fixed typo in the example description: "reocrd" -> "record".
  it 'creates an Import record' do
    expect(audit_dimension).to receive(:update_attributes!).with(
      inserted_records: 5,
      log: { 'Cancelled' => 5 }
    )
    subject.call(records: [1, 2, 3, 4, 5], log: { 'Cancelled' => 5 })
  end
end
|
require 'puppet/util/errors'
require 'puppet/util/execution'
require 'octokit'
# Puppet provider that manages a user's SSH public key on GitHub via the
# Octokit API. Authentication uses the `github_token` Facter fact.
Puppet::Type.type(:github_ssh_key).provide :github do
  include Puppet::Util::Execution
  include Puppet::Util::Errors

  # The resource exists when a remote GitHub key matching the local key
  # material is found (truthy id) — nil otherwise.
  def exists?
    existing_id
  end

  # Removes the matching remote key.
  def destroy
    api.remove_key(existing_id)
  end

  # Uploads the local key under the configured (or default) title.
  def create
    api.add_key(title, key)
  end

  private

  # Memoized Octokit client authenticated via the github_token fact.
  # auto_paginate ensures api.keys returns every registered key.
  def api
    @api ||= Octokit::Client.new(
      access_token: Facter.value(:github_token),
      auto_paginate: true
    )
  end

  # GitHub id of the remote key whose material equals the local key,
  # or nil when no such key is registered.
  def existing_id
    match = api.keys.find { |x| x.key == key }
    return nil unless match
    match.id
  end

  # Absolute path to the local public key file from the resource.
  def path
    @path ||= File.expand_path @resource[:path]
  end

  # Local key material. rpartition(' ').first drops the final
  # space-separated field of the OpenSSH public key line (the comment).
  # NOTE(review): assumes the key file always has a trailing comment field;
  # confirm keys written without one are handled upstream.
  def key
    @key ||= File.read(path).rpartition(' ').first
  end

  # Fallback key title: "user@hostname" built from Facter facts.
  def default_title
    "#{Facter.value(:user)}@#{Facter.value(:hostname)}"
  end

  def title
    @title ||= @resource[:title] || default_title
  end
end
|
#!/bin/bash
# Build both sub-projects, forwarding the first argument as the make target.
#
# Fixes over the previous version:
#  * If `make` failed, `cd ..` was skipped and the second build ran from
#    inside ggc-core. Subshells keep the caller's working directory stable.
#  * `$1` is now quoted; `${1:+"$1"}` still invokes the default target when
#    no argument is given (a bare quoted "" would be an error for make).
#  * `set -e` stops the script on the first failed build instead of
#    continuing from a broken state.
set -e

(cd ggc-core && make ${1:+"$1"})
(cd ggc-app && make ${1:+"$1"})
|
// Copyright (c) 2019 The DAML Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.ledger.client.binding.offset
import com.digitalasset.ledger.api.v1.ledger_offset.LedgerOffset
import com.digitalasset.ledger.api.v1.ledger_offset.LedgerOffset.LedgerBoundary.{
LEDGER_BEGIN,
LEDGER_END
}
import com.digitalasset.ledger.api.v1.ledger_offset.LedgerOffset.Value.{Absolute, Boundary, Empty}
/** Ordering for [[LedgerOffset]] values.
  *
  * LEDGER_BEGIN sorts before everything, LEDGER_END after everything, and
  * absolute offsets compare numerically. Empty or unrecognized offsets are
  * rejected by throwing a RuntimeException.
  *
  * NOTE(review): `Ordering.fromLessThan` expects a *strict* less-than, but
  * comparing LEDGER_BEGIN with itself yields `true` here — confirm callers
  * never compare an offset against itself (or against an equal boundary).
  */
object LedgerOffsetOrdering {

  val ledgerBegin = LedgerOffset(Boundary(LEDGER_BEGIN))
  val ledgerEnd = LedgerOffset(Boundary(LEDGER_END))

  implicit val offsetOrdering: Ordering[LedgerOffset] = Ordering.fromLessThan { (a, b) =>
    a.value match {
      case Boundary(LEDGER_BEGIN) => true
      case Boundary(LEDGER_END) => false
      // Unrecognized boundary or empty value on the left: fail fast.
      case Boundary(_) => emptyOffset(a)
      case Empty => emptyOffset(a)
      case Absolute(strA) =>
        b.value match {
          case Boundary(LEDGER_BEGIN) => false
          case Boundary(LEDGER_END) => true
          // Unrecognized boundary or empty value on the right: fail fast.
          case Boundary(_) => emptyOffset(b)
          case Empty => emptyOffset(b)
          case Absolute(strB) => BigInt(strA) < BigInt(strB) // TODO this is not compatible with LS
        }
    }
  }

  // Always throws; declared to return Boolean so it can sit in the match arms.
  private def emptyOffset(o: LedgerOffset): Boolean =
    throw new RuntimeException(s"Offset '$o' is empty")
}
|
#-------------------------------------------------------------------------------
#
# Thomas Thomassen
# thomas[at]thomthom[dot]net
#
#-------------------------------------------------------------------------------
module TT::Plugins::QuadFaceTools
  # Thin in-memory caching layer over SketchUp's per-plugin preference store.
  module Settings

    @cache = {}

    # Returns the value stored under +key+, reading it from Sketchup's
    # defaults (and memoizing it) on first access.
    def self.read(key, default = nil)
      return @cache[key] if @cache.key?(key)
      @cache[key] = Sketchup.read_default(PLUGIN_ID, key, default)
    end

    # Writes +value+ to both the cache and Sketchup's defaults.
    # Returns +value+.
    def self.write(key, value)
      @cache[key] = value
      Sketchup.write_default(PLUGIN_ID, key, value)
      value
    end

  end # module Settings
end # module
|
# how to generate html API documentation using pdoc3
1. Using anaconda prompt, activate dryft environment per repository README.
2. Check that pdoc3 is installed: `pip install pdoc3`
3. Navigate to repository locally: `cd path/to/dryft`
4. Use pdoc3: `python -m pdoc --html dryft`
The generated HTML files will be located in a new folder called 'html'. The pdoc3 documentation is [here](https://pdoc3.github.io/pdoc).
|
import Log from "../core/log";
import {default as parseArgs} from "minimist";
/**
 * A parsed console command: the command name plus its minimist-parsed
 * arguments.
 */
export default class ConsoleCommand {
    public readonly base: string;
    public readonly args: any;

    /**
     * @param {string} base The command name (first whitespace token).
     * @param {*} args Parsed arguments (minimist output object).
     */
    constructor(base: string, args: any) {
        this.base = base;
        this.args = args;
    }

    // TODO
    /**
     * Parses a raw console command string into a ConsoleCommand.
     *
     * Bug fix: the previous implementation called `split.splice(1, 0)`,
     * which removes zero elements and returns an empty array, then joined
     * it into a string — so minimist never received any arguments (and
     * minimist expects an array of tokens, not a joined string anyway).
     * `slice(1)` yields every token after the command name.
     *
     * @param {string} consoleCommandString
     * @return {ConsoleCommand}
     */
    public static parse(consoleCommandString: string): ConsoleCommand {
        const split: string[] = consoleCommandString.split(" ");

        Log.info(split.join(" "));

        return new ConsoleCommand(split[0], parseArgs(split.slice(1)));
    }
}
|
#include "LevelLoader.h"
//#include <iostream>
#include <fstream>
//#include <iomanip>
#include "../Modules/ActorFactory.h"
// LevelLoader holds no state; nothing to initialize.
LevelLoader::LevelLoader(void)
{
}
// LevelLoader owns no resources; nothing to release.
LevelLoader::~LevelLoader(void)
{
}
// Serializes a world to two text files:
//   ./maps/<levelName>.map   — one actor per line:
//       <classID> <x> <y> <scaleX> <scaleY> <rotation>
//   ./maps/<levelName>.paths — navigation graph:
//       <index> <x> <y>          (one line per path point)
//       -1                       (divider)
//       <fromIndex> <toIndex>    (one line per directed connection)
//
// NOTE(review): neither stream is checked after open(); if ./maps does not
// exist the writes fail silently — confirm the directory is guaranteed.
void LevelLoader::save(World* world, const std::string levelName)
{
    // saving all actors in this world
    std::ofstream mapFile;
    mapFile.open(std::string("./maps/").append(levelName).append(std::string(".map")));
    for (auto const &actor : world->allActors)
    {
        mapFile << actor->getClassID();
        mapFile << " ";
        mapFile << actor->getLocation().x;
        mapFile << " ";
        mapFile << actor->getLocation().y;
        mapFile << " ";
        mapFile << actor->getScale().x;
        mapFile << " ";
        mapFile << actor->getScale().y;
        mapFile << " ";
        mapFile << actor->getRotation().getValue();
        mapFile << "\n";
    }
    mapFile.close();

    // saving paths: assign each path point a sequential index so the
    // connection section can reference points by number
    typedef std::map<PathPoint*, int> PathPointMap;
    PathPointMap points;
    std::ofstream pathsFile;
    pathsFile.open(std::string("./maps/").append(levelName).append(std::string(".paths")));
    int i = 0;
    // save locations of pathpoints
    for (auto const &navPoint : world->navigationMap)
    {
        points.insert(PathPointMap::value_type(navPoint, i));
        pathsFile << i;
        pathsFile << " ";
        pathsFile << navPoint->location.x;
        pathsFile << " ";
        pathsFile << navPoint->location.y;
        pathsFile << "\n";
        i++;
    }
    // divider between the point section and the connection section
    pathsFile << "-1\n";
    // save connections of pathpoints (one directed edge per line)
    for (auto const &point : points)
    {
        int firstIndex = points.at(point.first);
        for (auto const &connection : point.first->legalPoints)
        {
            int secondIndex = points.at(connection);
            pathsFile << firstIndex;
            pathsFile << " ";
            pathsFile << secondIndex;
            pathsFile << "\n";
        }
    }
    pathsFile.close();
}
// Loads actors and the navigation graph previously written by save().
//
// Fixes over the previous version:
//  * `while (!stream.eof())` loops processed one garbage iteration after the
//    last record and never detected failed extractions; every loop now tests
//    the extraction result itself.
//  * The try/catch around ifstream::open could never fire — streams do not
//    throw unless exceptions() is enabled. Replaced by an is_open() check.
void LevelLoader::load(World* world, const std::string levelName)
{
    // loading actors
    std::ifstream mapFile(std::string("./maps/").append(levelName).append(std::string(".map")));
    if (!mapFile.is_open())
    {
        return;
    }
    std::string className;
    while (mapFile >> className)
    {
        float xPos, yPos;
        float xScale, yScale;
        float angle;
        if (!(mapFile >> xPos >> yPos >> xScale >> yScale >> angle))
        {
            // truncated record: stop instead of spawning a garbage actor
            break;
        }
        ActorFactory::Factory().placeActor(className, world, Vector2D(xPos, yPos),
            Vector2D(xScale, yScale), Rotator(angle));
    }
    mapFile.close();

    // loading paths
    typedef std::map<int, PathPoint*> PathPointMap;
    PathPointMap points;
    std::ifstream pathsFile(std::string("./maps/").append(levelName).append(std::string(".paths")));
    // load locations of pathpoints until the -1 divider
    int point;
    while (pathsFile >> point)
    {
        // leave cycle when reached divider
        if (point == -1) break;
        float xPos, yPos;
        if (!(pathsFile >> xPos >> yPos))
            break;
        points.insert(PathPointMap::value_type(point, new PathPoint(Vector2D(xPos, yPos))));
    }
    // load connections and link pathpoints (one directed edge per pair)
    int point1, point2;
    while (pathsFile >> point1 >> point2)
    {
        // create way from point1 to point2
        points.at(point1)->connect(points.at(point2));
    }
    // put paths into the world
    // NOTE(review): assumes file indices are exactly 0..size-1; a gap would
    // make points.at(i) throw std::out_of_range (same as the original).
    for (int i = 0, pointsSize = points.size(); i < pointsSize; i++)
    {
        world->addPathPoint(points.at(i));
    }
    pathsFile.close();
}
|
<?php
declare(strict_types = 1);
require __DIR__.'/../vendor/autoload.php';
use Innmind\HttpServer\Main;
use Innmind\Http\Message\{
ServerRequest,
Response,
};
use Innmind\Compose\ContainerBuilder\ContainerBuilder;
use Innmind\Url\{
PathInterface,
Path,
};
use Innmind\HttpFramework\Environment;
use Innmind\Immutable\{
MapInterface,
Set,
};
use Symfony\Component\Yaml\Yaml;
/**
 * HTTP entry point: builds the environment map for each request and
 * delegates handling to the request handler resolved from the container.
 */
new class extends Main
{
    /**
     * Handles one HTTP request.
     */
    protected function main(ServerRequest $request): Response
    {
        $environment = $this->environment($request);

        return $this->handle($request, $environment);
    }

    /**
     * Builds the environment map: camelized variables from config/.env and
     * the request environment, plus framework settings (routes, templates,
     * debug flag, neo4j metadata and credentials, file storage path).
     *
     * @return MapInterface<string, mixed>
     */
    private function environment(ServerRequest $request): MapInterface
    {
        $environment = Environment::camelize(
            __DIR__.'/../config/.env',
            $request->environment()
        );

        // debug is enabled only when appEnv is explicitly "dev"
        return $environment
            ->put('routes', Set::of(
                PathInterface::class,
                new Path(__DIR__.'/../config/routes.yml')
            ))
            ->put('templates', new Path(__DIR__.'/../templates'))
            ->put('debug', ($environment['appEnv'] ?? 'prod') === 'dev')
            ->put('metas', [Yaml::parseFile(__DIR__.'/../config/neo4j/entities.yml')])
            ->put('neo4jPassword', $environment['neo4jPassword'])
            ->put('filesStoragePath', $environment['filesStoragePath']);
    }

    /**
     * Resolves the request handler from the DI container and invokes it.
     *
     * NOTE(review): the container is rebuilt for every request; consider
     * caching it if this entry point serves more than one request.
     *
     * @param MapInterface<string, mixed> $environment
     */
    private function handle(ServerRequest $request, MapInterface $environment): Response
    {
        $container = (new ContainerBuilder)(
            new Path(__DIR__.'/../config/container/web.yml'),
            $environment
        );
        $handle = $container->get('requestHandler');

        return $handle($request);
    }
};
|
module SyntheticWeb.Counter.ByteCounter
( ByteCounter (..)
, empty
, addByteCount
) where
import GHC.Int (Int64)
-- | Strict pair of byte totals: bytes downloaded and bytes uploaded.
data ByteCounter =
  ByteCounter { download :: {-# UNPACK #-} !Int64
              , upload :: {-# UNPACK #-} !Int64 }
  deriving (Show)
empty :: ByteCounter
empty = ByteCounter 0 0
-- | Combine two counters by summing their download and upload totals.
addByteCount :: ByteCounter -> ByteCounter -> ByteCounter
addByteCount (ByteCounter d1 u1) (ByteCounter d2 u2) =
  ByteCounter (d1 + d2) (u1 + u2)
|
sudo apt-get update

# Set the timezone non-interactively. The inner quotes are now single
# quotes; the previous nested double quotes ("echo "US/Eastern" > ...")
# only worked by accident because the value contains no spaces.
sudo sh -c 'echo "US/Eastern" > /etc/timezone'
sudo dpkg-reconfigure -f noninteractive tzdata

# Preseed postfix so apt-get install does not prompt.
sudo debconf-set-selections <<< "postfix postfix/mailname string $HOSTNAME"
sudo debconf-set-selections <<< "postfix postfix/main_mailer_type string 'Internet Site'"

# Build toolchain and libraries for the python/tmux/emacs builds below.
# Bug fix: "zip\" had no space before the continuation backslash, which
# glued "zip" and "xauth" into the nonexistent package name "zipxauth".
sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \
    libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \
    xz-utils tk-dev libffi-dev jq xclip libevent-dev texinfo build-essential texinfo libx11-dev \
    libxpm-dev libjpeg-dev libpng-dev libgif-dev libtiff-dev libncurses-dev libxpm-dev \
    automake autoconf libevent-dev libgtk-3-dev mailutils libgnutls28-dev aspell-en zip \
    xauth x11-apps software-properties-common fzf direnv trash-cli libxaw7-dev
# libgtk2.0-dev libgnutls-dev

# docker
# curl -fsSL get.docker.com -o get-docker.sh
# sh get-docker.sh
# sudo groupadd docker
# sudo usermod -aG docker $USER

# git config --global user.email "mshuaic@users.noreply.github.com"
git config --global user.email "mark@shma.dev"
git config --global user.name "Mark"

bash install_python.sh
bash install_tmux.sh
bash install_emacs.sh
bash toHome.sh
|
# Classify a hand of cards read from standard input.
cards = gets.split.map(&:to_i)

# Multiplicity of each distinct rank, e.g. [3, 2] for a full house.
counts = cards.uniq.map { |card| cards.count(card) }

hand =
  if counts.include?(3)
    counts.include?(2) ? 'FULL HOUSE' : 'THREE CARD'
  elsif counts.include?(2)
    counts.count(2) == 2 ? 'TWO PAIR' : 'ONE PAIR'
  else
    'NO HAND'
  end

puts hand
|
var PATH = require("path");
var stackTrace = require("stack-trace");
var _ = require("lodash");
// Helpers for inspecting the require() call stack: find which module called
// into this one, and produce various views of the current stack trace.
module.exports = {
    // Returns the module object `depth` levels up the deduplicated call
    // stack, or null when the depth is out of range or the module cannot be
    // resolved from require.cache.
    getParentModule: function (depth) {
        // get unique filename call stack
        var paths = module.exports.getUniqueFilenameStackTrace();
        // we work relative to parent path,
        // so we remove ModuleHelper from stack trace
        paths = _.slice(paths, 1, paths.length);
        if (depth < paths.length) {
            var path = paths[depth];
            var m = require.cache[path];
            // the main module is not stored in require.cache
            if(! m && require.main.filename === path) {
                m = require.main;
            }
            return m ? m : null;
        } else {
            return null;
        }
    },
    // Filenames on the current stack, deduplicated, with node's internal
    // frames ("module.js", "node.js") removed.
    getUniqueFilenameStackTrace: function() {
        return _(stackTrace.get())
            .map(function (x) {
                return x.getFileName();
            }).reject(function (path) {
                return path === "module.js" || path === "node.js"
            }).uniq()
            .value();
    },
    // Raw filenames for every frame on the current stack (may repeat).
    getFilenameStackTrace: function() {
        return _(stackTrace.get())
            .map(function (x) {
                return x.getFileName();
            })
            .value();
    },
    // "filename:line" strings for every frame on the current stack.
    getStackTrace: function() {
        return _(stackTrace.get())
            .map(function (x) {
                return x.getFileName()+":"+x.getLineNumber();
            })
            .value();
    }
};
|
# === COPYRIGHT:
# Copyright (c) North Carolina State University
# Developed with funding for the National eXtension Initiative.
# === LICENSE:
#
# see LICENSE file
# Wraps a date and answers year/week ("yearweek") calendar questions around
# a set of milestone dates in eXtension's history. Calendar arithmetic is
# delegated to class methods mixed in via `extend YearWeek`.
class EpochDate
  extend YearWeek

  attr_accessor :date

  # earliest google analytics data
  GA_START = Date.parse('2007-02-23')
  # in preparation for the eXtension launch, existing content
  # was republished into the new www site under development
  # this is the early content "created at" date we have in the
  # www data
  WWW_CONTENT_START = Date.parse('2008-02-05')
  # eXtension National launch
  WWW_LAUNCH = Date.parse('2008-02-21')
  # New content marked as "noindex" so that Google wouldn't
  # index the largely duplicate news content
  WWW_NEWS_NOINDEX = Date.parse('2010-11-03')
  # the day after Google's "panda" search algorithm change
  # was released.
  POST_PANDA = Date.parse('2011-02-24')
  # www faq/article/news/event urls modified to be
  # all /pages/id/seo-friendly-title
  WWW_URL_REVAMP = Date.parse('2011-03-19')
  # the day after Google's "panda" search algorithm change
  # was released globally
  POST_GLOBAL_PANDA = Date.parse('2011-04-12')
  # after analyzing all content, we marked any page not getting
  # more than one entrance from Google Search per week on
  # average
  WWW_LOW_PEFORM_NOINDEX = Date.parse('2011-05-27')
  # noindex experiment had no apparent effect on traffic
  # all faqs, articles, events marked as "index"
  WWW_REINDEX_AFE = Date.parse('2011-07-12')
  # actual migration date is one day prior
  # ag energy and animal manure management content migrated
  CREATE_PILOT_WIKI_MIGRATION = Date.parse('2011-05-06')
  # bulk of content migrated to create
  CREATE_FIRST_WIKI_MIGRATION = Date.parse('2011-06-18')
  # faqs migrated to create
  CREATE_FAQ_MIGRATION = Date.parse('2011-06-23')
  # all copwiki content migrated to create
  CREATE_FINAL_WIKI_MIGRATION = Date.parse('2011-07-09')
  # AaEv2
  AAE_V2_TRANSITION = Date.parse('2012-12-03')
  # range slider to radio button conversion on evaluation form
  CONVERT_EVALUATION_SLIDERS = Date.parse('2014-07-21')

  def initialize(date)
    @date = date
  end

  # [year, week] pair for this date (per YearWeek's calendar rules)
  def year_week
    self.class.year_week_for_date(@date)
  end

  # single-integer encoding of year_week
  def yearweek
    (year,week) = self.year_week
    self.class.yearweek(year,week)
  end

  # [year, week] of the week before this date's week
  def previous_year_week
    (year,week) = self.year_week
    (sow,eow) = self.class.date_pair_for_year_week(year,week)
    previous = sow - 1.day
    [previous.cwyear,previous.cweek]
  end

  # [year, week] of the week after this date's week
  def next_year_week
    (year,week) = self.year_week
    # Bug fix: date_pair_for_year_week is a *class* method (added by
    # `extend YearWeek`); calling it on the instance raised NoMethodError.
    # Also uses `1.day` for consistency with the sibling methods.
    (sow,eow) = self.class.date_pair_for_year_week(year,week)
    next_date = eow + 1.day
    self.class.year_week_for_date(next_date)
  end

  # the `count` [year, week] pairs preceding this date's week
  def previous_year_weeks(count)
    (year,week) = self.year_week
    (sow,eow) = self.class.date_pair_for_year_week(year,week)
    previous_date_end = sow - 1.day
    previous_date_start = (previous_date_end - count.week) + 1.day
    self.class.year_weeks_between_dates(previous_date_start,previous_date_end)
  end

  # the `count` [year, week] pairs following this date's week
  def next_year_weeks(count)
    (year,week) = self.year_week
    (sow,eow) = self.class.date_pair_for_year_week(year,week)
    next_date_start = eow + 1.day
    next_date_end = (next_date_start + count.week) - 1.day
    self.class.year_weeks_between_dates(next_date_start,next_date_end)
  end

  # previous_year_weeks encoded as yearweek integers
  def previous_yearweeks(count)
    year_weeks = previous_year_weeks(count)
    year_weeks.map{|(year,week)| self.class.yearweek(year,week)}
  end

  # next_year_weeks encoded as yearweek integers
  def next_yearweeks(count)
    year_weeks = next_year_weeks(count)
    year_weeks.map{|(year,week)| self.class.yearweek(year,week)}
  end

  # convenience constructor anchored at the post-Panda epoch
  def self.panda_epoch_date
    self.new(POST_PANDA)
  end
end
|
/**
* Example: https://github.com/jasonsoft-net/jasonsoft-express-server
* FilePath: /jasonsoft-express-server/app.js
* Added by Jason.Song (成长的小猪) on 2021/09/28
* CSDN: https://blog.csdn.net/jasonsong2008
* GitHub: https://github.com/jasonsoft-net
* Organizations: https://github.com/jasonsoft
*/
import Express from 'express';
/**
* Import the ControllerProvider from @jasonsoft/express-controller
*/
import { ControllerProvider } from '@jasonsoft/express-controller';
const app = new Express();
/**
* Inject the controller directory
*/
ControllerProvider.initControllers({
router: app,
/** The default directory is './src/controllers' */
dir: './app/controllers',
});
// app.get('/', (req, res) => {
// res.send('Hello World!');
// });
/** Service port */
const port = Number(process.env.PORT || 3000);
/** Listening port */
app.listen(port, () => {
console.log(
`[\x1B[36mRunning\x1B[0m] Application is running on: http://localhost:${port}`,
);
});
|
package nguyengiap.vietitpro.tudienanhviet.com.adapter;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.text.Html;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import java.util.List;
import nguyengiap.vietitpro.tudienanhviet.com.R;
import nguyengiap.vietitpro.tudienanhviet.com.model.EVEntity;
/**
* Created by Amin Ghabi on 13/09/15.
*
*/
public class AdaptetListSearch extends RecyclerView.Adapter<AdaptetListSearch.RadioViewHolder> {
private Context context;
private List<EVEntity> listVoca;
public AdaptetListSearch(Context context, List<EVEntity> listVoca) {
this.context = context;
this.listVoca = listVoca;
}
@Override
public RadioViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.list_item_search, parent, false);
return new RadioViewHolder(view);
}
@Override
public void onBindViewHolder(RadioViewHolder holder, final int position) {
EVEntity item = listVoca.get(position);
holder.txt_word.setText(item.getWord().toUpperCase());
if(!TextUtils.isEmpty(item.getPhonic())) {
holder.txt_phonetic.setText(item.getPhonic());
}
if(!TextUtils.isEmpty(item.getSimpleMeans())) {
holder.txt_summary.setText(item.getSimpleMeans());
}
}
@Override
public int getItemCount() {
return listVoca.size();
}
public class RadioViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
TextView txt_word;
TextView txt_phonetic;
TextView txt_summary;
public RadioViewHolder(View itemView) {
super(itemView);
txt_word= (TextView) itemView.findViewById(R.id.word);
txt_phonetic= (TextView) itemView.findViewById(R.id.phonetic);
txt_summary= (TextView) itemView.findViewById(R.id.txtsummary);
itemView.setOnClickListener(this);
}
@Override
public void onClick(View v) {
Toast.makeText(context,"aaa",Toast.LENGTH_LONG).show();
}
}
}
|
desc "This task is called by the Heroku scheduler add-on"
task :clean_articles => :environment do
  puts "Cleaning articles..."
  # NOTE(review): `created_at > ?` destroys articles *newer* than one day
  # and keeps the old ones. If the intent is to purge stale articles this
  # should be `created_at < ?` -- confirm against the scheduler's purpose.
  Article.where("created_at > ?", 1.day.ago).destroy_all
  puts "done."
end
|
"""
@title
@description
"""
import argparse
import CoDrone
def get_sensor_state(drone):
    """Collect a snapshot of every readable CoDrone sensor.

    Args:
        drone: a connected CoDrone instance (any object exposing the
            getter methods called below).

    Returns:
        dict mapping a short sensor name to the value returned by the
        corresponding getter.
    """
    sensor_vals = {
        'accel': drone.get_accelerometer(),
        'ang_speed': drone.get_angular_speed(),
        'battery': drone.get_battery_percentage(),
        'battery_voltage': drone.get_battery_voltage(),
        'temp': drone.get_drone_temp(),
        # Fixed key typo: was 'gyor_angles'.
        'gyro_angles': drone.get_gyro_angles(),
        'height': drone.get_height(),
        'opt_flow_position': drone.get_opt_flow_position(),
        'pressure': drone.get_pressure(),
        'state': drone.get_state(),
        'trim': drone.get_trim(),
    }
    return sensor_vals
def main(main_args):
    """Pair with the nearest CoDrone, calibrate, fly a short demo, and land.

    Args:
        main_args: parsed CLI arguments as a dict (currently unused).
    """
    # CoDrone has a 50ms delay between commands and will disconnect if too many
    # commands are sent at once and too fast.
    codrone = CoDrone.CoDrone()
    try:
        # When paired, your BLE LED and CoDrone tail LED should both be solid green.
        codrone.pair(codrone.Nearest)

        # Show trim values before calibration for comparison.
        trim = codrone.get_trim()
        print(f'pitch: {trim.PITCH}')
        print(f'roll: {trim.ROLL}')
        print(f'yaw: {trim.YAW}')
        print(f'throttle: {trim.THROTTLE}')

        codrone.calibrate()
        # give the drone time to finish calibrating before reading trim again
        CoDrone.time.sleep(5)

        trim = codrone.get_trim()
        print(f'pitch: {trim.PITCH}')
        print(f'roll: {trim.ROLL}')
        print(f'yaw: {trim.YAW}')
        print(f'throttle: {trim.THROTTLE}')

        # take off the drone if state is not on flight
        state = codrone.get_state()
        print(f'State: {state}')
        if state != 'FLIGHT':
            codrone.takeoff()
        codrone.hover(3)

        # dump a full sensor snapshot
        sensor_vals = get_sensor_state(codrone)
        for each_key, each_val in sensor_vals.items():
            print(f'{each_key}: {each_val}')

        # drone.set() will prepare the CoDrone to move in a certain direction and speed,
        # while drone.move() will actually move the CoDrone in the air.
        codrone.set_pitch(30)  # Set positive pitch to 30% power
        codrone.set_roll(-30)  # Set negative roll to 30% power
        codrone.move(2)  # forward and right for 2 seconds
    except Exception as e:
        # best-effort demo: report the error; cleanup happens in finally
        print(e)
    finally:
        # always try to land and release the BLE connection
        codrone.land()
        codrone.disconnect()
        codrone.close()
    return
if __name__ == '__main__':
    # No CLI options are defined yet; argparse still provides --help.
    parser = argparse.ArgumentParser(description='')
    args = parser.parse_args()
    main(vars(args))
|
# Changelog
## 2.1.3
- Fix for object properties
## 2.1.2
- Version bump to trigger a green build
## 2.1.1
- Fixed a bug where component file names were wrong on Windows builds
## 2.1.0
- Annotating React.Fragments as a configurable option
## 2.0.1
- Readme update
## 2.0.0
- React.Fragments are no longer annotated
- Updated to new dependency versions
## 1.0.2
- Added a homepage link to package.json
## 1.0.1
- Added an npm ignore list so that samples and tests aren't installed by npm.
- Tweaked the explanation of `data-element` in the main README.
- Stopped Jest from running sample tests when testing the plugin.
## 1.0.0
Initial Release
|
/**
* Copyright 2020 Shimizu Yasuhiro (yshrsmz)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.codingfeline.twitter4kt.core
import com.codingfeline.twitter4kt.core.util.Twitter4ktInternalAPI
import kotlin.contracts.ExperimentalContracts
import kotlin.contracts.InvocationKind
import kotlin.contracts.contract
/**
 * Result of an API call: either a [Success] carrying a value or a
 * [Failure] carrying the error that occurred.
 */
public sealed class ApiResult<T> {
    public data class Success<T>(val value: T) : ApiResult<T>()
    public data class Failure<T>(val error: Throwable) : ApiResult<T>()

    /** Returns the wrapped value on success, or `null` on failure. */
    public fun getOrNull(): T? {
        return when (this) {
            is Success -> this.value
            is Failure -> null
        }
    }

    /** Returns the wrapped error on failure, or `null` on success. */
    public fun exceptionOrNull(): Throwable? {
        return when (this) {
            is Failure -> this.error
            is Success -> null
        }
    }

    public companion object {
        /** Wraps [value] in a [Success]. */
        public fun <T> success(value: T): Success<T> = Success(value)
        /** Wraps [error] in a [Failure]. */
        public fun <T> failure(error: Throwable): Failure<T> = Failure(error)
    }
}
@OptIn(ExperimentalContracts::class)
public fun <T> ApiResult<T>.isSuccess(): Boolean {
contract {
returns(true) implies (this@isSuccess is ApiResult.Success)
returns(false) implies (this@isSuccess is ApiResult.Failure)
}
return this is ApiResult.Success
}
@OptIn(ExperimentalContracts::class)
public fun <T> ApiResult<T>.isFailure(): Boolean {
contract {
returns(true) implies (this@isFailure is ApiResult.Failure)
returns(false) implies (this@isFailure is ApiResult.Success)
}
return this is ApiResult.Failure
}
@Suppress("unused", "NOTHING_TO_INLINE")
internal inline fun <T> Result<T>.toApiResult(): ApiResult<T> {
return when {
isSuccess -> ApiResult.success(getOrThrow())
else -> ApiResult.failure(requireNotNull(exceptionOrNull()))
}
}
@Suppress("unused")
public fun <T> ApiResult<T>.getOrThrow(): T {
return when (this) {
is ApiResult.Failure -> throw error
is ApiResult.Success -> value
}
}
@Suppress("unused")
@OptIn(ExperimentalContracts::class)
public inline fun <R, T : R> ApiResult<T>.getOrElse(onFailure: (exception: Throwable) -> R): R {
contract {
callsInPlace(onFailure, InvocationKind.AT_MOST_ONCE)
}
return when (this) {
is ApiResult.Failure -> onFailure(error)
is ApiResult.Success -> value
}
}
@Suppress("unused")
/** Returns the wrapped value on success, or [defaultValue] on failure. */
public fun <R, T : R> ApiResult<T>.getOrDefault(defaultValue: R): R {
    // Success carries the value; any failure collapses to the caller's default.
    return if (this is ApiResult.Success) value else defaultValue
}
@OptIn(ExperimentalContracts::class)
public inline fun <R, T> ApiResult<T>.fold(
onSuccess: (value: T) -> R,
onFailure: (exception: Throwable) -> R
): R {
contract {
callsInPlace(onSuccess, InvocationKind.AT_MOST_ONCE)
callsInPlace(onFailure, InvocationKind.AT_MOST_ONCE)
}
@Suppress("UNCHECKED_CAST")
return when (this) {
is ApiResult.Success -> onSuccess(value)
is ApiResult.Failure -> onFailure(error)
}
}
@Twitter4ktInternalAPI
public inline fun <T, R> ApiResult<T>.runCatching(block: ApiResult<T>.() -> R): ApiResult<R> {
return try {
ApiResult.success(block())
} catch (e: Throwable) {
ApiResult.failure(e)
}
}
@Suppress("unused")
@OptIn(ExperimentalContracts::class)
public inline fun <R, T> ApiResult<T>.map(transform: (value: T) -> R): ApiResult<R> {
contract {
callsInPlace(transform, InvocationKind.AT_MOST_ONCE)
}
return when (this) {
is ApiResult.Success -> ApiResult.success(transform(value))
is ApiResult.Failure -> ApiResult.failure(error)
}
}
@Suppress("unused")
@OptIn(Twitter4ktInternalAPI::class)
public inline fun <R, T> ApiResult<T>.mapCatching(transform: (value: T) -> R): ApiResult<R> {
return when (this) {
is ApiResult.Success -> runCatching { transform(value) }
is ApiResult.Failure -> ApiResult.failure(error)
}
}
@Suppress("unused")
@OptIn(ExperimentalContracts::class)
public inline fun <R, T : R> ApiResult<T>.recover(transform: (value: Throwable) -> R): ApiResult<R> {
contract {
callsInPlace(transform, InvocationKind.AT_MOST_ONCE)
}
return when (this) {
is ApiResult.Success -> ApiResult.success(value)
is ApiResult.Failure -> ApiResult.success(transform(error))
}
}
@Suppress("unused")
@OptIn(Twitter4ktInternalAPI::class)
public inline fun <R, T : R> ApiResult<T>.recoverCatching(transform: (value: Throwable) -> R): ApiResult<R> {
return when (this) {
is ApiResult.Success -> ApiResult.success(value)
is ApiResult.Failure -> runCatching { transform(error) }
}
}
@Suppress("unused")
@OptIn(ExperimentalContracts::class)
public inline fun <T> ApiResult<T>.onFailure(action: (exception: Throwable) -> Unit): ApiResult<T> {
contract {
callsInPlace(action, InvocationKind.AT_MOST_ONCE)
}
exceptionOrNull()?.let(action)
return this
}
@Suppress("unused")
@OptIn(ExperimentalContracts::class)
public inline fun <T> ApiResult<T>.onSuccess(action: (value: T) -> Unit): ApiResult<T> {
contract {
callsInPlace(action, InvocationKind.AT_MOST_ONCE)
}
getOrNull()?.let(action)
return this
}
|
import { ThemeTypes } from '@getstation/theme';
import * as classNames from 'classnames';
import * as React from 'react';
// @ts-ignore: no declaration file
import injectSheet from 'react-jss';
interface Classes {
container: string,
dot: string,
close: string,
minimize: string,
expand: string,
}
interface Props {
classes?: Classes,
focused: boolean,
handleClose: () => any,
handleMinimize: () => any,
handleExpand: () => any,
dark?: boolean,
allHover?: boolean,
}
@injectSheet((theme: ThemeTypes) => ({
container: {
display: 'flex',
justifyContent: 'space-between',
flex: '0 0 auto',
padding: 6,
paddingBottom: 4,
width: 50,
},
dot: {
...theme.avatarMixin('10px'),
backgroundColor: ({ dark }: Props) => dark ? `#000` : `#FFF`,
opacity: ({ focused, allHover }: Props) => allHover ? 1 : (focused ? 0.5 : 0.2),
flex: '0 0 auto',
transition: 'all 100ms ease-out',
'&:hover': {
opacity: 1,
},
},
close: {
backgroundColor: ({ allHover }: Props) => allHover ? '#FF6059' : 'parent',
'&:hover': {
backgroundColor: '#FF6059',
},
},
minimize: {
backgroundColor: ({ allHover }: Props) => allHover ? '#FFBD2E' : 'parent',
'&:hover': {
backgroundColor: '#FFBD2E',
},
},
expand: {
backgroundColor: ({ allHover }: Props) => allHover ? '#29C941' : 'parent',
'&:hover': {
backgroundColor: '#29C941',
},
},
}))
export default class TrafficLights extends React.PureComponent<Props, {}> {
render() {
const { classes, handleClose, handleMinimize, handleExpand } = this.props;
return (
<div className={classes!.container}>
<div className={classNames(classes!.dot, classes!.close)} onClick={handleClose} />
<div className={classNames(classes!.dot, classes!.minimize)} onClick={handleMinimize} />
<div className={classNames(classes!.dot, classes!.expand)} onClick={handleExpand} />
</div>
);
}
}
|
package com.example.kru13.fractal;
import android.graphics.Bitmap;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
public class MainActivity extends AppCompatActivity {
public static final String TAG = "MainActivity";
// Used to load the 'native-lib' library on application startup.
static {
System.loadLibrary("native-lib");
}
/**
* A native method that is implemented by the 'native-lib' native library,
* which is packaged with this application.
*/
public native String stringFromJNI();
private native void fractal(Bitmap img, double minR, double maxR, double minI, double maxI);
int width = 800;
int height = 600;
double offsetDownX = 0;
double offsetUpX = 0;
double offsetDownY = 0;
double offsetUpY = 0;
double minR = -2.2;
double maxR = 1.0;
double minI = -1.2;
double maxI = 1.2;
ImageView imgview;
TextView tvMinR;
TextView tvMaxR;
TextView tvMinI;
TextView tvMaxI;
Bitmap.Config mConf;
Bitmap mB;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mConf = Bitmap.Config.ARGB_8888;
mB = Bitmap.createBitmap(width, height, mConf);
setContentView(R.layout.activity_main);
imgview = (ImageView) findViewById(R.id.imageView1);
tvMinR = (TextView) findViewById(R.id.minR);
tvMaxR = (TextView) findViewById(R.id.maxR);
tvMinI = (TextView) findViewById(R.id.minI);
tvMaxI = (TextView) findViewById(R.id.maxI);
imgview.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
if (motionEvent.getAction() == android.view.MotionEvent.ACTION_DOWN) {
offsetDownX = motionEvent.getX();
offsetDownY = motionEvent.getY();
} else if (motionEvent.getAction() == android.view.MotionEvent.ACTION_UP) {
offsetUpX = motionEvent.getX();
offsetUpY = motionEvent.getY();
// Calculate new intervals
double intR = maxR - minR;
double intI = maxI - minI;
double Rf = intR / width;
double If = intI / height;
double omaxR = maxR;
double ominI = minI;
double omaxI = maxI;
double ominR = minR;
minR = (offsetDownX * Rf) + ominR;
maxR = (offsetUpX * Rf) + ominR;
minI = (offsetUpY * If) + ominI;
maxI = (offsetDownY * If) + ominI;
calcFractal();
}
return true;
}
});
calcFractal();
}
public void buttonClick(View v) {
switch (v.getId()) {
case R.id.button1:
// Example of a call to a native method
minR = -2.2;
maxR = 1.0;
minI = -1.2;
maxI = 1.2;
calcFractal();
break;
}
}
void calcFractal() {
Log.d("NDK fractal", "Starting ...");
mB = Bitmap.createBitmap(width, height, mConf);
fractal(mB, minR, maxR, minI, maxI);
imgview.setImageBitmap(mB);
tvMinR.setText("minR: " + minR);
tvMaxR.setText("maxR: " + maxR);
tvMinI.setText("minI: " + minI);
tvMaxI.setText("maxI: " + maxI);
Log.d("NDK fractal", "Finished");
}
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Samples
{
/// <summary>
/// Endpoint and client configuration for the Acesso Cidadão
/// IdentityServer samples.
/// </summary>
public static class Constants
{
    public const string BaseAddress = "https://developers.es.gov.br/acessocidadao/is";

    // Local redirect URIs registered for the sample clients.
    public const string RedirectUriCorporativo = "https://localhost:44345/";
    public const string RedirectUriImplicit = "https://localhost:44358/";

    // Standard OpenID Connect endpoints relative to BaseAddress.
    public const string AuthorizeEndpoint = BaseAddress + "/connect/authorize";
    public const string LogoutEndpoint = BaseAddress + "/connect/endsession";
    public const string TokenEndpoint = BaseAddress + "/connect/token";
    public const string UserInfoEndpoint = BaseAddress + "/connect/userinfo";
    public const string IdentityTokenValidationEndpoint = BaseAddress + "/connect/identitytokenvalidation";

    public const string ClientIdCorporativo = "e5bd55b7-d1e5-4f22-a1db-475965436c22";
    // SECURITY NOTE(review): a client secret is hard-coded and committed to
    // source control. If this is a live credential it should be rotated and
    // moved to secure configuration (user secrets / environment variables).
    public const string ClientSecretCorporativo = "Ns70NyK3Q9L8&Jr9WyVnJb @wI0KCgoMW";
    public const string ClientIdImplicit = "f751504d-dd45-47ee-8551-7cf0e40c29eb";
}
}
|
<?php
namespace App\Helpers;
use App\Pattern;
/**
 * Increments the numeric portion of a stored "last pattern" string
 * (e.g. "A0001B" -> "A0002B") and persists it on the Pattern model.
 */
class BibliobigrafiRelationship
{
    private $pattern;      // raw last_pattern loaded from the model
    private $str;          // Pattern primary key
    private $newPattern;   // [prefix char, digit run, suffix char] from regex
    private $patternId;    // incremented pattern, null until joinPattern ran

    /**
     * Load the stored pattern identified by $str and split it into parts.
     */
    public function modifyPattern($str)
    {
        $this->str = $str;
        $this->findPattern();
    }

    /**
     * Persist the incremented pattern and return it; falls back to the
     * unmodified pattern when the increment could not be produced.
     */
    public function getPattern()
    {
        $this->updatePattern();
        return $this->preventNull();
    }

    public function preventNull()
    {
        return is_null($this->patternId) ? $this->pattern : $this->patternId;
    }

    private function updatePattern()
    {
        try {
            $pattern = Pattern::find($this->str);
            $pattern->last_pattern = $this->joinPattern();
            $pattern->save();
        } catch (\Throwable $th) {
            return $th;
        }
    }

    /**
     * Replace the digit run in the pattern with its incremented value.
     *
     * Fix: the original incremented $this->newPattern[1] *before* taking
     * strlen(), so a carry (999 -> 1000) replaced one character too many
     * and leading zeros were lost (0001 -> "2" replaced a single char).
     * The replacement now uses the original digit width and zero-pads.
     */
    private function joinPattern()
    {
        $digits = $this->newPattern[1];
        $width = strlen($digits);
        // Preserve the original zero-padded width (grows naturally on carry).
        $next = str_pad((string) ((int) $digits + 1), $width, '0', STR_PAD_LEFT);
        $inc = substr_replace(
            $this->pattern,
            $next,
            strlen($this->newPattern[0]),
            $width
        );
        $this->patternId = $inc;
        return $inc;
    }

    /**
     * Split the pattern into [prefix char, digit run (>=3 digits), suffix char].
     */
    private function processPattern()
    {
        $matches = [];
        $pattern = '/(\w)(\d{3,})(\w)/';
        preg_match($pattern, $this->pattern, $matches);
        array_shift($matches);
        $this->newPattern = $matches;
    }

    private function findPattern()
    {
        $this->pattern = Pattern::findOrFail($this->str)->last_pattern;
        $this->processPattern();
    }
}
|
package com.github.lemfi.kest.cadence.executor
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import com.github.lemfi.kest.core.model.Execution
import com.google.gson.Gson
import com.uber.cadence.activity.ActivityOptions
import com.uber.cadence.client.WorkflowClient
import com.uber.cadence.client.WorkflowClientOptions
import com.uber.cadence.client.WorkflowOptions
import com.uber.cadence.common.RetryOptions
import com.uber.cadence.context.ContextPropagator
import com.uber.cadence.serviceclient.ClientOptions
import com.uber.cadence.serviceclient.WorkflowServiceTChannel
import com.uber.cadence.worker.WorkerFactory
import com.uber.cadence.worker.WorkerOptions
import com.uber.cadence.workflow.Workflow
import com.uber.cadence.workflow.WorkflowMethod
import org.opentest4j.AssertionFailedError
import java.lang.reflect.Type
import java.time.Duration
import kotlin.reflect.KFunction
import kotlin.reflect.jvm.javaType
// Executes a single cadence activity by wrapping it in a one-shot generic
// workflow. A local worker on the "KEST_TL" task list hosts the generic
// Workflow; the target activity is expected to be hosted by a worker
// listening on [tasklist].
internal class ActivityExecution<RESULT>(
    private val cadenceHost: String,
    private val cadencePort: Int,
    private val cadenceDomain: String,
    private val tasklist: String,
    private val activity: KFunction<RESULT>,
    private val params: Array<out Any?>?,
    private val contextPropagators: List<ContextPropagator>?,
    private val type: Type,
) : Execution<RESULT>() {
    @Suppress("unchecked_cast")
    override fun execute(): RESULT {
        // Start a worker factory hosting the generic workflow on KEST_TL.
        WorkerFactory.newInstance(
            WorkflowClient.newInstance(
                WorkflowServiceTChannel(
                    ClientOptions.newBuilder()
                        .setHost(cadenceHost)
                        .setPort(cadencePort)
                        .build()
                ),
                WorkflowClientOptions.newBuilder().setDomain(cadenceDomain).build()
            )
        )
            .apply {
                val worker = newWorker(
                    "KEST_TL", WorkerOptions.defaultInstance()
                )
                worker.registerWorkflowImplementationTypes(com.github.lemfi.kest.cadence.executor.Workflow::class.java)
            }.start()
        // Parameters after the receiver (index 0) must match the supplied args.
        val parameterTypes = activity.parameters.subList(1, activity.parameters.size).also { parameterTypes ->
            if ((params?.size ?: 0) != parameterTypes.size) throw AssertionFailedError(
                "Wrong number of parameter for activity, expected [${
                    parameterTypes.map { it.type }.joinToString(", ")
                }] got ${params?.toList()}"
            )
        }
        // Run the workflow stub; it reflectively invokes the activity on the
        // remote worker and returns the JSON-roundtripped result.
        return WorkflowClient.newInstance(
            WorkflowServiceTChannel(
                ClientOptions.newBuilder()
                    .setHost(cadenceHost)
                    .setPort(cadencePort)
                    .build()
            ),
            WorkflowClientOptions.newBuilder().setDomain(cadenceDomain).build()
        )
            .newWorkflowStub(IWorkflow::class.java, WorkflowOptions.Builder()
                .setExecutionStartToCloseTimeout(Duration.ofSeconds(30))
                .setTaskList("KEST_TL")
                .apply {
                    contextPropagators?.let {
                        setContextPropagators(it)
                    }
                }
                .build())
            .run(
                WorkflowParameter(
                    // Declaring type of the activity (type of the receiver parameter).
                    activity.parameters[0].type::javaType.get().typeName,
                    tasklist,
                    activity.name,
                    params?.mapIndexed { index, it ->
                        Parameter(
                            jacksonObjectMapper().writeValueAsString(it),
                            parameterTypes[index].type.javaType.typeName
                        )
                    })
            ).let {
                when (it) {
                    // NOTE(review): unchecked cast — only sound when RESULT is nullable.
                    null -> null as RESULT
                    else -> Gson().fromJson(Gson().toJsonTree(it), type)
                }
            }
    }
}
// Serializable description of an activity invocation passed to the generic
// workflow: the activity interface name, the task list hosting it, the
// function to call, and its JSON-encoded arguments.
class WorkflowParameter(
    val className: String,
    val tasklist: String,
    val function: String,
    val parameters: List<Parameter>?
)
// Workflow contract used by ActivityExecution; runs one activity invocation.
interface IWorkflow {
    @WorkflowMethod
    fun run(parameters: WorkflowParameter): Any?
}
// Generic workflow implementation: reflectively resolves the activity
// interface and method described by [WorkflowParameter] and invokes it
// through a cadence activity stub.
class Workflow : IWorkflow {

    override fun run(parameters: WorkflowParameter): Any? {
        val activityClass = Class.forName(parameters.className)
        val mapper = jacksonObjectMapper()

        // Deserialize each argument into its declared parameter type.
        val arguments = parameters.parameters?.map { parameter ->
            mapper.readValue(parameter.value, Class.forName(parameter.cls))
        }
        val argumentTypes = parameters.parameters?.map { parameter -> Class.forName(parameter.cls) }

        // Pick the overload matching the argument types; fall back to the
        // zero-argument method when no parameters were supplied.
        val method =
            if (argumentTypes != null) activityClass.getMethod(parameters.function, *argumentTypes.toTypedArray())
            else activityClass.getMethod(parameters.function)

        val stub = getActivity(activityClass, parameters.tasklist)
        return if (arguments != null) method.invoke(stub, *arguments.toTypedArray())
        else method.invoke(stub)
    }

    // Builds an activity stub bound to [tasklist] with fixed timeout and
    // retry settings.
    private fun <T> getActivity(cls: Class<T>, tasklist: String): T =
        Workflow.newActivityStub(
            cls,
            ActivityOptions.Builder()
                .setTaskList(tasklist)
                .setScheduleToCloseTimeout(Duration.ofSeconds(600))
                .setRetryOptions(
                    RetryOptions.Builder()
                        .setInitialInterval(Duration.ofSeconds(1))
                        .setExpiration(Duration.ofMinutes(1))
                        .setMaximumAttempts(5)
                        .build()
                )
                .build()
        )
}
// One activity argument: its JSON-encoded value and fully-qualified class name.
data class Parameter(
    val value: String?,
    val cls: String
)
|
# Tear down the Prometheus / Alertmanager monitoring stack.
# NOTE(review): `helm del --purge` is Helm v2 syntax, and the release name
# "helm" looks suspicious — confirm the intended release.
helm del --purge helm
# Remove the manifests applied during setup (RBAC, Alertmanager, server, rules).
kubectl delete -f /home/hingnekar_mayur/k8s-practice/prometheus-alerting-0.20.0-helm/prometheus-instance-rbac.yaml
kubectl delete secret alertmanager-alertmanager -n monitoring
kubectl delete -f /home/hingnekar_mayur/k8s-practice/prometheus-alerting-0.20.0-helm/alertmanager-setup.yaml
kubectl delete -f /home/hingnekar_mayur/k8s-practice/prometheus-alerting-0.20.0-helm/prometheus-server.yaml
kubectl delete -f /home/hingnekar_mayur/k8s-practice/prometheus-alerting-0.20.0-helm/prometheus-rules.yaml
|
'use strict';
const fs = require('fs');
const StaticMaps = require('staticmaps');
const svg2img = require('svg2img');
const { Colors, Transport } = require('../helpers/enums');
// Placeholder tokens inside the SVG marker template, replaced by recolorSvg.
const varsToChange = ['%arrow.fill', '%arrow.stroke', '%circle.fill', '%circle.stroke', '%direction', '%route.number'];
// Raw marker template read once at module load.
const transportMarker = fs.readFileSync('./assets/transport.svg', 'utf-8');
// Minibus colors fill the first placeholders; the trailing 0 and 220 fill
// %direction and %route.number. NOTE(review): confirm those two defaults.
const newTransMarker = recolorSvg(transportMarker, varsToChange, [...Colors[Transport.Minibus], 0, 220]);
// svg2img(newTransMarker, (err, buff) => {
// if (err) console.error(err);
// fs.writeFileSync(`assets/${date}.png`, buff);
// const marker = {
// img: `assets/${date}.png`,
// offsetX: 30,
// offsetY: 30,
// width: 60,
// height: 60,
// coord: [30.45781, 50.46596],
// };
// map.addMarker(marker);
// const coords = ''
// .split(' ')
// .map(i => i.split(','))
// .map(i => [+i[1], +i[0]]);
// const line = { coords, color: Colors[Transport.Minibus][2], simplify: true };
// map.addLine(line);
// map
// .render([30.45781, 50.46596], 17)
// .then(() => map.image.save(`${Date.now()}.png`))
// .then(() => console.log('File saved!'))
// .catch(console.log);
// });
module.exports = class Map extends StaticMaps {
constructor(data = {}) {
super(data)
this.setup(data)
}
setup(data) {
this.transport = data.transport || {}
this.transport.number = data.transport.number ||
}
setTransport(lat, lng) {
Object.defineProperty(this, 'trasportPoint', [lat, lng]);
return this;
}
setTransType(type) {
Object.defineProperty(this.transport, 'type', type);
}
toJSON() {
return {};
}
};
/**
 * Replaces each pattern in `oldExp` (treated as a case-insensitive, global
 * regular expression) with the value at the same index in `newExp`.
 * Returns the rewritten SVG string; the input is not mutated.
 */
function recolorSvg(svg, oldExp = [], newExp = []) {
  return oldExp.reduce(
    (result, pattern, index) => result.replace(new RegExp(pattern, 'gi'), newExp[index]),
    svg
  );
}
|
# Localized resources for DSR_ReplaceText
# Placeholders: {0} = file path, {1} = regular expression, {2} = matched string(s).
# The here-string content is consumed verbatim by ConvertFrom-StringData.
ConvertFrom-StringData @'
SearchForTextMessage = Searching using RegEx '{1}' in file '{0}'.
StringNotFoundMessageAppend = String not found using RegEx '{1}' in file '{0}', change required.
StringNotFoundMessage = String not found using RegEx '{1}' in file '{0}', change not required.
StringMatchFoundMessage = String(s) '{2}' found using RegEx '{1}' in file '{0}'.
StringReplacementRequiredMessage = String found using RegEx '{1}' in file '{0}', replacement required.
StringNoReplacementMessage = String found using RegEx '{1}' in file '{0}', no replacement required.
StringReplaceTextMessage = String replaced by '{1}' in file '{0}'.
StringReplaceSecretMessage = String replaced by secret text in file '{0}'.
FileParentNotFoundError = File parent path '{0}' not found.
FileEncodingNotInDesiredState = File encoding is set to '{0}' but should be set to '{1}', Change required.
'@
|
/*
 * An Adaptive Hash Table
 * Sumer Cip 2012
 */
#ifndef YHASHTAB_H
#define YHASHTAB_H

#include "config.h"

/* Power-of-two sizing: HSIZE(n) = 2^n buckets, HMASK(n) masks an index. */
#define HSIZE(n) (1<<n)
#define HMASK(n) (HSIZE(n)-1)
/* Load factor threshold -- presumably triggers a resize; confirm in the .c file. */
#define HLOADFACTOR 0.75

/* One key/value slot, chained on collision. */
struct _hitem {
    uintptr_t key;
    uintptr_t val;
    int free; // for recycling.
    struct _hitem *next;
};
typedef struct _hitem _hitem;

/* Hash table header. Field semantics inferred from names -- confirm in the .c file. */
typedef struct {
    int realsize;   /* allocated bucket count, presumably HSIZE(logsize) */
    int logsize;    /* log2 of the bucket count */
    int count;      /* number of live items */
    int mask;       /* index mask, presumably HMASK(logsize) */
    int freecount;  /* number of recycled (free) items */
    _hitem ** _table;
} _htab;

/* Create a table with 2^logsize buckets; NULL on allocation failure (confirm). */
_htab *htcreate(int logsize);
/* Release the table and all items. */
void htdestroy(_htab *ht);
/* Look up key; NULL when absent. */
_hitem *hfind(_htab *ht, uintptr_t key);
/* Insert key/val; return value semantics defined in the .c file. */
int hadd(_htab *ht, uintptr_t key, uintptr_t val);
/* Call fn(item, arg) for each live item. */
void henum(_htab *ht, int (*fn) (_hitem *item, void *arg), void *arg);
/* Number of live items. */
int hcount(_htab *ht);
/* Mark an item free for recycling. */
void hfree(_htab *ht, _hitem *item);

#endif
|
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using SampleBank.Business;
using SampleBank.Core.Abstractions.Business;
using SampleBank.Core.Abstractions.Persistence;
using SampleBank.Core.Entity;
using SampleBank.Persistence;
using SampleBank.Persistence.EF;
namespace SampleBank.Application
{
public static class DependencyRegistration
{
    /// <summary>
    /// Registers the EF context, repositories, business services and
    /// supporting infrastructure with the DI container (all scoped).
    /// </summary>
    public static void ConfigureServices(IServiceCollection services)
    {
        // Persistence
        services.AddDbContext<EfContext>();
        services.AddScoped<IRepository, EfRepository>();
        // Business layer
        services.AddScoped<IBusinessUser, BusinessUser>();
        services.AddScoped<IBusinessTransaction, BusinessTransaction>();
        services.AddScoped<IBusinessAccount, BusinessAccount>();
        services.AddScoped<IBusinessCustomer, BusinessCustomer>();
        // Unit of work / context abstraction
        services.AddScoped<IUnitOfWork, EfUnitOfWork>();
        services.AddScoped<DbContext, EfContext>();
        // Entity-specific repositories
        services.AddScoped<IRepositoryUser, RepositoryUser>();
        services.AddScoped<IRepositoryAccount, RepositoryAccount>();
        services.AddScoped<IRepositoryTransaction, RepositoryTransaction>();
        services.AddScoped<IRepositoryCustomer, RepositoryCustomer>();
        // Generic repositories
        services.AddScoped<IRepositoryBase<User>, RepositoryBase<User>>();
        services.AddScoped<IRepositoryBase<Account>, RepositoryBase<Account>>();
        services.AddScoped<IRepositoryBase<Transaction>, RepositoryBase<Transaction>>();
        services.AddScoped<IRepositoryBase<Customer>, RepositoryBase<Customer>>();
        // Logging
        services.AddScoped<Core.Abstractions.Logging.ILogger, Core.Abstractions.Logging.Logger>();
    }
}
}
|
# GR workstation type 100 = headless/off-screen, so Plots renders without a display.
ENV["GKSwstype"]="100"
using Literate
using Plots
import Remark
using Remark, FileWatching
# files = filter( f -> startswith(f, "0"), readdir("src")) |> collect
# Slide sources, concatenated in presentation order into one Literate script.
files = [ "01.Introduction.jl",
          "02.RungeKuttaMethods.jl",
          "03.PoissonEquation.jl",
          "04.HOODESolver.jl",
          "05.Conclusion.jl"]
# Array interpolation in a Cmd expands to one `src/<file>` argument per entry.
run(pipeline(`cat src/$files`; stdout="slides/src/index.jl" ))
# Build the remark.js slideshow from the concatenated source.
slideshowdir = Remark.slideshow("slides",
                options = Dict("ratio" => "16:9"),
                title = "Differential equations with Julia")
# Open presentation in default browser.
# Remark.open(slideshowdir)
# Also emit a (non-executed) Jupyter notebook from the same source.
Literate.notebook("slides/src/index.jl", execute=false)
# Publish the built slides to docs/ (e.g. for GitHub Pages).
cp("slides/build", "docs", force=true)
|
# 우석대학교 컴퓨터공학과 커리큘럼
* [Web Skills](https://github.com/cbnuswoss/web-skills)를 이용하여 우리학교 CS 커리큘럼 페이지 만들기
## ✋ Team Members
* 
* 
* 
## ⚡ npm scripts
### Install
```
npm install
```
### Start Server
```
npm run s
```
## 📖 Wiki
* [Guide](https://github.com/woosuk-computer-engineering/curriculum/wiki/Guide)
## 🔗 Reference
* [Git flow](https://woowabros.github.io/experience/2017/10/30/baemin-mobile-git-branch-strategy.html)
|
from subprocess import Popen, PIPE
import sys
import os
from queue import Queue, Empty
import subprocess
import threading
import time
class LocalShell(object):
    """Run a shell command while mirroring its output and forwarding stdin.

    Output (stdout+stderr combined) is streamed byte-by-byte to sys.stdout;
    anything typed on local stdin is forwarded to the child process.
    """

    def __init__(self):
        pass

    def run(self, cmd):
        """Spawn `cmd` with shell=True and pump I/O until the child exits.

        :param cmd: command string passed to the shell.
        """
        env = os.environ.copy()
        p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=subprocess.STDOUT, shell=True, env=env)

        def writeall(proc):
            # Mirror child output until EOF (i.e. process exit).
            while True:
                data = proc.stdout.read(1).decode("utf-8")
                if not data:
                    break
                sys.stdout.write(data)
                sys.stdout.flush()

        writer = threading.Thread(target=writeall, args=(p,))
        writer.start()

        def reader(readq):
            # Forward local stdin one character at a time to the queue.
            try:
                while True:
                    d = sys.stdin.read(1)
                    if not d:
                        break
                    readq.put(d)
            except EOFError:
                pass

        readq = Queue()
        r = threading.Thread(target=reader, args=(readq,))
        r.daemon = True
        r.start()

        # Pump queued stdin to the child until its output stream closes.
        while True:
            # Fix: Thread.isAlive() was removed in Python 3.9; use is_alive().
            if not writer.is_alive():
                break
            try:
                d = readq.get(block=False)
                self._write(p, bytes(d, 'utf-8'))
            except Empty:
                # Sleep only when idle (the original slept after each
                # successful get and busy-spun when the queue was empty).
                time.sleep(0.01)

        # Drain the writer and reap the child to avoid zombies.
        writer.join()
        p.wait()

    def _write(self, process, message):
        # Send raw bytes to the child's stdin and flush immediately.
        process.stdin.write(message)
        process.stdin.flush()
# Command run by main(); joined into one string because LocalShell.run
# spawns with shell=True.
cmd = ['cookiecutter', 'source-files']
cmd = ' '.join(cmd)

def main():
    # Run cookiecutter interactively; Ctrl-C exits cleanly.
    shell = LocalShell()
    try:
        shell.run(cmd)
    except KeyboardInterrupt:
        return

if __name__ == '__main__':
    main()
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.IO;
using NAudio.Wave;
namespace AlbumRecorder {
static class Program {
    // Gracenote API client key used for album metadata lookups.
    public const string GracenoteKey = "1032852198-E5015C394A16665EE980474BCE5A113A";
    // Album currently being recorded/edited.
    public static AlbumInfo Album;
    /// <summary>
    /// The main entry point for the application.
    /// </summary>
    [STAThread]
    static void Main() {
        if (string.IsNullOrWhiteSpace(Properties.Settings.Default.MusicFolder)) {
            // Default music folder is My Music
            Properties.Settings.Default.MusicFolder = Environment.GetFolderPath(Environment.SpecialFolder.MyMusic);
            Properties.Settings.Default.Save();
        }
        if(string.IsNullOrWhiteSpace(Properties.Settings.Default.RecordingFolder)) {
            // Default Recording folder is TEMP folder
            Properties.Settings.Default.RecordingFolder = Path.GetTempPath();
            Properties.Settings.Default.Save();
        }
        Application.EnableVisualStyles();
        Application.SetCompatibleTextRenderingDefault(false);
        Application.Run(new MainForm());
    }
    /// <summary>
    /// Saved Album Info
    /// </summary>
    public static AlbumInfo CurrentState;
    /// <summary>
    /// True if Locked icon is showing - locks subsequent track lengths when moving start and end of track
    /// </summary>
    public static bool Locked = true;
    /// <summary>
    /// Helper function for tracing
    /// </summary>
    public static void Trace(string message) {
        System.Diagnostics.Trace.WriteLine(message);
    }
    /// <summary>
    /// Helper function for tracing
    /// </summary>
    public static void Trace(string format, params object[] args) {
        Trace(string.Format(format, args));
    }
    /// <summary>
    /// Convert seconds to a TimeSpan string [mm:]ss.t
    /// </summary>
    public static string ToTimeSpanString(this float self) {
        StringBuilder b = new StringBuilder();
        float s = Math.Abs(self);
        int ws = (int)Math.Floor(s);
        if (self < 0) {
            b.Append('-');
        }
        // Split whole minutes off; the remainder keeps its fractional part.
        int m = (int)ws / 60;
        s -= m * 60;
        // Minutes shown only when non-zero: "ss.t" or "m:ss.t".
        b.AppendFormat(m == 0 ? "{0:#0.0}" : "{1}:{0:00.0}", s, m);
        return b.ToString();
    }
    /// <summary>
    /// Convert TimeSpan string [mm:]ss[.t] to seconds
    /// </summary>
    public static bool TimeSpanToSeconds(this string self, ref float seconds) {
        // Group 1 = optional minutes, group 2 = seconds (with optional fraction).
        Match m = Regex.Match(self, @"^\s*-?(?:(\d+):)?(\d+(?:\.\d*)?)\s*$");
        if (m.Success) {
            try {
                float s = float.Parse(m.Groups[2].Value);
                if (!string.IsNullOrEmpty(m.Groups[1].Value))
                    s += 60 * int.Parse(m.Groups[1].Value);
                seconds = s;
                return true;
            } catch {
            }
        }
        // Returns false (leaving `seconds` untouched) on any parse failure.
        return false;
    }
    /// <summary>
    /// Convert wave file byte count to seconds
    /// </summary>
    public static float BytesToSeconds(this WaveFormat f, long bytes) {
        // bytes * 8 / BitsPerSample = total sample count across channels.
        return (float)f.SamplesToSeconds(bytes) * 8 / f.BitsPerSample;
    }
    /// <summary>
    /// Convert wave file sample count to seconds
    /// </summary>
    public static float SamplesToSeconds(this WaveFormat f, long samples) {
        return (float)samples / (f.SampleRate * f.Channels);
    }
    /// <summary>
    /// Convert seconds to wave file byte count
    /// </summary>
    public static long SecondsToBytes(this WaveFormat f, float seconds) {
        return f.SecondsToSamples(seconds) * f.BitsPerSample / 8;
    }
    /// <summary>
    /// Convert seconds to wave file sample count
    /// </summary>
    public static long SecondsToSamples(this WaveFormat f, float seconds) {
        long l = (long)(seconds * f.SampleRate * f.Channels);
        // Round down to a whole block so the offset stays frame-aligned.
        l -= l % f.BlockAlign;
        return l;
    }
    /// <summary>
    /// Parse a float value >= 0
    /// Throws if invalid
    /// </summary>
    /// <param name="desc">Description (for errors)</param>
    public static float ToFloat(this string self, string desc) {
        float r;
        try {
            r = float.Parse(self);
        } catch {
            throw new ApplicationException(desc + " '" + self + "' invalid");
        }
        if (r < 0)
            throw new ApplicationException(desc + " must be >= 0");
        return r;
    }
    /// <summary>
    /// Parse int from string. Throw if invalid.
    /// </summary>
    /// <param name="self">Input string</param>
    /// <param name="min">Min allowed value</param>
    /// <param name="desc">For errors</param>
    /// <returns></returns>
    public static int ToInt(this string self, int min, string desc) {
        int r;
        try {
            r = int.Parse(self);
        } catch {
            throw new ApplicationException(desc + " '" + self + "' invalid");
        }
        if (r < min)
            throw new ApplicationException(desc + " must be > " + (min - 1));
        return r;
    }
}
}
|
#ifndef MCL_ARMIJO_H
#define MCL_ARMIJO_H
#include "Problem.hpp"
namespace mcl {
namespace optlib {
// Backtracking-Armijo
// Backtracking line search enforcing the Armijo sufficient-decrease
// condition: accept step a when f(x + a p) <= f(x) + a * beta * grad.p.
template<typename Scalar, int DIM, typename P>
class Armijo {
public:
	typedef Eigen::Matrix<Scalar,DIM,1> VectorX;
	typedef Eigen::Matrix<Scalar,DIM,DIM> MatrixX;

	// Returns the accepted step length starting from |alpha_init|,
	// shrinking by tau each rejected trial; -1 if max_iter is exhausted.
	static Scalar linesearch(const VectorX &x, const VectorX &p, P &problem, Scalar alpha_init) {
		const Scalar shrink_factor = 0.7;       // tau
		const Scalar sufficient_decrease = 0.3; // beta
		const int max_iter = 1000000;

		VectorX grad;
		if( DIM == Eigen::Dynamic ){ grad = VectorX::Zero(x.rows()); }
		const Scalar f_x = problem.gradient(x, grad);
		const Scalar slope = sufficient_decrease * grad.dot(p);

		Scalar step = std::abs(alpha_init);
		int trial = 0;
		while( trial < max_iter ){
			if( problem.value(x + step*p) <= f_x + step*slope ){
				return step; // Armijo condition met
			}
			step *= shrink_factor;
			++trial;
		}
		printf("Armijo::linesearch Error: Reached max_iters\n");
		return -1;
	}
};
}
}
#endif
|
package dev.kord.core.behavior
import dev.kord.core.cache.data.ApplicationCommandData
import dev.kord.core.entity.application.GlobalUserCommand
import dev.kord.core.entity.application.GuildUserCommand
import dev.kord.core.entity.application.UserCommand
import dev.kord.rest.builder.interaction.UserCommandModifyBuilder
/** The behavior of a user context-menu application command. */
public interface UserCommandBehavior : ApplicationCommandBehavior {
    /** Requests an edit of this command, applying [builder]; returns the updated command. */
    public suspend fun edit(builder: suspend UserCommandModifyBuilder.() -> Unit): UserCommand
}
/** The behavior of a global user command. */
public interface GlobalUserCommandBehavior : UserCommandBehavior, GlobalApplicationCommandBehavior {
    /** Applies [builder] to this global user command and returns the updated command. */
    override suspend fun edit(builder: suspend UserCommandModifyBuilder.() -> Unit): GlobalUserCommand {
        val modified = service.modifyGlobalUserApplicationCommand(applicationId, id) { builder() }
        return GlobalUserCommand(ApplicationCommandData.from(modified), service)
    }
}
/** The behavior of a guild-scoped user command. */
public interface GuildUserCommandBehavior : UserCommandBehavior, GuildApplicationCommandBehavior {
    /** Applies [builder] to this guild user command and returns the updated command. */
    override suspend fun edit(builder: suspend UserCommandModifyBuilder.() -> Unit): GuildUserCommand {
        val modified = service.modifyGuildUserApplicationCommand(applicationId, guildId, id) { builder() }
        return GuildUserCommand(ApplicationCommandData.from(modified), service)
    }
}
|
# 
[](https://bintray.com/yuriy-budiyev/maven/code-scanner/_latestVersion)
[](https://android-arsenal.com/details/1/6095)
[](https://android-arsenal.com/api?level=19)
[](https://www.codacy.com/app/yuriy-budiyev/code-scanner?utm_source=github.com&utm_medium=referral&utm_content=yuriy-budiyev/code-scanner&utm_campaign=Badge_Grade)
Code scanner library for [Android](https://developer.android.com), based on [ZXing](https://github.com/zxing/zxing)
### Features
* Auto focus and flash light control
* Portrait and landscape screen orientations
* Back and front facing cameras
* Customizable viewfinder
* Kotlin friendly
* Touch focus
### Supported formats
| 1D product | 1D industrial | 2D
| ---------- | ------------- | --------------
| UPC-A | Code 39 | QR Code
| UPC-E | Code 93 | Data Matrix
| EAN-8 | Code 128 | Aztec
| EAN-13 | Codabar | PDF 417
| | ITF | MaxiCode
| | RSS-14 |
| | RSS-Expanded |
### Usage ([sample](https://github.com/yuriy-budiyev/lib-demo-app))
Add dependency:
```gradle
dependencies {
implementation 'com.budiyev.android:code-scanner:2.1.0'
}
```
Add camera permission to AndroidManifest.xml (Don't forget about dynamic permissions on API >= 23):
```xml
<uses-permission android:name="android.permission.CAMERA"/>
```
Define a view in your layout file:
```xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.budiyev.android.codescanner.CodeScannerView
android:id="@+id/scanner_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
app:autoFocusButtonColor="@android:color/white"
app:autoFocusButtonVisible="true"
app:flashButtonColor="@android:color/white"
app:flashButtonVisible="true"
app:frameColor="@android:color/white"
app:frameCornersSize="50dp"
app:frameCornersRadius="0dp"
app:frameAspectRatioWidth="1"
app:frameAspectRatioHeight="1"
app:frameSize="0.75"
app:frameThickness="2dp"
app:maskColor="#77000000"/>
</FrameLayout>
```
And add following code to your activity:
Kotlin
```kotlin
class MainActivity : AppCompatActivity() {
private lateinit var codeScanner: CodeScanner
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
val scannerView = findViewById<CodeScannerView>(R.id.scanner_view)
codeScanner = CodeScanner(this, scannerView)
// Parameters (default values)
codeScanner.camera = CodeScanner.CAMERA_BACK // or CAMERA_FRONT or specific camera id
codeScanner.formats = CodeScanner.ALL_FORMATS // list of type BarcodeFormat,
// ex. listOf(BarcodeFormat.QR_CODE)
codeScanner.autoFocusMode = AutoFocusMode.SAFE // or CONTINUOUS
codeScanner.scanMode = ScanMode.SINGLE // or CONTINUOUS or PREVIEW
codeScanner.isAutoFocusEnabled = true // Whether to enable auto focus or not
codeScanner.isFlashEnabled = false // Whether to enable flash or not
// Callbacks
codeScanner.decodeCallback = DecodeCallback {
runOnUiThread {
Toast.makeText(this, "Scan result: ${it.text}", Toast.LENGTH_LONG).show()
}
}
codeScanner.errorCallback = ErrorCallback { // or ErrorCallback.SUPPRESS
runOnUiThread {
Toast.makeText(this, "Camera initialization error: ${it.message}",
Toast.LENGTH_LONG).show()
}
}
scannerView.setOnClickListener {
codeScanner.startPreview()
}
}
override fun onResume() {
super.onResume()
codeScanner.startPreview()
}
override fun onPause() {
codeScanner.releaseResources()
super.onPause()
}
}
```
Java
```java
public class MainActivity extends AppCompatActivity {
private CodeScanner mCodeScanner;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
CodeScannerView scannerView = findViewById(R.id.scanner_view);
mCodeScanner = new CodeScanner(this, scannerView);
mCodeScanner.setDecodeCallback(new DecodeCallback() {
@Override
public void onDecoded(@NonNull final Result result) {
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(MainActivity.this, result.getText(), Toast.LENGTH_SHORT).show();
}
});
}
});
scannerView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mCodeScanner.startPreview();
}
});
}
@Override
protected void onResume() {
super.onResume();
mCodeScanner.startPreview();
}
@Override
protected void onPause() {
mCodeScanner.releaseResources();
super.onPause();
}
}
```
or fragment:
Kotlin
```kotlin
class MainFragment : Fragment() {
private lateinit var codeScanner: CodeScanner
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?): View? {
return inflater.inflate(R.layout.fragment_main, container, false)
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
val scannerView = view.findViewById<CodeScannerView>(R.id.scanner_view)
val activity = requireActivity()
codeScanner = CodeScanner(activity, scannerView)
codeScanner.decodeCallback = DecodeCallback {
activity.runOnUiThread {
Toast.makeText(activity, it.text, Toast.LENGTH_LONG).show()
}
}
scannerView.setOnClickListener {
codeScanner.startPreview()
}
}
override fun onResume() {
super.onResume()
codeScanner.startPreview()
}
override fun onPause() {
codeScanner.releaseResources()
super.onPause()
}
}
```
Java
```java
public class MainFragment extends Fragment {
private CodeScanner mCodeScanner;
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,
@Nullable Bundle savedInstanceState) {
final Activity activity = getActivity();
View root = inflater.inflate(R.layout.fragment_main, container, false);
CodeScannerView scannerView = root.findViewById(R.id.scanner_view);
mCodeScanner = new CodeScanner(activity, scannerView);
mCodeScanner.setDecodeCallback(new DecodeCallback() {
@Override
public void onDecoded(@NonNull final Result result) {
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(activity, result.getText(), Toast.LENGTH_SHORT).show();
}
});
}
});
scannerView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mCodeScanner.startPreview();
}
});
return root;
}
@Override
public void onResume() {
super.onResume();
mCodeScanner.startPreview();
}
@Override
public void onPause() {
mCodeScanner.releaseResources();
super.onPause();
}
}
```
### Preview

|
// test/sort/merge.test.ts
import { merge } from '../../code/sort/merge';
// Verifies merge sort: typical unsorted input, already-sorted input, and
// edge cases (empty array, single element, duplicates, reverse order).
describe('test function merge:', () => {
  test('test case nums = [3, 1, 2, 5, 4]', () => {
    expect(merge([3, 1, 2, 5, 4])).toEqual([1, 2, 3, 4, 5]);
  });
  test('test case nums = [1, 2, 3, 4, 5]', () => {
    expect(merge([1, 2, 3, 4, 5])).toEqual([1, 2, 3, 4, 5]);
  });
  test('test case nums = []', () => {
    expect(merge([])).toEqual([]);
  });
  test('test case nums = [1]', () => {
    expect(merge([1])).toEqual([1]);
  });
  test('test case nums = [2, 1, 2, 1]', () => {
    expect(merge([2, 1, 2, 1])).toEqual([1, 1, 2, 2]);
  });
  test('test case nums = [5, 4, 3, 2, 1]', () => {
    expect(merge([5, 4, 3, 2, 1])).toEqual([1, 2, 3, 4, 5]);
  });
});
|
// Doxygen-generated navigation data (member name -> anchor in a00240.html).
// Do not edit by hand; regenerate with Doxygen.
var a00240 =
[
    [ "shared_ptr", "a00240.html#abdd4f7b20903037894fc3847905214ad", null ],
    [ "Unit", "a00240.html#a33df813274f299f2d6d6e67c7e95c60f", null ],
    [ "~Unit", "a00240.html#ac5c108d61c9bc4fd86939ead503368b5", null ],
    [ "Mahalanobis", "a00240.html#a86bb771335c2071c0c764d9572866933", null ],
    [ "print", "a00240.html#aa4c0a201a843688d9d7402889c1a81f3", null ],
    [ "unwhiten", "a00240.html#a98ae8b646493766aec96e66ac66a7d1b", null ],
    [ "unwhitenInPlace", "a00240.html#a98ac92399318d2db7959e5ad84536e3f", null ],
    [ "unwhitenInPlace", "a00240.html#a4f9ee2a3a0a114aa6e836526ef8f90f0", null ],
    [ "whiten", "a00240.html#af2c33ee8aea93c2602caaf6c2b5904f2", null ],
    [ "Whiten", "a00240.html#a68a547a3474eb0666372f9334049d2ca", null ],
    [ "WhitenInPlace", "a00240.html#a258d054bf8bd4ef89bd5acf0f5623191", null ],
    [ "WhitenInPlace", "a00240.html#a333b07e2081f5ccf2bca22569079abb0", null ],
    [ "whitenInPlace", "a00240.html#a183d0ba26c61be8523dc0b813d815925", null ],
    [ "whitenInPlace", "a00240.html#a3b20f7f2094635a14cadaaf2cba016af", null ],
    [ "boost::serialization::access", "a00240.html#ac98d07dd8f7b70e16ccb9a01abf56b9c", null ]
];
|
# Test double for a Kafka producer: records produced messages per channel
# and only forwards to the real producer while stubbing is disabled.
class FakeKafkaProducer
  def initialize(real)
    @messages = {}
    @real = real
    @stub = true
  end

  # Forward messages to the real producer for the duration of the block.
  # Fix: use `ensure` so the stub flag is restored even when the block
  # raises (the original left @stub == false on error).
  def unstub
    @stub = false
    yield
  ensure
    @stub = true
  end

  def produce(message, options = {})
    topic = options[:topic]
    self.channel(topic) << message
    @real.produce(message, options) unless @stub
  end

  # Decode a raw message using the configured encoder.
  def decode(message)
    Kraken.config.encoder.decode message
  end

  # Recorded message list for a channel (created on demand).
  # Channel names are normalized to the "kraken." prefix.
  def channel(channel)
    channel = "kraken.#{channel}" unless channel.start_with? "kraken."
    @messages[channel] ||= []
  end

  def first_message(channel)
    message(channel, 0)
  end

  # Decoded message at `index` with the internal timestamp stripped,
  # or nil when absent.
  def message(channel, index)
    msg = self.channel(channel)[index]
    return nil if msg.nil?
    m = decode(msg).with_indifferent_access
    m.delete :_timestamp
    m
  end

  # No-op: the fake never buffers deliveries.
  def deliver_messages
  end
end
# Shared context for worker specs: swaps the real Kafka producer for the
# in-memory fake so produced messages can be inspected via `kafka`.
RSpec.shared_context "kafka stub", type: :worker do
  let(:kafka) { FakeKafkaProducer.new Kraken.config.kafka_producer }
  before(:each) do |example|
    allow(Kraken.config).to receive(:kafka_producer).and_return(kafka)
  end
end
|
#!/bin/bash
# Container entrypoint: randomize root's password, migrate Acronis state to
# the persistent /data volume, start the services, and wait for signals.

# Throwaway root password derived from the current timestamp.
root_password=`date +%s | sha1sum | head -c 12 ; echo`
echo "Updating root's password to ${root_password}."
echo "root:${root_password}" | chpasswd
echo "Removing IPv6 localhost from /etc/hosts."
# Sed can't always modify this thing in place?
sed -e 's/localhost ip6-localhost/ip6-localhost/g' /etc/hosts > /etc/hosts.tmp
cat /etc/hosts.tmp > /etc/hosts
rm /etc/hosts.tmp
echo "Ensuring data directories exist."
# Move Acronis state onto /data and symlink it back into /var/lib.
mkdir -p /data/Acronis
rsync -a /var/lib/Acronis /data/
rm -r /var/lib/Acronis
ln -sfn /data/Acronis /var/lib/
echo "Starting Acronis via init."
/etc/init.d/acronis_agent start
/etc/init.d/acronis_ams start
/etc/init.d/acronis_asm start
/etc/init.d/acronis_zmqgw start
# Graceful shutdown on container stop signals.
trap 'killall' INT TERM
# NOTE(review): this function shadows the system `killall` command — consider renaming.
killall() {
    # https://unix.stackexchange.com/a/55922/107654
    echo "Shutting down Acronis via init."
    /etc/init.d/acronis_agent stop
    /etc/init.d/acronis_ams stop
    /etc/init.d/acronis_asm stop
    /etc/init.d/acronis_zmqgw stop
    echo "DONE"
    exit 0
}
trap 'err_report $LINENO' ERR
err_report() {
    # "occured" typo is user-visible output; left unchanged in this doc-only edit.
    echo "An error occured on line $1"
    exit 1
}
echo "Waiting..."
# Block forever (interruptibly) so the traps can fire on container stop.
sleep infinity & wait
|
module Mailflow
  class << self
    # Toggles test behavior for the library.
    attr_accessor :test_mode
  end

  class Client
    include Mailflow::APIOperations

    # Pings the API's `test` endpoint; returns a hash with the HTTP status.
    def self.test
      { status: get_request('test').code }
    end
  end
end
|
package modules
import org.springframework.stereotype.Component
@Component
class MyBeanB {
  // Simple greeting used to demonstrate Spring bean injection.
  def getMessage: String = "I am a message from a Spring Bean"
}
|
'use strict';
/*
* nodejs-express-mongoose
* Copyright(c) 2015 Madhusudhan Srinivasa <madhums8@gmail.com>
* MIT Licensed
*/
/**
* Module dependencies
*/
require('dotenv').config();
const fs = require('fs');
const join = require('path').join;
const express = require('express');
const passport = require('passport');
const compression = require('compression');
const socketIO = require('socket.io');
const https = require('https');
const bodyParser = require('body-parser');
const config = join(__dirname, 'config/');
const routes = join(__dirname, 'routes/');
const MulterImpl = require(config+'multerImpl');
//DB CONNECTION
// var r = require('rethinkdbdash')({
// port : '28015',
// host : 'localhost',
// db : 'hambasafe',
// })();
// HTTPS port (defaults to 3000).
const port = process.env.PORT || 3000;
//XHR SUPPORT
var cors = require('cors');
//Cookie Parser (Obv's)
var cookieParser = require('cookie-parser')
var session = require('express-session'); // NOTE(review): required but never used below — confirm.
//Logging
var logger = require('morgan'); // NOTE(review): required but never wired into the app — confirm.
var app = new (express)();
// TLS material for the HTTPS server.
var options = {
    key : fs.readFileSync(config + 'certificates/key.pem'),
    cert : fs.readFileSync(config + 'certificates/cert.pem'),
};
//
//Gzip
app.use(compression());
//body field in post
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended: false}));
app.use(cookieParser());
// NOTE(review): hard-coded signing secret in source — move to an environment variable.
app.set('webSecret', 'H3ll0w0rld');
//ALLOW ORIGIN
app.use(cors());
// require(path.join(__dirname, 'server_utils/startUp.js'))(r);
// File-upload middleware instance shared with the routes.
const upload = new MulterImpl({}).init();
var server = https.createServer(options, app)
const io = socketIO(server);
//Require api routes
require(
    join(routes,'index.js')
)(app, io, upload);
app.use('/public', express.static(join(__dirname, 'public')));
// Catch-all: serve the SPA shell for any remaining path.
app.use("/", function(req, res) {
    console.log('sending index')
    res.sendFile(__dirname + '/index.html')
})
// require(
// './startUp.js'
// )(r);
/**
 * Expose
 */
// module.exports = {
// app,
// };
server.listen(port, function(error) {
    if (error) {
        console.error(error)
    } else {
        console.info("==> Listening on port %s. Open up https://localhost:%s/ in your browser.", port, port)
    }
});
|
<?php
// Directory where uploaded files are stored, relative to this script.
// (`<?php` replaces the short open tag, which requires short_open_tag=On.)
$save_dir = "./upload";
// File-upload helper.
// Saves a $_FILES entry ($file) into $save_dir under a collision-free random
// name; $limit_file_size is the maximum allowed size in bytes.
// Returns "storedname.ext||originalname" for DB storage, or terminates via
// MsgBox() on any validation/IO failure (MsgBox never returns).
function upload(&$file, $limit_file_size)
{
    global $save_dir;
    // Extensions that must never be stored (could execute server-side).
    $ban_ext = array('php','php3','html','htm','cgi','pl');
    // Reject files above the configured size limit.
    // (Bug fix: was $file[size] — a bare-word constant, not the 'size' key.)
    if ($file['size'] > $limit_file_size)
    {
        // Render the limit with a human-readable unit for the error message.
        $unit = Array("Bytes", "KB", "MB", "GB");
        for ($i = 0; $limit_file_size >= 1024; $limit_file_size >>= 10, $i++);
        $file_size = sprintf("%d $unit[$i]", $limit_file_size);
        MsgBox("업로드 파일 크기 제한 : $file_size");
    }
    // Validate the lower-cased extension against the blacklist.
    $temp_name = explode(".", $file['name']);
    $ext = strtolower($temp_name[sizeof($temp_name)-1]);
    if (in_array($ext, $ban_ext)) {
        MsgBox("업로드가 불가능한 확장자입니다.");
    }
    // Build a practically unique stored name: unix time + 5 random digits.
    // (mt_rand is auto-seeded on modern PHP; the explicit mt_srand was removed.)
    $new_file_name = time() . mt_rand(10000, 99999);
    $file_name = $new_file_name . '.' . $ext;
    $file_name_db = $file_name . '||' . $file['name']; // "stored||original"
    // Move the temp file into the upload directory.
    if (move_uploaded_file($file['tmp_name'], $save_dir . '/' . $file_name)) {
        // The temp file is gone after a successful move; nothing to unlink.
        return $file_name_db;
    }
    @unlink($file['tmp_name']);
    MsgBox("업로드 과정에서 에러가 발생하였습니다.");
}
// Streams a stored file to the browser as a download.
// $file is the DB value "storedname||originalname".
function download($file)
{
    global $save_dir;
    $temp_name = explode('||', $file);
    $save_file_name = $temp_name[0];
    $original_file_name = $temp_name[1];
    // Path-traversal guard.  (Bug fix: eregi() was removed in PHP 7;
    // preg_match with the same "../" or "//" pattern replaces it.)
    if (preg_match('#\.\./|//#', $save_file_name)) MsgBox("몬때따~ 참말로 몬때따~");
    // Bug fix: $HTTP_USER_AGENT relied on register_globals (removed long ago)
    // and was always empty; read the superglobal instead.
    $agent = isset($_SERVER['HTTP_USER_AGENT']) ? $_SERVER['HTTP_USER_AGENT'] : '';
    // Legacy per-IE-version header quirks, preserved as-is.
    if (strstr($agent, "MSIE 6.")) {
        header("Content-type: application/octetstream");
        header("Content-disposition: filename=$original_file_name");
        header("Content-Length: ".filesize($save_dir.'/'.$save_file_name));
    } else if (strstr($agent, "MSIE 5.5")) {
        header("Content-Type: doesn/matter");
        header("Content-disposition: filename=$original_file_name");
        header("Content-Transfer-Encoding: binary");
        header("Pragma: no-cache");
        header("Expires: 0");
    } else if (strstr($agent, "MSIE 5.0")) {
        Header("Content-type: file/unknown");
        header("Content-Disposition: attachment; filename=$original_file_name");
        Header("Content-Description: PHP3 Generated Data");
        header("Pragma: no-cache");
        header("Expires: 0");
    } else {
        Header("Content-type: file/unknown");
        header("Content-Disposition: attachment; filename=$original_file_name");
        Header("Content-Description: PHP3 Generated Data");
        header("Pragma: no-cache");
        header("Expires: 0");
    }
    if (is_file($save_dir . '/' . $save_file_name)) {
        $fp = fopen($save_dir . '/' . $save_file_name, "r");
        fpassthru($fp);
        // Bug fix: the original only closed the handle when fpassthru
        // FAILED, leaking it on every successful download.
        fclose($fp);
    } else MsgBox("파일이 존재하지 않습니다.");
}
// Deletes a stored file given its DB value "storedname||originalname".
// Silently returns on a path-traversal attempt or if the file is absent.
function del_file($file)
{
    global $save_dir;
    $temp_name = explode('||', $file);
    $save_file_name = $temp_name[0];
    // Path-traversal guard.  (Bug fix: preg_match replaces the removed eregi().)
    if (preg_match('#\.\./|//#', $save_file_name)) return;
    $path = $save_dir . '/' . $save_file_name;
    if (file_exists($path)) {
        if (!(@unlink($path))) MsgBox("파일을 삭제하는데 실패하였습니다.");
    }
}
// Shows a JS alert with $msg, navigates back one page, then stops the script.
function MsgBox($msg)
{
    // Security fix: json_encode produces a safely quoted/escaped JS string
    // literal, so quotes or </script> in $msg cannot inject script.
    $js = json_encode((string)$msg, JSON_UNESCAPED_UNICODE);
    echo "<script> alert($js); history.back(); </script>";
    exit;
}
?>
|
package main
import (
"bytes"
"context"
_ "embed"
"encoding/json"
"errors"
"fmt"
"image"
"image/jpeg"
"image/png"
"io"
"net/http"
"os"
"strconv"
"strings"
"sync"
"time"
"github.com/kelseyhightower/envconfig"
"github.com/nfnt/resize"
)
// Config struct. Main configuration options, populated from the environment
// via envconfig (see main).
type Config struct {
	CAcrtFile       string `envconfig:"LDAP_SSL_CACERT_FILE"`
	LdapServerFQDN  string `envconfig:"LDAP_SERVER_FQDN" required:"true"`
	LdapPort        int    `envconfig:"LDAP_PORT" default:"636"`
	LdapSSL         bool   `envconfig:"LDAP_SSL" default:"true"`
	LdapTLS         bool   `envconfig:"LDAP_TLS" default:"false"`
	LdapVerifyCert  bool   `envconfig:"LDAP_VERIFY_CERT" default:"true"`
	LdapBindUser    string `envconfig:"LDAP_BIND_USER" required:"true"`
	LdapBindPasswd  bool   `envconfig:"LDAP_BIND_PASSWORD" required:"true"`
	LdapUserBase    string `envconfig:"LDAP_USER_BASE" required:"true"`
	LdapUserFilter  string `envconfig:"LDAP_USER_FILTER" default:"(objectclass=inetOrgPerson)"`
	LdapAvatarAttr  string `envconfig:"LDAP_AVATAR_ATTRIBUTE" default:"jpegPhoto"`
	LdapEmailAttr   string `envconfig:"LDAP_EMAIL_ATTRIBUTE" default:"mail"`
	GravatarEnabled bool   `envconfig:"GRAVATAR_ENABLED" default:"false"`
	GravatarURL     string `envconfig:"GRAVATAR_URL" default:"https://secure.gravatar.com/avatar"`
}
// Service struct. Main HTTP service; Running is closed once Run() starts.
type Service struct {
	httpServer *http.Server
	Running    chan struct{}
}
// Avatar element: raw image bytes plus the time they were cached.
type Avatar struct {
	Image      []byte
	LastUpdate time.Time
}
const (
	contentType         = "Content-Type"
	frameOptionsHeader  = "X-Frame-Options"
	frameOptionsValue   = "DENY"
	xssProtectionHeader = "X-XSS-Protection"
	xssProtectionValue  = "1; mode=block"
	serverPort          = ":8080"
)
var (
	cfg Config
	//go:embed media/default.jpg
	defaultAvatarImg []byte
	// Fallback avatar served when neither LDAP nor Gravatar has an image.
	defaultAvatar = Avatar{Image: defaultAvatarImg}
	// hs caches avatars keyed by email hash; all access goes through
	// hsGet/hsWrite/hsDelete, which guard it with lock.
	hs   map[string]Avatar
	lock = sync.RWMutex{}
	// maxTime is the cache TTL; assigned in main().
	maxTime   time.Duration
	pkgVersion string = ""
	epoch      = time.Unix(0, 0).Format(time.RFC1123)
)
// Headers that tell clients and proxies never to cache a response.
var noCacheHeaders = map[string]string{
	"Expires":         epoch,
	"Cache-Control":   "no-cache, no-store, no-transform, must-revalidate, private, max-age=0",
	"Pragma":          "no-cache",
	"X-Accel-Expires": "0",
}
func panicIf(err error, what ...string) {
if err != nil {
if len(what) == 0 {
panic(err)
}
panic(errors.New(err.Error() + (" " + what[0])))
}
}
// writeNoCacheHeaders stamps the response with every anti-caching header.
func writeNoCacheHeaders(w http.ResponseWriter) {
	h := w.Header()
	for name, value := range noCacheHeaders {
		h.Set(name, value)
	}
}
// writeSecurityHeaders adds the clickjacking (X-Frame-Options) and XSS-filter
// protection headers to the response.
func writeSecurityHeaders(w http.ResponseWriter) {
	h := w.Header()
	h.Set(frameOptionsHeader, frameOptionsValue)
	h.Set(xssProtectionHeader, xssProtectionValue)
}
// NewService wires the HTTP routes onto a fresh mux and returns an
// unstarted service listening address serverPort.
func NewService() *Service {
	mux := http.NewServeMux()
	routes := map[string]http.HandlerFunc{
		"/version": versionHandler,
		"/healthz": healthzHandler,
		"/avatar/": avatarHandler,
	}
	for path, handler := range routes {
		mux.HandleFunc(path, handler)
	}
	return &Service{
		httpServer: &http.Server{Addr: serverPort, Handler: mux},
		Running:    make(chan struct{}),
	}
}
// Run signals readiness by closing s.Running, then serves until the listener
// stops.  A graceful http.ErrServerClosed is not reported as a failure.
func (s *Service) Run() error {
	close(s.Running)
	err := s.httpServer.ListenAndServe()
	if err == http.ErrServerClosed {
		err = nil
	}
	return err
}
// Shutdown gracefully stops the HTTP server, draining in-flight requests.
func (s *Service) Shutdown() error {
	ctx := context.TODO()
	return s.httpServer.Shutdown(ctx)
}
// main loads configuration from the environment, pre-populates the avatar
// cache from LDAP, and serves HTTP until the listener fails.
func main() {
	// Cache entries are considered stale after 30 minutes.  Using the typed
	// constant replaces the original ParseDuration call whose error was
	// silently discarded.
	maxTime = 30 * time.Minute
	err := envconfig.Process("", &cfg)
	panicIf(err, "while reading configuration")
	hs = make(map[string]Avatar)
	FillHash()
	svc := NewService()
	if err := svc.Run(); err != nil {
		fmt.Fprintln(os.Stderr, err.Error())
	}
}
// versionHandler reports the build version as JSON: {"version": "..."}.
// Any panic in the handler is logged instead of crashing the server.
func versionHandler(w http.ResponseWriter, r *http.Request) {
	defer func() {
		if rec := recover(); rec != nil {
			fmt.Fprintln(os.Stderr, rec)
		}
	}()
	writeNoCacheHeaders(w)
	writeSecurityHeaders(w)
	w.Header().Set(contentType, "application/json; charset=utf-8")
	payload := map[string]string{"version": pkgVersion}
	if err := json.NewEncoder(w).Encode(payload); err != nil {
		fmt.Fprintln(os.Stderr, err.Error())
	}
}
// healthzHandler is the liveness probe: always answers plain-text "OK".
func healthzHandler(w http.ResponseWriter, r *http.Request) {
	defer func() {
		if rec := recover(); rec != nil {
			fmt.Fprintln(os.Stderr, rec)
		}
	}()
	writeNoCacheHeaders(w)
	w.Header().Set(contentType, "text/plain")
	if _, err := io.WriteString(w, "OK"); err != nil {
		fmt.Fprintln(os.Stderr, err.Error())
	}
}
// hsGet returns the cached avatar for key k (zero Avatar when absent).
func hsGet(k string) Avatar {
	lock.RLock()
	av := hs[k]
	lock.RUnlock()
	return av
}
// hsWrite stores av under key k, replacing any existing entry.
func hsWrite(k string, av Avatar) {
	lock.Lock()
	hs[k] = av
	lock.Unlock()
}
// hsDelete removes the cache entry for key k (no-op if absent).
func hsDelete(k string) {
	lock.Lock()
	delete(hs, k)
	lock.Unlock()
}
// PruneHash function. Deletes hash map elements if they are outdated
func PruneHash() {
for h := range hs {
av := hsGet(h)
if len(av.Image) > 0 && time.Since(av.LastUpdate) > maxTime {
fmt.Fprintln(os.Stderr, h+" × cache")
hsDelete(h)
}
}
}
// getAvatar resolves the avatar for email hash h.
//
// Lookup order: fresh in-memory cache entry -> LDAP refresh (FillHash) ->
// Gravatar (when enabled) -> built-in default image.
//
// Fix vs. the original: the Gravatar response body is now closed on every
// path; non-200 responses previously leaked the body/connection.
func getAvatar(h string) Avatar {
	av := hsGet(h)
	if len(av.Image) > 0 && time.Since(av.LastUpdate) <= maxTime {
		fmt.Fprintln(os.Stderr, h+" → cached")
		return av
	}
	// Cache miss or stale entry: drop old entries and refresh from LDAP.
	PruneHash()
	FillHash()
	if av = hsGet(h); len(av.Image) > 0 {
		fmt.Fprintln(os.Stderr, h+" → cached")
		return av
	}
	fmt.Fprintln(os.Stderr, h+" × LDAP")
	if cfg.GravatarEnabled {
		// d=404 makes Gravatar answer 404 instead of a generated image.
		res, err := http.Get(cfg.GravatarURL + "/" + h + "?s=490&d=404")
		if err == nil {
			body, readErr := io.ReadAll(res.Body)
			res.Body.Close() // always close, even on non-200 status
			if readErr == nil && res.StatusCode == http.StatusOK {
				hsWrite(h, Avatar{
					Image:      body,
					LastUpdate: time.Now(),
				})
				fmt.Fprintln(os.Stderr, h+" → Gravatar")
				return hsGet(h)
			}
		}
		fmt.Fprintln(os.Stderr, h+" × Gravatar")
	}
	fmt.Fprintln(os.Stderr, h+" → default")
	return defaultAvatar
}
func encodeAvatar(img image.Image, format string) ([]byte, error) {
var err error
buf := new(bytes.Buffer)
switch {
case format == "jpeg":
err = jpeg.Encode(buf, img, &jpeg.Options{Quality: 90})
case format == "png":
err = png.Encode(buf, img)
}
return buf.Bytes(), err
}
// avatarHandler serves /avatar/<hash>[.ext]?s=<size>.
// It resolves the avatar via getAvatar, resizes it to the requested width
// (default 80px, aspect ratio preserved), re-encodes in the source format,
// and writes it out.  Panics (via panicIf) are logged, not fatal.
func avatarHandler(w http.ResponseWriter, r *http.Request) {
	defer func() {
		if r := recover(); r != nil {
			fmt.Fprintln(os.Stderr, r)
		}
	}()
	// read request body
	// Drains the body; the content itself is unused.
	_, err := io.ReadAll(r.Body)
	panicIf(err, "while reading request body")
	// Accept either ?s= or ?size= for the requested pixel width.
	q := r.URL.Query()
	size := 80
	qSize := ""
	if s, ok := q["s"]; ok {
		qSize = s[0]
	} else if s, ok := q["size"]; ok {
		qSize = s[0]
	}
	if s, err := strconv.Atoi(qSize); err == nil {
		size = s
	}
	var (
		resizedImg    image.Image
		resizedAvatar []byte
		avatar        Avatar
	)
	// Path element after "/avatar/", with any file extension stripped.
	// NOTE(review): assumes the path has at least two slashes — a bare
	// "/avatar" request would panic on the index; confirm routing guarantees.
	hash := strings.Split(strings.Split(r.URL.Path, "/")[2], ".")[0]
	avatar = getAvatar(hash)
	buf := bytes.NewBuffer(avatar.Image)
	img, imgFormat, err := image.Decode(buf)
	panicIf(err, "while decoding avatar")
	// Height 0 lets resize keep the aspect ratio.
	resizedImg = resize.Resize(uint(size), 0, img, resize.Lanczos3)
	resizedAvatar, err = encodeAvatar(resizedImg, imgFormat)
	panicIf(err, "while encoding image")
	w.Header().Set(contentType, "image/"+imgFormat)
	w.Header().Set("Content-Length", strconv.Itoa(len(resizedAvatar)))
	if _, err := w.Write(resizedAvatar); err != nil {
		fmt.Fprintln(os.Stderr, err.Error())
	}
}
|
package org.mcdh.jda
import org.jetbrains.java.decompiler.main.decompiler.BaseDecompiler
import java.io.File
/**
 * Wraps the FernFlower [BaseDecompiler] to decompile a single class file.
 */
class Decompiler @JvmOverloads constructor(private val proxy: JavaDecompileProxy = JavaDecompileProxy()) {
    /**
     * Decompiles the class file at [path], including every companion
     * `Outer$...` inner/anonymous class file found under the same directory
     * tree, and returns the decompiled source text.
     */
    fun decompile(path: String): String {
        val target = File(path)
        val sources = mutableMapOf(sanitize(path) to target)
        // Walk the parent directory tree, collecting regular files.
        val pending = mutableListOf<File>(target.parentFile)
        val regularFiles = mutableListOf<File>()
        while (pending.isNotEmpty()) {
            val current = pending.removeAt(pending.size - 1)
            if (current.isDirectory) {
                // Bug fix: listFiles() returns null on I/O error; the
                // original dereferenced it unconditionally (NPE).
                current.listFiles()?.let { pending.addAll(it) }
            } else {
                regularFiles.add(current)
            }
        }
        // Keep only the target's inner-class files: "Name$*.class".
        regularFiles.asSequence()
            .filter { it.nameWithoutExtension.startsWith(target.nameWithoutExtension + '$', false) }
            .filter { it.extension.equals("class", true) }
            .forEach { sources[it.absolutePath] = it }
        // Construct options map.  Line mappings ("bsm") are currently always
        // enabled; TODO expose a switch for __dump_original_lines__ instead.
        val options = mutableMapOf<String, Any>()
        options.putAll(proxy.options)
        options["bsm"] = "1"
        // Decompilation
        val provider = BytecodeProvider(sources)
        val decompiler = BaseDecompiler(provider, proxy.saver, options, proxy.logger)
        proxy.classpath.forEach {
            val lib = File(it)
            // Bug fix: parenthesized the condition — the original
            // `exists && class || jar` added nonexistent jar paths.
            if (lib.exists() && (lib.extension.equals("class") || lib.extension.equals("jar"))) {
                decompiler.addLibrary(lib)
            }
        }
        sources.keys.forEach { decompiler.addSource(File(it)) }
        decompiler.decompileContext()
        return proxy.saver.getResult()
    }
}
|
---
sidebar:
title: "Algorithm"
nav: sidebar-algorithm
icon: "fas fa-calculator"
title: "[Swift] 수박수박수박수박수박수?"
toc: true
toc_sticky: true
toc_label: 목차
tag: "Programers level1"
depth:
- title: "Algorithm"
url: /algorithm/
icon: "fas fa-calculator"
- title: "Programers level1"
url: /algorithm/programers-level1/
icon: "far fa-folder-open"
---
길이가 n이고, "수박수박수박수...."와 같은 패턴을 유지하는 문자열을 리턴하는 함수, solution을 완성하세요. 예를들어 n이 4이면 "수박수박"을 리턴하고 3이라면 "수박수"를 리턴하면 됩니다.
제한 조건
n은 길이 10,000이하인 자연수입니다.
{: .notice--info}
## Example
```swift
Input: 3
Output: "수박수"
```
```swift
Input: 4
Output: "수박수박"
```
[<i class="fas fa-link"></i> 풀러가기](https://programmers.co.kr/learn/courses/30/lessons/12922)
## 코드
```swift
func solution(_ n:Int) -> String {
return (0 ..< n).map { $0 % 2 == 0 ? "수" : "박" }.reduce("", +)
}
```
|
/**
 * Session reducer.
 *
 * State shape: { user, posts, friends, token }.  The default state is
 * hydrated from localStorage so a page reload keeps the session.
 */
export default function reducer(state = {
  user: JSON.parse(localStorage.getItem('user_info')) || {},
  posts: JSON.parse(localStorage.getItem('posts')) || {},
  friends: JSON.parse(localStorage.getItem('friends')) || {},
  token: localStorage.getItem('id_token') || null,
}, action) {
  switch (action.type) {
    case "LOGGED_IN": {
      // Persist the session payload so it survives reloads.
      localStorage.setItem('user_info', JSON.stringify(action.payload.user));
      localStorage.setItem('posts', JSON.stringify(action.payload.posts));
      localStorage.setItem('friends', JSON.stringify(action.payload.friends));
      return {
        ...state,
        posts: action.payload.posts,
        friends: action.payload.friends,
        user: action.payload.user,
        token: localStorage.getItem('id_token'),
        isfetched: true,
      };
    }
    case "LOGGED_OUT": {
      localStorage.removeItem('user_info');
      localStorage.removeItem('posts');
      localStorage.removeItem('friends');
      // NOTE(review): posts/friends stay in memory after logout — confirm
      // whether they should also be reset here.
      return {
        ...state,
        user: null,
        token: null,
      };
    }
    //friend list will be updated
    case "UPDATE_FRIENDS": {
      return {
        ...state,
        friends: action.payload,
      };
    }
    case "UPDATE_PROFILE": {
      // setItem overwrites, so the preceding removeItem was redundant.
      localStorage.setItem('user_info', JSON.stringify(action.payload.user));
      return {
        ...state,
        // Bug fix: the original only updated localStorage and returned the
        // unchanged state, so the UI kept showing stale profile data.
        user: action.payload.user,
      };
    }
  }
  return state;
}
|
module.exports = function(req, res, next){
const username = req.body.username;
const password = req.body.password;
if (!username || !password) {
return res.status(400).json(
"username and password required"
)
} else {
next()
}
}
|
import React, { useEffect } from 'react';
import Worker from './test.worker.js'; // eslint-disable-line
const videoUrl = '';
let data;
// Spins up the ffmpeg-style web worker and wires its message protocol.
// Messages carry a `type` field: 'ready' (worker loaded — send the command),
// 'stdout' (append output), 'start' (command acknowledged).
const initWorker = async () => {
  const worker = new Worker();
  // Placeholder sink for worker output (stands in for a DOM element).
  const outputElement = {};
  worker.onmessage = (event) => {
    const message = event.data;
    console.log('主线程接收:', event);
    switch (message.type) {
      case 'ready':
        outputElement.textContent = 'Loaded';
        worker.postMessage({
          files: [{
            name: 'video',
            data
          }],
          type: 'command',
          arguments: ['-help']
        });
        break;
      case 'stdout':
        outputElement.textContent += message.data + '\n';
        break;
      case 'start':
        outputElement.textContent = 'Worker has received command\n';
        break;
      default:
        break;
    }
    console.log('result:', outputElement);
  };
  // worker.postMessage('aa');
  // console.log(222222, worker);
};
const Index = () => {
useEffect(() => {
const getData = async () => {
const res = await fetch(videoUrl, {
mode: 'no-cors',
withCredentials: true
});
data = await res.arrayBuffer(res);
console.log(1111111, data);
};
getData();
}, []);
const startPlay = () => {
initWorker();
};
return <div>
<div>hehehe</div>
<button onClick={startPlay}>开始转换</button>
</div>;
};
export default Index;
|
# Chef cookbook metadata for the "manta" cookbook.
name "manta"
maintainer "Wanelo, Inc"
maintainer_email "ops@wanelo.com"
license "MIT"
description "Installs/Configures manta"
# The long description is read straight from the cookbook's README.
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.0.5"
# Runtime cookbook dependencies.
depends "nodejs"
depends "npm"
# Platforms this cookbook supports.
supports "smartos"
supports "ubuntu"
|
/*-
* Copyright (c) 2003-2004 Tim Kientzle
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer
* in this position and unchanged.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR(S) ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "archive_platform.h"
__FBSDID("$FreeBSD: src/lib/libarchive/archive_read_support_format_tar.c,v 1.39 2005/11/08 03:52:42 kientzle Exp $");
#include <sys/stat.h>
#ifdef MAJOR_IN_MKDEV
#include <sys/mkdev.h>
#else
#ifdef MAJOR_IN_SYSMACROS
#include <sys/sysmacros.h>
#endif
#endif
#include <errno.h>
#include <stddef.h>
/* #include <stdint.h> */ /* See archive_platform.h */
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
/* Obtain suitable wide-character manipulation functions. */
#ifdef HAVE_WCHAR_H
#include <wchar.h>
#else
/* Minimal wcscmp fallback for systems without <wchar.h>:
 * lexicographic compare, returning <0, 0, or >0. */
static int wcscmp(const wchar_t *s1, const wchar_t *s2)
{
	for (;;) {
		int d = (int)*s1 - (int)*s2;
		if (d != 0 || *s1 == L'\0')
			return d;
		s1++;
		s2++;
	}
}
/* Minimal wcslen fallback: count characters before the terminator. */
static size_t wcslen(const wchar_t *s)
{
	size_t n = 0;
	while (s[n] != L'\0')
		n++;
	return n;
}
#endif
#include "archive.h"
#include "archive_entry.h"
#include "archive_private.h"
/*
 * Layout of POSIX 'ustar' tar header.
 * All numeric fields are octal ASCII; see tar_atol for parsing.
 */
struct archive_entry_header_ustar {
	char	name[100];
	char	mode[8];
	char	uid[8];
	char	gid[8];
	char	size[12];
	char	mtime[12];
	char	checksum[8];
	char	typeflag[1];
	char	linkname[100];	/* "old format" header ends here */
	char	magic[6];	/* For POSIX: "ustar\0" */
	char	version[2];	/* For POSIX: "00" */
	char	uname[32];
	char	gname[32];
	char	rdevmajor[8];
	char	rdevminor[8];
	char	prefix[155];
};
/*
 * Structure of GNU tar header
 */
/* One sparse-region descriptor: where the data goes and how much there is. */
struct gnu_sparse {
	char	offset[12];
	char	numbytes[12];
};
struct archive_entry_header_gnutar {
	char	name[100];
	char	mode[8];
	char	uid[8];
	char	gid[8];
	char	size[12];
	char	mtime[12];
	char	checksum[8];
	char	typeflag[1];
	char	linkname[100];
	char	magic[8];  /* "ustar  \0" (note blank/blank/null at end) */
	char	uname[32];
	char	gname[32];
	char	rdevmajor[8];
	char	rdevminor[8];
	char	atime[12];
	char	ctime[12];
	char	offset[12];
	char	longnames[4];
	char	unused[1];
	struct gnu_sparse sparse[4];
	/* Non-zero if additional sparse records follow in extension headers. */
	char	isextended[1];
	char	realsize[12];
	/*
	 * GNU doesn't use POSIX 'prefix' field; they use the 'L' (longname)
	 * entry instead.
	 */
};
/*
 * Data specific to this format.
 */
/* Singly-linked list node describing one sparse region of the entry. */
struct sparse_block {
	struct sparse_block	*next;
	off_t	offset;
	off_t	remaining;
};
/* Per-archive parser state; allocated in archive_read_support_format_tar
 * and released in archive_read_format_tar_cleanup. */
struct tar {
	struct archive_string	 acl_text;
	struct archive_string	 entry_name;
	struct archive_string	 entry_linkname;
	struct archive_string	 entry_uname;
	struct archive_string	 entry_gname;
	struct archive_string	 longlink;
	struct archive_string	 longname;
	struct archive_string	 pax_header;
	struct archive_string	 pax_global;
	wchar_t 		*pax_entry;
	size_t			 pax_entry_length;
	/* Guards against unbounded chains of special headers. */
	int			 header_recursion_depth;
	off_t			 entry_bytes_remaining;
	off_t			 entry_offset;
	off_t			 entry_padding;
	struct sparse_block	*sparse_list;
};
static size_t UTF8_mbrtowc(wchar_t *pwc, const char *s, size_t n);
static int archive_block_is_null(const unsigned char *p);
static int gnu_read_sparse_data(struct archive *, struct tar *,
const struct archive_entry_header_gnutar *header);
static void gnu_parse_sparse_data(struct archive *, struct tar *,
const struct gnu_sparse *sparse, int length);
static int header_Solaris_ACL(struct archive *, struct tar *,
struct archive_entry *, struct stat *, const void *);
static int header_common(struct archive *, struct tar *,
struct archive_entry *, struct stat *, const void *);
static int header_old_tar(struct archive *, struct tar *,
struct archive_entry *, struct stat *, const void *);
static int header_pax_extensions(struct archive *, struct tar *,
struct archive_entry *, struct stat *, const void *);
static int header_pax_global(struct archive *, struct tar *,
struct archive_entry *, struct stat *, const void *h);
static int header_longlink(struct archive *, struct tar *,
struct archive_entry *, struct stat *, const void *h);
static int header_longname(struct archive *, struct tar *,
struct archive_entry *, struct stat *, const void *h);
static int header_volume(struct archive *, struct tar *,
struct archive_entry *, struct stat *, const void *h);
static int header_ustar(struct archive *, struct tar *,
struct archive_entry *, struct stat *, const void *h);
static int header_gnutar(struct archive *, struct tar *,
struct archive_entry *, struct stat *, const void *h);
static int archive_read_format_tar_bid(struct archive *);
static int archive_read_format_tar_cleanup(struct archive *);
static int archive_read_format_tar_read_data(struct archive *a,
const void **buff, size_t *size, off_t *offset);
static int archive_read_format_tar_read_header(struct archive *,
struct archive_entry *);
static int checksum(struct archive *, const void *);
static int pax_attribute(struct archive_entry *, struct stat *,
wchar_t *key, wchar_t *value);
static int pax_header(struct archive *, struct tar *,
struct archive_entry *, struct stat *, char *attr);
static void pax_time(const wchar_t *, int64_t *sec, long *nanos);
static int read_body_to_string(struct archive *, struct tar *,
struct archive_string *, const void *h);
static int64_t tar_atol(const char *, unsigned);
static int64_t tar_atol10(const wchar_t *, unsigned);
static int64_t tar_atol256(const char *, unsigned);
static int64_t tar_atol8(const char *, unsigned);
static int tar_read_header(struct archive *, struct tar *,
struct archive_entry *, struct stat *);
static int utf8_decode(wchar_t *, const char *, size_t length);
/*
 * ANSI C99 defines constants for these, but not everyone supports
 * those constants, so I define a couple of static variables here and
 * compute the values. These calculations should be portable to any
 * 2s-complement architecture.
 */
#ifdef UINT64_MAX
static const uint64_t max_uint64 = UINT64_MAX;
#else
/* All bits set is the largest unsigned 64-bit value. */
static const uint64_t max_uint64 = ~(uint64_t)0;
#endif
#ifdef INT64_MAX
static const int64_t max_int64 = INT64_MAX;
#else
/* Unsigned shift clears the sign bit: 0x7fff...ffff. */
static const int64_t max_int64 = (int64_t)((~(uint64_t)0) >> 1);
#endif
#ifdef INT64_MIN
static const int64_t min_int64 = INT64_MIN;
#else
/* Complement of max_int64: 0x8000...0000. */
static const int64_t min_int64 = (int64_t)(~((~(uint64_t)0) >> 1));
#endif
/*
 * GNU tar archives are handled by the generic tar reader; this entry
 * point exists only for API symmetry with the other format registrations.
 */
int
archive_read_support_format_gnutar(struct archive *a)
{
	return (archive_read_support_format_tar(a));
}
/*
 * Register the tar format handler on archive 'a'.
 * Returns ARCHIVE_OK on success, ARCHIVE_FATAL on allocation failure, or
 * the registration error code.
 */
int
archive_read_support_format_tar(struct archive *a)
{
	struct tar *tar;
	int r;

	tar = malloc(sizeof(*tar));
	if (tar == NULL) {
		archive_set_error(a, ENOMEM, "Can't allocate tar data");
		return (ARCHIVE_FATAL);
	}
	memset(tar, 0, sizeof(*tar));

	r = __archive_read_register_format(a, tar,
	    archive_read_format_tar_bid,
	    archive_read_format_tar_read_header,
	    archive_read_format_tar_read_data,
	    NULL,
	    archive_read_format_tar_cleanup);

	if (r != ARCHIVE_OK)
		free(tar);
	/*
	 * Bug fix: propagate the registration result.  The original
	 * unconditionally returned ARCHIVE_OK, hiding registration failures
	 * from the caller (while also freeing tar above).
	 */
	return (r);
}
static int
archive_read_format_tar_cleanup(struct archive *a)
{
struct tar *tar;
tar = *(a->pformat_data);
archive_string_free(&tar->acl_text);
archive_string_free(&tar->entry_name);
archive_string_free(&tar->entry_linkname);
archive_string_free(&tar->entry_uname);
archive_string_free(&tar->entry_gname);
archive_string_free(&tar->pax_global);
archive_string_free(&tar->pax_header);
if (tar->pax_entry != NULL)
free(tar->pax_entry);
free(tar);
*(a->pformat_data) = NULL;
return (ARCHIVE_OK);
}
/*
 * Bid on whether the input looks like a tar archive.  Higher return values
 * mean higher confidence; 0 declines, negative values are fatal errors.
 */
static int
archive_read_format_tar_bid(struct archive *a)
{
	int bid;
	ssize_t bytes_read;
	const void *h;
	const struct archive_entry_header_ustar *header;

	/*
	 * If we're already reading a non-tar file, don't
	 * bother to bid.
	 */
	if (a->archive_format != 0 &&
	    (a->archive_format & ARCHIVE_FORMAT_BASE_MASK) !=
	    ARCHIVE_FORMAT_TAR)
		return (0);
	bid = 0;
	/*
	 * If we're already reading a tar format, start the bid at 1 as
	 * a failsafe.
	 */
	if ((a->archive_format & ARCHIVE_FORMAT_BASE_MASK) ==
	    ARCHIVE_FORMAT_TAR)
		bid++;
	/* Now let's look at the actual header and see if it matches. */
	if (a->compression_read_ahead != NULL)
		bytes_read = (a->compression_read_ahead)(a, &h, 512);
	else
		bytes_read = 0; /* Empty file. */
	if (bytes_read < 0)
		return (ARCHIVE_FATAL);
	if (bytes_read == 0 && bid > 0) {
		/* An archive without a proper end-of-archive marker. */
		/* Hold our nose and bid 1 anyway. */
		return (1);
	}
	if (bytes_read < 512) {
		/* If it's a new archive, then just return a zero bid. */
		if (bid == 0)
			return (0);
		/*
		 * If we already know this is a tar archive,
		 * then we have a problem.
		 */
		archive_set_error(a, ARCHIVE_ERRNO_FILE_FORMAT,
		    "Truncated tar archive");
		return (ARCHIVE_FATAL);
	}
	/* If it's an end-of-archive mark, we can handle it. */
	if ((*(const char *)h) == 0 && archive_block_is_null(h)) {
		/* If it's a known tar file, end-of-archive is definite. */
		if ((a->archive_format & ARCHIVE_FORMAT_BASE_MASK) ==
		    ARCHIVE_FORMAT_TAR)
			return (512);
		/* Empty archive? */
		return (1);
	}
	/* If it's not an end-of-archive mark, it must have a valid checksum.*/
	if (!checksum(a, h))
		return (0);
	bid += 48;  /* Checksum is usually 6 octal digits. */
	header = h;
	/* Recognize POSIX formats. */
	if ((memcmp(header->magic, "ustar\0", 6) == 0)
	    &&(memcmp(header->version, "00", 2)==0))
		bid += 56;
	/* Recognize GNU tar format. */
	if ((memcmp(header->magic, "ustar ", 6) == 0)
	    &&(memcmp(header->version, " \0", 2)==0))
		bid += 56;
	/* Type flag must be null, digit or A-Z, a-z. */
	if (header->typeflag[0] != 0 &&
	    !( header->typeflag[0] >= '0' && header->typeflag[0] <= '9') &&
	    !( header->typeflag[0] >= 'A' && header->typeflag[0] <= 'Z') &&
	    !( header->typeflag[0] >= 'a' && header->typeflag[0] <= 'z') )
		return (0);
	bid += 2;  /* 6 bits of variation in an 8-bit field leaves 2 bits. */
	/* Sanity check: Look at first byte of mode field. */
	switch (255 & (unsigned)header->mode[0]) {
	case 0: case 255:
		/* Base-256 value: No further verification possible! */
		break;
	case ' ': /* Not recommended, but not illegal, either. */
		break;
	case '0': case '1': case '2': case '3':
	case '4': case '5': case '6': case '7':
		/* Octal Value. */
		/* TODO: Check format of remainder of this field. */
		break;
	default:
		/* Not a valid mode; bail out here. */
		return (0);
	}
	/* TODO: Sanity test uid/gid/size/mtime/rdevmajor/rdevminor fields. */
	return (bid);
}
/*
 * The function invoked by archive_read_header().  This
 * just sets up a few things and then calls the internal
 * tar_read_header() function below.
 */
static int
archive_read_format_tar_read_header(struct archive *a,
    struct archive_entry *entry)
{
	/*
	 * When converting tar archives to cpio archives, it is
	 * essential that each distinct file have a distinct inode
	 * number.  To simplify this, we keep a static count here to
	 * assign fake dev/inode numbers to each tar entry.  Note that
	 * pax format archives may overwrite this with something more
	 * useful.
	 *
	 * Ideally, we would track every file read from the archive so
	 * that we could assign the same dev/ino pair to hardlinks,
	 * but the memory required to store a complete lookup table is
	 * probably not worthwhile just to support the relatively
	 * obscure tar->cpio conversion case.
	 */
	static int default_inode;
	static int default_dev;
	struct stat st;
	struct tar *tar;
	const char *p;
	int r;
	size_t l;

	memset(&st, 0, sizeof(st));
	/* Assign default device/inode values. */
	st.st_dev = 1 + default_dev; /* Don't use zero. */
	st.st_ino = ++default_inode; /* Don't use zero. */
	/* Limit generated st_ino number to 16 bits. */
	if (default_inode >= 0xffff) {
		++default_dev;
		default_inode = 0;
	}

	tar = *(a->pformat_data);
	tar->entry_offset = 0;

	r = tar_read_header(a, tar, entry, &st);
	if (r == ARCHIVE_OK) {
		/*
		 * "Regular" entry with trailing '/' is really
		 * directory: This is needed for certain old tar
		 * variants and even for some broken newer ones.
		 */
		p = archive_entry_pathname(entry);
		l = strlen(p);
		/*
		 * Bug fix: guard against an empty pathname; the original
		 * read p[l-1], one byte before the buffer, when l == 0.
		 */
		if (l > 0 && S_ISREG(st.st_mode) && p[l-1] == '/') {
			st.st_mode &= ~S_IFMT;
			st.st_mode |= S_IFDIR;
		}
		/* Copy the final stat data into the entry. */
		archive_entry_copy_stat(entry, &st);
	}
	return (r);
}
/*
 * Hand the next chunk of entry data to the caller.
 * Sets *buff/*size/*offset for the chunk; sparse entries are delivered
 * region by region using the sparse_list built at header time.
 * Returns ARCHIVE_OK per chunk, ARCHIVE_EOF when the entry (and its
 * padding) is fully consumed, ARCHIVE_FATAL on truncated input.
 */
static int
archive_read_format_tar_read_data(struct archive *a,
    const void **buff, size_t *size, off_t *offset)
{
	ssize_t bytes_read;
	struct tar *tar;
	struct sparse_block *p;

	tar = *(a->pformat_data);
	if (tar->sparse_list != NULL) {
		/* Remove exhausted entries from sparse list. */
		while (tar->sparse_list != NULL &&
		    tar->sparse_list->remaining == 0) {
			p = tar->sparse_list;
			tar->sparse_list = p->next;
			free(p);
		}
		if (tar->sparse_list == NULL) {
			/* We exhausted the entire sparse list. */
			tar->entry_bytes_remaining = 0;
		}
	}
	if (tar->entry_bytes_remaining > 0) {
		bytes_read = (a->compression_read_ahead)(a, buff, 1);
		if (bytes_read <= 0)
			return (ARCHIVE_FATAL);
		if (bytes_read > tar->entry_bytes_remaining)
			bytes_read = tar->entry_bytes_remaining;
		if (tar->sparse_list != NULL) {
			/* Don't read more than is available in the
			 * current sparse block. */
			if (tar->sparse_list->remaining < bytes_read)
				bytes_read = tar->sparse_list->remaining;
			/* Report data at the region's logical file offset. */
			tar->entry_offset = tar->sparse_list->offset;
			tar->sparse_list->remaining -= bytes_read;
			tar->sparse_list->offset += bytes_read;
		}
		*size = bytes_read;
		*offset = tar->entry_offset;
		tar->entry_offset += bytes_read;
		tar->entry_bytes_remaining -= bytes_read;
		(a->compression_read_consume)(a, bytes_read);
		return (ARCHIVE_OK);
	} else {
		/* Data is done; skip the zero-padding to the 512 boundary. */
		while (tar->entry_padding > 0) {
			bytes_read = (a->compression_read_ahead)(a, buff, 1);
			if (bytes_read <= 0)
				return (ARCHIVE_FATAL);
			if (bytes_read > tar->entry_padding)
				bytes_read = tar->entry_padding;
			(a->compression_read_consume)(a, bytes_read);
			tar->entry_padding -= bytes_read;
		}
		*buff = NULL;
		*size = 0;
		*offset = tar->entry_offset;
		return (ARCHIVE_EOF);
	}
}
/*
 * This function recursively interprets all of the headers associated
 * with a single entry.  Special headers ('L', 'K', 'x', 'g', 'A', 'V')
 * recurse back into this function for the record they modify; the
 * recursion depth is capped to bound malicious input.
 */
static int
tar_read_header(struct archive *a, struct tar *tar,
    struct archive_entry *entry, struct stat *st)
{
	ssize_t bytes;
	int err;
	const void *h;
	const struct archive_entry_header_ustar *header;

	/* Read 512-byte header record */
	bytes = (a->compression_read_ahead)(a, &h, 512);
	if (bytes < 512) {
		/*
		 * If we're here, it's because the _bid function accepted
		 * this file.  So just call a short read end-of-archive
		 * and be done with it.
		 */
		return (ARCHIVE_EOF);
	}
	(a->compression_read_consume)(a, 512);

	/* Check for end-of-archive mark. */
	if (((*(const char *)h)==0) && archive_block_is_null(h)) {
		/* Try to consume a second all-null record, as well. */
		bytes = (a->compression_read_ahead)(a, &h, 512);
		if (bytes > 0)
			(a->compression_read_consume)(a, bytes);
		archive_set_error(a, 0, NULL);
		return (ARCHIVE_EOF);
	}

	/*
	 * Note: If the checksum fails and we return ARCHIVE_RETRY,
	 * then the client is likely to just retry.  This is a very
	 * crude way to search for the next valid header!
	 *
	 * TODO: Improve this by implementing a real header scan.
	 */
	if (!checksum(a, h)) {
		archive_set_error(a, EINVAL, "Damaged tar archive");
		return (ARCHIVE_RETRY); /* Retryable: Invalid header */
	}

	if (++tar->header_recursion_depth > 32) {
		archive_set_error(a, EINVAL, "Too many special headers");
		return (ARCHIVE_WARN);
	}

	/* Determine the format variant. */
	header = h;
	switch(header->typeflag[0]) {
	case 'A': /* Solaris tar ACL */
		a->archive_format = ARCHIVE_FORMAT_TAR_PAX_INTERCHANGE;
		a->archive_format_name = "Solaris tar";
		err = header_Solaris_ACL(a, tar, entry, st, h);
		break;
	case 'g': /* POSIX-standard 'g' header. */
		a->archive_format = ARCHIVE_FORMAT_TAR_PAX_INTERCHANGE;
		a->archive_format_name = "POSIX pax interchange format";
		err = header_pax_global(a, tar, entry, st, h);
		break;
	case 'K': /* Long link name (GNU tar, others) */
		err = header_longlink(a, tar, entry, st, h);
		break;
	case 'L': /* Long filename (GNU tar, others) */
		err = header_longname(a, tar, entry, st, h);
		break;
	case 'V': /* GNU volume header */
		err = header_volume(a, tar, entry, st, h);
		break;
	case 'X': /* Used by SUN tar; same as 'x'. */
		a->archive_format = ARCHIVE_FORMAT_TAR_PAX_INTERCHANGE;
		a->archive_format_name =
		    "POSIX pax interchange format (Sun variant)";
		err = header_pax_extensions(a, tar, entry, st, h);
		break;
	case 'x': /* POSIX-standard 'x' header. */
		a->archive_format = ARCHIVE_FORMAT_TAR_PAX_INTERCHANGE;
		a->archive_format_name = "POSIX pax interchange format";
		err = header_pax_extensions(a, tar, entry, st, h);
		break;
	default:
		/* No special typeflag: dispatch on the magic field. */
		if (memcmp(header->magic, "ustar  \0", 8) == 0) {
			a->archive_format = ARCHIVE_FORMAT_TAR_GNUTAR;
			a->archive_format_name = "GNU tar format";
			err = header_gnutar(a, tar, entry, st, h);
		} else if (memcmp(header->magic, "ustar", 5) == 0) {
			if (a->archive_format != ARCHIVE_FORMAT_TAR_PAX_INTERCHANGE) {
				a->archive_format = ARCHIVE_FORMAT_TAR_USTAR;
				a->archive_format_name = "POSIX ustar format";
			}
			err = header_ustar(a, tar, entry, st, h);
		} else {
			a->archive_format = ARCHIVE_FORMAT_TAR;
			a->archive_format_name = "tar (non-POSIX)";
			err = header_old_tar(a, tar, entry, st, h);
		}
	}
	--tar->header_recursion_depth;
	return (err);
}
/*
 * Return true if block checksum is correct.
 *
 * POSIX specifies unsigned bytes for the sum, but some old tars (BSD,
 * Solaris, HP-UX) summed signed bytes, so both variants are accepted.
 * Bytes 148-155 (the checksum field itself) count as eight ASCII spaces.
 */
static int
checksum(struct archive *a, const void *h)
{
	const unsigned char *bytes = h;
	const struct archive_entry_header_ustar *header = h;
	int sum, check_unsigned, check_signed, i;

	(void)a; /* UNUSED */
	sum = tar_atol(header->checksum, sizeof(header->checksum));
	check_unsigned = 0;
	check_signed = 0;
	for (i = 0; i < 512; i++) {
		if (i >= 148 && i < 156) {
			/* Checksum field is treated as spaces (0x20). */
			check_unsigned += 32;
			check_signed += 32;
		} else {
			check_unsigned += (unsigned char)bytes[i];
			check_signed += (signed char)bytes[i];
		}
	}
	if (sum == check_unsigned || sum == check_signed)
		return (1);
	return (0);
}
/*
 * Return true if every byte of the record is zero.  (End-of-archive
 * is marked by null 512-byte records.)
 */
static int
archive_block_is_null(const unsigned char *p)
{
	const unsigned char *end = p + ARCHIVE_BYTES_PER_RECORD / sizeof(*p);

	while (p < end) {
		if (*p++ != 0)
			return (0);
	}
	return (1);
}
/*
* Interpret 'A' Solaris ACL header
*/
static int
header_Solaris_ACL(struct archive *a, struct tar *tar,
struct archive_entry *entry, struct stat *st, const void *h)
{
int err, err2;
char *p;
wchar_t *wp;
err = read_body_to_string(a, tar, &(tar->acl_text), h);
err2 = tar_read_header(a, tar, entry, st);
err = err_combine(err, err2);
/* XXX Ensure p doesn't overrun acl_text */
/* Skip leading octal number. */
/* XXX TODO: Parse the octal number and sanity-check it. */
p = tar->acl_text.s;
while (*p != '\0')
p++;
p++;
wp = malloc((strlen(p) + 1) * sizeof(wchar_t));
if (wp != NULL) {
utf8_decode(wp, p, strlen(p));
err2 = __archive_entry_acl_parse_w(entry, wp,
ARCHIVE_ENTRY_ACL_TYPE_ACCESS);
err = err_combine(err, err2);
free(wp);
}
return (err);
}
/*
 * Interpret 'K' long linkname header.
 *
 * The body of this entry carries the (overlong) link target; the
 * entry that follows is the real one, whose link is then overridden.
 */
static int
header_longlink(struct archive *a, struct tar *tar,
    struct archive_entry *entry, struct stat *st, const void *h)
{
	int err;

	err = read_body_to_string(a, tar, &(tar->longlink), h);
	err = err_combine(err, tar_read_header(a, tar, entry, st));
	if (err == ARCHIVE_OK) {
		/* Sets symlink if symlink already set, else hardlink. */
		archive_entry_set_link(entry, tar->longlink.s);
	}
	return (err);
}
/*
 * Interpret 'L' long filename header.
 *
 * Reads the overlong name from this entry's body, then parses the
 * "real" header that follows and overrides its pathname.
 */
static int
header_longname(struct archive *a, struct tar *tar,
    struct archive_entry *entry, struct stat *st, const void *h)
{
	int err;

	err = read_body_to_string(a, tar, &(tar->longname), h);
	err = err_combine(err, tar_read_header(a, tar, entry, st));
	if (err == ARCHIVE_OK)
		archive_entry_set_pathname(entry, tar->longname.s);
	return (err);
}
/*
 * Interpret 'V' GNU tar volume header.
 *
 * Volume labels carry nothing we need; the entry is skipped entirely
 * and the following header is parsed in its place.
 */
static int
header_volume(struct archive *a, struct tar *tar,
    struct archive_entry *entry, struct stat *st, const void *h)
{
	(void)h; /* UNUSED */
	return (tar_read_header(a, tar, entry, st));
}
/*
 * Read the body of an archive entry into an archive_string object,
 * null-terminating the result.  Used for long names, ACL text, and
 * pax attribute blocks.
 */
static int
read_body_to_string(struct archive *a, struct tar *tar,
    struct archive_string *as, const void *h)
{
	off_t size, padded_size;
	ssize_t bytes_read, bytes_to_copy;
	const struct archive_entry_header_ustar *header;
	const void *src;
	char *dest;

	(void)tar; /* UNUSED */
	header = h;
	size = tar_atol(header->size, sizeof(header->size));
	/*
	 * Bug fix: reject negative or absurdly large sizes before
	 * allocating.  These special bodies are always small; a forged
	 * size field previously drove an unbounded allocation (or, when
	 * negative, undefined arithmetic below).
	 */
	if (size < 0 || size > 1048576) {
		archive_set_error(a, EINVAL,
		    "Special header has invalid or oversize body");
		return (ARCHIVE_FATAL);
	}
	/* Read the body into the string. */
	archive_string_ensure(as, size + 1);
	/* Bodies are padded out to whole 512-byte records. */
	padded_size = (size + 511) & ~ 511;
	dest = as->s;
	while (padded_size > 0) {
		bytes_read = (a->compression_read_ahead)(a, &src, padded_size);
		if (bytes_read < 0)
			return (ARCHIVE_FATAL);
		if (bytes_read == 0) {
			/* Bug fix: premature EOF previously spun forever
			 * because padded_size never decreased. */
			archive_set_error(a, EINVAL,
			    "Truncated archive while reading special header");
			return (ARCHIVE_FATAL);
		}
		if (bytes_read > padded_size)
			bytes_read = padded_size;
		(a->compression_read_consume)(a, bytes_read);
		/* Copy only the real body; trailing padding is dropped. */
		bytes_to_copy = bytes_read;
		if ((off_t)bytes_to_copy > size)
			bytes_to_copy = (ssize_t)size;
		memcpy(dest, src, bytes_to_copy);
		dest += bytes_to_copy;
		size -= bytes_to_copy;
		padded_size -= bytes_read;
	}
	*dest = '\0';
	return (ARCHIVE_OK);
}
/*
 * Parse out common header elements.
 *
 * This would be the same as header_old_tar, except that the
 * filename is handled slightly differently for old and POSIX
 * entries (POSIX entries support a 'prefix').  This factoring
 * allows header_old_tar and header_ustar
 * to handle filenames differently, while still putting most of the
 * common parsing into one place.
 *
 * Fills in the numeric fields of 'st' and the type-dependent state
 * of 'entry' from the octal fields shared by all tar variants.
 */
static int
header_common(struct archive *a, struct tar *tar, struct archive_entry *entry,
    struct stat *st, const void *h)
{
	const struct archive_entry_header_ustar *header;
	char tartype;

	(void)a; /* UNUSED */
	header = h;

	/* Stash the link target (hardlink or symlink, per tartype
	 * below) in a null-terminated internal buffer. */
	if (header->linkname[0])
		archive_strncpy(&(tar->entry_linkname), header->linkname,
		    sizeof(header->linkname));
	else
		archive_string_empty(&(tar->entry_linkname));

	/* Parse out the numeric fields (all are octal) */
	st->st_mode = tar_atol(header->mode, sizeof(header->mode));
	st->st_uid = tar_atol(header->uid, sizeof(header->uid));
	st->st_gid = tar_atol(header->gid, sizeof(header->gid));
	st->st_size = tar_atol(header->size, sizeof(header->size));
	st->st_mtime = tar_atol(header->mtime, sizeof(header->mtime));

	/* Handle the tar type flag appropriately. */
	tartype = header->typeflag[0];
	/* Clear file-type bits; each case below sets its own. */
	st->st_mode &= ~S_IFMT;

	switch (tartype) {
	case '1': /* Hard link */
		archive_entry_set_hardlink(entry, tar->entry_linkname.s);
		/*
		 * The following may seem odd, but: Technically, tar
		 * does not store the file type for a "hard link"
		 * entry, only the fact that it is a hard link.  So, I
		 * leave the type zero normally.  But, pax interchange
		 * format allows hard links to have data, which
		 * implies that the underlying entry is a regular
		 * file.
		 */
		if (st->st_size > 0)
			st->st_mode |= S_IFREG;
		/*
		 * A tricky point: Traditionally, tar readers have
		 * ignored the size field when reading hardlink
		 * entries, and some writers put non-zero sizes even
		 * though the body is empty.  POSIX.1-2001 broke with
		 * this tradition by permitting hardlink entries to
		 * store valid bodies in pax interchange format, but
		 * not in ustar format.  Since there is no hard and
		 * fast way to distinguish pax interchange from
		 * earlier archives (the 'x' and 'g' entries are
		 * optional, after all), we need a heuristic.  Here, I
		 * use the bid function to test whether or not there's
		 * a valid header following.  Of course, if we know
		 * this is pax interchange format, then we must obey
		 * the size.
		 *
		 * This heuristic will only fail for a pax interchange
		 * archive that is storing hardlink bodies, no pax
		 * extended attribute entries have yet occurred, and
		 * we encounter a hardlink entry for a file that is
		 * itself an uncompressed tar archive.
		 */
		if (st->st_size > 0 &&
		    a->archive_format != ARCHIVE_FORMAT_TAR_PAX_INTERCHANGE &&
		    archive_read_format_tar_bid(a) > 50)
			st->st_size = 0;
		break;
	case '2': /* Symlink */
		st->st_mode |= S_IFLNK;
		st->st_size = 0;
		archive_entry_set_symlink(entry, tar->entry_linkname.s);
		break;
	case '3': /* Character device */
		st->st_mode |= S_IFCHR;
		st->st_size = 0;
		break;
	case '4': /* Block device */
		st->st_mode |= S_IFBLK;
		st->st_size = 0;
		break;
	case '5': /* Dir */
		st->st_mode |= S_IFDIR;
		st->st_size = 0;
		break;
	case '6': /* FIFO device */
		st->st_mode |= S_IFIFO;
		st->st_size = 0;
		break;
	case 'D': /* GNU incremental directory type */
		/*
		 * No special handling is actually required here.
		 * It might be nice someday to preprocess the file list and
		 * provide it to the client, though.
		 */
		st->st_mode |= S_IFDIR;
		break;
	case 'M': /* GNU "Multi-volume" (remainder of file from last archive)*/
		/*
		 * As far as I can tell, this is just like a regular file
		 * entry, except that the contents should be _appended_ to
		 * the indicated file at the indicated offset.  This may
		 * require some API work to fully support.
		 */
		break;
	case 'N': /* Old GNU "long filename" entry. */
		/* The body of this entry is a script for renaming
		 * previously-extracted entries.  Ugh.  It will never
		 * be supported by libarchive. */
		st->st_mode |= S_IFREG;
		break;
	case 'S': /* GNU sparse files */
		/*
		 * Sparse files are really just regular files with
		 * sparse information in the extended area.
		 */
		/* FALL THROUGH */
	default: /* Regular file  and non-standard types */
		/*
		 * Per POSIX: non-recognized types should always be
		 * treated as regular files.
		 */
		st->st_mode |= S_IFREG;
		break;
	}
	return (0);
}
/*
 * Parse out header elements for "old-style" tar archives (no prefix
 * field; the name lives entirely in the fixed-size 'name' field).
 */
static int
header_old_tar(struct archive *a, struct tar *tar, struct archive_entry *entry,
    struct stat *st, const void *h)
{
	const struct archive_entry_header_ustar *header = h;

	/* Copy the name through an internal buffer so it is
	 * guaranteed null-terminated. */
	archive_strncpy(&(tar->entry_name), header->name,
	    sizeof(header->name));
	archive_entry_set_pathname(entry, tar->entry_name.s);

	/* All remaining fields are shared with the other variants. */
	header_common(a, tar, entry, st, h);

	/* Record body length and the 512-byte record padding. */
	tar->entry_bytes_remaining = st->st_size;
	tar->entry_padding = 0x1ff & (-tar->entry_bytes_remaining);
	return (0);
}
/*
 * Interpret a pax 'g' (global defaults) entry: stash its body and
 * continue with the header that follows.
 */
static int
header_pax_global(struct archive *a, struct tar *tar,
    struct archive_entry *entry, struct stat *st, const void *h)
{
	int err;

	err = read_body_to_string(a, tar, &(tar->pax_global), h);
	return (err_combine(err, tar_read_header(a, tar, entry, st)));
}
/*
 * Interpret a pax 'x'/'X' (per-file extended attributes) entry:
 * read the attribute block, parse the following "real" header, then
 * overlay the attribute values onto it.
 */
static int
header_pax_extensions(struct archive *a, struct tar *tar,
    struct archive_entry *entry, struct stat *st, const void *h)
{
	int err, err2;

	/* Bug fix: this result was previously discarded, so a failed
	 * or truncated attribute body was silently ignored. */
	err = read_body_to_string(a, tar, &(tar->pax_header), h);

	/* Parse the next header. */
	err2 = tar_read_header(a, tar, entry, st);
	err = err_combine(err, err2);

	/*
	 * TODO: Parse global/default options into 'entry' struct here
	 * before handling file-specific options.
	 *
	 * This design (parse standard header, then overwrite with pax
	 * extended attribute data) usually works well, but isn't ideal;
	 * it would be better to parse the pax extended attributes first
	 * and then skip any fields in the standard header that were
	 * defined in the pax header.
	 */
	err2 = pax_header(a, tar, entry, st, tar->pax_header.s);
	err = err_combine(err, err2);

	tar->entry_bytes_remaining = st->st_size;
	tar->entry_padding = 0x1ff & (-tar->entry_bytes_remaining);
	return (err);
}
/*
 * Parse a file header for a Posix "ustar" archive entry.  This also
 * handles "pax" or "extended ustar" entries.
 */
static int
header_ustar(struct archive *a, struct tar *tar, struct archive_entry *entry,
    struct stat *st, const void *h)
{
	const struct archive_entry_header_ustar *header;
	struct archive_string *as;

	header = h;

	/* Copy name into an internal buffer to ensure null-termination.
	 * ustar splits long names into 'prefix' and 'name' parts; they
	 * are rejoined here with a '/' separator if needed. */
	as = &(tar->entry_name);
	if (header->prefix[0]) {
		archive_strncpy(as, header->prefix, sizeof(header->prefix));
		if (as->s[archive_strlen(as) - 1] != '/')
			archive_strappend_char(as, '/');
		archive_strncat(as, header->name, sizeof(header->name));
	} else
		archive_strncpy(as, header->name, sizeof(header->name));

	archive_entry_set_pathname(entry, as->s);

	/* Handle rest of common fields. */
	header_common(a, tar, entry, st, h);

	/* Handle POSIX ustar fields: symbolic user/group names. */
	archive_strncpy(&(tar->entry_uname), header->uname,
	    sizeof(header->uname));
	archive_entry_set_uname(entry, tar->entry_uname.s);
	archive_strncpy(&(tar->entry_gname), header->gname,
	    sizeof(header->gname));
	archive_entry_set_gname(entry, tar->entry_gname.s);

	/* Parse out device numbers only for char and block specials. */
	if (header->typeflag[0] == '3' || header->typeflag[0] == '4') {
		st->st_rdev = makedev(
		    tar_atol(header->rdevmajor, sizeof(header->rdevmajor)),
		    tar_atol(header->rdevminor, sizeof(header->rdevminor)));
	}

	/* Record body length and 512-byte record padding. */
	tar->entry_bytes_remaining = st->st_size;
	tar->entry_padding = 0x1ff & (-tar->entry_bytes_remaining);

	return (0);
}
/*
 * Parse the pax extended attributes record.
 *
 * The block is a sequence of lines of the form
 *     "<decimal length> <key>=<value>\n"
 * where <length> counts the entire line including the digits and the
 * trailing newline.  Each attribute is decoded from UTF-8 and applied
 * to 'entry'/'st' via pax_attribute().
 *
 * Returns non-zero if there's an error in the data.
 */
static int
pax_header(struct archive *a, struct tar *tar, struct archive_entry *entry,
    struct stat *st, char *attr)
{
	size_t attr_length, l, line_length;
	char *line, *p;
	wchar_t *key, *wp, *value;
	int err, err2;

	attr_length = strlen(attr);
	err = ARCHIVE_OK;
	while (attr_length > 0) {
		/* Parse decimal length field at start of line. */
		line_length = 0;
		l = attr_length;
		line = p = attr; /* Record start of line. */
		while (l > 0) {
			if (*p == ' ') {
				p++;
				l--;
				break;
			}
			if (*p < '0' || *p > '9')
				return (-1);	/* Length is not numeric. */
			line_length *= 10;
			line_length += *p - '0';
			if (line_length > 999999) {
				archive_set_error(a, ARCHIVE_ERRNO_MISC,
				    "Rejecting pax extended attribute > 1MB");
				return (ARCHIVE_WARN);
			}
			p++;
			l--;
		}
		/*
		 * Bug fix: the length must cover at least the digits and
		 * separator already consumed.  A zero or too-small length
		 * previously made no forward progress (infinite loop) and
		 * underflowed the size_t passed to utf8_decode below.
		 */
		if (line_length <= (size_t)(p - line))
			return (-1);
		/* A length past the end of the block: quietly stop. */
		if (line_length > attr_length)
			return (0);

		/* Ensure pax_entry buffer is big enough. */
		if (tar->pax_entry_length <= line_length) {
			wchar_t *old_entry;

			if (tar->pax_entry_length <= 0)
				tar->pax_entry_length = 1024;
			while (tar->pax_entry_length <= line_length + 1)
				tar->pax_entry_length *= 2;
			/* Keep the old pointer so it can be released when
			 * realloc fails (realloc does not free it). */
			old_entry = tar->pax_entry;
			tar->pax_entry = realloc(tar->pax_entry,
			    tar->pax_entry_length * sizeof(wchar_t));
			if (tar->pax_entry == NULL) {
				free(old_entry);
				archive_set_error(a, ENOMEM,
				    "No memory");
				return (ARCHIVE_FATAL);
			}
		}

		/* Decode UTF-8 to wchar_t, null-terminate result. */
		if (utf8_decode(tar->pax_entry, p,
		    line_length - (p - attr) - 1)) {
			archive_set_error(a, ARCHIVE_ERRNO_MISC,
			    "Invalid UTF8 character in pax extended attribute");
			err = err_combine(err, ARCHIVE_WARN);
		}

		/* Split at the first '=' into key and value. */
		wp = key = tar->pax_entry;
		if (key[0] == L'=')
			return (-1);	/* Empty key. */
		while (*wp && *wp != L'=')
			++wp;
		/* Bug fix: the old "|| wp == NULL" here was dead code --
		 * wp is never NULL after the scan above. */
		if (*wp == L'\0') {
			/* No '=' separator at all. */
			archive_set_error(a, ARCHIVE_ERRNO_MISC,
			    "Invalid pax extended attributes");
			return (ARCHIVE_WARN);
		}
		*wp = 0;	/* Null-terminate the key. */

		/* Identify null-terminated 'value' portion. */
		value = wp + 1;

		/* Identify this attribute and set it in the entry. */
		err2 = pax_attribute(entry, st, key, value);
		err = err_combine(err, err2);

		/* Skip to next line. */
		attr += line_length;
		attr_length -= line_length;
	}
	return (err);
}
/*
 * Parse a single key=value attribute.  key/value pointers are
 * assumed to point into reasonably long-lived storage.
 *
 * Note that POSIX reserves all-lowercase keywords.  Vendor-specific
 * extensions should always have keywords of the form "VENDOR.attribute"
 * In particular, it's quite feasible to support many different
 * vendor extensions here.  I'm using "LIBARCHIVE" for extensions
 * unique to this library (currently, there are none).
 *
 * Investigate other vendor-specific extensions, as well and see if
 * any of them look useful.
 *
 * Dispatches on the first character of the key, then matches the
 * full keyword; unrecognized keys are silently ignored (per POSIX).
 */
static int
pax_attribute(struct archive_entry *entry, struct stat *st,
    wchar_t *key, wchar_t *value)
{
	int64_t s;
	long n;

	switch (key[0]) {
	case 'L':
		/* Our extensions */
		/* TODO: Handle arbitrary extended attributes... */
		/*
		if (strcmp(key, "LIBARCHIVE.xxxxxxx")==0)
			archive_entry_set_xxxxxx(entry, value);
		*/
		break;
	case 'S':
		/* We support some keys used by the "star" archiver */
		if (wcscmp(key, L"SCHILY.acl.access")==0)
			__archive_entry_acl_parse_w(entry, value,
			    ARCHIVE_ENTRY_ACL_TYPE_ACCESS);
		else if (wcscmp(key, L"SCHILY.acl.default")==0)
			__archive_entry_acl_parse_w(entry, value,
			    ARCHIVE_ENTRY_ACL_TYPE_DEFAULT);
		else if (wcscmp(key, L"SCHILY.devmajor")==0)
			/* Replace only the major part; keep current minor. */
			st->st_rdev = makedev(tar_atol10(value, wcslen(value)),
			    minor(st->st_rdev));
		else if (wcscmp(key, L"SCHILY.devminor")==0)
			/* Replace only the minor part; keep current major. */
			st->st_rdev = makedev(major(st->st_rdev),
			    tar_atol10(value, wcslen(value)));
		else if (wcscmp(key, L"SCHILY.fflags")==0)
			archive_entry_copy_fflags_text_w(entry, value);
		else if (wcscmp(key, L"SCHILY.nlink")==0)
			st->st_nlink = tar_atol10(value, wcslen(value));
		break;
	case 'a':
		if (wcscmp(key, L"atime")==0) {
			/* Seconds + fractional nanoseconds. */
			pax_time(value, &s, &n);
			st->st_atime = s;
			ARCHIVE_STAT_SET_ATIME_NANOS(st, n);
		}
		break;
	case 'c':
		if (wcscmp(key, L"ctime")==0) {
			pax_time(value, &s, &n);
			st->st_ctime = s;
			ARCHIVE_STAT_SET_CTIME_NANOS(st, n);
		} else if (wcscmp(key, L"charset")==0) {
			/* TODO: Publish charset information in entry. */
		} else if (wcscmp(key, L"comment")==0) {
			/* TODO: Publish comment in entry. */
		}
		break;
	case 'g':
		if (wcscmp(key, L"gid")==0)
			st->st_gid = tar_atol10(value, wcslen(value));
		else if (wcscmp(key, L"gname")==0)
			archive_entry_copy_gname_w(entry, value);
		break;
	case 'l':
		/* pax interchange doesn't distinguish hardlink vs. symlink. */
		if (wcscmp(key, L"linkpath")==0) {
			if (archive_entry_hardlink(entry))
				archive_entry_copy_hardlink_w(entry, value);
			else
				archive_entry_copy_symlink_w(entry, value);
		}
		break;
	case 'm':
		if (wcscmp(key, L"mtime")==0) {
			pax_time(value, &s, &n);
			st->st_mtime = s;
			ARCHIVE_STAT_SET_MTIME_NANOS(st, n);
		}
		break;
	case 'p':
		if (wcscmp(key, L"path")==0)
			archive_entry_copy_pathname_w(entry, value);
		break;
	case 'r':
		/* POSIX has reserved 'realtime.*' */
		break;
	case 's':
		/* POSIX has reserved 'security.*' */
		/* Someday: if (wcscmp(key, L"security.acl")==0) { ... } */
		if (wcscmp(key, L"size")==0)
			st->st_size = tar_atol10(value, wcslen(value));
		break;
	case 'u':
		if (wcscmp(key, L"uid")==0)
			st->st_uid = tar_atol10(value, wcslen(value));
		else if (wcscmp(key, L"uname")==0)
			archive_entry_copy_uname_w(entry, value);
		break;
	}
	return (0);
}
/*
 * Parse a decimal time value, which may include a fractional portion
 * ("<seconds>[.<fraction>]", optionally negative).
 *
 * On return, *ps holds whole seconds (saturated at max_int64 on
 * overflow) and *pn holds the fraction converted to nanoseconds
 * (at most 9 digits are significant; 0 when no fraction is present).
 */
static void
pax_time(const wchar_t *p, int64_t *ps, long *pn)
{
	char digit;
	int64_t	s;
	unsigned long l;
	int sign;
	int64_t limit, last_digit_limit;

	/* Largest value/digit for which another *10+digit is safe. */
	limit = max_int64 / 10;
	last_digit_limit = max_int64 % 10;

	s = 0;
	sign = 1;
	if (*p == '-') {
		sign = -1;
		p++;
	}
	while (*p >= '0' && *p <= '9') {
		digit = *p - '0';
		if (s > limit ||
		    (s == limit && digit > last_digit_limit)) {
			/* Bug fix: saturate at max_int64.  The old code
			 * stored max_uint64, which wraps a signed int64
			 * to -1 instead of clamping. */
			s = max_int64;
			break;
		}
		s = (s * 10) + digit;
		++p;
	}

	*ps = s * sign;

	/* Calculate nanoseconds. */
	*pn = 0;
	if (*p != '.')
		return;
	/* Weight of the first fractional digit, in nanoseconds. */
	l = 100000000UL;
	do {
		++p;
		if (*p >= '0' && *p <= '9')
			*pn += (*p - '0') * l;
		else
			break;
	} while (l /= 10);
}
/*
 * Parse GNU tar header
 *
 * GNU header is like POSIX ustar, except 'prefix' is replaced with
 * some other fields (atime, ctime, sparse map, ...).  This also means
 * the filename is stored as in old-style archives.
 */
static int
header_gnutar(struct archive *a, struct tar *tar, struct archive_entry *entry,
    struct stat *st, const void *h)
{
	const struct archive_entry_header_gnutar *header;

	(void)a;

	/* Grab fields common to all tar variants. */
	header_common(a, tar, entry, st, h);

	/* Copy filename over (to ensure null termination). */
	header = h;
	archive_strncpy(&(tar->entry_name), header->name,
	    sizeof(header->name));
	archive_entry_set_pathname(entry, tar->entry_name.s);

	/* Fields common to ustar and GNU */
	/* XXX Can the following be factored out since it's common
	 * to ustar and gnu tar?  Is it okay to move it down into
	 * header_common, perhaps?  */
	archive_strncpy(&(tar->entry_uname),
	    header->uname, sizeof(header->uname));
	archive_entry_set_uname(entry, tar->entry_uname.s);
	archive_strncpy(&(tar->entry_gname),
	    header->gname, sizeof(header->gname));
	archive_entry_set_gname(entry, tar->entry_gname.s);

	/* Parse out device numbers only for char and block specials */
	if (header->typeflag[0] == '3' || header->typeflag[0] == '4')
		st->st_rdev = makedev (
		    tar_atol(header->rdevmajor, sizeof(header->rdevmajor)),
		    tar_atol(header->rdevminor, sizeof(header->rdevminor)));
	else
		st->st_rdev = 0;

	/* Record body length and 512-byte record padding. */
	tar->entry_bytes_remaining = st->st_size;
	tar->entry_padding = 0x1ff & (-tar->entry_bytes_remaining);

	/* Grab GNU-specific fields. */
	st->st_atime = tar_atol(header->atime, sizeof(header->atime));
	st->st_ctime = tar_atol(header->ctime, sizeof(header->ctime));
	/* For sparse files, 'realsize' holds the logical file size
	 * (the 'size' field only covers the stored data). */
	if (header->realsize[0] != 0) {
		st->st_size = tar_atol(header->realsize,
		    sizeof(header->realsize));
	}

	if (header->sparse[0].offset[0] != 0) {
		/* Read the sparse map (and any extension records). */
		gnu_read_sparse_data(a, tar, header);
	} else {
		if (header->isextended[0] != 0) {
			/* XXX WTF? XXX */
			/* NOTE(review): an extension chain with an empty
			 * initial sparse map is not consumed here; the
			 * following records would be misparsed as headers.
			 * Confirm whether this case occurs in practice. */
		}
	}

	return (0);
}
/*
 * Read the extension records of a GNU sparse map and append all map
 * entries to tar->sparse_list.
 */
static int
gnu_read_sparse_data(struct archive *a, struct tar *tar,
    const struct archive_entry_header_gnutar *header)
{
	ssize_t bytes_read;
	const void *data;
	/* Layout of one 512-byte extended sparse-map record. */
	struct extended {
		struct gnu_sparse sparse[21];
		char isextended[1];
		char padding[7];
	};
	const struct extended *ext;

	/* The base header itself carries the first four map slots. */
	gnu_parse_sparse_data(a, tar, header->sparse, 4);
	if (header->isextended[0] == 0)
		return (ARCHIVE_OK);

	/* Consume 512-byte extension records until the chain ends. */
	do {
		bytes_read = (a->compression_read_ahead)(a, &data, 512);
		if (bytes_read < 0)
			return (ARCHIVE_FATAL);
		if (bytes_read < 512) {
			archive_set_error(a, ARCHIVE_ERRNO_FILE_FORMAT,
			    "Truncated tar archive "
			    "detected while reading sparse file data");
			return (ARCHIVE_FATAL);
		}
		(a->compression_read_consume)(a, 512);
		/* NOTE(review): 'data' is parsed after being consumed;
		 * this assumes the read-ahead buffer stays valid until
		 * the next read, matching usage elsewhere in this file
		 * (e.g. read_body_to_string) -- confirm. */
		ext = (const struct extended *)data;
		gnu_parse_sparse_data(a, tar, ext->sparse, 21);
	} while (ext->isextended[0] != 0);
	if (tar->sparse_list != NULL)
		/* Reading of the body starts at the first data chunk. */
		tar->entry_offset = tar->sparse_list->offset;
	return (ARCHIVE_OK);
}
/*
 * Append up to 'length' entries of a GNU sparse map to
 * tar->sparse_list, stopping early at the first unused slot
 * (marked by a zeroed offset field).
 */
static void
gnu_parse_sparse_data(struct archive *a, struct tar *tar,
    const struct gnu_sparse *sparse, int length)
{
	struct sparse_block *last;
	struct sparse_block *p;

	(void)a; /* UNUSED */

	/* Find the current tail of the sparse-block list. */
	last = tar->sparse_list;
	while (last != NULL && last->next != NULL)
		last = last->next;

	while (length > 0 && sparse->offset[0] != 0) {
		/* calloc replaces the old malloc+memset pair: same
		 * zero-filled result in one call. */
		p = calloc(1, sizeof(*p));
		if (p == NULL)
			__archive_errx(1, "Out of memory");
		if (last != NULL)
			last->next = p;
		else
			tar->sparse_list = p;
		last = p;
		p->offset = tar_atol(sparse->offset, sizeof(sparse->offset));
		p->remaining =
		    tar_atol(sparse->numbytes, sizeof(sparse->numbytes));
		sparse++;
		length--;
	}
}
/*-
 * Convert text->integer.
 *
 * Traditional tar formats (including POSIX) specify base-8 for
 * all of the standard numeric fields, which caps file size at 8GB
 * and rdev/uid/gid fields at 21 bits.  Two extensions work around
 * this: pax extended headers (variable-length strings) and the
 * GNU tar / star base-256 form, flagged by a set high bit in the
 * first byte.  On read, this implementation supports both.
 */
static int64_t
tar_atol(const char *p, unsigned char_cnt)
{
	/*
	 * Technically, GNU tar considers a field to be in base-256
	 * only if the first byte is 0xff or 0x80; accepting any set
	 * high bit is slightly more permissive.
	 */
	return ((*p & 0x80) != 0)
	    ? tar_atol256(p, char_cnt)
	    : tar_atol8(p, char_cnt);
}
/*
 * Parse an octal text field of at most char_cnt bytes, allowing
 * leading whitespace and an optional leading '-'.
 *
 * Note that this implementation does not (and should not!) obey
 * locale settings; you cannot simply substitute strtol here, since
 * it does obey locale.
 */
static int64_t
tar_atol8(const char *p, unsigned char_cnt)
{
	int64_t	l, limit, last_digit_limit;
	int digit, sign, base;

	base = 8;
	/* Largest value/digit for which another *8+digit is safe. */
	limit = max_int64 / base;
	last_digit_limit = max_int64 % base;

	while (*p == ' ' || *p == '\t')
		p++;
	if (*p == '-') {
		sign = -1;
		p++;
	} else
		sign = 1;

	l = 0;
	digit = *p - '0';
	while (digit >= 0 && digit < base  && char_cnt-- > 0) {
		if (l>limit || (l == limit && digit > last_digit_limit)) {
			/* Bug fix: saturate at max_int64 on overflow.
			 * The old code stored max_uint64, which wraps a
			 * signed int64 to -1 instead of clamping. */
			l = max_int64;
			break;
		}
		l = (l * base) + digit;
		digit = *++p - '0';
	}
	return (sign < 0) ? -l : l;
}
/*
 * Parse a decimal wide-character field of at most char_cnt
 * characters, allowing leading whitespace and an optional '-'.
 * Used for pax attribute values.
 *
 * Note that this implementation does not (and should not!) obey
 * locale settings; you cannot simply substitute strtol here, since
 * it does obey locale.
 */
static int64_t
tar_atol10(const wchar_t *p, unsigned char_cnt)
{
	int64_t l, limit, last_digit_limit;
	int base, digit, sign;

	base = 10;
	/* Largest value/digit for which another *10+digit is safe. */
	limit = max_int64 / base;
	last_digit_limit = max_int64 % base;

	while (*p == ' ' || *p == '\t')
		p++;
	if (*p == '-') {
		sign = -1;
		p++;
	} else
		sign = 1;

	l = 0;
	digit = *p - '0';
	while (digit >= 0 && digit < base  && char_cnt-- > 0) {
		if (l > limit || (l == limit && digit > last_digit_limit)) {
			/* Bug fix: saturate at max_int64 on overflow.
			 * The old code stored max_uint64, which wraps a
			 * signed int64 to -1 instead of clamping. */
			l = max_int64;
			break;
		}
		l = (l * base) + digit;
		digit = *++p - '0';
	}
	return (sign < 0) ? -l : l;
}
/*
 * Parse a base-256 integer.  This is just a straight signed binary
 * value in big-endian order, except that the high-order bit is
 * ignored.  Remember that "int64_t" may or may not be exactly 64
 * bits; the implementation here tries to avoid making any assumptions
 * about the actual size of an int64_t.  It does assume we're using
 * twos-complement arithmetic, though.
 */
static int64_t
tar_atol256(const char *_p, unsigned char_cnt)
{
	int64_t	l, upper_limit, lower_limit;
	const unsigned char *p = (const unsigned char *)_p;

	/* Bounds beyond which the next 8-bit shift would overflow. */
	upper_limit = max_int64 / 256;
	lower_limit = min_int64 / 256;

	/* Pad with 1 or 0 bits, depending on sign.  Bit 6 of the first
	 * byte is the sign bit (bit 7 is the base-256 marker and is
	 * ignored, per the format). */
	if ((0x40 & *p) == 0x40)
		l = (int64_t)-1;
	else
		l = 0;
	/* Fold in the low 6 bits of the first byte, then one full
	 * byte per remaining input character. */
	l = (l << 6) | (0x3f & *p++);
	while (--char_cnt > 0) {
		if (l > upper_limit) {
			l = max_int64; /* Truncate on overflow */
			break;
		} else if (l < lower_limit) {
			l = min_int64;
			break;
		}
		l = (l << 8) | (0xff & (int64_t)*p++);
	}
	return (l);
}
/*
 * Decode 'length' bytes of UTF-8 at 'src' into a null-terminated
 * wide-character string at 'dest'.
 *
 * Invalid or incomplete sequences are replaced by L'?' (consuming a
 * single byte each) so decoding can continue.  Returns non-zero if
 * any replacement was made, zero on a clean conversion.
 */
static int
utf8_decode(wchar_t *dest, const char *src, size_t length)
{
	size_t n;
	int err;

	err = 0;
	while (length > 0) {
		n = UTF8_mbrtowc(dest, src, length);
		if (n == 0)
			break;	/* Terminating NUL reached. */
		if (n > 8) {
			/* (size_t)-1 / (size_t)-2: malformed or
			 * incomplete sequence; substitute and go on. */
			*dest = L'?';
			n = 1;
			err = 1;
		}
		dest++;
		src += n;
		length -= n;
	}
	*dest++ = L'\0';
	return (err);
}
/*
 * Copied from FreeBSD libc/locale.
 *
 * Decode one UTF-8 character from 's' (reading at most 'n' bytes)
 * into '*pwc'.  Returns the number of bytes consumed, 0 for a NUL
 * character, (size_t)-2 for an incomplete sequence, or (size_t)-1
 * (with errno set to EILSEQ) for malformed input.
 */
static size_t
UTF8_mbrtowc(wchar_t *pwc, const char *s, size_t n)
{
	int ch, i, len, mask;
	unsigned long lbound, wch;

	if (s == NULL)
		/* Reset to initial shift state (no-op) */
		return (0);
	if (n == 0)
		/* Incomplete multibyte sequence */
		return ((size_t)-2);

	/*
	 * Determine the number of octets that make up this character from
	 * the first octet, and a mask that extracts the interesting bits of
	 * the first octet.
	 *
	 * We also specify a lower bound for the character code to detect
	 * redundant, non-"shortest form" encodings.  For example, the
	 * sequence C0 80 is _not_ a legal representation of the null
	 * character.  This enforces a 1-to-1 mapping between character
	 * codes and their multibyte representations.
	 */
	ch = (unsigned char)*s;
	if ((ch & 0x80) == 0) {
		mask = 0x7f;
		len = 1;
		lbound = 0;
	} else if ((ch & 0xe0) == 0xc0) {
		mask = 0x1f;
		len = 2;
		lbound = 0x80;
	} else if ((ch & 0xf0) == 0xe0) {
		mask = 0x0f;
		len = 3;
		lbound = 0x800;
	} else if ((ch & 0xf8) == 0xf0) {
		mask = 0x07;
		len = 4;
		lbound = 0x10000;
	} else if ((ch & 0xfc) == 0xf8) {
		mask = 0x03;
		len = 5;
		lbound = 0x200000;
	} else if ((ch & 0xfe) == 0xfc) {
		/*
		 * Bug fix: the mask here was 0xfc, which also accepted
		 * 0xfe and 0xff as 6-byte lead bytes; those values can
		 * never begin a UTF-8 sequence and now fall through to
		 * the malformed-input branch below.
		 */
		mask = 0x01;
		len = 6;
		lbound = 0x4000000;
	} else {
		/*
		 * Malformed input; input is not UTF-8.
		 */
		errno = EILSEQ;
		return ((size_t)-1);
	}

	if (n < (size_t)len)
		/* Incomplete multibyte sequence */
		return ((size_t)-2);

	/*
	 * Decode the octet sequence representing the character in chunks
	 * of 6 bits, most significant first.
	 */
	wch = (unsigned char)*s++ & mask;
	i = len;
	while (--i != 0) {
		if ((*s & 0xc0) != 0x80) {
			/*
			 * Malformed input; bad characters in the middle
			 * of a character.
			 */
			errno = EILSEQ;
			return ((size_t)-1);
		}
		wch <<= 6;
		wch |= *s++ & 0x3f;
	}
	if (wch < lbound) {
		/*
		 * Malformed input; redundant ("overlong") encoding.
		 */
		errno = EILSEQ;
		return ((size_t)-1);
	}
	if (pwc != NULL) {
		/* Assign the value to the output; out-of-range values
		 * just get truncated. */
		*pwc = (wchar_t)wch;
#ifdef WCHAR_MAX
		/*
		 * If platform has WCHAR_MAX, we can do something
		 * more sensible with out-of-range values.
		 */
		if (wch >= WCHAR_MAX)
			*pwc = '?';
#endif
	}
	return (wch == L'\0' ? 0 : len);
}
|
<?php
declare(strict_types=1);
namespace Liquetsoft\Fias\Component\FiasInformer;
use Liquetsoft\Fias\Component\Exception\FiasInformerException;
use SoapClient;
use SoapFault;
use Throwable;
/**
* Объект, который получает ссылку на файл с архивом ФИАС
* от soap сервиса информирования ФИАС.
*/
/**
 * Объект, который получает ссылку на файл с архивом ФИАС
 * от soap сервиса информирования ФИАС.
 *
 * The client is created lazily from a WSDL url unless a ready
 * SoapClient instance is injected (useful for tests).
 */
class SoapFiasInformer implements FiasInformer
{
    /**
     * WSDL url used to build the client lazily; empty when a ready
     * client instance was injected.
     *
     * @var string
     */
    private $wsdl = '';

    /**
     * @var SoapClient|null
     */
    private $soapClient;

    /**
     * @param SoapClient|string $soapClient ready client, or a WSDL url
     */
    public function __construct($soapClient = 'http://fias.nalog.ru/WebServices/Public/DownloadService.asmx?WSDL')
    {
        if ($soapClient instanceof SoapClient) {
            $this->soapClient = $soapClient;
        } else {
            $this->wsdl = $soapClient;
        }
    }

    /**
     * {@inheritDoc}
     *
     * Queries GetLastDownloadFileInfo and returns the latest complete
     * (full) archive version.
     *
     * @throws SoapFault
     * @throws FiasInformerException when the service reports no version
     *                               or a version without an archive url
     */
    public function getCompleteInfo(): InformerResponse
    {
        try {
            $response = $this->getSoapClient()->__call(
                'GetLastDownloadFileInfo',
                []
            );
        } catch (Throwable $e) {
            throw new FiasInformerException($e->getMessage(), 0, $e);
        }

        $versionId = $response->GetLastDownloadFileInfoResult->VersionId ?? 0;
        $url = $response->GetLastDownloadFileInfoResult->GarXMLFullURL ?? '';
        if ($versionId === 0) {
            $message = "Informer can't find complete version in SOAP response.";
            throw new FiasInformerException($message);
        } elseif ($url === '') {
            // иногда версия появляется без ссылки на xml архив
            $message = "There is no xml archive set for {$versionId} complete version.";
            throw new FiasInformerException($message);
        }

        return $this->createResponseObject(
            (int) $versionId,
            (string) $url
        );
    }

    /**
     * {@inheritDoc}
     *
     * Returns the smallest delta version strictly newer than
     * $currentVersion; the returned object has no result when the
     * current version is already the latest.
     *
     * @throws SoapFault
     */
    public function getDeltaInfo(int $currentVersion): InformerResponse
    {
        $versions = $this->getDeltaList();

        $delta = new InformerResponseBase();
        foreach ($versions as $version) {
            $versionNumber = $version->getVersion();
            // keep the closest (lowest-numbered) version above current
            if ($versionNumber > $currentVersion && (!$delta->hasResult() || $delta->getVersion() > $versionNumber)) {
                $delta = $version;
            }
        }

        return $delta;
    }

    /**
     * {@inheritDoc}
     *
     * Queries GetAllDownloadFileInfo and returns every version that
     * already has a delta archive url.
     *
     * @throws SoapFault
     */
    public function getDeltaList(): array
    {
        try {
            $response = $this->getSoapClient()->__call(
                'GetAllDownloadFileInfo',
                []
            );
        } catch (Throwable $e) {
            throw new FiasInformerException($e->getMessage(), 0, $e);
        }

        // NOTE(review): assumes DownloadFileInfo is always a list; SOAP
        // responses with a single element may decode to a bare object —
        // confirm against the live service.
        $response = $response->GetAllDownloadFileInfoResult->DownloadFileInfo ?? [];

        $list = [];
        foreach ($response as $responseObject) {
            $versionId = $responseObject->VersionId ?? 0;
            $url = $responseObject->GarXMLDeltaURL ?? '';
            if ($url !== '') {
                // похоже только так это работает, дельта появляется не сразу
                // поэтому просто пропускаем объекты без дельты
                $list[] = $this->createResponseObject((int) $versionId, (string) $url);
            }
        }

        return $list;
    }

    /**
     * Создает объект с ответом по номеру версии и url.
     *
     * @param int    $versionId
     * @param string $url
     *
     * @return InformerResponse
     */
    private function createResponseObject(int $versionId, string $url): InformerResponse
    {
        $res = new InformerResponseBase();
        $res->setVersion($versionId);
        $res->setUrl($url);

        return $res;
    }

    /**
     * Возвращает объект SOAP-клиента для запросов.
     *
     * Builds the client from the stored WSDL url on first use.
     *
     * @return SoapClient
     *
     * @throws SoapFault
     */
    private function getSoapClient(): SoapClient
    {
        if ($this->soapClient === null) {
            $this->soapClient = new SoapClient(
                $this->wsdl,
                [
                    'exceptions' => true,
                ]
            );
        }

        return $this->soapClient;
    }
}
|
package de.jlnstrk.transit.sample.android
import androidx.lifecycle.ViewModel
import de.jlnstrk.transit.common.extensions.require
import de.jlnstrk.transit.common.model.Coordinates
import de.jlnstrk.transit.common.model.Location
import de.jlnstrk.transit.common.response.StationBoardData
import de.jlnstrk.transit.common.response.base.ServiceResult
import de.jlnstrk.transit.common.service.StationBoardService
import de.jlnstrk.transit.sample.SampleProvider
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow
class SampleViewModel : ViewModel() {

    private val provider = SampleProvider

    /** UI-facing wrapper distinguishing loading, empty, error, and success states. */
    sealed interface DataState<out D> {
        object Loading : DataState<Nothing>
        object NoResult : DataState<Nothing>
        data class Error(val throwable: Throwable?) : DataState<Nothing>
        data class Data<T>(val data: T) : DataState<T>
    }

    /** Cold flow that fetches a departure board near [REFERENCE_COORDINATES] on each collection. */
    val nearbyDepartures: Flow<DataState<StationBoardData>> = flow {
        when (val result = provider.require<StationBoardService>()
            .stationBoard(
                mode = StationBoardService.Mode.DEPARTURES,
                location = Location.Point(coordinates = REFERENCE_COORDINATES),
            )) {
            is ServiceResult.Success -> emit(DataState.Data(result.result))
            is ServiceResult.Failure -> emit(DataState.Error(result.exception))
            ServiceResult.NoResult -> emit(DataState.NoResult)
        }
    }
}
|
package akka.contrib.persistence.mongodb
import akka.actor.ActorSystem
import com.typesafe.config.{Config, ConfigException}
import scala.concurrent.ExecutionContextExecutor
import scala.util.{Failure, Success, Try}
abstract class WithMongoPersistencePluginDispatcher(actorSystem: ActorSystem, config: Config) {

  /**
   * The dispatcher all plugin operations run on.
   *
   * Resolves the dispatcher named by the `plugin-dispatcher` config path.
   * When that path is missing or malformed (a [[ConfigException]]), logs a
   * warning and falls back to the actor system's default dispatcher.
   */
  implicit lazy val pluginDispatcher: ExecutionContextExecutor =
    Try(actorSystem.dispatchers.lookup(config.getString("plugin-dispatcher"))) match {
      case Success(configuredPluginDispatcher) =>
        configuredPluginDispatcher
      case Failure(_ : ConfigException) =>
        actorSystem.log.warning("plugin-dispatcher not configured for akka-contrib-mongodb-persistence. " +
          "Using actor system dispatcher.")
        actorSystem.dispatcher
      case Failure(other) =>
        // Bug fix: the match was non-exhaustive. A non-ConfigException
        // failure (e.g. a misconfigured dispatcher) previously surfaced
        // as an opaque MatchError; rethrow the real cause instead.
        throw other
    }
}
|
# Copyright (c) 2010-2011, Diaspora Inc. This file is
# licensed under the Affero General Public License version 3 or later. See
# the COPYRIGHT file.
namespace :migrations do
  desc 'copy all hidden share visibilities from share_visibilities to users. Can be run with the site still up.'
  task :copy_hidden_share_visibilities_to_users => [:environment] do
    require File.join(Rails.root, 'lib', 'share_visibility_converter')
    ShareVisibilityConverter.copy_hidden_share_visibilities_to_users
  end

  desc 'absolutify all existing image references'
  task :absolutify_image_references do
    require File.join(File.dirname(__FILE__), '..', '..', 'config', 'environment')
    Photo.all.each do |photo|
      # Photos that already have a remote path are assumed to be absolute.
      next if photo.remote_photo_path

      # Root URL of the pod hosting the photo's owner.
      pod = URI.parse(photo.person.url)
      pod_url = "#{pod.scheme}://#{pod.host}"

      remote_path =
        if photo.image.url
          photo.image.url.to_s
        else
          puts pod_url
          "#{pod_url}#{photo.remote_photo_path}/#{photo.remote_photo_name}"
        end

      # Split the URL into directory (keeps trailing slash) and file name.
      name_start = remote_path.rindex '/'
      photo.remote_photo_path = "#{remote_path.slice(0, name_start)}/"
      photo.remote_photo_name = remote_path.slice(name_start + 1, remote_path.length)
      photo.save!
    end
  end

  desc 'upload local photo files to the joindiaspora S3 bucket'
  task :upload_photos_to_s3 do
    require File.join(File.dirname(__FILE__), '..', '..', 'config', 'environment')
    # SECURITY: do not print AppConfig[:s3_key] -- it is a credential.
    connection = Aws::S3.new(AppConfig[:s3_key], AppConfig[:s3_secret])
    bucket = connection.bucket('joindiaspora')

    dir_name = File.dirname(__FILE__) + "/../../public/uploads/images/"
    count = Dir.foreach(dir_name).count
    current = 0
    max_attempts = 3

    Dir.foreach(dir_name) do |file_name|
      puts file_name
      next if file_name == '.' || file_name == '..'

      attempts = 0
      begin
        key = Aws::S3::Key.create(bucket, 'uploads/images/' + file_name)
        # File.binread closes the handle itself, unlike File.open(...).read,
        # and reads the image bytes unmodified.
        key.put(File.binread(File.join(dir_name, file_name)), 'public-read')
        key.public_link()
        puts "Uploaded #{current} of #{count}"
        current += 1
      rescue Exception => e
        # Bounded retry: the previous unconditional `retry` could loop
        # forever on a permanent error (bad credentials, missing bucket).
        attempts += 1
        raise if attempts >= max_attempts
        puts "error #{e} on #{current} (#{file_name}), retrying (attempt #{attempts}/#{max_attempts})"
        retry
      end
    end
  end
end
|
<?php
declare(strict_types=1);
namespace Baraja\Shop\Invoice;
use Baraja\Doctrine\ORM\DI\OrmAnnotationsExtension;
use Nette\DI\CompilerExtension;
/**
 * Nette DI compiler extension registering the shop-invoice services.
 */
final class ShopInvoiceExtension extends CompilerExtension
{
	public function beforeCompile(): void
	{
		$builder = $this->getContainerBuilder();
		// Let Doctrine discover this package's annotated entities.
		OrmAnnotationsExtension::addAnnotationPathToManager(
			$builder, 'Baraja\Shop\Invoice\Entity', __DIR__ . '/Entity'
		);
		// Register the invoice manager; wwwDir falls back to '' when the
		// container does not define the parameter.
		$builder->addDefinition($this->prefix('invoiceManager'))
			->setFactory(InvoiceManager::class)
			->setArgument('wwwDir', $builder->parameters['wwwDir'] ?? '');
	}
}
|
package docker
import (
"context"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/filters"
docker "github.com/docker/docker/client"
)
// Client for handling requests to the Docker API.
type Client struct {
	client *docker.Client
	// authentication holds the base64url-encoded JSON auth config sent as
	// the X-Registry-Auth header; empty means no credentials.
	authentication string
}
// Authentication returns a client option that stores registry credentials
// on the Client as a base64url-encoded JSON auth config.
func Authentication(user, password string) func(*Client) error {
	return func(c *Client) error {
		payload, err := json.Marshal(types.AuthConfig{
			Username: user,
			Password: password,
		})
		if err != nil {
			return err
		}
		c.authentication = base64.URLEncoding.EncodeToString(payload)
		return nil
	}
}
// NewClient builds the client used to access the Docker API.
// Options are applied in order before the SDK client is constructed from
// the environment (DOCKER_HOST etc.) with API version negotiation enabled.
func NewClient(options ...func(*Client) error) (*Client, error) {
	c := &Client{}
	for _, option := range options {
		// A nil option is a programming error; fail loudly.
		if option == nil {
			return nil, fmt.Errorf("option is nil pointer")
		}
		err := option(c)
		if err != nil {
			return nil, err
		}
	}
	cli, err := docker.NewClientWithOpts(docker.FromEnv, docker.WithAPIVersionNegotiation())
	if err != nil {
		return nil, err
	}
	c.client = cli
	return c, nil
}
// Pull a docker image to the host.
// ImagePull is asynchronous on the daemon side: the pull only completes
// once the returned progress stream has been read to EOF, so we drain and
// close it here. The previous code discarded the reader, which both leaked
// it and returned before the pull was guaranteed to finish.
func (c *Client) Pull(image string) error {
	ctx := context.Background()
	opts := types.ImagePullOptions{}
	if c.authentication != "" {
		opts.RegistryAuth = c.authentication
	}
	reader, err := c.client.ImagePull(ctx, image, opts)
	if err != nil {
		return err
	}
	defer reader.Close()
	// Drain the JSON progress stream until EOF to wait for completion.
	if _, err := io.Copy(ioutil.Discard, reader); err != nil {
		return err
	}
	return nil
}
// Image returns metadata about an image on the docker host.
// The image argument is matched against the image reference; an error is
// returned when no local image matches.
func (c *Client) Image(image string) (types.ImageSummary, error) {
	ctx := context.Background()
	f := filters.NewArgs()
	f.Add("reference", image)
	opts := types.ImageListOptions{Filters: f}
	list, err := c.client.ImageList(ctx, opts)
	if err != nil {
		return types.ImageSummary{}, err
	}
	// Log a summary instead of dumping the entire list (previous debug
	// leftover was noisy for hosts with many matching tags).
	log.Printf("found %d image(s) matching %q", len(list), image)
	if len(list) < 1 {
		// Include the reference so the caller's error message is actionable.
		return types.ImageSummary{}, errors.New("image not found: " + image)
	}
	return list[0], nil
}
// Save an image by ID as tar to output directory.
// The tar stream is written atomically via a temp file (see copyToFile),
// so a partially written archive never appears at the output path.
func (c *Client) Save(output, imageID string) error {
	ctx := context.Background()
	responseBody, err := c.client.ImageSave(ctx, []string{imageID})
	if err != nil {
		return err
	}
	defer responseBody.Close()
	return copyToFile(output, responseBody)
}
func copyToFile(outfile string, r io.Reader) error {
tmpFile, err := ioutil.TempFile(filepath.Dir(outfile), ".docker_temp_")
if err != nil {
return err
}
tmpPath := tmpFile.Name()
_, err = io.Copy(tmpFile, r)
tmpFile.Close()
if err != nil {
os.Remove(tmpPath)
return err
}
if err = os.Rename(tmpPath, outfile); err != nil {
os.Remove(tmpPath)
return err
}
return nil
}
|
<?php
/**
* Pubble_Messenger
*
* @category Pubble
* @package Pubble_Messenger
* @author Pubble <ross@pubble.io>
* @copyright 2016 Pubble (http://www.pubble.io)
* @version 1.1.2
*/
/**
* Pubble_Messenger_Test_Config_Module
*
* @category Pubble
* @package Pubble_Messenger
* @subpackage Test
*/
/**
 * Verifies Pubble_Messenger's XML configuration: code pool, version,
 * block/model/helper aliases, the ACL entry, and event observers.
 */
class Pubble_Messenger_Test_Config_Module extends EcomDev_PHPUnit_Test_Case_Config
{
    /**
     * The module must be installed in the community code pool.
     *
     * @test
     * @group pubble
     * @group pubble_messenger
     * @group config
     */
    public function module_is_in_correct_code_pool()
    {
        $this->assertModuleCodePool('community');
    }
    /**
     * config.xml must declare the released module version (1.1.2).
     *
     * @test
     * @group pubble
     * @group pubble_messenger
     * @group config
     */
    public function module_version_is_correct()
    {
        $this->assertModuleVersion('1.1.2');
    }
    /**
     * The script block alias must resolve to its class.
     *
     * @test
     * @group pubble
     * @group pubble_messenger
     * @group config
     */
    public function block_are_configured()
    {
        $this->assertBlockAlias('pubble_messenger/script', 'Pubble_Messenger_Block_Script');
    }
    /**
     * Observer and source-model aliases must resolve to their classes.
     *
     * @test
     * @group pubble
     * @group pubble_messenger
     * @group config
     */
    public function models_are_configured()
    {
        $this->assertModelAlias('pubble_messenger/observer', 'Pubble_Messenger_Model_Observer');
        $this->assertModelAlias('pubble_messenger/source_method', 'Pubble_Messenger_Model_Source_Method');
    }
    /**
     * The data helper alias must resolve to its class.
     *
     * @test
     * @group pubble
     * @group pubble_messenger
     * @group config
     */
    public function helpers_are_configured()
    {
        $this->assertHelperAlias('pubble_messenger/data', 'Pubble_Messenger_Helper_Data');
    }
    /**
     * The admin ACL must expose the module's settings section.
     *
     * @test
     * @group pubble
     * @group pubble_messenger
     * @group config
     */
    public function access_granted_for_config_acl()
    {
        $this->assertConfigNodeValue(
            'adminhtml/acl/resources/admin/children/system/children/config/children/pubble/title',
            'Pubble Messenger Settings'
        );
    }
    /**
     * The frontend layout-generate event must trigger the script injection
     * observer (addPubbleBeforeBodyEnd).
     *
     * @test
     * @group pubble
     * @group pubble_messenger
     * @group config
     */
    public function config_has_controller_action_generate_blocks_after_observer_defined()
    {
        $this->assertEventObserverDefined(
            'frontend',
            'controller_action_layout_generate_blocks_after',
            'Pubble_Messenger_Model_Observer',
            'addPubbleBeforeBodyEnd'
        );
    }
}
|
# googlemarket
首页、专题、游戏界面如下:
  
分类、排行的界面
 
详情页面如下:

# 良心友情链接
[腾讯QQ群快速检索](http://u.720life.cn/s/8cf73f7c)
[软件免费开发论坛](http://u.720life.cn/s/bbb01dc0)
|
<?php
namespace QuarkCMS\Quark\Component\Form\Fields;
use QuarkCMS\Quark\Component\Form\Fields\Item;
/**
 * Quarter-picker form field.
 *
 * All behaviour is inherited from Item; this class only selects the
 * front-end component used to render the field.
 */
class Quarter extends Item
{
    /**
     * Component type identifier consumed by the front end.
     *
     * @var string
     */
    public $component = 'quarterField';
}
|
<?php
namespace TheBachtiarz\SerialNumber\Cache;
use TheBachtiarz\SerialNumber\Interfaces\ConfigInterface;
use TheBachtiarz\SerialNumber\Service\ApiKeyAccessService;
use TheBachtiarz\Toolkit\Cache\Service\Cache;
use TheBachtiarz\Toolkit\Helper\App\Converter\ArrayHelper;
use TheBachtiarz\Toolkit\Helper\App\Encryptor\EncryptorHelper;
/**
 * Cache-backed validation of serial-number API keys.
 *
 * Key statuses are stored encrypted in a single cache entry mapping
 * api key => bool; cache misses fall through to the remote server and
 * the cache is updated with the result.
 */
class CacheService
{
    use ArrayHelper, EncryptorHelper;
    /**
     * Api key currently being checked; set via setApiKey().
     *
     * @var string
     */
    protected static string $apiKey;
    // ? Public Methods
    /**
     * Get api key access data information.
     *
     * Checks the local cache first; on a miss the key is validated against
     * the server via checkToServer().
     *
     * NOTE(review): the `return` inside `finally` suppresses any exception
     * raised above it — confirm this best-effort behaviour is intended.
     *
     * @return array ['status' => bool, 'message' => string]
     */
    public static function access(): array
    {
        $result = ['status' => false, 'message' => ''];
        try {
            $_getCache = self::checkCacheData();
            if ($_getCache['found']) {
                $result['status'] = $_getCache['status'];
                $result['message'] = $_getCache['message'];
            } else {
                $_apiKeyInfo = self::checkToServer();
                $result['status'] = $_apiKeyInfo['status'];
                $result['message'] = $_apiKeyInfo['message'];
            }
        } catch (\Throwable $th) {
            $result['message'] = $th->getMessage();
        } finally {
            return $result;
        }
    }
    // ? Private Methods
    /**
     * Check api key status in cache data.
     *
     * Any failure (missing or undecryptable cache entry) re-seeds an empty
     * cache via initCacheData() and reports "not found".
     *
     * @return array ['found' => bool, 'status' => bool, 'message' => string]
     */
    private static function checkCacheData(): array
    {
        $result = ['found' => false, 'status' => false, 'message' => 'Api key not found'];
        try {
            throw_if(!Cache::has(ConfigInterface::SERIAL_NUMBER_CACHE_PREFIX_NAME), 'Exception', "Cache not found");
            // Decrypted entry is a map of api key => boolean status.
            $_cacheData = self::decrypt(Cache::get(ConfigInterface::SERIAL_NUMBER_CACHE_PREFIX_NAME));
            foreach ($_cacheData as $apiKey => $status) {
                if ($apiKey === static::$apiKey) {
                    $result['found'] = true;
                    $result['status'] = $status;
                    $result['message'] = sprintf("Api key is %s", ($status ? "OK" : "Expired"));
                    break;
                }
            }
        } catch (\Throwable $th) {
            self::initCacheData();
        } finally {
            return $result;
        }
    }
    /**
     * Check api key to server.
     *
     * The result is written into the cache before the status is evaluated,
     * so failed checks are cached as `false` as well.
     *
     * @return array ['status' => bool, 'message' => string]
     */
    private static function checkToServer(): array
    {
        $result = ['status' => false, 'message' => ''];
        try {
            $_getApiKeyInfo = ApiKeyAccessService::access(static::$apiKey);
            self::updateCacheData([
                self::$apiKey => $_getApiKeyInfo['data'] ?? false
            ]);
            throw_if(!$_getApiKeyInfo['status'], 'Exception', $_getApiKeyInfo['message']);
            $result['status'] = true;
            $result['message'] = "Api key is OK";
        } catch (\Throwable $th) {
            $result['message'] = $th->getMessage();
        } finally {
            return $result;
        }
    }
    /**
     * Init (reset) the cache entry to an empty, encrypted map.
     *
     * @return void
     */
    private static function initCacheData(): void
    {
        $_cacheInit = [];
        Cache::set(ConfigInterface::SERIAL_NUMBER_CACHE_PREFIX_NAME, self::simpleEncrypt($_cacheInit));
    }
    /**
     * Update cache data with a single api key => status pair.
     *
     * NOTE(review): assumes the cache entry already exists and decrypts
     * cleanly; callers reach this only after checkCacheData() ran — confirm.
     *
     * @param array $apiKeyinfo single-entry map [apiKey => status]
     * @return void
     */
    private static function updateCacheData(array $apiKeyinfo): void
    {
        $_currentCacheData = self::decrypt(Cache::get(ConfigInterface::SERIAL_NUMBER_CACHE_PREFIX_NAME));
        $_currentCacheData[key($apiKeyinfo)] = $apiKeyinfo[key($apiKeyinfo)];
        Cache::set(ConfigInterface::SERIAL_NUMBER_CACHE_PREFIX_NAME, self::simpleEncrypt($_currentCacheData));
    }
    // ? Setter Modules
    /**
     * Set api key for subsequent access() calls.
     *
     * @param string $apiKey api key
     * @return self
     */
    public static function setApiKey(string $apiKey): self
    {
        self::$apiKey = $apiKey;
        return new self;
    }
}
|
-- --------------------------------------------------------
-- Host: 127.0.0.1
-- Server version: 8.0.12 - MySQL Community Server - GPL
-- Server OS: Win64
-- HeidiSQL Version: 9.5.0.5196
-- --------------------------------------------------------
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET NAMES utf8 */;
/*!50503 SET NAMES utf8mb4 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-- Dumping database structure for gomusic
CREATE DATABASE IF NOT EXISTS `gomusic` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */;
USE `gomusic`;
-- Dumping structure for table gomusic.customers
-- Customer accounts. `pass` stores bcrypt hashes; `cc_customerid` links to
-- the payment provider's customer record; `deleted_at` enables soft deletes.
CREATE TABLE IF NOT EXISTS `customers` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `firstname` varchar(50) NOT NULL DEFAULT '0',
  `lastname` varchar(50) NOT NULL DEFAULT '0',
  `email` varchar(100) NOT NULL DEFAULT '0',
  `pass` varchar(100) NOT NULL DEFAULT '0',
  `cc_customerid` varchar(50) NOT NULL DEFAULT '0',
  `loggedin` tinyint(1) NOT NULL DEFAULT '0',
  `created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
  `updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  `deleted_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `email` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=22 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- Dumping data for table gomusic.customers: ~4 rows (approximately)
/*!40000 ALTER TABLE `customers` DISABLE KEYS */;
INSERT INTO `customers` (`id`, `firstname`, `lastname`, `email`, `pass`, `cc_customerid`, `loggedin`, `created_at`, `updated_at`, `deleted_at`) VALUES
(1, 'Mal', 'Zein', 'mal.zein@email.com', '$2a$10$ZeZI4pPPlQg89zfOOyQmiuKW9Z7pO9/KvG7OfdgjPAZF0Vz9D8fhC', 'cus_EL08toK8pfDcom', 0, '2018-08-14 07:52:54', '2019-03-03 19:01:16', NULL),
(2, 'River', 'Sam', 'river.sam@email.com', '$2a$10$mNbCLmfCAc0.4crDg3V3fe0iO1yr03aRfE7Rr3vdfKMGVnnzovCZq', '', 0, '2018-08-14 07:52:55', '2019-01-12 22:39:01', NULL),
(3, 'Jayne', 'Ra', 'jayne.ra@email.com', '$2a$10$ZeZI4pPPlQg89zfOOyQmiuKW9Z7pO9/KvG7OfdgjPAZF0Vz9D8fhC', 'cus_EL4GpQmVjwvUUZ', 0, '2018-08-14 07:52:55', '2019-01-13 21:56:05', NULL),
(19, 'John', 'Doe', 'john.doe@bla.com', '$2a$10$T4c8rmpbgKrUA0sIqtHCaO0g2XGWWxFY4IGWkkpVQOD/iuBrwKrZu', '', 0, '2019-01-13 08:43:44', '2019-01-13 15:12:25', NULL);
/*!40000 ALTER TABLE `customers` ENABLE KEYS */;
-- Dumping structure for table gomusic.orders
-- Orders join customers to products; `price` is the amount actually paid.
-- NOTE(review): no foreign keys are declared on customer_id/product_id --
-- referential integrity is enforced in application code, if at all.
CREATE TABLE IF NOT EXISTS `orders` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `customer_id` int(11) NOT NULL,
  `product_id` int(11) NOT NULL,
  `price` int(11) NOT NULL,
  `purchase_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  `updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  `deleted_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- Dumping data for table gomusic.orders: ~11 rows (approximately)
/*!40000 ALTER TABLE `orders` DISABLE KEYS */;
INSERT INTO `orders` (`id`, `customer_id`, `product_id`, `price`, `purchase_date`, `created_at`, `updated_at`, `deleted_at`) VALUES
(1, 1, 1, 90, '2018-12-29 23:34:32', '2018-12-29 23:35:36', '2018-12-29 23:35:36', NULL),
(2, 1, 2, 299, '2018-12-29 23:34:53', '2018-12-29 23:35:48', '2018-12-29 23:35:48', NULL),
(3, 1, 3, 16000, '2018-12-29 23:35:05', '2018-12-29 23:35:57', '2018-12-29 23:35:57', NULL),
(4, 2, 1, 95, '2018-12-29 23:36:18', '2018-12-29 23:36:18', '2018-12-29 23:36:18', NULL),
(5, 2, 2, 299, '2018-12-29 23:36:39', '2018-12-29 23:36:39', '2018-12-29 23:36:39', NULL),
(6, 2, 4, 205, '2018-12-29 23:37:01', '2018-12-29 23:38:13', '2018-12-29 23:38:13', NULL),
(7, 3, 4, 210, '2018-12-29 23:37:28', '2018-12-29 23:38:19', '2018-12-29 23:38:19', NULL),
(8, 3, 5, 200, '2018-12-29 23:37:41', '2018-12-29 23:38:28', '2018-12-29 23:38:28', NULL),
(9, 3, 6, 1000, '2018-12-29 23:37:54', '2018-12-29 23:38:32', '2018-12-29 23:38:32', NULL),
(10, 19, 6, 1000, '2018-12-29 23:37:54', '2019-01-13 00:44:55', '2019-01-13 00:44:55', NULL),
(11, 1, 3, 17000, '0000-00-00 00:00:00', '2019-01-14 06:03:08', '2019-01-14 06:03:08', NULL);
/*!40000 ALTER TABLE `orders` ENABLE KEYS */;
-- Dumping structure for table gomusic.products
-- Product catalog. `promotion` is the discounted price (NULL = no promo).
-- NOTE(review): prices are FLOAT here but INT in orders.price -- confirm the
-- intended currency precision.
CREATE TABLE IF NOT EXISTS `products` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `image` varchar(100) DEFAULT NULL,
  `smallimg` varchar(100) DEFAULT NULL,
  `imgalt` varchar(50) DEFAULT NULL,
  `description` text,
  `productname` varchar(50) DEFAULT NULL,
  `price` float DEFAULT NULL,
  `promotion` float DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
  `updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  `deleted_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- Dumping data for table gomusic.products: ~6 rows (approximately)
/*!40000 ALTER TABLE `products` DISABLE KEYS */;
INSERT INTO `products` (`id`, `image`, `smallimg`, `imgalt`, `description`, `productname`, `price`, `promotion`, `created_at`, `updated_at`, `deleted_at`) VALUES
(1, 'img/strings.png', 'img/img-small/strings.png', 'string', '', 'Strings', 100, NULL, '2018-08-14 07:54:19', '2019-01-11 00:28:40', NULL),
(2, 'img/redguitar.jpeg', 'img/img-small/redguitar.jpeg', 'redg', '', 'Red Guitar', 299, 240, '2018-08-14 07:54:20', '2019-01-11 00:29:11', NULL),
(3, 'img/drums.jpg', 'img/img-small/drums.jpg', 'drums', '', 'Drums', 17000, NULL, '2018-08-14 07:54:20', '2019-01-11 22:05:42', NULL),
(4, 'img/flute.jpeg', 'img/img-small/flute.jpeg', 'flute', '', 'Flute', 210, 190, '2018-08-14 07:54:20', '2019-01-11 00:29:53', NULL),
(5, 'img/blackguitar.jpeg', 'img/img-small/blackguitar.jpeg', 'Black guitar', '', 'Black Guitar', 200, NULL, '2018-08-14 07:54:20', '2019-01-11 00:30:12', NULL),
(6, 'img/saxophone.jpeg', 'img/img-small/saxophone.jpeg', 'Saxophone', '', 'Saxophone', 1000, 980, '2018-08-14 07:54:20', '2019-01-11 00:30:35', NULL);
/*!40000 ALTER TABLE `products` ENABLE KEYS */;
/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */;
/*!40014 SET FOREIGN_KEY_CHECKS=IF(@OLD_FOREIGN_KEY_CHECKS IS NULL, 1, @OLD_FOREIGN_KEY_CHECKS) */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.example.mkldnn.int8
import com.intel.analytics.bigdl.dataset.{DataSet, MiniBatch}
import com.intel.analytics.bigdl.models.resnet.ImageNetDataSet
import com.intel.analytics.bigdl.nn.{Graph, Module}
import com.intel.analytics.bigdl.utils.Engine
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
/**
 * GenerateInt8Scales generates a model with scales information,
 * which will be used with mkldnn int8. You can pass a model trained with
 * BigDL and it will generate a model whose name is the same except that it
 * includes "quantized".
 */
object GenerateInt8Scales {
  val logger: Logger = Logger.getLogger(getClass)
  Logger.getLogger("org").setLevel(Level.ERROR)
  Logger.getLogger("akka").setLevel(Level.ERROR)
  Logger.getLogger("breeze").setLevel(Level.ERROR)
  import Utils._
  /**
   * Computes int8 quantization scales for the model by forwarding one
   * sampled batch through it and calling calcScales. The model is mutated
   * in place.
   */
  def genereateInt8Scales(model: Graph[Float], modelName: String,
    evaluationSet: RDD[MiniBatch[Float]]): Unit = {
    model.evaluate()
    model.setInputDimMask(0, true)
    model.setOutputDimMask(0, true)
    model.setWeightDimMask(1, true)
    logger.info(s"Generate the scales for $modelName ...")
    val samples = evaluationSet
      .repartition(1) // repartition (shuffle) will have better accuracy
      .take(1) // only take one batch as the sample
      .map(_.getInput().toTensor[Float])
    samples.foreach { sample =>
      model.forward(sample)
      model.calcScales(sample)
    }
    // we should clean the state, such as output
    model.clearState()
    logger.info(s"Generate the scales for $modelName done.")
  }
  /**
   * Saves the model next to the original file with a ".quantized" infix,
   * overwriting any existing file of that name.
   */
  def saveQuantizedModel(model: Graph[Float], modelName: String): Unit = {
    val suffix = ".bigdl"
    val prefix = modelName.stripSuffix(suffix)
    val name = prefix.concat(".quantized").concat(suffix)
    logger.info(s"Save the quantized model $name ...")
    // it will force overWrite the existed model file
    model.saveModule(name, overWrite = true)
    logger.info(s"Save the quantized model $name done.")
  }
  def main(args: Array[String]): Unit = {
    genInt8ScalesParser.parse(args, GenInt8ScalesParams()).foreach { param =>
      val conf = Engine.createSparkConf().setAppName("Quantize the model")
        .set("spark.akka.frameSize", 64.toString)
        .set("spark.task.maxFailures", "1")
      val sc = new SparkContext(conf)
      Engine.init
      val partitionNum = Engine.nodeNumber()
      val imageFrame = DataSet.SeqFileFolder.filesToImageFrame(param.folder, sc, 1000,
        partitionNum = Option(partitionNum))
      // the transformer is the same as that used for validation during training
      val evaluationSet = ImageNetDataSet.valDataSet(param.folder,
        sc, 224, param.batchSize).toDistributed().data(train = false)
      // Currently, we only support the graph model, so we add a `toGraph`;
      // if the model is already a graph this is a no-op.
      val model = Module.loadModule[Float](param.model).toGraph()
      genereateInt8Scales(model, param.model, evaluationSet)
      saveQuantizedModel(model, param.model)
    }
  }
}
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import numpy as np
import os
class SlidingWindowTrainer():
    @staticmethod
    def SlidingTrainer(w: int, s: int, training_max_iter_vec: list, train, kwargs: dict, runInitialTrain=True):
        '''
        Repeatedly train over a sliding window of the data.

        Assumes ``kwargs['data']`` is sorted by date. The window starts as
        ``data[0:w]`` and advances ``s`` elements per iteration until its
        end reaches the end of the data.

        :param w: window size; must be smaller than ``len(data)``
        :param s: stride by which the window advances each iteration
        :param training_max_iter_vec: per-window ``max_iter`` values; length
            must equal the window count, ``ceil((T - w) / s) + 1``
        :param train: callable invoked as ``train(**kwargs)``; its return
            value is threaded into the next window as ``kwargs['pi']``
        :param kwargs: must contain ``'data'``, ``'max_iter'`` and
            ``'output_directory'``; mutated in place per window
        :param runInitialTrain: when True, run one initial training pass on
            the unmodified kwargs before the sliding windows
        '''
        print("Learning Initial Grid.")
        pi = None
        if runInitialTrain:
            pi = train(**kwargs)
        assert("data" in kwargs and "max_iter" in kwargs and "output_directory" in kwargs)
        data = kwargs['data']
        # Flatten the output directory into a safe sub-directory name.
        root_dir = kwargs["output_directory"].replace(
            ".", "").replace("/", "").replace("\\", "")
        T = len(data)
        assert(w < T)
        max_sliding_window_size = int(np.ceil((T - w) / s) + 1)
        assert(len(training_max_iter_vec) == max_sliding_window_size)
        first_index = 0
        last_index = min(w, T)
        for i in range(max_sliding_window_size):
            kwargs['max_iter'] = training_max_iter_vec[i]
            kwargs['data'] = data[first_index:last_index]
            kwargs['output_directory'] = f"./{root_dir}/iter{i}"
            kwargs['pi'] = pi
            # makedirs(exist_ok=True) is race-free and creates missing
            # parents, unlike the previous exists-check + os.mkdir pair.
            os.makedirs(kwargs['output_directory'], exist_ok=True)
            print(f"Learning grid window from first index: {first_index} to second index: {last_index}")
            pi = train(**kwargs)
            last_index = min(last_index + s, T)
            first_index = min(first_index + s, T)
        # The final window must have consumed the tail of the data.
        assert(last_index >= T)
|
package nosj
import (
"bufio"
"fmt"
)
// String is a Node implementation holding a JSON string value.
type String struct {
	s string
}

// NewString returns an empty String node.
func NewString() *String {
	return &String{}
}

// Parse reads a quoted string from s into the node and returns the node.
// NOTE(review): relies on the package-level ScanQuote helper; presumably
// the opening quote has already been consumed by the caller -- confirm.
func (str *String) Parse(s *bufio.Scanner) Node {
	str.s = ScanQuote(s)
	return str
}

// String renders the value as a JSON string literal.
// NOTE(review): no escaping is applied here; embedded quotes would yield
// invalid JSON unless ScanQuote keeps the text escaped -- confirm.
func (str *String) String() string {
	return fmt.Sprintf(`"%s"`, str.s)
}

// PrettyString returns the pretty-printed form (same as String for scalars).
func (str *String) PrettyString() string {
	return str.prettyString("")
}

// prettyString ignores indent because a string occupies a single line.
func (str *String) prettyString(indent string) string {
	return str.String()
}
|
# GitHub-Search
O GitHub Search é um app onde você pode ver informações do perfil de qualquer usuário do GitHub apenas digitando seu nome
Clique <a href="https://luanhma.github.io/GitHub-Search/">aqui</a> para acessar!
|
<?php
namespace decorator;
/**
* Created by PhpStorm.
* User: zhudong
* Date: 2017/7/14
* Time: 下午7:36
*/
/**
 * Director (decorator) that delegates publishing to a wrapped publisher.
 *
 * NOTE(review): "Pulisher"/"Derector"/"derect" look like misspellings of
 * "Publisher"/"Director"/"direct"; the names are kept because
 * PulisherInterface is declared elsewhere in the project.
 */
class PulisherDerector implements PulisherInterface {
    /** @var PulisherInterface|null publisher to delegate to; set via derect() */
    protected $pulisher = null;

    /** Sets the publisher this director delegates to. */
    function derect(PulisherInterface $pulisher) {
        $this->pulisher = $pulisher;
    }

    /**
     * Publishes text via the configured publisher.
     * NOTE(review): fails with a null-call error if derect() was never
     * called -- confirm callers always configure first.
     */
    public function pulishText() {
        $this->pulisher->pulishText();
    }
}
|
# AAC audio encode settings for a MediaLive channel.
# NOTE: generated from the AWS API definition; each Moose attribute maps a
# Perl-cased accessor onto the camelCase wire name via NameInRequest.
# See the POD below for per-attribute semantics.
package Paws::MediaLive::AacSettings;
  use Moose;
  has Bitrate => (is => 'ro', isa => 'Num', request_name => 'bitrate', traits => ['NameInRequest']);
  has CodingMode => (is => 'ro', isa => 'Str', request_name => 'codingMode', traits => ['NameInRequest']);
  has InputType => (is => 'ro', isa => 'Str', request_name => 'inputType', traits => ['NameInRequest']);
  has Profile => (is => 'ro', isa => 'Str', request_name => 'profile', traits => ['NameInRequest']);
  has RateControlMode => (is => 'ro', isa => 'Str', request_name => 'rateControlMode', traits => ['NameInRequest']);
  has RawFormat => (is => 'ro', isa => 'Str', request_name => 'rawFormat', traits => ['NameInRequest']);
  has SampleRate => (is => 'ro', isa => 'Num', request_name => 'sampleRate', traits => ['NameInRequest']);
  has Spec => (is => 'ro', isa => 'Str', request_name => 'spec', traits => ['NameInRequest']);
  has VbrQuality => (is => 'ro', isa => 'Str', request_name => 'vbrQuality', traits => ['NameInRequest']);
1;
### main pod documentation begin ###
=head1 NAME
Paws::MediaLive::AacSettings
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::MediaLive::AacSettings object:
$service_obj->Method(Att1 => { Bitrate => $value, ..., VbrQuality => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::MediaLive::AacSettings object:
$result = $service_obj->Method(...);
$result->Att1->Bitrate
=head1 DESCRIPTION
Placeholder documentation for AacSettings
=head1 ATTRIBUTES
=head2 Bitrate => Num
Average bitrate in bits/second. Valid values depend on rate control
mode and profile.
=head2 CodingMode => Str
Mono, Stereo, or 5.1 channel layout. Valid values depend on rate
control mode and profile. The adReceiverMix setting receives a stereo
description plus control track and emits a mono AAC encode of the
description track, with control data emitted in the PES header as per
ETSI TS 101 154 Annex E.
=head2 InputType => Str
Set to "broadcasterMixedAd" when input contains pre-mixed main audio +
AD (narration) as a stereo pair. The Audio Type field (audioType) will
be set to 3, which signals to downstream systems that this stream
contains "broadcaster mixed AD". Note that the input received by the
encoder must contain pre-mixed audio; the encoder does not perform the
mixing. The values in audioTypeControl and audioType (in
AudioDescription) are ignored when set to broadcasterMixedAd. Leave set
to "normal" when input does not contain pre-mixed audio + AD.
=head2 Profile => Str
AAC Profile.
=head2 RateControlMode => Str
Rate Control Mode.
=head2 RawFormat => Str
Sets LATM / LOAS AAC output for raw containers.
=head2 SampleRate => Num
Sample rate in Hz. Valid values depend on rate control mode and
profile.
=head2 Spec => Str
Use MPEG-2 AAC audio instead of MPEG-4 AAC audio for raw or MPEG-2
Transport Stream containers.
=head2 VbrQuality => Str
VBR Quality Level - Only used if rateControlMode is VBR.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::MediaLive>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
|
use super::nms::{NmsOutput, NonMaxSuppression, NonMaxSuppressionInit};
use crate::common::*;
use tch_goodies::detection::MergedDenseDetection;
/// Configuration for building a [`YoloInference`] post-processor.
#[derive(Debug)]
pub struct YoloInferenceInit {
    /// IoU threshold used by non-maximum suppression.
    pub nms_iou_thresh: R64,
    /// Confidence threshold below which detections are discarded.
    pub nms_conf_thresh: R64,
}

impl YoloInferenceInit {
    /// Builds the inference module; fails if the NMS parameters are
    /// rejected by [`NonMaxSuppressionInit::build`].
    pub fn build(self) -> Result<YoloInference> {
        let Self {
            nms_iou_thresh,
            nms_conf_thresh,
        } = self;
        let nms = NonMaxSuppressionInit {
            iou_threshold: nms_iou_thresh,
            confidence_threshold: nms_conf_thresh,
        }
        .build()?;
        Ok(YoloInference { nms })
    }
}
/// Post-processor converting dense YOLO predictions into a deduplicated
/// set of detections via non-maximum suppression.
#[derive(Debug)]
pub struct YoloInference {
    nms: NonMaxSuppression,
}

impl YoloInference {
    /// Runs NMS on `prediction`, then keeps exactly one detection per
    /// (batch, instance) pair: the class with the highest confidence.
    /// Runs under `no_grad`; output tensors live on the prediction's device.
    pub fn forward(&self, prediction: &MergedDenseDetection) -> YoloInferenceOutput {
        tch::no_grad(|| {
            let device = prediction.device();
            // run nms
            let NmsOutput {
                batches,
                classes,
                instances,
                bbox,
                confidence,
            } = self.nms.forward(prediction);
            // Indexes (into the NMS output) of the rows we keep.
            let selected = {
                let selected: Vec<i64> = izip!(
                    Vec::<i64>::from(&batches),
                    Vec::<i64>::from(&classes),
                    Vec::<i64>::from(&instances),
                    Vec::<f32>::from(&confidence)
                )
                .enumerate()
                // group up samples by (batch_index, instance_index)
                .map(|args| {
                    let (nms_index, (batch, class, instance, confidence)) = args;
                    ((batch, instance), (nms_index, class, r32(confidence)))
                })
                .into_group_map()
                .into_iter()
                // for each samples of the same (batch_index, instance_index),
                // pick the one with max confidence.
                // NOTE(review): r32() would panic on a NaN confidence --
                // presumably NMS has already filtered those; confirm.
                .map(|args| {
                    let ((_batch, _instance), triples) = args;
                    let (nms_index, _class, _confidence) = triples
                        .into_iter()
                        .max_by_key(|(_nms_index, _class, confidence)| *confidence)
                        .unwrap();
                    nms_index as i64
                })
                .collect();
                Tensor::of_slice(&selected).to_device(device)
            };
            // Gather the surviving rows out of each NMS output tensor.
            let selected_batches = batches.index(&[Some(&selected)]);
            let selected_classes = classes.index(&[Some(&selected)]);
            let selected_instances = instances.index(&[Some(&selected)]);
            let selected_bbox = bbox.index_select(&selected);
            let selected_confidence = confidence.index(&[Some(&selected)]);
            YoloInferenceOutput {
                batches: selected_batches,
                classes: selected_classes,
                instances: selected_instances,
                bbox: selected_bbox,
                confidence: selected_confidence,
            }
        })
    }
}
/// Final detections after NMS and per-(batch, instance) class selection.
/// All tensors are index-aligned along the `object` dimension.
#[derive(Debug, TensorLike)]
pub struct YoloInferenceOutput {
    /// Batch indexes in shape `[object]`.
    pub batches: Tensor,
    /// Class indexes in shape `[object]`.
    pub classes: Tensor,
    /// Object indexes in shape `[object]`.
    pub instances: Tensor,
    /// Box parameters.
    pub bbox: TLBRTensor,
    /// Confidence scores in shape `[object, 1]`.
    pub confidence: Tensor,
}
impl YoloInferenceOutput {
    /// Number of detections (length of the `object` dimension).
    pub fn num_samples(&self) -> i64 {
        self.batches.size1().unwrap()
    }

    /// Device on which the output tensors reside.
    pub fn device(&self) -> Device {
        self.batches.device()
    }

    /// Returns a new output containing only the rows selected by `indexes`,
    /// keeping all fields index-aligned.
    pub fn index_select(&self, indexes: &Tensor) -> Self {
        let Self {
            batches,
            classes,
            instances,
            bbox,
            confidence,
        } = self;
        Self {
            batches: batches.index(&[Some(indexes)]),
            classes: classes.index(&[Some(indexes)]),
            instances: instances.index(&[Some(indexes)]),
            bbox: bbox.index_select(indexes),
            confidence: confidence.index(&[Some(indexes)]),
        }
    }

    /// Returns only the detections belonging to `batch_index`.
    pub fn batch_select(&self, batch_index: i64) -> Self {
        let indexes = self.batches.eq(batch_index).nonzero().view([-1]);
        self.index_select(&indexes)
    }
}
|
// Barrel file re-exporting the server-side model/room/user helper functions.
// NOTE(review): keep the export order as-is — any module-level side effects
// run in import order; confirm before reordering.
export { addUserToDefaultChannels } from './addUserToDefaultChannels';
export { addUserToRoom } from './addUserToRoom';
export { archiveRoom } from './archiveRoom';
export { attachMessage } from './attachMessage';
export { checkEmailAvailability } from './checkEmailAvailability';
export { checkUsernameAvailability } from './checkUsernameAvailability';
export { cleanRoomHistory } from './cleanRoomHistory';
export { createRoom } from './createRoom';
export { createDirectRoom } from './createDirectRoom';
export { deleteMessage } from './deleteMessage';
export { deleteRoom } from './deleteRoom';
export { deleteUser } from './deleteUser';
export { getRoomByNameOrIdWithOptionToJoin } from './getRoomByNameOrIdWithOptionToJoin';
export { getUserSingleOwnedRooms } from './getUserSingleOwnedRooms';
export { generateUsernameSuggestion } from './getUsernameSuggestion';
export { insertMessage } from './insertMessage';
export { isTheLastMessage } from './isTheLastMessage';
export { loadMessageHistory } from './loadMessageHistory';
export { processWebhookMessage } from './processWebhookMessage';
export { removeUserFromRoom } from './removeUserFromRoom';
export { relinquishRoomOwnerships } from './relinquishRoomOwnerships';
export { saveCustomFields } from './saveCustomFields';
export { saveCustomFieldsWithoutValidation } from './saveCustomFieldsWithoutValidation';
export { saveUser } from './saveUser';
export { saveUserIdentity } from './saveUserIdentity';
export { sendMessage } from './sendMessage';
export { setEmail } from './setEmail';
export { setRealName, _setRealName } from './setRealName';
export { setStatusText, _setStatusText, _setStatusTextPromise } from './setStatusText';
export { getStatusText } from './getStatusText';
export { setUserAvatar } from './setUserAvatar';
export { _setUsername, setUsername } from './setUsername';
export { unarchiveRoom } from './unarchiveRoom';
export { updateMessage } from './updateMessage';
export { validateCustomFields } from './validateCustomFields';
export { validateName } from './validateName';
|
I know how to code in **Java** and I am new to **C++**, but I am sure it will be *similar* enough to be understood.
|
#!/bin/bash
# Publishes the contents of ./build to the gh-pages branch of
# SlateFoundation/slate-cbl, recording in the commit message which source
# commit the build was generated from.

# Fail fast: without this, a failed `cd build` would run every subsequent
# git command against the wrong directory.
set -e

if [ ! -d "build" ]; then
  echo "Build directory does not exist, running ./build.sh"
  ./build.sh
fi
cd build
if [ ! -d ".git" ]; then
  echo "Git not initialized in build directory, cloning gh-pages"
  git init
  git remote add origin git@github.com:SlateFoundation/slate-cbl.git
fi
echo "Ensuring HEAD matches remote gh-pages head"
git fetch origin gh-pages
git symbolic-ref HEAD refs/remotes/origin/gh-pages
# Resolve the source repository's current commit (three levels up from the
# build checkout) so the gh-pages commit records its provenance.
SOURCE_COMMIT=$(git --git-dir=../../../.git rev-parse HEAD)
echo "Committing build for SlateFoundation/slate-cbl@$SOURCE_COMMIT"
git add --all
# `git commit` exits non-zero when there is nothing to commit; treat that as
# a no-op instead of aborting the push under `set -e`.
git commit -m "Update jsduck build to SlateFoundation/slate-cbl@$SOURCE_COMMIT" || echo "Nothing to commit"
echo "Pushing to origin/gh-pages"
git push -u origin HEAD:gh-pages
|
[1.0.0]: https://github.com/real-digital/half-flake/commits/1.0.0
# Changelog
All notable changes to this project will be documented in this file.
## [1.0.0] - 2019-06-27
The first release as a separated library
|
-- @testpoint: openGauss reserved keyword COLLATE used as a cursor name; some cases are expected to fail with a reasonable error
-- Precondition: recreate the test table
drop table if exists collate_test cascade;
create table collate_test(cid int,fid int);
-- Keyword without quotes - fails
start transaction;
cursor collate for select * from collate_test order by 1;
close collate;
end;
-- Keyword in double quotes - succeeds
start transaction;
cursor "collate" for select * from collate_test order by 1;
close "collate";
end;
-- Keyword in single quotes - fails as expected
start transaction;
cursor 'collate' for select * from collate_test order by 1;
close 'collate';
end;
-- Keyword in backquotes - fails as expected
start transaction;
cursor `collate` for select * from collate_test order by 1;
close `collate`;
end;
drop table if exists collate_test cascade;
|
ori $ra,$ra,0xf
mflo $6
mthi $0
srav $0,$6,$2
ori $4,$5,38212
mflo $5
mthi $4
mtlo $4
sb $3,15($0)
mflo $0
ori $0,$4,30354
div $6,$ra
mthi $1
sll $5,$3,1
lb $4,16($0)
lb $4,1($0)
mflo $4
lui $0,42531
ori $2,$2,37357
sb $5,1($0)
mfhi $5
divu $2,$ra
div $5,$ra
lui $2,20042
addiu $3,$1,-14581
srav $0,$4,$3
addu $4,$3,$3
sll $0,$1,5
ori $0,$0,52823
multu $4,$2
mfhi $6
lb $1,0($0)
mthi $1
mtlo $4
lui $4,46272
srav $0,$0,$3
srav $4,$2,$4
mflo $6
addiu $0,$0,1117
multu $1,$4
divu $0,$ra
sb $1,1($0)
addu $4,$4,$3
div $1,$ra
div $1,$ra
sb $4,5($0)
sll $2,$2,7
addu $4,$4,$5
mult $5,$5
multu $3,$1
sb $1,1($0)
addiu $6,$6,-26490
addu $1,$0,$4
mult $6,$1
lb $4,5($0)
sb $2,9($0)
lui $0,60916
mfhi $4
mtlo $0
div $4,$ra
mflo $6
addiu $1,$4,-14692
div $4,$ra
sb $0,4($0)
addu $6,$5,$3
multu $5,$0
addiu $4,$4,6099
sll $5,$5,15
addu $1,$6,$6
mult $1,$4
divu $1,$ra
mflo $4
addiu $5,$4,20047
div $4,$ra
mflo $5
mfhi $0
srav $4,$4,$4
addiu $1,$3,-22504
mflo $4
multu $1,$3
lb $0,3($0)
mult $3,$2
lui $2,58031
addu $5,$2,$4
div $4,$ra
mtlo $1
div $3,$ra
div $1,$ra
mfhi $0
multu $6,$6
multu $4,$2
lb $0,15($0)
lb $2,5($0)
addiu $5,$2,-20202
ori $6,$6,47311
lui $4,58563
mult $1,$5
multu $1,$6
ori $2,$2,40785
div $6,$ra
addiu $3,$1,-260
mflo $5
mult $4,$4
addu $6,$6,$6
mtlo $1
mtlo $4
lb $0,4($0)
mult $1,$1
divu $3,$ra
srav $4,$1,$6
lui $4,41782
div $0,$ra
div $1,$ra
addiu $4,$0,25423
ori $4,$1,43356
addiu $5,$1,-21926
srav $1,$6,$6
srav $4,$2,$2
addiu $4,$3,1018
lb $5,10($0)
mfhi $3
ori $6,$6,50460
sb $0,1($0)
addiu $5,$5,13209
sll $4,$5,1
mflo $2
srav $6,$3,$3
addu $3,$3,$3
mthi $6
srav $5,$2,$3
multu $6,$2
sll $5,$5,24
addiu $4,$6,-24818
multu $6,$4
sb $4,10($0)
mult $5,$1
lui $4,63595
mtlo $1
lb $4,13($0)
div $0,$ra
sb $5,6($0)
addiu $6,$6,14220
lb $6,12($0)
mfhi $4
div $1,$ra
lui $5,44799
lui $2,45954
ori $4,$2,54435
mtlo $3
sll $6,$0,28
multu $5,$4
sb $1,7($0)
lui $2,64053
addu $2,$2,$1
lui $2,12075
sb $3,9($0)
ori $5,$1,2179
mflo $3
lb $2,2($0)
srav $2,$4,$2
mthi $4
lb $3,5($0)
sb $2,5($0)
divu $4,$ra
ori $3,$6,51668
mtlo $5
sb $2,12($0)
addiu $1,$4,10180
div $5,$ra
div $2,$ra
div $4,$ra
mtlo $4
sll $0,$0,18
lb $2,12($0)
mthi $0
mthi $5
mfhi $1
div $5,$ra
lb $4,12($0)
multu $5,$3
ori $3,$2,21933
divu $6,$ra
mthi $0
addiu $4,$2,-11650
lb $4,4($0)
lb $5,8($0)
div $1,$ra
mflo $4
lui $1,13990
addu $6,$2,$2
ori $4,$5,45286
mthi $5
lui $6,60411
mtlo $1
sb $4,0($0)
addiu $5,$5,-27127
mtlo $2
mult $1,$4
mflo $4
multu $4,$2
lb $6,5($0)
ori $5,$6,46721
srav $4,$2,$4
div $4,$ra
mtlo $5
mfhi $4
mult $4,$2
div $5,$ra
lb $1,3($0)
lb $3,6($0)
sb $6,2($0)
mult $6,$1
addu $0,$3,$3
div $1,$ra
divu $0,$ra
mflo $1
div $1,$ra
multu $0,$2
lb $0,0($0)
mthi $2
mtlo $2
mult $2,$4
lui $4,44072
ori $3,$1,56304
ori $0,$0,22610
multu $5,$6
mtlo $4
multu $4,$4
multu $5,$2
ori $4,$2,64954
divu $4,$ra
sb $1,13($0)
sll $4,$0,29
srav $5,$2,$4
mult $5,$6
div $4,$ra
multu $5,$5
srav $1,$2,$2
sb $5,3($0)
lui $5,34385
mtlo $5
sll $4,$5,6
sb $5,3($0)
mflo $3
sb $6,2($0)
addu $3,$3,$3
srav $3,$4,$3
srav $1,$0,$3
lb $3,5($0)
ori $4,$4,34756
lb $2,8($0)
multu $2,$2
mfhi $0
srav $4,$1,$5
addiu $5,$2,-7723
lb $2,7($0)
sb $1,11($0)
mfhi $5
mflo $1
ori $1,$2,5235
ori $2,$2,168
divu $4,$ra
mult $4,$4
mthi $2
addiu $1,$4,19060
lui $4,24067
lb $4,12($0)
mthi $0
mthi $1
sb $4,5($0)
lb $5,2($0)
mtlo $1
mtlo $1
addiu $5,$0,-24002
div $0,$ra
divu $0,$ra
mflo $3
lui $5,2532
lui $3,29846
lui $5,5195
mflo $0
lb $5,2($0)
srav $5,$2,$2
mtlo $5
multu $5,$5
lui $3,52929
div $6,$ra
mult $5,$4
addu $1,$2,$5
divu $1,$ra
srav $5,$2,$2
ori $3,$3,47626
addiu $3,$6,3897
divu $5,$ra
addu $4,$2,$4
mult $3,$6
addu $4,$0,$4
srav $4,$2,$2
sb $1,6($0)
mtlo $6
mthi $5
addiu $4,$1,-21801
mflo $3
mult $1,$2
divu $1,$ra
mfhi $2
lui $1,21790
sll $4,$1,31
srav $2,$2,$2
lui $4,16230
mult $1,$4
mthi $6
divu $0,$ra
srav $1,$6,$6
divu $4,$ra
mfhi $0
mult $0,$6
mthi $4
mtlo $5
lb $5,12($0)
mthi $4
srav $4,$1,$2
addu $1,$4,$2
ori $4,$2,64579
mult $1,$1
div $2,$ra
mflo $3
addiu $4,$4,5872
ori $3,$4,64732
mfhi $4
divu $4,$ra
mtlo $4
multu $1,$5
lb $0,10($0)
div $4,$ra
srav $1,$2,$3
sb $1,3($0)
addu $6,$1,$2
mtlo $4
sll $6,$6,27
sll $1,$1,23
lb $4,13($0)
multu $1,$3
mflo $5
divu $3,$ra
sb $2,5($0)
div $6,$ra
divu $5,$ra
addu $4,$2,$4
sb $0,12($0)
sb $4,5($0)
mult $5,$3
div $1,$ra
mult $4,$4
mfhi $5
addiu $1,$0,7684
mfhi $4
addu $4,$2,$2
mfhi $6
mflo $1
sb $1,16($0)
mflo $2
divu $1,$ra
divu $4,$ra
sll $1,$5,24
mult $0,$4
srav $4,$4,$6
mult $0,$6
lb $5,11($0)
lb $5,3($0)
multu $1,$2
lui $4,42762
srav $3,$4,$3
addu $4,$4,$4
srav $3,$1,$3
div $1,$ra
srav $1,$3,$3
mtlo $4
mfhi $0
sb $3,1($0)
mult $1,$1
sb $1,16($0)
sb $4,14($0)
lb $4,1($0)
mtlo $2
multu $5,$2
srav $3,$3,$3
mtlo $2
divu $4,$ra
lb $1,0($0)
mfhi $4
lui $4,60992
mfhi $3
mfhi $4
multu $3,$2
sb $4,7($0)
addiu $5,$4,29802
lui $1,1309
srav $0,$2,$2
mfhi $6
lui $5,3580
sll $6,$6,2
multu $0,$1
addiu $2,$2,-12714
mfhi $0
mtlo $4
lui $6,12280
ori $2,$2,11305
div $3,$ra
sb $1,10($0)
mflo $6
mthi $5
mthi $5
sll $4,$0,1
ori $1,$2,11959
addiu $4,$1,24476
div $5,$ra
sb $5,6($0)
mthi $1
sb $4,5($0)
div $4,$ra
divu $5,$ra
mthi $3
div $1,$ra
sb $5,13($0)
div $0,$ra
addiu $4,$4,21765
mult $3,$2
srav $1,$4,$2
multu $0,$2
multu $2,$2
srav $1,$1,$5
divu $4,$ra
mflo $3
div $1,$ra
mtlo $5
mult $5,$1
mthi $5
addu $6,$2,$2
multu $4,$4
mflo $5
multu $1,$0
addiu $5,$5,27195
div $4,$ra
lui $4,36278
sb $4,15($0)
mtlo $4
divu $1,$ra
sb $5,15($0)
divu $0,$ra
ori $1,$1,10549
mfhi $4
sll $4,$4,2
lb $4,1($0)
srav $6,$1,$6
lb $1,7($0)
mtlo $0
mtlo $1
lui $4,43892
srav $3,$1,$3
divu $1,$ra
sll $5,$5,20
lui $6,56972
srav $4,$4,$3
mtlo $5
sll $3,$3,22
mult $6,$3
ori $1,$2,43726
mult $1,$1
div $4,$ra
mtlo $4
lb $2,1($0)
mfhi $4
lui $0,24077
mult $1,$6
lb $6,16($0)
srav $1,$2,$5
mtlo $4
divu $3,$ra
ori $5,$5,17173
lui $0,6822
lui $4,11032
lb $2,7($0)
div $1,$ra
addu $5,$1,$5
lb $6,8($0)
mtlo $2
lb $5,13($0)
srav $4,$4,$1
lb $4,15($0)
mtlo $6
mthi $6
sll $1,$4,5
sb $2,2($0)
mfhi $5
ori $4,$4,15366
mfhi $2
mult $1,$1
addu $6,$6,$3
multu $2,$2
divu $1,$ra
mfhi $3
mflo $5
div $6,$ra
div $4,$ra
addu $1,$2,$4
mtlo $4
mfhi $1
addiu $2,$2,1270
mult $6,$6
mfhi $5
lui $6,57845
srav $5,$4,$1
sb $6,14($0)
lb $5,5($0)
mfhi $4
div $4,$ra
mult $3,$3
mtlo $3
mthi $0
addu $4,$4,$3
sll $0,$1,25
lb $1,10($0)
addiu $1,$4,-16813
div $1,$ra
mthi $1
sll $4,$2,12
mflo $4
div $1,$ra
lui $1,10000
addiu $1,$1,-24974
srav $4,$0,$0
lb $3,10($0)
addiu $4,$0,19110
sb $2,4($0)
addu $5,$1,$1
div $5,$ra
divu $1,$ra
sb $0,14($0)
srav $2,$2,$2
addu $4,$4,$5
addiu $3,$1,-31399
mflo $0
sll $4,$4,28
lui $3,2649
divu $4,$ra
lb $3,5($0)
addiu $4,$4,-2406
addiu $3,$3,-3055
divu $5,$ra
lui $4,51036
div $5,$ra
mthi $1
sll $2,$6,23
multu $1,$1
lui $4,8422
lb $5,12($0)
mtlo $1
sll $4,$1,0
addu $2,$6,$2
sll $4,$4,5
mthi $4
srav $6,$2,$6
div $2,$ra
addiu $6,$6,-10053
mtlo $5
lui $5,12278
sb $5,4($0)
mthi $4
ori $2,$2,30801
mtlo $1
mult $1,$1
ori $4,$1,30939
sll $6,$0,18
sll $0,$5,28
mult $0,$4
addu $2,$2,$2
mflo $4
lb $0,15($0)
div $0,$ra
lb $1,7($0)
addiu $5,$6,-28154
multu $1,$2
sll $5,$5,30
lb $5,9($0)
sb $5,11($0)
addu $2,$4,$2
sb $4,9($0)
mflo $1
divu $4,$ra
lui $3,12706
lui $4,16935
mult $5,$0
sll $4,$1,13
sb $0,7($0)
addu $1,$4,$6
sll $4,$2,16
addu $5,$2,$5
divu $1,$ra
mtlo $5
mflo $0
sll $5,$5,27
lui $6,19151
mthi $0
addiu $6,$1,-19532
srav $4,$4,$4
addu $4,$5,$2
mflo $1
mult $5,$5
mult $1,$4
mtlo $4
mtlo $4
divu $4,$ra
mflo $1
mflo $4
mult $4,$4
mult $0,$2
mthi $1
mtlo $4
div $4,$ra
sll $4,$2,19
srav $4,$5,$3
addu $5,$0,$2
addu $1,$1,$1
mthi $5
div $4,$ra
mthi $5
multu $0,$2
addiu $1,$4,27015
sb $5,6($0)
sb $2,14($0)
lui $4,11375
addiu $4,$4,30554
sb $4,9($0)
sb $5,3($0)
sb $4,5($0)
srav $1,$4,$4
srav $2,$2,$1
div $5,$ra
lb $1,10($0)
divu $0,$ra
addu $5,$2,$3
multu $4,$4
multu $6,$6
mtlo $4
lui $5,38551
multu $1,$2
div $4,$ra
mtlo $6
lb $1,3($0)
addiu $5,$1,-25410
sb $3,7($0)
multu $1,$0
mthi $6
lb $0,1($0)
sll $1,$2,20
mtlo $4
lui $2,3235
mult $5,$4
mthi $2
lb $4,12($0)
mthi $1
sb $0,9($0)
mfhi $2
lb $3,0($0)
addiu $4,$6,-5330
sb $5,13($0)
divu $5,$ra
lb $2,15($0)
srav $5,$4,$3
mflo $6
mthi $5
div $6,$ra
mthi $4
sll $4,$4,28
mfhi $0
mflo $3
multu $5,$5
mthi $2
addu $3,$2,$3
mtlo $5
sb $2,9($0)
multu $4,$4
div $3,$ra
srav $4,$5,$4
mult $6,$0
ori $4,$4,63762
lui $5,44592
addiu $4,$5,-4498
addiu $0,$4,7937
mthi $2
div $4,$ra
lui $1,42479
multu $4,$3
addu $0,$6,$2
mfhi $4
addu $0,$0,$1
multu $1,$4
sb $1,7($0)
mfhi $5
mtlo $1
multu $5,$6
addiu $5,$0,1340
sll $0,$0,30
sll $1,$6,3
sll $4,$1,21
srav $5,$4,$4
mtlo $1
multu $4,$4
addiu $5,$5,32564
mthi $5
addiu $2,$1,11809
addiu $0,$2,27856
addiu $1,$5,-25617
multu $4,$4
lb $4,3($0)
multu $3,$1
lb $5,3($0)
divu $3,$ra
mflo $1
div $5,$ra
mult $4,$4
lb $1,10($0)
addu $2,$2,$1
mult $4,$4
mfhi $6
sb $5,6($0)
multu $2,$2
lui $0,47114
addu $1,$1,$5
addu $6,$5,$6
divu $5,$ra
sll $0,$2,31
ori $1,$2,34013
mtlo $0
ori $3,$3,58756
addu $4,$4,$4
multu $1,$2
lui $0,45863
mflo $1
multu $0,$0
sll $4,$6,2
addiu $0,$1,-8507
ori $5,$5,64286
addu $1,$4,$1
divu $5,$ra
mfhi $5
srav $1,$5,$2
sll $1,$2,10
div $5,$ra
lui $4,35271
lb $5,1($0)
div $2,$ra
lb $2,3($0)
lui $5,24071
mflo $6
multu $5,$1
mthi $4
sb $4,3($0)
sb $1,16($0)
sb $5,9($0)
addu $4,$4,$4
divu $2,$ra
sb $2,9($0)
sll $5,$4,28
div $2,$ra
ori $5,$3,14751
div $5,$ra
mflo $6
srav $2,$2,$3
sb $4,7($0)
addu $5,$2,$5
srav $1,$6,$3
divu $0,$ra
mflo $2
addu $3,$3,$3
lui $2,21281
mthi $0
multu $1,$1
divu $4,$ra
sll $0,$1,5
multu $3,$4
mult $6,$2
sb $6,4($0)
mthi $5
mult $1,$2
mult $4,$4
addu $1,$4,$4
mult $6,$6
divu $1,$ra
mtlo $4
addu $5,$5,$6
div $4,$ra
mthi $5
mthi $4
mflo $5
lb $4,7($0)
sb $5,14($0)
mfhi $6
lui $2,7455
lb $4,13($0)
mult $5,$2
mfhi $5
srav $1,$1,$3
mtlo $4
ori $0,$6,6537
div $3,$ra
sb $1,5($0)
addu $1,$4,$3
mfhi $1
addu $4,$2,$3
mfhi $4
lb $5,6($0)
div $0,$ra
mflo $3
divu $5,$ra
div $1,$ra
mfhi $5
mfhi $3
addu $4,$1,$3
mflo $1
ori $5,$2,18580
lb $5,5($0)
srav $2,$6,$2
ori $6,$0,14011
ori $2,$5,19351
divu $5,$ra
sll $6,$2,31
mtlo $4
multu $4,$4
ori $2,$2,31184
addiu $5,$5,-32247
lui $3,4216
div $5,$ra
sll $0,$0,20
mult $6,$5
mfhi $3
ori $6,$1,3272
sll $5,$4,23
sb $6,0($0)
sll $4,$4,13
mflo $2
lb $2,6($0)
mfhi $6
sll $4,$4,6
sb $3,8($0)
div $0,$ra
mflo $4
lui $4,59605
ori $4,$5,41688
mflo $3
multu $2,$2
lb $2,11($0)
sll $4,$4,23
mthi $0
divu $3,$ra
sll $4,$2,0
addu $6,$2,$1
mult $2,$2
lui $5,53140
divu $1,$ra
mflo $4
mult $6,$4
divu $5,$ra
mthi $5
ori $4,$2,62713
lui $4,10319
mthi $4
sll $4,$3,9
mtlo $2
ori $5,$4,32134
multu $1,$3
sb $4,7($0)
srav $4,$4,$1
mult $1,$1
multu $4,$4
sll $5,$2,5
sll $1,$1,30
lui $2,30387
mfhi $5
div $5,$ra
divu $5,$ra
mflo $6
lb $4,4($0)
mfhi $2
mfhi $1
lb $4,15($0)
mfhi $4
lb $6,8($0)
|
#!/bin/bash
# Builds the application's docker image. This file is a klueless template:
# the {{dashify settings.application}} placeholders are expanded by the
# template engine before the script runs.
source 'common.sh'
kl_heading 'Setup docker'
kl_cmd 'build docker image klueless/web-{{dashify settings.application}}'
# Build context is the repository root (one level up from this script's dir).
docker image build -t klueless/web-{{dashify settings.application}} ../.
kl_cmd_end
|
package com.wavesplatform.dex.api
import java.time.LocalDateTime
import akka.http.scaladsl.model.ws.{BinaryMessage, Message, TextMessage}
import akka.http.scaladsl.server.Route
import akka.stream.Materializer
import akka.stream.scaladsl.{Flow, Sink, Source}
import com.wavesplatform.dex.api.http.ApiRoute
import io.swagger.annotations.Api
import javax.ws.rs.Path
import scala.concurrent.duration._
@Path("/ws")
@Api(value = "/web sockets/")
case class MatcherWebSocketRoute()(implicit mat: Materializer) extends ApiRoute {

  // Echo-style demo flow: wraps each incoming text message in "Hello <text>!".
  // Binary messages are drained via Sink.ignore (so the stream is not stalled)
  // and produce no reply.
  private def greeter: Flow[Message, Message, Any] = Flow[Message].mapConcat {
    case tm: TextMessage => TextMessage(Source.single("Hello ") ++ tm.textStream ++ Source.single("!")) :: Nil
    case bm: BinaryMessage => bm.dataStream.runWith(Sink.ignore); Nil
  }

  // Server-push demo flow: cancels all inbound messages (Sink.cancelled) and
  // emits the current time once per second, starting after a one-second delay.
  private def time: Flow[Message, Message, _] = {
    val sink = Sink.cancelled[Message]
    val source = Source.tick(1.second, 1.second, "").map(_ => TextMessage(s"Now is ${LocalDateTime.now}"))
    Flow.fromSinkAndSource(sink, source)
  }

  // GET /ws/greeter and GET /ws/time upgrade the connection to the
  // corresponding WebSocket flow.
  override def route: Route = pathPrefix("ws") {
    path("greeter") {
      get {
        handleWebSocketMessages(greeter)
      }
    } ~ path("time") {
      get {
        handleWebSocketMessages(time)
      }
    }
  }
}
|
-- Publication query for forest management plan / stand data (awjf.wap_bst),
-- joined with the cantonal municipality boundaries to resolve the
-- municipality name. Numeric lookup codes are decoded into human-readable
-- German labels via CASE expressions; archived rows are excluded.
SELECT
    ogc_fid AS t_id,
    ST_Multi(wkb_geometry) AS geometrie,
    id_wp,
    fid_amtei,
    fid_fk,
    fid_fr,
    wirt_zone,
    gem_bfs,
    hoheitsgrenzen_gemeindegrenze.gemeindename,
    fid_we,
    round(gb_flaeche,0) AS gb_flaeche,
    we_text,
    fid_eigcod,
    -- Ownership category, decoded to "<code> - <label>"
    CASE
        WHEN fid_eig = 1000
            THEN '1000 - Bundeswald'
        WHEN fid_eig = 2000
            THEN '2000 - Staatswald'
        WHEN fid_eig = 3100
            THEN '3100 - Bürgergemeinde'
        WHEN fid_eig = 3200
            THEN '3200 - Einwohnergemeinde'
        WHEN fid_eig = 3300
            THEN '3300 - Einheitsgemeinde'
        WHEN fid_eig = 4000
            THEN '4000 - Öffentlich (gemischt)'
        WHEN fid_eig = 5000
            THEN '5000 - Gemischt öffentlich privat'
        WHEN fid_eig = 6000
            THEN '6000 - Privat'
        WHEN fid_eig = 7000
            THEN '7000 - Privat (gemischt)'
    END AS fid_eig,
    fid_prod,
    -- Forest function / plan number, decoded
    CASE
        WHEN wpnr = 501
            THEN '501 - Wirtschaftswald'
        WHEN wpnr = 502
            THEN '502 - Schutzwald'
        WHEN wpnr = 503
            THEN '503 - Erholungswald'
        WHEN wpnr = 504
            THEN '504 - Natur und Landschaft'
        WHEN wpnr = 505
            THEN '505 - Schutzwald / Natur und Landschaft'
        WHEN wpnr = 509
            THEN '509 - Nicht Wald'
    END AS wpnr,
    -- Surface type, decoded
    CASE
        WHEN wptyp = 1
            THEN '1 - Mit Wald bestockt'
        WHEN wptyp = 2
            THEN '2 - Niederhaltezone'
        WHEN wptyp = 3
            THEN '3 - Waldstrasse'
        WHEN wptyp = 4
            THEN '4 - Maschinenweg'
        WHEN wptyp = 5
            THEN '5 - Bauten und Anlagen'
        WHEN wptyp = 6
            THEN '6 - Rodungsfläche (temporär)'
        WHEN wptyp = 7
            THEN '7 - Gewässer'
        WHEN wptyp = 8
            THEN '8 - Abbaustelle'
        WHEN wptyp = 9
            THEN '9 - Nicht Wald'
    END AS wptyp,
    -- Operational unit; label built dynamically with concat_ws, unlike the
    -- other decodes which hard-code the "<code> - <label>" string.
    CASE
        WHEN betriebsteil = 1
            THEN concat_ws(' - ', betriebsteil, 'Talwald')
        WHEN betriebsteil = 2
            THEN concat_ws(' - ', betriebsteil, 'Bergwald')
        WHEN betriebsteil = 3
            THEN concat_ws(' - ', betriebsteil, 'Vorberg')
        WHEN betriebsteil = 4
            THEN concat_ws(' - ', betriebsteil, 'Sonnseite')
        WHEN betriebsteil = 5
            THEN concat_ws(' - ', betriebsteil, 'Schattseite')
        WHEN betriebsteil = 6
            THEN concat_ws(' - ', betriebsteil, 'Lebern-Klus')
        WHEN betriebsteil = 7
            THEN concat_ws(' - ', betriebsteil, 'Aebisholz')
        WHEN betriebsteil = 8
            THEN concat_ws(' - ', betriebsteil, 'Jurawald')
        WHEN betriebsteil = 9
            THEN concat_ws(' - ', betriebsteil, 'Bornwald')
    END AS betriebsteil,
    fid_abt,
    bstnr,
    bsttyp,
    -- Stand development stage, decoded from code ranges
    CASE
        WHEN
            bsttyp >= 10
            AND
            bsttyp <= 14
            THEN 'Jungwuchs/Dickung (10-14)'
        WHEN
            bsttyp >= 21
            AND
            bsttyp <= 24
            THEN 'Stangenholz (21-24)'
        WHEN
            bsttyp >= 31
            AND
            bsttyp <= 34
            THEN 'Schwaches Baumholz (31-34)'
        WHEN
            bsttyp >= 41
            AND
            bsttyp <= 44
            THEN 'Mittleres Baumholz (41-44)'
        WHEN
            bsttyp >= 51
            AND
            bsttyp <= 54
            THEN 'Starkes Baumholz (51-54)'
        WHEN
            bsttyp >= 61
            AND
            bsttyp <= 64
            THEN 'St. Baumholz aufgelockert (61-64)'
        WHEN
            bsttyp >= 70
            AND
            bsttyp <= 74
            THEN 'Übriger Wald ausser Bewirtschaftung (70-74)'
        WHEN bsttyp = 75
            THEN 'Altholzinsel (75)'
        WHEN bsttyp = 76
            THEN 'Andere Förderfläche (76)'
        WHEN bsttyp = 77
            THEN 'Waldrand (77)'
        WHEN bsttyp = 79
            THEN 'Waldreservat mit Vereinbarung (79)'
        WHEN
            bsttyp >= 81
            AND
            bsttyp <= 84
            THEN 'Dauerwald / Plenterwald (81-84)'
        WHEN
            bsttyp >= 210
            AND
            bsttyp <= 214
            THEN 'Mittelwald (210-214)'
        WHEN
            bsttyp >= 200
            AND
            bsttyp <= 204
            THEN 'Niederwald (200-204)'
    END AS bsttyp_text,
    wpinfo,
    bemerkung,
    round(flae_gis::numeric,0) AS flae_gis,
    round(korr_flae::numeric,0) AS korr_flae,
    round(wpflae::numeric,0) AS wpflae,
    zeitstand,
    beschrift,
    x_beschr,
    y_beschr,
    objnummer
FROM
    awjf.wap_bst
    LEFT JOIN agi_hoheitsgrenzen_pub.hoheitsgrenzen_gemeindegrenze
        ON hoheitsgrenzen_gemeindegrenze.bfs_gemeindenummer = wap_bst.gem_bfs
WHERE
    archive = 0
;
|
package com.pubnub.api.managers;
import com.pubnub.api.PubNub;
import com.pubnub.api.callbacks.ReconnectionCallback;
import com.pubnub.api.enums.PNReconnectionPolicy;
import lombok.extern.slf4j.Slf4j;
import java.util.Timer;
import java.util.TimerTask;
@Slf4j
public class DelayedReconnectionManager {
    private static final int DELAY_SECONDS = 3;
    private static final int MILLISECONDS = 1000;

    /** Policy from the PubNub configuration; null or NONE disables delayed reconnects. */
    private final PNReconnectionPolicy pnReconnectionPolicy;

    /** Callback invoked when the delay elapses; set via {@link #setReconnectionListener}. */
    private ReconnectionCallback callback;

    private final PubNub pubnub;

    /**
     * Timer for the delayed reconnection task.
     */
    private Timer timer;

    public DelayedReconnectionManager(PubNub pubnub) {
        this.pubnub = pubnub;
        this.pnReconnectionPolicy = pubnub.getConfiguration().getReconnectionPolicy();
    }

    /**
     * Schedules a one-shot reconnection attempt three seconds from now.
     * Any previously scheduled attempt is cancelled first, so at most one
     * task is ever pending. Does nothing when the reconnection policy is
     * unset or NONE.
     */
    public void scheduleDelayedReconnection() {
        stop();
        if (isReconnectionPolicyUndefined()) {
            return;
        }

        // Daemon timer so a pending reconnection never keeps the JVM alive.
        timer = new Timer("Delayed Reconnection Manager timer", true);
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                callTime();
            }
        }, DELAY_SECONDS * MILLISECONDS);
    }

    public void setReconnectionListener(ReconnectionCallback reconnectionCallback) {
        this.callback = reconnectionCallback;
    }

    /** Cancels any pending reconnection task; safe to call when none is scheduled. */
    void stop() {
        if (timer != null) {
            timer.cancel();
            timer = null;
        }
    }

    private boolean isReconnectionPolicyUndefined() {
        if (pnReconnectionPolicy == null || pnReconnectionPolicy == PNReconnectionPolicy.NONE) {
            log.warn("reconnection policy is disabled, please handle reconnection manually.");
            return true;
        }
        return false;
    }

    private void callTime() {
        stop();
        // Guard against the timer firing before a listener was registered;
        // previously this threw a NullPointerException on the timer thread.
        if (callback != null) {
            callback.onReconnection();
        }
    }
}
|
package com.chaidarun.chronofile
import android.os.Bundle
import android.util.Log
import android.view.View
import android.view.ViewGroup
import android.widget.CheckBox
import android.widget.RadioButton
import android.widget.TextView
import androidx.appcompat.app.AlertDialog
import com.jakewharton.rxbinding2.view.RxView
import io.reactivex.disposables.CompositeDisposable
import kotlinx.android.synthetic.main.activity_graph.endDate
import kotlinx.android.synthetic.main.activity_graph.graphTabs
import kotlinx.android.synthetic.main.activity_graph.graphToolbar
import kotlinx.android.synthetic.main.activity_graph.graphViewPager
import kotlinx.android.synthetic.main.activity_graph.quickRange
import kotlinx.android.synthetic.main.activity_graph.startDate
import kotlinx.android.synthetic.main.fragment_area.areaIsGrouped
import kotlinx.android.synthetic.main.fragment_area.areaIsStacked
import kotlinx.android.synthetic.main.fragment_pie.pieIsGrouped
import kotlinx.android.synthetic.main.fragment_radar.radarIsGrouped
// Activity hosting the statistics graphs (pie/area/radar tabs in a ViewPager)
// plus the date-range controls in the toolbar area.
class GraphActivity : BaseActivity() {

  // Quick-select ranges offered by the "quick range" dialog; duration is in
  // seconds and is subtracted from the current activity's start time.
  private enum class PresetRange(val text: String, val duration: Long) {
    TODAY("Today", DAY_SECONDS),
    PAST_WEEK("Past week", 7 * DAY_SECONDS),
    PAST_MONTH("Past month", 30 * DAY_SECONDS),
    ALL_TIME("All time", Long.MAX_VALUE)
  }

  override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.activity_graph)
    setSupportActionBar(graphToolbar)
    // Keep all tabs alive off-screen so switching does not re-render charts.
    graphViewPager.run {
      adapter = GraphPagerAdapter(supportFragmentManager)
      currentItem = GraphPagerAdapter.Tab.PIE.ordinal
      offscreenPageLimit = GraphPagerAdapter.Tab.values().size
    }
    graphTabs.setupWithViewPager(graphViewPager)

    // Set tab font by walking the TabLayout's internal view hierarchy
    // https://stackoverflow.com/a/31067431
    with(graphTabs.getChildAt(0) as ViewGroup) {
      val tabsCount = childCount
      for (i in 0 until tabsCount) {
        with(getChildAt(i) as ViewGroup) {
          val tabChildsCount = childCount
          (0 until tabChildsCount).map { getChildAt(it) }.forEach {
            (it as? TextView)?.typeface = App.instance.typeface
          }
        }
      }
    }

    // Local copies of the store's current range so the date pickers can open
    // seeded with the currently selected endpoints.
    var startTime: Long? = null
    var endTime: Long? = null
    setPresetRange(Store.state.history!!, PresetRange.PAST_MONTH)
    disposables =
      CompositeDisposable().apply {
        // Mirror store range changes into the start/end date buttons.
        add(
          Store.observable.map { it.graphConfig.startTime }.distinctUntilChanged().subscribe {
            startTime = it
            if (it != null) startDate.text = formatDate(it)
          }
        )
        add(
          Store.observable.map { it.graphConfig.endTime }.distinctUntilChanged().subscribe {
            endTime = it
            if (it != null) endDate.text = formatDate(it)
          }
        )
        // Tapping a date button opens a picker for that endpoint.
        add(
          RxView.clicks(startDate).subscribe {
            DatePickerFragment()
              .apply {
                arguments =
                  Bundle().apply {
                    putString(DatePickerFragment.ENDPOINT, "start")
                    putLong(DatePickerFragment.TIMESTAMP, startTime ?: epochSeconds())
                  }
              }
              .show(supportFragmentManager, "datePicker")
          }
        )
        add(
          RxView.clicks(endDate).subscribe {
            DatePickerFragment()
              .apply {
                arguments =
                  Bundle().apply {
                    putString(DatePickerFragment.ENDPOINT, "end")
                    putLong(DatePickerFragment.TIMESTAMP, endTime ?: epochSeconds())
                  }
              }
              .show(supportFragmentManager, "datePicker")
          }
        )
        // Quick-range dialog: single-choice list of PresetRange entries.
        add(
          RxView.clicks(quickRange).subscribe {
            with(AlertDialog.Builder(this@GraphActivity, R.style.MyAlertDialogTheme)) {
              setSingleChoiceItems(PresetRange.values().map { it.text }.toTypedArray(), -1, null)
              setPositiveButton("OK") { dialog, _ ->
                val position = (dialog as AlertDialog).listView.checkedItemPosition
                setPresetRange(Store.state.history!!, PresetRange.values()[position])
              }
              setNegativeButton("Cancel", null)
              show()
            }
          }
        )
      }
  }

  // Dispatches a new graph range of [now - duration, now], clamped so the
  // start never precedes the first recorded entry.
  private fun setPresetRange(history: History, presetRange: PresetRange) {
    Log.i(TAG, "Setting range to $presetRange")
    val now = history.currentActivityStartTime
    val startTime =
      Math.max(now - presetRange.duration, history.entries.getOrNull(0)?.startTime ?: 0)
    Store.dispatch(Action.SetGraphRangeStart(startTime))
    Store.dispatch(Action.SetGraphRangeEnd(now))
  }

  // Presumably wired from layout XML via android:onClick — confirm; routes
  // each checkbox to its corresponding store action.
  fun onCheckboxClicked(view: View) {
    with(view as CheckBox) {
      when (id) {
        R.id.areaIsGrouped -> Store.dispatch(Action.SetGraphGrouping(areaIsGrouped.isChecked))
        R.id.areaIsStacked -> Store.dispatch(Action.SetGraphStacking(areaIsStacked.isChecked))
        R.id.pieIsGrouped -> Store.dispatch(Action.SetGraphGrouping(pieIsGrouped.isChecked))
        R.id.radarIsGrouped -> Store.dispatch(Action.SetGraphGrouping(radarIsGrouped.isChecked))
      }
    }
  }

  // Presumably wired from layout XML via android:onClick — confirm; only the
  // newly-checked radio button triggers a dispatch.
  fun onRadioButtonClicked(view: View) {
    with(view as RadioButton) {
      if (!isChecked) return
      when (id) {
        R.id.radioAverage -> Store.dispatch(Action.SetGraphMetric(Metric.AVERAGE))
        R.id.radioTotal -> Store.dispatch(Action.SetGraphMetric(Metric.TOTAL))
      }
    }
  }
}
|
import { AxiosResponse } from "axios";
import * as ProfileTypes from "./profiles";
import * as EventTypes from "./events";
import * as TenantTypes from "./tenants";
import * as AppTypes from "./apps";
import { QueryParams } from "./queryBuilder"
import { FilteredResponse } from "./sdkResponse";
import { AggregateEventType, AggregateSessionType } from "../utils/constants";
/** Promise resolving to a raw Axios HTTP response. */
export type AxiosRes = Promise<AxiosResponse>;

/** Settings required to reach the API and Elasticsearch backends. */
export interface ConnectionData {
    url: string;
    auth: {
        username: string;
        password: string;
    };
    elasticUrl: string;
    //TODO: here we have to see how to enable authentication
}

/**
 * The SDK surface returned by the connection factory, grouped by resource.
 * Every operation resolves to a FilteredResponse.
 */
export interface Connection {
    /** Profile CRUD, properties, sessions and aggregate counts. */
    profile: {
        create: (profileData: ProfileTypes.CreateProperties) => FilteredResponse;
        get: (profileId: string) => FilteredResponse;
        delete: (profileId: string) => FilteredResponse;
        countByApp: (query: QueryParams[]) => FilteredResponse;
        existingProperties: (params: ProfileTypes.ExistingProperties) => FilteredResponse;
        allProperties: () => FilteredResponse;
        countSessionByApp: (query: QueryParams[]) => FilteredResponse;
        sessions: (profileId: string) => FilteredResponse;
        getSession: (sessionId: string) => FilteredResponse;
        getBySingleProperty: (params: ProfileTypes.GetByProperty) => FilteredResponse;
        query: (params: ProfileTypes.QueryConfig, query: QueryParams[]) => FilteredResponse;
        totalVisits: (query: QueryParams[]) => FilteredResponse
    },
    /** Segment creation. */
    segment: {
        create: (params: object) => FilteredResponse
    },
    /** Rule creation and lookup. */
    rule: {
        create: (params: object) => FilteredResponse,
        getAll: () => FilteredResponse,
        get: (rule: string) => FilteredResponse
    },
    /** Event querying, per-session lookup, and counts. */
    event: {
        query: (params: EventTypes.QueryConfig, query: QueryParams[]) => FilteredResponse,
        getBySession: (sessionId: string, params: EventTypes.SessionEventQueryConfig) => FilteredResponse,
        countByApp: (query: QueryParams[]) => FilteredResponse
    },
    /** Tenant registration and management. */
    tenant: {
        register: (tenantData: TenantTypes.CreateProperties) => FilteredResponse;
        get: (tenantName: string) => FilteredResponse;
        getAll: () => FilteredResponse;
        delete: (tenantKey: string) => FilteredResponse;
    },
    /** Application registration under a tenant. */
    app: {
        get: (tenantKey:string) => FilteredResponse;
        register: (appData: AppTypes.CreateAppProperties) => FilteredResponse;
        delete: (appData: AppTypes.CreateAppProperties) => FilteredResponse;
        validateKey : (tenantKey: string) => FilteredResponse;
    },
    /** Aggregations over events and sessions. */
    aggregate: {
        event: (aggregateType: AggregateEventType, query: QueryParams[]) => FilteredResponse;
        session: (aggregateType: AggregateSessionType, query: QueryParams[]) => FilteredResponse
    }
}
|
/***************************************************************************
qgsprocessingmodelcomponent.h
-----------------------------
begin : June 2017
copyright : (C) 2017 by Nyall Dawson
email : nyall dot dawson at gmail dot com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#ifndef QGSPROCESSINGMODELCOMPONENT_H
#define QGSPROCESSINGMODELCOMPONENT_H
#include "qgis_core.h"
#include "qgis.h"
#include <QPointF>
///@cond NOT_STABLE
/**
* Represents a component of a model algorithm.
* \ingroup core
* \since QGIS 3.0
*/
class CORE_EXPORT QgsProcessingModelComponent
{
  public:

    /**
     * Returns the friendly description text for the component.
     * \see setDescription()
     */
    QString description() const;

    /**
     * Sets the friendly \a description text for the component.
     * \see description()
     */
    void setDescription( const QString &description );

    /**
     * Returns the position of the model component within the graphical modeler.
     * \see setPosition()
     */
    QPointF position() const;

    /**
     * Sets the \a position of the model component within the graphical modeler.
     * \see position()
     */
    void setPosition( QPointF position );

  protected:

    //! Only subclasses can be created
    QgsProcessingModelComponent( const QString &description = QString() );

    //! Copies are protected to avoid slicing
    QgsProcessingModelComponent( const QgsProcessingModelComponent &other ) = default;

    //! Copies are protected to avoid slicing
    QgsProcessingModelComponent &operator=( const QgsProcessingModelComponent &other ) = default;

    /**
     * Saves the component properties to a QVariantMap.
     * \see restoreCommonProperties()
     */
    void saveCommonProperties( QVariantMap &map ) const;

    /**
     * Restores the component properties from a QVariantMap.
     * \see saveCommonProperties()
     */
    void restoreCommonProperties( const QVariantMap &map );

  private:

    //! Position of component within model
    QPointF mPosition;

    //! Friendly description of component
    QString mDescription;

};
///@endcond
#endif // QGSPROCESSINGMODELCOMPONENT_H
|
# A Request-Response Example with Status Code
A sample [Express](http://expressjs.com/) application to demonstrate an HTTP request-response exchange with status codes.
## Run locally
1. Install [Node.js and npm](https://nodejs.org/)
1. Run `git clone https://github.com/ywdeng/wp19-node-05-response-400.git`
1. Run `cd wp19-node-05-response-400`
1. Run `npm install`
1. Run `npm install -g nodemon`
1. Run `npm run test`
1. Visit [http://localhost:3000](http://localhost:3000)
|
namespace Hades.Syntax.Lexeme
{
    /// <summary>
    /// Coarse classification assigned to each lexed token.
    /// Member order determines the implicit numeric values — do not reorder.
    /// </summary>
    public enum Category
    {
        Unknown,
        WhiteSpace,
        Comment,
        Literal,
        Identifier,
        Grouping,
        Punctuation,
        Operator,
        Invalid,
        Other,
        Assignment,
        LeftHand,
        RightHand
    }
}
|
# -*- coding: utf-8 -*-
require 'open-uri'
require 'nokogiri'
# Cinch plugin: replies to the "excuse" command with a random programmer
# excuse scraped from programmerexcuses.com, or a shrug if anything fails.
class Excuse
  include Cinch::Plugin

  match /excuse/, :use_prefix => true

  def execute m
    page = Nokogiri::HTML(open "http://www.programmerexcuses.com")
    m.reply page.at('a').content
  rescue
    m.reply "Meh ¯\\_(ツ)_/¯"
  end
end
|
<?php
/**
 * Factory that maps a numeric flow-state type code to a concrete
 * __*FlowState instance.
 */
class __FlowStateFactory {

    const ACTION_STATE   = 1;
    const START_STATE    = 2;
    const END_STATE      = 3;
    const DECISION_STATE = 4;
    const SUBFLOW_STATE  = 5;

    /**
     * Creates the flow-state object for the given type code.
     *
     * @param int|string $state_type One of the *_STATE class constants.
     * @return object The corresponding flow-state instance.
     * @throws Exception If the type code is not recognized.
     */
    static public function createState($state_type) {
        switch ((int) $state_type) {
            case self::ACTION_STATE:
                return new __ActionFlowState();
            case self::START_STATE:
                return new __StartFlowState();
            case self::END_STATE:
                return new __EndFlowState();
            case self::DECISION_STATE:
                return new __DecisionFlowState();
            case self::SUBFLOW_STATE:
                return new __SubFlowState();
            default:
                // Fixed typo in the original message ("Unknow" -> "Unknown");
                // the `break` after `throw` was unreachable and is removed.
                throw __ExceptionFactory::getInstance()->createException('Unknown flow state type: ' . $state_type);
        }
    }
}
|
#!/bin/bash
# Scaffolds a new executable Node.js program file at a user-supplied location:
# prompts for a name and directory, writes a minimal IIFE skeleton, and marks
# the file executable.
#
# NOTE: the original shebang was /bin/sh, but the script relies on bash-only
# features ($BASH_SOURCE, [[ ]], arrays), so bash is declared explicitly.

execpath=$(dirname "$BASH_SOURCE")

# Prints the current UNIX timestamp (seconds since the epoch) on stdout.
# Bash `return` only sets an exit status (0-255), so values must be passed
# back via stdout and captured with $(...), not via `return`.
getTimestamp() {
  date +%s
}

# Prints an MD5 hash seeded with the current timestamp on stdout.
# (The original assigned the literal string "getTimestamp" instead of
# calling the function.)
generateHash() {
  md5 -qs "$(getTimestamp)"
}

echo "\nYour program name:"
read programname
echo "\nYour program location:"
read programloc

if [[ -n "$programname" && -n "$programloc" ]]; then
  parts=("$programloc" "$programname")
  # printf -v path '/%s' "${parts[@]%/}";
  path="$programloc$programname"
  # sed 's|/\+|/|g' $path;
  path="$path".js
  echo "#!/usr/bin/env node \n\n(function(context, undefined){\n\n})(this);" > "$path"
  chmod 755 "$path"
  ls -la "$path"
else
  echo "\nYou must supply a program name!"
  exit 1
fi
|
/** Difficulty of a hint request; higher levels reveal more letters. */
export enum HintLevel{ 'EASY', 'LIGHT', 'MEDIUM', 'EXTERME' }

/** A single revealed letter: the character and its position in the word. */
export interface Hint {
    char: String;
    pos: Number;
}

/**
 * A word-scramble puzzle: holds the original word, a shuffled copy, and can
 * produce position-accurate hints.
 */
export class Word {
    /** The original, unscrambled word. */
    public word: String;
    /** A random permutation of [word]. */
    public scramWord: String;

    constructor(word: String) {
        this.word = word;
        this.scramWord = this.scrambleWord();
    }

    /**
     * Returns a uniformly random permutation of the word.
     *
     * Uses a Fisher-Yates shuffle: the previous sort-with-random-comparator
     * approach is biased and relies on implementation-defined behavior,
     * since Array.prototype.sort requires a consistent comparator.
     */
    private scrambleWord(): String {
        const chars = this.word.split('');
        for (let i = chars.length - 1; i > 0; i--) {
            const j = Math.floor(Math.random() * (i + 1));
            [chars[i], chars[j]] = [chars[j], chars[i]];
        }
        return chars.join('');
    }

    /**
     * Returns hints revealing a fraction of the word's letters (20%-80%
     * depending on [level]), or null when the word is too short for the
     * level to reveal at least one letter.
     *
     * Hints are drawn from distinct random positions, so each hint reveals
     * a different letter and `pos` always matches the returned `char`.
     * (The old version could repeat positions and always reported the first
     * occurrence of a duplicated letter.)
     */
    public hintWord(level: HintLevel): Hint[] | null {
        let ratio: number;
        switch (level) {
            case HintLevel.EASY:
                ratio = 0.2;
                break;
            case HintLevel.LIGHT:
                ratio = 0.4;
                break;
            case HintLevel.MEDIUM:
                ratio = 0.6;
                break;
            case HintLevel.EXTERME:
                ratio = 0.8;
                break;
            default:
                ratio = 0;
        }
        const count = Math.floor(this.scramWord.length * ratio);
        if (count === 0) return null;

        // Shuffle all positions, then take the first `count` of them.
        const positions = Array.from({ length: this.word.length }, (_, i) => i);
        for (let i = positions.length - 1; i > 0; i--) {
            const j = Math.floor(Math.random() * (i + 1));
            [positions[i], positions[j]] = [positions[j], positions[i]];
        }
        return positions.slice(0, count).map(pos => ({
            char: this.word[pos],
            pos,
        }));
    }
}
|
/*
* Copyright (C) 2019 Open Source Robotics Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <ignition/common/Profiler.hh>
#include <ignition/transport/Node.hh>
#include "ignition/sensors/AltimeterSensor.hh"
#include "ignition/sensors/Noise.hh"
#include "ignition/sensors/SensorFactory.hh"
#include "ignition/sensors/SensorTypes.hh"
using namespace ignition;
using namespace sensors;
/// \brief Private data for AltimeterSensor
class ignition::sensors::AltimeterSensorPrivate
{
  /// \brief node to create publisher
  public: transport::Node node;
  /// \brief publisher to publish altimeter messages.
  public: transport::Node::Publisher pub;
  /// \brief true if Load() has been called and was successful
  public: bool initialized = false;
  /// \brief Vertical position in meters, relative to verticalReference
  /// (see AltimeterSensor::SetPosition, which subtracts the reference).
  public: double verticalPosition = 0.0;
  /// \brief Vertical velocity in meters per second
  public: double verticalVelocity = 0.0;
  /// \brief Vertical reference, i.e. initial sensor position
  public: double verticalReference = 0.0;
  /// \brief Noise added to sensor data, keyed by the measurement channel
  /// the noise model applies to (only configured channels get an entry).
  public: std::map<SensorNoiseType, NoisePtr> noises;
};
//////////////////////////////////////////////////
// Construct the sensor with its private data block; all real setup happens
// later in Load().
AltimeterSensor::AltimeterSensor()
  : dataPtr(new AltimeterSensorPrivate())
{
}

//////////////////////////////////////////////////
// Out-of-line destructor: dataPtr is presumably a smart pointer declared in
// the header, and defining the destructor here lets it delete the complete
// AltimeterSensorPrivate type. NOTE(review): confirm against the header.
AltimeterSensor::~AltimeterSensor()
{
}

//////////////////////////////////////////////////
// Initialization is fully delegated to the base Sensor class.
bool AltimeterSensor::Init()
{
  return this->Sensor::Init();
}
//////////////////////////////////////////////////
/// \brief Load the sensor from an SDF Sensor DOM object.
/// Validates the sensor type, advertises the altimeter topic, and creates
/// the configured noise models.
/// \return True when the sensor was successfully configured.
bool AltimeterSensor::Load(const sdf::Sensor &_sdf)
{
  // Base-class loading (name, topic, etc.) must succeed first.
  if (!Sensor::Load(_sdf))
    return false;

  if (_sdf.Type() != sdf::SensorType::ALTIMETER)
  {
    // Fixed wording: was "Attempting to a load an Altimeter sensor".
    ignerr << "Attempting to load an Altimeter sensor, but received "
      << "a " << _sdf.TypeStr() << std::endl;
    return false;
  }

  if (_sdf.AltimeterSensor() == nullptr)
  {
    ignerr << "Attempting to load an Altimeter sensor, but received "
      << "a null sensor." << std::endl;
    return false;
  }

  // Fall back to a default topic when none was configured.
  if (this->Topic().empty())
    this->SetTopic("/altimeter");

  this->dataPtr->pub =
    this->dataPtr->node.Advertise<ignition::msgs::Altimeter>(this->Topic());

  if (!this->dataPtr->pub)
  {
    ignerr << "Unable to create publisher on topic[" << this->Topic() << "].\n";
    return false;
  }

  igndbg << "Altimeter data for [" << this->Name() << "] advertised on ["
    << this->Topic() << "]" << std::endl;

  // Load the noise parameters: only channels with configured noise get a
  // model; the rest stay noise-free (see Update()).
  if (_sdf.AltimeterSensor()->VerticalPositionNoise().Type()
      != sdf::NoiseType::NONE)
  {
    this->dataPtr->noises[ALTIMETER_VERTICAL_POSITION_NOISE_METERS] =
      NoiseFactory::NewNoiseModel(
        _sdf.AltimeterSensor()->VerticalPositionNoise());
  }

  if (_sdf.AltimeterSensor()->VerticalVelocityNoise().Type()
      != sdf::NoiseType::NONE)
  {
    this->dataPtr->noises[ALTIMETER_VERTICAL_VELOCITY_NOISE_METERS_PER_S] =
      NoiseFactory::NewNoiseModel(
        _sdf.AltimeterSensor()->VerticalVelocityNoise());
  }

  this->dataPtr->initialized = true;
  return true;
}
//////////////////////////////////////////////////
// Convenience overload: parse the raw SDF element into a sdf::Sensor DOM
// object and delegate to the typed Load() overload.
bool AltimeterSensor::Load(sdf::ElementPtr _sdf)
{
  sdf::Sensor sdfSensor;
  sdfSensor.Load(_sdf);
  return this->Load(sdfSensor);
}
//////////////////////////////////////////////////
bool AltimeterSensor::Update(const std::chrono::steady_clock::duration &_now)
{
  IGN_PROFILE("AltimeterSensor::Update");
  // Refuse to publish until Load() has succeeded.
  if (!this->dataPtr->initialized)
  {
    ignerr << "Not initialized, update ignored.\n";
    return false;
  }

  // Build the message: stamped with the provided time, tagged with the
  // sensor's name as the frame_id.
  msgs::Altimeter msg;
  *msg.mutable_header()->mutable_stamp() = msgs::Convert(_now);
  auto frame = msg.mutable_header()->add_data();
  frame->set_key("frame_id");
  frame->add_value(this->Name());

  // Apply altimeter vertical position noise.
  // Note: the noisy value is written back into the stored state, so noise
  // accumulates across updates unless SetPosition() is called each cycle.
  if (this->dataPtr->noises.find(ALTIMETER_VERTICAL_POSITION_NOISE_METERS) !=
      this->dataPtr->noises.end())
  {
    this->dataPtr->verticalPosition =
      this->dataPtr->noises[ALTIMETER_VERTICAL_POSITION_NOISE_METERS]->Apply(
        this->dataPtr->verticalPosition);
  }

  // Apply altimeter vertical velocity noise (also written back, as above).
  if (this->dataPtr->noises.find(
      ALTIMETER_VERTICAL_VELOCITY_NOISE_METERS_PER_S) !=
      this->dataPtr->noises.end())
  {
    this->dataPtr->verticalVelocity =
      this->dataPtr->noises[
        ALTIMETER_VERTICAL_VELOCITY_NOISE_METERS_PER_S]->Apply(
          this->dataPtr->verticalVelocity);
  }

  msg.set_vertical_position(this->dataPtr->verticalPosition);
  msg.set_vertical_velocity(this->dataPtr->verticalVelocity);
  msg.set_vertical_reference(this->dataPtr->verticalReference);

  // publish
  this->AddSequence(msg.mutable_header());
  this->dataPtr->pub.Publish(msg);
  return true;
}
//////////////////////////////////////////////////
/// \brief Set the vertical reference (the "zero" altitude) in meters.
void AltimeterSensor::SetVerticalReference(double _reference)
{
  this->dataPtr->verticalReference = _reference;
}

//////////////////////////////////////////////////
/// \brief Get the vertical reference in meters.
double AltimeterSensor::VerticalReference() const
{
  return this->dataPtr->verticalReference;
}

//////////////////////////////////////////////////
/// \brief Set the current vertical position from an absolute position;
/// the stored value is made relative to the vertical reference.
void AltimeterSensor::SetPosition(double _pos)
{
  this->dataPtr->verticalPosition = _pos - this->dataPtr->verticalReference;
}

//////////////////////////////////////////////////
/// \brief Get the vertical position in meters, relative to the reference.
double AltimeterSensor::VerticalPosition() const
{
  return this->dataPtr->verticalPosition;
}

//////////////////////////////////////////////////
/// \brief Set the vertical velocity in meters per second.
void AltimeterSensor::SetVerticalVelocity(double _vel)
{
  this->dataPtr->verticalVelocity = _vel;
}

//////////////////////////////////////////////////
/// \brief Get the vertical velocity in meters per second.
double AltimeterSensor::VerticalVelocity() const
{
  return this->dataPtr->verticalVelocity;
}
|
from django import template
from django.utils.safestring import mark_safe
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import guess_lexer_for_filename, TextLexer
from pygments.util import ClassNotFound
register = template.Library()
@register.filter('highlight')
def highlight_filter(text, filename):
    """Render ``text`` as syntax-highlighted HTML.

    The lexer is guessed from ``filename`` and the file content, falling
    back to a plain-text lexer when Pygments does not recognise it. The
    result is marked safe for template output.
    """
    try:
        lexer = guess_lexer_for_filename(filename, text)
    except ClassNotFound:
        lexer = TextLexer()
    formatter = HtmlFormatter(linenos="table", lineanchors="line")
    return mark_safe(highlight(text, lexer, formatter))
@register.simple_tag
def highlight_css():
    """Return the CSS rules matching the formatter used by ``highlight``."""
    formatter = HtmlFormatter(linenos="table", lineanchors="line")
    return formatter.get_style_defs()
|
/**
 * Returns the hash code of a freshly created [Any] instance.
 *
 * Fixed: dropped the redundant `.toInt()` — [Any.hashCode] already
 * returns an [Int].
 */
fun funWithoutArgs(): Int {
    return Any().hashCode()
}
// Returns the hash code of the given value.
fun funWithAnyArg(value_1: Any): Int = value_1.hashCode()
// Returns the first of the supplied arguments. Indexing (rather than
// Array.first()) is kept so an empty call still throws the same
// ArrayIndexOutOfBoundsException as before.
fun <K> select(vararg x: K): K {
    return x[0]
}
// Variance-exercising helpers: each pulls a K out of the first wrapper
// argument. Inv/In/Out are declared elsewhere in this file; the `as K`
// casts are unchecked and assume the wrapper value can stand in for a K
// at runtime (looks like compiler/type-inference test fixtures).
fun <K> expandInv(vararg x: Inv<K>): K = x[0] as K
fun <K> expandIn(vararg x: In<K>): K = x[0] as K
fun <K> expandOut(vararg x: Out<K>): K = x[0] as K
// Same as above, but the wrapper's type argument is nullable while the
// return type is not — the cast strips the nullability.
fun <K> expandInvWithRemoveNullable(vararg x: Inv<K?>): K = x[0] as K
fun <K> expandInWithRemoveNullable(vararg x: In<K?>): K = x[0] as K
fun <K> expandOutWithRemoveNullable(vararg x: Out<K?>): K = x[0] as K
// Returns the first argument with nullability removed from its type.
// Fixed: the previous version cast the whole vararg array (`x as K`) instead
// of its first element, so callers received the Array object at runtime
// (the unchecked cast never fails, masking the bug).
fun <K> removeNullable(vararg x: K?): K = x[0] as K
|
{-# LANGUAGE DeriveDataTypeable #-}
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
module Thrift.Transport
( Transport(..)
, TransportExn(..)
, TransportExnType(..)
) where
import Control.Monad ( when )
import Control.Exception ( Exception, throw )
import Data.Typeable ( Typeable )
import qualified Data.ByteString.Lazy as LBS
import Data.Monoid
-- | A bidirectional byte transport used by Thrift protocols.
class Transport a where
    -- | Whether the transport is currently open.
    tIsOpen :: a -> IO Bool
    -- | Close the transport.
    tClose :: a -> IO ()
    -- | Read up to the given number of bytes; may return fewer (the
    -- 'tReadAll' default loops to compensate).
    tRead :: a -> Int -> IO LBS.ByteString
    -- | Write the given bytes.
    tWrite :: a -> LBS.ByteString -> IO ()
    -- | Flush any buffered writes.
    tFlush :: a -> IO ()
    -- | Read exactly the requested number of bytes, looping over 'tRead'.
    -- Throws a 'TransportExn' ('TE_UNKNOWN') when a read returns zero
    -- bytes, i.e. the remote side has closed the connection.
    tReadAll :: a -> Int -> IO LBS.ByteString

    tReadAll _ 0 = return mempty
    tReadAll a len = do
        result <- tRead a len
        let rlen = fromIntegral $ LBS.length result
        when (rlen == 0) (throw $ TransportExn "Cannot read. Remote side has closed." TE_UNKNOWN)
        if len <= rlen
            then return result
            else (result `mappend`) `fmap` (tReadAll a (len - rlen))
-- | Exception thrown on transport failures, carrying a human-readable
-- message and a classifying 'TransportExnType'.
data TransportExn = TransportExn String TransportExnType
  deriving ( Show, Typeable )
instance Exception TransportExn

-- | Classification codes for transport failures.
data TransportExnType
    = TE_UNKNOWN
    | TE_NOT_OPEN
    | TE_ALREADY_OPEN
    | TE_TIMED_OUT
    | TE_END_OF_FILE
  deriving ( Eq, Show, Typeable )
|
import * as github from '@actions/github';
import * as core from '@actions/core';
import * as formatter from './formatter';
import * as matrix from './matrix';
import Report from './Report';
import {TestFilter} from './TestFilter';
const GITHUB_SUMMARY_LIMIT = 50000;
/**
 * Truncates a string so that its UTF-8 encoding fits within `limit` bytes.
 *
 * Unlike a plain byte slice, this never cuts a multi-byte character in half:
 * continuation bytes (0b10xxxxxx) left dangling by the cut — and the lead
 * byte they belong to — are dropped, so the result decodes cleanly instead
 * of ending in U+FFFD.
 */
function truncateByBytesUTF8(str: string, limit: number): string {
  const bytes = new TextEncoder().encode(str);
  if (bytes.length <= limit) {
    return str;
  }
  let end = limit;
  // If the cut landed inside a multi-byte sequence, back up to (and drop)
  // that sequence's lead byte.
  while (end > 0 && (bytes[end] & 0xc0) === 0x80) {
    end--;
  }
  return new TextDecoder('utf-8').decode(bytes.slice(0, end));
}
/**
 * Creates a GitHub check run summarising the JUnit report.
 *
 * The markdown summary is truncated to GitHub's size limit, the check is
 * attached to the pull-request head commit when one is available (falling
 * back to the pushed sha), and the conclusion is "success" only when the
 * report contains tests and all of them passed.
 */
export async function create(token: string, report: Report, filter: TestFilter): Promise<CheckRun> {
  const markdown = formatter.toMarkdown(report, filter);
  const summary = truncateByBytesUTF8(markdown, GITHUB_SUMMARY_LIMIT);
  const name = matrix.getName('JUnit Report');

  const passed = report.hasTests() && report.isSuccesfull();
  const conclusion = passed ? ('success' as const) : ('failure' as const);

  const pullRequest = github.context.payload.pull_request;
  const headSha = (pullRequest && pullRequest.head.sha) || github.context.sha;

  const createCheckRequest = {
    ...github.context.repo,
    name,
    head_sha: headSha,
    status: 'completed' as const,
    conclusion,
    output: {
      title: name,
      summary
    }
  };
  core.debug(JSON.stringify(createCheckRequest, null, 2));

  const octokit = github.getOctokit(token);
  const response = await octokit.checks.create(createCheckRequest);
  return {
    id: response.data.id,
    nodeId: response.data.node_id,
    checkSuiteId: response.data.check_suite?.id,
    conclusion
  };
}
/**
 * Identifiers of the created check run, as returned by the GitHub API.
 */
export interface CheckRun {
  id: number,
  nodeId: string,
  /** Undefined when the API response carries no check suite. */
  checkSuiteId: number | undefined,
  /** Either 'success' or 'failure' (see create()). */
  conclusion: string
}
|
// Copyright (c) 2020 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
//! # Breakpoints
//!
//! This module lists the existing media breakpoints we use and contains some helper functions for working with them.
//!
//! It should be kept in sync with any breakpoint changes to `tailwind.config.js`.
use seed::window;
/// Media breakpoints; each discriminant is the *minimum* viewport width in
/// pixels at which the breakpoint applies (`XS`, at 0, matches everything).
/// Keep in sync with `tailwind.config.js` (see module docs).
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum Size {
    XS = 0,
    SM = 569,
    MD = 769,
    LG = 1025,
    XL = 1701,
}
/// Current `window.inner_width`, truncated to whole pixels.
///
/// Panics (via the two `expect`s) if the browser does not report a width or
/// reports a non-numeric value.
fn inner_width() -> u64 {
    window()
        .inner_width()
        .expect("Could not get inner_width")
        .as_f64()
        .expect("Could not parse inner_width to f64") as u64
}
/// Returns *maximum* matching breakpoint based on `window.inner_width`.
/// This models the behavior of breakpoints in tailwind: https://tailwindcss.com/docs/responsive-design/
pub(crate) fn size() -> Size {
    let width = inner_width();
    // Walk the breakpoints from widest to narrowest and keep the first one
    // whose minimum width is satisfied; `XS` (minimum 0) is the catch-all.
    [Size::XL, Size::LG, Size::MD, Size::SM]
        .iter()
        .copied()
        .find(|&s| width >= s as u64)
        .unwrap_or(Size::XS)
}
|
import React from 'react';
import { WrapProps } from '@interfaces/render';
// Minimal render wrapper: renders the given App component inside the given
// Container component, forwarding their respective props untouched.
export default class _Wrap extends React.Component<WrapProps, {}> {
  render() {
    const { Container, App, containerProps, appProps } = this.props;
    return (
      <Container {...containerProps}>
        <App {...appProps} />
      </Container>
    );
  }
}
|
package com.univocity.trader.exchange.interactivebrokers;
import java.util.*;
import static com.univocity.trader.exchange.interactivebrokers.TradeType.*;
/**
* Security types with defaults taken from https://interactivebrokers.github.io/tws-api/basic_contracts.html
*
* @author uniVocity Software Pty Ltd - <a href="mailto:dev@univocity.com">dev@univocity.com</a>
*/
public enum SecurityType {
	FOREX("CASH", "IDEALPRO", MIDPOINT, BID, ASK, BID_ASK),
	STOCKS("STK", "SMART", ADJUSTED_LAST, TRADES, MIDPOINT, BID, ASK, BID_ASK, HISTORICAL_VOLATILITY,
			OPTION_IMPLIED_VOLATILITY),
	ETFS("ETF", "SMART", TRADES, MIDPOINT, BID, ASK, BID_ASK, HISTORICAL_VOLATILITY, OPTION_IMPLIED_VOLATILITY),
	INDICES("IND", "DTB", TRADES, HISTORICAL_VOLATILITY, OPTION_IMPLIED_VOLATILITY),
	CFDS("CFD", "SMART", MIDPOINT, BID, ASK, BID_ASK),
	FUTURES("FUT", "GLOBEX", TRADES, MIDPOINT, BID, ASK, BID_ASK),
	OPTIONS("OPT", "BOX", TRADES, MIDPOINT, BID, ASK, BID_ASK),
	FUTURES_OPTIONS("FOP", "GLOBEX", TRADES, MIDPOINT, BID, ASK, BID_ASK),
	BONDS("BOND", "SMART", TRADES, MIDPOINT, BID, ASK, BID_ASK, YIELD_BID, YIELD_ASK, YIELD_BID_ASK, YIELD_LAST),
	MUTUAL_FUNDS("FUND", "FUNDSERV", MIDPOINT, BID, ASK, BID_ASK),
	COMMODITIES("CMDTY", "SMART", MIDPOINT, BID, ASK, BID_ASK),
	IOPT("IOPT", "SBF", TRADES, MIDPOINT, BID, ASK, BID_ASK),
	SPREAD("BAG", "SMART", TRADES, MIDPOINT, BID, ASK, BID_ASK)

	// TODO: found "metals" here: https://interactivebrokers.github.io/tws-api/historical_bars.html, but nowhere else.
	// METALS("???", "???", TRADES,MIDPOINT,BID,ASK,BID_ASK),
	;

	/** TWS security type code (e.g. {@code "STK"}, {@code "CASH"}). */
	public final String securityCode;
	/** Exchange used when a contract does not specify one. */
	public final String defaultExchange;
	/**
	 * Trade types supported for this security type; the first entry is the
	 * default. NOTE(review): backed by {@link java.util.Arrays#asList}, so the
	 * list is fixed-size but its elements can still be replaced via
	 * {@code set} — callers should treat it as read-only.
	 */
	public final List<TradeType> availableTradeTypes;

	SecurityType(String securityCode, String defaultExchange, TradeType... availableTradeTypes) {
		this.securityCode = securityCode;
		this.defaultExchange = defaultExchange;
		this.availableTradeTypes = Arrays.asList(availableTradeTypes);
	}

	/** Returns all trade types available for this security type. */
	public List<TradeType> availableTradeTypes() {
		return availableTradeTypes;
	}

	/** Returns the default trade type: the first one declared for this constant. */
	public TradeType defaultTradeType() {
		return availableTradeTypes.get(0);
	}

	/** Whether {@code tradeType} can be used with this security type. */
	public boolean isTradeTypeAvailable(TradeType tradeType) {
		return availableTradeTypes.contains(tradeType);
	}
}
|
package fortos.model.step.timer
import fortos.engine.processor.EngineProcessor
import fortos.engine.processor.time.ConstantTimerEngineProcessor
import fortos.model.step.Step
// Step whose workload is driven by a constant timer; execution is bound to
// ConstantTimerEngineProcessor through the @EngineProcessor annotation.
// The third Step constructor argument is passed as null (unused here).
@EngineProcessor(ConstantTimerEngineProcessor::class)
data class ConstantTimerStep(
    override val type: String,
    override val workload: List<Step>,
    override val executionMetadata: ConstantTimerExecutionMetadata
) : Step(type, workload, null, executionMetadata)

// Timing parameters for ConstantTimerStep.
// NOTE(review): the units of `duration` are not visible here (seconds?
// milliseconds?) — confirm against ConstantTimerEngineProcessor.
data class ConstantTimerExecutionMetadata(
    val transactions: Long,
    val duration: Long,
    val threads: Long,
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.