text
stringlengths 1
1.05M
|
|---|
import { CookieManager, FileManager } from "../models";
import { FortGlobal } from "../fort_global";
import { ControllerTestData } from "../types";
import { HttpResponseStub } from "./http_response_stub";
import { HttpRequestStub } from "./http_request_stub";
import { Controller } from "../abstracts";
/**
 * Prepares a controller instance for unit testing by wiring stubbed
 * request/response objects and the supplied test data onto it.
 * Returns the same instance for chaining.
 */
export const initController = (controllerInstance: Controller, data?: ControllerTestData) => {
    const testData = data || {};
    const cookies = testData.cookieValue || {};
    const requestHeaders = (testData.request && testData.request.headers) || {};
    controllerInstance.request = new HttpRequestStub(requestHeaders);
    controllerInstance.response = new HttpResponseStub(requestHeaders);
    controllerInstance.query = testData.query || {};
    controllerInstance.body = testData.body || {};
    controllerInstance.cookie = new CookieManager(cookies);
    // Session is created from the globally configured provider and keyed by
    // the app's session cookie, mirroring the real request pipeline.
    const session = new FortGlobal.sessionProvider();
    session.cookie = controllerInstance.cookie;
    session.sessionId = cookies[FortGlobal.appSessionIdentifier];
    controllerInstance.session = session;
    controllerInstance.param = testData.param || {};
    controllerInstance.data = testData.data || {};
    controllerInstance.file = new FileManager(testData.file || {});
    return controllerInstance;
};
|
#!/bin/bash
# Browserifies the main.js of the given section, then serves the current app
# with the bamweb server.
if [ -z "$1" ] || [ "$1" = "-help" ] || [ "$1" = "-?" ] || [ "$1" = "-h" ]
then
    printf "usage: serve-section.sh {section}\r\n"
    printf "\r\n"
    printf "Browserify's the main.js of the specified 'section' then starts the bamweb server serving the current app"
    printf "\r\n"
    printf "\r\n"
else
    # browserifys the 'main.js' of the specified section then starts the bamweb server
    SECTION=$1
    # -f: don't fail when the previous bundle does not exist yet (first run);
    # quoting protects against section names containing spaces or glob chars
    rm -f "pages/dist/$SECTION/main.bundle.js"
    npx browserify "pages/$SECTION/main.js" -o "pages/dist/$SECTION/main.bundle.js"
    # todo implement this as bam /serve:appName
    ./bin/bamweb/bamweb /S /content:/opt/bam/content /verbose /apps:"${PWD##*/}"
fi
|
package com.yoavfranco.wikigame.fragments;
import android.animation.Animator;
import android.animation.ObjectAnimator;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.Nullable;
import android.support.v4.view.animation.FastOutSlowInInterpolator;
import android.support.v7.widget.AppCompatImageView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AnimationUtils;
import android.view.animation.ScaleAnimation;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.yoavfranco.wikigame.HTTP.WikiGameAPI;
import com.yoavfranco.wikigame.HTTP.WikiGameInterface;
import com.yoavfranco.wikigame.R;
import com.yoavfranco.wikigame.activities.MainActivity;
import com.yoavfranco.wikigame.activities.WikiDisplayActivity;
import com.yoavfranco.wikigame.activities.SearchActivity;
import com.yoavfranco.wikigame.utils.Challenge;
import com.yoavfranco.wikigame.utils.Consts;
import com.yoavfranco.wikigame.utils.ErrorDialogs;
import com.yoavfranco.wikigame.utils.Friend;
import com.yoavfranco.wikigame.utils.Level;
import com.yoavfranco.wikigame.utils.SearchItem;
import com.yoavfranco.wikigame.views.RoundButton;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import butterknife.BindView;
import butterknife.BindViews;
import butterknife.ButterKnife;
/**
 * Screen for setting up or answering a quick-play challenge against a friend:
 * shows the start/target articles, lets the creator reshuffle or pick them,
 * and launches the game.
 */
public class ChallengeQuickPlayScreen extends BaseScreen {
    @BindViews({R.id.play_now, R.id.reset})
    RoundButton[] buttons; // [0] = play, [1] = reshuffle
    @BindView(R.id.tvStartWithInp)
    TextView startArticleTextView;
    @BindView(R.id.tvEndWithInp)
    TextView targetArticleTextView;
    @BindView(R.id.iv_arrow)
    AppCompatImageView arrow;
    @BindView(R.id.tvBestScoreClicks)
    TextView bestScoreClicksTextView;
    @BindView(R.id.tvBestTime)
    TextView bestScoreTimeTextView;
    @BindView(R.id.best_score_wrapper_clicks)
    LinearLayout clicksLinearLayout;
    @BindView(R.id.best_score_wrapper_time)
    LinearLayout timeLinearLayout;
    WikiGameAPI wikiGameAPI;
    @BindView(R.id.icArrowTop)
    AppCompatImageView icArrowTop;
    @BindView(R.id.icArrowBottom)
    AppCompatImageView icArrowBottom;
    // The opponent; friend.getChallenge() == null means we are creating a new challenge.
    private Friend friend;
    private String startArticleSubject;
    private String targetArticleSubject;
    // Last start/target articles confirmed by the server.
    private String serverStartArticle;
    private String serverTargetArticle;
    // Currently displayed articles (may diverge from the server's until confirmed).
    private String startArticle;
    private String targetArticle;
    private boolean isShuffleAnimationPaused;
    private int shuffleAnimationInterval = 70; // ms between shuffle ticks
    private int shuffleAnimationTime;          // total shuffle duration, ms
    private int shuffleAnimationTickCounter;
    private String selectedMode;
    @BindView(R.id.tvStartSubject)
    TextView tvStartSubject;
    @BindView(R.id.tvTargetSubject)
    TextView tvTargetSubject;
    boolean shuffleAnimationTimerStarted = false;
    Handler handler = new Handler();
    Runnable runnable = new Runnable() {
        @Override
        public void run() {
            int maximumAnimationTime = 2000;
            onShuffleAnimationTimerTick(shuffleAnimationInterval, maximumAnimationTime);
            // Re-arm the ticker until someone calls stopShuffleAnimationTimer().
            if (shuffleAnimationTimerStarted) {
                startShuffleAnimationTimer();
            }
        }
    };

    /** Stops the article-shuffle ticker. */
    public void stopShuffleAnimationTimer() {
        shuffleAnimationTimerStarted = false;
        handler.removeCallbacks(runnable);
    }

    /** (Re)arms the article-shuffle ticker for one more interval. */
    public void startShuffleAnimationTimer() {
        shuffleAnimationTimerStarted = true;
        handler.postDelayed(runnable, shuffleAnimationInterval);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        return inflater.inflate(R.layout.challenge_quickplay_screen, container, false);
    }

    @Override
    public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        ButterKnife.bind(this, view);
        this.isShuffleAnimationPaused = false;
        wikiGameAPI = new WikiGameAPI();
        animateArrow();
        updateUI();
        // Only the challenge creator gets server-shuffled articles up front.
        if (isCreatingNewChallenge())
            shuffleArticles();
        // play button
        buttons[0].setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                onPlayButtonClicked();
            }
        });
        // reshuffle button
        buttons[1].setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                onShuffleButtonClicked(this);
            }
        });
    }

    /** Opens the article-search screen for the "start" or "target" slot. */
    private void openSearchArticlesIntent(String type) {
        Intent intent = new Intent(getActivity(), SearchActivity.class);
        intent.putExtra("type", type);
        startActivityForResult(intent, 1);
    }

    // Fades a single view in or out over 200ms and toggles its visibility flag.
    private void fadeView(View view, boolean visible) {
        view.setVisibility(visible ? View.VISIBLE : View.INVISIBLE);
        view.animate().alpha(visible ? 1.0f : 0.0f).setDuration(200);
    }

    // Shows or hides the subject labels together with their arrows.
    // (Renamed from switchVisiblityState; deduplicated via fadeView.)
    private void switchVisibilityState(boolean in) {
        fadeView(icArrowTop, in);
        fadeView(icArrowBottom, in);
        fadeView(tvTargetSubject, in);
        fadeView(tvStartSubject, in);
    }

    private void onShuffleButtonClicked(View.OnClickListener onClickListener) {
        switchVisibilityState(false);
        buttons[1].animateClick(onClickListener);
        shuffleArticles();
    }

    private void onPlayButtonClicked() {
        buttons[0].setClickable(false);
        if (startArticle == null) {
            // No articles yet (e.g. shuffle failed). Inform the user and bail out
            // instead of falling through to a NullPointerException on the
            // serverStartArticle/serverTargetArticle comparisons below.
            ErrorDialogs.showNetworkErrorDialog(getActivity(), false);
            buttons[0].setClickable(true); // allow retrying once connectivity is back
            return;
        }
        if (!isCreatingNewChallenge()) {
            // we're answering a challenge
            wikiGameAPI.tryChallengeAsync(friend.getUsername(), new WikiGameInterface(getActivity()) {
                @Override
                public void onFinishedProcessingWikiRequest(JSONObject response) {
                    try {
                        if (response.getString(Consts.STATUS_CODE_KEY).equals(Consts.STATUS_OK)) {
                            startChallengeGame();
                        } else {
                            ErrorDialogs.showBadResponseDialog(getActivityContext(), false);
                        }
                    } catch (JSONException e) {
                        ErrorDialogs.showBadResponseDialog(getActivityContext(), false);
                        e.printStackTrace();
                    }
                }
            });
        } else {
            // we're creating a new challenge
            // (plain `else` — the former `else if (isCreatingNewChallenge())` re-check was redundant)
            if (!serverStartArticle.equals(startArticle) || !serverTargetArticle.equals(targetArticle)) {
                // the start/target articles are not the same as returned from server, meaning the user has chosen new ones
                // let's let the server know
                wikiGameAPI.chooseChallengeAsync(this.friend.getUsername(), this.selectedMode, this.startArticle, this.targetArticle, new WikiGameInterface(getActivity()) {
                    @Override
                    public void onFinishedProcessingWikiRequest(JSONObject response) {
                        handleServerResponse(response, true);
                    }
                });
            } else {
                startChallengeGame();
            }
        }
    }

    /**
     * Applies a shuffle/choose server response: stores the confirmed article
     * pair (and subjects if present) and optionally launches the game.
     */
    private void handleServerResponse(JSONObject response, boolean shouldStartGame) {
        try {
            if (response.getString(Consts.STATUS_CODE_KEY).equals(Consts.STATUS_OK)) {
                serverStartArticle = startArticle = (response.getString(Consts.KEY_START_ARTICLE));
                serverTargetArticle = targetArticle = (response.getString(Consts.KEY_TARGET_ARTICLE));
                if (response.has("start_article_subject")) {
                    startArticleSubject = response.getString("start_article_subject");
                    targetArticleSubject = response.getString("target_article_subject");
                }
                if (shouldStartGame) startChallengeGame();
            } else {
                ErrorDialogs.showSomethingWentWrongDialog(getActivity(), false);
                stopShuffleAnimationTimer();
            }
        } catch (JSONException e) {
            ErrorDialogs.showBadResponseDialog(getActivity(), false);
            stopShuffleAnimationTimer();
            e.printStackTrace();
        }
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode != 1 || data == null) return;
        final String type = data.getStringExtra("type");
        final SearchItem searchItem = (SearchItem) data.getSerializableExtra("search_item");
        // Guard against a malformed result intent (missing extras) to avoid NPEs.
        if (type == null || searchItem == null) return;
        if (type.equals("start")) {
            this.startArticle = searchItem.getTitle();
            this.startArticleSubject = searchItem.getSubject();
        } else {
            this.targetArticle = searchItem.getTitle();
            this.targetArticleSubject = searchItem.getSubject();
        }
        updateUI();
    }

    /** Clears this screen, then launches WikiDisplayActivity with the challenge level. */
    private void startChallengeGame() {
        setOnClearListener(new OnClearListener() {
            @Override
            public void clearDone() {
                if (startArticle != null) {
                    Intent intent = new Intent(getActivity(), WikiDisplayActivity.class);
                    intent.putExtra(Consts.BUNDLE_LEVEL0_FIRST_ARTICLE_KEY, startArticle);
                    intent.putExtra(Consts.BUNDLE_LEVEL0_TARGET_ARTICLE_KEY, targetArticle);
                    // TODO: delete the following line and support challenge at NewWikiDisplay
                    String mode = isCreatingNewChallenge() ? selectedMode : friend.getChallenge().getMode();
                    // getting default maximum allowed record
                    MainActivity mainActivity = (MainActivity) getActivity();
                    int requiredRecord = mode.equals(Consts.CLICKS_MODE) ? mainActivity.getMaximumAllowedClicks() : mainActivity.getMaximumAllowedTime();
                    Level dummyLevel = new Level("challenge level", false, mode, 0, requiredRecord);
                    intent.putExtra("current_level", dummyLevel);
                    intent.putExtra("friend", friend);
                    getActivity().startActivityForResult(intent, 1);
                }
            }
        });
        clear();
    }

    /** Refreshes every widget from the current challenge/article state. */
    private void updateUI() {
        if (getView() == null) return;
        if (buttons != null)
            buttons[0].setClickable(true);
        icArrowTop.setVisibility(isCreatingNewChallenge() ? View.VISIBLE : View.GONE);
        icArrowBottom.setVisibility(isCreatingNewChallenge() ? View.VISIBLE : View.GONE);
        // NOTE(review): assumes setFriend() was called before this screen is shown — confirm.
        Challenge challenge = friend.getChallenge();
        String mode = challenge != null ? challenge.getMode() : this.selectedMode;
        if (mode == null) return;
        if (startArticleSubject != null && targetArticleSubject != null) {
            switchVisibilityState(true);
            tvStartSubject.setText(startArticleSubject);
            tvTargetSubject.setText(targetArticleSubject);
        } else {
            switchVisibilityState(false);
        }
        AppCompatImageView leftIcon = ((MainActivity) getActivity()).settings;
        if (mode.equals(Consts.CLICKS_MODE)) {
            leftIcon.setImageResource(R.drawable.ic_clicks);
            leftIcon.setClickable(false);
            timeLinearLayout.setVisibility(View.GONE);
            clicksLinearLayout.setVisibility(View.VISIBLE);
        } else {
            leftIcon.setImageResource(R.drawable.ic_alarm);
            leftIcon.setClickable(false);
            timeLinearLayout.setVisibility(View.VISIBLE);
            clicksLinearLayout.setVisibility(View.GONE);
        }
        if (isCreatingNewChallenge()) {
            // Creator: no score to beat yet, allow reshuffling and manual article picks.
            clicksLinearLayout.setVisibility(View.GONE);
            timeLinearLayout.setVisibility(View.GONE);
            buttons[1].setVisibility(View.VISIBLE);
            startArticleTextView.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    openSearchArticlesIntent("start");
                }
            });
            targetArticleTextView.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    openSearchArticlesIntent("target");
                }
            });
            icArrowBottom.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    openSearchArticlesIntent("target");
                }
            });
            icArrowTop.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    openSearchArticlesIntent("start");
                }
            });
        } else {
            // Challenged player: show the opponent's record to beat.
            buttons[1].setVisibility(View.GONE);
            startArticle = challenge.getStartArticle();
            targetArticle = challenge.getTargetArticle();
            bestScoreClicksTextView.setText(challenge.getNumClicks() + " clicks to beat ");
            long min = TimeUnit.SECONDS.toMinutes(challenge.getTime());
            long sec = (TimeUnit.SECONDS.toSeconds(challenge.getTime()) - 60 * min);
            bestScoreTimeTextView.setText(min + "m" + " " + (sec < 10 ? "0" + sec : sec) + "s" + " to beat ");
        }
        TextView tvLevelName = ((MainActivity) getActivity()).topBarTextView;
        tvLevelName.setVisibility(View.VISIBLE);
        tvLevelName.setText("Me VS " + friend.getUsername());
        if (startArticle != null && targetArticle != null) {
            this.startArticleTextView.setText(startArticle);
            this.targetArticleTextView.setText(targetArticle);
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        stopShuffleAnimationTimer();
        this.isShuffleAnimationPaused = true;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        stopShuffleAnimationTimer();
    }

    public String getStartArticle() {
        return startArticle;
    }

    public void setStartArticle(String startArticle) {
        this.startArticle = startArticle;
    }

    public String getTargetArticle() {
        return targetArticle;
    }

    public void setTargetArticle(String targetArticle) {
        this.targetArticle = targetArticle;
    }

    /** Requests a fresh random article pair from the server and starts the shuffle animation. */
    public void shuffleArticles() {
        startArticle = null;
        if (wikiGameAPI == null)
            wikiGameAPI = new WikiGameAPI();
        // requesting random articles from server
        wikiGameAPI.shuffleChallengeAsync(friend.getUsername(), selectedMode, new WikiGameInterface(this.getActivity()) {
            @Override
            public void onFinishedProcessingWikiRequest(JSONObject response) {
                handleServerResponse(response, false);
            }

            @Override
            public void onFailedMakingWikiRequest(WikiError errorCause) {
                ErrorDialogs.showNetworkErrorDialog(getActivity(), false);
                stopShuffleAnimationTimer();
            }
        });
        shuffleAnimationTime = 840;
        shuffleAnimationTickCounter = 0;
        startShuffleAnimationTimer();
    }

    public String getSelectedMode() {
        return selectedMode;
    }

    public void setSelectedMode(String selectedMode) {
        this.selectedMode = selectedMode;
    }

    /**
     * One tick of the shuffle animation: flashes random article names until the
     * planned duration elapses; then shows the server result, or extends the
     * animation up to maximumAnimationTime while waiting for it.
     */
    private void onShuffleAnimationTimerTick(int animationInterval, int maximumAnimationTime) {
        int requiredTicks = Math.round((float) shuffleAnimationTime / animationInterval);
        if (shuffleAnimationTickCounter >= requiredTicks) {
            // animation should end now
            if (startArticle != null && targetArticle != null) {
                updateUI();
                stopShuffleAnimationTimer();
            } else {
                // animation should end but we don't yet have the result from server!
                if (shuffleAnimationTime != maximumAnimationTime) {
                    // let's give it a chance
                    shuffleAnimationTime = maximumAnimationTime;
                } else {
                    stopShuffleAnimationTimer();
                    // Fix: Toast.makeText() only builds the toast — show() was missing,
                    // so the failure message was never displayed.
                    Toast.makeText(getActivity(), "Failed to shuffle articles!", Toast.LENGTH_SHORT).show();
                    //ErrorDialogs.showNetworkErrorDialog(getActivity(), false);
                }
            }
        } else {
            shuffleAnimationTickCounter++;
            // Flash a random article pair while waiting for the server's answer.
            Random random = new Random();
            int randomIndex1 = random.nextInt(Consts.RANDOM_ARTICLES_ARRAY.length);
            int randomIndex2 = random.nextInt(Consts.RANDOM_ARTICLES_ARRAY.length);
            startArticleTextView.setText(Consts.RANDOM_ARTICLES_ARRAY[randomIndex1]);
            targetArticleTextView.setText(Consts.RANDOM_ARTICLES_ARRAY[randomIndex2]);
        }
    }

    /* @Override
    public Animation onCreateAnimation(int transit, boolean enter, int nextAnim) {
        Animation a = new Animation() {};
        a.setDuration(0);
        return a;
    }*/

    /** Plays the grow animation on the arrow between the two article labels. */
    private void animateArrow() {
        ScaleAnimation scale = (ScaleAnimation) AnimationUtils.loadAnimation(getActivity(), R.anim.grow_arrow);
        scale.setInterpolator(new FastOutSlowInInterpolator());
        arrow.startAnimation(scale);
    }

    @Override
    public void clear() {
        for (RoundButton button : buttons) {
            if (button.getVisibility() == View.VISIBLE)
                button.animateOut();
        }
        // Give animateOut() time to finish before notifying the listener.
        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                if (onClearListener != null)
                    onClearListener.clearDone();
            }
        }, 300);
    }

    @Override
    public void onResume() {
        super.onResume();
        if (isShuffleAnimationPaused) {
            startShuffleAnimationTimer();
            this.isShuffleAnimationPaused = false;
        }
        for (RoundButton button : buttons) {
            if (button.getVisibility() == View.VISIBLE)
                button.animateIn(0);
        }
    }

    /** True when the friend has no pending challenge, i.e. we are the creator. */
    private boolean isCreatingNewChallenge() {
        return this.friend.getChallenge() == null;
    }

    public Friend getFriend() {
        return friend;
    }

    public void setFriend(Friend friend) {
        this.friend = friend;
    }

    @Override
    public Animator onCreateAnimator(int transit, boolean enter, int nextAnim) {
        // NOTE(review): the animator targets the boxed Integer 0, not a View, so no
        // visible property animation occurs — it merely consumes the fragment
        // transition for 0ms. Presumably getView() was intended; confirm before changing.
        ObjectAnimator fadeOut = ObjectAnimator.ofFloat(0, "alpha",
                1f);
        fadeOut.setDuration(0);
        return fadeOut;
    }
}
|
<reponame>batizhao/paper
package io.github.batizhao.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import io.github.batizhao.domain.Role;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import java.util.List;
/**
 * MyBatis mapper for {@code Role} entities.
 *
 * @author batizhao
 * @since 2020-02-26
 */
@Mapper
public interface RoleMapper extends BaseMapper<Role> {
/**
 * Finds the roles assigned to a user.
 *
 * @param id the user id
 * @return the user's roles (id, name and code columns only)
 */
@Select("SELECT A.id, A.name, A.code FROM role A LEFT JOIN user_role B ON A.id = B.roleId WHERE B.userId = #{id}")
List<Role> findRolesByUserId(@Param("id") Long id);
// /**
//  * Queries the role-to-menu (permission) relations.
//  *
//  * @return role code / menu path pairs
//  */
// @Select("SELECT A.code AS roleCode,C.path FROM role AS A LEFT JOIN role_menu B ON A.id=B.roleId LEFT JOIN menu AS C ON B.menuId=C.id")
// List<RoleMenu> findRoleMenus();
}
|
import uuid

# Render a fresh version-4 (random) UUID in its canonical string form.
random_string = f"{uuid.uuid4()}"

# Show the generated identifier.
print(random_string)
|
#!/bin/bash
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
# Script used to run tvOS tests.
# If no arguments are passed to the script, it will only compile
# the RNTester.
# If the script is called with a single argument "test", we'll
# also run the RNTester integration test (needs JS and packager):
# ./objc-test-tvos.sh test
set -ex
SCRIPTS=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
ROOT=$(dirname "$SCRIPTS")
cd "$ROOT"
export TEST_NAME="tvOS"
export SCHEME="RNTester-tvOS"
export SDK="appletvsimulator"
export DESTINATION="platform=tvOS Simulator,name=Apple TV,OS=12.1"
# If there's a "test" argument, forward it to the test script.
# Quoted "$@" forwards each argument intact (no word splitting) and passes
# nothing at all when the script was invoked without arguments, unlike an
# unquoted $1.
. ./scripts/objc-test.sh "$@"
|
use serde_json::{Value, from_str};
/// Pulls the `user.id` and `user.path` string fields out of a JSON document.
///
/// Returns `None` when the document fails to parse, the `user` object is
/// absent, or either field is missing or not a string.
fn extract_user_info(json_str: &str) -> Option<(String, String)> {
    let document: Value = from_str(json_str).ok()?;
    let user = document.get("user")?;
    // `as_str` yields `Some` only for `Value::String`, matching the original
    // pattern match on `Value::String`.
    let id = user.get("id")?.as_str()?;
    let path = user.get("path")?.as_str()?;
    Some((id.to_owned(), path.to_owned()))
}
|
import IAPIParamTranslator from "../../API/interfaces/IAPIParamTranslator";
import IAPIParamTranslatorStatic from "../../API/interfaces/IAPIParamTranslatorStatic";
import DataFilterOption from "../../DataRender/vos/DataFilterOption";
/**
 * Value object carrying the filter selections of the animation reporting
 * screen, translatable to/from the positional API parameter list.
 */
export default class AnimationReportingParamVO implements IAPIParamTranslator<AnimationReportingParamVO> {

    /** Builds a VO directly from the individual filter selections. */
    public static fromParams(
        filter_anim_theme_active_options: DataFilterOption[],
        filter_anim_module_active_options: DataFilterOption[],
        filter_role_active_options: DataFilterOption[],
        filter_user_active_options: DataFilterOption[],
        filter_module_termine_active_option: DataFilterOption,
        filter_module_valide_active_option: DataFilterOption
    ): AnimationReportingParamVO {
        return new AnimationReportingParamVO(
            filter_anim_theme_active_options,
            filter_anim_module_active_options,
            filter_role_active_options,
            filter_user_active_options,
            filter_module_termine_active_option,
            filter_module_valide_active_option);
    }

    /**
     * Flattens a VO back into the positional argument list expected by the
     * API layer. Order mirrors the constructor parameters.
     */
    public static getAPIParams(param: AnimationReportingParamVO): any[] {
        const {
            filter_anim_theme_active_options,
            filter_anim_module_active_options,
            filter_role_active_options,
            filter_user_active_options,
            filter_module_termine_active_option,
            filter_module_valide_active_option,
        } = param;
        return [
            filter_anim_theme_active_options,
            filter_anim_module_active_options,
            filter_role_active_options,
            filter_user_active_options,
            filter_module_termine_active_option,
            filter_module_valide_active_option,
        ];
    }

    public constructor(
        public filter_anim_theme_active_options: DataFilterOption[],
        public filter_anim_module_active_options: DataFilterOption[],
        public filter_role_active_options: DataFilterOption[],
        public filter_user_active_options: DataFilterOption[],
        public filter_module_termine_active_option: DataFilterOption,
        public filter_module_valide_active_option: DataFilterOption) {
    }
}

export const AnimationReportingParamVOStatic: IAPIParamTranslatorStatic<AnimationReportingParamVO> = AnimationReportingParamVO;
|
#!/bin/bash
# Bootstraps this dotfiles checkout: pulls submodules, installs sdkman, then
# symlinks the tracked dotfiles, ~/.config entries, ~/bin and oh-my-zsh into
# the user's home directory.
git submodule init
git submodule update
#git submodule foreach git pull origin master
curl -s "https://get.sdkman.io" | bash
# Link every tracked dotfile (.*) except the .git directory into $HOME.
for x in .[[:alpha:]]*
do
    # Fix: the original `[ -n $x -a "$x" != ".git" ]` left $x unquoted (so the
    # -n test degenerates when $x is empty/globbed) and used the obsolete `-a`
    # operator; use two separate quoted tests instead.
    if [ -n "$x" ] && [ "$x" != ".git" ]
    then
        # -f: don't fail when the target doesn't exist yet (first run)
        rm -rf "$HOME/$x"
        ln -s "$PWD/$x" ~/
    fi
done
pushd .
cd config
mkdir -p "$HOME/.config"
for x in *
do
    if [ -n "$x" ] && [ "$x" != ".git" ]
    then
        echo "$x"
        rm -rf "$HOME/.config/$x"
        ln -s "$PWD/$x" ~/.config/
    fi
done
popd
rm -rf ~/bin
ln -s "$PWD/bin" ~/
rm -rf ~/.oh-my-zsh
ln -s "$PWD/oh-my-zsh" ~/.oh-my-zsh
#sudo pip3 install pywal
#sudo pip3 install wal-steam
|
#!/bin/bash
# Print the greeting five times.
count=1
while [ "$count" -le 5 ]; do
    echo "Hello World!"
    count=$((count + 1))
done
|
<reponame>pomali/priznanie-digital
import React, { ReactNode } from 'react'
export interface WarningProps {
  /** Extra CSS class(es) appended to the wrapper element. */
  className?: string
  /** Warning message content. */
  children: ReactNode
}

/**
 * GOV.UK-styled warning box with an exclamation icon.
 *
 * Fix: when `className` is omitted, fall back to an empty string — the
 * previous template literal interpolated `undefined`, emitting the literal
 * text "undefined" into the element's class attribute.
 */
export const Warning = ({ children, className }: WarningProps) => (
  <div className={`govuk-grid-column-full govuk-warning-text ${className ?? ''}`}>
    <span className="govuk-warning-text__icon" aria-hidden="true">
      !
    </span>
    <div className="govuk-warning-text__text">{children}</div>
  </div>
)
|
// TODO: use common file with BE implementation
// Fields a chain-store list can be filtered by; must stay in sync with the
// backend's accepted filter keys.
export type ChainStoreFilterKeys = 'name' | 'website';
|
import os

# Path of the target file, relative to the user's home directory.
relative_path = 'home/folder1/myFile.txt'
# NOTE(review): os.environ['HOME'] raises KeyError when HOME is unset
# (e.g. on Windows); os.path.expanduser('~') would be more portable — confirm.
home = os.environ['HOME']
# Join into an absolute path rooted at $HOME.
absolute_path = os.path.join(home, relative_path)
print(absolute_path)
|
def checkEquality(a, b):
    """Return True when ``a`` equals ``b``, else False.

    Idiom fix: the explicit ``if/else`` returning True/False was redundant.
    ``bool(...)`` preserves the original truthiness semantics even for
    objects whose ``__eq__`` returns a non-boolean value.
    """
    return bool(a == b)

print(checkEquality(3, 4))
# Output: False
|
/// <summary>
/// Looks up a weapon by its name and returns its textual description,
/// or "Weapon not found" when no weapon matches.
/// </summary>
public string SearchWeapon(string weaponName)
{
    // Return the first matching weapon, mirroring FirstOrDefault semantics.
    foreach (IWeapon candidate in weapons)
    {
        if (candidate.Name == weaponName)
        {
            return candidate.ToString();
        }
    }
    return "Weapon not found";
}
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import React, { FunctionComponent, ReactChild, ReactElement } from 'react';
import { EuiI18nConsumer } from '../context';
import { ExclusiveUnion } from '../common';
/** Shared `Intl` formatter for the default ('en') locale. */
const defaultFormatter = new Intl.NumberFormat('en');

/** Fallback number formatter used when the i18n context supplies none. */
const defaultFormatNumber = (value: number) => defaultFormatter.format(value);
interface EuiI18nNumberValueShape {
  /** Single number to format */
  value: number;
  /** Optional render prop receiving the formatted value */
  children?: (x: ReactChild) => ReactElement<any>;
}
interface EuiI18nNumberValuesShape {
  /** Numbers to format, one formatted string produced per entry */
  values: number[];
  /**
   * ReactNode to render as this component's content
   */
  children: (x: ReactChild[]) => ReactElement<any>;
}
/** Either a single `value` or an array of `values` — never both. */
export type EuiI18nNumberProps = ExclusiveUnion<
  EuiI18nNumberValueShape,
  EuiI18nNumberValuesShape
>;
/** Type guard: the props use the plural `values` array form. */
function hasValues(x: EuiI18nNumberProps): x is EuiI18nNumberValuesShape {
  // Equivalent to `x.values != null`, spelled out explicitly.
  return x.values !== undefined && x.values !== null;
}
/**
 * Renders one or more numbers formatted according to the active i18n
 * context, falling back to the default 'en' formatter.
 */
const EuiI18nNumber: FunctionComponent<EuiI18nNumberProps> = (props) => (
  <EuiI18nConsumer>
    {(i18nConfig) => {
      // The default is applied exactly once, up front.
      const formatNumber = i18nConfig.formatNumber || defaultFormatNumber;
      if (hasValues(props)) {
        return props.children(props.values.map((value) => formatNumber(value)));
      }
      // Fix: `formatNumber` is already defaulted above — the former
      // `(formatNumber || defaultFormatNumber)` re-check was dead code.
      const formattedValue = formatNumber(props.value);
      if (props.children) {
        return props.children(formattedValue);
      } else {
        return formattedValue;
      }
    }}
  </EuiI18nConsumer>
);

export { EuiI18nNumber };
|
import Stage0Header from '../header/stage0header/stage0header.vue';
import Stage0Body from '../body/stage0body/stage0body.vue';
// Vue component for the first ("stage 0") screen, composed of its
// dedicated header and body sub-components.
export default {
name : 'stage0',
components: {
// Rendered at the top of the stage-0 screen.
Stage0Header,
// Main content area of the stage-0 screen.
Stage0Body
}
}
|
defmodule SumFirstN do
  @moduledoc "Summing a prefix of a list."

  @doc """
  Sums the first `n` elements of `list`.

  Uses `Enum.sum/1`, so an empty list (or `n <= 0`) yields 0. The previous
  `Enum.reduce/2` call had no initial accumulator and raised
  `Enum.EmptyError` whenever the taken prefix was empty.
  """
  def sum(list, n) do
    list
    |> Enum.take(n)
    |> Enum.sum()
  end
end

SumFirstN.sum([2, 5, 6, 4, 9], 3) # 13
|
package br.com.alinesolutions.anotaai.model.util;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlTransient;
import br.com.alinesolutions.anotaai.model.BaseEntity;
import br.com.alinesolutions.anotaai.model.SessaoUsuario;
// Temporary-file record: ties an uploaded Arquivo to the user session
// (SessaoUsuario) that produced it, identified externally by a uuid string.
@NamedQueries({})
@Entity
public class ArquivoTemporario extends BaseEntity<Long, ArquivoTemporario> {
private static final long serialVersionUID = 1L;
// Owning user session; lazily loaded, all operations cascaded.
@NotNull
@ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.ALL)
private SessaoUsuario sessaoUsuario;
// The underlying stored file; all operations cascaded.
@NotNull
@ManyToOne(cascade = CascadeType.ALL)
private Arquivo arquivo;
// External identifier for this temporary file.
private String uuid;
public SessaoUsuario getSessaoUsuario() {
return sessaoUsuario;
}
// Excluded from XML marshalling — NOTE(review): presumably to keep file
// data/cycles out of serialized output; confirm against the REST layer.
@XmlTransient
public Arquivo getArquivo() {
return arquivo;
}
public String getUuid() {
return uuid;
}
public void setSessaoUsuario(SessaoUsuario sessaoUsuario) {
this.sessaoUsuario = sessaoUsuario;
}
public void setArquivo(Arquivo arquivo) {
this.arquivo = arquivo;
}
public void setUuid(String uuid) {
this.uuid = uuid;
}
}
|
<filename>python/eskapade/data_quality/__init__.py
# flake8: noqa
from eskapade.data_quality.links import *
|
export * from './ThemeTest';
|
<filename>app/usecases/authUseCase.js<gh_stars>0
export default class AuthUseCase {
constructor(authService, errorService) {
this.authService = authService
this.errorService = errorService
}
async auth(username, password) {
if (await this.authService.verifyPassword(username, password)) {
return await this.authService.createToken(username)
}
throw this.errorService.unauthorized()
}
}
|
5 (the length of the largest palindromic substring: 'civic')
|
#include <iostream>
// Prints the first `length` Fibonacci numbers to stdout, space-separated
// (with a trailing space). Prints nothing when length <= 0.
void fibonacci_sequence(int length)
{
    // Fix: the original fell into the general branch for length <= 0 and
    // still printed "0 1 "; guard against non-positive lengths instead.
    if (length <= 0)
    {
        return;
    }
    int n1 = 0, n2 = 1;
    std::cout << n1 << " ";
    if (length == 1)
    {
        return;
    }
    std::cout << n2 << " ";
    for (int i = 2; i < length; ++i)
    {
        int n3 = n1 + n2; // next term
        n1 = n2;
        n2 = n3;
        std::cout << n3 << " ";
    }
}
// Driver: print the first ten terms of the Fibonacci sequence.
int main()
{
    const int length = 10; // length of Fibonacci Sequence
    fibonacci_sequence(length);
    return 0;
}
|
import subprocess
import requests
import json
import os
import re
import argparse
import logging
import csv
import shutil
def process_data(url):
    """Run the demo pipeline: read ``retracted_exps.csv``, fetch ``url``,
    execute a shell command, and log completion.

    The per-step ``process_*`` hooks remain placeholders, as in the original.
    """
    # Read the "retracted_exps.csv" file and extract relevant information
    with open('retracted_exps.csv', 'r') as file:
        csv_reader = csv.reader(file)
        for row in csv_reader:
            # Fix: the loop body previously contained only comments, which is
            # a syntax error. Keep the placeholder explicit with `pass` until
            # per-row handling is implemented.
            # Example: process_row(row)
            pass
    # Make an HTTP GET request to the specified URL and handle the response
    try:
        response = requests.get(url)
        response.raise_for_status()  # Raise an exception for 4xx or 5xx status codes
        # Process the response data
        # Example: process_response(response.json())
    except requests.exceptions.RequestException as e:
        logging.error(f"HTTP request to {url} failed: {e}")
    # Execute a shell command using the subprocess module
    try:
        # Fix: without check=True, subprocess.run never raises
        # CalledProcessError, so the except branch below was unreachable.
        subprocess.run(['ls', '-l'], check=True)  # Example shell command
        # Process the command output
        # Example: process_command_output(output)
    except subprocess.CalledProcessError as e:
        logging.error(f"Shell command execution failed: {e}")
    # Log the processing steps using the logging module
    logging.info("Data processing completed")

# Example usage
process_data('https://example.com/api/data')
|
<reponame>pradeep-gr/mbed-os5-onsemi<gh_stars>10-100
/*
* Copyright (c) 2013-2016, ARM Limited, All Rights Reserved
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __CFSTORE_UVISOR_H__
#define __CFSTORE_UVISOR_H__
/*
 * Per-target uVisor test configuration for CFSTORE: LED polarity, the main
 * LED/button pins and their pull mode, and CFSTORE_UVISOR_MAIN_ACL(), which
 * expands to a box ACL list granting access to the peripherals each target
 * needs. Unknown targets get no LED/button (NC) and an empty ACL list.
 */
/* target specific ACLs */
#if defined(TARGET_LIKE_FRDM_K64F)
#define CFSTORE_UVISOR_LED_ON false
#define CFSTORE_UVISOR_LED_OFF true
#define CFSTORE_UVISOR_MAIN_LED LED_BLUE
#define CFSTORE_UVISOR_MAIN_BTN SW2
#define CFSTORE_UVISOR_MAIN_BTN_PUPD PullUp
#define CFSTORE_UVISOR_MAIN_ACL(acl_list_name) \
static const UvisorBoxAclItem acl_list_name[] = { \
{MCG, sizeof(*MCG), UVISOR_TACLDEF_PERIPH}, \
{SIM, sizeof(*SIM), UVISOR_TACLDEF_PERIPH}, \
{PORTB, sizeof(*PORTB), UVISOR_TACLDEF_PERIPH}, \
{PORTC, sizeof(*PORTC), UVISOR_TACLDEF_PERIPH}, \
{RTC, sizeof(*RTC), UVISOR_TACLDEF_PERIPH}, \
{LPTMR0, sizeof(*LPTMR0), UVISOR_TACLDEF_PERIPH}, \
{PIT, sizeof(*PIT), UVISOR_TACLDEF_PERIPH}, \
{SMC, sizeof(*SMC), UVISOR_TACLDEF_PERIPH}, \
{UART0, sizeof(*UART0), UVISOR_TACLDEF_PERIPH}, \
}
/* ST STM32F429I Discovery board */
#elif defined(TARGET_LIKE_STM32F429I_DISCO)
#define CFSTORE_UVISOR_LED_ON false
#define CFSTORE_UVISOR_LED_OFF true
#define CFSTORE_UVISOR_MAIN_LED LED1
#define CFSTORE_UVISOR_MAIN_BTN USER_BUTTON
#define CFSTORE_UVISOR_MAIN_BTN_PUPD PullDown
#define CFSTORE_UVISOR_MAIN_ACL(acl_list_name) \
static const UvisorBoxAclItem acl_list_name[] = { \
{TIM2, sizeof(*TIM2), UVISOR_TACLDEF_PERIPH}, \
{TIM5, sizeof(*TIM5), UVISOR_TACLDEF_PERIPH}, \
{GPIOA, sizeof(*GPIOA), UVISOR_TACLDEF_PERIPH}, \
{GPIOG, sizeof(*GPIOG), UVISOR_TACLDEF_PERIPH}, \
/* FIXME: secure RCC/EXTI/SYSCFG/FLASH */ \
{RCC, sizeof(*RCC), UVISOR_TACLDEF_PERIPH}, \
{EXTI, sizeof(*EXTI), UVISOR_TACLDEF_PERIPH}, \
{SYSCFG, sizeof(*SYSCFG), UVISOR_TACLDEF_PERIPH}, \
{FLASH, sizeof(*FLASH), UVISOR_TACLDEF_PERIPH}, \
{PWR, sizeof(*PWR), UVISOR_TACLDEF_PERIPH}, \
{USART1, sizeof(*USART1), UVISOR_TACLDEF_PERIPH}, \
{(void *) 0x42470000, 0x1000, UVISOR_TACLDEF_PERIPH}, \
}
/* Silicon Labs EFM32 Giant/Leopard/Wonder Gecko starter kits */
#elif defined(TARGET_LIKE_EFM32GG_STK) \
|| defined(TARGET_LIKE_EFM32LG_STK) \
|| defined(TARGET_LIKE_EFM32WG_STK)
#define CFSTORE_UVISOR_LED_ON false
#define CFSTORE_UVISOR_LED_OFF true
#define CFSTORE_UVISOR_MAIN_LED LED1
#define CFSTORE_UVISOR_MAIN_BTN BTN0
#define CFSTORE_UVISOR_MAIN_BTN_PUPD PullUp
#define CFSTORE_UVISOR_MAIN_ACL(acl_list_name) \
static const UvisorBoxAclItem acl_list_name[] = { \
{GPIO, sizeof(*GPIO), UVISOR_TACLDEF_PERIPH}, \
{UART0, sizeof(*UART0), UVISOR_TACLDEF_PERIPH}, \
{TIMER0, sizeof(*TIMER0), UVISOR_TACLDEF_PERIPH}, \
/* FIXME: Secure CMU */ \
{CMU, sizeof(*CMU), UVISOR_TACLDEF_PERIPH}, \
{RTC, sizeof(*RTC), UVISOR_TACLDEF_PERIPH}, \
/* FIXME: Secure MSC */ \
{MSC, sizeof(*MSC), UVISOR_TACLDEF_PERIPH}, \
/* mbed-hal-silabs requires the DI page to be readable */ \
{(void*) 0x0FE08000, 0x1000, UVISOR_TACLDEF_SECURE_CONST}, \
}
/* Silicon Labs EFM32 Pearl Gecko starter kit */
#elif defined(TARGET_LIKE_EFM32PG_STK)
#define CFSTORE_UVISOR_LED_ON false
#define CFSTORE_UVISOR_LED_OFF true
#define CFSTORE_UVISOR_MAIN_LED LED1
#define CFSTORE_UVISOR_MAIN_BTN BTN0
#define CFSTORE_UVISOR_MAIN_BTN_PUPD PullUp
#define CFSTORE_UVISOR_MAIN_ACL(acl_list_name) \
static const UvisorBoxAclItem acl_list_name[] = { \
{GPIO, sizeof(*GPIO), UVISOR_TACLDEF_PERIPH}, \
{USART0, sizeof(*USART0), UVISOR_TACLDEF_PERIPH}, \
{TIMER0, sizeof(*TIMER0), UVISOR_TACLDEF_PERIPH}, \
/* FIXME: Secure CMU */ \
{CMU, sizeof(*CMU), UVISOR_TACLDEF_PERIPH}, \
{EMU, sizeof(*EMU), UVISOR_TACLDEF_PERIPH}, \
{RTCC, sizeof(*RTCC), UVISOR_TACLDEF_PERIPH}, \
/* FIXME: Secure MSC */ \
{MSC, sizeof(*MSC), UVISOR_TACLDEF_PERIPH}, \
/* mbed-hal-silabs requires the DI page to be readable */ \
{(void*) 0x0FE08000, 0x1000, UVISOR_TACLDEF_SECURE_CONST}, \
}
/* unknown target: no LED/button, empty ACL list */
#else
#define CFSTORE_UVISOR_LED_ON true
#define CFSTORE_UVISOR_LED_OFF false
#define CFSTORE_UVISOR_MAIN_LED NC
#define CFSTORE_UVISOR_MAIN_BTN NC
#define CFSTORE_UVISOR_MAIN_BTN_PUPD PullNone
#define CFSTORE_UVISOR_MAIN_ACL(acl_list_name) \
static const UvisorBoxAclItem acl_list_name[] = {}
#endif /* target dispatch */
#endif /* __CFSTORE_UVISOR_H__ */
|
<reponame>juliuste/widschi-bot
import { loadJsonFile as loadJson } from 'load-json-file'
import { writeJsonFile as writeJson } from 'write-json-file'
import lodash from 'lodash'
import { people } from './settings.js'
const { shuffle, minBy, fromPairs, toPairs, max, min } = lodash
// Path of the JSON file that persists the per-person score tally.
const statePath = './state.json'
// Fresh tally: every configured person starts with a score of 0.
const initialState = () => fromPairs(people.map(p => [p.name, 0]))
// Persist the updated tally back to disk.
const writeNextState = async state => writeJson(statePath, state)
// Pick the next person on duty and persist the updated tally.
// A `preSelected` name (matched case-insensitively against the stored tally)
// forces the choice; otherwise a random person among those with the lowest
// score is chosen. Returns the selected person plus the max-min score spread.
const next = async ({ preSelected: name }, amount) => {
	const increment = amount || 1
	let state
	try {
		state = await loadJson(statePath)
	} catch (error) {
		// A missing state file is expected on first run; anything else is fatal.
		if (error.code !== 'ENOENT') throw error
		state = initialState()
	}
	const knownNames = Object.keys(state).map(k => k.toLowerCase())
	let selectedPerson
	if (name && knownNames.includes(name.toLowerCase())) {
		const person = people.find(p => p.name.toLowerCase() === name.toLowerCase())
		state[person.name] += increment
		selectedPerson = person.name
	} else {
		// Shuffle first so ties on the lowest score are broken at random.
		const [randomPersonWithLowestScore] = minBy(shuffle(toPairs(state)), pair => pair[1])
		state[randomPersonWithLowestScore] += increment
		selectedPerson = randomPersonWithLowestScore
	}
	await writeNextState(state)
	const scores = Object.values(state)
	const difference = max(scores) - min(scores)
	return { person: people.find(p => p.name === selectedPerson), difference }
}
export default next
|
# Compile a single AI translation unit with aggressive optimisation flags.
# Usage: <script> <AI name>   (a trailing ".cpp" extension is tolerated)
if [ "$#" -ne 1 ]; then
    echo "$0: Takes one argument, the name of the AI to compile"
    exit 1
fi
# Strip any extension so both "V4" and "V4.cpp" are accepted.
AI_Name="${1%.*}"
clang++-9 -std=c++17 -march=native -mpopcnt -mbmi2 -mfma -mavx2 -Ofast -funroll-loops -finline "$AI_Name.cpp" -lpthread -o "$AI_Name"
|
<reponame>pavel-pimenov/go-dcpp<filename>adc/client/client2hub.go
package client
import (
"context"
"errors"
"fmt"
"log"
"math/rand"
"strconv"
"sync"
"time"
"github.com/direct-connect/go-dcpp/adc"
"github.com/direct-connect/go-dcpp/version"
)
// DialHub connects to a hub and runs a handshake.
func DialHub(addr string, info *Config) (*Conn, error) {
	conn, err := adc.Dial(addr)
	if err != nil {
		return nil, err
	}
	return HubHandshake(conn, info)
}

// Config holds the client-side parameters used during the hub handshake.
type Config struct {
	PID        adc.PID         // private ID; its hash becomes the client ID (CID)
	Name       string          // nickname shown to other hub users
	Extensions adc.ExtFeatures // extra per-user features advertised in the INF
}

// validate checks that the mandatory handshake fields are set.
func (c *Config) validate() error {
	if c.PID.IsZero() {
		return errors.New("PID should not be empty")
	}
	if c.Name == "" {
		return errors.New("name should be set")
	}
	return nil
}
// HubHandshake begins a Client-Hub handshake on a connection.
//
// It negotiates protocol features, identifies the client to the hub, and
// receives the initial user list; on success a read loop is started. On any
// error the connection is closed.
func HubHandshake(conn *adc.Conn, conf *Config) (*Conn, error) {
	if err := conf.validate(); err != nil {
		return nil, err
	}
	// PROTOCOL state: exchange SUP messages and receive our session ID.
	sid, mutual, err := protocolToHub(conn)
	if err != nil {
		_ = conn.Close()
		return nil, err
	}
	c := &Conn{
		conn: conn,
		sid:  sid, pid: conf.PID,
		fea:     mutual,
		closing: make(chan struct{}),
		closed:  make(chan struct{}),
	}
	c.user.Pid = &conf.PID
	c.user.Name = conf.Name
	c.user.Features = conf.Extensions
	c.user.Slots = 1 // advertise a single upload slot by default
	// IDENTIFY state: broadcast our INF with the PID/CID pair.
	if err := identifyToHub(conn, sid, &c.user); err != nil {
		conn.Close()
		return nil, err
	}
	// Index our features for quick lookup when filtering F-type packets.
	c.ext = make(map[adc.Feature]struct{})
	for _, ext := range c.user.Features {
		c.ext[ext] = struct{}{}
	}
	// Consume the hub info and user list; NORMAL state follows.
	if err := c.acceptUsersList(); err != nil {
		conn.Close()
		return nil, err
	}
	//c.conn.KeepAlive(time.Minute / 2)
	go c.readLoop()
	return c, nil
}
// protocolToHub performs the PROTOCOL stage of the handshake: it advertises
// our features, verifies the hub supports the mandatory BASE and TIGR
// extensions, and returns the session ID assigned by the hub together with
// the mutual feature set.
func protocolToHub(conn *adc.Conn) (adc.SID, adc.ModFeatures, error) {
	ourFeatures := adc.ModFeatures{
		// should always be set for ADC
		adc.FeaBASE: true,
		adc.FeaBAS0: true,
		adc.FeaTIGR: true,
		// extensions

		// TODO: some hubs will stop the handshake after sending the hub info
		//       if this extension is specified
		//adc.FeaPING: true,
		adc.FeaBZIP: true,
		// TODO: ZLIG
	}

	// Send supported features (SUP), initiating the PROTOCOL state.
	// We expect SUP followed by SID to transition to IDENTIFY.
	//
	// https://adc.sourceforge.io/ADC.html#_protocol
	err := conn.WriteHubMsg(adc.Supported{
		Features: ourFeatures,
	})
	if err != nil {
		return adc.SID{}, nil, err
	}
	if err := conn.Flush(); err != nil {
		return adc.SID{}, nil, err
	}

	// shouldn't take longer than this
	deadline := time.Now().Add(time.Second * 5)

	// first, we expect a SUP from the hub with a list of supported features
	msg, err := conn.ReadInfoMsg(deadline)
	if err != nil {
		return adc.SID{}, nil, err
	}
	sup, ok := msg.(adc.Supported)
	if !ok {
		return adc.SID{}, nil, fmt.Errorf("expected SUP command, got: %#v", msg)
	}
	hubFeatures := sup.Features

	// check mutual features; either BASE or its legacy BAS0 spelling will do
	mutual := ourFeatures.Intersect(hubFeatures)
	if !mutual.IsSet(adc.FeaBASE) && !mutual.IsSet(adc.FeaBAS0) {
		return adc.SID{}, nil, fmt.Errorf("hub does not support BASE")
	} else if !mutual.IsSet(adc.FeaTIGR) {
		return adc.SID{}, nil, fmt.Errorf("hub does not support TIGR")
	}

	// next, we expect a SID that will assign a Session ID
	msg, err = conn.ReadInfoMsg(deadline)
	if err != nil {
		return adc.SID{}, nil, err
	}
	sid, ok := msg.(adc.SIDAssign)
	if !ok {
		return adc.SID{}, nil, fmt.Errorf("expected SID command, got: %#v", msg)
	}
	return sid.SID, mutual, nil
}
// identifyToHub performs the IDENTIFY stage by broadcasting our INF with
// the PID/CID pair and the minimal required user fields filled in.
func identifyToHub(conn *adc.Conn, sid adc.SID, user *adc.User) error {
	// Hub may send INF, but it's not required.
	// The client should broadcast INF with PD/ID and other required fields.
	//
	// https://adc.sourceforge.io/ADC.html#_identify
	if user.Id.IsZero() {
		// the client ID is derived from the private ID
		user.Id = user.Pid.Hash()
	}
	if user.Application == "" {
		user.Application = version.Name
		user.Version = version.Vers
	}
	// advertise baseline transfer features unless the caller already set them
	for _, f := range []adc.Feature{adc.FeaSEGA, adc.FeaTCP4} {
		if !user.Features.Has(f) {
			user.Features = append(user.Features, f)
		}
	}
	err := conn.WriteBroadcast(sid, user)
	if err != nil {
		return err
	}
	if err := conn.Flush(); err != nil {
		return err
	}
	// TODO: registered user
	return nil
}
// Conn represents a Client-to-Hub connection.
type Conn struct {
	conn *adc.Conn
	fea  adc.ModFeatures // features negotiated with the hub

	closing chan struct{} // closed once Close is called
	closed  chan struct{} // closed when readLoop exits

	pid  adc.PID
	sid  adc.SID // session ID assigned by the hub
	user adc.User
	ext  map[adc.Feature]struct{} // our extensions, indexed for fast lookup
	hub  adc.HubInfo

	peers struct {
		sync.RWMutex
		// keeps both online and offline users
		byCID map[adc.CID]*Peer
		// only keeps online users
		bySID map[adc.SID]*Peer
	}
	revConn struct {
		sync.Mutex
		tokens map[string]revConnToken // pending reverse-connection tokens
	}
}

// revConnToken tracks one outstanding reverse-connection request.
type revConnToken struct {
	cid    adc.CID
	cancel <-chan struct{} // fires when the requesting context is done
	addr   chan string     // receives the peer's "host:port" on success
	errc   chan error      // receives the failure reason otherwise
}

// PID returns Private ID associated with this connection.
func (c *Conn) PID() adc.PID { return c.pid }

// CID returns Client ID associated with this connection.
func (c *Conn) CID() adc.CID { return c.user.Id }

// SID returns Session ID associated with this connection.
// Only valid after a Client-Hub handshake.
func (c *Conn) SID() adc.SID { return c.sid }

// Hub returns hub information.
func (c *Conn) Hub() adc.HubInfo { return c.hub }

// Features returns a set of negotiated features.
func (c *Conn) Features() adc.ModFeatures { return c.fea.Clone() }
// Close shuts the connection down and waits for the read loop to finish.
// Calling it again after it returned is a no-op.
//
// NOTE(review): two goroutines racing through the select below could both
// reach close(c.closing) and panic -- confirm Close is only called from one
// goroutine, or guard it with sync.Once.
func (c *Conn) Close() error {
	select {
	case <-c.closing:
		// already closing; just wait for the read loop to stop
		<-c.closed
		return nil
	default:
	}
	close(c.closing)
	err := c.conn.Close()
	<-c.closed
	return err
}

// writeDirect sends a D-type (direct) message to the given peer and flushes.
func (c *Conn) writeDirect(to adc.SID, msg adc.Message) error {
	if err := c.conn.WriteDirect(c.SID(), to, msg); err != nil {
		return err
	}
	return c.conn.Flush()
}

// revConnToken registers a new unique token for a reverse-connection request
// to the given peer. The returned channels receive the peer's address or an
// error once the peer answers (both are buffered, so the answering goroutine
// never blocks).
func (c *Conn) revConnToken(ctx context.Context, cid adc.CID) (token string, addr <-chan string, _ <-chan error) {
	ch := make(chan string, 1)
	errc := make(chan error, 1)
	for {
		tok := strconv.Itoa(rand.Int())
		c.revConn.Lock()
		_, ok := c.revConn.tokens[tok]
		if !ok {
			if c.revConn.tokens == nil {
				c.revConn.tokens = make(map[string]revConnToken) // lazy init
			}
			c.revConn.tokens[tok] = revConnToken{cancel: ctx.Done(), cid: cid, addr: ch, errc: errc}
			c.revConn.Unlock()
			return tok, ch, errc
		}
		c.revConn.Unlock()
		// collision, pick another token
	}
}
// readLoop dispatches incoming packets by type until the connection fails
// or is closed. It runs in its own goroutine and signals termination by
// closing c.closed.
func (c *Conn) readLoop() {
	defer close(c.closed)
	for {
		cmd, err := c.conn.ReadPacket(time.Time{}) // zero time = no deadline
		if err != nil {
			log.Println(err)
			return
		}
		switch cmd := cmd.(type) {
		case *adc.BroadcastPacket:
			if err := c.handleBroadcast(cmd); err != nil {
				log.Println(err)
				return
			}
		case *adc.InfoPacket:
			if err := c.handleInfo(cmd); err != nil {
				log.Println(err)
				return
			}
		case *adc.FeaturePacket:
			if err := c.handleFeature(cmd); err != nil {
				log.Println(err)
				return
			}
		case *adc.DirectPacket:
			// TODO: ADC flaw: why ever send the client his own SID? hub should append it instead
			//       same for the sending party
			if cmd.Targ != c.SID() {
				log.Println("direct command to a wrong destination:", cmd.Targ)
				return
			}
			if err := c.handleDirect(cmd); err != nil {
				log.Println(err)
				return
			}
		default:
			log.Printf("unhandled command: %T", cmd)
		}
	}
}
// handleBroadcast dispatches a B-type packet by command name.
func (c *Conn) handleBroadcast(p *adc.BroadcastPacket) error {
	// we could decode the message and type-switch, but for cases
	// below it's better to decode later
	switch p.Name {
	case (adc.User{}).Cmd():
		// user INF update; easier to merge while decoding
		return c.peerUpdate(p.ID, p.Data)
	case (adc.SearchRequest{}).Cmd():
		peer := c.peerBySID(p.ID)
		// async decoding
		go c.handleSearch(peer, p.Data)
		return nil
	// TODO: MSG
	default:
		log.Printf("unhandled broadcast command: %v", p.Name)
		return nil
	}
}

// handleInfo processes an I-type (hub-originated) packet.
func (c *Conn) handleInfo(p *adc.InfoPacket) error {
	msg, err := p.Decode()
	if err != nil {
		return err
	}
	switch msg := msg.(type) {
	case adc.ChatMessage:
		// TODO: ADC: maybe hub should take a AAAA SID for itself
		//       and this will become B-MSG AAAA, instead of I-MSG
		fmt.Printf("%s\n", msg.Text)
		return nil
	case adc.Disconnect:
		// TODO: ADC flaw: this should be B-QUI, not I-QUI
		//       it always includes a SID and is, in fact, a broadcast
		return c.peerQuit(msg.ID)
	default:
		log.Printf("unhandled info command: %v", p.Name)
		return nil
	}
}

// handleFeature processes an F-type packet: it is acted upon only if our
// advertised extensions match the packet's feature selector, in which case
// it is treated like a broadcast.
func (c *Conn) handleFeature(cmd *adc.FeaturePacket) error {
	// TODO: ADC protocol: this is another B-XXX command, but with a feature selector
	//       might be a good idea to extend selector with some kind of tags
	//       it may work for extensions, geo regions, chat channels, etc
	// TODO: ADC flaw: shouldn't the hub convert F-XXX to B-XXX if the current client
	//       supports all listed extensions? does the client care about the selector?
	for fea, want := range cmd.Features {
		if _, enabled := c.ext[fea]; enabled != want {
			return nil // selector does not match us; ignore silently
		}
	}
	// FIXME: this allows F-MSG that we should probably avoid
	return c.handleBroadcast(&adc.BroadcastPacket{
		ID: cmd.ID, BasePacket: cmd.BasePacket,
	})
}

// handleDirect processes a D-type packet addressed to us.
func (c *Conn) handleDirect(cmd *adc.DirectPacket) error {
	msg, err := cmd.Decode()
	if err != nil {
		return err
	}
	switch msg := msg.(type) {
	case adc.ConnectRequest:
		// look up (and consume) the token we issued in revConnToken
		c.revConn.Lock()
		tok, ok := c.revConn.tokens[msg.Token]
		delete(c.revConn.tokens, msg.Token)
		c.revConn.Unlock()
		if !ok {
			// TODO: handle a direct connection request from peers
			log.Printf("ignoring connection attempt from %v", cmd.ID)
			return nil
		}
		p := c.peerBySID(cmd.ID)
		go c.handleConnReq(p, tok, msg)
		return nil
	default:
		log.Printf("unhandled direct command: %v", cmd.Name)
		return nil
	}
}
// handleConnReq resolves a connect-request answer for a token we issued:
// it validates the peer's reply and delivers either an address or an error
// on the token's channels.
func (c *Conn) handleConnReq(p *Peer, tok revConnToken, s adc.ConnectRequest) {
	if p == nil {
		tok.errc <- ErrPeerOffline
		return
	}
	if s.Proto != adc.ProtoADC {
		tok.errc <- fmt.Errorf("unsupported protocol: %q", s.Proto)
		return
	}
	if s.Port == 0 {
		tok.errc <- errors.New("no port to connect to")
		return
	}
	addr := p.Info().Ip4 // NOTE(review): IPv4 only, no Ip6 fallback here
	if addr == "" {
		tok.errc <- errors.New("no address to connect to")
		return
	}
	tok.addr <- addr + ":" + strconv.Itoa(s.Port)
}

// handleSearch decodes a peer's search request and (for now) only logs it.
func (c *Conn) handleSearch(p *Peer, data []byte) {
	var sch adc.SearchRequest
	if err := adc.Unmarshal(data, &sch); err != nil {
		log.Println("failed to decode search:", err)
		return
	}
	log.Printf("search: %+v", sch)
}
// OnlinePeers returns a snapshot of all currently online peers.
func (c *Conn) OnlinePeers() []*Peer {
	c.peers.RLock()
	defer c.peers.RUnlock()
	arr := make([]*Peer, 0, len(c.peers.bySID))
	for _, p := range c.peers.bySID {
		arr = append(arr, p)
	}
	return arr
}

// peerBySID returns the online peer with the given session ID, or nil.
func (c *Conn) peerBySID(sid adc.SID) *Peer {
	c.peers.RLock()
	p := c.peers.bySID[sid]
	c.peers.RUnlock()
	return p
}

// peerJoins records a user coming online. A CID seen before revives the
// existing Peer under the new SID; otherwise a new Peer is indexed by both
// SID and CID.
func (c *Conn) peerJoins(sid adc.SID, u adc.User) *Peer {
	c.peers.Lock()
	defer c.peers.Unlock()
	if c.peers.byCID == nil {
		// lazy init of both indexes
		c.peers.bySID = make(map[adc.SID]*Peer)
		c.peers.byCID = make(map[adc.CID]*Peer)
	}
	p, ok := c.peers.byCID[u.Id]
	if ok {
		// returning user: re-link under the new SID and refresh the record
		c.peers.bySID[sid] = p
		p.online(sid)
		p.update(u)
		return p
	}
	p = &Peer{hub: c, user: &u}
	c.peers.bySID[sid] = p
	c.peers.byCID[u.Id] = p
	p.online(sid)
	return p
}

// peerQuit marks the peer offline and drops it from the SID index; the CID
// index keeps it so a later re-join reuses the same Peer.
func (c *Conn) peerQuit(sid adc.SID) error {
	c.peers.Lock()
	defer c.peers.Unlock()
	p := c.peers.bySID[sid]
	if p == nil {
		return fmt.Errorf("unknown user quits: %v", sid)
	}
	p.offline()
	delete(c.peers.bySID, sid)
	return nil
}

// peerUpdate merges an INF update into an existing online peer, or registers
// a new one. The join path duplicates peerJoins because the peers lock is
// already held here and sync.RWMutex is not reentrant.
func (c *Conn) peerUpdate(sid adc.SID, data []byte) error {
	c.peers.Lock()
	p, ok := c.peers.bySID[sid]
	if ok {
		c.peers.Unlock()
		// decode directly into the stored record to merge fields
		p.mu.Lock()
		defer p.mu.Unlock()
		return adc.Unmarshal(data, p.user)
	}
	defer c.peers.Unlock()
	var u adc.User
	if err := adc.Unmarshal(data, &u); err != nil {
		return err
	}
	if c.peers.byCID == nil {
		c.peers.bySID = make(map[adc.SID]*Peer)
		c.peers.byCID = make(map[adc.CID]*Peer)
	}
	p = &Peer{hub: c, user: &u}
	c.peers.bySID[sid] = p
	c.peers.byCID[u.Id] = p
	p.online(sid)
	return nil
}
// acceptUsersList consumes the hub's transition from IDENTIFY to NORMAL:
// hub info, an optional status, then the user list, which terminates when
// our own INF is echoed back to us.
func (c *Conn) acceptUsersList() error {
	// https://adc.sourceforge.io/ADC.html#_identify
	deadline := time.Now().Add(time.Minute)
	// Accept commands in the following order:
	// 1) Hub info (I-INF)
	// 2) Status (I-STA, optional)
	// 3) User info (B-INF, xN)
	// 3.1) Our own info (B-INF)
	const (
		hubInfo = iota
		optStatus
		userList
	)
	stage := hubInfo
	for {
		cmd, err := c.conn.ReadPacket(deadline)
		if err != nil {
			return err
		}
		switch cmd := cmd.(type) {
		case *adc.InfoPacket:
			switch stage {
			case hubInfo:
				// waiting for hub info; it shares the INF command name with
				// user info
				if cmd.Name != (adc.User{}).Cmd() {
					return fmt.Errorf("expected hub info, received: %#v", cmd)
				}
				if err := adc.Unmarshal(cmd.Data, &c.hub); err != nil {
					return err
				}
				stage = optStatus
			case optStatus:
				// optionally wait for status command
				if cmd.Name != (adc.Status{}).Cmd() {
					return fmt.Errorf("expected status, received: %#v", cmd)
				}
				var st adc.Status
				if err := adc.Unmarshal(cmd.Data, &st); err != nil {
					return err
				} else if !st.Ok() {
					return st.Err()
				}
				stage = userList
			default:
				return fmt.Errorf("unexpected command in stage %d: %#v", stage, cmd)
			}
		case *adc.BroadcastPacket:
			switch stage {
			case optStatus:
				// the optional status was omitted; this is already the list
				stage = userList
				fallthrough
			case userList:
				if cmd.ID == c.sid {
					// make sure to wipe PID, so we don't send it later occasionally
					c.user.Pid = nil
					if err := adc.Unmarshal(cmd.Data, &c.user); err != nil {
						return err
					}
					// done, should switch to NORMAL
					return nil
				}
				// other users
				var u adc.User
				if err := adc.Unmarshal(cmd.Data, &u); err != nil {
					return err
				}
				_ = c.peerJoins(cmd.ID, u)
				// continue until we see ourselves in the list
			default:
				return fmt.Errorf("unexpected command in stage %d: %#v", stage, cmd)
			}
		default:
			return fmt.Errorf("unexpected command: %#v", cmd)
		}
	}
}
// Peer is a remote user seen through this hub connection.
type Peer struct {
	hub *Conn

	mu   sync.RWMutex
	sid  *adc.SID // may change if user disconnects; nil while offline
	user *adc.User
}

// online records the SID under which the peer is currently connected.
func (p *Peer) online(sid adc.SID) {
	p.mu.Lock()
	p.sid = &sid
	p.mu.Unlock()
}

// offline clears the peer's session ID.
func (p *Peer) offline() {
	p.mu.Lock()
	p.sid = nil
	p.mu.Unlock()
}

// getSID returns the current session ID pointer (nil when offline).
func (p *Peer) getSID() *adc.SID {
	p.mu.RLock()
	sid := p.sid
	p.mu.RUnlock()
	return sid
}

// Online reports whether the peer currently has a session.
func (p *Peer) Online() bool {
	return p.getSID() != nil
}

// Info returns a copy of the peer's user record.
func (p *Peer) Info() adc.User {
	p.mu.RLock()
	user := *p.user
	p.mu.RUnlock()
	return user
}

// update replaces the stored user record; the CID must not change.
func (p *Peer) update(u adc.User) {
	p.mu.Lock()
	if p.user.Id != u.Id {
		p.mu.Unlock()
		panic("wrong cid")
	}
	*p.user = u
	p.mu.Unlock()
}
|
# Install DVD decryption support (libdvdcss) and the HandBrake transcoder via Homebrew.
brew install libdvdcss handbrake
|
#!/bin/bash
# Example of how to use the command line tool to build your php-commonjs scripts into a single file.
#
# switch to php-commonjs root; "$(...)" with quoting keeps this working when
# the script lives under a path containing spaces, and the guard prevents
# running the compiler from the wrong directory if cd fails.
#
cd "$(dirname "$0")/.." || exit 1
# you may want to specify a location for php.ini so you don't run into open_basedir restrictions
#
PHP="/usr/bin/php -c /etc"
# pipe compiler output to your destination file
# be sure to pass module search paths so the compiler can find them
#
$PHP -f bin/compile.php -- --compile=example/example-script.js --search=example/modules > example/example-compiled.js
echo "Compiled to $(pwd)/example/example-compiled.js"
|
<reponame>dbatten5/dagster
import pytest
from click.testing import CliRunner
from dagster import AssetKey, AssetMaterialization, Output, execute_pipeline, pipeline, solid
from dagster.cli.asset import asset_wipe_command
from dagster.core.instance import DagsterInstance
from dagster.seven import json
@pytest.fixture(name="asset_instance")
def mock_asset_instance(mocker):
    """Yield an ephemeral DagsterInstance and patch DagsterInstance.get to it.

    The CLI command under test resolves its instance via DagsterInstance.get,
    so patching it makes the command operate on this in-memory instance.
    """
    # can use the ephemeral instance, since the default InMemoryEventLogStorage is asset aware
    instance = DagsterInstance.ephemeral()
    mocker.patch(
        "dagster.core.instance.DagsterInstance.get",
        return_value=instance,
    )
    yield instance
@solid
def solid_one(_):
    # Materializes a single, simple (one-component) asset key.
    yield AssetMaterialization(asset_key=AssetKey("asset_1"))
    yield Output(1)


@solid
def solid_two(_):
    # Materializes one simple key plus two structured (multi-component) keys;
    # list and tuple forms are equivalent spellings of a key path.
    yield AssetMaterialization(asset_key=AssetKey("asset_2"))
    yield AssetMaterialization(asset_key=AssetKey(["path", "to", "asset_3"]))
    yield AssetMaterialization(asset_key=AssetKey(("path", "to", "asset_4")))
    yield Output(1)


@solid
def solid_normalization(_):
    # Raw string key containing "/" and "-" -- presumably normalized by
    # Dagster into a structured key; confirm against AssetKey parsing rules.
    yield AssetMaterialization(asset_key="path/to-asset_5")
    yield Output(1)


# pipeline_one materializes 1 asset; pipeline_two adds 3 more (4 total).
@pipeline
def pipeline_one():
    solid_one()


@pipeline
def pipeline_two():
    solid_one()
    solid_two()
def test_asset_wipe_errors(asset_instance):  # pylint: disable=unused-argument
    """The wipe command rejects missing and mutually exclusive arguments."""
    runner = CliRunner()
    result = runner.invoke(asset_wipe_command)
    assert result.exit_code == 2
    assert (
        "Error, you must specify an asset key or use `--all` to wipe all asset keys."
        in result.output
    )

    # `--all` and an explicit key must not be combined.
    result = runner.invoke(asset_wipe_command, ["--all", json.dumps(["path", "to", "asset_key"])])
    assert result.exit_code == 2
    assert "Error, cannot use more than one of: asset key, `--all`." in result.output


def test_asset_exit(asset_instance):  # pylint: disable=unused-argument
    """Any confirmation input other than DELETE aborts without wiping."""
    runner = CliRunner()
    result = runner.invoke(asset_wipe_command, ["--all"], input="NOT_DELETE\n")
    assert result.exit_code == 0
    assert "Exiting without removing asset indexes" in result.output


def test_asset_single_wipe(asset_instance):
    """Wiping individual structured keys removes only those assets."""
    runner = CliRunner()
    execute_pipeline(pipeline_one, instance=asset_instance)
    execute_pipeline(pipeline_two, instance=asset_instance)
    # Both pipelines together materialize 4 distinct asset keys.
    asset_keys = asset_instance.all_asset_keys()
    assert len(asset_keys) == 4

    result = runner.invoke(
        asset_wipe_command, [json.dumps(["path", "to", "asset_3"])], input="DELETE\n"
    )
    assert result.exit_code == 0
    assert "Removed asset indexes from event logs" in result.output

    result = runner.invoke(
        asset_wipe_command, [json.dumps(["path", "to", "asset_4"])], input="DELETE\n"
    )
    assert result.exit_code == 0
    assert "Removed asset indexes from event logs" in result.output

    # Two of the four keys remain after two single wipes.
    asset_keys = asset_instance.all_asset_keys()
    assert len(asset_keys) == 2


def test_asset_multi_wipe(asset_instance):
    """Several asset keys can be wiped in a single invocation."""
    runner = CliRunner()
    execute_pipeline(pipeline_one, instance=asset_instance)
    execute_pipeline(pipeline_two, instance=asset_instance)
    asset_keys = asset_instance.all_asset_keys()
    assert len(asset_keys) == 4

    result = runner.invoke(
        asset_wipe_command,
        [json.dumps(["path", "to", "asset_3"]), json.dumps(["asset_1"])],
        input="DELETE\n",
    )
    assert result.exit_code == 0
    assert "Removed asset indexes from event logs" in result.output
    asset_keys = asset_instance.all_asset_keys()
    assert len(asset_keys) == 2


def test_asset_wipe_all(asset_instance):
    """`--all` with DELETE confirmation removes every asset index."""
    runner = CliRunner()
    execute_pipeline(pipeline_one, instance=asset_instance)
    execute_pipeline(pipeline_two, instance=asset_instance)
    asset_keys = asset_instance.all_asset_keys()
    assert len(asset_keys) == 4

    result = runner.invoke(asset_wipe_command, ["--all"], input="DELETE\n")
    assert result.exit_code == 0
    assert "Removed asset indexes from event logs" in result.output
    asset_keys = asset_instance.all_asset_keys()
    assert len(asset_keys) == 0
|
#!/usr/bin/env bash
# Packages a nix-built clash binary distribution into a tarball for snapcraft.
set -e
set -x

# Path to the nix file containing the derivation we want to package
NIXFILE=$(dirname $0)/bindist.nix

# "drv" will be set to the path inside /nix/store which contains the
# resulting derivation
drv=$(nix-build $NIXFILE)

# output filename
tarball=$(pwd)/clash-snap-bindist.tar.xz

# temporary working directory, cleaned up on exit
tmpdir=$(mktemp -d)
trap "rm -rf $tmpdir" EXIT
cd $tmpdir

# create symlinks inside target's /bin to every executable in the derivation
# (non-transitively)
mkdir -p bin
for binary in $(find ${drv}/bin); do
    ln -s ${binary} bin/$(basename ${binary})
done
# 'find' also lists the bin directory itself, which produced a bogus
# bin/bin symlink in the loop above; drop it.
rm bin/bin

# Copy nix derivation, including fully qualified directory name, to this
# folder. There's probably a better way to do this?
tar cf - --owner 0 --group 0 $(nix-store -qR $drv) | tar xf -

# Allow symlinks to be deleted
find * -type d -exec chmod +w {} \;

# Convert absolute symlinks to relative symlinks (needed for snap)
find . -lname '/*' -exec ksh -c '
    for link; do
        target=$(readlink "$link")
        link=${link#./}
        root=${link//+([!\/])/..}; root=${root#/}; root=${root%..}
        rm "$link"
        ln -s "$root${target#/}" "$link"
    done
' _ {} +

# Package it for snapcraft. Snapcraft will later recompress it more
# thoroughly. Not using /no compression/ as these files will be used
# in GitLab artifacts.
# Fix: xz only honours XZ_OPT (or XZ_DEFAULTS); the previous "XZ_DEFAULT"
# spelling was silently ignored, so the thread/level settings never applied.
XZ_OPT="-T 0 -3" tar cJf $tarball --owner 0 --group 0 *
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Shared helpers: assertion functions and collection cleanup.
source bin-test/utils/assert.sh
source bin-test/utils/cleanup.sh

# All tests should start with solr_test

# Suite setup: (re)start Solr in cloud mode and stage a scratch copy of the
# techproducts configset for the -d path tests.
function solr_suite_before() {
  bin/solr stop -all > /dev/null 2>&1
  bin/solr start -c > /dev/null 2>&1
  local source_configset_dir="server/solr/configsets/sample_techproducts_configs"
  TMP_CONFIGSET_DIR="/tmp/test_config"
  rm -rf $TMP_CONFIGSET_DIR; cp -r $source_configset_dir $TMP_CONFIGSET_DIR
}

# Suite teardown: stop Solr and remove the scratch configset.
function solr_suite_after() {
  bin/solr stop -all > /dev/null 2>&1
  rm -rf $TMP_CONFIGSET_DIR
}

# Per-test setup/teardown: start each test from a clean slate of collections.
function solr_unit_test_before() {
  delete_all_collections > /dev/null 2>&1
}

function solr_unit_test_after() {
  delete_all_collections > /dev/null 2>&1
}
# Happy path: create_collection with only a name succeeds.
function solr_test_can_create_collection() {
  local create_cmd="bin/solr create_collection -c COLL_NAME"
  local expected_output="Created collection 'COLL_NAME'"
  local actual_output; actual_output=$($create_cmd)
  assert_cmd_succeeded "$create_cmd" || return 1
  assert_output_contains "$actual_output" "$expected_output" || return 1
}

# -d with a nonexistent directory is rejected with a clear error message.
function solr_test_rejects_d_option_with_invalid_config_dir() {
  local create_cmd="bin/solr create_collection -c COLL_NAME -d /asdf"
  local expected_output="Specified configuration directory /asdf not found!"
  local actual_output; actual_output=$($create_cmd)
  assert_cmd_failed "$create_cmd" || return 1
  assert_output_contains "$actual_output" "$expected_output" || return 1
}

# -d accepts the name of a built-in configset.
function solr_test_accepts_d_option_with_explicit_builtin_config() {
  local create_cmd="bin/solr create_collection -c COLL_NAME -d sample_techproducts_configs"
  local expected_output="Created collection 'COLL_NAME'"
  local actual_output; actual_output=$($create_cmd)
  assert_cmd_succeeded "$create_cmd" || return 1
  assert_output_contains "$actual_output" "$expected_output" || return 1
}

# -d accepts an explicit filesystem path to a configset.
function solr_test_accepts_d_option_with_explicit_path_to_config() {
  local create_cmd="bin/solr create_collection -c COLL_NAME -d $TMP_CONFIGSET_DIR"
  local expected_output="Created collection 'COLL_NAME'"
  local actual_output; actual_output=$($create_cmd)
  assert_cmd_succeeded "$create_cmd" || return 1
  assert_output_contains "$actual_output" "$expected_output" || return 1
}

# -n sets the name under which the configset is stored.
function solr_test_accepts_n_option_as_config_name() {
  local create_cmd="bin/solr create_collection -c COLL_NAME -n other_conf_name"
  local expected_name_output="Created collection 'COLL_NAME'"
  local expected_config_name_output="config-set 'other_conf_name'"
  local actual_output; actual_output=$($create_cmd)
  # NOTE(review): a stale marker here read "Expect to fail, change to
  # success" while the assertions below expect success -- confirm intent.
  assert_cmd_succeeded "$create_cmd" || return 1
  assert_output_contains "$actual_output" "$expected_name_output" || return 1
  assert_output_contains "$actual_output" "$expected_config_name_output" || return 1
}
# Two collections may share a configset when -n names the same config.
function solr_test_allows_config_reuse_when_n_option_specifies_same_config() {
  local create_cmd1="bin/solr create_collection -c COLL_NAME_1 -n shared_config"
  local expected_coll_name_output1="Created collection 'COLL_NAME_1'"
  local create_cmd2="bin/solr create_collection -c COLL_NAME_2 -n shared_config"
  local expected_coll_name_output2="Created collection 'COLL_NAME_2'"
  local expected_config_name_output="config-set 'shared_config'"

  local actual_output1; actual_output1=$($create_cmd1)
  assert_cmd_succeeded "$create_cmd1" || return 1
  assert_output_contains "$actual_output1" "$expected_coll_name_output1" || return 1
  assert_output_contains "$actual_output1" "$expected_config_name_output" || return 1

  local actual_output2; actual_output2=$($create_cmd2)
  assert_cmd_succeeded "$create_cmd2" || return 1
  assert_output_contains "$actual_output2" "$expected_coll_name_output2" || return 1
  assert_output_contains "$actual_output2" "$expected_config_name_output" || return 1
}

# -s creates a collection with the requested number of shards.
function solr_test_create_multisharded_collections_when_s_provided() {
  local create_cmd="bin/solr create_collection -c COLL_NAME -s 2"
  local expected_coll_name_output="Created collection 'COLL_NAME'"
  local expected_shards_output="2 shard(s)"
  local actual_output; actual_output=$($create_cmd)
  assert_cmd_succeeded "$create_cmd" || return 1
  assert_output_contains "$actual_output" "$expected_coll_name_output" || return 1
  assert_output_contains "$actual_output" "$expected_shards_output" || return 1
}

# -rf creates a collection with the requested replication factor.
function solr_test_creates_replicated_collections_when_r_provided() {
  local create_cmd="bin/solr create_collection -c COLL_NAME -rf 2"
  local expected_coll_name_output="Created collection 'COLL_NAME'"
  local expected_rf_output="2 replica(s)"
  local actual_output; actual_output=$($create_cmd)
  assert_cmd_succeeded "$create_cmd" || return 1
  assert_output_contains "$actual_output" "$expected_coll_name_output" || return 1
  assert_output_contains "$actual_output" "$expected_rf_output" || return 1
}
|
<filename>cses/1732.cc
// https://cses.fi/problemset/task/1732/
#include <bits/stdc++.h>
using namespace std;
using vi = vector<int>;
// CSES 1732 "Finding Borders": print the lengths of all proper borders of
// the input string (prefixes that are also suffixes) in increasing order.
int main() {
    // Z-algorithm: x/y track the left/right ends of the rightmost Z-box;
    // a[i] is the length of the longest common prefix of s and s[i..].
    int x = 0, y = 0, n;
    string s;
    cin >> s;
    n = s.size();
    vi a(n), r;
    for (int i=1; i<n; i++) {
        // Initialise from the mirrored position in the current Z-box,
        // clamped to the box's right edge (and to 0 outside the box).
        a[i] = max(0, min(a[i-x], y-i));
        // Extend the match naively past the box boundary.
        while (a[i]+i < n && s[a[i]] == s[a[i]+i]) a[i]++;
        if (a[i]+i > y) x=i, y=a[i]+i;
        // A Z-value reaching the end of s means s[i..] is a border of length a[i].
        if (a[i]+i == n) r.push_back(a[i]);
    }
    // Borders were collected in decreasing length; print in increasing
    // order, space-separated, with a newline after the last one.
    for (int i = r.size() - 1; i >= 0; i--) cout << r[i] << " \n"[!i];
}
|
def convert_coordinates(lat1, lon1, in_cs, out_cs):
    """Reproject a single point from CRS ``in_cs`` to CRS ``out_cs``.

    NOTE(review): relies on ``Proj`` and ``transform`` (pyproj) being
    imported elsewhere in this file -- confirm the import exists.
    NOTE(review): pyproj's legacy ``transform`` expects coordinates in
    (x, y) = (lon, lat) order for EPSG:4326, but the parameters here are
    passed as (lat, lon) -- verify the intended axis order.
    """
    in_cs_proj = Proj(in_cs)
    out_cs_proj = Proj(out_cs)
    return transform(in_cs_proj, out_cs_proj, lat1, lon1)

# Example: WGS84 geographic coordinates -> UTM zone 36N.
in_coordinates = 'epsg:4326'
out_coordinates = 'epsg:32636'
convert_coordinates(-13.28629, 119.87576, in_coordinates, out_coordinates)
|
#!/usr/bin/env bash
# Installs a specific Ray wheel (identified by version/branch/commit) into an
# Anaconda tensorflow_p36 environment and runs the requested workload script.

# Print command-line help. (Previously referenced by --help but never
# defined, so `--help` aborted with "usage: command not found".)
usage() {
    echo "Usage: $0 --ray-version=VERSION --commit=SHA --ray-branch=BRANCH --workload=NAME"
}

ray_version=""
commit=""
ray_branch=""
workload=""

# Parse --key=value style arguments; anything unrecognised is fatal.
for i in "$@"
do
echo "$i"
case "$i" in
    --ray-version=*)
    ray_version="${i#*=}"

    ;;
    --commit=*)
    commit="${i#*=}"
    ;;
    --ray-branch=*)
    ray_branch="${i#*=}"
    ;;
    --workload=*)
    workload="${i#*=}"
    ;;
    --help)
    usage
    exit
    ;;
    *)
    echo "unknown arg, $i"
    exit 1
    ;;
esac
done

echo "version: $ray_version"
echo "commit: $commit"
echo "branch: $ray_branch"
echo "workload: $workload"

# CI wheel built for this branch+commit (Python 3.6, manylinux1).
wheel="https://s3-us-west-2.amazonaws.com/ray-wheels/$ray_branch/$commit/ray-$ray_version-cp36-cp36m-manylinux1_x86_64.whl"

# Install Anaconda; '|| true' keeps the script going when already installed.
wget --quiet https://repo.continuum.io/archive/Anaconda3-5.0.1-Linux-x86_64.sh || true
bash Anaconda3-5.0.1-Linux-x86_64.sh -b -p "$HOME/anaconda3" || true
# shellcheck disable=SC2016
echo 'export PATH="$HOME/anaconda3/bin:$PATH"' >> ~/.bashrc

conda uninstall -y terminado
source activate tensorflow_p36 && pip install -U pip
source activate tensorflow_p36 && pip install -U "$wheel"
pip install -U pip
conda uninstall -y terminado || true
pip install terminado
pip install boto3==1.4.8 cython==0.29.0

python "workloads/$workload.py"
|
<reponame>lixin9311/bitshares-go
package history
import (
"github.com/scorum/bitshares-go/caller"
"github.com/scorum/bitshares-go/types"
)
// API provides access to the hub's history RPC methods.
type API struct {
	caller caller.Caller
	id     caller.APIID // numeric API identifier used for every call
}

// NewAPI wraps a caller with the history API ID.
func NewAPI(id caller.APIID, caller caller.Caller) *API {
	return &API{id: id, caller: caller}
}

// call invokes a history-API method over the underlying caller.
func (api *API) call(method string, args []interface{}, reply interface{}) error {
	return api.caller.Call(api.id, method, args, reply)
}

// GetMarketHistory returns market history base/quote (candlesticks) for the given period
func (api *API) GetMarketHistory(base, quote types.ObjectID, bucketSeconds uint32, start, end types.Time) ([]*Bucket, error) {
	var resp []*Bucket
	err := api.call("get_market_history", []interface{}{base.String(), quote.String(), bucketSeconds, start, end}, &resp)
	return resp, err
}

// GetMarketHistoryBuckets returns a list of buckets that can be passed to
// `GetMarketHistory` as the `bucketSeconds` argument
func (api *API) GetMarketHistoryBuckets() ([]uint32, error) {
	var resp []uint32
	err := api.call("get_market_history_buckets", caller.EmptyParams, &resp)
	return resp, err
}

// GetFillOrderHistory returns filled orders
func (api *API) GetFillOrderHistory(base, quote types.ObjectID, limit uint32) ([]*OrderHistory, error) {
	var resp []*OrderHistory
	err := api.call("get_fill_order_history", []interface{}{base.String(), quote.String(), limit}, &resp)
	return resp, err
}

// GetAccountHistory gets operations relevant to the specified account
// account: The account whose history should be queried
// stop: ID of the earliest operation to retrieve
// limit: Maximum number of operations to retrieve (must not exceed 100)
// start: ID of the most recent operation to retrieve
func (api *API) GetAccountHistory(account, stop types.ObjectID, limit int, start types.ObjectID) ([]*OperationHistory, error) {
	var history []*OperationHistory
	err := api.call("get_account_history", []interface{}{account.String(), stop.String(), limit, start.String()}, &history)
	return history, err
}
|
/** Strategy interface: computes the total purchase price for a car order. */
export interface Strategy {
  calPrice(price: number, num: number): number;
}

/** No discount (intended for purchases of 5 cars or fewer). */
export class Nodiscount implements Strategy {
  public calPrice(price: number, num: number): number {
    return price * num;
  }
}

/** Discounted pricing (intended for purchases of more than 5 cars). */
export class Disount implements Strategy {
  /**
   * @param rate multiplier applied to the total; defaults to 0.9 (a 10%
   *             discount), matching the previously hard-coded value, so
   *             existing `new Disount()` callers are unaffected.
   */
  public constructor(private readonly rate: number = 0.9) {}

  public calPrice(price: number, num: number): number {
    return price * num * this.rate;
  }
}

/** Context: delegates the total-price calculation to the chosen strategy. */
export class Context {
  public constructor(private readonly strategy: Strategy) {}

  public calPrice(price: number, num: number): number {
    return this.strategy.calPrice(price, num);
  }
}
/** Demo driver exercising both pricing strategies via the Context. */
export class StrategyTest {
  public static test() {
    // Total for 3 cars: no discount applies.
    const fullPriceContext: Context = new Context(new Nodiscount());
    console.log("购买3辆总金额: " + fullPriceContext.calPrice(10000, 3));
    // Total for 12 cars: the discount strategy applies.
    const discountedContext: Context = new Context(new Disount());
    console.log("购买12辆总金额: " + discountedContext.calPrice(10000, 12));
  }
}
|
"""
Based on Premailer.
This is a hack of Premailer that uses BeautifulSoup and SoupSelect instead of lxml.
"""
# http://www.peterbe.com/plog/premailer.py
import re, os
import codecs
import urlparse, urllib
from BeautifulSoup import BeautifulSoup, Comment
import soupselect; soupselect.monkeypatch()
__version__ = '1.9'
__all__ = ['PremailerError', 'Premailer', 'transform']
class PremailerError(Exception):
    # Raised when the input HTML cannot be parsed or styles cannot be applied.
    pass
def _merge_styles(old, new, class_=''):
"""
if ::
old = 'font-size:1px; color: red'
and ::
new = 'font-size:2px; font-weight: bold'
then ::
return 'color: red; font-size:2px; font-weight: bold'
In other words, the new style bits replace the old ones.
The @class_ parameter can be something like ':hover' and if that
is there, you split up the style with '{...} :hover{...}'
Note: old could be something like '{...} ::first-letter{...}'
"""
news = {}
for k, v in [x.strip().split(':', 1) for x in new.split(';') if x.strip()]:
news[k.strip()] = v.strip()
groups = {}
grouping_regex = re.compile('([:\-\w]*){([^}]+)}')
grouped_split = grouping_regex.findall(old)
if grouped_split:
for old_class, old_content in grouped_split:
olds = {}
for k, v in [x.strip().split(':', 1) for x in old_content.split(';') if x.strip()]:
olds[k.strip()] = v.strip()
groups[old_class] = olds
else:
olds = {}
for k, v in [x.strip().split(':', 1) for x in old.split(';') if x.strip()]:
olds[k.strip()] = v.strip()
groups[''] = olds
# Perform the merge
merged = news
for k, v in groups.get(class_, {}).items():
if k not in merged:
merged[k] = v
groups[class_] = merged
if len(groups) == 1:
return '; '.join(['%s:%s' % (k, v) for (k, v) in groups.values()[0].items()])
else:
all = []
for class_, mergeable in sorted(groups.items(),
lambda x, y: cmp(x[0].count(':'), y[0].count(':'))):
all.append('%s{%s}' % (class_,
'; '.join(['%s:%s' % (k, v)
for (k, v)
in mergeable.items()])))
return ' '.join([x for x in all if x != '{}'])
# /* ... */ comment blocks, possibly spanning multiple lines.
_css_comments = re.compile(r'/\*.*?\*/', re.MULTILINE|re.DOTALL)
# One CSS rule: group 2 = selectors, group 3 = declaration body.
_regex = re.compile('((.*?){(.*?)})', re.DOTALL|re.M)
# Whitespace normalisation after ';' and ':' in declaration bodies.
_semicolon_regex = re.compile(';(\s+)')
_colon_regex = re.compile(':(\s+)')
class Premailer(object):
def __init__(self, html, base_url=None,
exclude_pseudoclasses=False,
keep_style_tags=False,
include_star_selectors=False,
external_styles=None):
self.html = html
self.base_url = base_url
self.exclude_pseudoclasses = exclude_pseudoclasses
# whether to delete the <style> tag once it's been processed
self.keep_style_tags = keep_style_tags
# whether to process or ignore selectors like '* { foo:bar; }'
self.include_star_selectors = include_star_selectors
if isinstance(external_styles, basestring):
external_styles = [external_styles]
self.external_styles = external_styles
def _parse_style_rules(self, css_body):
leftover = []
rules = []
css_body = _css_comments.sub('', css_body)
for each in _regex.findall(css_body.strip()):
__, selectors, bulk = each
bulk = _semicolon_regex.sub(';', bulk.strip())
bulk = _colon_regex.sub(':', bulk.strip())
if bulk.endswith(';'):
bulk = bulk[:-1]
for selector in [x.strip() for x in selectors.split(',') if x.strip()]:
if ':' in selector and self.exclude_pseudoclasses:
# a pseudoclass
leftover.append((selector, bulk))
continue
elif selector == '*' and not self.include_star_selectors:
continue
rules.append((selector, bulk))
return rules, leftover
def transform(self):
"""change the self.html and return it with CSS turned into style
attributes.
"""
page = BeautifulSoup(self.html)
if page is None:
print repr(self.html)
raise PremailerError("Could not parse the html")
# Strip comments.
comments = page.findAll(text=lambda text: isinstance(text, Comment))
map(lambda c: c.extract(), comments)
##
## style selectors
##
rules = []
for style in page.find("style") or []:
css_body = str(style)
these_rules, these_leftover = self._parse_style_rules(css_body)
rules.extend(these_rules)
if these_leftover:
style.text = '\n'.join(['%s {%s}' % (k, v) for (k, v) in these_leftover])
elif not self.keep_style_tags:
style.extract()
if self.external_styles:
for stylefile in self.external_styles:
print stylefile
if stylefile.startswith('http://'):
css_body = urllib.urlopen(stylefile).read()
elif os.path.exists(stylefile):
try:
f = codecs.open(stylefile)
css_body = f.read()
finally:
f.close()
else:
raise ValueError(u"Could not find external style: %s" % stylefile)
these_rules, these_leftover = self._parse_style_rules(css_body)
rules.extend(these_rules)
for selector, style in rules:
class_ = ''
if ':' in selector:
selector, class_ = re.split(':', selector, 1)
class_ = ':%s' % class_
#sel = CSSSelector(selector)
items = page.findSelect(selector)
for item in items:
old_style = item.get('style','')
new_style = _merge_styles(old_style, style, class_)
item['style'] = new_style
self._style_to_basic_html_attributes(item, new_style)
for item in page.findAll(lambda tag: tag.get('class', None) != None):
# delete the 'class' attribute
del item['class']
##
## URLs
##
if self.base_url:
for attr in ('href', 'src'):
for item in page.findAll(lambda tag: tag.get(attr, None)!= None):
item[attr] = urlparse.urljoin(self.base_url, item[attr])
# The default __repr__ encoding for the used version of BeautifulSoup is utf-8
return str(page).replace('<head/>','<head></head>')
def _style_to_basic_html_attributes(self, element, style_content):
"""given an element and styles like
'background-color:red; font-family:Arial' turn some of that into HTML
attributes. like 'bgcolor', etc.
Note, the style_content can contain pseudoclasses like:
'{color:red; border:1px solid green} :visited{border:1px solid green}'
"""
if style_content.count('}') and \
style_content.count('{') == style_content.count('{'):
style_content = style_content.split('}')[0][1:]
attributes = {}
for key, value in [x.split(':') for x in style_content.split(';')
if len(x.split(':'))==2]:
key = key.strip()
if key == 'text-align':
attributes['align'] = value.strip()
elif key == 'background-color':
attributes['bgcolor'] = value.strip()
elif key == 'width':
value = value.strip()
if value.endswith('px'):
value = value[:-2]
attributes['width'] = value
#else:
# print "key", repr(key)
# print 'value', repr(value)
for key, value in attributes.items():
if key in element:
# already set, don't dare to overwrite
continue
element[key] = value
def transform(html, base_url=None):
    # Convenience wrapper: one-shot inlining with default options.
    inliner = Premailer(html, base_url=base_url)
    return inliner.transform()
if __name__=='__main__':
    # Smoke test: inline the embedded stylesheet and absolutise relative URLs.
    html = """<html>
<head>
<title>Test</title>
<style>
h1, h2 { color:red; }
strong {
text-decoration:none
}
p { font-size:2px }
p.footer { font-size: 1px}
</style>
</head>
<body>
<h1>Hi!</h1>
<p><strong>Yes!</strong></p>
<p class="footer" style="color:red">Feetnuts</p>
<img href="/images/logo.png"/>
<a href="">28 Wins</a>
</body>
</html>"""
    # Python 2 print statement; this module predates Python 3.
    print transform(html, base_url="http://www.28wins.com")
|
// Copyright 2016 PLUMgrid
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hover
import (
"fmt"
"syscall"
"github.com/vishvananda/netlink"
"github.com/iovisor/iomodules/hover/bpf"
"github.com/iovisor/iomodules/hover/canvas"
)
// InterfaceNode is a canvas node backed by a kernel network interface,
// exposing access to its netlink handle.
type InterfaceNode interface {
	canvas.Node
	Link() netlink.Link
	SetLink(netlink.Link)
}
// ExtInterface is an InterfaceNode wrapping an existing external (host)
// network interface.
type ExtInterface struct {
	canvas.NodeBase
	link netlink.Link
}
// NewExtInterface wraps an existing host link in an ExtInterface node.
// The node starts with id -1 and fd -1, and is named after the link.
func NewExtInterface(link netlink.Link) *ExtInterface {
	base := canvas.NewNodeBase(-1, -1, link.Attrs().Name, "i:", 1)
	return &ExtInterface{NodeBase: base, link: link}
}
// FD returns the file descriptor of the egress BPF program for this
// interface, compiling and loading it on first use. Panics on compile or
// load failure.
func (ifc *ExtInterface) FD() int {
	if ifc.NodeBase.FD() >= 0 {
		return ifc.NodeBase.FD()
	}
	cflags := []string{
		fmt.Sprintf("-DINTERFACE_ID=%d", ifc.link.Attrs().Index),
	}
	// Named 'mod' so the local no longer shadows the imported bpf package.
	mod := bpf.NewBpfModule(bpf.NetdevTxC, cflags)
	if mod == nil {
		panic(fmt.Errorf("Failed to compile bpf module for %s egress", ifc.Path()))
	}
	// free the llvm memory, just keep the fd
	defer mod.Close()
	fd, err := mod.LoadNet("egress")
	if err != nil {
		panic(err)
	}
	// Dup so the descriptor survives mod.Close().
	fd2, err := syscall.Dup(fd)
	if err != nil {
		panic(err)
	}
	ifc.NodeBase.SetFD(fd2)
	return ifc.NodeBase.FD()
}
// Link returns the netlink handle for this external interface.
func (ifc *ExtInterface) Link() netlink.Link { return ifc.link }

// SetLink replaces the netlink handle for this external interface.
func (ifc *ExtInterface) SetLink(link netlink.Link) { ifc.link = link }

// SetID records the node id via the embedded NodeBase.
func (ifc *ExtInterface) SetID(id int) { ifc.NodeBase.SetID(id) }
// IngressChain owns the fd of a loaded ingress-chain BPF program.
type IngressChain struct {
	fd int
}
// NewIngressChain compiles and loads the ingress BPF program with the four
// chain values baked in via -D defines. The returned chain owns a duplicated
// fd; the compiled module itself is freed before returning.
func NewIngressChain(chain [4]int) (*IngressChain, error) {
	cflags := []string{
		fmt.Sprintf("-DCHAIN_VALUE0=%#x", chain[0]),
		fmt.Sprintf("-DCHAIN_VALUE1=%#x", chain[1]),
		fmt.Sprintf("-DCHAIN_VALUE2=%#x", chain[2]),
		fmt.Sprintf("-DCHAIN_VALUE3=%#x", chain[3]),
	}
	//Debug.Printf("netdev: %v\n", cflags)
	// Named 'mod' so the local no longer shadows the imported bpf package.
	mod := bpf.NewBpfModule(bpf.NetdevRxC, cflags)
	if mod == nil {
		return nil, fmt.Errorf("NewIngressChain bpf compile failed")
	}
	defer mod.Close()
	fd, err := mod.LoadNet("ingress")
	if err != nil {
		return nil, err
	}
	// Dup so the descriptor survives mod.Close().
	fd2, err := syscall.Dup(fd)
	if err != nil {
		return nil, err
	}
	return &IngressChain{fd: fd2}, nil
}
// Close releases the duplicated program fd.
func (c *IngressChain) Close() { syscall.Close(c.fd) }

// FD returns the fd of the loaded ingress program.
func (c *IngressChain) FD() int { return c.fd }
// EgressChain owns the fd of a loaded egress-chain BPF program.
type EgressChain struct {
	fd int
}
// NewEgressChain compiles and loads the egress BPF program with the four
// chain values baked in via -D defines. The returned chain owns a duplicated
// fd; the compiled module itself is freed before returning.
func NewEgressChain(chain [4]int) (*EgressChain, error) {
	cflags := []string{
		fmt.Sprintf("-DCHAIN_VALUE0=%#x", chain[0]),
		fmt.Sprintf("-DCHAIN_VALUE1=%#x", chain[1]),
		fmt.Sprintf("-DCHAIN_VALUE2=%#x", chain[2]),
		fmt.Sprintf("-DCHAIN_VALUE3=%#x", chain[3]),
	}
	//Debug.Printf("netdev: %v\n", cflags)
	// Named 'mod' so the local no longer shadows the imported bpf package.
	mod := bpf.NewBpfModule(bpf.NetdevEgressC, cflags)
	if mod == nil {
		return nil, fmt.Errorf("NewEgressChain bpf compile failed")
	}
	defer mod.Close()
	fd, err := mod.LoadNet("egress")
	if err != nil {
		return nil, err
	}
	// Dup so the descriptor survives mod.Close().
	fd2, err := syscall.Dup(fd)
	if err != nil {
		return nil, err
	}
	return &EgressChain{fd: fd2}, nil
}
// Close releases the duplicated program fd.
func (c *EgressChain) Close() { syscall.Close(c.fd) }

// FD returns the fd of the loaded egress program.
func (c *EgressChain) FD() int { return c.fd }
|
export { I18nextCLILanguageDetector as default } from './i18next-cli-language-detector';
|
#!/bin/bash
# Tag and push the builder image to Docker Hub when PUBLISH_DOCKERHUB=true.
# Usage: $0 <tag> <label>
set -e
set -x

TAG="$1"
LABEL="$2"

if [[ $PUBLISH_DOCKERHUB == 'true' ]]
then
  echo "$DOCKER_HUB_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
  # Expansions are quoted so a label/tag containing whitespace fails loudly
  # instead of word-splitting into extra arguments.
  docker tag "${LABEL}" "${LABEL}-builder:${TAG}"
  docker push "${LABEL}-builder:${TAG}"
fi
|
<reponame>voidberg/imagecache<gh_stars>1-10
// Imagecache plugin: when attached, adds a `desaturate` transform to the host.
function attach() {
  // Convert the image to greyscale, then signal completion via the callback.
  // `config` is accepted for interface parity but not used by this step.
  this.desaturate = function desaturate(image, config, callback) {
    image.greyscale();
    return callback();
  };
}

module.exports = { attach: attach };
|
<filename>tests/unit/bud-server/service.test.ts<gh_stars>0
import {Bud, factory} from '@repo/test-kit/bud'

// Unit tests for the @roots/bud-server service configuration store.
describe('@roots/bud-server', function () {
  let bud: Bud

  // Build one development-mode bud instance shared by all cases below.
  beforeAll(async () => {
    bud = await factory({mode: 'development'})
  })

  // Snapshot of the default `server` store shape.
  it('has expected defaults', () => {
    expect(bud.store.get('server')).toMatchSnapshot({
      browser: {
        indicator: true,
        log: true,
        overlay: true,
      },
      middleware: {
        dev: true,
        hot: true,
        proxy: false,
      },
      dev: {
        url: new URL('http://localhost:3000/'),
      },
      proxy: {
        url: new URL('http://localhost/'),
      },
      watch: {
        files: [],
      },
    })
  })

  // Store values can be overwritten after construction.
  it('is modifiable', () => {
    expect(bud.store.get('server.browser.indicator')).toBe(true)
    bud.store.set('server.browser.indicator', false)
    expect(bud.store.get('server.browser.indicator')).toBe(false)
  })

  // The server service exposes a callable run method.
  it('has run method', () => {
    try {
      expect(bud.server.run).toBeInstanceOf(Function)
    } catch (e) {
      console.error(e)
    }
  })
})
|
import React from 'react';
import SVGComp from '../../Components/VectorComp';
import { Icon } from '../../Helper';
import './Styles.scss';
const SingleMember = ({ name, role, image, description, setPreview }) => {
return (
<div id="team-member-full-details-container">
<div id="member-details-image-container">
<img src={image} />
<span id="close-team-member">
<SVGComp path={Icon.close} hover fill="#333" onClick={setPreview} />
</span>
</div>
<div id="team-member-details-info-container">
<span>
{name} <SVGComp path={Icon.close} hover fill="#333" onClick={setPreview} />
</span>
<span>Role: {role}</span>
<span>{description}</span>
</div>
</div>
);
};
export default SingleMember;
|
def _read_values():
    # Yield integers from stdin until a negative sentinel value is entered.
    while True:
        value = int(input("Enter the input value: "))
        if value < 0:
            return
        yield value

# Sum of all non-negative values entered before the sentinel.
total_read = sum(_read_values())
print("Total amount read:", total_read)
|
// Taussig
//
// Written in 2013 by <NAME> <<EMAIL>>
//
// To the extent possible under law, the author(s) have dedicated all copyright and related
// and neighboring rights to this software to the public domain worldwide. This software is
// distributed without any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
// Test for presence of reference type typedef
#ifndef TAUSSIG_DETAIL_HAS_REFERENCE_TYPE_HPP
#define TAUSSIG_DETAIL_HAS_REFERENCE_TYPE_HPP
#include <taussig/detail/sequence_impl.h++>
#include <wheels/meta/bool.h++>
#include <wheels/meta/trait_of.h++>
#include <wheels/meta/depend_on.h++>
namespace seq {
namespace detail {
// SFINAE probe: the test<T>(int) overload is viable only when
// sequence_impl<T>::reference names a type (yielding True); otherwise
// overload resolution falls back to the variadic overload (False).
struct reference_type_test {
    template <typename T>
    wheels::meta::DependOn<wheels::meta::True, typename sequence_impl<T>::reference> static test(int);
    template <typename>
    wheels::meta::False static test(...);
};
// Trait: true iff sequence_impl<T> declares a nested `reference` typedef.
template <typename T>
struct has_reference_type : wheels::meta::TraitOf<reference_type_test, T> {};
} // namespace detail
} // namespace seq
#endif // TAUSSIG_DETAIL_HAS_REFERENCE_TYPE_HPP
|
// Mental-arithmetic game controller: poses random +,-,x,÷ questions for a
// 60-second round and tracks correct/wrong counts.
angular.module('app', []).controller('GameCtrl', ['$scope', '$timeout', function($scope, $timeout){
    $scope.variables = {};
    $scope.correct = 0;
    $scope.wrong = 0;
    $scope.timer = 0;
    // BUG FIX: these were assigned without `var`, leaking implicit globals.
    var _start = false;
    var _end = false;
    var _answer = null;
    var _answerCorrect = null;
    var _sym = ['-','+','÷','x'];

    // Pick two random operands (1..10) and an operator, and precompute the
    // expected answer. Division is kept integral by multiplying x up first.
    function _initialize() {
        $scope.variables = {
            'x': Math.floor(Math.random() * (10)) + 1,
            'y': Math.floor(Math.random() * (10)) + 1,
        };
        $scope.symbol = _sym[Math.round(Math.random()*3)];
        switch ($scope.symbol) {
            case '-':
                _answer = $scope.variables.x - $scope.variables.y;
                break;
            case '+':
                _answer = $scope.variables.x + $scope.variables.y;
                break;
            case 'x':
                _answer = $scope.variables.x * $scope.variables.y;
                break;
            default:
                // Division: show x*y ÷ y so the quotient is the original x.
                var x = $scope.variables.x;
                $scope.variables.x = x * $scope.variables.y;
                _answer = x;
                break
        }
        _answerCorrect = null;
        $scope.answer = null;
    }

    angular.extend($scope, {
        isAnswer: function (){
            return _answerCorrect === true;
        },
        isIncorrect: function (){
            return _answerCorrect === false;
        },
        // Percentage of correct answers.
        // NOTE(review): returns NaN before any answer is given -- confirm the
        // view guards against that.
        accuracy: function(){
            return $scope.correct / ($scope.correct + $scope.wrong) * 100;
        },
        noGame: function(){
            return !_start && !_end;
        },
        // Reset counters and kick off the one-second timer tick.
        startGame: function(){
            _start = true;
            _end = false;
            $scope.correct = 0;
            $scope.wrong = 0;
            $scope.timer = 0;
            $timeout($scope.increaseTimer, 1000);
        },
        playingGame: function(){
            return _start && !_end;
        },
        endGame: function() {
            return _start && _end;
        },
        // Tick once per second; the round ends after 60 seconds.
        increaseTimer: function(){
            $scope.timer++;
            if($scope.timer == 60){
                _end = true;
            } else {
                $timeout($scope.increaseTimer, 1000);
            }
        },
        // Compare the typed answer with the expected one; advance to a new
        // question on success, clear the input on failure.
        checkAnswer: function(){
            _answerCorrect = parseInt($scope.answer) == _answer;
            if(_answerCorrect) {
                $scope.correct++;
                _initialize()
            }else{
                $scope.wrong++;
                $scope.answer = null;
            }
        }
    });

    _initialize();
}
]);
|
#!/usr/bin/env bash
# Configure a Nomad client node: write /etc/nomad.d/nomad.hcl with TLS and
# retry-join settings.
# Positional args: $1 = region, $2 = datacenter, $4 = server address to join.
# NOTE(review): $3 is never referenced -- confirm whether that argument is
# skipped by design or a parameter was dropped.
echo "Configuring nomad ..."
mkdir -p /etc/nomad.d
chmod 700 /etc/nomad.d
touch /etc/nomad.d/nomad.hcl
# Enable Nomad's CLI command autocomplete support. Skip if installed
grep "complete -C /usr/bin/nomad nomad" ~/.bashrc &>/dev/null || nomad -autocomplete-install
# Render the client configuration. The heredoc delimiter is unquoted on
# purpose so $1, $2 and $4 are expanded into the generated file.
cat <<EOF > /etc/nomad.d/nomad.hcl
data_dir = "/opt/nomad"
region = "$1"
datacenter = "$2"
bind_addr = "0.0.0.0"
client {
enabled = true
server_join {
retry_join = ["$4"]
retry_max = 5
retry_interval = "15s"
}
options = {
"driver.raw_exec" = "1"
"driver.raw_exec.enable" = "1"
}
}
# Require TLS
tls {
http = true
rpc = true
ca_file = "/home/ubuntu/nomad/ssl/nomad-ca.pem"
cert_file = "/home/ubuntu/nomad/ssl/client.pem"
key_file = "/home/ubuntu/nomad/ssl/client-key.pem"
verify_server_hostname = true
verify_https_client = true
}
EOF
|
# Create the working directories needed by the pipeline.
# -p makes this idempotent: re-running no longer fails on existing dirs.
mkdir -p exported-models jobs results tmp weights
|
#!/bin/bash
#/**
# * php-xdebug
# * php debug module
# *
# * @category dev
# */
# Pull in echo_* colour helpers relative to this script's location.
BASEDIR=$(dirname "${0}")
. ${BASEDIR}/../tools/colors.sh
VERSION='7.3'
OPTIND=0
# -v selects the PHP version; -h prints usage.
while getopts :v:h OPTION; do
    case "${OPTION}" in
        v) VERSION="${OPTARG}";;
        h) echo_label 'description'; echo_primary 'Config php-modules'
            echo_label 'usage'; echo_primary "${0} -v [version] -r (restore default) -h (help)"
            exit 0;;
        :) echo_error "\"${OPTARG}\" requires value"
            exit 1;;
        \?) echo_error "invalid option \"${OPTARG}\""
            exit 1;;
    esac
done
# check valid version
for VALID_VERSION in 5.6 7.0 7.1 7.2 7.3; do
    if [ "${VERSION}" = "${VALID_VERSION}" ]; then
        INSTALL='true'
    fi
done
if [ "${INSTALL}" != 'true' ]; then
    echo_error "Cannot install xdebug for \"php$VERSION\", invalid version"
    exit 1
fi
echo_info 'sudo apt-get install --assume-yes php-xdebug'
sudo apt-get install --assume-yes php-xdebug
# config xdebug
echo_info "sudo bash -c \"cat > /etc/php/${VERSION}/mods-available/xdebug.ini <<EOF ... EOF\""
sudo bash -c "cat > /etc/php/${VERSION}/mods-available/xdebug.ini <<EOF
zend_extension=xdebug.so
xdebug.remote_autostart = 1
xdebug.remote_enable = 1
xdebug.remote_handler = dbgp
xdebug.remote_host = 127.0.0.1
xdebug.remote_log = /tmp/xdebug_remote.log
xdebug.remote_mode = req
xdebug.remote_port = 9005 #if you want to change the port you can change
EOF"
# restart php service
echo_info "sudo systemctl restart php${VERSION}-fpm"
sudo systemctl restart php${VERSION}-fpm
|
#!/usr/bin/env bash
# Draw a binary fractal tree of depth N (read from stdin) into a sparse
# associative-array canvas, then print it bottom-up.
declare -A a=()
declare -r fg=1 # foreground character
declare -r bg=_ # background character
# Recursively draw a vertical trunk of height d, then two branches (left and
# right diagonals of length d), halving d at each level.
draw() {
  local -i x=$1   # most recently drawn-to column number, rightward from 0
  local -i y=$2   # most recently drawn-to row number, upward from 0
  local -ri d=$3  # vertical displacement of trunk, then again of branch
  local -ri n=$4  # number of iterations remaining, including this one
  ((n > 0)) || return
  local -i i
  # Vertical trunk segment.
  for ((i = d; i > 0; --i)); do
    ((++y))
    a[$x,$y]=$fg
  done
  # Left diagonal branch, then recurse from its tip.
  local -i x_=$x y_=$y
  for ((i = d; i > 0; --i)); do
    ((++y_, --x_))
    a[$x_,$y_]=$fg
  done
  draw $x_ $y_ $((d / 2)) $((n - 1))
  # Right diagonal branch, then recurse from its tip.
  for ((i = d; i > 0; --i)); do
    ((++y, ++x))
    a[$x,$y]=$fg
  done
  draw $x $y $((d / 2)) $((n - 1))
}
declare -i n=0
read -r n
# Root the tree at column 49, just below row 0, with initial segment 16.
draw 49 -1 16 $n
# Render top row first (largest y), missing cells as the background char.
declare -i x y
for ((y = 62; y >= 0; --y)); do
  for ((x = 0; x < 100; ++x)); do
    printf %c ${a[$x,$y]:-$bg}
  done
  printf \\n
done
|
// Initialise the Google Map inside #gmap, reading its configuration from the
// element's data-* attributes, and place a single custom marker at the centre.
function genemyinitMap() {
    // Cache the jQuery lookup instead of re-querying the DOM per attribute.
    var $gmap = jQuery('#gmap');
    var latitude = $gmap.data( 'latitude' );
    var longitude = $gmap.data( 'longitude' );
    var title = $gmap.data( 'title' );
    var image = $gmap.data( 'marker' );
    var zoom = $gmap.data( 'zoom' );
    var myLatLng = {lat: latitude, lng: longitude};
    var map = new google.maps.Map(document.getElementById('gmap'), {
        center: myLatLng,
        zoom: zoom,
        // "Retro" style palette.
        styles: [
            { "elementType": "geometry", "stylers": [ { "color": "#ebe3cd" } ] },
            { "elementType": "labels.text.fill", "stylers": [ { "color": "#523735" } ] },
            { "elementType": "labels.text.stroke", "stylers": [ { "color": "#f5f1e6" } ] },
            {
                "featureType": "administrative",
                "elementType": "geometry.stroke",
                "stylers": [ { "color": "#c9b2a6" } ]
            },
            {
                "featureType": "administrative.land_parcel",
                "elementType": "geometry.stroke",
                "stylers": [ { "color": "#dcd2be" } ]
            },
            {
                "featureType": "administrative.land_parcel",
                "elementType": "labels.text.fill",
                "stylers": [ { "color": "#ae9e90" } ]
            },
            {
                "featureType": "landscape.natural",
                "elementType": "geometry",
                "stylers": [ { "color": "#dfd2ae" } ]
            },
            {
                "featureType": "poi",
                "elementType": "geometry",
                "stylers": [ { "color": "#dfd2ae" } ]
            },
            {
                "featureType": "poi",
                "elementType": "labels.text.fill",
                "stylers": [ { "color": "#93817c" } ]
            },
            {
                "featureType": "poi.park",
                "elementType": "geometry.fill",
                "stylers": [ { "color": "#a5b076" } ]
            },
            {
                "featureType": "poi.park",
                "elementType": "labels.text.fill",
                "stylers": [ { "color": "#447530" } ]
            },
            {
                "featureType": "road",
                "elementType": "geometry",
                "stylers": [ { "color": "#f5f1e6" } ]
            },
            {
                "featureType": "road.arterial",
                "elementType": "geometry",
                "stylers": [ { "color": "#fdfcf8" } ]
            },
            {
                "featureType": "road.highway",
                "elementType": "geometry",
                "stylers": [ { "color": "#f8c967" } ]
            },
            {
                "featureType": "road.highway",
                "elementType": "geometry.stroke",
                "stylers": [ { "color": "#e9bc62" } ]
            },
            {
                "featureType": "road.highway.controlled_access",
                "elementType": "geometry",
                "stylers": [ { "color": "#e98d58" } ]
            },
            {
                "featureType": "road.highway.controlled_access",
                "elementType": "geometry.stroke",
                "stylers": [ { "color": "#db8555" } ]
            },
            {
                "featureType": "road.local",
                "elementType": "labels.text.fill",
                "stylers": [ { "color": "#806b63" } ]
            },
            {
                "featureType": "transit.line",
                "elementType": "geometry",
                "stylers": [ { "color": "#dfd2ae" } ]
            },
            {
                "featureType": "transit.line",
                "elementType": "labels.text.fill",
                "stylers": [ { "color": "#8f7d77" } ]
            },
            {
                "featureType": "transit.line",
                "elementType": "labels.text.stroke",
                "stylers": [ { "color": "#ebe3cd" } ]
            },
            {
                "featureType": "transit.station",
                "elementType": "geometry",
                "stylers": [ { "color": "#dfd2ae" } ]
            },
            {
                "featureType": "water",
                "elementType": "geometry.fill",
                "stylers": [ { "color": "#b9d3c2" } ]
            },
            {
                "featureType": "water",
                "elementType": "labels.text.fill",
                "stylers": [ { "color": "#92998d" } ]
            }
        ]
    });
    var marker = new google.maps.Marker({
        position: myLatLng,
        map: map,
        icon: image,
        title: title  // reuse the cached value instead of re-reading the DOM
    });
    marker.setMap(map);
}
|
/**
* @author ooooo
* @date 2021/2/27 11:48
*/
#ifndef CPP_0395__SOLUTION2_H_
#define CPP_0395__SOLUTION2_H_
#include <vector>
#include <iostream>
#include <unordered_map>
using namespace std;
// 分治法
// Divide and conquer (LeetCode 395): any character that occurs fewer than k
// times cannot appear in a valid substring, so it splits the range.
class Solution {
 public:
    // Length of the longest substring of s[l..r] in which every distinct
    // character appears at least k times.
    int dfs(std::string &s, int l, int r, int k) {
        if (l > r) return 0;
        int n = r + 1;
        std::vector<int> freq(26, 0);
        for (int i = l; i < n; ++i) {
            freq[s[i] - 'a']++;
        }
        // Find a character present but rarer than k; it acts as a separator.
        char split = '0';
        for (int i = 0; i < 26; ++i) {
            if (freq[i] > 0 && freq[i] < k) {
                split = i + 'a';
                break;
            }
        }
        if (split == '0') {
            // Every present character already occurs >= k times.
            return r - l + 1;
        }
        int ans = 0;
        int i = l;
        while (i < n) {
            while (i < n && s[i] == split) {
                i++;
            }
            if (i >= n) {
                // BUG FIX: was `return 0`, which discarded the answers
                // already accumulated from earlier segments.
                break;
            }
            int j = i;
            while (j < n && s[j] != split) {
                j++;
            }
            ans = std::max(ans, dfs(s, i, j - 1, k));
            i = j;
        }
        return ans;
    }

    int longestSubstring(std::string s, int k) {
        // Cast avoids size_t wraparound for the empty string.
        return dfs(s, 0, (int)s.size() - 1, k);
    }
};
#endif //CPP_0395__SOLUTION2_H_
|
#!/usr/bin/env bash
#
# Copyright (c) 2017-2020 The Zenacoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Check for new lines in diff that introduce trailing whitespace.
# We can't run this check unless we know the commit range for the PR.
export LC_ALL=C
# Any flag prints usage; the script otherwise takes a commit count ($1) or a
# COMMIT_RANGE environment variable.
while getopts "?" opt; do
  case $opt in
    ?)
      echo "Usage: $0 [N]"
      echo " COMMIT_RANGE='<commit range>' $0"
      echo " $0 -?"
      echo "Checks unstaged changes, the previous N commits, or a commit range."
      echo "COMMIT_RANGE='47ba2c3...ee50c9e' $0"
      exit 0
    ;;
  esac
done
# Derive the commit range when not supplied explicitly.
if [ -z "${COMMIT_RANGE}" ]; then
  if [ -n "$1" ]; then
    COMMIT_RANGE="HEAD~$1...HEAD"
  else
    # This assumes that the target branch of the pull request will be master.
    MERGE_BASE=$(git merge-base HEAD master)
    COMMIT_RANGE="$MERGE_BASE..HEAD"
  fi
fi
# Diff of all tracked files, excluding vendored trees and generated locales.
showdiff() {
  if ! git diff -U0 "${COMMIT_RANGE}" -- "." ":(exclude)depends/patches/" ":(exclude)contrib/guix/patches/" ":(exclude)src/leveldb/" ":(exclude)src/crc32c/" ":(exclude)src/secp256k1/" ":(exclude)src/univalue/" ":(exclude)doc/release-notes/" ":(exclude)src/qt/locale/"; then
    echo "Failed to get a diff"
    exit 1
  fi
}
# Same diff restricted to source/doc file types (for the tab check).
showcodediff() {
  if ! git diff -U0 "${COMMIT_RANGE}" -- *.cpp *.h *.md *.py *.sh ":(exclude)src/leveldb/" ":(exclude)src/crc32c/" ":(exclude)src/secp256k1/" ":(exclude)src/univalue/" ":(exclude)doc/release-notes/" ":(exclude)src/qt/locale/"; then
    echo "Failed to get a diff"
    exit 1
  fi
}
RET=0
# Check if trailing whitespace was found in the diff.
if showdiff | grep -E -q '^\+.*\s+$'; then
  echo "This diff appears to have added new lines with trailing whitespace."
  echo "The following changes were suspected:"
  FILENAME=""
  SEEN=0
  SEENLN=0
  # Re-scan the diff, echoing each offending file header, hunk header, and line.
  while read -r line; do
    if [[ "$line" =~ ^diff ]]; then
      FILENAME="$line"
      SEEN=0
    elif [[ "$line" =~ ^@@ ]]; then
      LINENUMBER="$line"
      SEENLN=0
    else
      if [ "$SEEN" -eq 0 ]; then
        # The first time a file is seen with trailing whitespace, we print the
        # filename (preceded by a newline).
        echo
        echo "$FILENAME"
        SEEN=1
      fi
      if [ "$SEENLN" -eq 0 ]; then
        echo "$LINENUMBER"
        SEENLN=1
      fi
      echo "$line"
    fi
  done < <(showdiff | grep -E '^(diff --git |@@|\+.*\s+$)')
  RET=1
fi
# Check if tab characters were found in the diff.
if showcodediff | perl -nle '$MATCH++ if m{^\+.*\t}; END{exit 1 unless $MATCH>0}' > /dev/null; then
  echo "This diff appears to have added new lines with tab characters instead of spaces."
  echo "The following changes were suspected:"
  FILENAME=""
  SEEN=0
  SEENLN=0
  # Same reporting loop as above, keyed on tab characters.
  while read -r line; do
    if [[ "$line" =~ ^diff ]]; then
      FILENAME="$line"
      SEEN=0
    elif [[ "$line" =~ ^@@ ]]; then
      LINENUMBER="$line"
      SEENLN=0
    else
      if [ "$SEEN" -eq 0 ]; then
        # The first time a file is seen with a tab character, we print the
        # filename (preceded by a newline).
        echo
        echo "$FILENAME"
        SEEN=1
      fi
      if [ "$SEENLN" -eq 0 ]; then
        echo "$LINENUMBER"
        SEENLN=1
      fi
      echo "$line"
    fi
  done < <(showcodediff | perl -nle 'print if m{^(diff --git |@@|\+.*\t)}')
  RET=1
fi
exit $RET
|
<reponame>pradeep-gr/mbed-os5-onsemi
/**********************************************************************
* $Id$ lpc_phy.h 2011-11-20
*//**
* @file lpc_phy.h
* @brief Common PHY definitions used with all PHYs
* @version 1.0
* @date 20 Nov. 2011
* @author NXP MCU SW Application Team
*
* Copyright(C) 2011, NXP Semiconductor
* All rights reserved.
*
***********************************************************************
* Software that is described herein is for illustrative purposes only
* which provides customers with programming information regarding the
* products. This software is supplied "AS IS" without any warranties.
* NXP Semiconductors assumes no responsibility or liability for the
* use of the software, conveys no license or title under any patent,
* copyright, or mask work right to the product. NXP Semiconductors
* reserves the right to make changes in the software without
* notification. NXP Semiconductors also make no representation or
* warranty that such application will be suitable for the specified
* use without further testing or modification.
**********************************************************************/
#ifndef __LPC_PHY_H_
#define __LPC_PHY_H_
#include "lwip/opt.h"
#include "lwip/err.h"
#include "lwip/netif.h"
#ifdef __cplusplus
extern "C"
{
#endif
/* These PHY functions are usually part of the EMAC driver */
/** \brief Phy status update state machine
*
* This function provides a state machine for maintaining the PHY
* status without blocking. It must be occasionally called for the
* PHY status to be maintained.
*
* \param[in] netif NETIF structure
*/
s32_t lpc_phy_sts_sm(struct netif *netif);
/** \brief Initialize the PHY
*
* This function initializes the PHY. It will block until complete.
* This function is called as part of the EMAC driver
* initialization. Configuration of the PHY at startup is
* controlled by setting up configuration defines in lpc_phy.h.
*
* \param[in] netif NETIF structure
* \param[in] rmii If set, configures the PHY for RMII mode
* \return ERR_OK if the setup was successful, otherwise ERR_TIMEOUT
*/
err_t lpc_phy_init(struct netif *netif, int rmii);
/** \brief Write a value via the MII link (non-blocking)
*
* This function will write a value on the MII link interface to a PHY
* or a connected device. The function will return immediately without
* a status. Status needs to be polled later to determine if the write
* was successful.
*
* \param[in] PhyReg PHY register to write to
* \param[in] Value Value to write
*/
void lpc_mii_write_noblock(u32_t PhyReg, u32_t Value);
/** \brief Write a value via the MII link (blocking)
*
* This function will write a value on the MII link interface to a PHY
* or a connected device. The function will block until complete.
*
* \param[in] PhyReg PHY register to write to
* \param[in] Value Value to write
* \returns 0 if the write was successful, otherwise !0
*/
err_t lpc_mii_write(u32_t PhyReg, u32_t Value);
/** \brief Reads current MII link busy status
*
* This function will return the current MII link busy status and is meant to
* be used with non-blocking functions for monitor PHY status such as
* connection state.
*
* \returns !0 if the MII link is busy, otherwise 0
*/
u32_t lpc_mii_is_busy(void);
/** \brief Returns the data from the most recent MII link read
 *
 * This function returns the current value in the MII data register. It is
 * meant to be used with the non-blocking operations. This value should
 * only be read after a non-blocking read command has been issued and the
 * MII status has been determined to be good.
 *
 * \returns The current value in the MII data register
 */
u32_t lpc_mii_read_data(void);
/** \brief Read a value via the MII link (blocking)
 *
 * This function will read a value on the MII link interface from a PHY
 * or a connected device. The function will block until complete.
 *
 * \param[in] PhyReg PHY register to read from
 * \param[out] data Pointer to where to save data read via MII
 * \returns 0 if the read was successful, otherwise !0
 */
err_t lpc_mii_read(u32_t PhyReg, u32_t *data);
/** \brief Starts a read operation via the MII link (non-blocking)
 *
 * This function will start a read operation on the MII link interface
 * from a PHY or a connected device. The function will not block and
 * the status must be polled until complete. Once complete, the data
 * can be read with lpc_mii_read_data().
 *
 * \param[in] PhyReg PHY register to read from
 */
void lpc_mii_read_noblock(u32_t PhyReg);
/**
* This function provides a method for the PHY to setup the EMAC
* for the PHY negotiated duplex mode.
*
* @param[in] full_duplex 0 = half duplex, 1 = full duplex
*/
void lpc_emac_set_duplex(int full_duplex);
/**
* This function provides a method for the PHY to setup the EMAC
* for the PHY negotiated bit rate.
*
* @param[in] mbs_100 0 = 10mbs mode, 1 = 100mbs mode
*/
void lpc_emac_set_speed(int mbs_100);
#ifdef __cplusplus
}
#endif
#endif /* __LPC_PHY_H_ */
/* --------------------------------- End Of File ------------------------------ */
|
// Copyright (C) 2019. Huawei Technologies Co., Ltd. All rights reserved.
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#include <iostream>
#include "inference.hpp"
#include "tensor.hpp"
#include "data_loader.hpp"
#include "profiling.h"
#include "parse_command.h"
int main(int argc, char *argv[])
{
    UNI_TIME_INIT
    ParseRes parse_res;
    parseCommandLine(argc, argv, &parse_res, "examples");
    char *modelPath = (char *)"";
    char *sequenceDirectory = (char *)"";
    char *affinityPolicyName = (char *)"";
    char *algorithmMapPath = (char *)"";
    // The model path is mandatory; every other argument has a usable default.
    if (!parse_res.model.second) {
        exit(-1);
    }
    modelPath = parse_res.model.first;
    if (parse_res.inputPath.second) {
        sequenceDirectory = parse_res.inputPath.first;
    }
    if (parse_res.archInfo.second) {
        affinityPolicyName = parse_res.archInfo.first;
    }
    if (parse_res.algoPath.second) {
        algorithmMapPath = parse_res.algoPath.first;
    }
    const bool useGPU = (strcmp(affinityPolicyName, "GPU") == 0);
    auto pipeline = createPipeline(affinityPolicyName, modelPath, algorithmMapPath);

    // Build input descriptors (word ids + positions) from the model's declared
    // inputs; both are fed as unsigned 32-bit integer tensors.
    std::map<std::string, std::shared_ptr<Tensor>> inMap = pipeline->get_input();
    std::vector<TensorDesc> sequenceDescs;
    TensorDesc wordInputDesc = (*(inMap["nmt_words"])).get_desc();
    wordInputDesc.dt = DT_U32;
    sequenceDescs.push_back(wordInputDesc);
    TensorDesc positionInputDesc = (*(inMap["nmt_positions"])).get_desc();
    positionInputDesc.dt = DT_U32;
    sequenceDescs.push_back(positionInputDesc);

    // Load the input sequences and, when present, the expected reference outputs.
    std::vector<std::vector<Tensor>> sequences, results;
    std::vector<std::string> sequencePaths =
        load_data(sequenceDirectory + std::string("/input"), sequenceDescs, &sequences);
    std::vector<TensorDesc> resultDescs;
    resultDescs.push_back(wordInputDesc);
    std::vector<std::string> resultPaths =
        load_data(sequenceDirectory + std::string("/result"), resultDescs, &results);

    double totalTime = 0;
    U32 sequenceIndex = 0;
    U32 falseResult = 0;
    std::cout << "[RESULT]:" << std::endl;
    for (auto sequence : sequences) {
        std::cout << sequencePaths[sequenceIndex] << ": " << std::endl;
        // Re-prepare the pipeline for this sequence's (possibly different) shapes.
        std::map<std::string, TensorDesc> inputDescMap;
        inputDescMap["nmt_words"] = sequence[0].get_desc();
        inputDescMap["nmt_positions"] = sequence[1].get_desc();
        pipeline->reready(inputDescMap);
        std::map<std::string, U8 *> inputMap;
        inputMap["nmt_words"] = (U8 *)((CpuMemory *)(sequence[0].get_memory()))->get_ptr();
        inputMap["nmt_positions"] = (U8 *)((CpuMemory *)(sequence[1].get_memory()))->get_ptr();
        pipeline->set_input_by_copy(inputMap);

        double timeBegin = ut_time_ms();
        pipeline->run();
#ifdef _USE_GPU
        // Drain all queued GPU work before stopping the clock so the timing is honest.
        if (useGPU) {
            gcl_finish(OCLContext::getInstance().handle.get());
        }
#endif
        double timeEnd = ut_time_ms();
        totalTime += (timeEnd - timeBegin);

        Tensor output = pipeline->get_tensor_by_name("decoder_output");
#ifdef _USE_GPU
        if (useGPU) {
            // Map (or clone-then-map) device memory so the host can read the result.
            auto mem = (OclMemory *)output.get_memory();
            if (!mem->check_mapped()) {
                Tensor outputMap = output.clone(false);
                auto memMap = (OclMemory *)outputMap.get_memory();
                memMap->mapped_alloc();
                memMap->copy_from(mem);
                output = outputMap;
                mem = memMap;
            }
            mem->get_mapped_ptr();
        }
#endif
        std::cout << output.string(32) << std::endl;

        // Compare element-wise against the reference output when one was provided;
        // a sequence counts as wrong on the first mismatch (or if it is too short).
        if (resultPaths.size() > sequenceIndex) {
            U32 *result = (U32 *)((CpuMemory *)(results[sequenceIndex][0].get_memory()))->get_ptr();
            U32 inferenceSize = output.length();
            for (U32 i = 0; i < results[sequenceIndex][0].length(); i++) {
                if (i >= inferenceSize || result[i] != output.element(i)) {
                    falseResult++;
                    break;
                }
            }
        }
        sequenceIndex++;
    }

    UNI_TIME_STATISTICS
    pipeline->saveAlgorithmMapToFile(algorithmMapPath);
    std::cout << "[SUMMARY]:" << std::endl;
    if (sequenceIndex > 0) {  // guard: no input sequences would divide by zero
        UNI_CI_LOG("translation correct rate: %f %%\n",
            100.0 * (sequenceIndex - falseResult) / sequenceIndex);
        UNI_CI_LOG("avg_time:%fms/sequence\n", 1.0 * totalTime / sequenceIndex);
    }
    if (falseResult > 0) {
        UNI_ERROR_LOG("verify failed\n");
    }
    return 0;
}
|
#!/bin/bash
set -e
set -x

# Wait for the docker-in-docker sidecar to publish its TLS client certificates.
until [ -f /var/lib/docker/certs/client/ca.pem ]
do
  echo "Waiting for /var/lib/docker/certs/client/ca.pem to be available from dind volume"
  sleep 1
done
START_TIME=`date +"%d-%m-%yT%H-%M-%S"`
mkdir -pv ~/.docker
cp -v /var/lib/docker/certs/client/* ~/.docker
touch ./builder-started.txt
bash ./scripts/setup_helm.sh
bash ./scripts/setup_aws.sh $AWS_ACCESS_KEY $AWS_SECRET $AWS_REGION $CLUSTER_NAME
npm run check-db-exists
npm run install-projects
npm run prepare-database
cd packages/client && npm run buildenv
cd ../..
bash ./scripts/cleanup_builder.sh $DOCKER_LABEL
# Quote the flag so an unset/empty variable cannot break the test expression
# (unquoted, `[ == "true" ]` is a syntax error at runtime).
if [ "$PRIVATE_ECR" == "true" ]
then
  aws ecr get-login-password --region $AWS_REGION | docker login -u AWS --password-stdin $ECR_URL
else
  aws ecr-public get-login-password --region us-east-1 | docker login -u AWS --password-stdin $ECR_URL
fi
# Stage every project's package.json so Docker layer caching keys off them.
mkdir -p ./project-package-jsons/projects/default-project
cp packages/projects/default-project/package.json ./project-package-jsons/projects/default-project
find packages/projects/projects/ -name package.json -exec bash -c 'mkdir -p ./project-package-jsons/$(dirname $1) && cp $1 ./project-package-jsons/$(dirname $1)' - '{}' \;
DOCKER_BUILDKIT=1 docker build -t root-builder -f dockerfiles/package-root/Dockerfile-root .
npm install -g cli aws-sdk
# Build and publish all service images in parallel, then wait for completion.
bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL analytics $START_TIME $PRIVATE_ECR $AWS_REGION &
bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL api $START_TIME $PRIVATE_ECR $AWS_REGION &
bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL client $START_TIME $PRIVATE_ECR $AWS_REGION &
bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL instanceserver $START_TIME $PRIVATE_ECR $AWS_REGION &
bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL testbot $START_TIME $PRIVATE_ECR $AWS_REGION &
wait
bash ./scripts/deploy.sh $RELEASE_NAME ${TAG}__${START_TIME}
DEPLOY_TIME=`date +"%d-%m-%yT%H-%M-%S"`
# Same quoting fix as PRIVATE_ECR above.
if [ "$PUBLISH_DOCKERHUB" == 'true' ]
then
  echo "$DOCKER_HUB_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
  bash ./scripts/publish_dockerhub.sh ${TAG}__${START_TIME} $DOCKER_LABEL analytics &
  bash ./scripts/publish_dockerhub.sh ${TAG}__${START_TIME} $DOCKER_LABEL api &
  bash ./scripts/publish_dockerhub.sh ${TAG}__${START_TIME} $DOCKER_LABEL client &
  bash ./scripts/publish_dockerhub.sh ${TAG}__${START_TIME} $DOCKER_LABEL instanceserver &
  bash ./scripts/publish_dockerhub.sh ${TAG}__${START_TIME} $DOCKER_LABEL testbot &
  wait
fi
bash ./scripts/cleanup_builder.sh $DOCKER_LABEL
END_TIME=`date +"%d-%m-%yT%H-%M-%S"`
echo "Started build at $START_TIME, deployed image to K8s at $DEPLOY_TIME, ended at $END_TIME"
# Keep the container alive so operators can exec in and inspect build artifacts.
sleep infinity
|
#!/usr/bin/env bash
#
# Copyright 2019-2020 DJANTA, LLC (https://www.djanta.io)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# taken from OpenZipkin
# Abort on errors and unset variables, fail pipelines on any stage, trace commands.
set -euo pipefail
set -x
# Resolve the directory containing this script, following symlinks where the
# platform supports it (GNU `readlink -f` on Linux, Windows paths under Cygwin).
argv0=$(echo "$0" | sed -e 's,\\,/,g')
basedir=$(dirname "$(readlink "$0" || echo "$argv0")")
case "$(uname -s)" in
Linux) basedir=$(dirname "$(readlink -f "$0" || echo "$argv0")");;
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
# Load current shared library ...
# shellcheck disable=SC1090
source "${basedir}"/common.sh
build_started_by_tag() {
  # Succeeds (0) when TRAVIS_TAG is non-empty, i.e. the build was tag-triggered.
  if [ -n "${TRAVIS_TAG}" ]; then
    echo "[Publishing] This build was started by the tag ${TRAVIS_TAG}, publishing release"
    return 0
  fi
  echo "[Publishing] This build was not started by a tag, publishing snapshot"
  return 1
}
is_pull_request() {
  # TRAVIS_PULL_REQUEST is the literal "false" for branch builds, a PR number otherwise.
  case "${TRAVIS_PULL_REQUEST}" in
    false)
      echo "[Publishing] This is not a Pull Request"
      return 1
      ;;
    *)
      echo "[Not Publishing] This is a Pull Request"
      return 0
      ;;
  esac
}
is_master_branch() {
  # Succeeds (0) only when the Travis build branch is exactly "master".
  if [ "${TRAVIS_BRANCH}" != master ]; then
    echo "[Not Publishing] Travis branch is not master"
    return 1
  fi
  echo "[Publishing] Travis branch is master"
  return 0
}
is_release_branch() {
  # Succeeds (0) only when the Travis build branch is exactly "release".
  if [ "${TRAVIS_BRANCH}" != release ]; then
    echo "[Not Publishing] Travis branch is not release"
    return 1
  fi
  echo "[Publishing] Travis branch is release"
  return 0
}
check_travis_branch_equals_travis_tag() {
  # When you `git push --tags`, Travis reports the tag value as the branch name
  # (https://github.com/travis-ci/travis-ci/issues/1675), so for tag builds the
  # two must match; anything else means an inconsistent build and we abort.
  if [ "${TRAVIS_BRANCH}" = "${TRAVIS_TAG}" ]; then
    echo "[Publishing] Branch (${TRAVIS_BRANCH}) same as Tag (${TRAVIS_TAG})"
  else
    echo "Travis branch does not equal Travis tag, which it should, bailing out."
    echo "  github issue: https://github.com/travis-ci/travis-ci/issues/1675"
    exit 1
  fi
}
check_release_tag() {
  # A bare semver tag (e.g. 1.2.3) is created by the Maven release plugin itself,
  # so there is nothing left to do; only prefixed tags (release-1.2.3, v-1.2.3,
  # version-1.2.3, rc-1.2.3) are accepted to start a release.
  local tag="${TRAVIS_TAG}"
  local semver='^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$'
  if [[ "$tag" =~ $semver ]]; then
    echo "Build started by version tag $tag. During the release process tags like this"
    echo "are created by the 'release' Maven plugin. Nothing to do here."
    exit 0
  fi
  if [[ ! "$tag" =~ ^(release|v|version|rc)-[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$ ]]; then
    echo "You must specify a tag of the format 'release-0.0.0' to release this project."
    echo "The provided tag ${tag} doesn't match that. Aborting."
    exit 1
  fi
}
print_project_version() {
  # Print the bare Maven project version; the sed filter keeps only lines that
  # start with a digit, dropping Maven's [INFO]/download chatter.
  ./mvnw help:evaluate -N -Dexpression=project.version|sed -n '/^[0-9]/p'
}
is_release_commit() {
  # Succeeds (0) when the current project version is a bare semver (a release
  # commit, e.g. 1.2.3 rather than 1.2.3-SNAPSHOT).
  local project_version
  project_version="$(print_project_version)"
  if [[ ! "$project_version" =~ ^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$ ]]; then
    return 1
  fi
  echo "Build started by release commit $project_version. Will synchronize to maven central."
  return 0
}
release_version() {
  # Strip the leading "release-" prefix (if any) from the tag to get the bare version.
  echo "${TRAVIS_TAG#release-}"
}
safe_checkout_master() {
  # Delegates to safe_checkout (below) so the "check out a branch and verify it
  # matches the remote tip" logic lives in exactly one place.
  safe_checkout master
}
safe_checkout() {
  # Check out the given branch (default: master) locally — release:perform needs
  # to create commits on a branch — then verify the local tip matches the remote
  # tip so the release builds exactly the tagged commit. Aborts otherwise.
  local branch="${1:-master}"
  git checkout -B "${branch}"
  git fetch origin "${branch}":origin/"${branch}"
  local local_sha remote_sha
  local_sha="$(git show --pretty='format:%H' "${branch}")"
  remote_sha="$(git show --pretty='format:%H' origin/"${branch}")"
  if [ "$local_sha" != "$remote_sha" ]; then
    echo "${branch} on remote 'origin' has commits since the version under release, aborting"
    exit 1
  fi
}
javadoc_to_gh_pages() {
  # Aggregate every module's *-javadoc.jar into a versioned directory, publish
  # it on the gh-pages branch, and maintain module- and version-level indexes.
  version="$(print_project_version)"
  rm -rf javadoc-builddir
  builddir="javadoc-builddir/$version"
  # Collect javadoc for all modules
  # shellcheck disable=SC2044
  for jar in $(find . -name "*${version}-javadoc.jar"); do
    # Extract the module name from the jar filename.
    # shellcheck disable=SC2001
    module="$(echo "$jar" | sed "s~.*/\(.*\)-${version}-javadoc.jar~\1~")"
    this_builddir="$builddir/$module"
    if [ -d "$this_builddir" ]; then
      # Skip modules we've already processed.
      # We may find multiple instances of the same javadoc jar because of, for instance,
      # integration tests copying jars around.
      continue
    fi
    mkdir -p "$this_builddir"
    unzip "$jar" -d "$this_builddir"
    # Build a simple module-level index
    echo "<li><a href=\"${module}/index.html\">${module}</a></li>" >> "${builddir}/index.html"
  done
  # Update gh-pages: move the staged docs into a directory named after the version.
  git fetch origin gh-pages:gh-pages
  git checkout gh-pages
  rm -rf "$version"
  mv "javadoc-builddir/$version" ./
  rm -rf "javadoc-builddir"
  # Update simple version-level index (only add the entry once)
  if ! grep "$version" index.html 2>/dev/null; then
    echo "<li><a href=\"${version}/index.html\">${version}</a></li>" >> index.html
  fi
  # Ensure links are ordered by versions, latest on top
  sort -rV index.html > index.html.sorted
  mv index.html.sorted index.html
  git add "$version"
  git add index.html
  git commit -m "Automatically updated javadocs for $version"
  git push origin gh-pages
}
#----------------------
# MAIN
#----------------------
# Tag-triggered builds must not be PRs, and the tag must match the branch
# (Travis quirk — see check_travis_branch_equals_travis_tag) and be well-formed.
if ! is_pull_request && build_started_by_tag; then
  check_travis_branch_equals_travis_tag
  check_release_tag
fi
# skip license on travis due to #1512
./mvnw install -nsu -Dlicense.skip=true
# Fail when the build left formatting changes behind. The command substitution
# is quoted so multi-word status output cannot break the test expression.
if [ -z "$(git status --porcelain)" ];
then
  echo "No changes detected, all good"
else
  echo "The following files have formatting changes:"
  git status --porcelain
  echo ""
  echo "Please run 'mvn clean install' locally to format files"
  exit 1
fi
# If we are on a pull request, our only job is to run tests, which happened above via ./mvnw install
if is_pull_request; then
  true
# If we are on master, we will deploy the latest snapshot or release version
# - If a release commit fails to deploy for a transient reason, delete the broken version from bintray and click rebuild
elif is_master_branch; then
  #./mvnw --batch-mode -s ./.settings.xml -Prelease -nsu -pl -:djanta-benchmark -DskipTests deploy
  ./mvnw --batch-mode -s ./.settings.xml -Prelease -nsu -DskipTests deploy
  # If the deployment succeeded, sync it to Maven Central. Note: this needs to be done once per project, not module, hence -N
  if is_release_commit; then
    ./mvnw --batch-mode -s ./.settings.xml -nsu io.zipkin.centralsync-maven-plugin:centralsync-maven-plugin:sync
    javadoc_to_gh_pages
  fi
# If we are on a release tag, the following will update any version references and push a version tag for deployment.
elif build_started_by_tag; then
  safe_checkout_master
  # skip license on travis due to #1512
  ./mvnw --batch-mode -s ./.settings.xml -Prelease -nsu -DreleaseVersion="$(release_version)" \
    -Darguments="-DskipTests -Dlicense.skip=true" release:prepare
fi
|
package com.company;
import java.util.Scanner;
public class Exercise_4_19 {
    /**
     * Reads the first 9 digits of an ISBN-10 and appends the computed check
     * digit (weighted sum mod 11; 10 is printed as 'X').
     */
    public static void main(String[] args) {
        Scanner input = new Scanner(System.in);
        System.out.print("Enter the first 9 digits of an ISBN as integer: ");
        String digits = input.nextLine();
        int weightedSum = 0;
        // Each digit is weighted by its 1-based position.
        for (int pos = 0; pos < 9; pos++) {
            weightedSum += Character.getNumericValue(digits.charAt(pos)) * (pos + 1);
        }
        int checksum = weightedSum % 11;
        System.out.print(digits);
        System.out.println(checksum == 10 ? "X" : checksum);
    }
}
|
<filename>fluentlenium-core/src/test/java/org/fluentlenium/integration/EventsTest.java<gh_stars>0
package org.fluentlenium.integration;
import org.fluentlenium.core.domain.FluentWebElement;
import org.fluentlenium.core.events.ElementListener;
import org.fluentlenium.core.events.FindByListener;
import org.fluentlenium.core.events.NavigateAllListener;
import org.fluentlenium.core.events.NavigateListener;
import org.fluentlenium.integration.localtest.LocalFluentCase;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.support.events.EventFiringWebDriver;
public class EventsTest extends LocalFluentCase {

    // Wrap the default driver so WebDriver events are fired for registered listeners.
    @Override
    public WebDriver getDefaultDriver() {
        return new EventFiringWebDriver(super.getDefaultDriver());
    }

    // before/afterClickOn listeners must each fire exactly once for one click.
    @Test
    public void clickOn() {
        final ElementListener beforeListener = Mockito.mock(ElementListener.class);
        final ElementListener afterListener = Mockito.mock(ElementListener.class);
        events().beforeClickOn(beforeListener);
        events().afterClickOn(afterListener);
        goTo(DEFAULT_URL);
        click("button");
        Mockito.verify(beforeListener, Mockito.times(1)).on(Mockito.<FluentWebElement>anyObject(), Mockito.<WebDriver>anyObject());
        Mockito.verify(afterListener, Mockito.times(1)).on(Mockito.<FluentWebElement>anyObject(), Mockito.<WebDriver>anyObject());
    }

    // before/afterFindBy listeners must each fire exactly once for one lookup.
    @Test
    public void findBy() {
        final FindByListener beforeListener = Mockito.mock(FindByListener.class);
        final FindByListener afterListener = Mockito.mock(FindByListener.class);
        events().beforeFindBy(beforeListener);
        events().afterFindBy(afterListener);
        goTo(DEFAULT_URL);
        findFirst("button");
        Mockito.verify(beforeListener, Mockito.times(1)).on(Mockito.<By>anyObject(), Mockito.<FluentWebElement>anyObject(), Mockito.<WebDriver>anyObject());
        Mockito.verify(afterListener, Mockito.times(1)).on(Mockito.<By>anyObject(), Mockito.<FluentWebElement>anyObject(), Mockito.<WebDriver>anyObject());
    }

    // Navigation listeners receive the target URL for goTo, and a null URL with
    // Direction.REFRESH for navigate().refresh().
    @Test
    public void navigate() {
        final NavigateAllListener beforeListener = Mockito.mock(NavigateAllListener.class);
        final NavigateAllListener afterListener = Mockito.mock(NavigateAllListener.class);
        events().beforeNavigate(beforeListener);
        events().afterNavigate(afterListener);
        goTo(DEFAULT_URL);
        Mockito.verify(beforeListener, Mockito.times(1)).on(Mockito.eq(DEFAULT_URL), Mockito.<WebDriver>anyObject(), Mockito.<NavigateAllListener.Direction>anyObject());
        Mockito.verify(afterListener, Mockito.times(1)).on(Mockito.eq(DEFAULT_URL), Mockito.<WebDriver>anyObject(), Mockito.<NavigateAllListener.Direction>anyObject());
        getDriver().navigate().refresh();
        Mockito.verify(beforeListener, Mockito.times(1)).on((String)Mockito.isNull(), Mockito.<WebDriver>anyObject(), Mockito.eq(NavigateAllListener.Direction.REFRESH));
        Mockito.verify(afterListener, Mockito.times(1)).on((String)Mockito.isNull(), Mockito.<WebDriver>anyObject(), Mockito.eq(NavigateAllListener.Direction.REFRESH));
    }

    // Refresh-specific listeners must fire for navigate().refresh() only, not goTo.
    @Test
    public void refresh() {
        final NavigateListener beforeListener = Mockito.mock(NavigateListener.class);
        final NavigateListener afterListener = Mockito.mock(NavigateListener.class);
        events().beforeNavigateRefresh(beforeListener);
        events().afterNavigateRefresh(afterListener);
        goTo(DEFAULT_URL);
        Mockito.verify(beforeListener, Mockito.times(0)).on(Mockito.<WebDriver>anyObject());
        Mockito.verify(afterListener, Mockito.times(0)).on(Mockito.<WebDriver>anyObject());
        getDriver().navigate().refresh();
        Mockito.verify(beforeListener, Mockito.times(1)).on(Mockito.<WebDriver>anyObject());
        Mockito.verify(afterListener, Mockito.times(1)).on(Mockito.<WebDriver>anyObject());
    }
}
|
/*
* Copyright 2017-2022 original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micronaut.data.mongodb.serde;
import io.micronaut.core.annotation.Internal;
import io.micronaut.core.convert.ConversionContext;
import io.micronaut.core.convert.ConversionService;
import io.micronaut.core.type.Argument;
import io.micronaut.data.annotation.GeneratedValue;
import io.micronaut.data.annotation.MappedProperty;
import io.micronaut.data.document.serde.CustomConverterSerializer;
import io.micronaut.data.document.serde.IdPropertyNamingStrategy;
import io.micronaut.data.document.serde.IdSerializer;
import io.micronaut.data.model.runtime.AttributeConverterRegistry;
import io.micronaut.data.model.runtime.RuntimePersistentEntity;
import io.micronaut.data.model.runtime.convert.AttributeConverter;
import io.micronaut.serde.Encoder;
import io.micronaut.serde.Serializer;
import io.micronaut.serde.bson.custom.CodecBsonDecoder;
import io.micronaut.serde.config.naming.PropertyNamingStrategy;
import io.micronaut.serde.exceptions.SerdeException;
import io.micronaut.serde.reference.PropertyReference;
import io.micronaut.serde.reference.SerializationReference;
import org.bson.codecs.Codec;
import org.bson.codecs.IterableCodec;
import org.bson.codecs.configuration.CodecRegistry;
import org.bson.types.ObjectId;
import java.io.IOException;
/**
* The Micronaut Data's Serde's {@link io.micronaut.serde.Serializer.EncoderContext}.
*
* @author <NAME>
* @since 3.3
*/
@Internal
final class DataEncoderContext implements Serializer.EncoderContext {

    // Argument describing a BSON ObjectId; reused when a generated String id is
    // serialized as an ObjectId in createSpecific below.
    private final Argument<ObjectId> OBJECT_ID = Argument.of(ObjectId.class);

    private final AttributeConverterRegistry attributeConverterRegistry;
    private final Argument argument;
    private final RuntimePersistentEntity<Object> runtimePersistentEntity;
    // Parent context: every lookup not specialized here is delegated to it.
    private final Serializer.EncoderContext parent;
    private final CodecRegistry codecRegistry;

    /**
     * Default constructor.
     *
     * @param attributeConverterRegistry The AttributeConverterRegistry
     * @param argument The argument
     * @param runtimePersistentEntity The runtime persistent entity
     * @param parent The parent context
     * @param codecRegistry The codec registry
     */
    DataEncoderContext(AttributeConverterRegistry attributeConverterRegistry,
                       Argument argument,
                       RuntimePersistentEntity<Object> runtimePersistentEntity,
                       Serializer.EncoderContext parent,
                       CodecRegistry codecRegistry) {
        this.attributeConverterRegistry = attributeConverterRegistry;
        this.argument = argument;
        this.runtimePersistentEntity = runtimePersistentEntity;
        this.parent = parent;
        this.codecRegistry = codecRegistry;
    }

    @Override
    public ConversionService<?> getConversionService() {
        return parent.getConversionService();
    }

    @Override
    public boolean hasView(Class<?>... views) {
        return parent.hasView(views);
    }

    @Override
    public <B, P> SerializationReference<B, P> resolveReference(SerializationReference<B, P> reference) {
        return parent.resolveReference(reference);
    }

    /**
     * Supplies custom serializers for two cases: {@link IdSerializer} (String ids
     * annotated with {@code @GeneratedValue} are converted to BSON ObjectIds) and
     * {@link CustomConverterSerializer} (values run through the converter declared
     * on {@code @MappedProperty} before serialization). Everything else is
     * delegated to the parent context.
     */
    @Override
    public <T, D extends Serializer<? extends T>> D findCustomSerializer(Class<? extends D> serializerClass) throws SerdeException {
        if (serializerClass == IdSerializer.class) {
            IdSerializer idSerializer = new IdSerializer() {

                @Override
                public Serializer<Object> createSpecific(EncoderContext encoderContext, Argument<?> type) throws SerdeException {
                    // A String id with @GeneratedValue is persisted as an ObjectId:
                    // wrap the value in an ObjectId before serializing.
                    boolean isGeneratedObjectIdAsString = type.isAssignableFrom(String.class)
                            && type.isAnnotationPresent(GeneratedValue.class);
                    if (isGeneratedObjectIdAsString) {
                        Serializer<? super ObjectId> objectIdSerializer = findSerializer(OBJECT_ID);
                        return (encoder, encoderContext2, stringType, value) -> {
                            String stringId = (String) value;
                            objectIdSerializer.serialize(encoder, encoderContext2, OBJECT_ID, new ObjectId(stringId));
                        };
                    }
                    return (Serializer<Object>) findSerializer(type);
                }

                @Override
                public void serialize(Encoder encoder, EncoderContext context, Argument<?> type, Object value) {
                    // Only the serializer produced by createSpecific is usable.
                    throw new IllegalStateException("Create specific call is required!");
                }
            };
            return (D) idSerializer;
        }
        if (serializerClass == CustomConverterSerializer.class) {
            CustomConverterSerializer customConverterSerializer = new CustomConverterSerializer() {

                @Override
                public Serializer<Object> createSpecific(EncoderContext encoderContext, Argument<?> type) throws SerdeException {
                    // Resolve the AttributeConverter and its persisted type from the
                    // @MappedProperty annotation, then serialize converted values
                    // with the serializer for the persisted type.
                    Class<?> converterClass = type.getAnnotationMetadata().classValue(MappedProperty.class, "converter")
                            .orElseThrow(IllegalStateException::new);
                    Class<Object> converterPersistedType = type.getAnnotationMetadata().classValue(MappedProperty.class, "converterPersistedType")
                            .orElseThrow(IllegalStateException::new);
                    Argument<Object> convertedType = Argument.of(converterPersistedType);
                    Serializer<? super Object> serializer = findSerializer(convertedType);
                    AttributeConverter<Object, Object> converter = attributeConverterRegistry.getConverter(converterClass);
                    return new Serializer<Object>() {

                        @Override
                        public void serialize(Encoder encoder, EncoderContext context, Argument<?> type, Object value) throws IOException {
                            if (value == null) {
                                encoder.encodeNull();
                                return;
                            }
                            Object converted = converter.convertToPersistedValue(value, ConversionContext.of(type));
                            // The converter may legitimately map a non-null value to null.
                            if (converted == null) {
                                encoder.encodeNull();
                                return;
                            }
                            serializer.serialize(encoder, context, convertedType, converted);
                        }
                    };
                }

                @Override
                public void serialize(Encoder encoder, EncoderContext context, Argument<?> type, Object value) {
                    // Only the serializer produced by createSpecific is usable.
                    throw new IllegalStateException("Create specific call is required!");
                }
            };
            return (D) customConverterSerializer;
        }
        return parent.findCustomSerializer(serializerClass);
    }

    /**
     * Prefers a codec from the Mongo {@link CodecRegistry}: a {@code MappedCodec}
     * exposes its wrapped serializer directly, any other non-iterable codec is
     * adapted via {@link CodecBsonDecoder}, and otherwise the parent context is used.
     */
    @Override
    public <T> Serializer<? super T> findSerializer(Argument<? extends T> type) throws SerdeException {
        Codec<? extends T> codec = codecRegistry.get(type.getType(), codecRegistry);
        if (codec instanceof MappedCodec) {
            return ((MappedCodec<T>) codec).serializer;
        }
        if (codec != null && !(codec instanceof IterableCodec)) {
            return new CodecBsonDecoder<>((Codec<T>) codec);
        }
        return parent.findSerializer(type);
    }

    @Override
    public <D extends PropertyNamingStrategy> D findNamingStrategy(Class<? extends D> namingStrategyClass) throws SerdeException {
        // Id properties use the registry-wide id naming strategy.
        if (namingStrategyClass == IdPropertyNamingStrategy.class) {
            return (D) DataSerdeRegistry.ID_PROPERTY_NAMING_STRATEGY;
        }
        return parent.findNamingStrategy(namingStrategyClass);
    }

    @Override
    public <B, P> void pushManagedRef(PropertyReference<B, P> reference) {
        parent.pushManagedRef(reference);
    }

    @Override
    public void popManagedRef() {
        parent.popManagedRef();
    }
}
|
import json

# Serialize a small user record to a JSON string (json was used without import).
json_string = json.dumps({"name": "John", "age": 25, "location": "US"})
|
from flask import Flask, jsonify
from .base import BaseController
from flaskr.models import Event
class EventController(BaseController):
    """REST controller exposing Event resources."""

    def __init__(self, app: Flask):
        super().__init__()
        self.app = app

    def get(self):
        """Return every stored event as a JSON array of dicts."""
        serialized = [event.as_dict() for event in Event().get_all()]
        return jsonify(serialized)

    def post(self):
        """Creating events is not supported yet."""
        raise NotImplementedError()
|
<reponame>YMxiaobei/iconv-lite-ts2
"use strict";
// Generated data for sbcs codec. Don't edit manually. Regenerate using generation/gen-sbcs.js script.
export let _exports = {
"437": "cp437",
"737": "cp737",
"775": "cp775",
"850": "cp850",
"852": "cp852",
"855": "cp855",
"856": "cp856",
"857": "cp857",
"858": "cp858",
"860": "cp860",
"861": "cp861",
"862": "cp862",
"863": "cp863",
"864": "cp864",
"865": "cp865",
"866": "cp866",
"869": "cp869",
"874": "windows874",
"922": "cp922",
"1046": "cp1046",
"1124": "cp1124",
"1125": "cp1125",
"1129": "cp1129",
"1133": "cp1133",
"1161": "cp1161",
"1162": "cp1162",
"1163": "cp1163",
"1250": "windows1250",
"1251": "windows1251",
"1252": "windows1252",
"1253": "windows1253",
"1254": "windows1254",
"1255": "windows1255",
"1256": "windows1256",
"1257": "windows1257",
"1258": "windows1258",
"28591": "iso88591",
"28592": "iso88592",
"28593": "iso88593",
"28594": "iso88594",
"28595": "iso88595",
"28596": "iso88596",
"28597": "iso88597",
"28598": "iso88598",
"28599": "iso88599",
"28600": "iso885910",
"28601": "iso885911",
"28603": "iso885913",
"28604": "iso885914",
"28605": "iso885915",
"28606": "iso885916",
"windows874": {
"type": "_sbcs",
"chars": "€����…�����������‘’“”•–—�������� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����"
},
"win874": "windows874",
"cp874": "windows874",
"windows1250": {
"type": "_sbcs",
"chars": "€�‚�„…†‡�‰Š‹ŚŤŽŹ�‘’“”•–—�™š›śťžź ˇ˘Ł¤Ą¦§¨©Ş«¬®Ż°±˛ł´µ¶·¸ąş»Ľ˝ľżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙"
},
"win1250": "windows1250",
"cp1250": "windows1250",
"windows1251": {
"type": "_sbcs",
"chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊЌЋЏђ‘’“”•–—�™љ›њќћџ ЎўЈ¤Ґ¦§Ё©Є«¬®Ї°±Ііґµ¶·ё№є»јЅѕїАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя"
},
"win1251": "windows1251",
"cp1251": "windows1251",
"windows1252": {
"type": "_sbcs",
"chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ�Ž��‘’“”•–—˜™š›œ�žŸ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
},
"win1252": "windows1252",
"cp1252": "windows1252",
"windows1253": {
"type": "_sbcs",
"chars": "€�‚ƒ„…†‡�‰�‹�����‘’“”•–—�™�›���� ΅Ά£¤¥¦§¨©�«¬®―°±²³΄µ¶·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�"
},
"win1253": "windows1253",
"cp1253": "windows1253",
"windows1254": {
"type": "_sbcs",
"chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ����‘’“”•–—˜™š›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖרÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ"
},
"win1254": "windows1254",
"cp1254": "windows1254",
"windows1255": {
"type": "_sbcs",
"chars": "€�‚ƒ„…†‡ˆ‰�‹�����‘’“”•–—˜™�›���� ¡¢£₪¥¦§¨©×«¬®¯°±²³´µ¶·¸¹÷»¼½¾¿ְֱֲֳִֵֶַָֹֺֻּֽ־ֿ׀ׁׂ׃װױײ׳״�������אבגדהוזחטיךכלםמןנסעףפץצקרשת���"
},
"win1255": "windows1255",
"cp1255": "windows1255",
"windows1256": {
"type": "_sbcs",
"chars": "€پ‚ƒ„…†‡ˆ‰ٹ‹Œچژڈگ‘’“”•–—ک™ڑ›œں ،¢£¤¥¦§¨©ھ«¬®¯°±²³´µ¶·¸¹؛»¼½¾؟ہءآأؤإئابةتثجحخدذرزسشصض×طظعغـفقكàلâمنهوçèéêëىيîïًٌٍَôُِ÷ّùْûüے"
},
"win1256": "windows1256",
"cp1256": "windows1256",
"windows1257": {
"type": "_sbcs",
"chars": "€�‚�„…†‡�‰�‹�¨ˇ¸�‘’“”•–—�™�›�¯˛� �¢£¤�¦§Ø©Ŗ«¬®Æ°±²³´µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž˙"
},
"win1257": "windows1257",
"cp1257": "windows1257",
"windows1258": {
"type": "_sbcs",
"chars": "€�‚ƒ„…†‡ˆ‰�‹Œ����‘’“”•–—˜™�›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖרÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
},
"win1258": "windows1258",
"cp1258": "windows1258",
"iso88591": {
"type": "_sbcs",
"chars": "
¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
},
"cp28591": "iso88591",
"iso88592": {
"type": "_sbcs",
"chars": "
Ą˘Ł¤ĽŚ§¨ŠŞŤŹŽŻ°ą˛ł´ľśˇ¸šşťź˝žżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙"
},
"cp28592": "iso88592",
"iso88593": {
"type": "_sbcs",
"chars": "
Ħ˘£¤�Ĥ§¨İŞĞĴ�ݰħ²³´µĥ·¸ışğĵ½�żÀÁÂ�ÄĊĈÇÈÉÊËÌÍÎÏ�ÑÒÓÔĠÖ×ĜÙÚÛÜŬŜßàáâ�äċĉçèéêëìíîï�ñòóôġö÷ĝùúûüŭŝ˙"
},
"cp28593": "iso88593",
"iso88594": {
"type": "_sbcs",
"chars": "
ĄĸŖ¤Ĩϧ¨ŠĒĢŦޝ°ą˛ŗ´ĩšēģŧŊžŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎĪĐŅŌĶÔÕÖרŲÚÛÜŨŪßāáâãäåæįčéęëėíîīđņōķôõö÷øųúûüũū˙"
},
"cp28594": "iso88594",
"iso88595": {
"type": "_sbcs",
"chars": "
ЁЂЃЄЅІЇЈЉЊЋЌЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђѓєѕіїјљњћќ§ўџ"
},
"cp28595": "iso88595",
"iso88596": {
"type": "_sbcs",
"chars": "
���¤�������،�������������؛���؟�ءآأؤإئابةتثجحخدذرزسشصضطظعغ�����ـفقكلمنهوىيًٌٍَُِّْ�������������"
},
"cp28596": "iso88596",
"iso88597": {
"type": "_sbcs",
"chars": "
‘’£€₯¦§¨©ͺ«¬�―°±²³΄΅Ά·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�"
},
"cp28597": "iso88597",
"iso88598": {
"type": "_sbcs",
"chars": "
�¢£¤¥¦§¨©×«¬®¯°±²³´µ¶·¸¹÷»¼½¾��������������������������������‗אבגדהוזחטיךכלםמןנסעףפץצקרשת���"
},
"cp28598": "iso88598",
"iso88599": {
"type": "_sbcs",
"chars": "
¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖרÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ"
},
"cp28599": "iso88599",
"iso885910": {
"type": "_sbcs",
"chars": "
ĄĒĢĪĨͧĻĐŠŦŽŪŊ°ąēģīĩķ·ļđšŧž―ūŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎÏÐŅŌÓÔÕÖŨØŲÚÛÜÝÞßāáâãäåæįčéęëėíîïðņōóôõöũøųúûüýþĸ"
},
"cp28600": "iso885910",
"iso885911": {
"type": "_sbcs",
"chars": "
กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����"
},
"cp28601": "iso885911",
"iso885913": {
"type": "_sbcs",
"chars": "
”¢£¤„¦§Ø©Ŗ«¬®Æ°±²³“µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž’"
},
"cp28603": "iso885913",
"iso885914": {
"type": "_sbcs",
"chars": "
Ḃḃ£ĊċḊ§Ẁ©ẂḋỲ®ŸḞḟĠġṀṁ¶ṖẁṗẃṠỳẄẅṡÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŴÑÒÓÔÕÖṪØÙÚÛÜÝŶßàáâãäåæçèéêëìíîïŵñòóôõöṫøùúûüýŷÿ"
},
"cp28604": "iso885914",
"iso885915": {
"type": "_sbcs",
"chars": "
¡¢£€¥Š§š©ª«¬®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
},
"cp28605": "iso885915",
"iso885916": {
"type": "_sbcs",
"chars": "
ĄąŁ€„Чš©Ș«ŹźŻ°±ČłŽ”¶·žčș»ŒœŸżÀÁÂĂÄĆÆÇÈÉÊËÌÍÎÏĐŃÒÓÔŐÖŚŰÙÚÛÜĘȚßàáâăäćæçèéêëìíîïđńòóôőöśűùúûüęțÿ"
},
"cp28606": "iso885916",
"cp437": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm437": "cp437",
"csibm437": "cp437",
"cp737": {
"type": "_sbcs",
"chars": "ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρσςτυφχψ░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ωάέήϊίόύϋώΆΈΉΊΌΎΏ±≥≤ΪΫ÷≈°∙·√ⁿ²■ "
},
"ibm737": "cp737",
"csibm737": "cp737",
"cp775": {
"type": "_sbcs",
"chars": "ĆüéāäģåćłēŖŗīŹÄÅÉæÆōöĢ¢ŚśÖÜø£Ø×¤ĀĪóŻżź”¦©®¬½¼Ł«»░▒▓│┤ĄČĘĖ╣║╗╝ĮŠ┐└┴┬├─┼ŲŪ╚╔╩╦╠═╬Žąčęėįšųūž┘┌█▄▌▐▀ÓßŌŃõÕµńĶķĻļņĒŅ’±“¾¶§÷„°∙·¹³²■ "
},
"ibm775": "cp775",
"csibm775": "cp775",
"cp850": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø×ƒáíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´±‗¾¶§÷¸°¨·¹³²■ "
},
"ibm850": "cp850",
"csibm850": "cp850",
"cp852": {
"type": "_sbcs",
"chars": "ÇüéâäůćçłëŐőîŹÄĆÉĹĺôöĽľŚśÖÜŤťŁ×čáíóúĄąŽžĘ꬟Ⱥ«»░▒▓│┤ÁÂĚŞ╣║╗╝Żż┐└┴┬├─┼Ăă╚╔╩╦╠═╬¤đĐĎËďŇÍÎě┘┌█▄ŢŮ▀ÓßÔŃńňŠšŔÚŕŰýÝţ´˝˛ˇ˘§÷¸°¨˙űŘř■ "
},
"ibm852": "cp852",
"csibm852": "cp852",
"cp855": {
"type": "_sbcs",
"chars": "ђЂѓЃёЁєЄѕЅіІїЇјЈљЉњЊћЋќЌўЎџЏюЮъЪаАбБцЦдДеЕфФгГ«»░▒▓│┤хХиИ╣║╗╝йЙ┐└┴┬├─┼кК╚╔╩╦╠═╬¤лЛмМнНоОп┘┌█▄Пя▀ЯрРсСтТуУжЖвВьЬ№ыЫзЗшШэЭщЩчЧ§■ "
},
"ibm855": "cp855",
"csibm855": "cp855",
"cp856": {
"type": "_sbcs",
"chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת�£�×����������®¬½¼�«»░▒▓│┤���©╣║╗╝¢¥┐└┴┬├─┼��╚╔╩╦╠═╬¤���������┘┌█▄¦�▀������µ�������¯´±‗¾¶§÷¸°¨·¹³²■ "
},
"ibm856": "cp856",
"csibm856": "cp856",
"cp857": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèïîıÄÅÉæÆôöòûùİÖÜø£ØŞşáíóúñÑĞ𿮬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ºªÊËÈ�ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµ�×ÚÛÙìÿ¯´±�¾¶§÷¸°¨·¹³²■ "
},
"ibm857": "cp857",
"csibm857": "cp857",
"cp858": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø×ƒáíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈ€ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´±‗¾¶§÷¸°¨·¹³²■ "
},
"ibm858": "cp858",
"csibm858": "cp858",
"cp860": {
"type": "_sbcs",
"chars": "ÇüéâãàÁçêÊèÍÔìÃÂÉÀÈôõòÚùÌÕÜ¢£Ù₧ÓáíóúñѪº¿Ò¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm860": "cp860",
"csibm860": "cp860",
"cp861": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèÐðÞÄÅÉæÆôöþûÝýÖÜø£Ø₧ƒáíóúÁÍÓÚ¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm861": "cp861",
"csibm861": "cp861",
"cp862": {
"type": "_sbcs",
"chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm862": "cp862",
"csibm862": "cp862",
"cp863": {
"type": "_sbcs",
"chars": "ÇüéâÂà¶çêëèïî‗À§ÉÈÊôËÏûù¤ÔÜ¢£ÙÛƒ¦´óú¨¸³¯Î⌐¬½¼¾«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm863": "cp863",
"csibm863": "cp863",
"cp864": {
"type": "_sbcs",
"chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$٪&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~°·∙√▒─│┼┤┬├┴┐┌└┘β∞φ±½¼≈«»ﻷﻸ��ﻻﻼ� ﺂ£¤ﺄ��ﺎﺏﺕﺙ،ﺝﺡﺥ٠١٢٣٤٥٦٧٨٩ﻑ؛ﺱﺵﺹ؟¢ﺀﺁﺃﺅﻊﺋﺍﺑﺓﺗﺛﺟﺣﺧﺩﺫﺭﺯﺳﺷﺻﺿﻁﻅﻋﻏ¦¬÷×ﻉـﻓﻗﻛﻟﻣﻧﻫﻭﻯﻳﺽﻌﻎﻍﻡﹽّﻥﻩﻬﻰﻲﻐﻕﻵﻶﻝﻙﻱ■�"
},
"ibm864": "cp864",
"csibm864": "cp864",
"cp865": {
"type": "_sbcs",
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø₧ƒáíóúñѪº¿⌐¬½¼¡«¤░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
},
"ibm865": "cp865",
"csibm865": "cp865",
"cp866": {
"type": "_sbcs",
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№¤■ "
},
"ibm866": "cp866",
"csibm866": "cp866",
"cp869": {
"type": "_sbcs",
"chars": "������Ά�·¬¦‘’Έ―ΉΊΪΌ��ΎΫ©Ώ²³ά£έήίϊΐόύΑΒΓΔΕΖΗ½ΘΙ«»░▒▓│┤ΚΛΜΝ╣║╗╝ΞΟ┐└┴┬├─┼ΠΡ╚╔╩╦╠═╬ΣΤΥΦΧΨΩαβγ┘┌█▄δε▀ζηθικλμνξοπρσςτ΄±υφχ§ψ΅°¨ωϋΰώ■ "
},
"ibm869": "cp869",
"csibm869": "cp869",
"cp922": {
"type": "_sbcs",
"chars": "
¡¢£¤¥¦§¨©ª«¬®‾°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŠÑÒÓÔÕÖרÙÚÛÜÝŽßàáâãäåæçèéêëìíîïšñòóôõö÷øùúûüýžÿ"
},
"ibm922": "cp922",
"csibm922": "cp922",
"cp1046": {
"type": "_sbcs",
"chars": "ﺈ×÷ﹱ■│─┐┌└┘ﹹﹻﹽﹿﹷﺊﻰﻳﻲﻎﻏﻐﻶﻸﻺﻼ ¤ﺋﺑﺗﺛﺟﺣ،ﺧﺳ٠١٢٣٤٥٦٧٨٩ﺷ؛ﺻﺿﻊ؟ﻋءآأؤإئابةتثجحخدذرزسشصضطﻇعغﻌﺂﺄﺎﻓـفقكلمنهوىيًٌٍَُِّْﻗﻛﻟﻵﻷﻹﻻﻣﻧﻬﻩ�"
},
"ibm1046": "cp1046",
"csibm1046": "cp1046",
"cp1124": {
"type": "_sbcs",
"chars": "
ЁЂҐЄЅІЇЈЉЊЋЌЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђґєѕіїјљњћќ§ўџ"
},
"ibm1124": "cp1124",
"csibm1124": "cp1124",
"cp1125": {
"type": "_sbcs",
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёҐґЄєІіЇї·√№¤■ "
},
"ibm1125": "cp1125",
"csibm1125": "cp1125",
"cp1129": {
"type": "_sbcs",
"chars": "
¡¢£¤¥¦§œ©ª«¬®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖרÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
},
"ibm1129": "cp1129",
"csibm1129": "cp1129",
"cp1133": {
"type": "_sbcs",
"chars": "
ກຂຄງຈສຊຍດຕຖທນບປຜຝພຟມຢຣລວຫອຮ���ຯະາຳິີຶືຸູຼັົຽ���ເແໂໃໄ່້໊໋໌ໍໆ�ໜໝ₭����������������໐໑໒໓໔໕໖໗໘໙��¢¬¦�"
},
"ibm1133": "cp1133",
"csibm1133": "cp1133",
"cp1161": {
"type": "_sbcs",
"chars": "��������������������������������่กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู้๊๋€฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛¢¬¦ "
},
"ibm1161": "cp1161",
"csibm1161": "cp1161",
"cp1162": {
"type": "_sbcs",
"chars": "€…‘’“”•–— กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����"
},
"ibm1162": "cp1162",
"csibm1162": "cp1162",
"cp1163": {
"type": "_sbcs",
"chars": "
¡¢£€¥¦§œ©ª«¬®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖרÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
},
"ibm1163": "cp1163",
"csibm1163": "cp1163",
"maccroatian": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®Š™´¨≠ŽØ∞±≤≥∆µ∂∑∏š∫ªºΩžø¿¡¬√ƒ≈ƫȅ ÀÃÕŒœĐ—“”‘’÷◊�©⁄¤‹›Æ»–·‚„‰ÂćÁčÈÍÎÏÌÓÔđÒÚÛÙıˆ˜¯πË˚¸Êæˇ"
},
"maccyrillic": {
"type": "_sbcs",
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°¢£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµ∂ЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤"
},
"macgreek": {
"type": "_sbcs",
"chars": "Ĺ²É³ÖÜ΅àâä΄¨çéèê룙î‰ôö¦ùûü†ΓΔΘΛΞΠß®©ΣΪ§≠°·Α±≤≥¥ΒΕΖΗΙΚΜΦΫΨΩάΝ¬ΟΡ≈Τ«»… ΥΧΆΈœ–―“”‘’÷ΉΊΌΎέήίόΏύαβψδεφγηιξκλμνοπώρστθωςχυζϊϋΐΰ�"
},
"maciceland": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûüݰ¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤ÐðÞþý·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
},
"macroman": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
},
"macromania": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ĂŞ∞±≤≥¥µ∂∑∏π∫ªºΩăş¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›Ţţ‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
},
"macthai": {
"type": "_sbcs",
"chars": "«»…“”�•‘’� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู–—฿เแโใไๅๆ็่้๊๋์ํ™๏๐๑๒๓๔๕๖๗๘๙®©����"
},
"macturkish": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸĞğİıŞş‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙ�ˆ˜¯˘˙˚¸˝˛ˇ"
},
"macukraine": {
"type": "_sbcs",
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°Ґ£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµґЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤"
},
"koi8r": {
"type": "_sbcs",
"chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ё╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡Ё╢╣╤╥╦╧╨╩╪╫╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
},
"koi8u": {
"type": "_sbcs",
"chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґ╝╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪Ґ╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
},
"koi8ru": {
"type": "_sbcs",
"chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґў╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪ҐЎ©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
},
"koi8t": {
"type": "_sbcs",
"chars": "қғ‚Ғ„…†‡�‰ҳ‹ҲҷҶ�Қ‘’“”•–—�™�›�����ӯӮё¤ӣ¦§���«¬®�°±²Ё�Ӣ¶·�№�»���©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
},
"armscii8": {
"type": "_sbcs",
"chars": "
�և։)(»«—.՝,-֊…՜՛՞ԱաԲբԳգԴդԵեԶզԷէԸըԹթԺժԻիԼլԽխԾծԿկՀհՁձՂղՃճՄմՅյՆնՇշՈոՉչՊպՋջՌռՍսՎվՏտՐրՑցՒւՓփՔքՕօՖֆ՚�"
},
"rk1048": {
"type": "_sbcs",
"chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊҚҺЏђ‘’“”•–—�™љ›њқһџ ҰұӘ¤Ө¦§Ё©Ғ«¬®Ү°±Ііөµ¶·ё№ғ»әҢңүАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя"
},
"tcvn": {
"type": "_sbcs",
"chars": "\u0000ÚỤ\u0003ỪỬỮ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010ỨỰỲỶỸÝỴ\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ÀẢÃÁẠẶẬÈẺẼÉẸỆÌỈĨÍỊÒỎÕÓỌỘỜỞỠỚỢÙỦŨ ĂÂÊÔƠƯĐăâêôơưđẶ̀̀̉̃́àảãáạẲằẳẵắẴẮẦẨẪẤỀặầẩẫấậèỂẻẽéẹềểễếệìỉỄẾỒĩíịòỔỏõóọồổỗốộờởỡớợùỖủũúụừửữứựỳỷỹýỵỐ"
},
"georgianacademy": {
"type": "_sbcs",
"chars": "‚ƒ„…†‡ˆ‰Š‹Œ‘’“”•–—˜™š›œŸ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზთიკლმნოპჟრსტუფქღყშჩცძწჭხჯჰჱჲჳჴჵჶçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
},
"georgianps": {
"type": "_sbcs",
"chars": "‚ƒ„…†‡ˆ‰Š‹Œ‘’“”•–—˜™š›œŸ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზჱთიკლმნჲოპჟრსტჳუფქღყშჩცძწჭხჴჯჰჵæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
},
"pt154": {
"type": "_sbcs",
"chars": "ҖҒӮғ„…ҶҮҲүҠӢҢҚҺҸҗ‘’“”•–—ҳҷҡӣңқһҹ ЎўЈӨҘҰ§Ё©Ә«¬ӯ®Ҝ°ұІіҙө¶·ё№ә»јҪҫҝАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя"
},
"viscii": {
"type": "_sbcs",
"chars": "\u0000\u0001Ẳ\u0003\u0004ẴẪ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013Ỷ\u0015\u0016\u0017\u0018Ỹ\u001a\u001b\u001c\u001dỴ\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ẠẮẰẶẤẦẨẬẼẸẾỀỂỄỆỐỒỔỖỘỢỚỜỞỊỎỌỈỦŨỤỲÕắằặấầẩậẽẹếềểễệốồổỗỠƠộờởịỰỨỪỬơớƯÀÁÂÃẢĂẳẵÈÉÊẺÌÍĨỳĐứÒÓÔạỷừửÙÚỹỵÝỡưàáâãảăữẫèéêẻìíĩỉđựòóôõỏọụùúũủýợỮ"
},
"iso646cn": {
"type": "_sbcs",
"chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#¥%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������"
},
"iso646jp": {
"type": "_sbcs",
"chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[¥]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������"
},
"hproman8": {
"type": "_sbcs",
"chars": "
ÀÂÈÊËÎÏ´ˋˆ¨˜ÙÛ₤¯Ýý°ÇçÑñ¡¿¤£¥§ƒ¢âêôûáéóúàèòùäëöüÅîØÆåíøæÄìÖÜÉïßÔÁÃãÐðÍÌÓÒÕõŠšÚŸÿÞþ·µ¶¾—¼½ªº«■»±�"
},
"macintosh": {
"type": "_sbcs",
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
},
"ascii": {
"type": "_sbcs",
"chars": "��������������������������������������������������������������������������������������������������������������������������������"
},
"tis620": {
"type": "_sbcs",
"chars": "���������������������������������กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����"
}
}
|
<gh_stars>1-10
import React from 'react';
import { connect } from 'react-redux';
import { setLibraryFilter, setFloorFilter, setTextFilter } from '../actions/filters'
import floors from '../locations/floors';
import FA from 'react-fontawesome';
const Filters = (props) => (
<div className="browse-filters">
<p className="sidebar-title">Filters</p>
{/* Search should have autocomplete */}
<input
type="text"
placeholder={"Search"}
className="browse-filters__item browse-filters__item--field"
value={props.filters.text}
onChange={(e) => {
props.dispatch(setTextFilter(e.target.value))
}}
/>
{
// Clear text icon in Search bar (X)
props.filters.text &&
<a
className="browse-filters-clear-text"
onClick={
() => {
props.dispatch(setTextFilter());
}
}
> <FA name="times-circle" /> </a>
}
{/* Select Library */}
<select className="browse-filters__item browse-filters__item--select"
value={props.filters.library}
onChange={(e) => {
const lib = e.target.value;
props.dispatch(setLibraryFilter(lib));
props.dispatch(setFloorFilter(floors[lib][0].name));
}}
>
{
props.resources.map(({name, id}) => {
return (
<option value={name} key={'res' + id} > {name} </option>
);
})
}
</select>
{/* Select Floor */}
<select className="browse-filters__item browse-filters__item--select"
value={props.filters.floor}
onChange={(e) => {
props.dispatch(setFloorFilter(e.target.value));
}}
>
{
floors[props.filters.library].map((floor) => {
return (
<option value={floor.name} key={'floor::id::' + floor.filter}> {floor.name} </option>
);
})
}
</select>
</div>
);
// Expose only the store slices this component actually reads.
const mapStateToProps = (state) => ({
  filters: state.filters,
  resources: state.resources
});
export default connect(mapStateToProps)(Filters);
|
<filename>client/db/Database.js
const {
createRxDatabase,
addRxPlugin
} = require('rxdb');
const RxDBLeaderElectionPlugin = require('rxdb/plugins/leader-election');
const { RxDBReplicationPlugin } = require('rxdb/plugins/replication');
const { RxDBNoValidatePlugin } = require('rxdb/plugins/no-validate');
addRxPlugin(require('pouchdb-adapter-idb'));
addRxPlugin(require('pouchdb-adapter-http')); // enable syncing over http
addRxPlugin(RxDBLeaderElectionPlugin);
addRxPlugin(RxDBReplicationPlugin);
addRxPlugin(RxDBNoValidatePlugin);
const dbName = 'pets';
const theSchema = require('../schema/schema.json');
const theData = require('../data/pets.json');
const syncURL = `http://localhost:5984/${dbName}/`;
let dbPromise = null;
// EXAMPLES : https://github.com/pubkey/rxdb/tree/master/examples/react/src
// Creates the RxDB database, registers the "pets" collection with a
// duplicate-name guard, starts CouchDB replication and seeds the
// initial data. (The schema/data requires at the top of the file are
// used; the shadowing copies that used to live here were redundant.)
// EXAMPLES : https://github.com/pubkey/rxdb/tree/master/examples/react/src
const _create = async () => {
    const db = await createRxDatabase({
        name: dbName,
        adapter: 'idb' // PouchDB IndexedDB adapter registered above
    });
    // Expose the db for debugging from the browser console.
    window['db'] = db;
    // show leadership in title
    db.waitForLeadership().then(() => {
        document.title = '♛ ' + document.title;
    });
    // Collection name matches dbName ('pets'), so db.collections[dbName]
    // below resolves to this collection.
    await db.addCollections({
        pets: {
            schema: theSchema
        }
    });
    // hook: reject inserts whose `name` already exists.
    db.collections[dbName].preInsert(docObj => {
        const { name } = docObj;
        return db.collections[dbName]
            .findOne({
                selector: { name }
            }).exec()
            .then(has => {
                if (has !== null) {
                    const message = `A pet named ${name} already exists`;
                    throw new Error(message);
                }
                return db;
            })
            .catch((err) => {
                // NOTE(review): catching here swallows the duplicate-name
                // error, so the write proceeds anyway; kept as-is to
                // preserve the original best-effort behaviour, but this
                // likely defeats the purpose of the hook — confirm.
                console.error(err);
            });
    });
    // Start two-way replication with the remote CouchDB instance.
    db.pets.sync({ remote: syncURL });
    // Seed the collection; upsert keeps this idempotent across reloads.
    theData.forEach((item) => {
        db.collections[dbName].upsert(item);
    })
    // db.collections[dbName].dump()
    //     .then(json => console.dir(json));
    return db;
}
module.exports = {
get : () => {
if (! dbPromise)
dbPromise = _create();
return dbPromise;
}
};
|
<gh_stars>0
//
// Created by valkee on 4/26/2020.
//
#define WITHOUT_NUMPY
#include "matplotlibcpp.h"
#include "leaderboard.h"
#include <cmath>
namespace plt = matplotlibcpp;
// Renders the leaderboard's solve times as a line plot, saves it as a
// PNG and then opens matplotlib's interactive window.
void SaveGraph(timer::LeaderBoard leaderboard) {
  // Retrieve list of players
  std::vector<timer::Player> player_list = leaderboard.RetrieveScores();
  // Preparing data
  int n = player_list.size();
  std::vector<double> x(n), y(n);
  // Add each y position as the players score from the beginning of the database
  // (x is simply the chronological index of each recorded solve).
  for (size_t i = 0; i < player_list.size(); ++i) {
    x.at(i) = i;
    y.at(i) = player_list.at(i).score;
  }
  plt::plot(x, y);
  plt::title("3x3 Cube Times");
  // NOTE(review): hard-coded absolute Windows path — this breaks on any
  // other machine; consider making the output path a parameter.
  plt::save("C:\\Users\\Natsu\\Documents\\CS126\\cinder_0.9.2_vc2015\\my-project\\final-project-akevli\\assets\\graph.png");
  plt::show();
}
|
const { execSync } = require('child_process');
const builder = require('electron-builder');
const rm = require('del');
const fs = require('fs');
const path = require('path');
const package = require('./package.json');
const { BuildPublic } = require('./buildpublic');
const outDir = path.resolve(__dirname, "electron-build/");
// Delete the electron build output directory when it exists.
async function CleanBuildDir() {
    if (!fs.existsSync(outDir)) {
        return;
    }
    await rm(outDir, { force: true });
}
// Rebuild the TypeScript sources into the electron build directory:
// copy src/, drop the .ts originals, compile with tsc, and for BETA
// builds patch the compiled environment flag from PROD to BETA.
async function cleanAndBuildTSC() {
    await CleanBuildDir();
    fs.mkdirSync(outDir);
    // NOTE: `cp -r` makes this step POSIX-only.
    execSync(`cp -r src/ ${outDir}/src/`);
    // Only the compiled .js output ships; remove the TypeScript sources.
    await rm(`${outDir}/**/*.ts`, { force: true });
    execSync("npm run tsc");
    // Strict equality instead of the previous loose `==` comparison.
    if (process.env.BUILD === "BETA") {
        console.log("beta");
        let tempdata = fs.readFileSync(`${outDir}/src/utils/environment.js`, "utf8");
        tempdata = tempdata.replace('const environment = "PROD";', 'const environment = "BETA";');
        fs.writeFileSync(`${outDir}/src/utils/environment.js`, tempdata, "utf8")
    }
}
async function init() {
await cleanAndBuildTSC();
try{
await builder.build({
config: {
...package.build,
}
})
await BuildPublic();
}catch(ex){
console.error(ex);
}
await CleanBuildDir();
}
init();
|
<reponame>killua4564/hashclash
/**************************************************************************\
|
| Copyright (C) 2009 <NAME>
|
| This program is free software: you can redistribute it and/or modify
| it under the terms of the GNU General Public License as published by
| the Free Software Foundation, either version 3 of the License, or
| (at your option) any later version.
|
| This program is distributed in the hope that it will be useful,
| but WITHOUT ANY WARRANTY; without even the implied warranty of
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
| GNU General Public License for more details.
|
| You should have received a copy of the GNU General Public License
| along with this program. If not, see <http://www.gnu.org/licenses/>.
|
\**************************************************************************/
#include <stdexcept>
#include <iostream>
#include <fstream>
#include <vector>
#include <string>
#include <boost/lexical_cast.hpp>
#include <boost/program_options.hpp>
#include "main.hpp"
#include <hashclash/sdr.hpp>
#include <hashclash/timer.hpp>
using namespace hashclash;
using namespace std;
namespace po = boost::program_options;
namespace fs = boost::filesystem;
boost::mutex mut;
std::string workdir;
int main(int argc, char** argv)
{
hashclash::timer runtime(true);
cout <<
"MD5 differential path toolbox\n"
"Copyright (C) 2009 <NAME>\n"
"http://homepages.cwi.nl/~stevens/\n"
<< endl;
try {
parameters_type parameters;
vector< vector<int> > msgdiff(16);
// Define program options
po::options_description
cmds("Allowed commands"),
desc("Allowed options"),
msg("Define message differences (as +bitnr and -bitnr, bitnr=1-32)"),
all("Allowed options");
cmds.add_options()
("help,h", "Show options\n")
("startnearcollision", "Use inputfile{1,2} to:\n"
" - determine next near-collision template\n"
" - construct partial lower diff. path\n"
" - construct partial upper diff. path\n"
" - write md5diffpath_forward.cfg\n"
" - write md5diffpath_backward.cfg\n"
" - write md5diffpath_connect.cfg\n")
("upperpaths", "Write all partial upper diff. paths\n")
("findcollision", "Find nearcollision using path\n"
" given by inputfile1\n")
("convert", "Convert files between binary and text\n")
("split", po::value<unsigned>(¶meters.split),
"Split inputfile1 in given # files\n")
("join,j", po::value<vector<string> >(¶meters.files),
"Join files and save to outputfile1\n"
"Each filename has to be proceeded by -j\n")
("pathfromtext", "Load path in text form from inputfile1\n"
" and save as paths set to outputfile1\n")
("pathfromcollision", "Reconstruct paths from colliding inputfiles")
("startpartialpathfromfile", "Create partial path from binary file")
;
desc.add_options()
("workdir,w"
, po::value<string>(&workdir)->default_value("./data")
, "Set working directory.")
("inputfile1"
, po::value<string>(¶meters.infile1)->default_value("")
, "Set inputfile 1.")
("inputfile2"
, po::value<string>(¶meters.infile2)->default_value("")
, "Set inputfile 1.")
("outputfile1"
, po::value<string>(¶meters.outfile1)->default_value("")
, "Set outputfile 1.")
("outputfile2"
, po::value<string>(¶meters.outfile2)->default_value("")
, "Set outputfile 2.")
("pathtyperange"
, po::value<unsigned>(¶meters.pathtyperange)->default_value(0)
, "Increases potential # diffs eliminated per n.c.")
("skipnc"
, po::value<unsigned>(¶meters.skipnc)->default_value(0)
, "Skip a number of near-collision templates")
("threads"
, po::value<int>(¶meters.threads)->default_value(-1)
, "Number of worker threads")
;
msg.add_options()
("diffm0", po::value< vector<int> >(&msgdiff[0]), "delta m0")
("diffm1", po::value< vector<int> >(&msgdiff[1]), "delta m1")
("diffm2", po::value< vector<int> >(&msgdiff[2]), "delta m2")
("diffm3", po::value< vector<int> >(&msgdiff[3]), "delta m3")
("diffm4", po::value< vector<int> >(&msgdiff[4]), "delta m4")
("diffm5", po::value< vector<int> >(&msgdiff[5]), "delta m5")
("diffm6", po::value< vector<int> >(&msgdiff[6]), "delta m6")
("diffm7", po::value< vector<int> >(&msgdiff[7]), "delta m7")
("diffm8", po::value< vector<int> >(&msgdiff[8]), "delta m8")
("diffm9", po::value< vector<int> >(&msgdiff[9]), "delta m9")
("diffm10", po::value< vector<int> >(&msgdiff[10]), "delta m10")
("diffm11", po::value< vector<int> >(&msgdiff[11]), "delta m11")
("diffm12", po::value< vector<int> >(&msgdiff[12]), "delta m12")
("diffm13", po::value< vector<int> >(&msgdiff[13]), "delta m13")
("diffm14", po::value< vector<int> >(&msgdiff[14]), "delta m14")
("diffm15", po::value< vector<int> >(&msgdiff[15]), "delta m15")
;
all.add(cmds).add(desc).add(msg);
// Parse program options
po::positional_options_description p;
p.add("inputfile1", 1);
p.add("inputfile2", 1);
p.add("outputfile1", 1);
p.add("outputfile2", 1);
po::variables_map vm;
po::store(po::command_line_parser(argc, argv)
.options(all).positional(p).run(), vm);
{
std::ifstream ifs("md5diffpathhelper.cfg");
if (ifs) po::store(po::parse_config_file(ifs, all), vm);
}
po::notify(vm);
// Process program options
if (vm.count("help")
|| 0 == vm.count("startnearcollision")
+vm.count("upperpaths")
+vm.count("enhancepath")
+vm.count("findcollision")
+vm.count("convert")
+vm.count("split")
+vm.count("join")
+vm.count("pathfromtext")
+vm.count("pathfromcollision")
+vm.count("startpartialpathfromfile")
) {
cout << cmds << desc << endl;
return 0;
}
for (unsigned k = 0; k < 16; ++k)
{
parameters.m_diff[k] = 0;
for (unsigned j = 0; j < msgdiff[k].size(); ++j)
if (msgdiff[k][j] > 0)
parameters.m_diff[k] += 1<<(msgdiff[k][j]-1);
else
parameters.m_diff[k] -= 1<<(-msgdiff[k][j]-1);
}
if (parameters.threads <= 0 || parameters.threads > boost::thread::hardware_concurrency())
parameters.threads = boost::thread::hardware_concurrency();
if (vm.count("startnearcollision"))
return startnearcollision(parameters);
if (vm.count("upperpaths"))
return upperpaths(parameters);
if (vm.count("findcollision"))
return collisionfinding(parameters);
if (vm.count("convert"))
return convert(parameters);
if (vm.count("split"))
return split(parameters);
if (vm.count("join"))
return join(parameters);
if (vm.count("pathfromtext"))
return pathfromtext(parameters);
if (vm.count("pathfromcollision"))
return pathfromcollision(parameters);
if (vm.count("startpartialpathfromfile"))
return partialpathfromfile(parameters);
// Start job with given parameters
} catch (exception& e) {
cout << "Runtime: " << runtime.time() << endl;
cerr << "Caught exception!!:" << endl << e.what() << endl;
throw;
} catch (...) {
cout << "Runtime: " << runtime.time() << endl;
cerr << "Unknown exception caught!!" << endl;
throw;
}
cout << "Runtime: " << runtime.time() << endl;
return 0;
}
// Read up to one 64-byte message block from the stream as 16
// little-endian 32-bit words. Returns the number of bytes actually
// consumed; on a short read the stream's failbit is set and the last
// unconsumed character is pushed back.
unsigned load_block(istream& i, uint32 block[])
{
	unsigned bytesread = 0;
	char c;
	for (unsigned w = 0; w < 16; ++w)
		block[w] = 0;
	for (unsigned w = 0; w < 16; ++w)
		for (unsigned b = 0; b < 4; ++b)
		{
			i.get(c);
			if (!i) {
				i.putback(c);
				i.setstate(ios::failbit);
				return bytesread;
			}
			// byte b is shifted into position b*8 (little-endian).
			block[w] |= uint32((unsigned char)(c)) << (b*8);
			++bytesread;
		}
	return bytesread;
}
// Write the 16-word block to the stream as 64 bytes, little-endian.
void save_block(ostream& o, uint32 block[])
{
	for (unsigned w = 0; w < 16; ++w)
	{
		const uint32 word = block[w];
		for (unsigned b = 0; b < 4; ++b)
			o.put(char((word >> (b*8)) & 0xFF));
	}
}
|
#!/bin/bash
# this is a legacy version (will be removed in the future). please use test.py.
# Integration test for unionfs copy-on-write (cow) mode: mounts
# working-copy (rw) over original (ro) and verifies that writes and
# deletes land in the working copy while the originals stay untouched.
# -v echoes each command, -e aborts on the first failing check.
set -v
set -e
# Build a fresh fixture tree with files/dirs to modify and delete.
rm -rf original union working-copy
mkdir original union working-copy original/play-dir original/del-dir
echo v1 > original/file
echo v1 > original/play-with-me
echo v1 > original/delete-me
# Always unmount and remove the fixture, even when a check fails.
cleanup() {
	if [ -e "union" ]; then fusermount -u -q union; fi
	rm -rf union original working-copy
}
trap cleanup EXIT
# Mount in cow mode, logging to unionfs.log; give FUSE a moment to settle.
src/unionfs -d -o cow working-copy=rw:original=ro union >unionfs.log 2>&1 &
sleep 1
# Modifying a file through the union must be visible in the union...
[ "$(cat union/file)" = "v1" ]
echo "v2" > union/file
[ "$(cat union/file)" = "v2" ]
echo "v2" > union/play-with-me
[ "$(cat union/play-with-me)" = "v2" ]
# ...and files modified/deleted through the union must disappear there.
[ -f union/play-with-me ]
rm union/play-with-me
[ ! -f union/play-with-me ]
[ -f union/delete-me ]
rm union/delete-me
[ ! -f union/delete-me ]
# Directory create/delete cycle inside an existing read-only directory.
[ "$(ls union/play-dir)" = "" ]
echo "fool" > union/play-dir/foo
[ "$(ls union/play-dir)" = "foo" ]
rm union/play-dir/foo
[ "$(ls union/play-dir)" = "" ]
[ -d union/play-dir ]
rmdir union/play-dir
[ ! -d union/play-dir ]
# Deleting a read-only directory, then recreating it under the same name.
[ -d union/del-dir ]
rmdir union/del-dir
[ ! -d union/del-dir ]
# Writing into the deleted directory must fail (hence the leading '!').
! echo v1 > union/del-dir/foo
[ ! -d union/del-dir ]
mkdir union/del-dir
[ ! -f union/del-dir/foo ]
echo v1 > union/del-dir/foo
[ -f union/del-dir/foo ]
rm union/del-dir/foo
[ -d union/del-dir ]
rmdir union/del-dir
[ ! -d union/del-dir ]
# rmdir() test
# This section collects an error count manually, so disable -e/-v.
set +e
set +v
rc=0
mkdir original/testdir
touch original/testdir/testfile
mkdir working-copy/testdir
# A non-empty union directory must refuse rmdir.
rmdir union/testdir 2>/dev/null
if [ $? -eq 0 ]; then
	echo "rmdir succeeded, although it must not"
	rc=$(($rc + $?))
fi
# After removing its last file, the directory must be removable.
rm union/testdir/testfile
rc=$(($rc + $?))
rmdir union/testdir/
rc=$(($rc + $?))
if [ $rc -ne 0 ]; then
	echo "rmdir test failed"
	exit 1
else
	echo "rmdir test passed"
fi
set -e
fusermount -u union
# The read-only branch must be completely untouched; the new content
# must live in the writable branch only.
[ "$(cat original/file)" = "v1" ]
[ "$(cat original/play-with-me)" = "v1" ]
[ "$(cat original/delete-me)" = "v1" ]
[ -d original/play-dir ]
[ -d original/del-dir ]
[ "$(cat working-copy/file)" = "v2" ]
echo "ALL TEST PASSED"
|
<reponame>naq219/Telpoo-framework
package com.telpoo.example.utils;
import android.content.Context;
import android.widget.Toast;
/**
* @author NAQ219
*
*/
public class Utils1 {

	/**
	 * Shows a generic success/fail toast depending on {@code re}.
	 *
	 * @param re      result flag to report
	 * @param context context used to create the toast
	 */
	public static void showBolean(boolean re, Context context) {
		// Delegate to the message-based overload so the toast logic
		// lives in exactly one place.
		showBolean(re, "success ", "fail!", context);
	}

	/**
	 * Shows {@code ifTrue} when {@code re} is true, otherwise {@code ifFalse}.
	 *
	 * @param re      result flag to report
	 * @param ifTrue  toast text for the true case
	 * @param ifFalse toast text for the false case
	 * @param context context used to create the toast
	 */
	public static void showBolean(boolean re, String ifTrue, String ifFalse, Context context) {
		// Toast.LENGTH_LONG (value 1) replaces the raw literal 1 used
		// before; same behaviour, no magic number.
		if (re)
			Toast.makeText(context, ifTrue, Toast.LENGTH_LONG).show();
		else
			Toast.makeText(context, ifFalse, Toast.LENGTH_LONG).show();
	}
}
|
import * as fs from 'fs';
import * as rd from 'readline'
import * as path from 'path'
// Advent-of-Code style "dive" solver: reads "<direction> <value>" lines
// and accumulates a horizontal position and a depth, printing their
// product at the end. Idiom fixes: var -> const/let, explicit parseInt
// radix, for..of instead of an index loop. All printed strings are
// unchanged.
const filenPath = path.join(__dirname, '..', 'text-assets', '2.1.mine.txt');
console.log(`filenPath: ${filenPath}`);

const reader = rd.createInterface(fs.createReadStream(filenPath));
const data: Array<{direction: string, value: number}> = [];

// Each line event parses one command into the data array.
reader.on("line", (l: string) => {
    const tokens = l.split(' ');
    const direction = tokens[0];
    const value = parseInt(tokens[1], 10);
    data.push({direction, value});
});

// Runs before any "line" event has fired, so the array is still empty.
console.log(`Will be empty data has not yet been read ${data.length}`);

reader.on("close", () => {
    console.log(`Data has been read ${data.length}`);
    let horizontal = 0;
    let depth = 0;
    for (const {direction, value} of data) {
        switch (direction) {
            case "forward":
                horizontal = horizontal + value;
                break;
            case "up":
                depth = depth - value;
                break;
            case "down":
                depth = depth + value;
                break;
            default:
                console.log(`BROKEN COMMAND! ${direction} `);
                break;
        }
    }
    console.log(`horizontal: ${horizontal} depth: ${depth} answer: ${horizontal * depth} `);
});
|
-- Student table: one row per student.
create table stud(
sroll number(3) primary key, -- roll number, at most 3 digits
sname varchar(30), -- student name
hostel number check(hostel<10), -- hostel number, must be below 10
parent_inc number(6) -- parent income, at most 6 digits
);
|
<gh_stars>0
class CommentsController < ApplicationController
  # NOTE(review): before_filter was removed in Rails 5 in favour of
  # before_action — confirm which Rails version this app targets.
  before_filter :require_login

  # POST /events/:event_id/comments
  # Creates a comment on an event. Only the event owner or an attendee
  # may comment; commenting on a deleted event just redirects back.
  # NOTE(review): Event.find_by returns nil for an unknown id, so
  # @event.deleted would raise NoMethodError — confirm ids are always valid.
  def create
    @event = Event.find_by(id: params[:event_id])
    if @event.deleted
      redirect_to event_path(@event)
    elsif @event.attending_event?(current_user) || @event.user == current_user
      @comment = Comment.new(comment_params)
      @comment.user_id = current_user.id
      @comment.event_id = @event.id
      # When the first save fails on an XHR request, save is attempted a
      # second time by the elsif below before falling through.
      if request.xhr? && @comment.save
        # AJAX success: return just the rendered comment partial.
        render partial: '/comments/comment', locals: { comment: @comment }
      elsif @comment.save
        # Non-AJAX success: full page refresh.
        redirect_to event_path(@event)
      elsif request.xhr?
        # AJAX failure: validation errors are dropped and an empty
        # partial is returned.
        render partial: '/shared/empty'
      else
        @errors = @comment.errors.full_messages
        redirect_to event_path(@event), flash: { error: @errors }
      end
    end
  end

  # Strong parameters: only the comment body may be mass-assigned.
  def comment_params
    params.require(:comment).permit(:body)
  end
end
|
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* main.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: abanvill <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2018/01/19 11:16:02 by abanvill #+# #+# */
/* Updated: 2018/01/23 16:50:48 by abanvill ### ########.fr */
/* */
/* ************************************************************************** */
#include "../includes/fdf.h"
/*
** Draws the first frame, registers the mlx callbacks and enters the
** blocking mlx event loop (mlx_loop normally never returns).
*/
static int launch_mlx(t_sgl *sgl)
{
	command_center(sgl, CMD_CENTER);
	print_image(sgl);
	mlx_expose_hook(sgl->win, &command_expose, sgl);
	/* event 2 with mask 64 — presumably key handling routed to */
	/* command_clbk; confirm against the mlx_hook documentation. */
	mlx_hook(sgl->win, 2, 64, &command_clbk, sgl);
	mlx_loop(sgl->mlx);
	return (0);
}
/*
** Program entry: expects exactly one argument (the map file). Builds
** and initialises the fdf context step by step; any failed stage
** aborts. The link_mlx failure case previously returned -1 while all
** other failures returned EXIT_FAILURE — it now returns EXIT_FAILURE
** too, for a consistent exit status.
*/
int main(int ac, char *av[])
{
	t_sgl *sgl;

	if (ac != 2)
		return (EXIT_FAILURE);
	sgl = require_sgl();
	if (init_sgl(sgl) == -1)
		return (EXIT_FAILURE);
	if (init_sgl_screen(sgl) == -1)
		return (EXIT_FAILURE);
	if (parse_file(sgl, av[1]) == -1)
		return (EXIT_FAILURE);
	if (init_sgl_map(sgl) == -1)
		return (EXIT_FAILURE);
	if (init_commands(sgl) == -1)
		return (EXIT_FAILURE);
	if (link_mlx(sgl) == -1)
		return (EXIT_FAILURE);
	launch_mlx(sgl);
	return (EXIT_SUCCESS);
}
|
<filename>fetcher.py
import requests, re, json
import lxml.etree
import lxml.html.soupparser
user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/536.26.14 (KHTML, like Gecko) Version/6.0.1 Safari/536.26.14'
json_getter = re.compile(r'trulia\.propertyData\.set\((.*?)\);')
def get(url):
    """Fetch *url* using the module's spoofed desktop Safari User-Agent."""
    headers = {'User-Agent': user_agent}
    return requests.get(url, headers=headers)
def parse(html):
    """Parse *html* with the strict XML parser, falling back to the
    lenient BeautifulSoup-backed parser for malformed markup.
    """
    try:
        return lxml.etree.fromstring(html)
    except Exception:
        # Real-world listing pages are rarely well-formed XML; fall back
        # to the forgiving soup parser. The previous bare `except:` also
        # swallowed KeyboardInterrupt/SystemExit.
        return lxml.html.soupparser.fromstring(html)
def get_data(city, kind='for_rent',
        price=None, pets=None, amenities=None):
    """
    Get property listings from Trulia.

    city: The name (city, state) of the city to look in
    kind: for_rent or for_sale
    price: optional (min, max) tuple, rendered as the "<min>-<max>_price"
        URL segment
    pets: optional pets filter value ("<value>_pets" URL segment)
    amenities: optional amenity name, or an iterable of names
    returns: iterator of dicts with result data
    """
    # NOTE(review): xrange below means this module targets Python 2.
    assert kind in ('for_rent','for_sale')
    # Trulia URLs spell "City, ST" as "City_name,ST" with underscores.
    city = city.replace(', ', ',').replace(' ', '_')
    url_chunks = []
    if price:
        url_chunks.append('%d-%d_price' % price)
    if pets:
        url_chunks.append('%s_pets' % pets)
    if amenities:
        # A single string is one amenity; anything else is iterated.
        if isinstance(amenities, str):
            url_chunks.append('%s_amenities' % amenities)
        else:
            for e in amenities:
                url_chunks.append('%s_amenities' % e)
    base_url = 'http://trulia.com/%s/%s/%s' % (kind, city, '/'.join(url_chunks))
    first_page = get(base_url).text
    # The last pager link holds the total number of result pages.
    res = parse(first_page).xpath("id('4_paging')/a[last()]")[0]
    page_count = int(res.text)
    for page in xrange(page_count):
        if page == 0:
            html = first_page
        else:
            # NOTE(review): pages are fetched as "<page>_p" with page
            # starting at 1 here — verify this matches Trulia's pager
            # indexing and does not re-fetch the first page.
            html = get('%s/%d_p/' % (base_url, page)).text
        # Listings are embedded as JSON inside
        # trulia.propertyData.set(...) calls in the page source.
        for blob in json_getter.finditer(html):
            for e in json.loads(blob.group(1)):
                yield e
def get_picture_urls(datum):
    """Yield thumbnail URLs for a single listing's photos.

    datum: one listing dict as produced by get_data; its 'hasPhotos'
    entry (when present) is a list of strings like '<photo_id>:<size>'.
    Listings without photos yield nothing. Replaces the previous
    try/assert/bare-except construct (assertions vanish under -O) with
    an explicit guard.
    """
    photos = datum.get('hasPhotos')
    if not photos:
        # Missing key, None, or empty list: the listing has no photos.
        return
    for photo in photos:
        # Keep only the photo id before the ':' size suffix.
        yield 'http://thumbs.trulia-cdn.com/pictures/thumbs/%s' % photo.split(':')[0]
|
using System;
public class PermissionChecker
{
    /// <summary>
    /// Checks whether <paramref name="userAccount"/> may perform
    /// <paramref name="requestedOperation"/>, notifying the user on denial.
    /// </summary>
    /// <returns>true when the operation is permitted; otherwise false.</returns>
    public bool CheckPermission(string userAccount, string requestedOperation)
    {
        // Perform permission check logic here
        bool allowed = PerformPermissionCheck(userAccount, requestedOperation);
        if (!allowed)
        {
            NotifyUserOfPermissionIssue(userAccount, requestedOperation);
        }
        return allowed;
    }

    // Replace this with actual permission check logic.
    // For demonstration purposes, every user is allowed every operation.
    private bool PerformPermissionCheck(string userAccount, string requestedOperation)
    {
        return true;
    }

    // Replace this with actual notification logic.
    // Additional notification handling can be added here.
    private void NotifyUserOfPermissionIssue(string userAccount, string requestedOperation)
    {
        Console.WriteLine($"User '{userAccount}' does not have permission to perform operation '{requestedOperation}'.");
    }
}
|
<filename>qht-modules/qht-interface/src/main/java/com/qht/PageDto.java
package com.qht;
import java.util.Arrays;
import java.util.List;
/**
 * Paged query container: carries filter/sort/grouping criteria in and
 * returns the matching page of results.
 * @author 草原狼
 * @date Jul 11, 2018 5:35:00 PM
 */
public class PageDto<T> {
	public static final int DEFAULT_PAGE = 1;
	public static final int DEFAULT_PAGE_SIZE = 12;
	/**
	 * Condition (filter) field names.
	 */
	private String[] searchFields;
	/**
	 * Condition operators such as "like", ">", "<", "=", etc. <br/>
	 * Conditions are assembled as: <br/>
	 * searchFields[i]+" "+condition[i]+" "+searchValues[i]+" and "+searchFields[i+1]+" "+condition[i+1]+" "+searchValues[i+1]
	 */
	private String[] condition;
	/**
	 * Condition values (parallel to searchFields/condition).
	 */
	private String[] searchValues;
	/**
	 * Field to order by.
	 */
	private String orderField;
	/**
	 * Order direction (ascending/descending).
	 */
	private OrderType orderType;
	/**
	 * Group-by field names.
	 */
	private String[] groupByFields;
	/**
	 * Whether to apply DISTINCT.
	 */
	private Boolean distinct;
	/**
	 * Current page number.
	 */
	private Integer current;
	/**
	 * Number of records per page.
	 */
	private Integer pageSize;
	/**
	 * Total number of records.
	 */
	private long total=0L;
	/**
	 * Total number of pages.
	 */
	private long pages=0L;
	/**
	 * Spare field for ad-hoc extra data.
	 * NOTE(review): field name "Spare" breaks the camelCase convention
	 * used by every other field.
	 */
	private Object Spare;
	public Object getSpare() {
		return Spare;
	}
	public void setSpare(Object spare) {
		Spare = spare;
	}
	/**
	 * Result rows for the requested page.
	 */
	private List<T> data;
	public String[] getSearchFields() {
		return searchFields;
	}
	public String[] getCondition() {
		return condition;
	}
	public String[] getSearchValues() {
		return searchValues;
	}
	public String getOrderField() {
		return orderField;
	}
	public OrderType getOrderType() {
		return orderType;
	}
	public String[] getGroupByFields() {
		return groupByFields;
	}
	public Boolean isDistinct() {
		return distinct;
	}
	public Integer getCurrent() {
		return current;
	}
	public void setCurrent(Integer current) {
		this.current = current;
	}
	// NOTE(review): duplicates setPageSize(int) below; overload
	// resolution picks this one for Integer arguments.
	public void setPageSize(Integer pageSize) {
		this.pageSize = pageSize;
	}
	public Integer getPageSize() {
		return pageSize;
	}
	// Autoboxes the primitive long field into a Long.
	public Long getTotal() {
		return total;
	}
	public void setSearchFields(String[] searchFields) {
		this.searchFields = searchFields;
	}
	public void setCondition(String[] condition) {
		this.condition = condition;
	}
	public void setSearchValues(String[] searchValues) {
		this.searchValues = searchValues;
	}
	public void setOrderField(String orderField) {
		this.orderField = orderField;
	}
	// Convenience overload: parses the enum constant name; throws
	// IllegalArgumentException for an unknown name (valueOf contract).
	public void setOrderType(String orderType) {
		this.orderType = OrderType.valueOf(orderType);
	}
	public void setOrderType(OrderType orderType) {
		this.orderType = orderType;
	}
	public void setGroupByFields(String[] groupByFields) {
		this.groupByFields = groupByFields;
	}
	public void setDistinct(Boolean distinct) {
		this.distinct = distinct;
	}
	public void setPageSize(int pageSize) {
		this.pageSize = pageSize;
	}
	public void setTotal(long total) {
		this.total = total;
	}
	public List<T> getData() {
		return data;
	}
	public void setData(List<T> data) {
		this.data = data;
	}
	// NOTE(review): prints "Page [...]" although the class is PageDto.
	@Override
	public String toString() {
		return "Page [searchFields=" + Arrays.toString(searchFields) + ", condition=" + Arrays.toString(condition)
				+ ", searchValues=" + Arrays.toString(searchValues) + ", orderField=" + orderField + ", orderType="
				+ orderType + ", groupByFields=" + Arrays.toString(groupByFields) + ", distinct=" + distinct + ", page="
				+ current + ", pageNum=" + pageSize + ", total=" + total + ", rows=" + pages + ", data=" + data + "]";
	}
	/**
	 * Copies the paging metadata of {@code page} into a new PageDto
	 * carrying {@code data} of a different element type. Filter/sort
	 * criteria are not copied.
	 */
	public static <E, T> PageDto<E> convert(PageDto<T> page, List<E> data) {
		PageDto<E> ePage = new PageDto<>();
		ePage.setPages(page.getPages());
		ePage.setTotal(page.getTotal());
		ePage.setCurrent(page.getCurrent());
		ePage.setPageSize(page.getPageSize());
		ePage.setData(data);
		return ePage;
	}
	public Long getPages() {
		return pages;
	}
	public void setPages(long pages) {
		this.pages = pages;
	}
	// NOTE(review): public mutable field, unlike every other property
	// which is private with accessors.
	public Object extendData;
	public Object getExtendData() {
		return extendData;
	}
	public void setExtendData(Object extendData) {
		this.extendData = extendData;
	}
}
|
def multi_bracket_validation(input_string):
    """Return True when all brackets in input_string are balanced and
    properly nested; non-bracket characters are ignored.
    """
    pairs = {')': '(', '}': '{', ']': '['}
    openers = set(pairs.values())
    pending = []
    for ch in input_string:
        if ch in openers:
            pending.append(ch)
        elif ch in pairs:
            # A closer must match the most recently opened bracket.
            if not pending or pending.pop() != pairs[ch]:
                return False
    # Balanced only if nothing is left open.
    return not pending
# Test cases for multi_bracket_validation
def test_mbv_true_case_empty_str():
    # No brackets at all is trivially balanced.
    result = multi_bracket_validation('')
    assert result == True

def test_mbv_true_case_valid_brackets():
    # Fully nested, matching pairs.
    result = multi_bracket_validation('[{()}]')
    assert result == True

def test_mbv_false_case_invalid_brackets():
    # Unclosed opener.
    result = multi_bracket_validation('[{()}')
    assert result == False

def test_mbv_false_case_invalid_order():
    # Interleaved (improperly nested) brackets.
    result = multi_bracket_validation('{[}]')
    assert result == False

def test_mbv_false_case_extra_closing_bracket():
    # Trailing opener left on the stack.
    result = multi_bracket_validation('[{()}](')
    assert result == False
|
<reponame>mauri-medina/peoplemanagement
package com.swnat.service;
import com.swnat.dto.PaginationResponse;
import com.swnat.model.Candidate;
import org.springframework.web.multipart.MultipartFile;
public interface CandidateService extends IGenericService<Candidate, Long> {
    /**
     * Pages through candidates matching a free-text filter.
     *
     * @param filter free-text filter value
     * @param page   page index
     * @param size   page size
     * @param sortBy property name to sort by
     * @return one page of matching candidates
     */
    PaginationResponse<Candidate> findByFilter(String filter, int page, int size, String sortBy);
    /** Loads a single candidate by primary key. */
    Candidate getOne(Long id);
    // NOTE(review): the two update methods take the id as String while getOne
    // uses Long — confirm with implementations whether this is intentional.
    /** Updates the candidate's interview status. */
    void updateInterviewStatus(String id, String interviewStatus);
    /** Updates the candidate's challenge status. */
    void updateChallengeStatus(String id, String challengeStatus);
    /** Stores the uploaded image; returns the stored image's URL. */
    String uploadImage(MultipartFile image);
    /** Removes a previously uploaded image identified by its URL. */
    void removeImage(String urlImage);
}
|
// ┌──────────────────────────────────────────────────────────────────────────────────────────────┐
// │ Copyright (c) 2021 by the author of the React-weather project. All rights reserved. │
// │ This owner-supplied source code has no limitations on the condition imposed on the │
// │ maintenance of the copyright notice. │
// │ For more information, read the LICENSE file at the root of the project. │
// │ Written by author <NAME> <<EMAIL>>. │
// └──────────────────────────────────────────────────────────────────────────────────────────────┘
import * as C from './styled';
// Stateless footer component: renders the copyright line inside the
// styled container components imported from './styled'.
export default function Footer() {
  return (
    <C.Container>
      <C.Copyright>Copyright © 2021 <NAME></C.Copyright>
    </C.Container>
  );
}
|
#!/bin/bash
# NOTE: shebang changed from /bin/sh to /bin/bash — this script depends on
# bash-only features used throughout: `set -o pipefail`, the `function`
# keyword, the ERR trap, arrays (RSYNC_PROTECT_TMP_FILES) and `[[ ]]` tests.
# Under a POSIX sh (e.g. dash) those constructs fail.
set -e
set -u
set -o pipefail
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
elif [ -L "${binary}" ]; then
echo "Destination binary is symlinked..."
dirname="$(dirname "${binary}")"
binary="${dirname}/$(readlink "${binary}")"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
local bcsymbolmap_path="$1"
local destination="${BUILT_PRODUCTS_DIR}"
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary"
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
# Both configurations embed exactly the same set of Pods frameworks, so the
# two previously duplicated Debug/Release blocks are merged into one branch.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/ObjectMapper/ObjectMapper.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/PKHUD/PKHUD.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Realm/Realm.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/RealmSwift/RealmSwift.framework"
fi
# When parallel signing was enabled, signing jobs run in the background;
# wait for all of them before the script phase finishes.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
#include <cstddef>
template <typename T>
// Singly linked list exposing stack-like front operations.
//
// Fixes over the previous version:
//  * The old members `Node _first; Node _last;` could not compile: Node has
//    only a Node(const T&) constructor, so value members had no default
//    constructor to call. A single head pointer is used instead.
//  * `_first.next` was never initialized, so clear()/the destructor walked
//    an indeterminate pointer (undefined behavior). The head now starts as
//    nullptr and every traversal terminates.
// Public interface (push_front / pop_front / clear / size / empty) is
// unchanged. NOTE(review): copy construction/assignment are still the
// compiler defaults and would double-delete nodes; callers must not copy.
class custom_linked_list {
private:
    struct Node {
        T data;
        Node* next;
        Node(const T& value) : data(value), next(nullptr) {}
    };
    Node* _head;     // first node, nullptr when the list is empty
    size_t _length;  // number of stored elements
public:
    custom_linked_list() : _head(nullptr), _length(0) {
    }
    virtual ~custom_linked_list() {
        this->clear();
    }
    // Prepends a copy of value; O(1).
    void push_front(const T& value) {
        Node* newNode = new Node(value);
        newNode->next = _head;
        _head = newNode;
        _length++;
    }
    // Removes the first element; no-op on an empty list. O(1).
    void pop_front() {
        if (_head != nullptr) {
            Node* temp = _head;
            _head = temp->next;
            delete temp;
            _length--;
        }
    }
    // Deletes every node and resets the list to empty.
    void clear() {
        while (_head != nullptr) {
            Node* temp = _head;
            _head = temp->next;
            delete temp;
        }
        _length = 0;
    }
    size_t size() const {
        return _length;
    }
    bool empty() const {
        return _length == 0;
    }
};
|
<filename>src/assertions.ts
import { objectToString, objectType } from './common';
import { AssertionError } from './errors';
import * as checks from './checks';
export const INVERT = true;

/**
 * Core assertion primitive. Throws AssertionError when `condition` fails
 * (or, with `invert`, when it holds).
 *
 * @param condition outcome of the runtime check
 * @param invert    when true, the assertion is negated
 * @param value     the value under test (attached to the error)
 * @param assertion name of the calling assertion, expected to start with "is"
 * @param details   human-readable description of the expectation
 */
export function Assert(condition: boolean, invert: boolean, value: any, assertion: string,
  details: string): void {
  // 'isEqual' -> 'isNotEqual' when inverted (slice(2) drops the 'is' prefix).
  let name = ((invert) ? 'isNot' : 'is') + assertion.slice(2);
  let messagePrefix = (invert) ? 'is' : 'is not';
  let message = `${messagePrefix} ${details}`;
  if ((invert) ? condition : !condition) throw new AssertionError(value, name, message);
}
/** Asserts `value` has the same runtime type as `target`. */
export function isSameType(target: any, value: any, invert: boolean = false): void {
  let targetType = objectType(target);
  let valueType = objectType(value);
  Assert(targetType === valueType, invert, valueType, isSameType.name,
    `the same type as ${objectToString(targetType)}`);
}
/** Asserts two type-name strings are identical. */
export function isSameTypeName(target: string, value: string, invert: boolean = false): void {
  Assert(target === value, invert, value, isSameTypeName.name,
    `the same type as ${objectToString(target)}`);
}
/** Asserts strict equality (===) with `target`. */
export function isEqual(target: any, value: any, invert: boolean = false): void {
  Assert(target === value, invert, value, isEqual.name, `equal to ${objectToString(target)}`);
}
// Param widened from `string` to `unknown`: a correctly-typed string could
// never satisfy `typeof value === 'symbol'`, making the check unreachable.
export function isSymbol(value: unknown, invert: boolean = false): void {
  Assert(typeof value === 'symbol', invert, value, isSymbol.name, 'a symbol');
}
// Param widened from `string` to `unknown` for the same reason as isSymbol.
export function isBoolean(value: unknown, invert: boolean = false): void {
  Assert(typeof value === 'boolean', invert, value, isBoolean.name, 'a boolean');
}
/** Asserts `value` is a string. */
export function isString(value: string, invert: boolean = false): void {
  Assert(typeof value === 'string', invert, value, isString.name, 'a string');
}
/** Asserts `value` is a number and not NaN. */
export function isNumber(value: number, invert: boolean = false): void {
  Assert(!Number.isNaN(value), invert, value, isNumber.name, 'a number');
}
/** Asserts `value` is an integer. */
export function isInt(value: number, invert: boolean = false): void {
  Assert(Number.isInteger(value), invert, value, isInt.name, 'an integer');
}
export function isFloat(value: number, invert: boolean = false): void {
  // todo update — currently only rejects NaN; does not distinguish ints from floats.
  Assert(!Number.isNaN(value), invert, value, isFloat.name, 'a float');
}
/** Asserts loose equality (==) with `target`. */
export function isEqualTo(target: number, value: number, invert: boolean = false): void {
  Assert(value == target, invert, value, isEqualTo.name, `== ${target}`);
}
export function isGreaterThan(target: number, value: number, invert: boolean = false): void {
  Assert(value > target, invert, value, isGreaterThan.name, `> ${target}`);
}
export function isGreaterThanOrEqualTo(target: number, value: number, invert: boolean = false): void {
  Assert(value >= target, invert, value, isGreaterThanOrEqualTo.name, `>= ${target}`);
}
export function isLessThanOrEqualTo(target: number, value: number, invert: boolean = false): void {
  // Fixed: the failure message previously said `>=` for a <= comparison.
  Assert(value <= target, invert, value, isLessThanOrEqualTo.name, `<= ${target}`);
}
export function isLessThan(target: number, value: number, invert: boolean = false): void {
  Assert(value < target, invert, value, isLessThan.name, `< ${target}`);
}
export function isArray(value: any[], invert: boolean = false): void {
  Assert(Array.isArray(value), invert, value, isArray.name, 'an array');
}
export function isNull(value: any, invert: boolean = false): void {
  Assert(value === null, invert, value, isNull.name, 'null');
}
// NOTE: `typeof null === 'object'`, so this accepts null as well.
export function isObject(value: any, invert: boolean = false): void {
  Assert(typeof value === 'object', invert, value, isObject.name, 'an object');
}
// Param widened from `number` to `unknown`: a value typed `number` can never
// be `undefined` under strict null checks, making the check unreachable.
export function isUndefined(value: unknown, invert: boolean = false): void {
  Assert(typeof value === 'undefined', invert, value, isUndefined.name, 'undefined');
}
/** Asserts `value` matches the given regular expression. */
export function isRegEx(regEx: RegExp, value: string, invert: boolean = false): void {
  Assert(regEx.test(value), invert, value, isRegEx.name, `a regular expression match`);
}
export function isDateString(value: string, invert: boolean = false): void {
  // Fixed: the condition was inverted (it asserted Date.parse FAILED), and the
  // error reported isRegEx.name with a regex message instead of its own.
  Assert(!isNaN(Date.parse(value)), invert, value, isDateString.name, `a parsable date string`);
}
/** Asserts `value` is an ISO 8601 date string (delegates to checks.isIso8601). */
export function isIso8601(value: string, invert: boolean = false): void {
  Assert(checks.isIso8601(value), invert, value, isIso8601.name, `an ISO8601 date match`);
}
|
/*
* Copyright (c) 2007-2013 Concurrent, Inc. All Rights Reserved.
*
* Project and contact information: http://www.cascading.org/
*
* This file is part of the Cascading project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cascading.tuple.hadoop;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;
import java.util.Set;
import cascading.CascadingTestCase;
import cascading.flow.hadoop.HadoopFlowProcess;
import cascading.tuple.Tuple;
import cascading.tuple.collect.SpillableProps;
import cascading.tuple.hadoop.collect.HadoopSpillableTupleList;
import cascading.tuple.hadoop.collect.HadoopSpillableTupleMap;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
/**
*
*/
/**
 * Exercises the Hadoop spillable tuple collections: verifies that
 * {@link HadoopSpillableTupleList} spills to disk the expected number of
 * times for a given size/threshold (optionally compressed) and that
 * iteration returns tuples intact and in order, and smoke-tests
 * {@link HadoopSpillableTupleMap} under random key distributions.
 */
public class SpillableTupleHadoopTest extends CascadingTestCase
  {
  public SpillableTupleHadoopTest()
    {
    super();
    }
  @Test
  public void testSpillList()
    {
    long time = System.currentTimeMillis();
    // (size, threshold, codec, expected spill count): sizes straddle the
    // threshold multiples to pin down exactly when a spill file is written.
    performListTest( 5, 50, null, 0 );
    performListTest( 49, 50, null, 0 );
    performListTest( 50, 50, null, 0 );
    performListTest( 51, 50, null, 1 );
    performListTest( 499, 50, null, 9 );
    performListTest( 500, 50, null, 9 );
    performListTest( 501, 50, null, 10 );
    System.out.println( "time = " + ( System.currentTimeMillis() - time ) );
    }
  @Test
  public void testSpillListCompressed()
    {
    // Same boundary cases as testSpillList, but with gzip-compressed spills.
    GzipCodec codec = ReflectionUtils.newInstance( GzipCodec.class, new JobConf() );
    long time = System.currentTimeMillis();
    performListTest( 5, 50, codec, 0 );
    performListTest( 49, 50, codec, 0 );
    performListTest( 50, 50, codec, 0 );
    performListTest( 51, 50, codec, 1 );
    performListTest( 499, 50, codec, 9 );
    performListTest( 500, 50, codec, 9 );
    performListTest( 501, 50, codec, 10 );
    System.out.println( "time = " + ( System.currentTimeMillis() - time ) );
    }
  /**
   * Fills a spillable list with {@code size} tuples, then checks the spill
   * count, tuple order (field 0 increments by 1) and round-tripped content
   * (the Text field must match its source string).
   */
  private void performListTest( int size, int threshold, CompressionCodec codec, int spills )
    {
    HadoopSpillableTupleList list = new HadoopSpillableTupleList( threshold, codec, new JobConf() );
    for( int i = 0; i < size; i++ )
      {
      String aString = "string number " + i;
      double random = Math.random();
      list.add( new Tuple( i, aString, random, new Text( aString ) ) );
      }
    assertEquals( "not equal: list.size();", size, list.size() );
    assertEquals( "not equal: list.getNumFiles()", spills, list.spillCount() );
    int i = -1;
    int count = 0;
    for( Tuple tuple : list )
      {
      int value = tuple.getInteger( 0 );
      assertTrue( "wrong diff", value - i == 1 );
      assertEquals( "wrong value", "string number " + count, tuple.getObject( 3 ).toString() );
      i = value;
      count++;
      }
    assertEquals( "not equal: list.size();", size, count );
    // A fresh iterator must restart from the beginning.
    Iterator<Tuple> iterator = list.iterator();
    assertEquals( "not equal: iterator.next().get(1)", "string number 0", iterator.next().getObject( 1 ) );
    assertEquals( "not equal: iterator.next().get(1)", "string number 1", iterator.next().getObject( 1 ) );
    }
  @Test
  public void testSpillMap()
    {
    long time = System.currentTimeMillis();
    JobConf jobConf = new JobConf();
    performMapTest( 5, 5, 100, 20, jobConf );
    performMapTest( 5, 50, 100, 20, jobConf );
    performMapTest( 50, 5, 200, 20, jobConf );
    performMapTest( 500, 50, 7000, 20, jobConf );
    System.out.println( "time = " + ( System.currentTimeMillis() - time ) );
    }
  @Test
  public void testSpillMapCompressed()
    {
    long time = System.currentTimeMillis();
    JobConf jobConf = new JobConf();
    // Enable gzip for spilled segments, then rerun the same map scenarios.
    jobConf.set( SpillableProps.SPILL_CODECS, "org.apache.hadoop.io.compress.GzipCodec" );
    performMapTest( 5, 5, 100, 20, jobConf );
    performMapTest( 5, 50, 100, 20, jobConf );
    performMapTest( 50, 5, 200, 20, jobConf );
    performMapTest( 500, 50, 7000, 20, jobConf );
    System.out.println( "time = " + ( System.currentTimeMillis() - time ) );
    }
  /**
   * Inserts listSize*numKeys tuples under pseudo-random keys (sum of three
   * uniform draws, seeded for reproducibility) and verifies the map holds
   * exactly one entry per distinct key.
   */
  private void performMapTest( int numKeys, int listSize, int mapThreshold, int listThreshold, JobConf jobConf )
    {
    HadoopFlowProcess flowProcess = new HadoopFlowProcess( jobConf );
    HadoopSpillableTupleMap map = new HadoopSpillableTupleMap( SpillableProps.defaultMapInitialCapacity, SpillableProps.defaultMapLoadFactor, mapThreshold, listThreshold, flowProcess );
    Set<Integer> keySet = new HashSet<Integer>();
    Random gen = new Random( 1 );
    for( int i = 0; i < listSize * numKeys; i++ )
      {
      String aString = "string number " + i;
      double random = Math.random();
      double keys = numKeys / 3.0;
      int key = (int) ( gen.nextDouble() * keys + gen.nextDouble() * keys + gen.nextDouble() * keys );
      map.get( new Tuple( key ) ).add( new Tuple( i, aString, random, new Text( aString ) ) );
      keySet.add( key );
      }
    assertEquals( "not equal: map.size();", keySet.size(), map.size() );
    }
  }
|
<reponame>harry-xiaomi/SREWorks
package com.alibaba.sreworks.health.domain.req.incident;
import com.alibaba.sreworks.health.common.constant.Constant;
import com.google.common.base.Preconditions;
import io.swagger.annotations.ApiModel;
import org.apache.commons.lang3.StringUtils;
/**
* 新增异常类型请求
*
* @author: <EMAIL>
* @date: 2021/10/20 11:47
*/
@ApiModel(value = "新增异常类型")
public class IncidentTypeCreateReq extends IncidentTypeBaseReq {
    /**
     * Returns the incident-type label, validating it first.
     *
     * @throws IllegalArgumentException if the label is empty
     *         (message: "类型标识不允许为空" — "type label must not be empty")
     */
    @Override
    public String getLabel() {
        Preconditions.checkArgument(StringUtils.isNotEmpty(label), "类型标识不允许为空");
        return label;
    }
    /**
     * Returns the incident-type name, validating it first.
     *
     * @throws IllegalArgumentException if the name is empty
     *         (message: "类型名称不允许为空" — "type name must not be empty")
     */
    @Override
    public String getName() {
        Preconditions.checkArgument(StringUtils.isNotEmpty(name), "类型名称不允许为空");
        return name;
    }
}
|
<filename>src/main/java/org/spongepowered/spunbric/mod/mixin/api/data/DataHolderMixin_API.java
package org.spongepowered.spunbric.mod.mixin.api.data;
import net.minecraft.block.entity.BlockEntity;
import net.minecraft.entity.Entity;
import net.minecraft.item.ItemStack;
import org.spongepowered.api.data.DataHolder;
import org.spongepowered.api.data.Key;
import org.spongepowered.api.data.persistence.DataContainer;
import org.spongepowered.api.data.property.Property;
import org.spongepowered.api.data.value.Value;
import org.spongepowered.api.data.value.ValueContainer;
import org.spongepowered.asm.mixin.Mixin;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalDouble;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.Set;
@Mixin(value = {BlockEntity.class, Entity.class, ItemStack.class/*, SpongeUser.class*/}, priority = 899)
// Mixin that makes the listed Minecraft classes implement Sponge's DataHolder.
// Every method below is currently an unimplemented stub (returns null,
// Optional.empty(), 0 or false) — presumably placeholders awaiting real
// bridging logic; callers must not rely on these return values yet.
public abstract class DataHolderMixin_API implements DataHolder {
    @Override
    public int getContentVersion() {
        return 0;
    }
    @Override
    public DataContainer toContainer() {
        return null;
    }
    @Override
    public <V> Optional<V> getProperty(Property<V> property) {
        return Optional.empty();
    }
    @Override
    public Map<Property<?>, ?> getProperties() {
        return null;
    }
    @Override
    public <E> Optional<E> get(Key<? extends Value<E>> key) {
        return Optional.empty();
    }
    // NOTE(review): the Optional* accessors return null rather than an empty
    // OptionalInt/Double/Long — confirm whether that is intentional for stubs.
    @Override
    public OptionalInt getInt(Key<? extends Value<Integer>> key) {
        return null;
    }
    @Override
    public OptionalDouble getDouble(Key<? extends Value<Double>> key) {
        return null;
    }
    @Override
    public OptionalLong getLong(Key<? extends Value<Long>> key) {
        return null;
    }
    @Override
    public <E, V extends Value<E>> Optional<V> getValue(Key<V> key) {
        return Optional.empty();
    }
    @Override
    public boolean supports(Key<?> key) {
        return false;
    }
    @Override
    public ValueContainer copy() {
        return null;
    }
    @Override
    public Set<Key<?>> getKeys() {
        return null;
    }
    @Override
    public Set<Value.Immutable<?>> getValues() {
        return null;
    }
}
|
# Builds an Ubuntu 17.10 (artful) chroot for multi-language app hosting:
# bootstraps the base system with a large preinstalled package set, performs
# first-boot configuration inside the chroot, builds the unbit NSS module,
# then copies host-side config files into the new root.
set -u
set -e
# Bootstrap the base system with every runtime/toolchain package preinstalled.
debootstrap --components=main,universe,multiverse --include=vim,build-essential,git,redis-server,lua5.1,postgresql,libpq-dev,python-dev,python3-dev,memcached,mongodb,libperl-dev,ruby,ruby-dev,wget,language-pack-en,libcurl4-openssl-dev,mysql-server,libyajl-dev,beanstalkd,ssh,rsync,libluajit-5.1-dev,curl,ipython,liblocal-lib-perl,python-virtualenv,python-pip,libpcre3-dev,libjansson-dev,quota,gawk,libreadline-dev,libyaml-dev,libsqlite3-dev,sqlite3,autoconf,libgdbm-dev,libncurses5-dev,automake,libtool,bison,libffi-dev,libphp7.1-embed,php7.1-json,php7.1-mysql,php7.1-gd,php7.1-pgsql,php7.1-dev,libxml2-dev,libdb-dev,libbz2-dev,libjpeg-dev,libpng-dev,ruby-rack,postgresql-contrib,postgis,libxslt1-dev,sphinxsearch,libmysqlclient-dev,imagemagick,libreoffice,tesseract-ocr,tesseract-ocr-ita,pdftk,wkhtmltopdf,graphicsmagick,poppler-utils,ghostscript,language-pack-it,language-pack-de,language-pack-es,language-pack-pt,language-pack-pl,nullmailer,nodejs,nano,htop,emacs,mercurial,screen,apache2-utils,unzip,erlang-nox,libdatetime-perl,libmemcached-dev,libapache2-mod-svn,libapache2-mod-gnutls,libapache2-mod-xsendfile,libapache2-mod-php7.1,php-pear,libcap2-bin,libcap-dev,libode-dev,gettext,libreoffice-style-galaxy,libapache2-mod-rpaf,graphviz,strace,e2fslibs-dev,bind9-host,php7.1-curl,bc,pastebinit,tmux,php7.1-mcrypt,php7.1-intl,php7.1-imap,mc,zip,xmlsec1,libxmlsec1-dev,attr,acl,libssh2-1-dev,libkrb5-dev,python3-virtualenv,libgeo-ip-perl,virtualenv,subversion,ftp,libgeoip-dev,libattr1-dev,telnet,npm,golang,couchdb,erlang,ffmpeg,unoconv,libfl-dev,libzmq3-dev,php7.1-xsl,libdbd-pg-perl,libdbd-mysql-perl,openjdk-8-jdk,node-clean-css,node-less,apache2,apache2-dev,php-mbstring,virtualenvwrapper,python-setuptools,net-tools,python3.6-dev,python3.6-venv,php-imagick,nginx artful /distros/artful
# First-boot setup, executed inside the new root (heredoc fed to chroot bash).
chroot /distros/artful /bin/bash -x <<'EOF'
set -u
set -e
dpkg-reconfigure tzdata
# Prevent services from auto-starting inside the chroot (policy-rc.d exit 101).
echo "exit 101" > /usr/sbin/policy-rc.d
chmod 755 /usr/sbin/policy-rc.d
mkdir /.old_root
ln -s /proc/self/mounts /etc/mtab
mkdir /containers
mkdir -p /opt/unbit/uwsgi/plugins
# Hostname/hosts are provided at runtime via resolvconf symlinks.
rm /etc/hosts /etc/hostname
ln -s /run/resolvconf/hostname /etc/hostname
ln -s /run/resolvconf/hosts /etc/hosts
cd /root
# Build the unbit NSS module from source.
git clone https://github.com/unbit/nss-unbit
cd nss-unbit
make
EOF
# Install host-provided configuration into the chroot.
cp nsswitch.conf /distros/artful/etc/nsswitch.conf
cp shortcuts.ini /distros/artful/opt/unbit/uwsgi/shortcuts.ini
|
# Interactively collects Matrix connection settings and writes credentials.json.
# read -r prevents backslash mangling; the password prompt disables terminal
# echo so the secret is not displayed while typing (previously it was echoed).
echo "Homeserver: (https://matrix.org)"
read -r HOMESERVER
echo "User ID: (@user:matrix.org)"
read -r USER_ID
echo "Room: (!abc:matrix.org)"
read -r ROOM
echo "Password"
stty -echo 2>/dev/null || true
read -r PASSWORD
stty echo 2>/dev/null || true
echo
# NOTE(review): values are interpolated verbatim — a double quote or backslash
# in any answer produces invalid JSON; consider a JSON-aware writer (e.g. jq).
echo "{\"homeserver\": \"$HOMESERVER\", \"user_id\": \"$USER_ID\", \"default_room\": \"$ROOM\", \"password\": \"$PASSWORD\"}" > credentials.json
|
import { Injectable } from '@angular/core';
import { Resolve, ActivatedRouteSnapshot, RouterStateSnapshot, Routes } from '@angular/router';
import { UserRouteAccessService } from '../../shared';
import { JhiPaginationUtil } from 'ng-jhipster';
import { RoleComponent } from './role.component';
import { RoleDetailComponent } from './role-detail.component';
import { RolePopupComponent } from './role-dialog.component';
import { RoleDeletePopupComponent } from './role-delete-dialog.component';
// Standard (non-popup) routes for the Role entity: list view and detail view.
// Both require the ROLE_PMANAGER authority, enforced by UserRouteAccessService.
export const roleRoute: Routes = [
    {
        path: 'role',
        component: RoleComponent,
        data: {
            authorities: ['ROLE_PMANAGER'],
            pageTitle: 'manaProjectApp.role.home.title'
        },
        canActivate: [UserRouteAccessService]
    }, {
        path: 'role/:id',
        component: RoleDetailComponent,
        data: {
            authorities: ['ROLE_PMANAGER'],
            pageTitle: 'manaProjectApp.role.home.title'
        },
        canActivate: [UserRouteAccessService]
    }
];
// Popup-outlet routes for the Role entity: create, edit and delete dialogs.
// All require ROLE_PMANAGER and render in the named 'popup' router outlet.
export const rolePopupRoute: Routes = [
    {
        path: 'role-new',
        component: RolePopupComponent,
        data: {
            authorities: ['ROLE_PMANAGER'],
            pageTitle: 'manaProjectApp.role.home.title'
        },
        canActivate: [UserRouteAccessService],
        outlet: 'popup'
    },
    {
        path: 'role/:id/edit',
        component: RolePopupComponent,
        data: {
            authorities: ['ROLE_PMANAGER'],
            pageTitle: 'manaProjectApp.role.home.title'
        },
        canActivate: [UserRouteAccessService],
        outlet: 'popup'
    },
    {
        path: 'role/:id/delete',
        component: RoleDeletePopupComponent,
        data: {
            authorities: ['ROLE_PMANAGER'],
            pageTitle: 'manaProjectApp.role.home.title'
        },
        canActivate: [UserRouteAccessService],
        outlet: 'popup'
    }
];
|
class ProxyManager:
    """Maintains a pool of proxies, topping it up when it drops below a threshold."""

    def __init__(self, low_water_mark, proxies_list):
        # Minimum number of proxies to keep available.
        self.LOW_WATER_MARK = low_water_mark
        # Shared list of currently available proxies (mutated in place).
        self.proxies_list = proxies_list

    def fetch_proxy(self):
        """Refill the pool up to LOW_WATER_MARK.

        Stops as soon as get_new_proxy() fails to produce a proxy. The
        previous version only appended truthy results but kept looping,
        so an unavailable provider caused an infinite loop.
        """
        # int(...) preserved for callers passing a float water mark
        # (the original multiplied by 1, which was a no-op).
        while len(self.proxies_list) < int(self.LOW_WATER_MARK):
            new_proxy = self.get_new_proxy()
            if not new_proxy:
                break  # provider exhausted/unavailable; avoid spinning forever
            self.proxies_list.append(new_proxy)

    def get_new_proxy(self):
        # Implement logic to fetch a new proxy
        # This could involve making a request to a proxy provider or using a proxy pool
        # Return the new proxy if successfully fetched, otherwise return None
        pass
|
import AMD from '../../amd/src/amd.e6';
import Core from '../../core/src/core.e6';
import Event from '../../event/src/event.e6';
import Detect from '../../detect/src/detect.e6';
import Module from '../../modules/src/base.es6';
import ModulesApi from '../../modules/src/api.e6';
window.Moff = new Core();
window.Moff.amd = new AMD();
window.Moff.event = new Event();
window.Moff.Module = new Module();
window.Moff.detect = new Detect();
window.Moff.modules = new ModulesApi();
|
<filename>2021-05-09/今日更新求职招聘类/pages/workplace/workplace.js
// pages/workplace/workplace.js
// City-selection page for choosing work locations (up to 5, or a single
// city when arriving from the basic-info flow).
const select_city = require('../../utils/AELACTION.js');
const select = require('../../utils/util.js');
var app = getApp();
Page({
data:{
  show_img:false,
  has_select:[],      // cities the user has already picked
  user_action:[],
  hot_city:[],        // popular cities shown as shortcuts
  province:[],        // province list for drill-down selection
  length:'5',         // maximum number of selectable cities
  show:true
},
onLoad:function(options){
  // Page init; options carries parameters passed in via navigation.
  let basicInf = options.status;
  wx.setStorage({ // cache the flow flag (single-select vs multi-select mode)
    key:"basicInf",
    data:basicInf
  })
  if( basicInf ){
    this.setData({
      show:false
    })
  }
  let all = select_city.all; // all cities
  let province = select_city.province; // all provinces
  let hot = select_city.hot; // hot (popular) cities
  let hot_city = select.hot_city(all,hot);
  let province_list = select.province(all,province)
  this.setData({
    hot_city:hot_city,
    province:province_list,
  })
},
onReady:function(){
  // Page render complete.
},
onShow:function(){
  // Page shown: restore previous selections from storage.
  var has_select = wx.getStorageSync('has_select') || [];
  var userCityId = wx.getStorageSync('userCityId');
  // console.log( userCityId )
  this.setData({
    has_select:has_select,
    'user_action[0]':userCityId
  })
},
onHide:function(){
  // Page hidden.
},
onUnload:function(){
  // Page closed.
},
del_this:function(e){ // remove a selected city
  let id = e.currentTarget.dataset.id;
  var has_select_temp = wx.getStorageSync('has_select') || [];
  var idArr =[];
  // Keep every selection except the one being removed.
  for(let i=0 ,len =has_select_temp ;i<len.length;i++){
    if( len[i].id != id ){
      idArr.push(len[i])
    }
  };
  wx.setStorage({
    key: 'has_select',
    data:idArr
  })
  this.setData({
    has_select:idArr
  })
},
select_action:function(e){// select a hot city
  var basicInf = wx.getStorageSync('basicInf');
  let all = select_city.all; // all cities
  let id = e.currentTarget.dataset.id;
  let city = e.currentTarget.dataset.city;
  var has_select_now = [{id:id,city:city}];
  var has_select_temp = wx.getStorageSync('has_select') || [];
  if( basicInf ){
    // Single-select mode: cache the chosen id and return to the caller page.
    // console.log( id )
    wx.setStorage({ // cache the selected city id
      key:"cityId",
      data:id,
      success:function(){
        wx.navigateBack({
          delta: 1
        })
      }
    })
  }else{
    // Multi-select mode: append (deduplicated) up to the 5-city limit.
    if( has_select_temp.length <5 ){
      var has_select;
      if( has_select_now[0].id ){
        has_select = has_select_temp.concat(has_select_now);
        wx.setStorage({
          key: 'has_select',
          data:select.quchong(has_select,'city',all)
        })
        this.setData({
          has_select:select.quchong(has_select,'city',all)
        })
      }
    }else{
      wx.showModal({
        content: '工作地点最多支持选择5个',
        showCancel:false,
        success: function(res) {
          if (res.confirm) {
            wx.navigateBack({delta: 1})
          }
        }
      })
    }
  }
},
select_province:function(e){// drill into the cities of a province
  let id = e.currentTarget.dataset.id;
  var basicInf = wx.getStorageSync('basicInf');
  if( basicInf ){
    wx.redirectTo({
      url: '../workplaceTwo/workplaceTwo?id='+id,
    })
  }else{
    wx.navigateTo({
      url: '../workplaceTwo/workplaceTwo?id='+id,
    })
  }
}
})
|
#!/usr/bin/env bash
# Reset all training artifacts from the previous run.
rm -rf train_vis checkpoints log.txt
# Recreate the visualization tree; -p creates train_vis and both leaf dirs.
mkdir -p train_vis/train train_vis/valid
|
<gh_stars>0
import './App.css';
import { Routes, Route } from 'react-router-dom';
import React, { useState } from 'react';
import Home from './views/Home/Home';
import UserDashboard from './views/UserDashboard/UserDashboard';
import WorkerDashboard from './views/WorkerDashboard/WorkerDashboard';
import AdminDashboard from './views/AdminDashboard/AdminDashboard';
import UserLogin from './views/Login/UserLogin';
import UserRegister from './views/Register/UserRegister';
import WorkerLogin from './views/Login/WorkerLogin';
import WorkerRegister from './views/Register/WorkerRegister';
import AdminLogin from './views/Login/AdminLogin';
import Profile from './views/Profile/Profile';
import EditProfile from './views/Profile/EditProfile';
const App = () => {
const user = {
username: 'User 1',
address: 'New Delhi',
email: '<EMAIL>',
password: '@#$$%#$^@@',
};
const [token, setToken] = useState('');
return (
<div className="App">
<Routes>
<Route exact path="/" element={<Home />} />
<Route exact path="/UserDashboard" element={<UserDashboard />} />
<Route exact path="/WorkerDashboard" element={<WorkerDashboard />} />
<Route exact path="/AdminDashboard" element={<AdminDashboard />} />
<Route exact path="/UserLogin" element={<UserLogin setToken={setToken} />} />
<Route exact path="/UserRegister" element={<UserRegister />} />
<Route exact path="/WorkerLogin" element={<WorkerLogin />} />
<Route exact path="/WorkerRegister" element={<WorkerRegister />} />
<Route exact path="/AdminLogin" element={<AdminLogin />} />
<Route exact path="/Profile" element={<Profile user={user} token={token} />} />
<Route exact path="/EditProfile" element={<EditProfile user={user} />} />
</Routes>
</div>
);
};
export default App;
|
from sandman2 import get_app

# Database connection URI (SQLAlchemy format).
DATABASE = 'sqlite:///path_to_your_database.db'

# Name of the table of interest.  sandman2 reflects *all* tables by default
# (reflect_all=True), so this constant is informational only.
TABLE = 'your_table_name'


def main():
    """Create and run a sandman2 REST API over DATABASE.

    Note: ``activate()`` belongs to sandman v1 and does not exist in
    sandman2; table reflection happens inside ``get_app``, whose first
    positional argument is the database URI (not a module name).
    """
    app = get_app(DATABASE)
    # Start the development server (blocking call).
    app.run()


if __name__ == '__main__':
    main()
|
<gh_stars>0
# -*- coding: utf-8 -*-
import datetime
import logging
import json
import os
import pymysql
import sys
import yaml
from typing import Any
from typing import Dict
from typing import Final
from typing import List
from typing import NoReturn
from typing import Union
from timo.database_manager.models import Database
# Row-shaped query results: column name -> value.
QueryResult = Dict[str, Union[str, int, float]]
QueryResults = List[QueryResult]

# Log file lives in a `logs/` folder next to this module.  abspath() guards
# against dirname(__file__) == '' (which made the original path the absolute
# '/logs/mysql.log'), and the folder is created up front so basicConfig does
# not fail on a fresh checkout.
LOG_PATH: Final[str] = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'logs', 'mysql.log')
os.makedirs(os.path.dirname(LOG_PATH), exist_ok=True)

# filemode='w' truncates any previous log, so the old explicit os.remove()
# was redundant and has been dropped.
logging.basicConfig(
    filename=LOG_PATH,
    level=logging.INFO, filemode='w',
    format='%(asctime)s:\n\t%(levelname)s:%(message)s',
    datefmt='%m/%d/%Y %I:%M:%S %p'
)
def _log_file_write(func):
def wrapper(*args, **kwargs):
path = func(*args, **kwargs)
logging.info(f'Save success as query execution result file (path: {path})')
return wrapper
class MySQL(Database):
    """MySQL client built on pymysql.

    Every ``execute*`` helper opens a fresh connection via ``__connect__``;
    the fetch/save helpers close it again via ``__disconnect__`` (which also
    commits).  Rows come back as dicts thanks to ``DictCursor``.
    """

    def __init__(self, config) -> None:
        super(MySQL, self).__init__(config)
        # Schema name and charset are MySQL-specific; host/port/user (and,
        # presumably, the password) are handled by the Database base class.
        self.db = config.db
        self.charset = config.charset

    def __connect__(self) -> None:
        """Open a connection plus a DictCursor, or exit(1) on failure."""
        try:
            self.conn = pymysql.connect(
                host=self.host,
                port=self.port,
                user=self.user,
                # Bug fix: the source contained the redaction token <PASSWORD>
                # (a syntax error).  NOTE(review): assumes the base class
                # stores the credential as ``self.password`` -- confirm
                # against Database in models.py.
                password=self.password,
                db=self.db,
                charset=self.charset
            )
            self.cursor = self.conn.cursor(pymysql.cursors.DictCursor)
        except pymysql.MySQLError as e:
            logging.error(e)
            print(f"Error connecting to MySQL Platform: {e}")
            sys.exit(1)
        else:
            logging.info('Database connection successful')

    def __commit__(self) -> None:
        """Commit the current transaction, or exit(2) on failure."""
        try:
            self.conn.commit()
        except pymysql.MySQLError as e:
            logging.error(e)
            sys.exit(2)
        else:
            logging.info('The changes have been reflected in the database.')

    def __disconnect__(self) -> None:
        """Commit, then close the cursor and the connection."""
        self.__commit__()
        self.cursor.close()
        self.conn.close()
        logging.info('Database connection termination success')

    def _clean_datetime(self, query_result: Union[QueryResult, QueryResults]) -> QueryResults:
        """Stringify DATETIME columns so rows are JSON/YAML serializable.

        Args:
            query_result: a single row dict or a list of row dicts.

        Returns:
            A list of row dicts (a lone dict input is wrapped in a list) with
            every ``datetime`` value formatted as ``YYYY-MM-DD HH:MM:SS``.
        """
        if isinstance(query_result, dict):
            query_result = [query_result]
        for i, result in enumerate(query_result):
            for key, value in result.items():
                if isinstance(value, datetime.datetime):
                    query_result[i][key] = value.strftime(r'%Y-%m-%d %H:%M:%S')
        return query_result

    def execute(self, query: str, *, args: Dict[str, Any]={}) -> None:
        """Connect and run ``query``.

        On success the connection is left open so the ``execute_with_fetch_*``
        helpers can read the result.  On error it is closed and the error is
        swallowed (best-effort), so callers see an exhausted cursor rather
        than an exception.
        """
        try:
            self.__connect__()
            self.cursor.execute(query=query, args=args)
        except pymysql.MySQLError as e:
            print(e)
            logging.error(e)
            self.__disconnect__()
        else:
            logging.info(f'Query execution completed (Type: {query.upper().split()[0]})')

    def executemany(self, query: str, *, args: Dict[str, Any]={}) -> None:
        """Run ``query`` once per parameter set in ``args``, then commit."""
        self.__connect__()
        self.cursor.executemany(query=query, args=args)
        self.__disconnect__()

    def execute_with_fetch_one(self, query: str, *, args: Dict[str, Any]={}) -> Dict[str, Union[str, int, float]]:
        """Run ``query`` and return the first result row as a dict."""
        self.execute(query=query, args=args)
        # NOTE(review): fetchone() may return None for an empty result set,
        # which would make _clean_datetime raise -- confirm callers guarantee
        # at least one row.
        response: dict = self.cursor.fetchone()
        self.__disconnect__()
        clean_response = self._clean_datetime(response)
        return clean_response[0]

    def execute_with_fetch_many(self, query: str, size: int, *, args: Dict[str, Any]={}) -> Dict[str, Union[str, int, float]]:
        """Run ``query`` and return at most ``size`` result rows."""
        self.execute(query=query, args=args)
        response: dict = self.cursor.fetchmany(size=size)
        self.__disconnect__()
        clean_response = self._clean_datetime(response)
        return clean_response

    def execute_with_fetch_all(self, query: str, *, args: Dict[str, Any]={}) -> Dict[str, Union[str, int, float]]:
        """Run ``query`` and return every result row."""
        self.execute(query=query, args=args)
        response: dict = self.cursor.fetchall()
        clean_response = self._clean_datetime(response)
        self.__disconnect__()
        return clean_response

    @_log_file_write
    def execute_save_json(self, query: str, path: str, *, args: Dict[str, Any]={}, encoding='utf-8') -> str:
        """Run ``query`` and dump the rows to ``path`` as pretty JSON."""
        response = self.execute_with_fetch_all(query=query, args=args)
        with open(file=path, mode='w', encoding=encoding) as f:
            f.write(json.dumps(response, indent='\t', ensure_ascii=False))
        return path

    @_log_file_write
    def execute_save_yaml(self, query: str, path: str, *, args: Dict[str, Any]={}, encoding='utf-8') -> str:
        """Run ``query`` and dump the rows to ``path`` as YAML."""
        response = self.execute_with_fetch_all(query=query, args=args)
        with open(file=path, mode='w', encoding=encoding) as f:
            f.write(yaml.dump(response, indent=4, allow_unicode=True))
        return path

    def execute_save_yml(self, query: str, path: str, *, args: Dict[str, Any]={}, encoding='utf-8') -> str:
        """Alias for :meth:`execute_save_yaml` with a ``.yml``-style name."""
        # Bug fix: propagate the saved path (the original dropped the return
        # value, so callers always received None).
        return self.execute_save_yaml(query=query, path=path, args=args, encoding=encoding)
if __name__ == "__main__":
    # Ad-hoc smoke test: requires a local ./config.py exposing Config.mysql
    # and a reachable MySQL server with a `users` table; writes results
    # under ./results/ (the directory must already exist).
    from config import Config
    mysql = MySQL(Config.mysql)
    # response = mysql.execute_with_fetch_all(query='select * from users')
    mysql.execute_save_json(query='select * from users', path='./results/sql.json')
    mysql.execute_save_yml(query='select * from users', path='./results/sql.yml')
|
/**
* Autogenerated by Thrift Compiler (0.9.1)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package bisondb.generated;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// Thrift-generated struct/exception: do not hand-edit; regenerate from the
// .thrift IDL instead.  Comments below were added for reader orientation only.
public class DomainNotFoundException extends TException implements org.apache.thrift.TBase<DomainNotFoundException, DomainNotFoundException._Fields>, java.io.Serializable, Cloneable, Comparable<DomainNotFoundException> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DomainNotFoundException");
  private static final org.apache.thrift.protocol.TField DOMAIN_FIELD_DESC = new org.apache.thrift.protocol.TField("domain", org.apache.thrift.protocol.TType.STRING, (short)1);
  // Serialization strategy registry: standard (field-tagged) vs tuple (compact).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new DomainNotFoundExceptionStandardSchemeFactory());
    schemes.put(TupleScheme.class, new DomainNotFoundExceptionTupleSchemeFactory());
  }
  private String domain; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    DOMAIN((short)1, "domain");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // DOMAIN
          return DOMAIN;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Field metadata used by generic Thrift tooling (reflection over fields).
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.DOMAIN, new org.apache.thrift.meta_data.FieldMetaData("domain", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(DomainNotFoundException.class, metaDataMap);
  }
  public DomainNotFoundException() {
  }
  public DomainNotFoundException(
    String domain)
  {
    this();
    this.domain = domain;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public DomainNotFoundException(DomainNotFoundException other) {
    if (other.is_set_domain()) {
      this.domain = other.domain;
    }
  }
  public DomainNotFoundException deepCopy() {
    return new DomainNotFoundException(this);
  }
  @Override
  public void clear() {
    this.domain = null;
  }
  public String get_domain() {
    return this.domain;
  }
  public void set_domain(String domain) {
    this.domain = domain;
  }
  public void unset_domain() {
    this.domain = null;
  }
  /** Returns true if field domain is set (has been assigned a value) and false otherwise */
  public boolean is_set_domain() {
    return this.domain != null;
  }
  public void set_domain_isSet(boolean value) {
    if (!value) {
      this.domain = null;
    }
  }
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case DOMAIN:
      if (value == null) {
        unset_domain();
      } else {
        set_domain((String)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case DOMAIN:
      return get_domain();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case DOMAIN:
      return is_set_domain();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof DomainNotFoundException)
      return this.equals((DomainNotFoundException)that);
    return false;
  }
  public boolean equals(DomainNotFoundException that) {
    if (that == null)
      return false;
    boolean this_present_domain = true && this.is_set_domain();
    boolean that_present_domain = true && that.is_set_domain();
    if (this_present_domain || that_present_domain) {
      if (!(this_present_domain && that_present_domain))
        return false;
      if (!this.domain.equals(that.domain))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    HashCodeBuilder builder = new HashCodeBuilder();
    boolean present_domain = true && (is_set_domain());
    builder.append(present_domain);
    if (present_domain)
      builder.append(domain);
    return builder.toHashCode();
  }
  @Override
  public int compareTo(DomainNotFoundException other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(is_set_domain()).compareTo(other.is_set_domain());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_domain()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.domain, other.domain);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("DomainNotFoundException(");
    boolean first = true;
    sb.append("domain:");
    if (this.domain == null) {
      sb.append("null");
    } else {
      sb.append(this.domain);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (!is_set_domain()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'domain' is unset! Struct:" + toString());
    }
    // check for sub-struct validity
  }
  // Java serialization bridges to Thrift's compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class DomainNotFoundExceptionStandardSchemeFactory implements SchemeFactory {
    public DomainNotFoundExceptionStandardScheme getScheme() {
      return new DomainNotFoundExceptionStandardScheme();
    }
  }
  private static class DomainNotFoundExceptionStandardScheme extends StandardScheme<DomainNotFoundException> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, DomainNotFoundException struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // DOMAIN
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.domain = iprot.readString();
              struct.set_domain_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, DomainNotFoundException struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.domain != null) {
        oprot.writeFieldBegin(DOMAIN_FIELD_DESC);
        oprot.writeString(struct.domain);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class DomainNotFoundExceptionTupleSchemeFactory implements SchemeFactory {
    public DomainNotFoundExceptionTupleScheme getScheme() {
      return new DomainNotFoundExceptionTupleScheme();
    }
  }
  private static class DomainNotFoundExceptionTupleScheme extends TupleScheme<DomainNotFoundException> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, DomainNotFoundException struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      oprot.writeString(struct.domain);
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, DomainNotFoundException struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      struct.domain = iprot.readString();
      struct.set_domain_isSet(true);
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.