text stringlengths 1 1.05M |
|---|
<gh_stars>1-10
# ----------------------------------------------------------------------------
# Python Wires Tests
# ----------------------------------------------------------------------------
# Copyright (c) <NAME>.
# See LICENSE for details.
# ----------------------------------------------------------------------------
"""
Shared Wires instance caller/callee tests.
"""
from __future__ import absolute_import
import unittest
from . import mixin_use_shared_instance, mixin_test_coupling
class TestWiresCoupling(mixin_use_shared_instance.UseSharedInstanceMixin,
                        mixin_test_coupling.TestCouplingMixin,
                        unittest.TestCase):
    """
    Call-time coupling tests for the shared Wires instance.
    """
    # All test methods come from TestCouplingMixin; UseSharedInstanceMixin
    # supplies the shared Wires instance they run against (mixin internals
    # live in the sibling modules imported above).
# ----------------------------------------------------------------------------
|
def gcd(x, y):
    """Return the greatest common divisor of two integers.

    Replaces the original trial-division scan (O(min(x, y)), and which
    shadowed the function name with a local and raised UnboundLocalError
    for negative inputs) with the iterative Euclidean algorithm,
    O(log min(x, y)).  Matches the original for non-negative inputs:
    gcd(0, y) == y, gcd(x, 0) == x, gcd(0, 0) == 0; negative inputs are
    now handled via their absolute values.
    """
    x, y = abs(x), abs(y)
    while y:
        x, y = y, x % y
    return x
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part2.sh
# Description: OpenWrt DIY script part 2 (After Update feeds)
#
# Modify default IP
# The line below (kept for reference) used 192.168.50.5; the active one
# rewrites OpenWrt's default LAN address to 192.168.50.88 in the firstboot
# config generator.
#sed -i 's/192.168.1.1/192.168.50.5/g' package/base-files/files/bin/config_generate
sed -i 's/192.168.1.1/192.168.50.88/g' package/base-files/files/bin/config_generate
|
<reponame>ooooo-youwillsee/leetcode<filename>lcof_014/cpp_014-1/Solution1.h
//
// Created by ooooo on 2020/3/11.
//
#ifndef CPP_014_1__SOLUTION1_H_
#define CPP_014_1__SOLUTION1_H_
#include <iostream>
#include <unordered_map>
using namespace std;
/**
* dfs + memo
*/
// Memoized DFS over segment lengths: best product obtainable by cutting a rope.
class Solution {
public:
    /**
     * Maximum product for the rope segment [l, r).
     *
     * @param flag when true the segment may be left whole and is then worth
     *             its full length; the top-level call passes false because
     *             at least one cut is required.
     * @return the best achievable product of piece lengths
     */
    int dfs(int l, int r, bool flag) {
        const int len = r - l;
        // Results depend only on the segment length, so cache by length.
        if (memo.count(len)) return memo[len];
        if (len == 1) return 1;
        int best = flag ? len : 1;
        for (int cut = l + 1; cut < r; ++cut) {
            const int candidate = dfs(l, cut, true) * dfs(cut, r, true);
            if (candidate > best) best = candidate;
        }
        return memo[len] = best;
    }
    unordered_map<int, int> memo;  // segment length -> best product
    int cuttingRope(int n) {
        return dfs(0, n, false);
    }
};
#endif //CPP_014_1__SOLUTION1_H_
|
<html>
<head>
<title>Clock</title>
<script type="text/javascript">
// Render the current wall-clock time into #txt and reschedule itself so the
// display keeps ticking.
function startTime() {
  var today = new Date();
  var h = today.getHours();
  var m = checkTime(today.getMinutes());
  var s = checkTime(today.getSeconds());
  document.getElementById('txt').innerHTML =
    h + ":" + m + ":" + s;
  // Re-enabled: this line was commented out, so the "clock" rendered once at
  // load and never updated.
  setTimeout(startTime, 1000);
}
// Zero-pad time components below 10 so the clock reads "09:05" rather than
// "9:5". Values >= 10 are returned unchanged (as numbers); padded values are
// returned as strings — both concatenate identically in startTime.
function checkTime(i) {
  if (i < 10) {
    i = "0" + i;
  } // removed stray empty statement (`};`) after the block
  return i;
}
</script>
</head>
<body onload="startTime()" style="background-color: red;">
<div id="txt" style="font-size: 20px; color: white; font-family: sans-serif; position: absolute; top: 50%; left: 50%; transform: translate(-50%, -50%);"></div>
</body>
</html> |
const mongoose = require('mongoose');

// Schema for a grocery item offered by a supermarket.
// NOTE(review): "superMarkerName" and "producDescription" look like typos for
// superMarketName / productDescription, but renaming them would change the
// stored document shape — confirm with every consumer before fixing.
const foodSchema = new mongoose.Schema({
  superMarkerName: { type: String },
  producDescription: { type: String },
  availablePresentation: { type: String },
  price: { type: Number }
});

module.exports = mongoose.model('Food', foodSchema);
|
import './bootstrap-vue' |
#!/bin/bash
#
# Moveit script: rotate RTB4FREE logs off the remote host and archive them
# locally under $WORKDIR/logs/<host>.
#
# Place in /etc/cron/hourly
# Be sure to chmod +x
#
# Abort on the first failed command: without this, a failed snapshot `cp`
# would still be followed by `truncate`, destroying the un-copied log.
set -e

CERT=~ben/certs/rtb4free_key.pem
RTB=ubuntu@rtb4free.com
WORKDIR=.
DATE=$(date +%Y%m%d)
TIME=$(date +%T)

mkdir -p "$WORKDIR/logs/$RTB"

# Rotate the main log: snapshot, truncate in place, pull the snapshot, clean up.
ssh -i "$CERT" "$RTB" sudo cp /var/log/rtb4free.log "/var/log/rtb4free.log.$DATE.$TIME"
ssh -i "$CERT" "$RTB" sudo truncate -s0 /var/log/rtb4free.log
scp -i "$CERT" "$RTB:/var/log/rtb4free.log.$DATE.$TIME" "$WORKDIR/logs/$RTB"
ssh -i "$CERT" "$RTB" sudo rm "/var/log/rtb4free.log.$DATE.$TIME"

# Same rotation for the XRTB request log (the remote snapshot is deliberately
# left in place here, matching the original behavior).
ssh -i "$CERT" "$RTB" sudo cp XRTB/logs/request "XRTB/logs/request.$DATE.$TIME"
ssh -i "$CERT" "$RTB" sudo truncate -s0 XRTB/logs/request
scp -i "$CERT" "$RTB:XRTB/logs/request.$DATE.$TIME" "$WORKDIR/logs/$RTB"
|
# Upper bound on representable values, taken from the shared CONFIG mapping.
MAX_EXAMPLES = CONFIG["max-examples"]


def to_bin(x):
    """Return x in binary without the '0b' prefix, or a message past the cap."""
    return "Exceeds maximum value" if x > MAX_EXAMPLES else bin(x)[2:]


def to_oct(x):
    """Return x in octal without the '0o' prefix, or a message past the cap."""
    return "Exceeds maximum value" if x > MAX_EXAMPLES else oct(x)[2:]
def binary_search(arr, target):
    """Return the index of ``target`` in sorted ``arr``, or -1 if absent.

    Classic iterative binary search; assumes ``arr`` is sorted ascending.
    """
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        value = arr[mid]
        if value == target:
            return mid
        if value < target:
            lo = mid + 1
        else:
            hi = mid - 1
    return -1
#!/bin/sh
set -e

# Provision a Fedora image with VTK's build/test dependencies.
# --setopt=install_weak_deps=False keeps the image minimal by skipping
# weak ("recommended") dependencies.  The RPMFusion release package and the
# OpenTURNS repo below are pinned to Fedora 33.

# Install extra dependencies for VTK
dnf install -y --setopt=install_weak_deps=False \
    bzip2 patch git-core git-lfs
# Documentation tools
dnf install -y --setopt=install_weak_deps=False \
    doxygen perl-Digest-MD5
# Development tools
dnf install -y --setopt=install_weak_deps=False \
    libasan libtsan libubsan clang-tools-extra \
    ninja-build
# MPI dependencies
dnf install -y --setopt=install_weak_deps=False \
    openmpi-devel mpich-devel
# Qt dependencies
dnf install -y --setopt=install_weak_deps=False \
    qt5-qtbase-devel qt5-qttools-devel qt5-qtquickcontrols2-devel
# Mesa dependencies
dnf install -y --setopt=install_weak_deps=False \
    mesa-libOSMesa-devel mesa-libOSMesa mesa-dri-drivers mesa-libGL* glx-utils
# External dependencies
# NOTE(review): hdf5-devel is listed twice in this group (harmless to dnf).
dnf install -y --setopt=install_weak_deps=False \
    libXcursor-devel libharu-devel utf8cpp-devel pugixml-devel libtiff-devel \
    eigen3-devel double-conversion-devel lz4-devel expat-devel glew-devel \
    hdf5-devel hdf5-mpich-devel hdf5-openmpi-devel hdf5-devel netcdf-devel \
    netcdf-mpich-devel netcdf-openmpi-devel libogg-devel libtheora-devel \
    jsoncpp-devel gl2ps-devel protobuf-devel libxkbcommon libxcrypt-compat \
    boost-devel tbb-devel postgresql-server-devel libpq-devel mariadb-devel \
    libiodbc-devel PDAL-devel liblas-devel openslide-devel libarchive-devel \
    freeglut-devel sqlite-devel PEGTL-devel cgnslib-devel proj-devel \
    wkhtmltopdf cli11-devel fmt-devel
# Python dependencies
dnf install -y --setopt=install_weak_deps=False \
    python3-twisted python3-autobahn python3 python3-devel python3-numpy \
    python3-pip python3-mpi4py-mpich python3-mpi4py-openmpi python3-matplotlib
# wslink is not packaged by Fedora, so it comes from pip.
python3 -m pip install wslink
# Java dependencies
dnf install -y --setopt=install_weak_deps=False \
    java-openjdk-devel
# RPMFusion (for ffmpeg)
dnf install -y --setopt=install_weak_deps=False \
    https://mirrors.rpmfusion.org/free/fedora/rpmfusion-free-release-33.noarch.rpm
# RPMFusion external dependencies
dnf install -y --setopt=install_weak_deps=False \
    ffmpeg-devel
# External repository support
dnf install -y --setopt=install_weak_deps=False \
    dnf-plugins-core
# Openturns dependencies
dnf config-manager --add-repo https://download.opensuse.org/repositories/science:/openturns/Fedora_33/science:openturns.repo
dnf install -y --setopt=install_weak_deps=False \
    openturns-libs openturns-devel
# Drop package caches to keep the image small.
dnf clean all
|
<gh_stars>0
'use strict';
var path = process.cwd();
var shortener = require(path + '/app/api/shortener.js');
module.exports = function (app) {
app.route('/new/*/')
.get(shortener)
.post(shortener)
app.get('/', function(req, res) {
res.sendFile(path + '/public/index.html');
})
};
|
<filename>src/TeamContributionCalendar/TeamContributionCalendar.test.js<gh_stars>1-10
import { expect } from "chai";
import sinon from "sinon";
import jsdom from "mocha-jsdom";
import TeamContributionCalendar from "./TeamContributionCalendar";
import * as getStyledCalendarElement from "../utils/GetStyledCalendarElement/GetStyledCalendarElement";
import * as gitHubUtils from "../utils/GitHubUtils/GitHubUtils";
import * as gitLabUtils from "../utils/GitLabUtils/GitLabUtils";
import * as calendarUtils from "../utils/CalendarUtils/CalendarUtils";
import * as testUtils from "../utils/TestUtils/TestUtils";
import BasicCalendar from "../resources/BasicCalendar/BasicCalendar.json";
import * as defaultUsers from "../resources/DefaultUsers/DefaultUsers";
import elementIds from "../resources/ElementIds/ElementIds";
describe("TeamContributionCalendar", () => {
jsdom({
url: "https://example.org/"
});
const sandbox = sinon.createSandbox();
const testParams = testUtils.getTestParams();
let teamContributionCalendar;
beforeEach(() => {
teamContributionCalendar = new TeamContributionCalendar(
testParams.container,
testParams.gitHubUsers,
testParams.gitLabUsers,
testParams.proxyServerUrl
);
});
afterEach(() => {
sandbox.restore();
});
it("sets the given container and proxy server url into `configs`", () => {
const expectedConfig = {
container: testParams.container,
proxyServerUrl: testParams.proxyServerUrl
};
expect(teamContributionCalendar.configs).to.eql(expectedConfig);
});
it("sets the GH and GL users into `users`", () => {
const expectedUsers = {
gitHub: [...testParams.gitHubUsers],
gitLab: [...testParams.gitLabUsers]
};
expect(teamContributionCalendar.users).to.eql(expectedUsers);
});
it("sets the actual calendar to `BasicCalendar` by default", () => {
expect(teamContributionCalendar.actualSvg).to.equal(BasicCalendar);
});
it("sets the total contributions to 0 by default", () => {
expect(teamContributionCalendar.totalContributions).to.equal(0);
});
it("sets `isLoading` to true by default", () => {
expect(teamContributionCalendar.isLoading).to.equal(true);
});
describe("renderBasicAppearance", () => {
let renderSvgStub;
let renderHeaderStub;
let updateHeaderStub;
let getJsonFormattedCalendarSyncStub;
let initializeStub;
beforeEach(() => {
getJsonFormattedCalendarSyncStub = sandbox
.stub(gitHubUtils, "getJsonFormattedCalendarSync")
.returns({
error: false
});
initializeStub = sandbox.stub(gitHubUtils, "initialize");
renderSvgStub = sandbox.stub(
TeamContributionCalendar.prototype,
"renderSvg"
);
renderHeaderStub = sandbox.stub(
TeamContributionCalendar.prototype,
"renderHeader"
);
updateHeaderStub = sandbox.stub(
TeamContributionCalendar.prototype,
"updateHeader"
);
});
it("renders the calendar's SVG", () => {
teamContributionCalendar.renderBasicAppearance();
expect(renderSvgStub.calledOnce).to.equal(true);
});
it("renders the calendar's header", () => {
teamContributionCalendar.renderBasicAppearance();
expect(renderHeaderStub.calledOnce).to.equal(true);
});
it("fetches the default GH user`s calendar synchronously", async () => {
await teamContributionCalendar.renderBasicAppearance();
expect(
getJsonFormattedCalendarSyncStub.calledWithExactly(
testParams.proxyServerUrl,
defaultUsers.gitHub
)
).to.equal(true);
});
describe("when no users are passed via the configs", () => {
it("hides the loading indicator", async () => {
const calendarWithoutUsers = new TeamContributionCalendar(
".container",
[],
[]
);
await calendarWithoutUsers.renderBasicAppearance();
expect(
updateHeaderStub.calledWithExactly({
isLoading: false
})
).to.equal(true);
});
});
describe("when the fetch fails", () => {
const defaultUserData = {
error: true,
errorMessage: "Could not fetch the calendar of the default user."
};
beforeEach(() => {
getJsonFormattedCalendarSyncStub.returns(defaultUserData);
});
it("hides the loading indicator", async () => {
try {
await teamContributionCalendar.renderBasicAppearance();
} catch (err) {
expect(
updateHeaderStub.calledWithExactly({
isLoading: false
})
).to.equal(true);
}
});
it("throws the error", () => {
return teamContributionCalendar.renderBasicAppearance().catch(err => {
expect(err.message).to.equal(defaultUserData.errorMessage);
});
});
});
describe("when the fetch does not fail", () => {
let updateSvgStub;
const defaultUserData = {
parsedCalendar: testUtils.getFakeContributionsObjectWithDailyCounts({
"2019-01-20": 12
}),
error: false,
errorMessage: null
};
const defaultUserEmptyCalendar = testUtils.getFakeContributionsObjectWithDailyCounts(
{
"2019-01-20": 0
}
);
beforeEach(() => {
initializeStub.returns(defaultUserEmptyCalendar);
updateSvgStub = sandbox.stub(
TeamContributionCalendar.prototype,
"updateSvg"
);
getJsonFormattedCalendarSyncStub.returns(defaultUserData);
});
it("empties the default user's calendar data", async () => {
await teamContributionCalendar.renderBasicAppearance();
expect(
initializeStub.calledWithExactly(defaultUserData.parsedCalendar)
).to.equal(true);
});
it("updates the calendar's appearance with the emptied calendar", async () => {
await teamContributionCalendar.renderBasicAppearance();
expect(
updateSvgStub.calledWithExactly({
updatedSvg: defaultUserEmptyCalendar
})
).to.equal(true);
});
});
});
describe("updateHeader", () => {
let renderHeaderStub;
beforeEach(() => {
renderHeaderStub = sandbox.stub(
TeamContributionCalendar.prototype,
"renderHeader"
);
});
describe("when `isLoading` is defined", () => {
const dataWithIsLoading = {
isLoading: false
};
it("updates the loading state to the received value", () => {
teamContributionCalendar.updateHeader(dataWithIsLoading);
expect(teamContributionCalendar.isLoading).to.equal(
dataWithIsLoading.isLoading
);
});
});
describe("when `isLoading` is not defined", () => {
it("does not update the loading state", () => {
const previousLoadingState = teamContributionCalendar.isLoading;
teamContributionCalendar.updateHeader({});
expect(teamContributionCalendar.isLoading).to.equal(
previousLoadingState
);
});
});
describe("when `contributions` is defined", () => {
const dataWithContributions = {
contributions: 100
};
it("increments the total contributions with the received value", () => {
const expectedTotalContributions =
teamContributionCalendar.totalContributions +
dataWithContributions.contributions;
teamContributionCalendar.updateHeader(dataWithContributions);
expect(teamContributionCalendar.totalContributions).to.equal(
expectedTotalContributions
);
});
});
describe("when `contributions` is not defined", () => {
it("does not increment the total contributions", () => {
const previousTotalContributions =
teamContributionCalendar.totalContributions;
teamContributionCalendar.updateHeader({});
expect(teamContributionCalendar.totalContributions).to.equal(
previousTotalContributions
);
});
});
it("re-renders the header", () => {
teamContributionCalendar.updateHeader({});
expect(renderHeaderStub.calledOnce).to.equal(true);
});
});
describe("updateSvg", () => {
let renderSvgStub;
beforeEach(() => {
renderSvgStub = sandbox.stub(
TeamContributionCalendar.prototype,
"renderSvg"
);
});
describe("when `updatedSvg` is defined", () => {
const dataWithUpdatedSvg = {
updatedSvg: testUtils.getFakeContributionsObjectWithDailyCounts({
"2019-10-28": 5
})
};
it("updates the actual SVG", () => {
const previousActualSvg = {
...teamContributionCalendar.actualSvg
};
teamContributionCalendar.updateSvg(dataWithUpdatedSvg);
expect(teamContributionCalendar.actualSvg).to.eql({
...previousActualSvg,
...dataWithUpdatedSvg.updatedSvg
});
});
});
describe("when `updateSvg` is not defined", () => {
it("does not update the actual SVG", () => {
const previousActualSvg = {
...teamContributionCalendar.actualSvg
};
teamContributionCalendar.updateSvg({});
expect(teamContributionCalendar.actualSvg).to.eql(previousActualSvg);
});
});
it("re-renders the SVG", () => {
teamContributionCalendar.updateSvg({});
expect(renderSvgStub.calledOnce).to.equal(true);
});
});
describe("renderHeader", () => {
let elementExistsStub;
beforeEach(() => {
elementExistsStub = sandbox.stub(calendarUtils, "elementExists");
});
describe("when the container does not exist", () => {
beforeEach(() => {
elementExistsStub
.withArgs(teamContributionCalendar.configs.container)
.returns(false);
});
it("throws an error", () => {
expect(() => teamContributionCalendar.renderHeader()).to.throw(
"The given container does not exist."
);
});
});
describe("when the container exists", () => {
let headerStub;
let containerStub;
let replaceChildSpy;
let prependSpy;
const newHeader = "newHeader";
const previousHeader = "previousHeader";
beforeEach(() => {
elementExistsStub
.withArgs(teamContributionCalendar.configs.container)
.returns(true);
replaceChildSpy = sandbox.spy();
prependSpy = sandbox.spy();
headerStub = sandbox
.stub(getStyledCalendarElement, "header")
.returns(newHeader);
containerStub = sandbox
.stub(getStyledCalendarElement, "container")
.returns({
prepend: prependSpy,
replaceChild: replaceChildSpy
});
sandbox.stub(document, "getElementById").returns(previousHeader);
});
it("gets the styled calendar container", () => {
teamContributionCalendar.renderHeader();
expect(
containerStub.calledWithExactly(
teamContributionCalendar.configs.container
)
).to.equal(true);
});
it("generates the new header", () => {
teamContributionCalendar.renderHeader();
expect(
headerStub.calledWithExactly(
teamContributionCalendar.totalContributions,
teamContributionCalendar.isLoading
)
).to.equal(true);
});
describe("when the calendar's header exists", () => {
beforeEach(() => {
elementExistsStub.withArgs(`#${elementIds.HEADER}`).returns(true);
});
it("replaces the previous header", () => {
teamContributionCalendar.renderHeader();
expect(
replaceChildSpy.calledWithExactly(newHeader, previousHeader)
).to.equal(true);
});
});
describe("when the calendar's header does not exist", () => {
beforeEach(() => {
elementExistsStub.withArgs(`#${elementIds.HEADER}`).returns(false);
});
it("prepends the header to the container", () => {
teamContributionCalendar.renderHeader();
expect(prependSpy.calledWithExactly(newHeader)).to.equal(true);
});
});
});
});
describe("renderSvg", () => {
let elementExistsStub;
beforeEach(() => {
elementExistsStub = sandbox.stub(calendarUtils, "elementExists");
});
describe("when the container does not exist", () => {
beforeEach(() => {
elementExistsStub.returns(false);
});
it("throws an error", () => {
expect(() => teamContributionCalendar.renderSvg()).to.throw(
"The given container does not exist."
);
});
});
describe("when the container exists", () => {
let containerStub;
let svgContainerStub;
let replaceChildSpy;
let appendChildSpy;
beforeEach(() => {
elementExistsStub
.withArgs(teamContributionCalendar.configs.container)
.returns(true);
replaceChildSpy = sandbox.spy();
appendChildSpy = sandbox.spy();
containerStub = sandbox
.stub(getStyledCalendarElement, "container")
.returns({
replaceChild: replaceChildSpy,
appendChild: appendChildSpy
});
svgContainerStub = sandbox
.stub(getStyledCalendarElement, "svgContainer")
.returns({
innerHTML: null
});
});
it("gets the styled calendar container", () => {
teamContributionCalendar.renderSvg();
expect(
containerStub.calledWithExactly(
teamContributionCalendar.configs.container
)
).to.equal(true);
});
it("generates a new SVG container", () => {
teamContributionCalendar.renderSvg();
expect(svgContainerStub.calledOnce).to.equal(true);
});
describe("when the calendar's SVG container already exists", () => {
const previousSvgContainer = "previousSvgContainer";
beforeEach(() => {
elementExistsStub
.withArgs(`#${elementIds.SVG_CONTAINER}`)
.returns(true);
sandbox
.stub(document, "getElementById")
.returns(previousSvgContainer);
});
it("replaces the previous container", () => {
teamContributionCalendar.renderSvg();
expect(replaceChildSpy.calledOnce).to.equal(true);
});
});
describe("when the calendar's SVG container does not exist", () => {
beforeEach(() => {
elementExistsStub
.withArgs(`#${elementIds.SVG_CONTAINER}`)
.returns(false);
});
it("appends the container and the tooltips to the calendar", () => {
teamContributionCalendar.renderSvg();
expect(appendChildSpy.calledTwice).to.equal(true);
});
});
});
});
describe("aggregateUserCalendars", () => {
let gitHubGetJsonFormattedCalendarAsyncStub;
let gitLabGetJsonFormattedCalendarAsyncStub;
let processCalendarStub;
let consoleErrorStub;
beforeEach(() => {
gitHubGetJsonFormattedCalendarAsyncStub = sandbox.stub(
gitHubUtils,
"getJsonFormattedCalendarAsync"
);
gitLabGetJsonFormattedCalendarAsyncStub = sandbox.stub(
gitLabUtils,
"getJsonFormattedCalendarAsync"
);
processCalendarStub = sandbox.stub(
TeamContributionCalendar.prototype,
"processCalendar"
);
consoleErrorStub = sandbox.stub(console, "error");
});
describe("GitHub", () => {
it("initiates queries to fetch the calendars of the provided users", () => {
teamContributionCalendar.aggregateUserCalendars();
expect(gitHubGetJsonFormattedCalendarAsyncStub.callCount).to.equal(
teamContributionCalendar.users.gitHub.length
);
});
describe("when a query fails", () => {
it("logs an error to the console", async () => {
const errorMessage = "Error while fetching calendar for FOO";
gitHubGetJsonFormattedCalendarAsyncStub.returns({
error: true,
errorMessage
});
await teamContributionCalendar.aggregateUserCalendars();
expect(consoleErrorStub.calledWithExactly(errorMessage)).to.equal(
true
);
});
});
describe("when a query does not fail", () => {
const data = {
parsedCalendar: testUtils.getFakeContributionsObjectWithDailyCounts({
"2019-03-19": 5,
"2019-03-20": 10
}),
error: false
};
let dailyDataWithContributionsTransformationStub;
let filterByTimeframeStub;
beforeEach(() => {
dailyDataWithContributionsTransformationStub = sandbox.stub(
gitHubUtils,
"dailyDataWithContributionsTransformation"
);
filterByTimeframeStub = sandbox.stub(
calendarUtils,
"filterByTimeframe"
);
gitHubGetJsonFormattedCalendarAsyncStub.returns(data);
});
it("transforms the 'noisy' calendar to a 'date-contributions' object", async () => {
await teamContributionCalendar.aggregateUserCalendars();
expect(
dailyDataWithContributionsTransformationStub.calledWithExactly(
data.parsedCalendar
)
).to.equal(true);
});
it("removes the dates falling out of the specified timeframe", async () => {
const dailyDataWithContributions = {
"2019-03-19": 5,
"2019-03-20": 10
};
dailyDataWithContributionsTransformationStub.returns(
dailyDataWithContributions
);
await teamContributionCalendar.aggregateUserCalendars();
expect(
filterByTimeframeStub.calledWith(dailyDataWithContributions)
).to.equal(true);
});
it("processes the calendar", async () => {
const filteredDailyDataWithContributions = { "2019-03-20": 10 };
filterByTimeframeStub.returns(filteredDailyDataWithContributions);
await teamContributionCalendar.aggregateUserCalendars();
expect(
processCalendarStub.calledWithExactly(
filteredDailyDataWithContributions
)
).to.equal(true);
});
});
});
describe("GitLab", () => {
it("initiates queries to fetch the calendars of the provided users", () => {
teamContributionCalendar.aggregateUserCalendars();
expect(gitLabGetJsonFormattedCalendarAsyncStub.callCount).to.equal(
teamContributionCalendar.users.gitLab.length
);
});
describe("when a query fails", () => {
it("logs an error to the console", async () => {
const errorMessage = "Error while fetching calendar for FOO";
gitLabGetJsonFormattedCalendarAsyncStub.returns({
error: true,
errorMessage
});
await teamContributionCalendar.aggregateUserCalendars();
expect(consoleErrorStub.calledWithExactly(errorMessage)).to.equal(
true
);
});
});
describe("when a query does not fail", () => {
const data = {
parsedCalendar: {
"2019-03-19": 5,
"2019-03-20": 10
},
error: false
};
let filterByTimeframeStub;
beforeEach(() => {
filterByTimeframeStub = sandbox.stub(
calendarUtils,
"filterByTimeframe"
);
gitLabGetJsonFormattedCalendarAsyncStub.returns(data);
});
it("removes the dates falling out of the specified timeframe", async () => {
await teamContributionCalendar.aggregateUserCalendars();
expect(
filterByTimeframeStub.calledWith(data.parsedCalendar)
).to.equal(true);
});
it("processes the calendar", async () => {
const filteredDailyDataWithContributions = { "2019-03-20": 10 };
filterByTimeframeStub.returns(filteredDailyDataWithContributions);
await teamContributionCalendar.aggregateUserCalendars();
expect(
processCalendarStub.calledWithExactly(
filteredDailyDataWithContributions
)
).to.equal(true);
});
});
});
});
describe("processCalendar", () => {
const aggregatedCalendars = testUtils.getFakeContributionsObjectWithDailyCounts(
{
"2019-03-10": 18,
"2019-03-11": 15,
"2019-03-12": 7
}
);
const aggregatedContributions = 182;
const dailyDataWithContributions = {
"2019-03-10": 18,
"2019-03-11": 15
};
let aggregateCalendarsStub;
let aggregateContributionsStub;
let updateSvgStub;
let updateHeaderStub;
beforeEach(() => {
aggregateCalendarsStub = sandbox
.stub(calendarUtils, "aggregateCalendars")
.returns(aggregatedCalendars);
aggregateContributionsStub = sandbox
.stub(calendarUtils, "aggregateContributions")
.returns(aggregatedContributions);
updateSvgStub = sandbox.stub(
TeamContributionCalendar.prototype,
"updateSvg"
);
updateHeaderStub = sandbox.stub(
TeamContributionCalendar.prototype,
"updateHeader"
);
});
it("aggregates the calendars", () => {
teamContributionCalendar.processCalendar(dailyDataWithContributions);
expect(
aggregateCalendarsStub.calledWithExactly(
teamContributionCalendar.actualSvg,
dailyDataWithContributions
)
).to.equal(true);
});
it("aggregates the contributions", () => {
teamContributionCalendar.processCalendar(dailyDataWithContributions);
expect(
aggregateContributionsStub.calledWithExactly(dailyDataWithContributions)
).to.equal(true);
});
it("updates the SVG with the aggregated calendar", () => {
teamContributionCalendar.processCalendar(dailyDataWithContributions);
expect(updateSvgStub.calledWithExactly(aggregatedCalendars));
});
it("updates the header with the aggregated contributions", () => {
teamContributionCalendar.processCalendar(dailyDataWithContributions);
expect(
updateHeaderStub.calledWithExactly({
contributions: aggregatedContributions,
isLoading: false
})
).to.equal(true);
});
});
});
|
#!/usr/bin/env bash
set -e

# Tear down the project's conda environment.
# NOTE(review): `conda deactivate` (and sometimes `conda env remove`) requires
# the conda shell hook; in a non-interactive shell, source
# "$(conda info --base)/etc/profile.d/conda.sh" first or `set -e` will abort
# here — confirm how this script is invoked.
conda deactivate
conda env remove --name hm-neural-network
|
#include <bits/stdc++.h>
using namespace std;
int main()
{
string s;
getline(cin, s);
s.erase(0, 1);
istringstream iss(s);
int number;
iss >> number;
if (number % 2 == 0)
{
cout << 0;
}
else
{
cout << 1;
}
return 0;
} |
def calculate_bmi(weight, height):
    """Compute body-mass index (kg / m**2), rounded to two decimals.

    Raises:
        ValueError: if weight or height is not strictly positive.
    """
    if min(weight, height) <= 0:
        raise ValueError("Weight and height must be positive numbers")
    return round(weight / height ** 2, 2)
def categorize_bmi(bmi):
    """Map a BMI value onto its standard weight category label."""
    # Upper bounds are exclusive, mirroring the original chained comparisons.
    bands = (
        (18.5, "Underweight"),
        (25, "Normal weight"),
        (30, "Overweight"),
    )
    for upper, label in bands:
        if bmi < upper:
            return label
    return "Obese"
if __name__ == "__main__":
    # Interactive entry point: prompt for metric measurements, then report
    # the BMI and its category.
    try:
        weight = float(input("Enter your weight in kilograms: "))
        height = float(input("Enter your height in meters: "))
        bmi = calculate_bmi(weight, height)
        category = categorize_bmi(bmi)
        print(f"Your BMI is: {bmi}")
        print(f"You are categorized as: {category}")
    except ValueError as e:
        # Covers both non-numeric input (float() fails) and non-positive
        # values rejected by calculate_bmi.
        print(f"Error: {e}")
<reponame>EvgenyMuryshkin/dsp-playground<filename>src/lib/complex.ts
/** Complex number in Cartesian form. */
export interface IComplexNumber {
  /** Real part. */
  r: number;
  /** Imaginary part. */
  i: number;
}
package mg.blog.mapper;
import mg.blog.entity.Comment;
import mg.blog.dto.CommentDto;
import mg.utils.mapper.DateMapper;
import org.mapstruct.Mapper;
import org.mapstruct.factory.Mappers;
/**
 * MapStruct mapper converting between {@link Comment} entities and
 * {@link CommentDto} transfer objects; date conversion is delegated to
 * {@link DateMapper}. The concrete implementation is generated at build time.
 */
@Mapper(uses = {DateMapper.class})
public abstract class CommentMapper {

    /** Shared instance resolved via the MapStruct {@code Mappers} factory. */
    public static final CommentMapper INSTANCE = Mappers.getMapper(CommentMapper.class);

    /** Maps a transfer object to a {@link Comment} entity. */
    public abstract Comment mapToEntity(CommentDto dto);

    /** Maps an entity to a {@link CommentDto}. */
    public abstract CommentDto mapToDTO(Comment entity);
}
|
<reponame>tallylab/twilio-auth
// Initialize Twilio
const Twilio = require('twilio')
const client = Twilio(process.env.TWILIO_ACCOUNT_SID, process.env.TWILIO_AUTH_TOKEN)
// Initialize NaCl Crypto lib
const initNaCl = async () => {
return new Promise((resolve, reject) => {
require('js-nacl').instantiate(async (nacl) => {
resolve(nacl)
})
})
}
// Export async initialization function
module.exports = async () => {
const nacl = await initNaCl()
const sid = process.env.TWILIO_VERIFY_SID || (await client.verify.services.create({
friendlyName: process.env.APP_NAME || 'twilio-auth testing'
})).sid
console.log(sid)
const { verifications, verificationChecks } = await client.verify.services(sid)
return { client, nacl, verifications, verificationChecks }
}
|
import dash
from dash.dependencies import Input, Output
import dash_daq as daq
from dash_daq import DarkThemeProvider
import dash_html_components as html
import numpy as np
import dash_core_components as dcc
import plotly.graph_objs as go
from scipy import signal
from time import sleep
import os
# Application object and module-level mutable state shared by the callbacks
# below.
app = dash.Dash()
app.scripts.config.serve_locally = True
# Needed because callbacks reference ids that only exist in one of the two
# theme layouts at a time.
app.config['suppress_callback_exceptions'] = True
server = app.server
# Tab descriptors shown in the run selector; extended in place by the
# "new-tab" callbacks.
tabs = [
    {'label': 'Run #{}'.format(i), 'value': i} for i in range(1, 2)
]
# Currently selected tab (run) number.
tab = 1
# Maps str(tab value) -> (figure dict, "WAVE | freqHz | ampmV | offmV") for
# previously captured runs.
runs = {}
# Root page: URL tracker, light/dark theme toggle, and a container that
# page_layout() fills with light_layout or dark_layout.
root_layout = html.Div([
    dcc.Location(id='url', refresh=False),
    html.Div([
        daq.ToggleSwitch(
            id='toggleTheme',
            style={
                'position': 'absolute',
                'transform': 'translate(-50%, 20%)'
            },
            size=25
        ),
    ], id="toggleDiv",
        style={
            'width': 'fit-content',
            'margin': '0 auto'
        }),
    html.Div(id='page-content'),
])
# Light (default) theme page layout. NOTE: the component ids used here are
# intentionally duplicated in dark_layout; only one of the two layouts is
# mounted at a time (see page_layout()).
light_layout = html.Div(id='container', children=[
    # Function Generator Panel - Left
    html.Div([
        html.H2("Dash DAQ: Function Generator & Oscilloscope Control Panel",
                style={
                    'color': '#EBF0F8',
                    'marginLeft': '40px',
                    'display': 'inline-block',
                    'text-align': 'center'
                }),
        html.Img(src="https://s3-us-west-1.amazonaws.com/plotly-tutorials/" +
                     "excel/dash-daq/dash-daq-logo-by-plotly-stripe+copy.png",
                 style={
                     'position': 'relative',
                     'float': 'right',
                     'right': '10px',
                     'height': '75px'
                 })
    ], className='banner',
        id='header',
        style={
            'height': '75px',
            'margin': '0px -10px 10px',
            'backgroundColor': '#447EFF'
        }),
    html.Div([
        html.Div([
            html.Div([
                html.H3("POWER", id="power-title")
            ], className='Title'),
            html.Div([
                html.Div(
                    [
                        daq.PowerButton(
                            id='function-generator',
                            # `on` is a boolean prop; the string 'true' fails
                            # Dash's component prop validation.
                            on=True,
                            label="Function Generator",
                            labelPosition='bottom',
                            color="#447EFF"),
                    ],
                    className='six columns',
                    style={'margin-bottom': '15px'}),
                html.Div(
                    [
                        daq.PowerButton(
                            id='oscilloscope',
                            on=True,
                            label="Oscilloscope",
                            labelPosition='bottom',
                            color="#447EFF")
                    ],
                    className='six columns',
                    style={'margin-bottom': '15px'}),
            ], style={'margin': '15px 0'})
        ], className='row power-settings-tab'),
        html.Div([
            html.Div(
                [html.H3("FUNCTION", id="function-title")],
                className='Title'),
            html.Div([
                daq.Knob(
                    value=1E6,
                    id="frequency-input",
                    label="Frequency (Hz)",
                    labelPosition="bottom",
                    size=75,
                    color="#447EFF",
                    scale={'interval': 1E5},
                    max=2.5E6,
                    min=1E5,
                    className='four columns'
                ),
                daq.Knob(
                    value=1,
                    id="amplitude-input",
                    label="Amplitude (mV)",
                    labelPosition="bottom",
                    size=75,
                    scale={'labelInterval': 10},
                    color="#447EFF",
                    max=10,
                    className='four columns'
                ),
                daq.Knob(
                    value=0,
                    id="offset-input",
                    label="Offset (mV)",
                    labelPosition="bottom",
                    size=75,
                    scale={'labelInterval': 10},
                    color="#447EFF",
                    max=10,
                    className='four columns'
                )], style={'marginLeft': '20%', 'textAlign': 'center'}),
            html.Div([
                daq.LEDDisplay(
                    id='frequency-display',
                    size=10, value=1E6,
                    label="Frequency (Hz)",
                    labelPosition="bottom",
                    color="#447EFF",
                    style={'marginBottom': '30px'},
                    className='four columns'),
                daq.LEDDisplay(
                    id='amplitude-display',
                    size=10,
                    value=1,
                    label="Amplitude (mV)",
                    labelPosition="bottom",
                    color="#447EFF",
                    className='four columns'),
                daq.LEDDisplay(
                    id='offset-display',
                    size=10,
                    value=10,
                    label="Offset (mV)",
                    labelPosition="bottom",
                    color="#447EFF",
                    className='four columns'),
            ], style={'marginLeft': '20%', 'textAlign': 'center'}),
            dcc.RadioItems(
                id='function-type',
                options=[
                    {'label': 'Sine', 'value': 'SIN'},
                    {'label': 'Square', 'value': 'SQUARE'},
                    {'label': 'Ramp', 'value': 'RAMP'},
                ],
                value='SIN',
                labelStyle={'display': 'inline-block'},
                style={'margin': '30px auto 0px auto',
                       'display': 'flex',
                       'width': '80%',
                       'alignItems': 'center',
                       'justifyContent': 'space-between'}
            )
        ], className='row power-settings-tab'),
        html.Hr(),
        daq.ColorPicker(
            id="color-picker",
            label="Color Picker",
            value=dict(hex="#447EFF"),
            size=164,
        ),
    ], className='four columns left-panel'),
    # Oscillator Panel - Right
    html.Div([
        html.Div([html.H3("GRAPH", id="graph-title")], className='Title'),
        dcc.Tabs(
            tabs=tabs,
            value=1,
            id='tabs',
            style={'backgroundColor': '#447EFF', 'height': '80%'},
        ),
        html.Div([
            html.Div([
                html.Div([
                    html.Div(
                        id="graph-info",
                        style={
                            'textAlign': 'center',
                            'fontSize': '16px', 'padding': '0px 5px',
                            'lineHeight': '20px',
                            'border': '2px solid #447EFF'}),
                ], className="row graph-param"),
            ], className="six columns"),
            html.Button('+',
                        id='new-tab',
                        type='submit',
                        style={'height': '20px', 'width': '20px',
                               'padding': '2px', 'lineHeight': '10px',
                               'float': 'right'}),
        ], className='row oscope-info', style={'margin': '15px'}),
        html.Hr(),
        dcc.Graph(
            id='oscope-graph',
            figure=dict(
                # np.linspace's `num` must be an int; the original float 1e3
                # raises TypeError on NumPy >= 1.18.
                data=[dict(x=np.linspace(-0.000045, 0.000045, 1000),
                           y=[0] * len(np.linspace(-0.000045, 0.000045, 1000)),
                           marker={'color': '#2a3f5f'})],
                layout=go.Layout(
                    xaxis={'title': 's', 'color': '#506784',
                           'titlefont': dict(
                               family='Dosis',
                               size=15,
                           )},
                    yaxis={'title': 'Voltage (mV)', 'color': '#506784',
                           'titlefont': dict(
                               family='Dosis',
                               size=15,
                           ), 'autorange': False, 'range': [-10, 10]},
                    margin={'l': 40, 'b': 40, 't': 0, 'r': 50},
                    plot_bgcolor='#F3F6FA',
                )
            ),
            config={'displayModeBar': True,
                    'modeBarButtonsToRemove': ['pan2d',
                                               'zoomIn2d',
                                               'zoomOut2d',
                                               'autoScale2d',
                                               'hoverClosestCartesian',
                                               'hoverCompareCartesian']}
        )
    ], className='seven columns right-panel')
])
# Dark theme page layout; mirrors light_layout with dark styling and a
# dark-theme stylesheet. Graph/tab ids are prefixed with "dark-".
dark_layout = DarkThemeProvider([
    html.Link(
        href="https://cdn.rawgit.com/samisahn/" +
             "dash-app-stylesheets/dc60135a/dash-tektronix-350-dark.css",
        rel="stylesheet"
    ),
    # Function Generator Panel - Left
    html.Div([
        html.H2("Dash DAQ: Function Generator & Oscilloscope Control Panel",
                style={
                    'color': 'white',
                    'marginLeft': '40px',
                    'display': 'inline-block',
                    'text-align': 'center'
                }),
        html.Img(src="https://s3-us-west-1.amazonaws.com/plotly-tutorials/" +
                     "excel/dash-daq/dash-daq-logo-by-plotly-stripe+copy.png",
                 style={
                     'position': 'relative',
                     'float': 'right',
                     'right': '10px',
                     'height': '75px'
                 })
    ], className='banner',
        id='dark-header',
        style={
            'height': '75px',
            'margin': '0px -10px 10px',
            'backgroundColor': '#1d1d1d'
        }),
    html.Div([
        html.Div([
            html.Div([
                html.H3("POWER", id="power-title")
            ], className='Title'),
            html.Div([
                html.Div(
                    [
                        daq.PowerButton(
                            id='function-generator',
                            # `on` is a boolean prop; the string 'true' fails
                            # Dash's component prop validation.
                            on=True,
                            label="Function Generator",
                            labelPosition='bottom',
                            color="#EBF0F8"),
                    ],
                    className='six columns',
                    style={'margin-bottom': '15px'}),
                html.Div(
                    [
                        daq.PowerButton(
                            id='oscilloscope',
                            on=True,
                            label="Oscilloscope",
                            labelPosition='bottom',
                            color="#EBF0F8")
                    ],
                    className='six columns',
                    style={'margin-bottom': '15px'}),
            ], style={'margin': '15px 0'})
        ], className='row power-settings-tab'),
        html.Div([
            html.Div(
                [html.H3("FUNCTION", id="function-title")],
                className='Title'),
            html.Div([
                daq.Knob(
                    value=1E6,
                    id="frequency-input",
                    label="Frequency (Hz)",
                    labelPosition="bottom",
                    size=75,
                    color="#EBF0F8",
                    scale={'interval': 1E5},
                    max=2.5E6,
                    min=1E5,
                    className='four columns'
                ),
                daq.Knob(
                    value=1,
                    id="amplitude-input",
                    label="Amplitude (mV)",
                    labelPosition="bottom",
                    size=75,
                    scale={'labelInterval': 10},
                    color="#EBF0F8",
                    max=10,
                    className='four columns'
                ),
                daq.Knob(
                    value=0,
                    id="offset-input",
                    label="Offset (mV)",
                    labelPosition="bottom",
                    size=75,
                    scale={'labelInterval': 10},
                    color="#EBF0F8",
                    max=10,
                    className='four columns'
                )], style={'marginLeft': '20%', 'textAlign': 'center'}),
            html.Div([
                daq.LEDDisplay(
                    id='frequency-display',
                    size=10, value=1E6,
                    label="Frequency (Hz)",
                    labelPosition="bottom",
                    color="#EBF0F8",
                    style={'marginBottom': '30px'},
                    className='four columns'),
                daq.LEDDisplay(
                    id='amplitude-display',
                    size=10,
                    value=1,
                    label="Amplitude (mV)",
                    labelPosition="bottom",
                    color="#EBF0F8",
                    className='four columns'),
                daq.LEDDisplay(
                    id='offset-display',
                    size=10,
                    value=10,
                    label="Offset (mV)",
                    labelPosition="bottom",
                    color="#EBF0F8",
                    className='four columns'),
            ], style={'marginLeft': '20%', 'textAlign': 'center'}),
            dcc.RadioItems(
                id='function-type',
                options=[
                    {'label': 'Sine', 'value': 'SIN'},
                    {'label': 'Square', 'value': 'SQUARE'},
                    {'label': 'Ramp', 'value': 'RAMP'},
                ],
                value='SIN',
                labelStyle={'display': 'inline-block'},
                style={'margin': '30px auto 0px auto',
                       'display': 'flex',
                       'width': '80%',
                       'alignItems': 'center',
                       'justifyContent': 'space-between'}
            )
        ], className='row power-settings-tab'),
        html.Hr(),
        daq.ColorPicker(
            id="color-picker",
            label="Color Picker",
            value=dict(hex="#EBF0F8"),
            size=164,
            theme={'dark': True}
        ),
    ], className='four columns left-panel'),
    # Oscillator Panel - Right
    html.Div([
        html.Div([html.H3("GRAPH", id="graph-title")], className='Title'),
        dcc.Tabs(
            tabs=tabs,
            value=1,
            id='dark-tabs',
            style={
                'backgroundColor': '#EBF0F8',
                'color': '#2a3f5f',
                'height': '80%'},
        ),
        html.Div([
            html.Div([
                html.Div([
                    html.Div(
                        id="dark-graph-info",
                        style={
                            'textAlign': 'center',
                            'fontSize': '16px',
                            'padding': '0px 5px',
                            'lineHeight': '20px',
                            'border': '2px solid #EBF0F8'}),
                ], className="row graph-param"),
            ], className="six columns"),
            html.Button('+',
                        id='new-tab',
                        type='submit',
                        style={
                            'backgroundColor': '#EBF0F8',
                            'height': '20px',
                            'width': '20px',
                            'padding': '2px',
                            'lineHeight': '10px',
                            'float': 'right'}),
        ], className='row oscope-info', style={'margin': '15px'}),
        html.Hr(),
        dcc.Graph(
            id='dark-oscope-graph',
            figure=dict(
                # np.linspace's `num` must be an int; the original float 1e3
                # raises TypeError on NumPy >= 1.18.
                data=[dict(x=np.linspace(-0.000045, 0.000045, 1000),
                           y=[0] * len(np.linspace(-0.000045, 0.000045, 1000)),
                           marker={'color': '#f2f5fa'})],
                layout=go.Layout(
                    xaxis={'title': 's', 'color': '#EBF0F8',
                           'titlefont': dict(
                               family='Dosis',
                               size=15,
                           )},
                    yaxis={'title': 'Voltage (mV)', 'color': '#EBF0F8',
                           'titlefont': dict(
                               family='Dosis',
                               size=15,
                           ), 'autorange': False, 'range': [-10, 10]},
                    margin={'l': 40, 'b': 40, 't': 0, 'r': 50},
                    plot_bgcolor='rgba(0,0,0,0)',
                    paper_bgcolor='rgba(0,0,0,0)'
                )
            ),
            config={'displayModeBar': True,
                    'modeBarButtonsToRemove': ['pan2d',
                                               'zoomIn2d',
                                               'zoomOut2d',
                                               'autoScale2d',
                                               'hoverClosestCartesian',
                                               'hoverCompareCartesian']}
        )
    ], className='seven columns right-panel')
])
app.layout = root_layout
# Dark theme is selected by visiting the /dark path.
@app.callback(Output('toggleTheme', 'value'),
              [Input('url', 'pathname')])
def display_page(pathname):
    # Returns True (dark) only for the /dark route.
    if pathname == '/dark':
        return True
    else:
        return False
# Swap the whole page body when the theme toggle changes.
@app.callback(Output('page-content', 'children'),
              [Input('toggleTheme', 'value')])
def page_layout(value):
    if value:
        return dark_layout
    else:
        return light_layout
# Callbacks for color picker: propagate the selected hex color to every
# themed component (knobs, LED displays, borders, titles, power buttons,
# banner). Each callback simply forwards color['hex'].
@app.callback(Output('frequency-input', 'color'),
              [Input('color-picker', 'value')])
def color_frequency_input(color):
    return color['hex']
@app.callback(Output('amplitude-input', 'color'),
              [Input('color-picker', 'value')])
def color_amplitude_input(color):
    return color['hex']
@app.callback(Output('offset-input', 'color'),
              [Input('color-picker', 'value')])
def color_offset_input(color):
    return color['hex']
@app.callback(Output('frequency-display', 'color'),
              [Input('color-picker', 'value')])
def color_frequency_display(color):
    return color['hex']
@app.callback(Output('amplitude-display', 'color'),
              [Input('color-picker', 'value')])
def color_amplitude_display(color):
    return color['hex']
@app.callback(Output('offset-display', 'color'),
              [Input('color-picker', 'value')])
def color_offset_display(color):
    return color['hex']
# NOTE(review): the two border callbacks replace the component's whole style
# dict, dropping the fontSize/padding/lineHeight set in the layout -- confirm
# this is intended.
@app.callback(Output('graph-info', 'style'),
              [Input('color-picker', 'value')])
def color_info_border(color):
    return {'textAlign': 'center', 'border': "2px solid " + color['hex']}
@app.callback(Output('dark-graph-info', 'style'),
              [Input('color-picker', 'value')])
def color_dinfo_border(color):
    return {'textAlign': 'center', 'border': "2px solid " + color['hex']}
@app.callback(Output('tabs', 'style'),
              [Input('color-picker', 'value')])
def color_tabs_background(color):
    return {'backgroundColor': color['hex']}
@app.callback(Output('power-title', 'style'),
              [Input('color-picker', 'value')])
def color_power_title(color):
    return {'color': color['hex']}
@app.callback(Output('function-title', 'style'),
              [Input('color-picker', 'value')])
def color_function_title(color):
    return {'color': color['hex']}
@app.callback(Output('graph-title', 'style'),
              [Input('color-picker', 'value')])
def color_graph_title(color):
    return {'color': color['hex']}
@app.callback(Output('function-generator', 'color'),
              [Input('color-picker', 'value')])
def color_function_generator(color):
    return color['hex']
@app.callback(Output('oscilloscope', 'color'),
              [Input('color-picker', 'value')])
def color_oscilloscope(color):
    return color['hex']
@app.callback(Output('header', 'style'),
              [Input('color-picker', 'value')])
def color_banner(color):
    return {'backgroundColor': color['hex']}
# Callbacks for knob inputs: mirror each knob's value onto the matching LED
# display.
@app.callback(Output('frequency-display', 'value'),
              [Input('frequency-input', 'value')],)
def update_frequency_display(value):
    return value
@app.callback(Output('amplitude-display', 'value'),
              [Input('amplitude-input', 'value')],)
def update_amplitude_display(value):
    return value
@app.callback(Output('offset-display', 'value'),
              [Input('offset-input', 'value')])
def update_offset_display(value):
    return value
# Callbacks graph and graph info
@app.callback(Output('graph-info', 'children'),
              [Input('oscope-graph', 'figure'),
               Input('tabs', 'value')])
def update_info(_, value):
    """Show the stored run description for the selected tab, or "-" if the
    tab has no captured run yet. The figure input only triggers the update;
    its value is unused."""
    # Single dict lookup replaces the original membership test + indexed
    # access (and the no-op '' + str(value) concatenation).
    run = runs.get(str(value))
    return run[1] if run is not None else "-"
@app.callback(Output('dark-graph-info', 'children'),
              [Input('dark-oscope-graph', 'figure'),
               Input('dark-tabs', 'value')])
def update_dinfo(_, value):
    """Dark-theme counterpart of update_info."""
    run = runs.get(str(value))
    return run[1] if run is not None else "-"
@app.callback(Output('oscope-graph', 'figure'),
              [Input('tabs', 'value'),
               Input('frequency-input', 'value'),
               Input('function-type', 'value'),
               Input('amplitude-input', 'value'),
               Input('offset-input', 'value'),
               Input('oscilloscope', 'on'),
               Input('function-generator', 'on')])
def update_output(value, frequency, wave, amplitude, offset, osc_on, fnct_on):
    """Build the light-theme oscilloscope figure.

    Blank screen when the scope is off; a flat trace when the generator is
    off; the stored figure when switching to a previously-captured tab;
    otherwise a freshly computed waveform that is also cached in `runs`.
    """
    global tab
    # np.linspace's `num` must be an int; the original float 1e3 raises
    # TypeError on NumPy >= 1.18.
    time = np.linspace(-0.000045, 0.000045, 1000)
    zero = dict(
        data=[dict(x=time, y=[0] * len(time),
                   marker={'color': '#2a3f5f'})],
        layout=go.Layout(
            xaxis={'title': 's', 'color': '#506784',
                   'titlefont': dict(
                       family='Dosis',
                       size=15,
                   )},
            yaxis={'title': 'Voltage (mV)', 'color': '#506784',
                   'titlefont': dict(
                       family='Dosis',
                       size=15,
                   ), 'autorange': False, 'range': [-10, 10]},
            margin={'l': 40, 'b': 40, 't': 0, 'r': 50},
            plot_bgcolor='#F3F6FA',
        )
    )
    if not osc_on:
        # Scope powered off: blank, unlabeled screen.
        return dict(
            data=[],
            layout=go.Layout(
                xaxis={'title': 's', 'color': '#506784', 'titlefont': dict(
                    family='Dosis',
                    size=15,
                ), 'showticklabels': False, 'ticks': '', 'zeroline': False},
                yaxis={'title': 'Voltage (mV)', 'color': '#506784',
                       'titlefont': dict(
                           family='Dosis',
                           size=15,
                       ), 'showticklabels': False, 'zeroline': False},
                margin={'l': 40, 'b': 40, 't': 0, 'r': 50},
                plot_bgcolor='#506784',
            )
        )
    if not fnct_on:
        # Generator powered off: flat zero trace.
        return zero
    # Equality, not identity: `is` on ints only works by accident of small-int
    # interning in CPython.
    if tab != value:
        # Tab switch: restore the cached run (restyled for this theme) or
        # show a flat trace for an empty tab.
        if '' + str(value) in runs:
            tab = value
            figure = runs['' + str(value)][0]
            figure['data'][0]['marker']['color'] = '#2a3f5f'
            figure['layout'] = go.Layout(
                xaxis={'title': 's', 'color': '#506784',
                       'titlefont': dict(
                           family='Dosis',
                           size=15,
                       )},
                yaxis={'title': 'Voltage (mV)', 'color': '#506784',
                       'titlefont': dict(
                           family='Dosis',
                           size=15,
                       ), 'autorange': False, 'range': [-10, 10]},
                margin={'l': 40, 'b': 40, 't': 0, 'r': 50},
                plot_bgcolor='#F3F6FA',
            )
            return figure
        tab = value
        return zero
    else:
        # Same tab: recompute the waveform from the current knob settings.
        if wave == 'SIN':
            y = [float(offset) +
                 (float(amplitude) *
                  np.sin(np.radians(2.0 * np.pi * float(frequency) * t)))
                 for t in time]
        elif wave == 'SQUARE':
            y = [float(offset) +
                 float(amplitude) *
                 (signal.square(2.0 * np.pi * float(frequency)/10 * t))
                 for t in time]
        elif wave == 'RAMP':
            y = float(amplitude) * \
                (np.abs(signal.sawtooth(2*np.pi * float(frequency)/10 * time)))
            y = float(offset) + 2*y - float(amplitude)
        figure = dict(
            data=[dict(x=time, y=y, marker={'color': '#2a3f5f'})],
            layout=go.Layout(
                xaxis={'title': 's', 'color': '#506784',
                       'titlefont': dict(
                           family='Dosis',
                           size=15,
                       )},
                yaxis={'title': 'Voltage (mV)', 'color': '#506784',
                       'titlefont': dict(
                           family='Dosis',
                           size=15,
                       ), 'autorange': False, 'range': [-10, 10]},
                margin={'l': 40, 'b': 40, 't': 0, 'r': 50},
                plot_bgcolor='#F3F6FA',
            )
        )
        # Cache the run so other callbacks/tabs can recall it.
        runs['' + str(value)] = figure, str(wave) + " | " + str(frequency) +\
            "Hz" + " | " + str(amplitude) + "mV" + " | " + str(offset) + "mV"
        # wait to update the runs variable
        sleep(0.10)
        return figure
@app.callback(Output('dark-oscope-graph', 'figure'),
              [Input('dark-tabs', 'value'),
               Input('frequency-input', 'value'),
               Input('function-type', 'value'),
               Input('amplitude-input', 'value'),
               Input('offset-input', 'value'),
               Input('oscilloscope', 'on'),
               Input('function-generator', 'on')])
def update_doutput(value, frequency, wave, amplitude, offset, osc_on, fnct_on):
    """Dark-theme counterpart of update_output (transparent background,
    light-colored trace); shares the `runs` cache and `tab` state."""
    global tab
    # np.linspace's `num` must be an int; the original float 1e3 raises
    # TypeError on NumPy >= 1.18.
    time = np.linspace(-0.000045, 0.000045, 1000)
    zero = dict(
        data=[dict(x=time, y=[0] * len(time), marker={'color': '#f2f5fa'})],
        layout=go.Layout(
            xaxis={'title': 's', 'color': '#EBF0F8',
                   'titlefont': dict(
                       family='Dosis',
                       size=15,
                   )},
            yaxis={'title': 'Voltage (mV)', 'color': '#EBF0F8',
                   'titlefont': dict(
                       family='Dosis',
                       size=15,
                   ), 'autorange': False, 'range': [-10, 10]},
            margin={'l': 40, 'b': 40, 't': 0, 'r': 50},
            plot_bgcolor='rgba(0,0,0,0)',
            paper_bgcolor='rgba(0,0,0,0)'
        )
    )
    if not osc_on:
        # Scope powered off: blank, unlabeled screen.
        return dict(
            data=[],
            layout=go.Layout(
                xaxis={'title': 's', 'color': 'rgba(0,0,0,0)',
                       'titlefont': dict(
                           family='Dosis',
                           size=15,
                       ), 'showticklabels': False,
                       'ticks': '', 'zeroline': False},
                yaxis={'title': 'Voltage (mV)', 'color': 'rgba(0,0,0,0)',
                       'titlefont': dict(
                           family='Dosis',
                           size=15,
                       ), 'showticklabels': False, 'zeroline': False},
                margin={'l': 40, 'b': 40, 't': 0, 'r': 50},
                plot_bgcolor='rgba(0,0,0,0)',
                paper_bgcolor='rgba(0,0,0,0)'
            )
        )
    if not fnct_on:
        # Generator powered off: flat zero trace.
        return zero
    # Equality, not identity: `is` on ints only works by accident of small-int
    # interning in CPython.
    if tab != value:
        # Tab switch: restore the cached run (restyled for this theme) or
        # show a flat trace for an empty tab.
        if '' + str(value) in runs:
            tab = value
            figure = runs['' + str(value)][0]
            figure['data'][0]['marker']['color'] = "#f2f5fa"
            figure['layout'] = go.Layout(
                xaxis={'title': 's', 'color': '#EBF0F8',
                       'titlefont': dict(
                           family='Dosis',
                           size=15,
                       )},
                yaxis={'title': 'Voltage (mV)', 'color': '#EBF0F8',
                       'titlefont': dict(
                           family='Dosis',
                           size=15,
                       ), 'autorange': False, 'range': [-10, 10]},
                margin={'l': 40, 'b': 40, 't': 0, 'r': 50},
                plot_bgcolor='rgba(0,0,0,0)',
                paper_bgcolor='rgba(0,0,0,0)'
            )
            return figure
        tab = value
        return zero
    else:
        # Same tab: recompute the waveform from the current knob settings.
        if wave == 'SIN':
            y = [float(offset) +
                 (float(amplitude) *
                  np.sin(np.radians(2.0 * np.pi * float(frequency) * t)))
                 for t in time]
        elif wave == 'SQUARE':
            y = [float(offset) +
                 float(amplitude) *
                 (signal.square(2.0 * np.pi * float(frequency)/10 * t))
                 for t in time]
        elif wave == 'RAMP':
            y = float(amplitude) * \
                (np.abs(signal.sawtooth(2*np.pi * float(frequency)/10 * time)))
            y = float(offset) + 2*y - float(amplitude)
        figure = dict(
            data=[dict(x=time, y=y, marker={'color': '#f2f5fa'})],
            layout=go.Layout(
                xaxis={'title': 's', 'color': '#EBF0F8',
                       'titlefont': dict(
                           family='Dosis',
                           size=15,
                       )},
                yaxis={'title': 'Voltage (mV)', 'color': '#EBF0F8',
                       'titlefont': dict(
                           family='Dosis',
                           size=15,
                       ), 'autorange': False, 'range': [-10, 10]},
                margin={'l': 40, 'b': 40, 't': 0, 'r': 50},
                plot_bgcolor='rgba(0,0,0,0)',
                paper_bgcolor='rgba(0,0,0,0)'
            )
        )
        # Cache the run so other callbacks/tabs can recall it.
        runs['' + str(value)] = figure, str(wave) + " | " + str(frequency) + \
            "Hz" + " | " + str(amplitude) + "mV" + " | " + str(offset) + "mV"
        # wait to update the runs variable
        sleep(0.10)
        return figure
# Append a new run tab when the '+' button is clicked. Both callbacks mutate
# the shared module-level `tabs` list, so light and dark tab bars stay in
# sync.
@app.callback(Output('tabs', 'tabs'),
              [Input('new-tab', 'n_clicks')])
def new_tabs(n_clicks):
    # n_clicks is None on initial page load; only append on real clicks.
    if n_clicks is not None:
        tabs.append({'label': 'Run #' + str(tabs[-1]['value'] + 1),
                     'value': int(tabs[-1]['value']) + 1})
        return tabs
    return tabs
@app.callback(Output('dark-tabs', 'tabs'),
              [Input('new-tab', 'n_clicks')])
def new_dtabs(n_clicks):
    # Dark-theme counterpart of new_tabs.
    if n_clicks is not None:
        tabs.append({'label': 'Run #' + str(tabs[-1]['value'] + 1),
                     'value': int(tabs[-1]['value']) + 1})
        return tabs
    return tabs
# External stylesheets (grid framework, app-specific theme, Dosis font).
external_css = ["https://codepen.io/chriddyp/pen/bWLwgP.css",
                "https://cdn.rawgit.com/samisahn/dash-app-stylesheets/" +
                "eccb1a1a/dash-tektronix-350.css",
                "https://fonts.googleapis.com/css?family=Dosis"]
for css in external_css:
    app.css.append_css({"external_url": css})
# On Heroku (DYNO env var present), attach the Google Analytics snippet.
if 'DYNO' in os.environ:
    app.scripts.append_script({
        'external_url': 'https://cdn.rawgit.com/chriddyp/' +
                        'ca0d8f02a1659981a0ea7f013a378bbd/raw/' +
                        'e79f3f789517deec58f41251f7dbb6bee72c44ab/plotly_ga.js'
    })
if __name__ == '__main__':
    app.run_server(port=5500, debug=True)
|
package org.opensextant.lr.test;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import org.opensextant.lr.LanguageResourceUtils;
import org.opensextant.lr.resources.POSEntry;
import org.opensextant.lr.store.LanguageResourceStore;
/**
 * Command-line utility that loads a part-of-speech lexicon file into a
 * LanguageResourceStore backed by Elasticsearch.
 *
 * Usage: LoadPOSLexiconTest &lt;host&gt; &lt;lexicon-file&gt;
 */
public class LoadPOSLexiconTest {

    public static void main(String[] args) {
        String host = args[0];
        // get the lexicon file
        File lexFile = new File(args[1]);
        LanguageResourceStore store = new LanguageResourceStore(host, 9300);
        System.out.println("Created Word Stat Store");
        loadLexicon(lexFile, store);
        store.flush();
    }

    /**
     * Reads the lexicon file line by line (UTF-8), parses each line into a
     * POSEntry, derives and attaches its base form, and saves it to the store.
     *
     * @param lexFile UTF-8 encoded lexicon file, one entry per line
     * @param store   destination store for the parsed entries
     */
    public static void loadLexicon(File lexFile, LanguageResourceStore store) {
        int entryCount = 0;
        int lineCount = 0;
        // try-with-resources closes the reader on every path (the original
        // code leaked it on both success and failure).
        try (BufferedReader br = new BufferedReader(new InputStreamReader(
                new FileInputStream(lexFile), "UTF-8"))) {
            String line;
            while ((line = br.readLine()) != null) {
                lineCount++;
                POSEntry tmpEntry = LanguageResourceUtils.parsePOSLine(line);
                String base = LanguageResourceUtils
                        .baseForm(tmpEntry.getWord());
                tmpEntry.setBaseForm(base);
                tmpEntry.setStore(store);
                tmpEntry.save();
                entryCount++;
            }
        } catch (IOException e) {
            // FileNotFoundException and UnsupportedEncodingException are both
            // IOException subclasses, so one handler replaces the original
            // three identical catch blocks.
            e.printStackTrace();
        }
        System.out.println("Read " + lineCount + " lines, created "
                + entryCount + " entries");
    }
}
|
import React from "react";
import { NavLink, useLocation } from "react-router-dom";
export default function Sidebar() {
let location = useLocation();
return (
<aside className="bg-gray-800 sm:w-1/3 xl:w-1/5 sm:min-h-screen p-5">
<div>
<p className="text-white font-extrabold text-xl">Developers.Inc</p>
</div>
<nav className="mt-5 list-none">
<li className={location.pathname === "/home" ? "bg-blue-800 p-2 rounded" : "p-2"}>
<NavLink to="/home" className="text-white">
Home
</NavLink>
</li>
<li className={location.pathname === "/usuarios" ? "bg-blue-800 p-2 rounded" : "p-2"}>
<NavLink to="/users" className="text-white">
Usuarios
</NavLink>
</li>
<li className={location.pathname === "/listarproyectoslider" ? "bg-blue-800 p-2 rounded" : "p-2"}>
<NavLink to="/listarproyectoslider" className="text-white">
List Proy Lider
</NavLink>
</li>
<li className={location.pathname === "/listarproyectosadministrador" ? "bg-blue-800 p-2 rounded" : "p-2"}>
<NavLink to="/listarproyectosadministrador" className="text-white">
List Proy Admin
</NavLink>
</li>
<li className={location.pathname === "/listarproyectosestudiantes" ? "bg-blue-800 p-2 rounded" : "p-2"}>
<NavLink to="/listarproyectosestudiantes" className="text-white">
List Proy Estu
</NavLink>
</li>
<li className={location.pathname === "/inscripciones" ? "bg-blue-800 p-2 rounded" : "p-2"}>
<NavLink to="/inscripciones" className="text-white">
Inscripciones
</NavLink>
</li>
<li className={location.pathname === "/avances" ? "bg-blue-800 p-2 rounded" : "p-2"}>
<NavLink to="/avances" className="text-white">
Avances
</NavLink>
</li>
</nav>
</aside>
);
}
|
echo ________________________________________
echo Wallet generation for MTDRDB ...
echo ________________________________________
# Require the autonomous database OCID as the first argument.
if [[ $1 == "" ]]
then
  echo DB OCID not provided
  echo Usage example : ./generateWallet.sh ocid1.autonomousdatabase.oc1.phx.abyhqljtza4ucpamla4huo5o2iopoxk55hia3rfubnwgpmzolya
  exit
fi
export DB_OCID=$1
# Prompt without echoing the password to the terminal.
read -s -p "Wallet Password: " mtdrdb_wallet_password
# umask 177 -> the temporary password file below is created mode 0600.
umask 177
cat >pw <<!
{ "password": "$mtdrdb_wallet_password" }
!
# Download the wallet using the password file, then remove the file.
oci db autonomous-database generate-wallet --autonomous-database-id $DB_OCID --file wallet.zip --from-json file://pw
rm pw
|
import React, { useContext } from 'react';
import {
SearchResultsDiv,
SeeTeamTitle,
} from "./search-results-styles";
import { D3Context } from "../../contexts/D3Context";
import EmployeeResult from "../EmployeeResult/EmployeeResult";
// Renders employee search results and "see team" results from the shared D3
// context as a list of EmployeeResult cards.
const SearchResults = () => {
  // Get the d3 state and action dispatcher
  const { d3State } = useContext(D3Context);
  // TODO: Using index position as key temporarily; should use ID provided by ElasticSearch in future.
  return (
    <SearchResultsDiv>
      {/* Optional section heading for team views. */}
      { d3State.seeTeamTitle &&
        <SeeTeamTitle>{d3State.seeTeamTitle}</SeeTeamTitle>
      }
      {/* ElasticSearch hits: keyed by document _id, data in _source. */}
      { d3State.employeeSearchResults &&
        d3State.employeeSearchResults.map((employee, idx) => (
          <EmployeeResult
            key={employee._id}
            employeeData={employee._source}
          />
        ))
      }
      {/* Team members are plain objects (no _id), hence the index key above. */}
      { d3State.seeTeamSearchResults &&
        d3State.seeTeamSearchResults.map((employee, idx) => (
          <EmployeeResult
            key={idx}
            employeeData={employee}
          />
        ))
      }
    </SearchResultsDiv>
  );
}
#include <iostream>
#include <thread>
#include <vector>
#include <ctime>
#include "Scene.h" // Assume Scene class is defined in Scene.h
// Renders the horizontal band of rows [startY, endY) of the image. Each
// worker thread owns a disjoint band, so no synchronization is needed.
// NOTE(review): the `height` parameter is currently unused in the body.
void renderThread(Scene& scene, Image& image, int width, int height, int startY, int endY) {
    for (int y = startY; y < endY; ++y) {
        for (int x = 0; x < width; ++x) {
            // Render the pixel at (x, y) in the image
            // Example: image.setPixel(x, y, scene.renderPixel(x, y));
        }
    }
}
// Splits the image into one horizontal band per hardware thread, renders the
// bands in parallel, then writes the result to disk.
int main() {
    // hardware_concurrency() may legally return 0 when it cannot be
    // determined; the original code then launched zero threads and rendered
    // nothing. Fall back to a single worker in that case.
    const unsigned hwThreads = std::thread::hardware_concurrency();
    const int numThreads = hwThreads > 0 ? static_cast<int>(hwThreads) : 1;
    Scene scene; // Assume the scene is initialized
    Image image; // Assume the image buffer is initialized
    int width = image.getWidth();
    int height = image.getHeight();
    std::vector<std::thread> threads;
    time_t start = time(0);
    // Band i covers rows [height*i/numThreads, height*(i+1)/numThreads).
    for (int i = 0; i < numThreads; ++i) {
        int startY = (height * i) / numThreads;
        int endY = (height * (i + 1)) / numThreads;
        threads.emplace_back(renderThread, std::ref(scene), std::ref(image), width, height, startY, endY);
    }
    for (auto& thread : threads) {
        thread.join();
    }
    // time()/difftime() give whole-second resolution only.
    std::cout << "Rendering finished in " << difftime(time(0), start) << " seconds." << std::endl;
    std::string outfile_name = "rendered_image.png"; // Output file name
    std::cout << "Writing to " << outfile_name << "." << std::endl;
    image.save(outfile_name);
    return 0;
}
<reponame>sarmatdev/ptah-api
'use strict';
const {INTERNAL_SERVER_ERROR, NOT_FOUND} = require('../../../../config/errors');
const {TariffsList} = require('../../../../common/classes/tariffs-list.class');
module.exports = async (ctx, next) => {
try {
const id = ctx.params.id || '';
let result;
const tariffsList = new TariffsList(ctx);
if (id) {
result = await tariffsList.GetById(id);
if (!result && result.length) {
return ctx.throw(404, NOT_FOUND);
}
} else {
result = {tariffs: await tariffsList.GetAll()};
}
if (!result) {
return ctx.throw(500, INTERNAL_SERVER_ERROR);
}
ctx.status = 200;
ctx.body = result;
} catch (err) {
return ctx.throw(err.status || 500, err.message)
}
next();
};
|
<filename>components/layout/index.tsx
import Footer from './Footer'
import Content from './Content'
import Sidebar from './Sidebar'
import Header from './Header'
import Main from './Main'
import Basic, { BasicProps } from './Basic'
import React from 'react'
interface LayoutType extends React.FC<BasicProps> {
Header: typeof Header;
Footer: typeof Footer;
Content: typeof Content;
Main: typeof Main;
Sidebar: typeof Sidebar;
}
const Layout = Basic as LayoutType
Layout.Header = Header
Layout.Footer = Footer
Layout.Content = Content
Layout.Sidebar = Sidebar
Layout.Main = Main
export { Layout }
|
#numpy is needed for tests:
pip install numpy
# Wrap each run with /usr/bin/time to report peak RSS and user CPU time.
WRAPPER="/usr/bin/time -f \"peak_used_memory:%M(Kb)elapsed_user_time:%U(sec)\""
#WRAPPER="/usr/bin/time"
# The 10**9 C run is currently disabled (commented out below).
echo "testing cfind_max_subsum 10**9:"
#$WRAPPER python "performance/cfind_max_subsum.py" 1000000000
echo "testing cfind_max_subsum 10**4:"
$WRAPPER python "performance/cfind_max_subsum.py" 10000
echo "testing pfind_max_subsum 10**4:"
$WRAPPER python "performance/pfind_max_subsum.py" 10000
echo "testing pfind_max_subsum 10**8:"
$WRAPPER python "performance/pfind_max_subsum.py" 100000000
|
<gh_stars>0
# Q-DAS K-key test fixture: a 3D position characteristic with X/Y/Z axis
# sub-characteristics, tolerance bands (K2110/K2111) and measured values
# (K0001/K0002). Lines are raw "Kxxxx[/n] value" records.
TEST_3D_LINES = [
    "K0100 4",
    "K1002 Teil",
    "K2002/1 3D-Position",
    "K2004/1 0",
    "K2008/1 10",
    "K2002/2 X-Achse",
    "K2004/2 0",
    "K2110/2 9,8",
    "K2111/2 10,2",
    "K2002/3 Y-Achse",
    "K2004/3 0",
    "K2110/3 15,8",
    "K2111/3 16,2",
    "K2002/4 Z-Achse",
    "K2004/4 0",
    "K2110/4 19,8",
    "K2111/4 20,2",
    "K5111/1 1",
    "K5112/2 1",
    "K5103/1 2",
    "K5102/2 2",
    "K5102/2 3",
    "K5102/2 4",
    "K0001/1 0",
    "K0002/1 256",
    "K0001/2 10,023",
    "K0001/3 15,986",
    "K0001/4 20,006"]
# Q-DAS DFQ test fixture: part header plus three characteristics followed by
# packed measurement records (value + timestamp + trailing fields separated
# by control characters in the original file format).
TEST_DFQ_LINES = [
    "K0100 3",
    "K1001 08/15",
    "K1002 Teil 1",
    "K2001/1 1.1",
    "K2002/1 Länge",
    "K2311/1 Drehen",
    "K2402/1 Meßschieber",
    "K2002/2 Durchmesser",
    "K2022/2 3",
    "K2402/2 Meßschieber",
    "K2002/3 Gewinde",
    "K2004/3 1",
    "K2011/3 200",
    "K2311/3 Schneiden",
    "K2402/3 Lehre",
    "9.94012.08.1999/15:23:450#1230.96601000001",
    "9.95012.08.1999/15:23:580#1231.09101000002",
    "9.98012.08.1999/15:24:120#1230.99301000003",
    "10.01012.08.1999/15:24:380#1230.96401000001",
    "10.02012.08.1999/15:25:020#1230.91501000001",
    "10.06012.08.1999/15:25:020#1231.01101000002",
    "9.94012.08.1999/15:25:590#1231.00901000001",
    "9.99012.08.1999/15:26:170#1231.01101000002",
    "10.00012.08.1999/15:26:500#1231.06201000002",
    "10.03012.08.1999/15:27:230#1231.01101000001",
    "10.17012.08.1999/15:27:563#1231.00901000001"]
|
<reponame>NYCMOTI/open-bid<filename>features/step_definitions/admin_auction_status_steps.rb
# Cucumber step for the "approval not requested" auction status.
# NOTE(review): the I18n.t copy is computed but never asserted against the
# page -- confirm whether an `expect(page).to have_content(...)` is missing.
Then(/^I should see that approval has not been requested for the auction$/) do
  I18n.t('statuses.c2_presenter.not_requested.body', link: '')
end
|
import React from 'react';
import { Provider } from 'react-redux';
import { Router, RouterContext } from 'react-router';
export function createForServer(store, renderProps) {
return (
<Provider store={store} key="provider">
<div>
<RouterContext {...renderProps} />
</div>
</Provider>
);
}
export function createForClient(store, { routes, history, devComponent }) {
const component = (
<Router history={history}>
{routes}
</Router>
);
const root = (
<Provider store={store} key="provider">
<div>
{component}
{devComponent}
</div>
</Provider>
);
return root;
}
|
#!/bin/sh
# Converts an aligned SAM file into a HyperTRIBE matrix and loads it into a
# MySQL table. Usage: <script> <samfile> <tablename> <expt> <tp>
HyperTRIBE_DIR="/home/analysis/editing/HyperTRIBE/CODE"
#the sam file from the previous step
samfile=$1
#tablename for mysql table
tablename=$2
#expt name (identifier for an experiment), choose something short
expt=$3
#unique replicate number or timepoint for an experiment
tp=$4
echo "Load SAM file to MySQL tables with the following parameters:"
echo "SAMFILE: $samfile"
echo "TABLENAME: $tablename"
echo "EXPT NAME: $expt"
echo "TP: $tp"
# Strip the .sam suffix to name the matrix output.
prefix=${samfile%.sam*}
#create the matrix file
perl $HyperTRIBE_DIR/sam_to_matrix.pl $samfile $expt $tp
matrix_file=$prefix".matrix"
mv $samfile".matrix.wig" $matrix_file
#load the matrixfile to mysql database.
#the mysql database associated variable for this perl script needs to be updated before running this step
perl $HyperTRIBE_DIR/load_matrix_data.pl -t $tablename -d $matrix_file
|
<reponame>k7n4n5t3w4rt/selection-sort-v3<gh_stars>0
// @flow
/* eslint-env browser */
/**
 * Factory for an animated selection-sort visualisation.
 *
 * The sort itself is deliberately spread across setTimeout/setInterval
 * callbacks so each comparison and swap is rendered one display "click"
 * (config.CLICK ms) at a time through the supplied grid display driver.
 *
 * @param config      destructured options; any omitted field falls back to
 *                    the defaults below.
 * @param gridDisplay factory returning the display driver (D) that performs
 *                    all DOM/grid updates.
 * @returns an object exposing run() plus the internal helpers (exposed
 *          primarily for testing).
 */
export function selectionSortFactory(
  {
    CONTAINER_ID = "",
    SHOW_WORKING = true,
    FPS = 10,
    ACCELLERATION = 100,
    CLICK = 1,
    COLS = 5,
    ROWS = 5,
    MAX_SECONDS_TRANSITION_INTERVAL = 2,
    CONSTANT_TRANSITION_SPEED = false,
    LOOP = true,
    RELOAD_INTERVAL = 1000,
  } /*: config */,
  gridDisplay /*: function */,
) /* :Object */ {
  // Mutable copy of the options; CLICK is recalculated by the display below.
  const config = {
    CONTAINER_ID,
    SHOW_WORKING,
    FPS,
    ACCELLERATION,
    CLICK,
    COLS,
    ROWS,
    MAX_SECONDS_TRANSITION_INTERVAL,
    CONSTANT_TRANSITION_SPEED,
    LOOP,
    RELOAD_INTERVAL,
  };
  // The display
  const D = gridDisplay();
  // Let the display derive the per-step duration from FPS/acceleration.
  config.CLICK = D.getClick(
    config.SHOW_WORKING,
    config.FPS,
    config.ACCELLERATION,
  );
  // Builds a fresh random array, renders the grid, and kicks off the sort.
  function run() /*: void */ {
    // The input
    const a = makeArrayToSort(config.COLS, config.ROWS);
    // The display
    D.displayGrid(
      a,
      config.COLS,
      config.ROWS,
      config.CONTAINER_ID,
      config.SHOW_WORKING,
    );
    D.enableShowWorkingToggleControl(config);
    loop(a, 0);
  }
  // One outer selection-sort iteration: find the minimum of a[i..] and swap
  // it into position i, then schedule the next iteration. When i runs off
  // the end, optionally restart the whole demo after RELOAD_INTERVAL.
  function loop(a /*: Array<Object> */, i /*: number */) /*: null|void */ {
    // reloadPageIfFinishedLooping(a.length, i)
    if (i < a.length) {
      D.setCellDisplay(
        i,
        "add",
        "active",
        config.CONTAINER_ID,
        config.SHOW_WORKING,
      );
      findMinIndex(a, i).then((minIndex) => {
        // If this one is already in the right position
        // jump to the next cell and return out
        if (minIndex === i) {
          skipToNextLoop(a, i, minIndex);
          return null;
        }
        swapAndLoopAgain(a, i, minIndex);
      });
    } else if (config.LOOP) {
      return reloadIfFinishedLooping(config.RELOAD_INTERVAL);
    }
  }
  // Restarts the demo from scratch after the given delay.
  function setReload(reloadInterval /*: number */) /*: void */ {
    setTimeout(() => {
      run();
    }, reloadInterval);
  }
  // Thin alias kept for readability at the call site in loop().
  function reloadIfFinishedLooping(reloadInterval /* :number */) /* :void */ {
    setReload(reloadInterval);
  }
  // Element i is already the minimum: briefly highlight it, clear the
  // highlights one click later, and continue with the next index.
  function skipToNextLoop(
    a /* :Array<Object> */,
    i /* :number */,
    minIndex /* :number */,
  ) /* :void */ {
    D.setCurrentCellDisplayToActive(
      i,
      config.CONTAINER_ID,
      config.SHOW_WORKING,
    );
    setTimeout(() => {
      D.clearActiveCellsDisplay(
        i,
        minIndex,
        config.CONTAINER_ID,
        config.SHOW_WORKING,
      );
      loop(a, ++i);
    }, config.CLICK * 1); // eslint-disable-line no-undef
  }
  // Animates the swap of cells i and minIndex, mirrors it in the array, and
  // schedules the next iteration two clicks later (one click after the
  // highlight clear, so the phases never overlap).
  function swapAndLoopAgain(
    a /* :Array<Object> */,
    i /* :number */,
    minIndex /* :number */,
  ) /* :void */ {
    setTimeout(() => {
      D.swapCells(
        a,
        i,
        minIndex,
        config.CONTAINER_ID,
        config.CONSTANT_TRANSITION_SPEED,
        config.MAX_SECONDS_TRANSITION_INTERVAL,
        config.COLS,
        config.ROWS,
      )
        .then(() => {
          D.swapActiveCellsDisplay(
            i,
            minIndex,
            config.CONTAINER_ID,
            config.SHOW_WORKING,
          );
          return swapArrayElements(a, i, minIndex);
        })
        .then((a) => {
          setTimeout(() => {
            D.clearActiveCellsDisplay(
              i,
              minIndex,
              config.CONTAINER_ID,
              config.SHOW_WORKING,
            );
            ++i;
          }, config.CLICK * 1); // eslint-disable-line no-undef
          setTimeout(() => {
            loop(a, i);
          }, config.CLICK * 2); // eslint-disable-line no-undef
        })
        .catch((e) => {
          console.error(e);
          throw new Error(e);
        });
    }, config.CLICK * 1); // eslint-disable-line no-undef
  }
  // Scans a[j..] one element per click (via setInterval) and resolves with
  // the index of the smallest value, updating "actively-looking"/"min"
  // highlights as the scan progresses.
  function findMinIndex(
    a /* :Array<Object> */,
    j /* :number */,
  ) /* :Promise<number> */ {
    let minValue = a[j].value;
    let minIndex = j;
    return new Promise((resolve) => {
      const intervalID = setInterval(() => {
        D.setCellDisplay(
          j,
          "remove",
          "actively-looking",
          config.CONTAINER_ID,
          config.SHOW_WORKING,
        );
        ++j;
        if (j >= a.length) {
          // Scan finished: stop ticking, finalise the highlights, resolve.
          clearInterval(intervalID);
          D.setCellDisplay(
            minIndex,
            "remove",
            "min",
            config.CONTAINER_ID,
            config.SHOW_WORKING,
          );
          D.setCellDisplay(
            minIndex,
            "remove",
            "actively-looking",
            config.CONTAINER_ID,
            config.SHOW_WORKING,
          );
          D.setCellDisplay(
            minIndex,
            "add",
            "active-min",
            config.CONTAINER_ID,
            config.SHOW_WORKING,
          );
          return resolve(minIndex);
        }
        D.setCellDisplay(
          j,
          "add",
          "actively-looking",
          config.CONTAINER_ID,
          config.SHOW_WORKING,
        );
        if (a[j].value < minValue) {
          // New minimum found: move the "min" highlight to it.
          D.setCellDisplay(
            minIndex,
            "remove",
            "min",
            config.CONTAINER_ID,
            config.SHOW_WORKING,
          );
          minValue = a[j].value;
          minIndex = j;
          D.setCellDisplay(
            minIndex,
            "add",
            "min",
            config.CONTAINER_ID,
            config.SHOW_WORKING,
          );
        }
      }, config.CLICK * 1); // eslint-disable-line no-undef
    });
  }
  // Swaps the .value fields in place (ids stay put — the DOM cells keep
  // their identity) and returns the same array for promise chaining.
  function swapArrayElements(
    a /*: Array<Object> */,
    i /*: number */,
    minIndex /*: number */,
  ) /* :Array<Object> */ {
    const tmpValue = a[i].value;
    a[i].value = a[minIndex].value;
    a[minIndex].value = tmpValue;
    return a;
  }
  // Builds cols*rows items of random values with stable string ids.
  function makeArrayToSort(
    cols /*: number */,
    rows /*: number */,
  ) /* :Array<Object> */ {
    const numItems = cols * rows;
    const a = [];
    let randomNumber = 0;
    for (let i = 0; i < numItems; i++) {
      randomNumber = Math.random();
      a.push({
        value: randomNumber,
        id: "_" + i.toString(),
      });
    }
    //
    return a;
  }
  return {
    config,
    run,
    loop,
    skipToNextLoop,
    swapAndLoopAgain,
    findMinIndex,
    swapArrayElements,
    makeArrayToSort,
    setReload,
  };
}
|
#!/usr/bin/env bash
# Init option {{{
Color_off='\033[0m' # Text Reset
# terminal color template {{{
# Regular Colors
Black='\033[0;30m' # Black
Red='\033[0;31m' # Red
Green='\033[0;32m' # Green
Yellow='\033[0;33m' # Yellow
Blue='\033[0;34m' # Blue
Purple='\033[0;35m' # Purple
Cyan='\033[0;36m' # Cyan
White='\033[0;37m' # White
# Bold
BBlack='\033[1;30m' # Black
BRed='\033[1;31m' # Red
BGreen='\033[1;32m' # Green
BYellow='\033[1;33m' # Yellow
BBlue='\033[1;34m' # Blue
BPurple='\033[1;35m' # Purple
BCyan='\033[1;36m' # Cyan
BWhite='\033[1;37m' # White
# Underline
UBlack='\033[4;30m' # Black
URed='\033[4;31m' # Red
UGreen='\033[4;32m' # Green
UYellow='\033[4;33m' # Yellow
UBlue='\033[4;34m' # Blue
UPurple='\033[4;35m' # Purple
UCyan='\033[4;36m' # Cyan
UWhite='\033[4;37m' # White
# Background
On_Black='\033[40m' # Black
On_Red='\033[41m' # Red
On_Green='\033[42m' # Green
On_Yellow='\033[43m' # Yellow
On_Blue='\033[44m' # Blue
On_Purple='\033[45m' # Purple
On_Cyan='\033[46m' # Cyan
On_White='\033[47m' # White
# High Intensity
IBlack='\033[0;90m' # Black
IRed='\033[0;91m' # Red
IGreen='\033[0;92m' # Green
IYellow='\033[0;93m' # Yellow
IBlue='\033[0;94m' # Blue
IPurple='\033[0;95m' # Purple
ICyan='\033[0;96m' # Cyan
IWhite='\033[0;97m' # White
# Bold High Intensity
BIBlack='\033[1;90m' # Black
BIRed='\033[1;91m' # Red
BIGreen='\033[1;92m' # Green
BIYellow='\033[1;93m' # Yellow
BIBlue='\033[1;94m' # Blue
BIPurple='\033[1;95m' # Purple
BICyan='\033[1;96m' # Cyan
BIWhite='\033[1;97m' # White
# High Intensity backgrounds
On_IBlack='\033[0;100m' # Black
On_IRed='\033[0;101m' # Red
On_IGreen='\033[0;102m' # Green
On_IYellow='\033[0;103m' # Yellow
On_IBlue='\033[0;104m' # Blue
On_IPurple='\033[0;105m' # Purple
On_ICyan='\033[0;106m' # Cyan
On_IWhite='\033[0;107m' # White
# }}}
# version
Version='1.0.0-dev'
#System name
System="$(uname -s)"
# }}}
# fetch_repo {{{
# Clone the dotfiles repo into ~/.dotfiles, or pull the latest master when a
# checkout already exists.
fetch_repo () {
    if [[ -d "$HOME/.dotfiles" ]]; then
        info "Trying to update dotfiles"
        # BUG FIX: guard the cd — if it fails, git pull would run in the
        # caller's current directory instead of the repo.
        cd "$HOME/.dotfiles" || return
        git pull https://github.com/snuzinator/.dotfiles.git master
        cd - > /dev/null 2>&1
        success "Successfully update dotfiles"
    else
        info "Trying to clone dotfiles from snuz"
        git clone https://github.com/snuzinator/.dotfiles.git "$HOME/.dotfiles"
        success "Successfully clone dotfiles"
    fi
}
# }}}
# usage {{{
# Print the command-line help text to stdout.
usage (){
    # A quoted heredoc reproduces the help text verbatim (no expansions).
    cat <<'EOF'
add key: (example ./bash --pc)

OPTIONS

--pc install for Desktop
--notebook install for Notebook
EOF
}
# }}}
# success/msg {{{
# Print a single message line to stderr; %b interprets the colour escapes.
msg() {
    printf '%b\n' "$1" >&2
}
# Print a green check-marked status line (args are concatenated) to stderr.
success() {
    msg "${Green}[✔]${Color_off} ${1}${2}"
}
# Print a blue informational status line (args are concatenated) to stderr.
info() {
    msg "${Blue}[➭]${Color_off} ${1}${2}"
}
# }}}
# echo_with_color {{{
# Print "$2" to stderr in colour "$1", resetting the colour afterwards.
echo_with_color () {
    printf '%b\n' "$1$2$Color_off" >&2
}
# }}}
# install_done {{{
# Print the closing banner after a successful installation.
install_done () {
    # FIX: quote "${Yellow}" (shellcheck SC2086) so the escape sequence is
    # passed as a single argument; printed output is unchanged.
    echo_with_color "${Yellow}" ""
    echo_with_color "${Yellow}" "Almost done!"
    echo_with_color "${Yellow}" "=============================================================================="
    echo_with_color "${Yellow}" "== Open Vim or Neovim and it will install the plugins automatically =="
    echo_with_color "${Yellow}" "=============================================================================="
    echo_with_color "${Yellow}" ""
    echo_with_color "${Yellow}" "That's it. Thanks for installing Dotfiles Snuz. Enjoy!"
    echo_with_color "${Yellow}" ""
}
# }}}
# install_vim {{{
# Link ~/.vimrc and ~/.vim to the repo copies, backing up anything real.
install_vim (){
    # ~/.vimrc: back up a pre-existing file, then link the repo version.
    # BUG FIX: the old code only created the vimrc link when either ~/.vimrc
    # already existed or ~/.vim was missing — with ~/.vimrc absent but ~/.vim
    # present no link was ever made; and with ~/.vimrc present but ~/.vim
    # absent it tried to create the link twice (the second ln -s failed).
    if [[ -f "$HOME/.vimrc" ]]; then
        mv "$HOME/.vimrc" "$HOME/.vimrc_back"
        success "Backup $HOME/.vimrc to $HOME/.vimrc_back"
    fi
    ln -s "$HOME/.dotfiles/vim/vimrc" "$HOME/.vimrc"
    # ~/.vim: keep an existing dotfiles link, back up anything else, link.
    if [[ -d "$HOME/.vim" ]]; then
        if [[ "$(readlink $HOME/.vim)" =~ dotfiles ]]; then
            success "Link $HOME/.vim Already installed for vim"
        else
            mv "$HOME/.vim" "$HOME/.vim_back"
            success "BackUp $HOME/.vim to $HOME/.vim_back"
            ln -s "$HOME/.dotfiles/vim" "$HOME/.vim"
            success "Installed dotfiles for vim"
        fi
    else
        ln -s "$HOME/.dotfiles/vim" "$HOME/.vim"
        success "Installed dotfile for vim"
    fi
}
# }}}
# uninstall_vim {{{
# Remove the vim links created by install_vim and restore any backups.
uninstall_vim () {
    if [[ -d "$HOME/.vim" ]]; then
        # CONSISTENCY FIX: every sibling uninstall_* matches the literal
        # "dotfiles"; here ".dotfiles" used "." as an unintended regex
        # wildcard (it matched any character, e.g. "Xdotfiles").
        if [[ "$(readlink $HOME/.vim)" =~ dotfiles ]]; then
            rm "$HOME/.vim"
            success "Uninstall dotfile folder vim"
            if [[ -d "$HOME/.vim_back" ]]; then
                mv "$HOME/.vim_back" "$HOME/.vim"
                success "Recover from $HOME/.vim_back"
            fi
        fi
    fi
    # NOTE(review): if no ~/.vimrc_back exists, a dangling ~/.vimrc symlink
    # into the removed repo may remain — confirm whether it should be rm'd.
    if [[ -f "$HOME/.vimrc_back" ]]; then
        mv "$HOME/.vimrc_back" "$HOME/.vimrc"
        success "Recover from $HOME/.vimrc_back"
    fi
}
# }}}
# install_git {{{
# Link gitconfig, gitignore_global and the git_template dir from the repo
# into $HOME, backing up any pre-existing versions with a *_back suffix.
install_git () {
    if [[ -f "$HOME/.gitconfig" ]]; then
        mv "$HOME/.gitconfig" "$HOME/.gitconfig_back"
        success "Backup $HOME/.gitconfig to $HOME/.gitconfig_back"
        ln -s "$HOME/.dotfiles/git/gitconfig" "$HOME/.gitconfig"
    else
        ln -s "$HOME/.dotfiles/git/gitconfig" "$HOME/.gitconfig"
        success "Installed dotfile for gitconfig"
    fi
    if [[ -f "$HOME/.gitignore_global" ]]; then
        mv "$HOME/.gitignore_global" "$HOME/.gitignore_global_back"
        success "Backup $HOME/.gitignore_global to $HOME/.gitignore_global_back"
        ln -s "$HOME/.dotfiles/git/gitignore_global" "$HOME/.gitignore_global"
    else
        ln -s "$HOME/.dotfiles/git/gitignore_global" "$HOME/.gitignore_global"
        success "Installed dotfile for gitignore_global"
    fi
    # git_template is a directory: keep an existing dotfiles link as-is,
    # back up any real directory before linking.
    if [[ -d "$HOME/.git_template" ]]; then
        if [[ "$(readlink $HOME/.git_template)" =~ dotfiles ]]; then
            success "Link $HOME/.git_template Already installed for git"
        else
            mv "$HOME/.git_template" "$HOME/.git_template_back"
            success "Backup $HOME/.git_template to $HOME/.git_template_back"
            ln -s "$HOME/.dotfiles/git/git_template" "$HOME/.git_template"
        fi
    else
        ln -s "$HOME/.dotfiles/git/git_template" "$HOME/.git_template"
        success "Installed dotfile for git_template"
    fi
}
#}}}
# uninstall_git {{{
# Remove the git links created by install_git and restore any backups.
uninstall_git (){
    if [[ -d "$HOME/.git_template" ]]; then
        # Only remove ~/.git_template when it is our dotfiles symlink.
        if [[ "$(readlink $HOME/.git_template)" =~ dotfiles ]]; then
            rm "$HOME/.git_template"
            success "Uninstall dotfile git_template"
            if [[ -d "$HOME/.git_template_back" ]]; then
                mv "$HOME/.git_template_back" "$HOME/.git_template"
                success "Recover from $HOME/.git_template_back"
            fi
        fi
    fi
    if [[ -f "$HOME/.gitconfig_back" ]]; then
        mv "$HOME/.gitconfig_back" "$HOME/.gitconfig"
        success "Recover from $HOME/.gitconfig_back"
    fi
    if [[ -f "$HOME/.gitignore_global_back" ]]; then
        mv "$HOME/.gitignore_global_back" "$HOME/.gitignore_global"
        success "Recover from $HOME/.gitignore_global_back"
    fi
}
#}}}
# install_tmux {{{
# Link tmux.conf and the tmux config dir from the repo, then install or
# update the Tmux Plugin Manager (tpm).
install_tmux (){
    if [[ -f "$HOME/.tmux.conf" ]]; then
        mv "$HOME/.tmux.conf" "$HOME/.tmux.conf_back"
        # BUG FIX: message said "Buckup".
        success "Backup $HOME/.tmux.conf to $HOME/.tmux.conf_back"
        ln -s "$HOME/.dotfiles/tmux/tmux.conf" "$HOME/.tmux.conf"
    else
        ln -s "$HOME/.dotfiles/tmux/tmux.conf" "$HOME/.tmux.conf"
        success "Installed dotfile for tmux.conf"
    fi
    if [[ -d "$HOME/.tmux" ]]; then
        if [[ "$(readlink $HOME/.tmux)" =~ dotfiles ]]; then
            success "Link $HOME/.tmux Already installed for tmux"
        else
            mv "$HOME/.tmux" "$HOME/.tmux_back"
            success "Backup $HOME/.tmux to $HOME/.tmux_back"
            ln -s "$HOME/.dotfiles/tmux" "$HOME/.tmux"
        fi
    else
        ln -s "$HOME/.dotfiles/tmux" "$HOME/.tmux"
        success "Installed folder for tmux"
    fi
    if [[ -d "$HOME/.tmux/plugins/tpm" ]]; then
        if [[ -f "$HOME/.tmux/plugins/tpm/tpm" ]]; then
            info "Trying to update tpm"
            # BUG FIX: guard the cd so git pull cannot run elsewhere.
            cd "$HOME/.tmux/plugins/tpm" || return
            git pull
            cd - > /dev/null 2>&1
            success "Successfully update tpm"
        fi
    else
        git clone https://github.com/tmux-plugins/tpm "$HOME/.tmux/plugins/tpm"
        success "Tmux Plugin Manager"
        # NOTE(review): tpm's install binding is <prefix> + I; "Ctrl+I" is
        # only accurate for a Ctrl prefix — confirm the intended message.
        info "Open Tmux sessions and press Ctrl+I for install plugins"
    fi
}
#}}}
# uninstall_tmux{{{
# Remove the tmux links created by install_tmux and restore any backups.
uninstall_tmux() {
    if [[ -d "$HOME/.tmux" ]]; then
        # Only remove ~/.tmux when it is our dotfiles symlink.
        if [[ "$(readlink $HOME/.tmux)" =~ dotfiles ]]; then
            rm "$HOME/.tmux"
            success "Uninstall dotfolder tmux"
            if [[ -d "$HOME/.tmux_back" ]]; then
                mv "$HOME/.tmux_back" "$HOME/.tmux"
                success "Recover folder from $HOME/.tmux_back"
            fi
        fi
    else
        # ~/.tmux already gone: still restore a backup if one exists.
        if [[ -d "$HOME/.tmux_back" ]]; then
            mv "$HOME/.tmux_back" "$HOME/.tmux"
            success "Recover folder from $HOME/.tmux_back"
        fi
    fi
    if [[ -f "$HOME/.tmux.conf_back" ]]; then
        mv "$HOME/.tmux.conf_back" "$HOME/.tmux.conf"
        success "Recover from $HOME/.tmux.conf_back"
    fi
}
#}}}
# install_termite {{{
# Link the termite config dir from the repo into ~/.config, backing up any
# pre-existing real directory first.
install_termite () {
    # Guard-first layout: handle the "nothing there yet" case, then the
    # "already linked" case, and only back up when a real dir is in the way.
    if [[ ! -d "$HOME/.config/termite" ]]; then
        ln -s "$HOME/.dotfiles/termite" "$HOME/.config/termite"
        success "Installed folder for termite"
    elif [[ "$(readlink $HOME/.config/termite)" =~ dotfiles ]]; then
        success "Link $HOME/.config/termite Already installed for termite"
    else
        mv "$HOME/.config/termite" "$HOME/.config/termite_back"
        success "Backup $HOME/.config/termite to $HOME/.config/termite_back"
        ln -s "$HOME/.dotfiles/termite" "$HOME/.config/termite"
        success "Installed folder for termite"
    fi
}
#}}}
# uninstall_termite{{{
# Remove the termite link created by install_termite and restore any backup.
uninstall_termite () {
    if [[ -d "$HOME/.config/termite" ]]; then
        # Only remove the dir when it is our dotfiles symlink.
        if [[ "$(readlink $HOME/.config/termite)" =~ dotfiles ]]; then
            rm "$HOME/.config/termite"
            success "Uninstall dotfolder termite"
            if [[ -d "$HOME/.config/termite_back" ]]; then
                mv "$HOME/.config/termite_back" "$HOME/.config/termite"
                success "Recover folder from $HOME/.config/termite_back"
            fi
        fi
    else
        # Dir already gone: still restore a backup if one exists.
        if [[ -d "$HOME/.config/termite_back" ]]; then
            mv "$HOME/.config/termite_back" "$HOME/.config/termite"
            success "Recover folder from $HOME/.config/termite_back"
        fi
    fi
}
#}}}
# install_i3config {{{
# Link the desktop i3 config dir from the repo into ~/.config, backing up
# any pre-existing real directory first.
install_i3config () {
    if [[ -d "$HOME/.config/i3" ]]; then
        if [[ "$(readlink $HOME/.config/i3)" =~ dotfiles ]]; then
            success "Link $HOME/.config/i3 Already Installed for i3"
        else
            mv "$HOME/.config/i3" "$HOME/.config/i3_back"
            success "Backup $HOME/.config/i3 to $HOME/.config/i3_back"
            ln -s "$HOME/.dotfiles/i3" "$HOME/.config/i3"
            success "Installed folder for i3"
        fi
    else
        ln -s "$HOME/.dotfiles/i3" "$HOME/.config/i3"
        success "Installed folder for i3"
    fi
}
#}}}
# uninstall_i3config {{{
# Remove the i3 link created by install_i3config (or the notebook variant)
# and restore any backup.
uninstall_i3config () {
    if [[ -d "$HOME/.config/i3" ]]; then
        # Only remove the dir when it is our dotfiles symlink.
        if [[ "$(readlink $HOME/.config/i3)" =~ dotfiles ]]; then
            rm "$HOME/.config/i3"
            success "Uninstall dotfolder i3 config"
            if [[ -d "$HOME/.config/i3_back" ]]; then
                mv "$HOME/.config/i3_back" "$HOME/.config/i3"
                success "Recover folder from $HOME/.config/i3_back"
            fi
        fi
    else
        # Dir already gone: still restore a backup if one exists.
        if [[ -d "$HOME/.config/i3_back" ]]; then
            mv "$HOME/.config/i3_back" "$HOME/.config/i3"
            success "Recover folder from $HOME/.config/i3_back"
        fi
    fi
}
# }}}
# install_i3config_notebook {{{
# Same as install_i3config but links the notebook-specific i3 config
# (repo dir i3_notebook) to ~/.config/i3.
install_i3config_notebook () {
    if [[ -d "$HOME/.config/i3" ]]; then
        if [[ "$(readlink $HOME/.config/i3)" =~ dotfiles ]]; then
            success "Link $HOME/.config/i3 Already Installed for i3"
        else
            mv "$HOME/.config/i3" "$HOME/.config/i3_back"
            success "Backup $HOME/.config/i3 to $HOME/.config/i3_back"
            ln -s "$HOME/.dotfiles/i3_notebook" "$HOME/.config/i3"
            success "Installed folder for i3"
        fi
    else
        ln -s "$HOME/.dotfiles/i3_notebook" "$HOME/.config/i3"
        success "Installed folder for i3"
    fi
}
#}}}
# install_nvim {{{
# Link the neovim config dir (shared with vim: repo dir vim/nvim) into
# ~/.config, backing up any pre-existing real directory first.
install_nvim () {
    if [[ -d "$HOME/.config/nvim" ]]; then
        if [[ "$(readlink $HOME/.config/nvim)" =~ dotfiles ]]; then
            success "Link $HOME/.config/nvim Already Installed for nvim"
        else
            mv "$HOME/.config/nvim" "$HOME/.config/nvim_back"
            success "Backup $HOME/.config/nvim to $HOME/.config/nvim_back"
            ln -s "$HOME/.dotfiles/vim/nvim" "$HOME/.config/nvim"
            success "Installed folder for nvim"
        fi
    else
        ln -s "$HOME/.dotfiles/vim/nvim" "$HOME/.config/nvim"
        success "Installed folder for nvim"
    fi
}
#}}}
# uninstall_nvim {{{
# Remove the nvim link created by install_nvim and restore any backup.
uninstall_nvim () {
    if [[ -d "$HOME/.config/nvim" ]]; then
        # Only remove the dir when it is our dotfiles symlink.
        if [[ "$(readlink $HOME/.config/nvim)" =~ dotfiles ]]; then
            rm "$HOME/.config/nvim"
            success "Uninstall dotfolder nvim config"
            if [[ -d "$HOME/.config/nvim_back" ]]; then
                mv "$HOME/.config/nvim_back" "$HOME/.config/nvim"
                success "Recover folder from $HOME/.config/nvim_back"
            fi
        fi
    else
        # Dir already gone: still restore a backup if one exists.
        if [[ -d "$HOME/.config/nvim_back" ]]; then
            mv "$HOME/.config/nvim_back" "$HOME/.config/nvim"
            success "Recover folder from $HOME/.config/nvim_back"
        fi
    fi
}
# }}}
# install_zsh{{{
# Link ~/.zshrc to the repo copy, backing up any pre-existing real file.
# NOTE(review): unlike the other installers there is no uninstall_zsh
# counterpart, and --pc/--notebook never call this — confirm intent.
install_zsh (){
    if [[ -f "$HOME/.zshrc" ]]; then
        if [[ "$(readlink $HOME/.zshrc)" =~ dotfiles ]]; then
            success "Link $HOME/.zshrc Already Installed for zshrc"
        else
            mv "$HOME/.zshrc" "$HOME/.zshrc_back"
            success "Backup $HOME/.zshrc to $HOME/.zshrc_back"
            ln -s "$HOME/.dotfiles/zsh/zshrc" "$HOME/.zshrc"
            success "Installed file for zshrc"
        fi
    else
        ln -s "$HOME/.dotfiles/zsh/zshrc" "$HOME/.zshrc"
        success "Installed file for zshrc"
    fi
}
#}}}
# ---- entry point: dispatch on the first command-line argument -------------
if [[ "$1" == "" || "$1" == "--help" ]]; then
    usage
    # BUG FIX: without this exit the script fell through to the chain below,
    # hit the final else-branch, and printed the usage text a second time.
    exit 0
fi
if [[ "$1" == "--pc" ]]; then
    echo "install PC"
    echo ""
    fetch_repo
    install_vim
    install_git
    install_tmux
    install_termite
    install_i3config
    install_nvim
    install_done
elif [[ "$1" == "--notebook" ]]; then
    echo "install Notebook"
    echo ""
    fetch_repo
    install_vim
    install_git
    install_tmux
    install_termite
    install_i3config_notebook
    install_nvim
    install_done
elif [[ "$1" == "--uninstall" ]]; then
    echo ""
    echo "uninstalling..."
    uninstall_vim
    uninstall_git
    uninstall_tmux
    uninstall_termite
    uninstall_i3config
    uninstall_nvim
else
    # Unknown flag: show the help text.
    usage
fi
|
# Start the compose stack with the database data directory rooted in $HOME.
DB_DATA=$HOME/data docker-compose up
|
package plugin.album.utils;
import android.annotation.SuppressLint;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.nio.charset.Charset;
import java.text.DecimalFormat;
import java.util.Vector;
/**
 * Assorted string helpers: null/empty checks, full-width/half-width
 * normalisation, substring search, number formatting and tag-based URL
 * extraction.
 */
public class StringUtils {
    /** Readability flag for the {@code ignoreCase} parameters of {@link #find}. */
    public static final boolean IGNORE_CASE = true;
    /** Readability flag for the {@code ignoreWidth} parameter of {@link #find}. */
    public static final boolean IGNORE_WIDTH = true;

    /** @return true when the sequence is null or has length 0. */
    public static boolean isNullOrEmpty(CharSequence sequence) {
        return sequence == null || sequence.length() == 0;
    }

    public static boolean isNotNullOrEmpty(CharSequence sequence) {
        return !isNullOrEmpty(sequence);
    }

    /** @return true when every char is whitespace (also true for the empty string). */
    public static boolean isAllWhitespaces(String str) {
        boolean ret = true;
        for (int i = 0; i < str.length(); i++) {
            if (!Character.isWhitespace(str.charAt(i))) {
                ret = false;
                break;
            }
        }
        return ret;
    }

    /** @return true when str is non-empty and consists only of ASCII digits. */
    public static boolean isAllDigits(String str) {
        if (str.length() == 0) {
            return false;
        }
        for (int i = 0; i < str.length(); i++) {
            char c = str.charAt(i);
            if (c < '0' || c > '9') {
                return false;
            }
        }
        return true;
    }

    /** Null-safe, case-sensitive equality. */
    public static boolean equal(String s1, String s2) {
        return equal(s1, s2, false);
    }

    /** Null-safe equality; two nulls compare equal. */
    public static boolean equal(String s1, String s2, boolean ignoreCase) {
        if (s1 != null && s2 != null) {
            if (ignoreCase) {
                return s1.equalsIgnoreCase(s2);
            } else {
                return s1.equals(s2);
            }
        } else {
            return (s1 == null && s2 == null);
        }
    }

    /**
     * Extracts every substring enclosed between beginTag and endTag,
     * skipping empty candidates and candidates starting with '['.
     */
    public static Vector<String> parseMediaUrls(String str, String beginTag, String endTag) {
        Vector<String> list = new Vector<String>();
        if (!isNullOrEmpty(str)) {
            int beginIndex = str.indexOf(beginTag, 0);
            int endIndex = str.indexOf(endTag, 0);
            while ((beginIndex != -1 && endIndex != -1) && (endIndex > beginIndex)) {
                beginIndex += beginTag.length();
                String imgUrl = str.substring(beginIndex, endIndex);
                if (!isNullOrEmpty(imgUrl) && imgUrl.charAt(0) != '[') {
                    list.add(imgUrl);
                }
                // BUG FIX: was "endIndex += endIndex + endTag.length()",
                // which doubled the index and skipped matches later in the
                // string. Advance just past the closing tag instead.
                endIndex += endTag.length();
                beginIndex = str.indexOf(beginTag, endIndex);
                endIndex = str.indexOf(endTag, endIndex);
            }
        }
        return list;
    }

    /**
     * Safe string finding (indexOf) even the arguments are empty Case sentive ver.
     */
    public static int find(String pattern, String s) {
        return find(pattern, s, !IGNORE_CASE);
    }

    /**
     * Safe string finding (indexOf) even the arguments are empty Case sentive can be parameterized
     */
    public static int find(String pattern, String s, boolean ignoreCase) {
        return find(pattern, s, ignoreCase, !IGNORE_WIDTH);
    }

    /**
     * Safe string finding (indexOf) even the arguments are empty Case sentive and Full/Half width ignore can
     * be parameterized
     */
    @SuppressLint("DefaultLocale")
    public static int find(String pattern, String s, boolean ignoreCase, boolean ignoreWidth) {
        if (isNullOrEmpty(s)) {
            return -1;
        }
        pattern = (pattern == null) ? "" : pattern;
        if (ignoreCase) {
            // NOTE: toLowerCase() uses the default locale (hence the
            // SuppressLint above) — confirm this is acceptable for Turkish
            // and similar locales.
            pattern = pattern.toLowerCase();
            s = s.toLowerCase();
        }
        if (ignoreWidth) {
            pattern = narrow(pattern);
            s = narrow(s);
        }
        return s.indexOf(pattern);
    }

    /** Converts every char of s to its half-width form; null-safe. */
    public static String narrow(String s) {
        if (isNullOrEmpty(s)) {
            return "";
        }
        char[] cs = s.toCharArray();
        for (int i = 0; i < cs.length; ++i)
            cs[i] = narrow(cs[i]);
        return new String(cs);
    }

    /** Maps a full-width char to its half-width counterpart, else returns it unchanged. */
    public static char narrow(char c) {
        int code = c;
        if (code >= 65281 && code <= 65373)// Interesting range
            return (char) (code - 65248); // Full-width to half-width
        else if (code == 12288) // Space
            return (char) (code - 12288 + 32);
        else if (code == 65377)
            return (char) (12290);
        else if (code == 12539)
            return (char) (183);
        else if (code == 8226)
            return (char) (183);
        else
            return c;
    }

    /** @return the lower-case code point for ASCII letters, 0 for anything else. */
    public static int ord(char c) {
        if ('a' <= c && c <= 'z')
            return (int) c;
        if ('A' <= c && c <= 'Z')
            return c - 'A' + 'a';
        return 0;
    }

    /** Null-safe lexicographic comparison; null sorts as "". */
    public static int compare(String x, String y) {
        x = (x == null) ? "" : x;
        y = (y == null) ? "" : y;
        return x.compareTo(y);
    }

    // NOTE(review): this encodes and decodes with the same charset, so it is
    // effectively an identity transform (or "" on error) — confirm intent.
    public static String toUtf8(String str) {
        try {
            return new String(str.getBytes("UTF-8"), "UTF-8");
        } catch (UnsupportedEncodingException e) {
            return "";
        }
    }

    // Re-interprets the string's UTF-8 bytes in the platform default charset.
    public static String fromUtf8(String utf8Str) {
        try {
            return new String(utf8Str.getBytes("UTF-8"), Charset.defaultCharset());
        } catch (UnsupportedEncodingException e) {
            return "";
        }
    }

    /** @return true when param contains a char outside half-width katakana and printable ASCII. */
    public static boolean containsFullChar(String param) {
        char[] chs = param.toCharArray();
        for (char ch : chs) {
            if (!(('\uFF61' <= ch) && (ch <= '\uFF9F'))
                    && !(('\u0020' <= ch) && (ch <= '\u007E'))) {
                return true;
            }
        }
        return false;
    }

    /** @return str itself, or "" when str is null. */
    public static String ensureNotNull(final String str) {
        if (str != null) {
            return str;
        } else {
            return "";
        }
    }

    /** Upper-cases the first character when it is an ASCII lower-case letter. */
    public static String upperCase(String str) {
        char[] ch = str.toCharArray();
        if (ch[0] >= 'a' && ch[0] <= 'z') {
            ch[0] = (char) (ch[0] - 32);
        }
        return new String(ch);
    }

    /** Formats d with exactly two decimal places. */
    public static String remain2bits(double d) {
        DecimalFormat df = new DecimalFormat("######0.00");
        return df.format(d);
    }

    /** Formats a numeric string with two decimals; returns the input unchanged if unparseable. */
    public static String remain2bits(String str) {
        try {
            return remain2bits(Double.valueOf(str));
        } catch (NumberFormatException e) {
            return str;
        }
    }

    public static String remain2bits(int i) {
        return remain2bits(Double.valueOf(i));
    }

    /**
     * Truncates source and appends ellipsize when source is longer than length.
     * NOTE(review): 'length' is passed as the END index of substring(), so a
     * non-zero 'start' yields fewer than 'length' chars — kept as-is since
     * callers may rely on it; confirm intended semantics.
     */
    public static String subString(String source, int start, int length, String ellipsize) {
        if (source == null || source.length() <= length) {
            return source;
        }
        return source.substring(start, length) + ellipsize;
    }

    /** Parses s (trimmed) as a long; returns 0 on any failure, including null. */
    public static long parseLong(String s) {
        long l = 0;
        try {
            l = Long.parseLong(s.trim());
        } catch (Exception e) {
            return l;
        }
        return l;
    }

    /** Rounds to one decimal (banker's rounding) and drops a trailing ".0". */
    public static String getDistance1bitValue(double val) {
        BigDecimal bd = new BigDecimal(val);
        BigDecimal i = bd.setScale(1, RoundingMode.HALF_EVEN);
        double value = i.doubleValue();
        if (value % 1 == 0) {
            return String.valueOf((int) value);
        } else {
            return String.valueOf(value);
        }
    }

    /**
     * Rounds num to decimalLen places; integral results are printed without
     * a fractional part. NOTE(review): String.format uses the default locale,
     * so a comma decimal separator would break parseDouble — confirm.
     */
    public static String doubleTrans(double num, int decimalLen) {
        String formatStr = "%." + decimalLen + "f";
        String number1 = String.format(formatStr, num);
        double number2 = Double.parseDouble(number1);
        if (Math.round(number2) - number2 == 0) {
            return String.valueOf((long) number2);
        }
        return String.valueOf(number2);
    }

    ///////////////////////////////////////////////////////////////////////////
    // Number simplification (display helpers)
    ///////////////////////////////////////////////////////////////////////////
    private static final int TEN_MILLION = 10000000;
    private static final int TEN_THOUSAND = 10000;

    /** Truncates value to 'retain' decimal places (no rounding). */
    static double getDouble(double value, int retain) {
        int coefficient = 1;
        for (int i = 0; i < retain; i++) {
            coefficient *= 10;
        }
        int roundNumber = (int) (value * coefficient);
        return (double) roundNumber / coefficient;
    }

    /** Renders raw in units of 千万 (ten million) or 万 (ten thousand) when large enough. */
    public static String simplifyNum(long raw) {
        if (raw / TEN_MILLION > 0) {
            return String.format("%.2f千万", getDouble((double) raw / TEN_MILLION, 2));
        } else if (raw / TEN_THOUSAND > 0) {
            return String.format("%.2f万", getDouble((double) raw / TEN_THOUSAND, 2));
        } else {
            return String.valueOf(raw);
        }
    }

    /** Formats a millisecond duration as zero-padded "mm:ss"; negatives clamp to 00:00. */
    public static String getMinuteTime(long time){
        if(time <= 0) time = 0;
        int totalSeconds = (int) (time / 1000);
        int seconds = totalSeconds % 60;
        int minutes = totalSeconds / 60;
        return String.format("%02d:%02d", minutes, seconds);
    }

    /** @return true when urls starts with "http" (case-insensitive). */
    public static boolean isHttpUrl(String urls) {
        urls = urls.toLowerCase();
        return urls.indexOf("http") == 0;
    }
}
|
import java.util.Random;
/**
 * Generates a random password drawn from upper/lower-case letters, digits
 * and a fixed symbol set.
 *
 * NOTE: java.util.Random is not cryptographically secure; for real
 * credentials use SecureRandom instead.
 */
public class GeneratePassword {
    public static void main(String[] args) {
        int length = 8;
        System.out.println(generatePswd(length));
    }

    /**
     * Returns {@code len} characters chosen uniformly from the combined
     * alphabet. Also prints a "Your Password:" banner (kept for
     * compatibility with the original behaviour).
     */
    static char[] generatePswd(int len) {
        System.out.println("Your Password:");
        String charsCaps = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
        String chars = "abcdefghijklmnopqrstuvwxyz";
        String nums = "0123456789";
        String symbols = "!@#$%^&*_=+-/€.?<>)";
        String passSymbols = charsCaps + chars + nums + symbols;
        Random rnd = new Random();
        char[] password = new char[len];
        // FIX: removed the unused local variable 'index'.
        for (int i = 0; i < len; i++) {
            password[i] = passSymbols.charAt(rnd.nextInt(passSymbols.length()));
        }
        return password;
    }
}
<gh_stars>0
// Module-scope imports first (the ES module loader hoists them anyway, so
// grouping them at the top only improves readability).
import $ from "jquery";
// Keyboard-navigation test suites, split by concern; importing them
// registers their QUnit modules.
import "./keyboardNavigationParts/keyboardController.tests.js";
import "./keyboardNavigationParts/accessibility.tests.js";
import "./keyboardNavigationParts/customization.tests.js";
import "./keyboardNavigationParts/keyboardKeys.tests.js";
import "./keyboardNavigationParts/realControllers.tests.js";
import "./keyboardNavigationParts/rowsView.tests.js";

// Rebuild the fixture DOM before every test so each suite starts from a
// clean dx-datagrid container.
QUnit.testStart(function() {
    // FIX: const instead of let — the markup is never reassigned.
    const markup = `
    <div>
        <div id="container" class="dx-datagrid"></div>
    </div>`;

    $("#qunit-fixture").html(markup);
});
|
-- Fetch all columns for active users whose id is below 20.
SELECT * FROM users WHERE user_id < 20 AND active = true;
<reponame>xxyy/sic
// Holds the currently displayed person and keeps QuoteListService in sync.
tingoApp.factory('PersonDetailService', ['$http', '$location', '$uibModal', 'QuoteListService',
    function ($http, $location, $uibModal, QuoteListService) {
        var detailService = {};
        detailService.person = {name: 'Loading..'};

        // Start a new quote draft for the currently loaded person.
        detailService.new = function () {
            QuoteListService.new(detailService.person);
        };

        // Loads a person and their quotes. Pass the literal string 'new'
        // to prompt for a name and create a fresh person instead.
        detailService.fetch = function (id) {
            QuoteListService.setFetched(false);
            // FIX: strict equality — id is a route-param string.
            if (id === 'new') {
                $uibModal.open({
                    controller: 'PersonNameModalController',
                    controllerAs: 'modalCtrl',
                    templateUrl: 'partials/person-name-modal.html'
                }).result.then(
                    function (newName) {
                        $http.post('/api/person/new', {name: newName})
                            .then(function (response) {
                                detailService.person = response.data;
                                QuoteListService.quotes = [];
                                QuoteListService.setFetched(true);
                            }, function (response) {
                                alert('Fehler beim Erstellen der Person:' + response.data.errorMessage);
                                console.error(response);
                            });
                    }, function () {
                        // BUG FIX: AngularJS $location has no go() method —
                        // it threw a TypeError whenever the modal was
                        // dismissed; path() performs the intended redirect.
                        $location.path('/'); //can't be in controller since we need to handle ESC too
                    }
                );
            } else {
                $http.get('/api/quote/by/person/' + id)
                    // FIX: .success() is deprecated (removed in AngularJS
                    // 1.6); use the standard .then() like the POST above.
                    .then(function (response) {
                        detailService.person = response.data.person;
                        QuoteListService.quotes = response.data.quotes;
                        QuoteListService.setFetched(true);
                    });
            }
        };

        // Getters and Setters
        detailService.getPerson = function () {
            return detailService.person;
        };
        return detailService;
    }]);
|
#!/bin/sh
#echo ">>>>>>>>>>>>>>>> ENTERING COMMAND TEMPLATE <<<<<<<<<<<<<<<<<<<<<<"
# Templated job wrapper: the <% %> blocks and ${...} placeholders are
# expanded by the template engine before this script is executed, and the
# backslash-escaped \$ variables survive expansion as real shell variables.
<%if(config?.use) {%>
use ${config.use}
<%}%>
<%if(config?.modules) {%>
module load ${config.modules}
<%}%>
# Record this wrapper's PID so the job can be tracked and killed.
echo \$\$ > ${CMD_PID_FILE}
# Run the actual command file; bash -e aborts on the first failing statement.
cat ${CMD_FILENAME} | bash -e
result=\$?
# Publish the exit code atomically: write a temp file, then rename it.
echo -n \$result > $jobDir/${CMD_EXIT_FILENAME}.tmp
mv $jobDir/${CMD_EXIT_FILENAME}.tmp $jobDir/${CMD_EXIT_FILENAME}
#echo ">>>>>>>>>>>>>>>> EXITING COMMAND TEMPLATE <<<<<<<<<<<<<<<<<<<<<<"
exit \$result
const {
  GraphQLSchema,
  GraphQLObjectType,
  GraphQLString,
  GraphQLInt,
  GraphQLFloat,
  // BUG FIX: GraphQLList is used by the Query type's `products` field below
  // but was not destructured, making this module throw a ReferenceError.
  GraphQLList
} = require('graphql');
// A purchasable product: display name and unit price.
const ProductType = new GraphQLObjectType({
  name: 'Product',
  fields: () => ({
    name: { type: GraphQLString },
    price: { type: GraphQLFloat }
  })
});

// An order: numeric id and quantity.
const OrderType = new GraphQLObjectType({
  name: 'Order',
  fields: () => ({
    id: { type: GraphQLInt },
    quantity: { type: GraphQLInt }
  })
});

// Root query: `products` returns every product, `orders` looks one order up
// by id. Resolvers are still stubs.
// NOTE(review): GraphQLList must be among the names required from 'graphql'
// for the `products` field type to be defined.
const QueryType = new GraphQLObjectType({
  name: 'Query',
  fields: {
    products: {
      type: new GraphQLList(ProductType),
      resolve: (root, args, context, info) => {
        // ...
      }
    },
    orders: {
      type: OrderType,
      args: {
        id: { type: GraphQLInt }
      },
      resolve: (root, args, context, info) => {
        // ...
      }
    }
  }
});

// The executable schema exposes only the root query (no mutations here).
const schema = new GraphQLSchema({ query: QueryType });
<reponame>soheil555/fairOS-js
import {
PodClose,
PodDelete,
PodNew,
PodOpen,
PodPresent,
PodReceiveInfo,
PodReceive,
PodShare,
PodStat,
PodSync,
PodModel,
} from "../../internal";
export class UserPod extends PodModel {
  /**
   * Shows the pod info of a shared pod (identified by its sharing
   * reference) before it is received.
   */
  podReceiveInfo({ reference }: PodReceiveInfo) {
    return super.podReceiveInfo({ reference });
  }
  /**
   * Receives a pod shared by another user, identified by its sharing
   * reference. (The previous comment here wrongly described pod sharing.)
   */
  podReceive({ reference }: PodReceive) {
    return super.podReceive({ reference });
  }
  /**
   * Creates a new pod with the given name and password.
   */
  podNew({ pod_name, password }: PodNew) {
    return super.podNew({
      pod_name,
      password,
    });
  }
  /**
   * Opens a pod (name + password).
   */
  podOpen({ pod_name, password }: PodOpen) {
    return super.podOpen({
      pod_name,
      password,
    });
  }
  /**
   * Closes a pod.
   */
  podClose({ pod_name }: PodClose) {
    return super.podClose({
      pod_name,
    });
  }
  /**
   * Syncs the latest contents of the pod from Swarm.
   */
  podSync({ pod_name }: PodSync) {
    return super.podSync({
      pod_name,
    });
  }
  /**
   * Shares a pod (name + password).
   */
  podShare({ pod_name, password }: PodShare) {
    return super.podShare({
      pod_name,
      password,
    });
  }
  /**
   * Deletes a pod (name + password).
   */
  podDelete({ pod_name, password }: PodDelete) {
    return super.podDelete({
      pod_name,
      password,
    });
  }
  /**
   * Lists all pods of the current user.
   */
  podList() {
    return super.podList();
  }
  /**
   * Shows all the information about a pod.
   */
  podStat({ pod_name }: PodStat) {
    return super.podStat({
      pod_name,
    });
  }
  /**
   * Checks whether a pod with the given name exists.
   */
  podPresent({ pod_name }: PodPresent) {
    return super.podPresent({
      pod_name,
    });
  }
}
|
<reponame>community-boating/cbidb-public-web
import advWSClinic from "./advanced-ws-clinic";
import envSci from "./env-sci";
import funGames from "./fun-games";
import mainsail from "./mainsail";
import mercClinic from "./merc-clinic";
import mercFastTrack from "./merc-fast-track";
import paddleAdventure from "./paddle-adventure";
import raceTeam from "./race-team";
import robosail from "./robosail";
import sup from "./sup";
import ws from "./ws";
import wsRacingClinic from "./ws-racing-clinic";
import kayakAdventure from "./kayak-adventure";
import learnToRace from "./learn-to-race";
import waterQualityLab from "./water-quality-lab";
import testingClinic from "./testing-clinic";
// Aggregated list of all program/class definitions imported above, in the
// order they should be presented.
export default ([
	mercFastTrack,
	mainsail,
	mercClinic,
	sup,
	paddleAdventure,
	kayakAdventure,
	ws,
	advWSClinic,
	wsRacingClinic,
	envSci,
	raceTeam,
	robosail,
	funGames,
	learnToRace,
	waterQualityLab,
	testingClinic,
]);
// Copyright 2014 The StudyGolang Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// http://studygolang.com
// Author:polaris <EMAIL>
package model
import (
"logger"
"util"
)
// Favorite is a user bookmark; users can favorite articles, topics,
// resources, etc. (objtype selects the kind, objid the target row).
type Favorite struct {
	Uid     int    `json:"uid"`
	Objtype int    `json:"objtype"`
	Objid   int    `json:"objid"`
	Ctime   string `json:"ctime"`
	// embedded data-access object providing the generic CRUD helpers
	*Dao
}
// NewFavorite returns a Favorite wired to the "favorites" table.
func NewFavorite() *Favorite {
	return &Favorite{
		Dao: &Dao{tablename: "favorites"},
	}
}
// Insert stores the favorite (uid/objtype/objid) and returns the number of
// affected rows. NOTE(review): this returns RowsAffected, not the new row
// id — confirm callers expect that.
func (this *Favorite) Insert() (int64, error) {
	this.prepareInsertData()
	result, err := this.Dao.Insert()
	if err != nil {
		return 0, err
	}
	return result.RowsAffected()
}
// Find loads a single matching row into this object; selectCol optionally
// limits the selected columns.
func (this *Favorite) Find(selectCol ...string) error {
	return this.Dao.Find(this.colFieldMap(), selectCol...)
}
// FindAll loads every matching row as a Favorite; selectCol optionally
// limits the selected columns (all columns by default).
func (this *Favorite) FindAll(selectCol ...string) ([]*Favorite, error) {
	if len(selectCol) == 0 {
		selectCol = util.MapKeys(this.colFieldMap())
	}
	rows, err := this.Dao.FindAll(selectCol...)
	if err != nil {
		return nil, err
	}
	// TODO:
	favoriteList := make([]*Favorite, 0, 10)
	logger.Debugln("selectCol", selectCol) // NOTE(review): debug log left in
	colNum := len(selectCol)
	for rows.Next() {
		favorite := NewFavorite()
		err = this.Scan(rows, colNum, favorite.colFieldMap(), selectCol...)
		if err != nil {
			// Skip rows that fail to scan instead of aborting the list.
			logger.Errorln("Favorite FindAll Scan Error:", err)
			continue
		}
		favoriteList = append(favoriteList, favorite)
	}
	return favoriteList, nil
}
// Where sets the query condition; returns the receiver for call chaining.
func (this *Favorite) Where(condition string, args ...interface{}) *Favorite {
	this.Dao.Where(condition, args...)
	return this
}

// Set records an update clause; returns the receiver for call chaining.
func (this *Favorite) Set(clause string, args ...interface{}) *Favorite {
	this.Dao.Set(clause, args...)
	return this
}

// Limit sets the row limit; returns the receiver for call chaining.
func (this *Favorite) Limit(limit string) *Favorite {
	this.Dao.Limit(limit)
	return this
}

// Order sets the sort order; returns the receiver for call chaining.
func (this *Favorite) Order(order string) *Favorite {
	this.Dao.Order(order)
	return this
}
// prepareInsertData fills the Dao column/value slices used by Insert.
// ctime is omitted — presumably supplied by the database; confirm.
func (this *Favorite) prepareInsertData() {
	this.columns = []string{"uid", "objtype", "objid"}
	this.colValues = []interface{}{this.Uid, this.Objtype, this.Objid}
}
// colFieldMap maps column names to pointers at the corresponding struct
// fields, so Find/Scan can populate this object directly.
func (this *Favorite) colFieldMap() map[string]interface{} {
	return map[string]interface{}{
		"uid":     &this.Uid,
		"objtype": &this.Objtype,
		"objid":   &this.Objid,
		"ctime":   &this.Ctime,
	}
}
|
import numpy as np
def process_data(adjusted_rewards, env_infos, agent_infos):
    """Aggregate raw rollout arrays into a single summary dict.

    Args:
        adjusted_rewards (np.ndarray): reward array, passed through unchanged.
        env_infos (dict): must contain arrays under 'info1' and 'info2' with
            at least 3 dimensions (statistics are taken over axis 2).
        agent_infos (dict): must contain 'agent_info1' (squared elementwise)
            and an at-least-3-D 'agent_info2' (mean over axis 2).

    Returns:
        dict: keys 'adjusted_rewards', 'env_info1_mean', 'env_info2_sum',
        'agent_info1_squared' and 'agent_info2_mean'.

    Raises:
        AssertionError: if any argument has the wrong type.
    """
    # FIX: the original repeated these three checks five times verbatim;
    # once is sufficient. isinstance also accepts ndarray/dict subclasses,
    # a strict superset of the old type(...) == ... checks.
    assert isinstance(adjusted_rewards, np.ndarray), 'adjusted_rewards must be a numpy array'
    assert isinstance(env_infos, dict), 'env_infos must be a dictionary'
    assert isinstance(agent_infos, dict), 'agent_infos must be a dictionary'

    processed_data = {
        'adjusted_rewards': adjusted_rewards,
        'env_info1_mean': np.mean(env_infos['info1'], axis=2),
        'env_info2_sum': np.sum(env_infos['info2'], axis=2),
        'agent_info1_squared': np.square(agent_infos['agent_info1']),
        'agent_info2_mean': np.mean(agent_infos['agent_info2'], axis=2),
    }
    return processed_data
<reponame>Origen-SDK/ruby-iar
module RubyIAR
  class Driver
    # Models an IAR Embedded Workbench workspace (.eww file), backed by a
    # Nokogiri XML document. Can wrap an existing workspace on disk or an
    # in-memory model built by {.new_workspace}.
    class Workspace
      include ProjectManager

      # Create a new workspace object, but does not write it to the disk.
      # This creates an internal representation only.
      def self.new_workspace(ws, dir: nil, projects: [], write: false, overwrite: false)
        n_new_ws = Nokogiri::XML::Document.new
        n_new_ws << Nokogiri::XML::Element.new('workspace', n_new_ws)
        n_new_ws.at_xpath('workspace') << Nokogiri::XML::Element.new('batchBuild', n_new_ws)

        # Normalize to an absolute path that ends in .eww.
        ws = Pathname(ws).expand_path
        ws = ws.dirname.join(Pathname(ws.basename.to_s + (ws.extname.to_s == '.eww' ? '' : '.eww')))

        if ws.exist?
          # FIXME: a workspace already exists at this location; decide whether
          # to error out, or honor an overwrite: true option.
        end

        ws = Workspace.new(ws, model_only: n_new_ws)
        ws.add_projects(projects) unless projects.empty?
        ws
      end
      singleton_class.send(:alias_method, :create_workspace, :new_workspace)

      @project_xpath = '//workspace/project'
      @ws_dir_token = '$WS_DIR$'

      attr_reader :noko
      attr_reader :_projects_
      attr_reader :driver

      # @param eww_file [String, Pathname] path to the .eww file
      # @param driver [Driver, nil] owning driver, if any
      # @param options [Hash] pass model_only: <Nokogiri::XML::Document> to
      #   wrap an in-memory workspace that has no file on disk yet.
      def initialize(eww_file, driver: nil, **options)
        if options[:model_only]
          # This .eww doesn't actually exist; we were handed an in-memory model.
          @noko = options[:model_only]
          @workspace_directory = eww_file.dirname
        elsif !File.exist?(eww_file)
          # FIX: File.exists? was removed in Ruby 3.2; use File.exist?.
          fail("RubyIAR: Workspace: Could not find given .eww #{eww_file} - Unable to initialize Workspace!")
        else
          @workspace_directory = File.dirname(eww_file)
          @noko = Nokogiri::XML(File.open(eww_file)) do |n|
            n.strict.noblanks
          end
        end
        @eww_file = Pathname(eww_file)
        @name = @eww_file.basename('.eww')
        @_projects_ = {}
        projects_with_format(:absolute_pathnames).each do |path|
          proj = RubyIAR::Driver::Project.new(path, workspace: self)
          @_projects_[proj.name] = proj
        end
        @driver = driver
      end

      def workspace_directory
        @workspace_directory
      end
      alias_method :workspace_dir, :workspace_directory
      alias_method :ws_dir, :workspace_directory
      alias_method :dir, :workspace_directory
      alias_method :directory, :workspace_directory

      # Normalize a project path for storage in the .eww: ensure a .ewp
      # extension and anchor the path at the $WS_DIR$ token.
      def resolve_path(path)
        path = Pathname(path)
        path = Pathname.new(path.to_s + '.ewp') unless path.extname == '.ewp'
        if path.relative?
          Pathname('$WS_DIR$').join(path)
        else
          # FIX: this branch was `elsif` with no condition, which consumed the
          # expression below as the condition and returned nil for absolute
          # paths. Replace the absolute workspace prefix with the token.
          # E.g.: C:\my_projects\project1\p1.ewp #=> $WS_DIR$\p1.ewp
          Pathname(path.to_s.gsub(workspace_dir.to_s, '$WS_DIR$'))
        end
      end

      # Add one or more projects to the workspace model. Accepts Strings,
      # Symbols, Pathnames, or an Array of any of the former.
      def add_project(*projects)
        # FIX: previously a nested `def add(p, options={})` was used and then
        # called with an undefined local `options` (NameError at runtime);
        # use a lambda closing over self instead.
        add = lambda do |p|
          p = Pathname.new(p)
          # Store projects relative to the workspace directory, even when an
          # absolute path is given, so the .eww is portable across users.
          n_new_proj = noko.at_xpath('//workspace').new_node('project')
          n_new_proj << n_new_proj.new_node('path', content: resolve_path(p))
          noko.at_xpath('//workspace') << n_new_proj
          p
        end
        projects.flatten.collect { |p| add.call(p) }
      end

      # Same as {#add_project}, then writes the .eww to disk.
      def add_project!(*projects, write_options: nil, **options)
        add_project(*projects)
        # FIX: write_eww takes no arguments; passing write_options raised.
        write_eww
      end

      def copy_project(**options)
        fail "Not implemented yet!"
      end

      # Copies the project, per {#copy_project}, and writes the .eww, per {#write_eww}.
      # @note This will update <u>ALL</u> changes in the Nokogiri model. This is not isolated to just the copy project operation.
      def copy_project!(write_options: nil, **options)
        copy_project(**options)
        write_eww
      end

      def remove_project(**options)
        fail "Not implemented yet!"
      end

      def remove_project!(write_options: nil, **options)
        remove_project(**options)
        write_eww
      end

      # List all the current project names, or look up one project by name.
      # @return [Array, Project]
      def projects(proj = nil, options = {})
        if proj.is_a?(Hash)
          # FIX: when only an options hash was passed, the hash previously was
          # also treated as a project name; clear proj in that case.
          options = proj
          proj = nil
        end
        if proj
          # Allow proj to be either a Symbol or String
          _projects_[proj.to_s]
        else
          _projects_.keys
        end
      end
      alias_method :projs, :projects

      # Return the workspace's project paths in the requested format.
      def projects_with_format(format)
        case format
        when :absolute_pathnames
          # FIX: workspace_directory may be a Pathname (model_only case) and
          # String#gsub requires a String replacement; call .to_s.
          n_project_nodes.map { |proj| Pathname.new(proj.text.gsub('$WS_DIR$', workspace_directory.to_s)) }
        when :absolute_paths
          # Returns the projects as absolute paths, represented by Strings
          n_project_nodes.map { |proj| Pathname.new(proj.text.gsub('$WS_DIR$', workspace_directory.to_s)).to_s }
        when :raw
          # Returns the projects as the raw input data from the .eww.
          n_project_nodes.map { |proj| proj.to_s }
        else
          fail "Unknown format value: #{format}"
        end
      end

      # Writes the .eww file from the current Nokogiri model.
      # @return [Pathname] the path that was written
      def write
        File.open(@eww_file, 'w') do |f|
          f.puts(@noko)
        end
        @eww_file
      end
      alias_method :write_eww, :write

      def refresh_eww
        fail
      end

      # Forces a refresh of Nokogiri's model of the .eww, throwing away any user changes.
      def refresh_eww!
        fail
      end

      ###

      # Grabs all of the project <path> text nodes.
      # @return [Nokogiri::XML::NodeSet]
      def n_project_nodes
        noko.xpath('//workspace/project/path').children
      end

      def n_project_node(n)
        fail "Not implemented yet!"
      end
    end
  end
end
|
# Run the generator container interactively, mounting the GiantSteps key
# dataset audio as input and ./generated as the output directory.
docker run -it -v "$(pwd)"/giantsteps-key-dataset/audio:/usr/src/app/audio/ -v "$(pwd)"/generated:/usr/src/app/generated/ generator
|
#!/bin/bash
set -e

# Mirror the Jekyll site directories into ./Content, deleting stale files
# at the destination (--delete).
# FIX: quote "$jekyll_dir" so the expansion is a single rsync argument.
for jekyll_dir in _assets _includes _layouts _plugins fonts images web.config; do
    rsync -a --delete "$jekyll_dir" ./Content
done
|
package com.leetcode;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
public class Solution_447Test {
@Test
public void testNumberOfBoomerangs() {
Solution_447 solution_447 = new Solution_447();
int[][] ints = new int[3][2];
ints[0] = new int[]{0,0};
ints[1] = new int[]{1,0};
ints[2] = new int[]{2,0};
System.out.println(solution_447.numberOfBoomerangs(ints));
}
} |
#!/bin/bash
#
# This script is for running server up in dev environment.
#
CC='\033[01;36m'
WC='\033[01;37m'
NC='\033[0m'

SHELL_PATH="$(cd "$(dirname "$0")"; pwd -P)"
PARENT_PATH="$(dirname "$SHELL_PATH")"

source "$SHELL_PATH/util.sh"

MOUNT_VOLUME=$1
# FIX: quote "$1" so the emptiness test stays well-formed for values with
# whitespace; also fixed the "monuting"/"mountuing" typos in the messages.
if [ -z "$1" ]; then
    coloredEcho white "Specify a path for mounting volume (e.g. $CC$(pwd))$NC"
    read -p "Path: " MOUNT_VOLUME
else
    coloredEcho white "You've specified path as mounting volume: $MOUNT_VOLUME"
fi

coloredEcho white 'Run server up for dev environment'
docker run -d \
    -p 8111:8111 \
    -p 8888:8888 \
    -e MATERIA_LOCALDEV=true \
    -e MATERIA_DB_HOST=docker.for.mac.localhost \
    -e MATERIA_DB_PORT='27017' \
    -e MATERIA_DB_DATABASE='matters-dev' \
    -e MATERIA_DOMAIN='http://localhost:8888' \
    -e MATERIA_SENDGRID_KEY='' \
    -v "$MOUNT_VOLUME"/:/home/ubuntu/materia \
    --hostname server \
    --name server \
    matters-server:dev
|
#!/bin/bash
##
## Deprecated: use install.rkt instead
## On the other hand, install.rkt doesn't yet install c3 support.
##

# echo commands as they're executed
set -x
# exit if non-zero exit code
set -e

IPY_LOC=$(ipython locate)
IRACKET_SRC_DIR=$(pwd)
RACKET_KERNEL_DIR=${IPY_LOC}/kernels/racket
mkdir -p "${RACKET_KERNEL_DIR}"
sed "s#IRACKET_SRC_DIR#$IRACKET_SRC_DIR#g" < static/kernel.json > "${RACKET_KERNEL_DIR}/kernel.json"
echo "Kernel json file copied to $RACKET_KERNEL_DIR/kernel.json"

# FIX: quote the prompt — the unquoted 'C3?' and '[Y/n]' are glob patterns
# and could expand against matching files in the current directory.
echo -n "Download and Install C3? [Y/n] "
read ANS
# "y", "Y" or just enter means yes. everything else means no
case "z$ANS" in
    z[yY][eE][sS]|z[Yy]|z)
        # "Yes", "y" or just enter means yes.
        # NOTE(review): IPYTHON_PROFILE is never set, so this resolves the
        # default profile — confirm that is intended.
        IPYTHON_PROFILE_STATIC=$(ipython locate profile ${IPYTHON_PROFILE})/static
        cp ./static/ic3.js "${IPY_LOC}/nbextensions/ic3.js"
        cat ./static/custom.js >> "$IPYTHON_PROFILE_STATIC/custom/custom.js"
        curl -L https://github.com/mbostock/d3/raw/v3.5.5/d3.min.js > "$IPYTHON_PROFILE_STATIC/d3.js"
        curl -L https://github.com/masayuki0812/c3/raw/0.4.10/c3.min.js > "$IPYTHON_PROFILE_STATIC/c3.js"
        curl -L https://github.com/masayuki0812/c3/raw/0.4.10/c3.min.css > "$IPYTHON_PROFILE_STATIC/c3.css"
        echo "Installed C3 into $IPYTHON_PROFILE_STATIC and customized $IPYTHON_PROFILE_STATIC/custom/custom.js"
        ;;
    *)
        # everything else means no
        true
        ;;
esac
echo Installation Succeeded.
|
# Changes the bills.total column to a float.
#
# NOTE(review): change_column is not automatically reversible, so rolling this
# migration back will raise. If rollback support is needed, rewrite with
# up/down (the previous column type is not visible here — confirm it first).
class ChangeColumn < ActiveRecord::Migration[6.0]
  def change
    change_column :bills, :total, :float
  end
end
|
package snippets;

import static org.assertj.core.api.Assertions.assertThat;

import java.util.List;

import org.junit.Test;

import io.smallrye.mutiny.Multi;
import io.smallrye.mutiny.Uni;

/**
 * Documentation snippets contrasting Rx-style operator names
 * (map/flatMap/concatMap) with the equivalent Mutiny API
 * (onItem().apply / produceUni / produceMulti).
 *
 * NOTE: the tag::...[] / end::...[] comments delimit regions included in the
 * published docs — keep them (and the code between them) intact.
 */
public class FlatMapTest {

    @Test
    public void rx() {
        Multi<Integer> multi = Multi.createFrom().range(1, 3);
        Uni<Integer> uni = Uni.createFrom().item(1);
        // tag::rx[]
        int result = uni
                .map(i -> i + 1)
                .await().indefinitely();

        int result2 = uni
                .flatMap(i -> Uni.createFrom().item(i + 1))
                .await().indefinitely();

        List<Integer> list = multi
                .map(i -> i + 1)
                .collectItems().asList()
                .await().indefinitely();

        List<Integer> list2 = multi
                .flatMap(i -> Multi.createFrom().items(i, i))
                .collectItems().asList()
                .await().indefinitely();

        List<Integer> list3 = multi
                .concatMap(i -> Multi.createFrom().items(i, i))
                .collectItems().asList()
                .await().indefinitely();
        // end::rx[]
        assertThat(result).isEqualTo(2);
        assertThat(result2).isEqualTo(2);
        assertThat(list).containsExactly(2, 3);
        assertThat(list2).containsExactly(1, 1, 2, 2);
        assertThat(list3).containsExactly(1, 1, 2, 2);
    }

    @Test
    public void mutiny() {
        Multi<Integer> multi = Multi.createFrom().range(1, 3);
        Uni<Integer> uni = Uni.createFrom().item(1);
        // tag::mutiny[]
        int result = uni
                .onItem().apply(i -> i + 1)
                .await().indefinitely();

        int result2 = uni
                .onItem().produceUni(i -> Uni.createFrom().item(i + 1))
                .await().indefinitely();

        List<Integer> list = multi
                .onItem().apply(i -> i + 1)
                .collectItems().asList()
                .await().indefinitely();

        List<Integer> list2 = multi
                .onItem().produceMulti(i -> Multi.createFrom().items(i, i)).merge()
                .collectItems().asList()
                .await().indefinitely();

        List<Integer> list3 = multi
                .onItem().produceMulti(i -> Multi.createFrom().items(i, i)).concatenate()
                .collectItems().asList()
                .await().indefinitely();
        // end::mutiny[]
        assertThat(result).isEqualTo(2);
        assertThat(result2).isEqualTo(2);
        assertThat(list).containsExactly(2, 3);
        assertThat(list2).containsExactly(1, 1, 2, 2);
        assertThat(list3).containsExactly(1, 1, 2, 2);
    }
}
|
<reponame>LitenApe/CookedHam
import { Meta, Story } from '@storybook/react';
import { createRef } from 'react';

import { Descendant as Component, useDescendant } from '.';
import { useMount } from '../../utils/hooks/useMount';

export default {
  title: 'Atom/Descendant',
  component: Component,
} as Meta;

// Registers itself with the surrounding Descendant context on mount and
// renders the index the context assigned. `pos` is the expected DOM order,
// carried on a data attribute for manual inspection in the story.
function TestComponent({ pos }: { pos: number }) {
  const ref = createRef<HTMLParagraphElement>();
  const { index, register } = useDescendant();

  useMount(() => {
    register(ref);
  });

  return (
    <p ref={ref} data-id={pos}>
      Position of element: {index}
    </p>
  );
}

// Nest the test components at varying depths to demonstrate that the
// assigned index follows DOM order rather than nesting depth.
const Template: Story = (args) => (
  <Component {...args}>
    <TestComponent pos={0} />
    <div style={{ padding: 5 }}>
      <TestComponent pos={1} />
    </div>
    <div style={{ padding: 10 }}>
      <div>
        <TestComponent pos={2} />
      </div>
    </div>
    <div style={{ padding: 20 }}>
      <div>
        <div>
          <TestComponent pos={3} />
        </div>
      </div>
    </div>
    <TestComponent pos={4} />
  </Component>
);

export const Descendant = Template.bind({});
Descendant.args = { ...Descendant.args };
|
<reponame>ideacrew/pa_edidb<filename>app/models/canonical_vocabulary/renewals/policy_builder.rb
module CanonicalVocabulary
  module Renewals
    # Builds a lookup of plan details for a family's enrolled policies by
    # fetching renewal data from the renewal_policies API endpoint.
    class PolicyBuilder
      def initialize(family)
        @family = family
        generate_policy_details
      end

      # Returns the plan details (an OpenStruct) for the first policy whose
      # coverage type matches +coverage+, or nil when none matches.
      def current_insurance_plan(coverage)
        current_plan = @policy_details.detect { |_id, policy| policy.coverage_type == coverage }
        current_plan.nil? ? nil : current_plan[1]
      end

      # Fetches renewal XML for all enrolled policies and indexes the parsed
      # plan details by current policy id into @policy_details.
      def generate_policy_details
        policy_ids = @family.policies_enrolled
        # NOTE(review): hard-coded localhost URL and user_token should come
        # from configuration.
        renewals_xml = Net::HTTP.get(URI.parse("http://localhost:3000/api/v1/renewal_policies?ids[]=#{policy_ids.join("&ids[]=")}&user_token=zUzBsoTSKPbvXCQsB4Ky"))
        # FIX: the response was parsed from an undefined variable
        # (policies_xml); parse the fetched renewals_xml instead.
        renewals = Nokogiri::XML(renewals_xml).root.xpath("n1:renewal_policy")
        @policy_details = renewals.inject({}) do |policy_details, renewal_xml|
          renewal = Parsers::Xml::Cv::Renewal.parse(renewal_xml)
          current_plan = renewal.current_policy.enrollment.plan
          policy_details[renewal.current_policy.id] = OpenStruct.new({
            :plan_name => current_plan.name,
            :coverage_type => current_plan.coverage_type.split('#')[1],
            :future_plan_name => renewal.renewal_policy.enrollment.plan.name,
            :quoted_premium => renewal.renewal_policy.enrollment.premium_amount_total
          })
          policy_details
        end
      end
    end
  end
end
// Employees older than 25 earning more than 3000, in method syntax.
// Equivalent to the original query expression: its trivial `select employee`
// is degenerate, so the query compiles to a plain Where call.
var query = Employees.Where(employee => employee.Age > 25
                                        && employee.Salary > 3000);
#!/bin/bash
set -Eeo pipefail

source "$YAK_BUILDTOOLS/all.sh"

# Pull in glibc and mpfr for the target architecture from the yak distro
# (`dep` is provided by the sourced build tools).
dep --arch="$YAK_TARGET_ARCH" --distro=yak glibc
dep --arch="$YAK_TARGET_ARCH" --distro=yak mpfr
|
import ads
import igraph
from configparser import ConfigParser
from sqlalchemy import *
from sqlalchemy.sql import select, and_
from ads.exceptions import *
from difflib import SequenceMatcher
class CitationNetwork(object):
    """Builds and maintains a citation network stored in a local database.

    Nodes are ADS records; edges are citation/reference links or
    semantic-similarity links (co-citation, bibliographic coupling).
    """

    def __init__(self, filename):
        self.db = Database(filename)
        self.nodelist = self.db.get_all_nodes()
        self.edgelist = self.db.get_all_edges()
        # List of bibcodes for judgement sampling
        self.bibcode_list = []
        # Year interval and scope for snowball sampling
        self.snowball = {'start': '', 'end': '', 'scope': ''}

    def parse_config(self, config_file):
        """
        Parse the configuration file and set object parameters according to the parsed values.
        :param config_file: path to configuration file
        :return: None
        """
        config = ConfigParser()
        config.read_file(open(config_file))
        # Assign API Key from configuration file
        ads.config.token = config['ads_api']['APIKey']
        # Assign default interval years for snowball sampling from configuration file
        self.snowball['start'] = int(config['snowball_default_interval']['StartYear'])
        self.snowball['end'] = int(config['snowball_default_interval']['EndYear'])

    def sample(self, sampling_method):
        """
        Initialize node sampling and write sample to database.
        :param sampling_method: 'snowball' or 'judgement'
        :return: None
        """
        if sampling_method == 'snowball':
            queried_nodes = self.smp_snowball(self.nodelist,
                                              self.snowball['start'],
                                              self.snowball['end'],
                                              self.snowball['scope'])
        elif sampling_method == 'judgement':
            queried_nodes = [self.getby_bibcode(bc) for bc in self.bibcode_list]
        else:
            raise ValueError('Parameter sampling_method is invalid')
        print('Writing nodes to database file...')
        self.db.write_nodes(queried_nodes)
        print('Finished writing nodes.')

    @staticmethod
    def author_is_same(node1, node2):
        """
        Check if first author is the same in two nodes by doing a fuzzy
        string comparison using the Ratcliff/Obershelp algorithm.
        :return: boolean
        """
        # Get name of first authors
        name1 = node1.author.split('; ')[0]
        name2 = node2.author.split('; ')[0]
        # Split in first and last name
        name1_split = name1.split(', ')
        name2_split = name2.split(', ')
        # First initial of the given name; empty when no given name is present.
        try:
            name1_init = name1_split[1].split(' ')[0][0]
        except IndexError:
            name1_init = ''
        try:
            name2_init = name2_split[1].split(' ')[0][0]
        except IndexError:
            name2_init = ''
        # Average the similarity of the first initials and the last names.
        score = (SequenceMatcher(None, name1_init, name2_init).ratio() +
                 SequenceMatcher(None, name1_split[0], name2_split[0]).ratio()) / 2.0
        if score > 0.80:
            return True
        else:
            return False

    @staticmethod
    def getby_bibcode(bibcode):
        """
        Query an ADS item by bibcode, retrying up to five times on errors.
        :param bibcode: bibcode (ADS unique identifier)
        :return: queried item as Node object, or None when unavailable
        """
        for i in range(5):
            try:
                query = ads.SearchQuery(bibcode=bibcode,
                                        fl=['author', 'year', 'title',
                                            'bibcode', 'reference', 'citation'])
                for item in query:
                    new_node = Node(item)
                    if new_node is not None:
                        return new_node
                    else:
                        print('Couldn\'t make node for bibcode {}'.format(bibcode))
            except (IndexError, APIResponseError):
                print('Error occured while querying ADS. Retrying...')
                continue

    @staticmethod
    def smp_snowball(nodelist, start_year, end_year, scope):
        """
        Extend existing network by sampling through its citation and/or reference columns
        and selecting new items if they fit into the given time frame.
        :param nodelist: list of nodes from local database
        :param start_year: starting year of time frame as integer
        :param end_year: end year of time frame as integer
        :param scope: 'cit' (only citation), 'ref' (only reference), 'citref' (both citation and reference)
        :return: list of selected items as Node objects
        """
        # FIX: the three scope branches previously triplicated the same
        # query-with-retry loop that getby_bibcode already implements; select
        # which link columns to follow and reuse the helper instead.
        if scope == 'cit':
            fields = ('citation',)
        elif scope == 'ref':
            fields = ('reference',)
        elif scope == 'citref':
            fields = ('citation', 'reference')
        else:
            # FIX: an unknown scope previously returned an empty list silently.
            raise ValueError('Unknown scope value: {}'.format(scope))

        node_accumulator = []
        print('Initializing sampler...')
        for node in nodelist:
            for field in fields:
                links = getattr(node, field)
                if links is None:
                    continue
                for bc in links.split('; '):
                    print('Checking bibcode {}'.format(bc))
                    # ADS bibcodes start with the four-digit publication year.
                    if start_year < int(bc[0:4]) < end_year:
                        print('Querying ADS...')
                        queried = CitationNetwork.getby_bibcode(bc)
                        if queried is not None:
                            node_accumulator.append(queried)
                            print('Node added to accumulator.')
                    else:
                        print('Bibcode not in year interval. Skipping...')
        print('Returning accumulator with {} items...'.format(len(node_accumulator)))
        return node_accumulator

    def make_regular_edges(self, direction):
        """
        Generate regular, directed edges, either forwards (from cited to citing node)
        or backwards (from citing to cited node) and write edges to database.
        Self-citations (same first author) are skipped.
        :param direction: fwd (forwards), bwd (backwards)
        :return: None
        """
        for node in self.nodelist:
            if node.citation is not None:
                for citation_bc in node.citation.split('; '):
                    if self.db.get_node(citation_bc) is not None:
                        if not self.author_is_same(self.db.get_node(citation_bc), node):
                            citation_id = self.db.get_node(citation_bc).id
                            if direction == 'fwd':
                                self.db.write_edge((node.id, citation_id, None))
                            elif direction == 'bwd':
                                self.db.write_edge((citation_id, node.id, None))
                            else:
                                raise ValueError("Direction type not valid.")
            if node.reference is not None:
                for reference_bc in node.reference.split('; '):
                    if self.db.get_node(reference_bc) is not None:
                        if not self.author_is_same(self.db.get_node(reference_bc), node):
                            reference_id = self.db.get_node(reference_bc).id
                            if direction == 'fwd':
                                self.db.write_edge((reference_id, node.id, None))
                            elif direction == 'bwd':
                                self.db.write_edge((node.id, reference_id, None))
                            else:
                                raise ValueError("Direction type not valid.")

    def make_semsim_edges(self, measure):
        """
        Generate edges by semantic similarity (co-citation or bibliographic coupling)
        and write them to database. Rebuilds the edge table from backward
        citation edges first, then replaces it with the weighted similarity edges.
        :param measure: 'cocit' (co-citation) or 'bibcp' (bibliographic coupling)
        :return: None
        """
        g = igraph.Graph(directed=True)
        g.add_vertices(len(self.nodelist) + 1)
        self.db.del_table_content('edges')
        self.make_regular_edges('bwd')
        edges = self.db.get_all_edges()
        g.add_edges([(edge[1], edge[2]) for edge in edges])
        new_edges = []
        if measure == 'cocit':
            matrix = g.cocitation()
        elif measure == 'bibcp':
            matrix = g.bibcoupling()
        else:
            raise ValueError("Measure type not valid.")
        # Every positive similarity count becomes a weighted edge.
        for i1, v1 in enumerate(matrix):
            for i2, v2 in enumerate(matrix):
                if matrix[i1][i2] > 0:
                    index = matrix[i1][i2]
                    if (i1, i2, index) not in new_edges:
                        new_edges.append((i1, i2, index))
        self.db.del_table_content('edges')
        self.db.write_edges(new_edges)

    def assign_modularity(self):
        """Cluster the network with Infomap and store each node's cluster id."""
        g = igraph.Graph(directed=True)
        g.add_vertices(len(self.nodelist) + 1)
        edges = self.db.get_all_edges()
        g.add_edges([(edge[1], edge[2]) for edge in edges])
        modularity = [i for i in g.community_infomap(trials=1)]
        for node in self.nodelist:
            for cluster_id in modularity:
                for node_id in cluster_id:
                    if node.id == node_id:
                        self.db.set_node_cluster(int(node_id), int(modularity.index(cluster_id)))
class Node(object):
    """Lightweight record wrapping a single ADS search result.

    Stores author/title as '; '-joined strings; citation and reference are
    kept exactly as returned by ads. cluster_id is assigned later by
    CitationNetwork.assign_modularity.
    """

    def __init__(self, ads_obj):
        self.bibcode = ads_obj.bibcode
        try:
            self.author = "; ".join(ads_obj.author)
        except TypeError:
            # author may be None (or another non-iterable); keep its str form.
            self.author = str(ads_obj.author)
        if ads_obj.title is not None:
            self.title = "; ".join(ads_obj.title)
        else:
            self.title = None
        self.year = ads_obj.year
        self.citation = ads_obj.citation
        self.reference = ads_obj.reference
        self.cluster_id = None

    def get_bibcode(self):
        return self.bibcode

    def get_author(self):
        return self.author

    def get_title(self):
        return self.title

    def get_year(self):
        return self.year

    def get_citation(self):
        return self.citation

    def get_reference(self):
        return self.reference

    def get_cluster_id(self):
        return self.cluster_id

    def __repr__(self):
        # FIX: the format string was missing its closing parenthesis.
        return "Node(bibcode={}, author={}, title={}, year={})" \
            .format(self.bibcode, self.author, self.title, self.year)
class Database(object):
    """SQLite-backed storage for nodes and edges, using SQLAlchemy Core."""

    def __init__(self, filename):
        self.engine = create_engine('sqlite:///{}'.format(filename))
        self.conn = self.engine.connect()
        self.metadata = MetaData()
        # FIX: the 'cluster_id' column was declared twice; one copy removed.
        self.nodes = Table('nodes', self.metadata,
                           Column('id', Integer, primary_key=True), Column('bibcode', String(20)),
                           Column('author', String(255)), Column('title', String(255)),
                           Column('start', Float), Column('end', Float), Column('citation', String(3000)),
                           Column('reference', String(3000)), Column('type', String(2)),
                           Column('ordervar', Float), Column('cluster_id', Integer))
        self.edges = Table('edges', self.metadata,
                           Column('id', Integer, primary_key=True),
                           Column('source', Integer),
                           Column('target', Integer),
                           Column('weight', Integer))
        self.metadata.create_all(self.engine)

    def get_all_nodes(self):
        return [node for node in self.conn.execute(select([self.nodes]))]

    def get_all_edges(self):
        return [edge for edge in self.conn.execute(select([self.edges]))]

    def get_node(self, bibcode):
        """Return the node row for a bibcode, or None when absent."""
        node_query = self.conn.execute(select([self.nodes]).where(self.nodes.c.bibcode == bibcode))
        return node_query.fetchone()

    def set_node_cluster(self, node_id, value):
        """Set the cluster_id of the node with the given primary key."""
        stmt = self.nodes.update().where(self.nodes.c.id == node_id).values(cluster_id=value)
        self.conn.execute(stmt)

    def is_in_db(self, item_type, item):
        """
        Check if a specific item is already in the database. Search for nodes by bibcode.
        :param item_type: node, edge or bibcode
        :param item: if node, Node object; if edge, tuple; if bibcode, string
        :return: boolean
        """
        assert item_type in ['node', 'edge', 'bibcode']
        if item_type == 'node':
            node_query = self.conn.execute(select([self.nodes]).where(self.nodes.c.bibcode == item.bibcode))
            return node_query.fetchone() is not None
        elif item_type == 'edge':
            edge_query = self.conn.execute(select([self.edges]).where(and_(self.edges.c.source == item[0],
                                                                           self.edges.c.target == item[1])))
            return edge_query.fetchone() is not None
        elif item_type == 'bibcode':
            node_query = self.conn.execute(select([self.nodes]).where(self.nodes.c.bibcode == item))
            return node_query.fetchone() is not None

    def del_table_content(self, table_name):
        """Delete all rows from the 'nodes' or 'edges' table."""
        if table_name == 'nodes':
            self.conn.execute(text('DELETE FROM nodes'))
        elif table_name == 'edges':
            self.conn.execute(text('DELETE FROM edges'))
        else:
            raise ValueError("Parameter table_name not valid.")

    @staticmethod
    def _insert_values(node):
        """Build the column/value dict for inserting a Node.

        Shared by write_node and write_nodes (FIX: previously duplicated).
        """
        year = node.get_year()
        if node.get_citation() is not None:
            citation = '; '.join(node.get_citation())
        else:
            citation = None
        if node.get_reference() is not None:
            reference = '; '.join(node.get_reference())
        else:
            reference = None
        return dict(bibcode=node.get_bibcode(),
                    author=node.get_author(),
                    title=node.get_title(),
                    start=year, end=year,
                    # Order variable: publication year scaled into [0, 1]-ish.
                    ordervar=(int(year) - 1900) / 100,
                    citation=citation,
                    reference=reference,
                    cluster_id=node.get_cluster_id())

    def write_nodes(self, node_list):
        """Insert every Node in node_list that is not already stored.

        Non-Node entries (e.g. None from failed queries) are skipped.
        """
        assert type(node_list) is list, 'Parameter node_list is not a list.'
        for node in node_list:
            if not isinstance(node, Node):
                continue
            if self.is_in_db('node', node):
                print("Item with bibcode '{}' already in database!".format(node.get_bibcode()))
                continue
            self.conn.execute(self.nodes.insert().values(**self._insert_values(node)))

    def write_node(self, node):
        """Insert a single Node; return True on insert, False when present."""
        if self.is_in_db('node', node):
            print("Item with bibcode '{}' already in database!".format(node.get_bibcode()))
            return False
        self.conn.execute(self.nodes.insert().values(**self._insert_values(node)))
        print("Item with bibcode '{}' added to database!".format(node.get_bibcode()))
        return True

    def write_edges(self, edgelist):
        """Insert every (source, target, weight) tuple not already stored."""
        assert type(edgelist) is list, 'Parameter edgelist is not a list.'
        for edge in edgelist:
            if not self.is_in_db('edge', edge):
                insertion = self.edges.insert().values(source=edge[0], target=edge[1], weight=edge[2])
                self.conn.execute(insertion)

    def write_edge(self, edge):
        """Insert a single (source, target, weight) tuple if not stored."""
        assert type(edge) is tuple, 'Parameter edge is not a tuple'
        if not self.is_in_db('edge', edge):
            insertion = self.edges.insert().values(source=edge[0], target=edge[1], weight=edge[2])
            self.conn.execute(insertion)
|
<filename>tools/tasks/project/copy2wp.ts
import * as gulp from 'gulp';
import {join} from 'path';
import {APP_DEST, WP_DIR} from '../../config';

// Gulp task: copy every built file from the app output directory (APP_DEST)
// into the WordPress directory (WP_DIR).
export = () => {
  return gulp.src(join(APP_DEST, '**/*.*'))
    .pipe(gulp.dest(WP_DIR));
};
|
<gh_stars>0
package cn.focus.eco.house.zipkin.brave.mongo;

import brave.Span;
import brave.Tracer;
import brave.Tracing;
import com.mongodb.ServerAddress;
import com.mongodb.connection.ConnectionId;
import com.mongodb.event.CommandFailedEvent;
import com.mongodb.event.CommandListener;
import com.mongodb.event.CommandStartedEvent;
import com.mongodb.event.CommandSucceededEvent;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import zipkin.Endpoint;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Copyright (C) 1998 - 2017 SOHU Inc., All Rights Reserved.
 * <p>
 * MongoDB {@link CommandListener} that opens a Zipkin client span when a
 * command starts and finishes it on success or failure. Open spans are kept
 * in a map keyed by the command's connection id.
 *
 * @Author: leijunhan (<EMAIL>)
 * @Date: 2017/11/30
 */
public class MongoTracingListener implements CommandListener {

    private static final Logger logger = Logger.getLogger(MongoTracingListener.class);

    private Tracing tracing;

    // Open spans keyed by connection id so the succeeded/failed callbacks
    // can locate and finish them.
    private Map<ConnectionId, Span> spanCache;

    @Autowired
    MongoTracingListener(Tracing tracing) {
        this.tracing = tracing;
        // FIX: use the diamond operator instead of a raw ConcurrentHashMap.
        this.spanCache = new ConcurrentHashMap<>();
    }

    public static CommandListener create(Tracing tracing) {
        return new MongoTracingListener(tracing);
    }

    @Override
    public void commandStarted(CommandStartedEvent event) {
        Tracer tracer = this.tracing.tracer();
        Span span = tracer.nextSpan();
        if (!span.isNoop()) {
            span.kind(Span.Kind.CLIENT).name(event.getCommandName());
            span.tag("mongo.query", event.getCommand().toJson());
            ServerAddress serverAddress = event.getConnectionDescription().getServerAddress();
            Endpoint.Builder builder = Endpoint.builder().serviceName("mongo-" + event.getDatabaseName()).port(serverAddress.getPort());
            builder.parseIp(serverAddress.getHost());
            span.remoteEndpoint(builder.build());
            span.start();
        }
        this.spanCache.put(event.getConnectionDescription().getConnectionId(), span);
    }

    @Override
    public void commandSucceeded(CommandSucceededEvent event) {
        ConnectionId connectionId = event.getConnectionDescription().getConnectionId();
        // FIX: remove() returns the mapping atomically (was a non-atomic
        // get() followed by remove(), plus a redundant cast on a typed map).
        Span span = this.spanCache.remove(connectionId);
        if (span == null) {
            String msg = String.format("Successfully executed command '%s' with id %s on connection '%s' to server '%s'", event.getCommandName(), Integer.valueOf(event.getRequestId()), event.getConnectionDescription().getConnectionId(), event.getConnectionDescription().getServerAddress());
            logger.info("span is null, ignore tracing. " + msg);
        } else {
            span.finish();
        }
    }

    @Override
    public void commandFailed(CommandFailedEvent event) {
        ConnectionId connectionId = event.getConnectionDescription().getConnectionId();
        // FIX: same atomic remove() as in commandSucceeded.
        Span span = this.spanCache.remove(connectionId);
        if (span == null) {
            String msg = String.format("Failed execution of command '%s' with id %s on connection '%s' to server '%s' with exception '%s'", event.getCommandName(), Integer.valueOf(event.getRequestId()), event.getConnectionDescription().getConnectionId(), event.getConnectionDescription().getServerAddress(), event.getThrowable());
            logger.info("span is null, ignore tracing. " + msg);
        } else {
            span.tag("error", event.getThrowable().getMessage());
            span.finish();
        }
    }
}
|
<gh_stars>1-10
/* globals $: true */
/**
* The FileSystemSync module connects the Bramble editor's file system change events
* to the PathCache and SyncManager, making sure that all changes to local files
* get recorded and eventually sent to the server.
*/
var $ = require("jquery");
var strings = require("strings");
var Project = require("../project");
var SyncManager = require("./manager");
var SyncState = require("./state");
var syncManager;
var brambleInstance;
function saveAndSyncAll(callback) {
if (!(brambleInstance && syncManager)) {
callback(
new Error("[Thimble Error] saveAndSyncAll() called before init()")
);
return;
}
brambleInstance.saveAll(function() {
syncManager.once("complete", callback);
syncManager.sync();
});
}
/**
 * Wire up file syncing for the current project. Creates the SyncManager with
 * the given CSRF token, connects its lifecycle events to the save-indicator
 * UI and SyncState, and — once the Bramble editor is ready — forwards every
 * file change/delete/rename event into the Project's sync queues.
 *
 * Returns null (and does nothing) for anonymous users, whose files are not
 * persisted. `Bramble` is referenced but not imported here — presumably a
 * global provided by the host page; confirm.
 */
function init(csrfToken) {
  // If an anonymous user is using thimble, they
  // will not have any persistence of files
  if (!Project.getUser()) {
    return null;
  }

  syncManager = SyncManager.init(csrfToken);

  // Update the UI with a "Saving..." indicator whenever we sync a file
  syncManager.on("file-sync-start", function() {
    $("#navbar-save-indicator").removeClass("hide");
    $("#navbar-save-indicator").text(strings.get("fileSavingIndicator"));
  });
  syncManager.on("file-sync-stop", function() {
    $("#navbar-save-indicator").addClass("hide");
  });
  syncManager.on("file-sync-error", function() {
    // Saving over the network failed, let the user know, and that we'll retry
    $("#navbar-save-indicator").text(strings.get("fileSavingFailedIndicator"));
  });

  // Warn the user when we're syncing so they don't close the window by accident
  syncManager.on("sync-start", function() {
    SyncState.syncing();
  });
  syncManager.on("complete", function() {
    SyncState.completed();
  });

  Bramble.once("ready", function(bramble) {
    // Each editor event maps to a queued Project operation.
    function handleFileChange(path) {
      Project.queueFileUpdate(path);
    }
    function handleFileDelete(path) {
      Project.queueFileDelete(path);
    }
    function handleFileRename(oldFilename, newFilename) {
      // Step 1: Create the new file
      Project.queueFileUpdate(newFilename);
      // Step 2: Delete the old file
      Project.queueFileDelete(oldFilename);
    }
    function handleFolderRename(paths) {
      Project.queueFolderRename(paths);
    }

    bramble.on("fileChange", handleFileChange);
    bramble.on("fileDelete", handleFileDelete);
    bramble.on("fileRename", handleFileRename);
    bramble.on("folderRename", handleFolderRename);

    // Begin autosyncing
    syncManager.start();
    brambleInstance = bramble;
  });
}
// Public API: init() must be called once before saveAndSyncAll().
module.exports = {
  init: init,
  saveAndSyncAll: saveAndSyncAll
};
|
package marks

// Prompter abstracts interactive user input so implementations can be
// swapped (e.g. mocked in tests).
type Prompter interface {
	// Select presents a message and a list of options; by convention the
	// int is the index of the chosen option — confirm against callers.
	Select(string, []string) (int, error)
	// Confirm asks a yes/no question and reports whether the user agreed.
	Confirm(string) bool
}
|
const jwt = require("jsonwebtoken");

const secret = process.env.JWT_SECRET;

/**
 * Express middleware that validates the JWT supplied in the "x-auth-token"
 * header. On success the decoded payload is attached to req.user and the
 * chain continues; a missing token answers 401, a bad token answers 400.
 */
function auth(req, res, next) {
  try {
    const token = req.header("x-auth-token");
    if (!token) {
      res.status(401).json({
        message: "No token provided."
      });
      return;
    }
    req.user = jwt.verify(token, secret);
    next();
  } catch (err) {
    // jwt.verify throws on expired/malformed/forged tokens.
    res.status(400).json({
      message: "Invalid token."
    });
  }
}

module.exports = auth;
<filename>build/esm/shaders/glsl/move.position.js
// Vertex "move" transition shader (GLSL source exported as a raw string and
// compiled by the renderer at runtime — the string body must not change).
//
// getTransitionPosition() offsets each vertex by moveFrom/moveTo, weighted by
// the enter/exit transition progress. ease(t) clamps t to [0,1] and evaluates
// 1 - (2 - t)*t == (1 - t)^2, so each weight decays from 1 to 0 as its
// transition progresses. The per-vertex phase offset is derived from the stpq
// coordinates via transitionScale/transitionBias, and transitionSkew staggers
// vertices. When transitionActive < 0.5 the position passes through unchanged.
export default /* glsl */ `uniform float transitionEnter;
uniform float transitionExit;
uniform vec4 transitionScale;
uniform vec4 transitionBias;
uniform float transitionSkew;
uniform float transitionActive;
uniform vec4 moveFrom;
uniform vec4 moveTo;
float ease(float t) {
  t = clamp(t, 0.0, 1.0);
  return 1.0 - (2.0 - t) * t;
}
vec4 getTransitionPosition(vec4 xyzw, inout vec4 stpq) {
  if (transitionActive < 0.5) return xyzw;
  float enter = transitionEnter;
  float exit = transitionExit;
  float skew = transitionSkew;
  vec4 scale = transitionScale;
  vec4 bias = transitionBias;
  float factor = 1.0 + skew;
  float offset = dot(vec4(1.0), stpq * scale + bias);
  float a1 = ease(enter * factor - offset);
  float a2 = ease(exit * factor + offset - skew);
  return xyzw + a1 * moveFrom + a2 * moveTo;
}`;
|
<reponame>freight-trust/tradedocs
package com.freighttrust.schema.universal;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
 * <p>Java class for the ParticipantRelatedParty complex type
 * (namespace http://www.freighttrust.com/schema/universal/2020/06).
 *
 * <p>Binds a related-party record: the party's organization/address, the kind
 * of relationship (schema-restricted to "Contact", "Organization" or "Email"),
 * optional active/subscribed flags, an optional UN/LOCODE location, and an
 * optional free-text relation (schema max length 30).
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ParticipantRelatedParty", namespace = "http://www.freighttrust.com/schema/universal/2020/06", propOrder = {
})
public class ParticipantRelatedParty {

    // Required: the related party's organization and address details.
    @XmlElement(name = "Party", namespace = "http://www.freighttrust.com/schema/universal/2020/06", required = true)
    protected OrganizationAddress party;
    // Required: one of "Contact", "Organization" or "Email" per the schema enum.
    @XmlElement(name = "RelatedPartyType", namespace = "http://www.freighttrust.com/schema/universal/2020/06", required = true)
    protected String relatedPartyType;
    @XmlElement(name = "IsActive", namespace = "http://www.freighttrust.com/schema/universal/2020/06")
    protected Boolean isActive;
    @XmlElement(name = "IsSubscribed", namespace = "http://www.freighttrust.com/schema/universal/2020/06")
    protected Boolean isSubscribed;
    // Optional UN/LOCODE location code.
    @XmlElement(name = "Location", namespace = "http://www.freighttrust.com/schema/universal/2020/06")
    protected UNLOCO location;
    // Optional free-text relation, schema-limited to 30 characters.
    @XmlElement(name = "Relation", namespace = "http://www.freighttrust.com/schema/universal/2020/06")
    protected String relation;

    /**
     * Gets the party. Possible object is {@link OrganizationAddress}; may be
     * {@code null} if unset.
     */
    public OrganizationAddress getParty() {
        return party;
    }

    /**
     * Sets the party. Allowed object is {@link OrganizationAddress}.
     */
    public void setParty(OrganizationAddress value) {
        this.party = value;
    }

    /**
     * Gets the related-party type ({@link String}); may be {@code null}.
     */
    public String getRelatedPartyType() {
        return relatedPartyType;
    }

    /**
     * Sets the related-party type. Allowed object is {@link String}.
     */
    public void setRelatedPartyType(String value) {
        this.relatedPartyType = value;
    }

    /**
     * Gets the isActive flag ({@link Boolean}); may be {@code null}.
     */
    public Boolean isIsActive() {
        return isActive;
    }

    /**
     * Sets the isActive flag. Allowed object is {@link Boolean}.
     */
    public void setIsActive(Boolean value) {
        this.isActive = value;
    }

    /**
     * Gets the isSubscribed flag ({@link Boolean}); may be {@code null}.
     */
    public Boolean isIsSubscribed() {
        return isSubscribed;
    }

    /**
     * Sets the isSubscribed flag. Allowed object is {@link Boolean}.
     */
    public void setIsSubscribed(Boolean value) {
        this.isSubscribed = value;
    }

    /**
     * Gets the location ({@link UNLOCO}); may be {@code null}.
     */
    public UNLOCO getLocation() {
        return location;
    }

    /**
     * Sets the location. Allowed object is {@link UNLOCO}.
     */
    public void setLocation(UNLOCO value) {
        this.location = value;
    }

    /**
     * Gets the relation text ({@link String}); may be {@code null}.
     */
    public String getRelation() {
        return relation;
    }

    /**
     * Sets the relation text. Allowed object is {@link String}.
     */
    public void setRelation(String value) {
        this.relation = value;
    }
}
|
const fs = require("fs");
const pipeline = require("stream").pipeline;
const promisify = require("util").promisify;
const fetch = require('node-fetch').default;

const pipelinePromise = promisify(pipeline);

// Local file used as both the upload source and the download target.
const filePath = './file-client';
// First CLI argument selects the mode: "upload" or "download".
const strOption = process.argv[2];
console.info(`Operate string: ${strOption}`);
const url = 'http://127.0.0.1:3000';

switch (strOption) {
  case "upload": {
    console.info(`Uploading...`);
    let byteCurr = 0;
    let byteTotal = fs.statSync(filePath).size;
    let idInterval = 0;
    // Stream the file as the POST body so large files are not buffered.
    const readStream = fs.createReadStream(filePath);
    readStream.on("error", (err) => {
      console.error(`Read file error!`);
      console.error(err);
    });
    readStream.on("close", () => {
      console.info(`Read file is complete!`);
    });
    // Progress reporter: polls bytesRead every 500 ms.
    const update = () => {
      byteCurr = readStream.bytesRead;
      if (byteTotal) {
        console.info(`Uploading: ${((byteCurr / byteTotal) * 100).toFixed(2)}%`);
      }
    }
    idInterval = setInterval(update, 500);
    // NOTE(review): response.ok is never checked, so a 4xx/5xx reply still
    // logs "complete" — confirm whether that is intended.
    fetch(`${url}/upload`, {method: 'POST', body: readStream })
      .then((response) => {
        console.info(`Requesting ok!`);
        response.headers.forEach((value, name) => {
          console.info(`Header ${name}:${value}`);
        });
        clearInterval(idInterval);
        console.info(`Upload file is complete!`);
      })
      .catch((err) => {
        clearInterval(idInterval);
        console.error('Uploading file error!');
        console.error(err);
      });
    break;
  }
  case "download": {
    console.info(`Downloading...`);
    let byteCurr = 0;
    let byteTotal = 0;
    let idInterval = 0;
    const writeSteam = fs.createWriteStream(filePath);
    // Progress reporter: total size comes from the Content-Length header.
    const update = () => {
      byteCurr = writeSteam.bytesWritten;
      if (byteTotal) {
        console.info(`Downloading: ${((byteCurr / byteTotal) * 100).toFixed(2)}%`);
      }
    }
    fetch(`${url}/download`)
      .then(response => {
        console.info(`Requesting ok!`);
        response.headers.forEach((value, name) => {
          console.info(`Header ${name}:${value}`);
        });
        byteTotal = Number(response.headers.get('content-length'));
        idInterval = setInterval(update, 500);
        // pipeline handles backpressure and propagates stream errors.
        return pipelinePromise(response.body, writeSteam);
      })
      .then(() => {
        clearInterval(idInterval);
        console.info(`Download file is complete!`);
      })
      .catch(err => {
        clearInterval(idInterval);
        console.error(`Downloading file error!`);
        console.error(err);
      });
    break;
  }
}
# Download the gene id mapping from https://www.genenames.org/download/custom/
# by selecting "NCBI Gene ID", "Ensembl gene ID", and also
# "NCBI Gene ID(supplied by NCBI)" and "Ensembl ID(supplied by Ensembl)"
# These columns correspond to the col= query parameters in the URL below.
# The pipeline drops blank lines, writes the gzipped TSV through a tee
# process substitution, and counts the rows so the final message can report
# how many mappings were fetched.
N=$(
    curl 'https://www.genenames.org/cgi-bin/download/custom?col=md_eg_id&col=md_ensembl_id&col=gd_pub_ensembl_id&col=gd_pub_eg_id&status=Approved&status=Entry%20Withdrawn&hgnc_dbtag=on&order_by=gd_app_sym_sort&format=text&submit=submit' \
    | grep -v '^\s*$' \
    | tee >(gzip -c - > ensembl_ncbi_gene_id_map.tsv.gz) \
    | wc -l
)
echo Downloaded $N Ensembl Id to NCBI Gene Id mappings from https://www.genenames.org/download/custom/
|
#!/bin/sh
#
# Cross-compiles the minion binary for a matrix of OS/arch targets, packages
# each build as a tarball, checksums and GPG-signs it, then publishes
# everything (plus a "latest" version marker) to S3.
VERSION="1.0.63"
LDFLAGS="-X main.version=$VERSION"
KEY=$1 #The key to sign the package with
if [ "$KEY" = "" ]; then
	echo "Must provide gpg signing key"
	exit 1
fi
set -o xtrace
for ARCH in amd64 arm 386
do
	for OS in linux darwin windows freebsd android
	do
		# amd64 is built for every OS; arm/386 only for linux, freebsd and android.
		if [ "$ARCH" = "amd64" ] || [ "$OS" = "linux" ] || [ "$OS" = "freebsd" ] || [ "$OS" = "android" ]; then
			echo "$OS-$ARCH"
			BUILT=""
			if [ "$OS" = "windows" ]; then
				#Windows binary is exe
				GOOS="$OS" GOARCH="$ARCH" go build -ldflags "$LDFLAGS" -o minion.exe minion.go
				tar -czf "minion.$OS.$ARCH.tar.gz" minion.exe
				BUILT=1
			elif [ "$OS" = "android" ]; then
				# Android is only supported on arm, via the gomobile NDK toolchain.
				if [ "$ARCH" = "arm" ]; then
					GOMOBILE="/home/sajal/go/pkg/gomobile" GOOS=android GOARCH=arm CC=$GOMOBILE/android-ndk-r10e/arm/bin/arm-linux-androideabi-gcc CXX=$GOMOBILE/android-ndk-r10e/arm/bin/arm-linux-androideabi-g++ CGO_ENABLED=1 GOARM=7 go build -p=8 -pkgdir=$GOMOBILE/pkg_android_arm -tags="" -ldflags="$LDFLAGS -extldflags=-pie" -o minion minion.go
					tar -czf "minion.$OS.$ARCH.tar.gz" minion
					BUILT=1
				fi
			else
				GOOS="$OS" GOARCH="$ARCH" go build -ldflags "$LDFLAGS" -o minion minion.go
				tar -czf "minion.$OS.$ARCH.tar.gz" minion
				BUILT=1
			fi
			# Only checksum/sign targets that actually produced a tarball.
			# (Previously android/non-arm ran sha256sum on a missing file and
			# uploaded an empty checksum file.)
			if [ -n "$BUILT" ]; then
				sha256sum "minion.$OS.$ARCH.tar.gz" > "minion.$OS.$ARCH.tar.gz.sha256sum"
				gpg --default-key $KEY --output "minion.$OS.$ARCH.tar.gz.sig" --detach-sig "minion.$OS.$ARCH.tar.gz"
			fi
			# -f: only one of minion/minion.exe exists per target; the plain
			# rm pair previously always printed a "No such file" error.
			rm -f minion minion.exe
		fi
	done
done
/usr/local/bin/s3cmd --acl-public put *.tar.gz* "s3://tb-minion/"
rm *.tar.gz*
echo $VERSION > latest
s3cmd --acl-public put latest "s3://tb-minion/"
rm latest
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.hadoop.rdf.io.input.readers;
import java.util.Iterator;
import org.apache.jena.hadoop.rdf.types.QuadWritable;
import org.apache.jena.riot.system.ParserProfile;
import org.apache.jena.riot.tokens.Tokenizer;
import org.apache.jena.sparql.core.Quad ;
/**
 * An abstract record reader for line-based quad formats: each input line is
 * tokenised and parsed into quads independently of every other line.
 * Concrete subclasses supply the tokenizer and the tokens-to-quads iterator
 * for their specific syntax.
 */
public abstract class AbstractLineBasedQuadReader extends AbstractLineBasedNodeTupleReader<Quad, QuadWritable> {
    @Override
    protected Iterator<Quad> getIterator(String line, ParserProfile profile) {
        // Tokenise the single line, then parse the token stream into quads.
        Tokenizer tokenizer = getTokenizer(line);
        return getQuadsIterator(tokenizer, profile);
    }

    @Override
    protected QuadWritable createInstance(Quad q) {
        // Wrap the parsed quad for Hadoop serialisation.
        return new QuadWritable(q);
    }

    /** Creates a tokenizer over the raw text of one input line. */
    protected abstract Tokenizer getTokenizer(String line);

    /** Parses the tokens of one line into an iterator of quads. */
    protected abstract Iterator<Quad> getQuadsIterator(Tokenizer tokenizer, ParserProfile profile);
}
|
def to_uppercase(items):
    """Return a new list with every string in *items* upper-cased.

    Accepts any iterable of strings (the result is always a list). The
    original parameter name shadowed the built-in ``list``; it was renamed
    for clarity — positional callers are unaffected.
    """
    return [item.upper() for item in items]
import roomsService from '../service/rooms';

/**
 * dva model for the room list: holds all rooms plus a search keyword, and
 * reloads the list whenever the user navigates to /rooms.
 */
export default {
  namespace: 'rooms',

  state: {
    rooms: [],
    keyword: ''
  },

  subscriptions: {
    // Refresh the room list on every navigation to /rooms.
    setup({ dispatch, history }) {
      history.listen(({ pathname }) => {
        if (pathname === '/rooms') {
          dispatch({ type: 'getAllRooms' });
        }
      });
    }
  },

  reducers: {
    changeKeyword(state, action) {
      return { ...state, keyword: action.payload };
    },
    roomAdded(state, { payload }) {
      return { ...state, rooms: [...state.rooms, payload] };
    },
    allRooms(state, { payload: rooms }) {
      // Fix: removed a stray `debugger;` statement left over from
      // development — it halted execution whenever devtools were open.
      return { ...state, rooms };
    }
  },

  effects: {
    *getAllRooms(_, { put, call }) {
      const { code, data: rooms } = yield call(roomsService.getAllRooms);
      // code == 0 is the service's success flag; loose equality kept in case
      // the API returns it as a string — confirm against the service.
      if (code == 0) {
        yield put({ type: 'allRooms', payload: rooms });
      }
    },
    *createRoom({ payload: name }, { put, call }) {
      const { code, data: room } = yield call(roomsService.createRoom, { name });
      if (code == 0) {
        yield put({ type: 'roomAdded', payload: room });
      }
    }
  }
};
#!/bin/sh
#
# Populates the current directory with test fixture data copied from the
# given directory, then recreates the symlinks (and removes the directories)
# that the tests expect to find.
#
# Fix: $1 is now quoted everywhere — the unquoted tests previously broke
# (syntax error or wrong result) for paths containing spaces.
copy_data_path="$1"
if [ -z "$1" ]; then
	#echo $script_dir
	echo "usage : test_setup <test data directory>"
elif [ "$1" = "/" ]; then
	echo "usage : test_setup <test data directory>"
elif [ ! -e "$1" ]; then
	echo "not found : $1"
	echo "usage : test_setup <test data directory>"
else
	echo "test data setup... [test data directory $copy_data_path]"
	cp -r "${copy_data_path}"/* .
	# Replace selected copied files with symlinks to their canonical
	# counterparts, and drop the placeholder directories.
	rm test_data_03.txt
	ln -s test_data_01.txt test_data_03.txt
	rm test_data_04
	ln -s test_data_02 test_data_04
	rmdir test_data_05
	rm test2_data_03.txt
	rm test2_data_03b.txt
	ln -s test2_data_01.txt test2_data_03.txt
	ln -s test2_data_01.txt test2_data_03b.txt
	rm test2_data_04
	rm test2_data_04a
	ln -s test2_data_02 test2_data_04
	ln -s test2_data_02 test2_data_04a
	rmdir test2_data_05
	rmdir test2_data_05a
fi
# dnvm.sh
# Source this file from your .bash-profile or script to use

# "Constants"
# Branding/package names and the default NuGet feeds used to locate DNX builds.
_DNVM_BUILDNUMBER="rc1-15532"
_DNVM_AUTHORS="Microsoft Open Technologies, Inc."
_DNVM_RUNTIME_PACKAGE_NAME="dnx"
_DNVM_RUNTIME_FRIENDLY_NAME=".NET Execution Environment"
_DNVM_RUNTIME_SHORT_NAME="DNX"
_DNVM_RUNTIME_FOLDER_NAME=".dnx"
_DNVM_COMMAND_NAME="dnvm"
_DNVM_PACKAGE_MANAGER_NAME="dnu"
_DNVM_VERSION_MANAGER_NAME=".NET Version Manager"
_DNVM_DEFAULT_FEED="https://www.nuget.org/api/v2"
_DNVM_DEFAULT_UNSTABLE_FEED="https://www.myget.org/F/aspnetvnext/api/v2"
_DNVM_UPDATE_LOCATION="https://raw.githubusercontent.com/aspnet/Home/dev/dnvm.sh"

# ANSI colour table, suppressed when NO_COLOR=1 (variables stay unset, so
# later "${Red}" etc. expand to nothing).
if [ "$NO_COLOR" != "1" ]; then
    # ANSI Colors
    RCol='\e[0m'    # Text Reset
    # Regular Bold Underline High Intensity BoldHigh Intens Background High Intensity Backgrounds
    Bla='\e[0;30m'; BBla='\e[1;30m'; UBla='\e[4;30m'; IBla='\e[0;90m'; BIBla='\e[1;90m'; On_Bla='\e[40m'; On_IBla='\e[0;100m';
    Red='\e[0;31m'; BRed='\e[1;31m'; URed='\e[4;31m'; IRed='\e[0;91m'; BIRed='\e[1;91m'; On_Red='\e[41m'; On_IRed='\e[0;101m';
    Gre='\e[0;32m'; BGre='\e[1;32m'; UGre='\e[4;32m'; IGre='\e[0;92m'; BIGre='\e[1;92m'; On_Gre='\e[42m'; On_IGre='\e[0;102m';
    Yel='\e[0;33m'; BYel='\e[1;33m'; UYel='\e[4;33m'; IYel='\e[0;93m'; BIYel='\e[1;93m'; On_Yel='\e[43m'; On_IYel='\e[0;103m';
    Blu='\e[0;34m'; BBlu='\e[1;34m'; UBlu='\e[4;34m'; IBlu='\e[0;94m'; BIBlu='\e[1;94m'; On_Blu='\e[44m'; On_IBlu='\e[0;104m';
    Pur='\e[0;35m'; BPur='\e[1;35m'; UPur='\e[4;35m'; IPur='\e[0;95m'; BIPur='\e[1;95m'; On_Pur='\e[45m'; On_IPur='\e[0;105m';
    Cya='\e[0;36m'; BCya='\e[1;36m'; UCya='\e[4;36m'; ICya='\e[0;96m'; BICya='\e[1;96m'; On_Cya='\e[46m'; On_ICya='\e[0;106m';
    Whi='\e[0;37m'; BWhi='\e[1;37m'; UWhi='\e[4;37m'; IWhi='\e[0;97m'; BIWhi='\e[1;97m'; On_Whi='\e[47m'; On_IWhi='\e[0;107m';
fi

# A source build leaves the {{BUILD_NUMBER}} template unexpanded; map that
# to "HEAD".
[[ "$_DNVM_BUILDNUMBER" = {{* ]] && _DNVM_BUILDNUMBER="HEAD"

# True (exit 0) when command $1 exists in the current shell.
__dnvm_has() {
    type "$1" > /dev/null 2>&1
    return $?
}

# Lower-cases its single argument.
__dnvm_to_lower() {
    echo "$1" | tr '[:upper:]' '[:lower:]'
}

# zsh only: stop "no matches found" errors from unquoted globs below.
if __dnvm_has "unsetopt"; then
    unsetopt nomatch 2>/dev/null
fi

# Resolve the user and global install roots. DNX_HOME becomes a
# colon-separated search path that is guaranteed to include the global root.
if [ -z "$DNX_USER_HOME" ]; then
    eval DNX_USER_HOME="~/$_DNVM_RUNTIME_FOLDER_NAME"
fi
if [ -z "$DNX_GLOBAL_HOME" ]; then
    eval DNX_GLOBAL_HOME="/usr/local/lib/dnx"
fi
if [ -z "$DNX_HOME" ]; then
    # Set to the user home value
    eval DNX_HOME="$DNX_USER_HOME:$DNX_GLOBAL_HOME"
elif [[ $DNX_HOME != *"$DNX_GLOBAL_HOME"* ]]; then
    eval DNX_HOME="$DNX_HOME:$DNX_GLOBAL_HOME"
fi

_DNVM_USER_PACKAGES="$DNX_USER_HOME/runtimes"
_DNVM_GLOBAL_PACKAGES="$DNX_GLOBAL_HOME/runtimes"
_DNVM_ALIAS_DIR="$DNX_USER_HOME/alias"
_DNVM_DNVM_DIR="$DNX_USER_HOME/dnvm"
# The feed actually in use; selected elsewhere from the default/unstable feeds.
DNX_ACTIVE_FEED=""
# Prints "darwin" on OS X; everything else is reported as "linux".
__dnvm_current_os()
{
    local uname=$(uname)
    if [[ $uname == "Darwin" ]]; then
        echo "darwin"
    else
        echo "linux"
    fi
}

# Default runtime flavour for an OS name: "clr" on Windows, "mono" elsewhere.
# NOTE(review): for an unrecognised OS this echoes "unknown os" to stdout,
# which a caller could mistake for a runtime name — confirm callers guard it.
__dnvm_os_runtime_defaults()
{
    local os=$1
    if [[ $os == "win" ]]; then
        echo "clr"
    elif [[ $os == "linux" ]]; then
        echo "mono"
    elif [[ $os == "darwin" ]]; then
        echo "mono"
    else
        echo "unknown os"
    fi
}

# Default bitness for a runtime flavour: clr -> x86, coreclr -> x64.
__dnvm_runtime_bitness_defaults()
{
    local runtime=$1
    if [[ $runtime == "clr" ]]; then
        echo "x86"
    elif [[ $runtime == "coreclr" ]]; then
        echo "x64"
    else
        echo "unknown runtime"
    fi
}
# Queries a NuGet v2 feed URL ($1) and, when the response contains a Version
# element, prints "<version> <downloadUrl>"; returns 1 when no match.
# NOTE(review): xml/version/downloadUrl are not declared `local`, so they
# leak into the sourcing shell.
__dnvm_query_feed() {
    local url=$1
    xml="$(curl $url 2>/dev/null)"
    echo $xml | grep \<[a-zA-Z]:Version\>* >> /dev/null || return 1
    version="$(echo $xml | sed 's/.*<[a-zA-Z]:Version>\([^<]*\).*/\1/')"
    downloadUrl="$(echo $xml | sed 's/.*<content.*src="\([^"]*\).*/\1/')"
    echo $version $downloadUrl
}

# Finds the newest (including prerelease) package for platform $1 / arch $2 /
# os $3 on the active feed; prints "<version> <url>" via __dnvm_query_feed.
__dnvm_find_latest() {
    local platform=$1
    local arch=$2
    local os=$3
    if ! __dnvm_has "curl"; then
        printf "%b\n" "${Red}$_DNVM_COMMAND_NAME needs curl to proceed. ${RCol}" >&2;
        return 1
    fi
    if [[ $platform == "mono" ]]; then
        #dnx-mono
        local packageId="$_DNVM_RUNTIME_PACKAGE_NAME-$platform"
    else
        #dnx-coreclr-linux-x64
        local packageId="$_DNVM_RUNTIME_PACKAGE_NAME-$platform-$os-$arch"
    fi
    local url="$DNX_ACTIVE_FEED/GetUpdates()?packageIds=%27$packageId%27&versions=%270.0%27&includePrerelease=true&includeAllVersions=false"
    __dnvm_query_feed $url
    return $?
}

# Like __dnvm_find_latest, but for the exact version given in $4.
__dnvm_find_package() {
    local platform=$1
    local arch=$2
    local os=$3
    local version=$4
    if [[ $platform == "mono" ]]; then
        #dnx-mono
        local packageId="$_DNVM_RUNTIME_PACKAGE_NAME-$platform"
    else
        #dnx-coreclr-linux-x64
        local packageId="$_DNVM_RUNTIME_PACKAGE_NAME-$platform-$os-$arch"
    fi
    local url="$DNX_ACTIVE_FEED/Packages()?\$filter=Id%20eq%27$packageId%27%20and%20Version%20eq%20%27$version%27"
    __dnvm_query_feed $url
    return $?
}
# Removes every runtime bin path matching marker $2 from the PATH-style
# string $1, for both the user and global package roots.
__dnvm_strip_path() {
    echo "$1" | sed -e "s#$_DNVM_USER_PACKAGES/[^/]*$2[^:]*:##g" -e "s#:$_DNVM_USER_PACKAGES/[^/]*$2[^:]*##g" -e "s#$_DNVM_USER_PACKAGES/[^/]*$2[^:]*##g" | sed -e "s#$_DNVM_GLOBAL_PACKAGES/[^/]*$2[^:]*:##g" -e "s#:$_DNVM_GLOBAL_PACKAGES/[^/]*$2[^:]*##g" -e "s#$_DNVM_GLOBAL_PACKAGES/[^/]*$2[^:]*##g"
}

# Prepends $2 to the PATH-style string $1, avoiding a leading colon when $1
# is empty.
__dnvm_prepend_path() {
    if [ -z "$1" ]; then
        echo "$2"
    else
        echo "$2:$1"
    fi
}
# "dnx-coreclr-linux-x64.1.0.0" -> "1.0.0" (everything after the first dot).
__dnvm_package_version() {
    local runtimeFullName="$1"
    echo "$runtimeFullName" | sed "s/[^.]*.\(.*\)/\1/"
}

# "dnx-coreclr-linux-x64.1.0.0" -> "dnx-coreclr-linux-x64"
# (everything before the first dot).
__dnvm_package_name() {
    local runtimeFullName="$1"
    echo "$runtimeFullName" | sed "s/\([^.]*\).*/\1/"
}

# Extracts the runtime flavour segment, e.g. "coreclr" or "mono".
__dnvm_package_runtime() {
    local runtimeFullName="$1"
    echo "$runtimeFullName" | sed "s/$_DNVM_RUNTIME_PACKAGE_NAME-\([^.-]*\).*/\1/"
}

# Extracts the architecture segment; prints nothing for three-part (mono)
# package names, which carry no arch.
__dnvm_package_arch() {
    local runtimeFullName="$1"
    if [[ "$runtimeFullName" =~ $_DNVM_RUNTIME_PACKAGE_NAME-[^-.]*-[^-.]*-[^-.]*\..* ]];
    then
        echo "$runtimeFullName" | sed "s/$_DNVM_RUNTIME_PACKAGE_NAME-[^-.]*-[^-.]*-\([^-.]*\)\..*/\1/"
    fi
}

# Extracts the OS segment; mono packages are OS-agnostic and map to
# "linux/osx".
__dnvm_package_os() {
    local runtimeFullName="$1"
    if [[ "$runtimeFullName" =~ "mono" ]]; then
        echo "linux/osx"
    else
        echo "$runtimeFullName" | sed "s/$_DNVM_RUNTIME_PACKAGE_NAME-[^-.]*-\([^.-]*\).*/\1/"
    fi
}
# Re-downloads dnvm.sh from _DNVM_UPDATE_LOCATION over the installed copy at
# $_DNVM_DNVM_DIR/dnvm.sh and re-sources it. Fails (return 1) when dnvm is
# not installed at that standard location, or when the download errors.
__dnvm_update_self() {
    local dnvmFileLocation="$_DNVM_DNVM_DIR/dnvm.sh"
    if [ ! -e $dnvmFileLocation ]; then
        local formattedDnvmFileLocation=`(echo $dnvmFileLocation | sed s=$HOME=~=g)`
        local formattedDnvmHome=`(echo $_DNVM_DNVM_DIR | sed s=$HOME=~=g)`
        local bashSourceLocation=${BASH_SOURCE}
        local scriptLocation=$bashSourceLocation
        if [ -z "${bashSourceLocation}" ]; then
            # zsh has no BASH_SOURCE; ${(%):-%x} is the zsh equivalent.
            local scriptLocation=${(%):-%x}
        fi
        printf "%b\n" "${Red}$formattedDnvmFileLocation doesn't exist. This command assumes you have installed dnvm in the usual location and are trying to update it. If you want to use update-self then dnvm.sh should be sourced from $formattedDnvmHome. dnvm is currently sourced from $scriptLocation ${RCol}"
        return 1
    fi
    printf "%b\n" "${Cya}Downloading dnvm.sh from $_DNVM_UPDATE_LOCATION ${RCol}"
    # Capture just the HTTP status code from the response headers.
    local httpResult=$(curl -L -D - "$_DNVM_UPDATE_LOCATION" -o "$dnvmFileLocation" -# | grep "^HTTP/1.1" | head -n 1 | sed "s/HTTP.1.1 \([0-9]*\).*/\1/")
    [[ $httpResult == "404" ]] &&printf "%b\n" "${Red}404. Unable to download DNVM from $_DNVM_UPDATE_LOCATION ${RCol}" && return 1
    [[ $httpResult != "302" && $httpResult != "200" ]] && echo "${Red}HTTP Error $httpResult fetching DNVM from $_DNVM_UPDATE_LOCATION ${RCol}" && return 1
    source "$dnvmFileLocation"
}
# Asks the user to approve sudo use (message $2) unless $1 (acceptSudo) is
# non-zero, in which case consent is assumed.
# NOTE: the return convention is inverted relative to shell norms — returns
# 1 when the user agrees and 0 when they decline; callers invoke it as
# `if ! __dnvm_promptSudo ...; then useSudo=sudo; fi`.
__dnvm_promptSudo() {
    local acceptSudo="$1"
    local sudoMsg="$2"
    local answer=
    if [ "$acceptSudo" == "0" ]; then
        echo $2
        read -p "You may be prompted for your password via 'sudo' during this process. Is this Ok? (y/N) " answer
    else
        answer="y"
    fi
    if echo $answer | grep -iq "^y" ; then
        return 1
    else
        return 0
    fi
}
# Downloads runtime package $1 from URL $2 into folder $3 and unpacks it.
# $4 non-empty forces a re-download (the folder is deleted first); $5 is the
# accept-sudo flag forwarded to __dnvm_promptSudo when the destination needs
# root to create. Returns non-zero on any failure; no-op when the folder
# already exists.
__dnvm_download() {
    local runtimeFullName="$1"
    local downloadUrl="$2"
    local runtimeFolder="$3"
    local force="$4"
    local acceptSudo="$5"
    local pkgName=$(__dnvm_package_name "$runtimeFullName")
    local pkgVersion=$(__dnvm_package_version "$runtimeFullName")
    local runtimeFile="$runtimeFolder/$runtimeFullName.nupkg"
    if [ -n "$force" ]; then
        printf "%b\n" "${Yel}Forcing download by deleting $runtimeFolder directory ${RCol}"
        rm -rf "$runtimeFolder"
    fi
    if [ -e "$runtimeFolder" ]; then
        printf "%b\n" "${Gre}$runtimeFullName already installed. ${RCol}"
        return 0
    fi
    if ! __dnvm_has "curl"; then
        printf "%b\n" "${Red}$_DNVM_COMMAND_NAME needs curl to proceed. ${RCol}" >&2;
        return 1
    fi
    local useSudo=
    # Try a plain mkdir first; fall back to sudo (with consent) for global
    # install locations the user cannot write.
    mkdir -p "$runtimeFolder" > /dev/null 2>&1
    if [ ! -d $runtimeFolder ]; then
        if ! __dnvm_promptSudo $acceptSudo "In order to install dnx globally, dnvm will have to temporarily run as root." ; then
            useSudo=sudo
            sudo mkdir -p "$runtimeFolder" > /dev/null 2>&1 || return 1
        else
            return 1
        fi
    fi
    echo "Downloading $runtimeFullName from $DNX_ACTIVE_FEED"
    echo "Download: $downloadUrl"
    # Capture just the HTTP status code from the response headers.
    local httpResult=$($useSudo curl -L -D - "$downloadUrl" -o "$runtimeFile" -# | grep "^HTTP/1.1" | head -n 1 | sed "s/HTTP.1.1 \([0-9]*\).*/\1/")
    if [[ $httpResult == "404" ]]; then
        printf "%b\n" "${Red}$runtimeFullName was not found in repository $DNX_ACTIVE_FEED ${RCol}"
        printf "%b\n" "${Cya}This is most likely caused by the feed not having the version that you typed. Check that you typed the right version and try again. Other possible causes are the feed doesn't have a $_DNVM_RUNTIME_SHORT_NAME of the right name format or some other error caused a 404 on the server.${RCol}"
        return 1
    fi
    [[ $httpResult != "302" && $httpResult != "200" ]] && echo "${Red}HTTP Error $httpResult fetching $runtimeFullName from $DNX_ACTIVE_FEED ${RCol}" && return 1
    __dnvm_unpack $runtimeFile $runtimeFolder $useSudo
    return $?
}
# Extracts a downloaded runtime .nupkg ($1) into its destination folder ($2),
# strips the NuGet packaging metadata, and marks the dnx/dnu entry points
# executable. $3 is "sudo" (or empty) for root-owned global locations.
# Requires unzip; returns 1 when it is missing.
__dnvm_unpack() {
    local runtimeFile="$1"
    local runtimeFolder="$2"
    local useSudo=$3
    echo "Installing to $runtimeFolder"
    if ! __dnvm_has "unzip"; then
        echo "$_DNVM_COMMAND_NAME needs unzip to proceed." >&2;
        return 1
    fi
    $useSudo unzip $runtimeFile -d $runtimeFolder > /dev/null 2>&1
    # Remove NuGet packaging metadata that is not part of the runtime.
    [ -e "$runtimeFolder/[Content_Types].xml" ] && $useSudo rm "$runtimeFolder/[Content_Types].xml"
    [ -e "$runtimeFolder/_rels/" ] && $useSudo rm -rf "$runtimeFolder/_rels/"
    # Fix: the original tested package/ but removed _package/, so the
    # package/ metadata folder was never actually deleted.
    [ -e "$runtimeFolder/package/" ] && $useSudo rm -rf "$runtimeFolder/package/"
    [ -e "$runtimeFile" ] && $useSudo rm -f "$runtimeFile"
    #Set dnx to be executable
    if [[ -s "$runtimeFolder/bin/dnx" ]]; then
        $useSudo chmod 775 "$runtimeFolder/bin/dnx"
    fi
    #Set dnu to be executable
    if [[ -s "$runtimeFolder/bin/dnu" ]]; then
        $useSudo chmod 775 "$runtimeFolder/bin/dnu"
    fi
}
# Resolves a user-supplied version or alias into a full runtime package name
# (e.g. "dnx-coreclr-linux-x64.1.0.0-beta7").
#
# $1 - version number, alias name, or an already-full package name
# $2 - runtime flavor (e.g. mono, coreclr); empty means mono
# $3 - architecture (e.g. x64); empty falls back to x64
# $4 - target operating system; empty falls back to the current OS
#
# Note: bash "local" is function-scoped, so pkgVersion assigned inside the
# alias branch below is intentionally still visible after that branch ends.
__dnvm_requested_version_or_alias() {
local versionOrAlias="$1"
local runtime="$2"
local arch="$3"
local os="$4"
local runtimeBin=$(__dnvm_locate_runtime_bin_from_full_name "$versionOrAlias")
# If the name specified is an existing package, just use it as is
if [ -n "$runtimeBin" ]; then
echo "$versionOrAlias"
else
# Alias lookup: with no runtime/arch override, return the aliased full
# name directly; otherwise extract its version and rebuild the name below.
if [ -e "$_DNVM_ALIAS_DIR/$versionOrAlias.alias" ]; then
local runtimeFullName=$(cat "$_DNVM_ALIAS_DIR/$versionOrAlias.alias")
if [[ ! -n "$runtime" && ! -n "$arch" ]]; then
echo "$runtimeFullName"
return
fi
local pkgVersion=$(__dnvm_package_version "$runtimeFullName")
fi
# Not an alias (or alias had no version): treat the input as the version.
if [[ ! -n "$pkgVersion" ]]; then
local pkgVersion=$versionOrAlias
fi
local pkgArchitecture="x64"
local pkgSystem=$os
# Mono packages are OS/arch neutral; other runtimes encode OS and arch.
if [[ -z $runtime || "$runtime" == "mono" ]]; then
echo "$_DNVM_RUNTIME_PACKAGE_NAME-mono.$pkgVersion"
else
if [ "$arch" != "" ]; then
local pkgArchitecture="$arch"
fi
if [ "$os" == "" ]; then
local pkgSystem=$(__dnvm_current_os)
fi
echo "$_DNVM_RUNTIME_PACKAGE_NAME-$runtime-$pkgSystem-$pkgArchitecture.$pkgVersion"
fi
fi
}
# This will be more relevant if we support global installs
# Looks up an installed runtime's bin directory by its full package name.
# DNX_HOME may list several install roots separated by ":"; the bin path of
# the first root containing the runtime is printed, otherwise nothing.
__dnvm_locate_runtime_bin_from_full_name() {
    local runtimeFullName=$1
    for v in $(echo $DNX_HOME | tr ":" "\n"); do
        if [ -e "$v/runtimes/$runtimeFullName/bin" ]; then
            echo "$v/runtimes/$runtimeFullName/bin"
            return
        fi
    done
}
# Prints the DNVM ASCII-art banner in cyan, then restores the default color.
__echo_art() {
printf "%b" "${Cya}"
echo "    ___  _  ___   ____  ___"
echo "   / _ \/ |/ / | / /  |/  /"
echo "  / // /    / |/ / /|_/ / "
echo " /____/_/|_/ |___/_/  /_/ "
printf "%b" "${RCol}"
}
# Prints the banner, version line, a short description of dnvm, and the
# currently-configured stable/unstable feed URLs (defaults plus any
# DNX_FEED / DNX_UNSTABLE_FEED environment overrides).
__dnvm_description() {
__echo_art
echo ""
echo "$_DNVM_VERSION_MANAGER_NAME - Version 1.0.0-$_DNVM_BUILDNUMBER"
# Skip the authors line when the build placeholder ({{...}}) was not replaced.
[[ "$_DNVM_AUTHORS" != {{* ]] && echo "By $_DNVM_AUTHORS"
echo ""
echo "DNVM can be used to download versions of the $_DNVM_RUNTIME_FRIENDLY_NAME and manage which version you are using."
echo "You can control the URL of the stable and unstable channel by setting the DNX_FEED and DNX_UNSTABLE_FEED variables."
echo ""
printf "%b\n" "${Yel}Current feed settings:${RCol}"
printf "%b\n" "${Cya}Default Stable:${Yel} $_DNVM_DEFAULT_FEED"
printf "%b\n" "${Cya}Default Unstable:${Yel} $_DNVM_DEFAULT_UNSTABLE_FEED"
local dnxStableOverride="<none>"
[[ -n $DNX_FEED ]] && dnxStableOverride="$DNX_FEED"
printf "%b\n" "${Cya}Current Stable Override:${Yel} $dnxStableOverride"
local dnxUnstableOverride="<none>"
[[ -n $DNX_UNSTABLE_FEED ]] && dnxUnstableOverride="$DNX_UNSTABLE_FEED"
printf "%b\n" "${Cya}Current Unstable Override:${Yel} $dnxUnstableOverride${RCol}"
echo ""
}
# Prints the dnvm version string (semver plus build number).
__dnvm_version() {
    printf "%s\n" "1.0.0-$_DNVM_BUILDNUMBER"
}
# Prints the full command-line usage text: the banner/description followed by
# one section per subcommand (upgrade, install, uninstall, use, run, exec,
# list, alias, help, version, update-self) with their options.
__dnvm_help() {
__dnvm_description
printf "%b\n" "${Cya}USAGE:${Yel} $_DNVM_COMMAND_NAME <command> [options] ${RCol}"
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME upgrade [-f|-force] [-u|-unstable] [-g|-global] [-y]${RCol}"
echo "  install latest $_DNVM_RUNTIME_SHORT_NAME from feed"
echo "  adds $_DNVM_RUNTIME_SHORT_NAME bin to path of current command line"
echo "  set installed version as default"
echo "  -f|-force        force upgrade. Overwrite existing version of $_DNVM_RUNTIME_SHORT_NAME if already installed"
echo "  -u|-unstable     use unstable feed. Installs the $_DNVM_RUNTIME_SHORT_NAME from the unstable feed"
echo "  -r|-runtime <runtime>     runtime flavor to install [mono or coreclr] (default: mono)"
echo "  -g|-global       Installs the latest $_DNVM_RUNTIME_SHORT_NAME in the configured global $_DNVM_RUNTIME_SHORT_NAME file location (default: /usr/local/lib/dnx current: $DNX_GLOBAL_HOME)"
echo "  -y               Assume Yes to all queries and do not prompt"
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME install <semver>|<alias>|<nupkg>|latest [-r <runtime>] [-OS <OS>] [-alias <alias>] [-a|-arch <architecture>] [-p|-persistent] [-f|-force] [-u|-unstable] [-g|-global] [-y]${RCol}"
echo "  <semver>|<alias>  install requested $_DNVM_RUNTIME_SHORT_NAME from feed"
echo "  <nupkg>          install requested $_DNVM_RUNTIME_SHORT_NAME from local package on filesystem"
echo "  latest           install latest version of $_DNVM_RUNTIME_SHORT_NAME from feed"
echo "  -OS <operating system>    the operating system that the runtime targets (default:$(__dnvm_current_os))"
echo "  -alias <alias>   set alias <alias> for requested $_DNVM_RUNTIME_SHORT_NAME on install"
echo "  -a|-arch <architecture>   architecture to use (x64)"
echo "  -p|-persistent   set installed version as default"
echo "  -f|-force        force install. Overwrite existing version of $_DNVM_RUNTIME_SHORT_NAME if already installed"
echo "  -u|-unstable     use unstable feed. Installs the $_DNVM_RUNTIME_SHORT_NAME from the unstable feed"
echo "  -r|-runtime <runtime>     runtime flavor to install [mono or coreclr] (default: mono)"
echo "  -g|-global       Installs to the configured global $_DNVM_RUNTIME_SHORT_NAME file location (default: /usr/local/lib/dnx current: $DNX_GLOBAL_HOME)"
echo "  -y               Assume Yes to all queries and do not prompt"
echo ""
echo "  adds $_DNVM_RUNTIME_SHORT_NAME bin to path of current command line"
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME uninstall <semver> [-r|-runtime <runtime>] [-a|-arch <architecture>] [-OS <OS>]${RCol}"
echo "  <semver>         the version to uninstall"
echo "  -r|-runtime <runtime>     runtime flavor to uninstall [mono or coreclr] (default: mono)"
echo "  -a|-arch <architecture>   architecture to use (x64)"
echo "  -OS <operating system>    the operating system that the runtime targets (default:$(__dnvm_current_os))"
echo "  -y               Assume Yes to all queries and do not prompt"
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME use <semver>|<alias>|<package>|none [-p|-persistent] [-r|-runtime <runtime>] [-a|-arch <architecture>] ${RCol}"
echo "  <semver>|<alias>|<package>  add $_DNVM_RUNTIME_SHORT_NAME bin to path of current command line   "
echo "  none             remove $_DNVM_RUNTIME_SHORT_NAME bin from path of current command line"
echo "  -p|-persistent   set selected version as default"
echo "  -r|-runtime <runtime>     runtime flavor to use [mono or coreclr] (default: mono)"
echo "  -a|-arch <architecture>   architecture to use (x64)"
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME run <semver>|<alias> <args...> ${RCol}"
echo "  <semver>|<alias>  the version or alias to run"
echo "  <args...>        arguments to be passed to $_DNVM_RUNTIME_SHORT_NAME"
echo ""
echo "  runs the $_DNVM_RUNTIME_SHORT_NAME command from the specified version of the runtime without affecting the current PATH"
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME exec <semver>|<alias> <command> <args...> ${RCol}"
echo "  <semver>|<alias>  the version or alias to execute in"
echo "  <command>        the command to run"
echo "  <args...>        arguments to be passed to the command"
echo ""
echo "  runs the specified command in the context of the specified version of the runtime without affecting the current PATH"
echo "  example: $_DNVM_COMMAND_NAME exec 1.0.0-beta4 $_DNVM_PACKAGE_MANAGER_NAME build"
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME list [-detailed]${RCol}"
echo "  -detailed        display more detailed information on each runtime"
echo ""
echo "  list $_DNVM_RUNTIME_SHORT_NAME versions installed "
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME alias ${RCol}"
echo "  list $_DNVM_RUNTIME_SHORT_NAME aliases which have been defined"
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME alias <alias> ${RCol}"
echo "  display value of the specified alias"
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME alias <alias> <semver>|<alias>|<package> ${RCol}"
echo "  <alias>                      the name of the alias to set"
echo "  <semver>|<alias>|<package>   the $_DNVM_RUNTIME_SHORT_NAME version to set the alias to. Alternatively use the version of the specified alias"
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME alias [-d|-delete] <alias> ${RCol}"
echo "  remove the specified alias"
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME [help|-h|-help|--help] ${RCol}"
echo "  displays this help text."
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME [version|-v|-version|--version] ${RCol}"
echo "  print the dnvm version."
echo ""
printf "%b\n" "${Yel}$_DNVM_COMMAND_NAME update-self ${RCol}"
echo "  updates dnvm itself."
}
# Entry point for every dnvm command. Dispatches on the first argument:
#   help / version / update-self / upgrade / install / uninstall /
#   use / run / exec / alias / unalias / list
# With no arguments, prints the banner and a pointer to the help text.
# Returns 1 on any error, 0 otherwise.
dnvm()
{
if [ $# -lt 1 ]; then
__dnvm_description
printf "%b\n" "Use ${Yel}$_DNVM_COMMAND_NAME [help|-h|-help|--help] ${RCol} to display help text."
echo ""
return
fi
case $1 in
"help"|"-h"|"-help"|"--help" )
__dnvm_help
;;
"version"|"-v"|"-version"|"--version" )
__dnvm_version
;;
"update-self" )
__dnvm_update_self
;;
# upgrade is shorthand for "install latest -p", forwarding remaining flags.
"upgrade" )
shift
$_DNVM_COMMAND_NAME install latest -p $@
;;
"install" )
[ $# -lt 2 ] && __dnvm_help && return
shift
local persistent=
local versionOrAlias=
local alias=
local force=
local unstable=
local os=
local runtime=
local arch=
local global=0
local acceptSudo=0
# Parse flags; the single non-flag argument becomes versionOrAlias.
while [ $# -ne 0 ]
do
if [[ $1 == "-p" || $1 == "-persistent" ]]; then
local persistent="-p"
elif [[ $1 == "-alias" ]]; then
local alias=$2
shift
elif [[ $1 == "-f" || $1 == "-force" ]]; then
local force="-f"
elif [[ $1 == "-u" || $1 == "-unstable" ]]; then
local unstable="-u"
elif [[ $1 == "-r" || $1 == "-runtime" ]]; then
local runtime=$(__dnvm_to_lower "$2")
shift
elif [[ $1 == "-OS" ]]; then
local os=$(__dnvm_to_lower "$2")
shift
elif [[ $1 == "-y" ]]; then
local acceptSudo=1
elif [[ $1 == "-a" || $1 == "-arch" ]]; then
local arch=$(__dnvm_to_lower "$2")
shift
if [[ $arch != "x86" && $arch != "x64" ]]; then
printf "%b\n" "${Red}Architecture must be x86 or x64.${RCol}"
return 1
fi
elif [[ $1 == "-g" || $1 == "-global" ]]; then
local global=1
elif [[ -n $1 ]]; then
[[ -n $versionOrAlias ]] && echo "Invalid option $1" && __dnvm_help && return 1
local versionOrAlias=$1
fi
shift
done
if [[ $arch == "x86" && $runtime == "coreclr" && $os != "win" ]]; then
printf "%b\n" "${Red}Core CLR doesn't currently have a 32 bit build. You must use x64.${RCol}"
return 1
fi
# Select the active feed: stable by default, unstable with -u; environment
# variables DNX_FEED / DNX_UNSTABLE_FEED override the built-in defaults.
if [ -z $unstable ]; then
DNX_ACTIVE_FEED="$DNX_FEED"
if [ -z "$DNX_ACTIVE_FEED" ]; then
DNX_ACTIVE_FEED="$_DNVM_DEFAULT_FEED"
else
printf "%b\n" "${Yel}Default stable feed ($_DNVM_DEFAULT_FEED) is being overridden by the value of the DNX_FEED variable ($DNX_FEED). ${RCol}"
fi
else
DNX_ACTIVE_FEED="$DNX_UNSTABLE_FEED"
if [ -z "$DNX_ACTIVE_FEED" ]; then
DNX_ACTIVE_FEED="$_DNVM_DEFAULT_UNSTABLE_FEED"
else
printf "%b\n" "${Yel}Default unstable feed ($_DNVM_DEFAULT_UNSTABLE_FEED) is being overridden by the value of the DNX_UNSTABLE_FEED variable ($DNX_UNSTABLE_FEED). ${RCol}"
fi
fi
# Fill in OS / runtime / arch defaults from the current environment.
if [[ -z $os ]]; then
os=$(__dnvm_current_os)
fi
if [[ $os == "osx" ]]; then
os="darwin"
fi
if [[ -z $runtime ]]; then
runtime=$(__dnvm_os_runtime_defaults "$os")
fi
if [[ -z $arch ]]; then
arch=$(__dnvm_runtime_bitness_defaults "$runtime")
fi
if [[ $runtime == "mono" ]] && ! __dnvm_has "mono"; then
printf "%b\n" "${Yel}It appears you don't have Mono available. Remember to get Mono before trying to run $DNVM_RUNTIME_SHORT_NAME application. ${RCol}" >&2;
fi
local runtimeDir=$_DNVM_USER_PACKAGES
if [ $global == 1 ]; then
runtimeDir=$_DNVM_GLOBAL_PACKAGES
fi
# Two install paths: from the feed (version/alias/latest) or from a
# local .nupkg file on disk.
if [[ "$versionOrAlias" != *.nupkg ]]; then
if [[ "$versionOrAlias" == "latest" ]]; then
echo "Determining latest version"
read versionOrAlias downloadUrl < <(__dnvm_find_latest "$runtime" "$arch" "$os")
[[ $? == 1 ]] && echo "Error: Could not find latest version from feed $DNX_ACTIVE_FEED" && return 1
printf "%b\n" "Latest version is ${Cya}$versionOrAlias ${RCol}"
else
local runtimeFullName=$(__dnvm_requested_version_or_alias "$versionOrAlias" "$runtime" "$arch" "$os")
local runtimeVersion=$(__dnvm_package_version "$runtimeFullName")
read versionOrAlias downloadUrl < <(__dnvm_find_package "$runtime" "$arch" "$os" "$runtimeVersion")
[[ $? == 1 ]] && echo "Error: Could not find version $runtimeVersion in feed $DNX_ACTIVE_FEED" && return 1
fi
local runtimeFullName=$(__dnvm_requested_version_or_alias "$versionOrAlias" "$runtime" "$arch" "$os")
local runtimeFolder="$runtimeDir/$runtimeFullName"
# Skip the download when any DNX_HOME root already has this runtime.
local exist=0
for folder in `echo $DNX_HOME | tr ":" "\n"`; do
if [ -e "$folder/runtimes/$runtimeFullName" ]; then
echo "$runtimeFullName already installed in $folder"
exist=1
fi
done
if [[ $exist != 1 ]]; then
__dnvm_download "$runtimeFullName" "$downloadUrl" "$runtimeFolder" "$force" "$acceptSudo"
fi
[[ $? == 1 ]] && return 1
# Only activate (and optionally alias) runtimes built for this OS.
if [[ "$os" == $(__dnvm_current_os) ]]; then
$_DNVM_COMMAND_NAME use "$versionOrAlias" "$persistent" "-runtime" "$runtime" "-arch" "$arch"
[[ -n $alias ]] && $_DNVM_COMMAND_NAME alias "$alias" "$versionOrAlias"
fi
else
# Local nupkg install: copy the package into place and unpack it.
local runtimeFullName=$(basename $versionOrAlias | sed "s/\(.*\)\.nupkg/\1/")
local runtimeVersion=$(__dnvm_package_version "$runtimeFullName")
local runtimeFolder="$runtimeDir/$runtimeFullName"
local runtimeFile="$runtimeFolder/$runtimeFullName.nupkg"
local runtimeClr=$(__dnvm_package_runtime "$runtimeFullName")
if [ -n "$force" ]; then
printf "%b\n" "${Yel}Forcing download by deleting $runtimeFolder directory ${RCol}"
rm -rf "$runtimeFolder"
fi
if [ -e "$runtimeFolder" ]; then
echo "$runtimeFullName already installed"
else
local useSudo=
mkdir -p "$runtimeFolder" > /dev/null 2>&1
# If the folder could not be created (e.g. global install location),
# retry with sudo after prompting, unless -y was given.
if [ ! -d $runtimeFolder ]; then
if ! __dnvm_promptSudo $acceptSudo "In order to install dnx globally, dnvm will have to temporarily run as root."  ; then
useSudo=sudo
sudo mkdir -p "$runtimeFolder" > /dev/null 2>&1 || return 1
else
return 1
fi
fi
cp -a "$versionOrAlias" "$runtimeFile"
__dnvm_unpack "$runtimeFile" "$runtimeFolder" $useSudo
[[ $? == 1 ]] && return 1
fi
$_DNVM_COMMAND_NAME use "$runtimeVersion" "$persistent" -r "$runtimeClr"
[[ -n $alias ]] && $_DNVM_COMMAND_NAME alias "$alias" "$runtimeVersion"
fi
;;
"uninstall" )
[[ $# -lt 2 ]] && __dnvm_help && return
shift
local versionOrAlias=
local runtime=
local architecture=
local os=
local acceptSudo=0
# Parse flags; the single non-flag argument is the version to remove.
while [ $# -ne 0 ]
do
if [[ $1 == "-r" || $1 == "-runtime" ]]; then
local runtime=$(__dnvm_to_lower "$2")
shift
elif [[ $1 == "-a" || $1 == "-arch" ]]; then
local architecture=$(__dnvm_to_lower "$2")
shift
elif [[ $1 == "-OS" ]]; then
local os=$(__dnvm_to_lower "$2")
shift
elif [[ $1 == "-y" ]]; then
local acceptSudo=1
elif [[ -n $1 ]]; then
local versionOrAlias=$1
fi
shift
done
if [[ -z $os ]]; then
os=$(__dnvm_current_os)
elif [[ $os == "osx" ]]; then
os="darwin"
fi
if [[ -z $runtime ]]; then
runtime=$(__dnvm_os_runtime_defaults "$os")
fi
if [[ -z $architecture ]]; then
architecture=$(__dnvm_runtime_bitness_defaults "$runtime")
fi
# dnx-coreclr-linux-x64.1.0.0-beta7-12290
local runtimeFullName=$(__dnvm_requested_version_or_alias "$versionOrAlias" "$runtime" "$architecture" "$os")
# Locate the runtime across all DNX_HOME roots.
for folder in `echo $DNX_HOME | tr ":" "\n"`; do
if [ -e "$folder/runtimes/$runtimeFullName" ]; then
local runtimeFolder="$folder/runtimes/$runtimeFullName"
fi
done
if [[ -e $runtimeFolder ]]; then
# Global installs live under DNX_GLOBAL_HOME and may need sudo to remove.
if [[ $runtimeFolder == *"$DNX_GLOBAL_HOME"* ]] ; then
if ! __dnvm_promptSudo $acceptSudo "In order to uninstall a global dnx, dnvm will have to temporarily run as root." ; then
local useSudo=sudo
fi
fi
$useSudo rm -r $runtimeFolder
echo "Removed $runtimeFolder"
else
echo "$runtimeFolder is not installed"
fi
# Drop any aliases that pointed at the removed runtime.
if [ -d "$_DNVM_ALIAS_DIR" ]; then
for __dnvm_file in $(find "$_DNVM_ALIAS_DIR" -name *.alias); do
if [ $(cat $__dnvm_file) == "$runtimeFullName" ]; then
rm $__dnvm_file
fi
done
fi
;;
"use"|"run"|"exec" )
[[ $1 == "use" && $# -lt 2 ]] && __dnvm_help && return
local cmd=$1
local persistent=
local arch=
local runtime=
local versionOrAlias=
shift
# "use" parses all flags; "run"/"exec" stop at the first extra argument
# so the remainder can be forwarded to the executed command.
if [ $cmd == "use" ]; then
while [ $# -ne 0 ]
do
if [[ $1 == "-p" || $1 == "-persistent" ]]; then
local persistent="true"
elif [[ $1 == "-a" || $1 == "-arch" ]]; then
local arch=$(__dnvm_to_lower "$2")
shift
elif [[ $1 == "-r" || $1 == "-runtime" ]]; then
local runtime=$(__dnvm_to_lower "$2")
shift
elif [[ $1 == -* ]]; then
echo "Invalid option $1" && __dnvm_help && return 1
elif [[ -n $1 ]]; then
[[ -n $versionOrAlias ]] && echo "Invalid option $1" && __dnvm_help && return 1
local versionOrAlias=$1
fi
shift
done
else
while [ $# -ne 0 ]
do
if [[ $1 == "-a" || $1 == "-arch" ]]; then
local arch=$(__dnvm_to_lower "$2")
shift
elif [[ $1 == "-r" || $1 == "-runtime" ]]; then
local runtime=$(__dnvm_to_lower "$2")
shift
elif [[ -n $1 ]]; then
[[ -n $versionOrAlias ]] && break
local versionOrAlias=$1
fi
shift
done
fi
# "use none" deactivates the current runtime for this shell (and, with
# -p, clears the persisted default alias).
if [[ $cmd == "use" && $versionOrAlias == "none" ]]; then
echo "Removing $_DNVM_RUNTIME_SHORT_NAME from process PATH"
# Strip other version from PATH
PATH=$(__dnvm_strip_path "$PATH" "/bin")
if [[ -n $persistent && -e "$_DNVM_ALIAS_DIR/default.alias" ]]; then
echo "Setting default $_DNVM_RUNTIME_SHORT_NAME to none"
rm "$_DNVM_ALIAS_DIR/default.alias"
fi
return 0
fi
local runtimeFullName=$(__dnvm_requested_version_or_alias "$versionOrAlias" "$runtime" "$arch" "$(__dnvm_current_os)")
local runtimeBin=$(__dnvm_locate_runtime_bin_from_full_name "$runtimeFullName")
if [[ -z $runtimeBin ]]; then
echo "Cannot find $runtimeFullName, do you need to run '$_DNVM_COMMAND_NAME install $versionOrAlias'?"
return 1
fi
case $cmd in
"run")
# Invoke the selected runtime's dnx host directly, without touching PATH.
local hostpath="$runtimeBin/dnx"
if [[ -e $hostpath ]]; then
$hostpath $@
return $?
else
echo "Cannot find $_DNVM_RUNTIME_SHORT_NAME in $runtimeBin. It may have been corrupted. Use '$_DNVM_COMMAND_NAME install $versionOrAlias -f' to attempt to reinstall it"
fi
;;
"exec")
# Run the command in a subshell whose PATH has the selected runtime
# prepended, leaving the caller's PATH untouched.
(
PATH=$(__dnvm_strip_path "$PATH" "/bin")
PATH=$(__dnvm_prepend_path "$PATH" "$runtimeBin")
$@
)
return $?
;;
"use")
echo "Adding" $runtimeBin "to process PATH"
PATH=$(__dnvm_strip_path "$PATH" "/bin")
PATH=$(__dnvm_prepend_path "$PATH" "$runtimeBin")
# With -p, record this version as the persisted "default" alias.
if [[ -n $persistent ]]; then
local runtimeVersion=$(__dnvm_package_version "$runtimeFullName")
$_DNVM_COMMAND_NAME alias default "$runtimeVersion"
fi
;;
esac
;;
"alias" )
[[ $# -gt 9 ]] && __dnvm_help && return
[[ ! -e "$_DNVM_ALIAS_DIR/" ]] && mkdir "$_DNVM_ALIAS_DIR/" > /dev/null
# No further arguments: list all aliases as a two-column table.
if [[ $# == 1 ]]; then
echo ""
local format="%-25s %s\n"
printf "$format" "Alias" "Name"
printf "$format" "-----" "----"
if [ -d "$_DNVM_ALIAS_DIR" ]; then
for __dnvm_file in $(find "$_DNVM_ALIAS_DIR" -name *.alias); do
local alias="$(basename $__dnvm_file | sed 's/\.alias//')"
local name="$(cat $__dnvm_file)"
printf "$format" "$alias" "$name"
done
fi
echo ""
return
fi
shift
# -d/-delete <alias>: remove the alias file.
if [[ $1 == "-d" || $1 == "-delete" ]]; then
local name=$2
local aliasPath="$_DNVM_ALIAS_DIR/$name.alias"
[[ ! -e  "$aliasPath" ]] && echo "Cannot remove alias, '$name' is not a valid alias name" && return 1
echo "Removing alias $name"
rm "$aliasPath" >> /dev/null 2>&1
return
fi
local name="$1"
# Single name argument: print the alias's target.
if [[ $# == 1 ]]; then
[[ ! -e "$_DNVM_ALIAS_DIR/$name.alias" ]] && echo "There is no alias called '$name'" && return 1
cat "$_DNVM_ALIAS_DIR/$name.alias"
echo ""
return
fi
shift
local versionOrAlias="$1"
shift
# Optional -a/-r/-OS flags refine which installed runtime the alias targets.
while [ $# -ne 0 ]
do
if [[ $1 == "-a" || $1 == "-arch" ]]; then
local arch=$(__dnvm_to_lower "$2")
shift
elif [[ $1 == "-r" || $1 == "-runtime" ]]; then
local runtime=$(__dnvm_to_lower "$2")
shift
elif [[ $1 == "-OS" ]]; then
local os=$(__dnvm_to_lower "$2")
shift
fi
shift
done
local runtimeFullName=$(__dnvm_requested_version_or_alias "$versionOrAlias" "$runtime" "$arch" "$os")
([[ ! -d "$_DNVM_USER_PACKAGES/$runtimeFullName" ]] && [[ ! -d "$_DNVM_GLOBAL_PACKAGES/$runtimeFullName" ]]) && echo "$runtimeFullName is not an installed $_DNVM_RUNTIME_SHORT_NAME version" && return 1
local action="Setting"
[[ -e "$_DNVM_ALIAS_DIR/$name.alias" ]] && action="Updating"
echo "$action alias '$name' to '$runtimeFullName'"
echo "$runtimeFullName" >| "$_DNVM_ALIAS_DIR/$name.alias"
;;
# Deprecated: forwards to "alias -d".
"unalias" )
[[ $# -ne 2 ]] && __dnvm_help && return
local name=$2
echo "This command has been deprecated. Use '$_DNVM_COMMAND_NAME alias -d' instead"
$_DNVM_COMMAND_NAME alias -d $name
return $?
;;
"list" )
[[ $# -gt 2 ]] && __dnvm_help && return
[[ ! -d $_DNVM_USER_PACKAGES ]] && echo "$_DNVM_RUNTIME_FRIENDLY_NAME is not installed." && return 1
# Collect "<fullname>:<location>" entries from every DNX_HOME root.
local searchGlob="$_DNVM_RUNTIME_PACKAGE_NAME-*"
local runtimes=""
for location in `echo $DNX_HOME | tr ":" "\n"`; do
location+="/runtimes"
if [ -d "$location" ]; then
local oruntimes="$(find $location -name "$searchGlob" \( -type d -or -type l \) -prune -exec basename {} \;)"
for v in `echo $oruntimes | tr "\n" " "`; do
runtimes+="$v:$location"$'\n'
done
fi
done
[[ -z $runtimes ]] && echo 'No runtimes installed. You can run `dnvm install latest` or `dnvm upgrade` to install a runtime.' && return
echo ""
# Separate empty array declaration from initialization
# to avoid potential ZSH error: local:217: maximum nested function level reached
local arr
arr=()
# Z shell array-index starts at one.
local i=1
# Build "<alias>/<fullname>" (or "<alias>/missing/<fullname>") records so
# each runtime row can show the aliases that point at it.
if [ -d "$_DNVM_ALIAS_DIR" ]; then
for __dnvm_file in $(find "$_DNVM_ALIAS_DIR" -name *.alias); do
if [ ! -d "$_DNVM_USER_PACKAGES/$(cat $__dnvm_file)" ] && [ ! -d "$_DNVM_GLOBAL_PACKAGES/$(cat $__dnvm_file)" ]; then
arr[$i]="$(basename $__dnvm_file | sed 's/\.alias//')/missing/$(cat $__dnvm_file)"
runtimes="$runtimes $(cat $__dnvm_file)"
else
arr[$i]="$(basename $__dnvm_file | sed 's/\.alias//')/$(cat $__dnvm_file)"
fi
let i+=1
done
fi
if [[ $2 == "-detailed" ]]; then
# Calculate widest alias
local widestAlias=5
for f in `echo $runtimes`; do
local pkgName=$(__dnvm_package_name "$f")
local pkgVersion=$(__dnvm_package_version "$f")
local alias=""
local delim=""
for i in "${arr[@]}"; do
if [[ ${i##*/} == "$pkgName.$pkgVersion" ]]; then
alias+="$delim${i%%/*}"
delim=", "
if [[ "${i%/*}" =~ \/missing$ ]]; then
alias+=" (missing)"
fi
fi
done
if [ "${#alias}" -gt "$widestAlias" ]; then
widestAlias=${#alias}
fi
done
local formatString="%-6s %-20s %-7s %-12s %-15s %-${widestAlias}s %s\n"
printf "$formatString" "Active" "Version" "Runtime" "Architecture" "OperatingSystem" "Alias" "Location"
printf "$formatString" "------" "-------" "-------" "------------" "---------------" "-----" "--------"
else
local formatString="%-6s %-20s %-7s %-12s %-15s %s\n"
printf "$formatString" "Active" "Version" "Runtime" "Architecture" "OperatingSystem" "Alias"
printf "$formatString" "------" "-------" "-------" "------------" "---------------" "-----"
fi
# Print one row per runtime, sorted by version; "*" marks the runtime
# currently on PATH.
for f in `echo -e "$runtimes" | sort -t. -k2 -k3 -k4 -k1`; do
local location=`echo $f | sed 's/.*\([:]\)//'`
f=`echo $f | sed 's/\([:]\).*//'`
local formattedHome=`(echo $location | sed s=$HOME=~=g)`
local active=""
[[ $PATH == *"$location/$f/bin"* ]] && local active="  *"
local pkgRuntime=$(__dnvm_package_runtime "$f")
local pkgName=$(__dnvm_package_name "$f")
local pkgVersion=$(__dnvm_package_version "$f")
local pkgArch=$(__dnvm_package_arch "$f")
local pkgOs=$(__dnvm_package_os "$f")
local alias=""
local delim=""
for i in "${arr[@]}"; do
if [[ ${i##*/} == "$pkgName.$pkgVersion" ]]; then
alias+="$delim${i%%/*}"
delim=", "
if [[ "${i%/*}" =~ \/missing$ ]]; then
alias+=" (missing)"
formattedHome=""
fi
fi
done
if [[ $2 == "-detailed" ]]; then
printf "$formatString" "$active" "$pkgVersion" "$pkgRuntime" "$pkgArch" "$pkgOs" "$alias" "$formattedHome"
else
printf "$formatString" "$active" "$pkgVersion" "$pkgRuntime" "$pkgArch" "$pkgOs" "$alias"
fi
done
echo ""
;;
*)
echo "Unknown command $1"
return 1
esac
return 0
}
# Add the home location's bin directory to the path if it doesn't exist
[[ ":$PATH:" != *":$DNX_USER_HOME/bin:"* ]] && export PATH="$DNX_USER_HOME/bin:$PATH"
# If a "default" alias has been recorded, activate it for this shell.
# The trailing "|| true" keeps a missing default alias from aborting
# shells that source this script under "set -e".
$_DNVM_COMMAND_NAME alias default >/dev/null && $_DNVM_COMMAND_NAME use default >/dev/null || true
|
package db;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Locale;
import java.util.ResourceBundle;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import helper.Utility;
public class dbHandler {
private String dbName;
private String dbUsername;
private String dbPassword;
private Connection dbConnection;
private Statement statement;
private ResultSet resultSet;
private ResultSetMetaData resultSetMeatData;
public String nameSession;
private String dbServerURL;
private String dbURL;
private String selectQuery;
private String dbDriver;
private String serverTimeZone = "?serverTimezone=";
private String[] acceptedImageSuffixes;
private ResourceBundle resourceBundle;
private static String resourceDir, tempImageDir, tempImageDirAbsolute;
private File tempImageDirAbsoluteFileObj;
private String key;
public dbHandler(String dbName, String dbUsername, String dbPassword) throws Exception {
this.dbName = dbName;
this.dbUsername = dbUsername;
this.dbPassword = <PASSWORD>;
try {
resourceBundle = ResourceBundle.getBundle("db.settings", new Locale(""));
dbServerURL = resourceBundle.getString("db_server_url").trim();
serverTimeZone = serverTimeZone + resourceBundle.getString("server_time_zone").trim();
dbDriver = resourceBundle.getString("db_driver").trim();
selectQuery = resourceBundle.getString("select_query") + " ";
// Here we specify the database full path as well as the time zone in which the
// query is executed.
dbURL = dbServerURL + "/" + dbName + serverTimeZone;
acceptedImageSuffixes = resourceBundle.getString("accepted_image_suffixes").trim().split(" ");
key = resourceBundle.getString("encryption_key");
} catch (Exception e) {
System.out.println(e);
throw new Exception();
}
}
private String openConnection() {
// For mySQL database the above code would look LIKE this:
try {
Class.forName(dbDriver);
// Here we CREATE connection to the database
dbConnection = DriverManager.getConnection(dbURL, dbUsername, dbPassword);
} catch (ClassNotFoundException e) {
return e.getLocalizedMessage();
} catch (SQLException e) {
return e.getLocalizedMessage();
}
return "";
}
private String closeConnection() {
try {
// Here we close all open streams
if (statement != null)
statement.close();
if (dbConnection != null)
dbConnection.close();
} catch (SQLException sqlexc) {
return "Exception occurred while closing database connection!";
}
return "";
}
public String Login(String dbTableName, String data) {
String query = selectQuery + dbTableName;
String[] columnData = data.split(";");
query = "SELECT * FROM " + dbTableName + " WHERE USERNAME='" + columnData[0] + "' AND PASSWORD=<PASSWORD>('"
+ columnData[1] + "')";
System.out.println(query);
StringBuilder queryResult = new StringBuilder();
queryResult.append(openConnection());
try {
statement = dbConnection.createStatement();
resultSet = statement.executeQuery(query);
if (resultSet.next()) {
queryResult.append("<style>" + "body {background-color: #f4f7c5; text-align: center;}"
+ "h1 {color: #ea907a}" + ".order_history {bottom: 80%} </style>");
queryResult.append("<h1>Hi " + columnData[0] + "!, do you want shopping? </h1></br>");
queryResult.append("<a href='http://localhost:8080/assignment9/index.html'>Shopping?</a>");
queryResult.append("</div>");
} else {
queryResult.append("<style>" + "body {background-color: #f4f7c5; text-align: center;}"
+ "h1 {color: #ea907a}" + ".order_history {bottom: 80%} </style>");
queryResult.append(
"<h1>Wrong password or username.<a href='http://localhost:8080/assignment9/login.html'>Try again!</a></h1>");
}
} catch (SQLException e) {
queryResult.append(e.getMessage());
} finally {
queryResult.append(closeConnection());
}
return queryResult.toString();
}
public String SignUp(String dbTableName, String data) {
if (data == null || data.isEmpty())
return Utility.getInvalidDataErrorMessage();
StringBuilder queryResult = new StringBuilder();
String query = selectQuery + dbTableName;
String[] columnData = data.split(";");
// Insert command
String insertCommand;
String checkCommand;
// check user does exist or not
checkCommand = "SELECT * FROM " + dbTableName + " WHERE USERNAME='" + columnData[0] + "'";
System.out.println(checkCommand);
queryResult.append(openConnection());
try {
statement = dbConnection.createStatement();
resultSet = statement.executeQuery(checkCommand);
if (resultSet.next()) {
queryResult.append(
"<p>Username is already taken.<a href='http://localhost:8080/assignment9/signup.html'>Try again!</a></p>");
} else {
insertCommand = "INSERT INTO " + dbTableName + " VALUES('" + columnData[0] + "',MD5('" + columnData[1]
+ "'))";
System.out.println(insertCommand);
try {
statement = dbConnection.createStatement();
statement.executeUpdate(insertCommand);
queryResult.append("<style>" + "body {background-color: #f4f7c5; text-align: center;}"
+ "h1 {color: #ea907a}</style>");
queryResult.append("<h1>Sign up successfully!</h1></br>");
queryResult.append("<a href='http://localhost:8080/assignment9/login.html'>Log in</a>");
queryResult.append("</div>");
} catch (SQLException e) {
queryResult.append("<p>" + e.getMessage() + "</p>");
} finally {
queryResult.append(closeConnection());
}
}
} catch (SQLException e) {
queryResult.append(e.getMessage());
} finally {
queryResult.append(closeConnection());
}
return queryResult.toString();
}
public String Checkout(String dbTableName, String data) {
if (data == null || data.isEmpty())
return Utility.getInvalidDataErrorMessage();
StringBuilder queryResult = new StringBuilder();
String[] columnData = data.split(";");
// Insert command
String insertCommand;
queryResult.append(openConnection());
insertCommand = "INSERT INTO `" + dbTableName
+ "`(`USERNAME`, `FULLNAME`, `ADDRESS`, `PRODUCTS`, `PRICE`) VALUES('" + columnData[0] + "', '"
+ columnData[1] + "', '" + columnData[2] + "', '" + columnData[3] + "', " + columnData[4] + ")";
System.out.println(insertCommand);
try {
statement = dbConnection.createStatement();
statement.executeUpdate(insertCommand);
queryResult.append("<style>" + "body {background-color: #f4f7c5; text-align: center;}"
+ "table {text-align: center; width:100%} " + "table, th, td {border: 1px solid black;} "
+ "h1 {color: #ea907a}" + ".order_history {bottom: 80%} </style>");
queryResult.append("<h1>Order successfully!</h1></br>");
queryResult.append("<div class='order_history'>");
queryResult.append("<a href='http://localhost:8080/assignment9/order_history'>Order history</a>");
queryResult.append(" ");
queryResult.append("<a href='http://localhost:8080/assignment9/logout'>Log out</a>");
queryResult.append("</div>");
} catch (SQLException e) {
queryResult.append("<p>" + e.getMessage() + "</p>");
} finally {
queryResult.append(closeConnection());
}
return queryResult.toString();
}
/**
 * Renders the order history of the given user as a styled HTML table.
 *
 * @param dbTableName name of the orders table. It cannot be bound as a SQL
 *                    parameter and is interpolated into the statement, so it
 *                    must come from trusted code, never from user input.
 * @param username    the user whose orders are listed; bound safely as a
 *                    statement parameter.
 * @return HTML markup with the result table, or the SQL error message on failure.
 */
public String History(String dbTableName, String username) {
    // Bind the username with a placeholder; the previous string concatenation
    // ("... WHERE USERNAME='" + username + "'") was vulnerable to SQL injection.
    String query = "SELECT * FROM `" + dbTableName + "` WHERE USERNAME=?";
    StringBuilder queryResult = new StringBuilder();
    queryResult.append("<style>" + "body {background-color: #ffd5cd;}" + "div {text-align: center;} "
            + "table {text-align: center; width:100%} " + "table, th, td {border: 1px solid black;} </style>");
    queryResult.append("<div>");
    queryResult.append("<h2>History order of username: " + username + "</h2>");
    queryResult.append("<TABLE style='border:1px solid black;'><tr>");
    queryResult.append(openConnection());
    // Here we CREATE the statement object for executing SQL commands.
    try {
        java.sql.PreparedStatement preparedStatement = dbConnection.prepareStatement(query);
        preparedStatement.setString(1, username);
        // Keep the shared field populated so the existing cleanup code
        // (closeConnection) continues to see the active statement.
        statement = preparedStatement;
        // Here we execute the SQL query and save the results to a ResultSet object.
        resultSet = preparedStatement.executeQuery();
        // Here we get the metadata of the query results.
        resultSetMeatData = resultSet.getMetaData();
        // Here we calculate the number of columns.
        int columns = resultSetMeatData.getColumnCount();
        // Here we print column names in TABLE header cells.
        // Pay attention that the column index starts with 1.
        for (int i = 1; i <= columns; i++) {
            queryResult.append("<th> " + resultSetMeatData.getColumnName(i) + "</th>");
        }
        queryResult.append("</tr>");
        while (resultSet.next()) {
            queryResult.append("<tr>");
            // Here we print the value of each column; NULLs render as "---".
            for (int i = 1; i <= columns; i++) {
                if (resultSet.getObject(i) != null)
                    queryResult.append("<td>" + resultSet.getObject(i).toString() + "</td>");
                else
                    queryResult.append("<td>---</td>");
            }
            queryResult.append("</tr>");
        }
        queryResult.append("</TABLE></div>");
        queryResult.append("<div class='order_history'>");
        queryResult.append("<a href='http://localhost:8080/assignment9/index.html'>Continue shopping</a>");
        queryResult.append("&nbsp;&nbsp;&nbsp;&nbsp;");
        queryResult.append("<a href='http://localhost:8080/assignment9/logout'>Log out</a>");
        queryResult.append("</div>");
    } catch (SQLException e) {
        queryResult.append(e.getMessage());
    } finally {
        queryResult.append(closeConnection());
    }
    return queryResult.toString();
}
/** @return the JDBC server URL this DAO connects to. */
public String getDbServerURL() {
    return dbServerURL;
}
/** @return the name of the database currently in use. */
public String getDbName() {
    return dbName;
}
/** @param dbName the database name to use for subsequent connections. */
public void setDbName(String dbName) {
    this.dbName = dbName;
}
/** @return the database account user name. */
public String getDbUsername() {
    return dbUsername;
}
/** @param dbUsername the database account user name to connect with. */
public void setDbUsername(String dbUsername) {
    this.dbUsername = dbUsername;
}
public void setDbPassword(String dbPassword) {
this.dbPassword = <PASSWORD>;
}
}
|
package com.padcmyanmar.hello_android_padc;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.Button;
import com.padcmyanmar.hello_android_padc.R;
/**
* Created by User on 10/30/2017.
*/
public class My_Activity_1 extends AppCompatActivity {

    // Resolved in onCreate(): the previous field initializer called
    // findViewById() at construction time, before setContentView() runs,
    // so the view lookup could never succeed.
    Button btn;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.my_activity_1);
        // The layout is inflated now, so the button can be found.
        btn = findViewById(R.id.button);
    }

    /**
     * Click handler wired from the layout (android:onClick).
     *
     * @param view the clicked view (unused; the label button is updated directly)
     */
    public void onActionClick(View view) {
        btn.setText("Get Click Action");
    }
}
|
// Define the Module struct representing a module in the programming language
/// A module in the programming language: the unit handed to `compile`.
///
/// Placeholder: fields are still to be designed, for example:
///   name: String,
///   code: String,
struct Module {
    // Define the fields of the Module struct
    // For example:
    // name: String,
    // code: String,
    // ...
}
// Define the Effect enum representing the effects of the compiled code
/// An observable effect produced by running the compiled code.
///
/// Placeholder: variants are still to be designed, for example:
///   Output(String),
///   Error(String),
enum Effect {
    // Define the variants of the Effect enum
    // For example:
    // Output(String),
    // Error(String),
    // ...
}
// Define the CompileError struct representing an error that can occur during compilation
/// An error raised while compiling a `Module`.
///
/// Placeholder: fields are still to be designed, for example:
///   message: String,
///   location: String,
struct CompileError {
    // Define the fields of the CompileError struct
    // For example:
    // message: String,
    // location: String,
    // ...
}
// Implement the compile function
fn compile(m: Module) -> Result<Vec<Effect>, CompileError> {
// Implement the compilation process
// For example:
// Parse the code, perform semantic analysis, generate intermediate code, etc.
// Return a vector of Effect if compilation is successful
// Return a CompileError if an error occurs during compilation
} |
#!/bin/bash
# CMDVAR="-Djava.security.egd=file:/dev/./urandom","java -agentlib:jdwp=transport=dt_socket,address=0:8000,server=y,suspend=n -jar"
#
# Launch the weather service jar, preferring a jar in the current directory
# and falling back to the Maven build output in ./target.
JAR=weather-1.0.0.jar
if [ ! -e "$JAR" ]; then
    JAR=target/$JAR
    # Ship the service configuration alongside the jar when present.
    if [ -e microservice.yaml ]; then
        cp microservice.yaml ./target/
    fi
fi
# $CMDVAR may carry extra JVM flags (see the commented example above) and is
# intentionally left unquoted so the flags word-split into separate arguments.
java $CMDVAR -jar "./$JAR"
|
<reponame>trumank/jblocks<filename>src/main/java/jblocks/Point.java
package jblocks;
import org.jsfml.system.Vector2f;
/**
 * A mutable 2D point with double-precision coordinates, convertible to a
 * single-precision JSFML vector for rendering.
 */
public class Point {

    /** Horizontal coordinate. */
    public double x;
    /** Vertical coordinate. */
    public double y;

    /** Creates a point at the origin (0, 0). */
    public Point() {
        this(0.0, 0.0);
    }

    /**
     * Creates a point at the given coordinates.
     *
     * @param x horizontal coordinate
     * @param y vertical coordinate
     */
    public Point(double x, double y) {
        this.x = x;
        this.y = y;
    }

    /**
     * Moves this point to the given coordinates.
     *
     * @param x new horizontal coordinate
     * @param y new vertical coordinate
     */
    public void setPosition(double x, double y) {
        this.x = x;
        this.y = y;
    }

    /**
     * Converts this point to a JSFML vector, narrowing each coordinate to
     * {@code float}.
     *
     * @return a new {@link Vector2f} with this point's coordinates
     */
    public Vector2f toVector() {
        return new Vector2f((float) x, (float) y);
    }
}
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.course.nodes.bc;
import java.util.List;
import org.olat.core.commons.modules.bc.FolderRunController;
import org.olat.core.commons.services.notifications.SubscriptionContext;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.controller.BasicController;
import org.olat.core.gui.control.generic.dtabs.Activateable2;
import org.olat.core.id.context.ContextEntry;
import org.olat.core.id.context.StateEntry;
import org.olat.core.util.Util;
import org.olat.core.util.vfs.NamedContainerImpl;
import org.olat.core.util.vfs.VFSContainer;
import org.olat.core.util.vfs.callbacks.VFSSecurityCallback;
import org.olat.course.run.CourseRuntimeController;
import org.olat.course.run.userview.UserCourseEnvironment;
import org.olat.repository.RepositoryEntry;
/**
*
* Initial date: 30 Sep 2019<br>
* @author uhensler, <EMAIL>, http://www.frentix.com
*
*/
public class CourseDocumentsController extends BasicController implements Activateable2 {

    // Folder UI shown when the course has a documents container.
    private FolderRunController folderCtrl;
    // Fallback UI shown when no documents folder is configured.
    private BCCourseNodeNoFolderForm noFolderCtrl;

    public CourseDocumentsController(UserRequest ureq, WindowControl wControl, UserCourseEnvironment userCourseEnv) {
        super(ureq, wControl);
        // Reuse the course runtime translations (e.g. for the folder title below).
        setTranslator(Util.createPackageTranslator(CourseRuntimeController.class, getLocale(), getTranslator()));
        RepositoryEntry courseEntry = userCourseEnv.getCourseEnvironment().getCourseGroupManager().getCourseEntry();
        SubscriptionContext subContext = CourseDocumentsFactory.getSubscriptionContext(courseEntry);
        // Access rights are tightened for guests; see CourseDocumentsFactory for details.
        VFSSecurityCallback secCallback = CourseDocumentsFactory
                .getSecurityCallback(userCourseEnv, ureq.getUserSession().getRoles().isGuestOnly(), subContext);
        VFSContainer rootContainer = CourseDocumentsFactory.getFileContainer(userCourseEnv.getCourseEnvironment());
        if (rootContainer != null) {
            // Wrap the container so it appears under a translated display name,
            // then attach the security callback computed above.
            rootContainer = new NamedContainerImpl(translate("command.documents"), rootContainer);
            rootContainer.setLocalSecurityCallback(secCallback);
            folderCtrl = new FolderRunController(rootContainer, true, false, true, ureq, getWindowControl());
            listenTo(folderCtrl);
            putInitialPanel(folderCtrl.getInitialComponent());
        } else {
            // No documents folder configured: show an explanatory form instead.
            noFolderCtrl = new BCCourseNodeNoFolderForm(ureq, getWindowControl());
            listenTo(noFolderCtrl);
            putInitialPanel(noFolderCtrl.getInitialComponent());
        }
    }

    @Override
    public void activate(UserRequest ureq, List<ContextEntry> entries, StateEntry state) {
        // Forward activation (deep links / history states) to the folder, if present.
        if (folderCtrl != null) {
            folderCtrl.activate(ureq, entries, state);
        }
    }

    @Override
    protected void event(UserRequest ureq, Component source, Event event) {
        // No component events handled here; child controllers manage their own events.
    }
}
|
<filename>service/src/main/java/io/mifos/identity/internal/service/UserService.java
/*
* Copyright 2017 The Mifos Initiative.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.mifos.identity.internal.service;
import io.mifos.anubis.api.v1.domain.AllowedOperation;
import io.mifos.identity.api.v1.PermittableGroupIds;
import io.mifos.identity.api.v1.domain.Permission;
import io.mifos.identity.api.v1.domain.User;
import io.mifos.identity.internal.repository.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author <NAME>
*/
@Service
public class UserService {
private final Users users;
private final Roles roles;
@Autowired
UserService(final Users users, final Roles roles)
{
this.users = users;
this.roles = roles;
}
public List<User> findAll() {
return users.getAll().stream().map(UserService::mapUser).collect(Collectors.toList());
}
public Optional<User> findByIdentifier(final String identifier)
{
return users.get(identifier).map(UserService::mapUser);
}
static private User mapUser(final UserEntity u) {
return new User(u.getIdentifier(), u.getRole());
}
public Set<Permission> getPermissions(final String userIdentifier) {
final Optional<UserEntity> userEntity = users.get(userIdentifier);
final Optional<RoleEntity> roleEntity = userEntity.map(UserEntity::getRole).map(roles::get).orElse(Optional.empty());
final List<PermissionType> permissionEntities = roleEntity.map(RoleEntity::getPermissions).orElse(Collections.emptyList());
final List<Permission> permissions = RoleMapper.mapPermissions(permissionEntities);
permissions.add(new Permission(PermittableGroupIds.SELF_MANAGEMENT, AllowedOperation.ALL));
return new HashSet<>(permissions);
}
} |
# Termux package build recipe for xmake.
TERMUX_PKG_HOMEPAGE=https://xmake.io/
TERMUX_PKG_DESCRIPTION="A cross-platform build utility based on Lua"
TERMUX_PKG_LICENSE="Apache-2.0"
TERMUX_PKG_MAINTAINER="Ruki Wang @waruqi"
TERMUX_PKG_VERSION=2.5.8
TERMUX_PKG_SRCURL=https://github.com/xmake-io/xmake/releases/download/v${TERMUX_PKG_VERSION}/xmake-v${TERMUX_PKG_VERSION}.tar.gz
# Checksum of the release tarball above; must be updated together with the version.
TERMUX_PKG_SHA256=9f0eaa823ff4d0fab5b52e01b62f6db860a2ed32402ab51738903a07584c91cb
TERMUX_PKG_AUTO_UPDATE=true
# xmake builds in its own source tree rather than a separate build directory.
TERMUX_PKG_BUILD_IN_SRC=true
#!/bin/bash
# Build the "docker-baked" machine image with Packer, using the given source AMI.
# -e: abort on any command failure; -u: treat unset variables as errors.
set -eu
packer build -var source_ami=ami-0417e362 docker-baked.json
<filename>src/components/ToggleButton/ToggleButton.test.js
import React from 'react';
import renderer from 'react-test-renderer';
import ToggleButton from './ToggleButton';
// Snapshot/unit tests for <ToggleButton>.
// NOTE(review): the last two cases mutate the shared `props.variant` in place,
// so these tests are order-dependent — keep them in this sequence (or clone
// the props per test) when editing.
describe('<ToggleButton />', () => {
  // Dummy event object forwarded to the onClick handler.
  const event = { test: true };
  // Shared base props; `variant` is reassigned by the "switch"/"grid-list" cases.
  const props = {
    onClick: jest.fn(),
    variant: 'check',
    title: 'Toggle me real good!'
  };
  it('can render "check" variant', () => {
    const tree = renderer.create(<ToggleButton {...props} />).toJSON();
    expect(tree).toMatchSnapshot();
  });
  it('uses "check" variant as default', () => {
    const component = renderer.create(<ToggleButton {...props} />);
    const instance = component.getInstance();
    expect(instance.props.variant).toEqual('check');
  });
  it('calls onClick on click', () => {
    const component = renderer.create(<ToggleButton {...props} />);
    const instance = component.getInstance();
    // The handler must not fire before the click and must receive the event.
    expect(props.onClick).not.toHaveBeenCalled();
    instance.handleClick(event);
    expect(props.onClick).toHaveBeenCalledWith(event);
  });
  it('can render with icon-only prop', () => {
    // Copy the base props so `iconOnly` does not leak into later cases.
    const newProps = {};
    Object.assign(newProps, {
      ...props,
      iconOnly: true
    });
    const tree = renderer.create(<ToggleButton {...newProps} />).toJSON();
    expect(tree).toMatchSnapshot();
  });
  it('can render "switch" variant', () => {
    props.variant = 'switch';
    const tree = renderer.create(<ToggleButton {...props} />).toJSON();
    expect(tree).toMatchSnapshot();
  });
  it('can render "grid-list" variant', () => {
    props.variant = 'grid-list';
    const tree = renderer.create(<ToggleButton {...props} />).toJSON();
    expect(tree).toMatchSnapshot();
  });
});
|
#!/bin/bash
#
# Install mrequests to a MicroPython board: cross-compile each module to .mpy
# and copy it to the board with rshell.
#
# Environment overrides: MPY_CROSS, RSHELL, BAUD, PORT, DESTDIR.

# Abort immediately if a compile or copy fails, instead of silently copying a
# stale .mpy from a previous run.
set -e

MODULES=('defaultdict.py' 'mrequests.py' 'urlencode.py' 'urlparseqs.py' 'urlunquote.py')

# Iterate with "${MODULES[@]}" (quoted) so file names are not word-split;
# the previous unquoted ${MODULES[*]} would break on names with spaces.
for py in "${MODULES[@]}"; do
    echo "Compiling $py to ${py%.*}.mpy"
    "${MPY_CROSS:-mpy-cross}" "$py"
    "${RSHELL:-rshell}" --quiet \
        -b "${BAUD:-9600}" \
        -p "${PORT:-/dev/ttyACM0}" \
        cp "${py%.*}".mpy "${DESTDIR:-/pyboard}"
done
<reponame>stefb965/JRAW<gh_stars>1-10
package net.dean.jraw.fluent;
import net.dean.jraw.RedditClient;
/**
* <p>Provides the basis of the fluent API. Every Reference has four things in common:
*
* <ol>
* <li>A Reference's constructor always has default (package-protected) visibility
* <li>A concrete Reference is always immutable
* <li>A Reference is never meant to be constructed by users of the library; they will be instantiated by the
* library only inside of the {@code net.dean.jraw.fluent} package.
* <li>A Reference does the bare minimum amount of work to complete its task
* </ol>
*
* <p>Each Reference is responsible for doing a minuscule amount of work in the grand scheme of the API.
*/
public interface Reference {
    /**
     * Gets the {@link RedditClient} that this Reference uses to send requests.
     *
     * @return the client backing this reference; never null for a constructed reference
     */
    RedditClient getRedditClient();
}
|
#!/usr/bin/env bash
##
# Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##
# @file checker-pr-audit-async.sh
# @brief It executes a build test whenever a PR is submitted.
# @see https://github.com/nnsuite/TAOS-CI
# @author Geunsik Lim <geunsik.lim@samsung.com>
# @dependency: gbs, tee, curl, grep, wc, cat, sed, awk, basename
# @param arguments are received by ci bot
# arg1: date(YmdHisu)
# arg2: commit number
# arg3: repository address of PR
# arg4: branch name
# arg5: PR number
# arg6: delivery id
#
# @see directory variables
# $dir_ci directory for webhooks (Absolute path)
# $dir_worker directory for PR workers
# $dir_commit directory for commits
#
# @modules:
# [MODULE] TAOS/pr-audit-build-tizen-x86_64 Check if 'gbs build -A x86_64' can be successfully passed.
# [MODULE] TAOS/pr-audit-build-tizen-armv7l Check if 'gbs build -A armv7l' can be successfully passed.
# [MODULE] TAOS/pr-audit-build-ubuntu Check if 'pdebuild' can be successfully passed.
# [MODULE] TAOS/pr-audit-build-yocto Check if 'devtool' can be successfully passed.
# [MODULE] TAOS/pr-audit-build-android Check if 'ndk-build' can be successfully passed.
# [MODULE] plugins-base Plugin group that consist of a well-maintained modules
# [MODULE] plugins-good Plugin group that follow Apache license with good quality
# [MODULE] plugins-staging Plugin group that does not have evaluation and aging test enough
# --------------------------- Pre-setting module ----------------------------------------------------------------------
# Positional arguments supplied by the CI bot (see the file header for details).
input_date=$1
input_commit=$2
input_repo=$3
input_branch=$4
input_pr=$5
input_delivery_id=$6
# Note that the server administrator must declare variables after installing required packages.
echo "[DEBUG] Importing the config-server-admistrator.sh file.\n"
source ./config/config-server-administrator.sh
# Note the "source ./config/config-environment.sh" file can be called in another script
# instead of in this file. It is to support asynchronous operation from cibot.php
echo "[DEBUG] Importing the config-environment.sh file.\n"
source ./config/config-environment.sh
# Check if input arguments are correct.
if [[ $1 == "" || $2 == "" || $3 == "" || $4 == "" || $5 == "" || $6 == "" ]]; then
    printf "[DEBUG] ERROR: Please, input correct arguments.\n"
    exit 1
fi
# Import global variables
echo "[DEBUG] Importing a global variable module.\n"
source ./common/global-variable.sh
# Check if dependent packages are installed
source ./common/api_collection.sh
check_dependency gbs
check_dependency tee
check_dependency curl
check_dependency grep
check_dependency wc
check_dependency cat
check_dependency sed
check_dependency awk
check_dependency basename
echo "[DEBUG] Checked dependency packages.\n"
# Include a PR scheduler module to handle a run-queue and wait-queue while running a build tasks
echo "[DEBUG] Importing the PR scheduler.\n"
source ./common/pr-scheduler.sh
# Include a Out-of-PR(OOP) killer to handle lots of duplicated same PRs with LRU approach
echo "[DEBUG] Importing the OOP Killer.\n"
source ./common/out-of-pr-killer.sh
# Get user ID from the input_repo string
# (for https://github.com/<owner>/<repo>, the slash-split Array[3] is <owner>).
set -- "${input_repo}"
IFS="\/"; declare -a Array=($*); unset IFS;
user_id="@${Array[3]}"
# Set folder name uniquely to run CI in different folder per a PR.
dir_worker="repo-workers/pr-checker"
# Set project repo name of contributor
PRJ_REPO_OWNER=`echo $(basename "${input_repo%.*}")`
cd ..
export dir_ci=`pwd`
# Create dir_work folder
if [[ ! -d $dir_worker ]]; then
    mkdir -p $dir_worker
fi
cd $dir_worker
export dir_worker=$dir_worker
# Check if dir_commit folder exists, then, create dir_commit folder
# let's keep the existing result although the same target directory already exists.
cd $dir_ci
export dir_commit=${dir_worker}/${input_pr}-${input_date}-${input_commit}
# Run the Out-of-PR (OOP) killer:
# Condition: If the developers try to re-send a lot of same PRs repeatedly,
# the OOP killer stops compulsorily the previous same PRs invoked by checker-pr-gateway.sh
run_oop_killer
# --------------------------- CI Trigger (wait queue) -----------------------------------------------------------------
if [[ $pr_comment_pr_updated == 1 ]]; then
    # Inform all developers of their activity whenever PR submitter resubmit their PR after applying comments of reviews
    message=":dart: **cibot**: $user_id has updated the pull request."
    cibot_comment $TOKEN "$message" "$GITHUB_WEBHOOK_API/issues/$input_pr/comments"
fi
# Load the configuration file that user defined to build selectively.
echo "[MODULE] plugins-base: Plugin group that does have well-maintained features as a base module."
echo "[MODULE] plugins-good: Plugin group that follow Apache license with good quality"
echo "[MODULE] plugins-staging: Plugin group that does not has evaluation and aging test enough"
echo "[DEBUG] The current directory: $(pwd)"
source ${REFERENCE_REPOSITORY}/ci/taos/config/config-plugins-audit.sh 2>> ../audit_module_error.log
echo "[DEBUG] source ${REFERENCE_REPOSITORY}/ci/taos/config/config-plugins-audit.sh"
# Create new context name to monitor progress status of a checker
message="Trigger: wait queue. There are other build jobs and we need to wait.. The commit number is $input_commit."
cibot_report $TOKEN "pending" "(INFO)TAOS/pr-audit-all" "$message" "${CISERVER}${PRJ_REPO_UPSTREAM}/ci/${dir_commit}/" "$GITHUB_WEBHOOK_API/statuses/$input_commit"
# Announce the "wait queue" state of every configured audit plugin.
for plugin in ${audit_plugins[*]}
do
    echo -e "[DEBUG] -----------------------------"
    if [[ ${plugin} == "pr-audit-build-tizen" ]]; then
        for arch in $pr_build_arch_type
        do
            echo "[DEBUG] wait queue: Job is queued to run 'gbs build -A $arch (for Tizen)' command."
            ${plugin}-wait-queue $arch
        done
    else
        echo "[DEBUG] wait queue: Job is queue to run $plugin"
        ${plugin}-wait-queue
    fi
done
# --------------------------- audit module: start -----------------------------------------------------
echo "[DEBUG] The current directory: $(pwd)"
echo -e "[DEBUG] Starting an audit checker...."
echo -e "[DEBUG] dir_ci is '$dir_ci'"
echo -e "[DEBUG] dir_worker is '$dir_worker'"
echo -e "[DEBUG] dir_commit is '$dir_commit'"
echo -e "[DEBUG] Let's move to a git repository folder."
cd $dir_ci
cd $dir_commit
cd ./${PRJ_REPO_OWNER}
echo "[DEBUG] The current directory: $(pwd)"
echo -e "[MODULE] Exception Handling: Let's skip CI-Build/UnitTest in case of no buildable files. "
# Check if PR-build can be skipped.
# BUILD_MODE is created in order that developers can do debugging easily in console after adding new CI facility.
#
# Note that ../report/build_log_${input_pr}_output.txt includes both stdout(1) and stderr(2) in case of BUILD_MODE=1.
# BUILD_MODE=0 : run "gbs build" command without generating debugging information.
# BUILD_MODE=1 : run "gbs build" command with generation of debugging contents.
# BUILD_MODE=99: skip "gbs build" procedures to do debugging of another CI function.
FILELIST=`git show --pretty="format:" --name-only --diff-filter=AMRC`
BUILD_MODE=99
# A single non-skippable changed file is enough to require a real build.
for file in $FILELIST
do
    if [[ "$file" =~ ($SKIP_CI_PATHS_AUDIT)$ ]]; then
        echo "[DEBUG] $file may be skipped."
    else
        echo "[DEBUG] $file cannot be skipped."
        BUILD_MODE=0
        break
    fi
done
# Declare default variables
check_result="success"
global_check_result="success"
if [[ -d $REPOCACHE ]]; then
    echo "[DEBUG] repocache, $REPOCACHE already exists. Good"
    # TODO: periodically delete the contents of REPOCACHE. (e.g., every Sunday?)
else
    echo "[DEBUG] repocache, $REPOCACHE does not exists. Create one"
    # Delete if it's a file.
    rm -f $REPOCACHE
    mkdir -p $REPOCACHE
fi
echo "[DEBUG] Link to the RPM repo cache to accelerate GBS start up"
mkdir -p ./GBS-ROOT/local/
pushd ./GBS-ROOT/local
ln -s $REPOCACHE cache
popd
# --------------------------- CI Trigger (ready queue) --------------------------------------------------------
# Note that package build results in the unexpected build failure due to some reasons such as server issue,
# changes of build environment, and high overload of run queeue. So We need to provide ready queue to inform
# users of current status of a pull request.
message="Trigger: wait queue. The commit number is $input_commit."
cibot_report $TOKEN "pending" "(INFO)TAOS/pr-audit-all" "$message" "${CISERVER}${PRJ_REPO_UPSTREAM}/ci/${dir_commit}/" "$GITHUB_WEBHOOK_API/statuses/$input_commit"
# Announce the "ready queue" state of every configured audit plugin.
for plugin in ${audit_plugins[*]}
do
    echo -e "[DEBUG] -----------------------------"
    if [[ ${plugin} == "pr-audit-build-tizen" ]]; then
        for arch in $pr_build_arch_type
        do
            echo "[DEBUG] ready queue: Job is started to run 'gbs build -A $arch (for Tizen)' command."
            ${plugin}-ready-queue $arch
        done
    else
        echo "[DEBUG] ready queue: Job is started to run $plugin"
        ${plugin}-ready-queue
    fi
done
# --------------------------- CI Trigger (run queue) --------------------------------------------------------
# Note that major job is run qeue amon the queues while executing the audit checker. So we have to notify
# if the current status of pull reqeust is building or not.
message="Trigger: run queue. The commit number is $input_commit."
cibot_report $TOKEN "pending" "(INFO)TAOS/pr-audit-all" "$message" "${CISERVER}${PRJ_REPO_UPSTREAM}/ci/${dir_commit}/" "$GITHUB_WEBHOOK_API/statuses/$input_commit"
for plugin in ${audit_plugins[*]}
do
    # Run the pull request scheduler: manage queues to minimize overhead possibility of the server
    # due to (1) too many PRs and (2) the low-end server equipment.
    # The 'pr_sched_runqueue' function is located in 'common' folder.
    pr_sched_runqueue "The $plugin plugin module"
    echo -e "-----------------------------"
    if [[ ${plugin} == "pr-audit-build-tizen" ]]; then
        for arch in $pr_build_arch_type
        do
            echo "[DEBUG] run queue: Compiling the source code to Tizen $arch RPM package."
            ${plugin}-run-queue $arch
        done
    else
        echo "[DEBUG] run queue: Running the '$plugin' module"
        ${plugin}-run-queue
    fi
done
if [[ ${BUILD_TEST_FAIL} -eq 1 ]]; then
    # Comment a hint on failed PR to author.
    message=":octocat: **cibot**: $user_id, A builder checker could not be completed because one of the checkers is not completed. In order to find out a reason, please go to ${CISERVER}${PRJ_REPO_UPSTREAM}/ci/${dir_commit}/."
    cibot_comment $TOKEN "$message" "$GITHUB_WEBHOOK_API/issues/$input_pr/comments"
fi
# --------------------------- Report module: generate a log file and check other conditions --------------------------
# Save webhook information for debugging
echo ""
echo "[DEBUG] Start time : ${input_date}" >> ../report/build_log_${input_pr}_output.txt
echo "[DEBUG] Commit number : ${input_commit}" >> ../report/build_log_${input_pr}_output.txt
echo "[DEBUG] Repository : ${input_repo}" >> ../report/build_log_${input_pr}_output.txt
echo "[DEBUG] Branch name : ${input_branch}" >> ../report/build_log_${input_pr}_output.txt
echo "[DEBUG] PR number : ${input_pr}" >> ../report/build_log_${input_pr}_output.txt
echo "[DEBUG] X-GitHub-Delivery: ${input_delivery_id}" >> ../report/build_log_${input_pr}_output.txt
# Optimize size of log file (e.g., from 20MB to 1MB)
# remove unnecessary contents that are created by resource checker
__log_size_filter="/]]$\|for.*req_build.*in\|for.*}'\|']'$\|found=\|basename\|search_res\|local.*'target=/ d"
sed "${__log_size_filter}" ../report/build_log_${input_pr}_output.txt > ../report/build_log_${input_pr}_output_tmp.txt
rm -f ../report/build_log_${input_pr}_output.txt
mv ../report/build_log_${input_pr}_output_tmp.txt ../report/build_log_${input_pr}_output.txt
ls -al
# Inform developers of the warning message in case that the log file exceeds 10MB.
echo "[DEBUG] Check if the log file size exceeds 10MB."
FILESIZE=$(stat -c%s "../report/build_log_${input_pr}_output.txt")
if [[ $FILESIZE -le 10*1024*1024 ]]; then
    echo "[DEBUG] Passed. The file size of build_log_${input_pr}_output.txt is $FILESIZE bytes."
    check_result="success"
else
    echo "[DEBUG] Failed. The file size of build_log_${input_pr}_output.txt is $FILESIZE bytes."
    check_result="failure"
    # Fixed: a stray 'break' statement used to sit here. 'break' is only valid
    # inside a for/while/until loop, so bash raised a runtime error; it was
    # dead code and has been removed.
    # NOTE(review): this branch only sets check_result, not
    # global_check_result, so an oversized log does not fail the overall audit
    # status reported below — confirm whether that is intended.
fi
# Add thousands separator in a number
FILESIZE_NUM=`echo $FILESIZE | sed ':a;s/\B[0-9]\{3\}\>/,&/;ta'`
if [[ $check_result == "success" ]]; then
    echo "[DEBUG] Good job. the log file does not exceed 10MB. The file size of build_log_${input_pr}_output.txt is $FILESIZE_NUM bytes."
else
    # inform PR submitter of a hint in more detail
    message=":fire: **cibot**: $user_id, Oooops. The log file exceeds 10MB due to incorrect commit(s). The file size of build_log_${input_pr}_output.txt is **$FILESIZE_NUM** bytes . Please resubmit after updating your PR to reduce the file size of build_log_${input_pr}_output.txt."
    cibot_comment $TOKEN "$message" "$GITHUB_WEBHOOK_API/issues/$input_pr/comments"
fi
# --------------------------- Report module: submit global check result -----------------------------------------------
# Report if all modules are successfully completed or not.
echo "[DEBUG] Send a total report with global_check_result variable. global_check_result is ${global_check_result}. "
# Exit status handed back to the gateway script (0 = all audits passed).
exit_code=0
if [[ $global_check_result == "success" ]]; then
    # The global check is succeeded.
    message="Successfully all audit modules are passed. Commit number is $input_commit."
    cibot_report $TOKEN "success" "(INFO)TAOS/pr-audit-all" "$message" "${CISERVER}${PRJ_REPO_UPSTREAM}/ci/${dir_commit}/" "$GITHUB_WEBHOOK_API/statuses/$input_commit"
    # If contributors want later, let's inform developers of CI test result to go to a review process as a final step before merging a PR
    echo "[DEBUG] All audit modules are passed - it is ready to review! :shipit:. Note that CI bot has two sub-bots such as TAOS/pr-audit-all and TAOS/pr-format-all."
elif [[ $global_check_result == "failure" ]]; then
    # The global check is failed.
    message="Oooops. One of the audits is failed. Resubmit the PR after fixing correctly. Commit number is $input_commit."
    cibot_report $TOKEN "failure" "(INFO)TAOS/pr-audit-all" "$message" "${CISERVER}${PRJ_REPO_UPSTREAM}/ci/${dir_commit}/" "$GITHUB_WEBHOOK_API/statuses/$input_commit"
    exit_code=1
else
    # The global check is failed due to CI error (unexpected value of $global_check_result).
    message="CI Error. There is a bug in CI script. Please contact the CI administrator."
    cibot_report $TOKEN "error" "(INFO)TAOS/pr-audit-all" "$message" "${CISERVER}${PRJ_REPO_UPSTREAM}/ci/${dir_commit}/" "$GITHUB_WEBHOOK_API/statuses/$input_commit"
    echo -e "[DEBUG] It seems that this script has a bug. Please check value of \$global_check_result."
    exit_code=1
fi
# --------------------------- Cleaner: Remove unnecessary directories --------------------
# If you have to remove unnecessary directory or files as a final step
# Please append a command below.
echo "[DEBUG] The current directory: $(pwd)."
# Return with exit code
exit $exit_code
|
#!/bin/bash
# Extracts a particular signal from 'ASCII generated' data from edfbrowser:
# lists the available signals, asks the user to pick one by number, and writes
# that signal's column to outFile_signal_<name>.

# Show the available signals (first space-separated field, skipping the header).
cut -f1 -d' ' Participant*_signals.txt | tail -n +2

echo -n "Which signal do you want > "
read -r signal

# Resolve the chosen signal number to its name (second comma-separated field).
# "$signal" is quoted so an empty or multi-word answer cannot break grep.
signVal=$(cut -f1 -d' ' Participant*_signals.txt | tail -n +2 | grep "$signal" | cut -d, -f2)

# Expand the data-file glob once, preserving the original globbing behaviour.
dataFile=(Participant*_data.txt)

# Column 1 of the data file is the time axis, so the requested signal lives in
# column signal+1.
signal=$((signal + 1))

cut -d, -f "$signal" "${dataFile[@]}" > "outFile_signal_$signVal"
#!/usr/bin/env bash
# ---------------------------------------------------------------------------
# HELPER FUNCTIONS
# echo an error message and exit the script
# Print an error message (prefixed with the script name) to stderr and abort
# the whole script with a non-zero status.
oops() {
    printf '%s: %s\n' "$0" "$*" >&2
    exit 1
}
# args: $1 = a binary you want to require e.g. tar, gpg, mail
#       $2 = a message briefly describing what you need the binary for
# Aborts via oops() when the binary is not on PATH.
require() {
    if ! command -v "$1" > /dev/null 2>&1; then
        oops "you do not have '$1' installed; needed for: $2"
    fi
}
# Emit a progress message with the script's "install:" prefix.
log() {
    printf 'install: %s\n' "$1"
}
# ---------------------------------------------------------------------------
# STUFF THAT NEEDS TO BE INSTALLED TO RUN THIS SCRIPT
require date "logging during script execution"
require python3 "for creating the virtual env that the scripts will work in"
# ---------------------------------------------------------------------------
# STUFF THAT NEEDS TO BE INSTALLED FOR SITE GENERATION AS A WHOLE TO WORK
require pandoc "for converting markdown notes to HTML"
require rsync "for pushing notes to a remote server"
# ---------------------------------------------------------------------------
# VARIABLES & FUNCTIONS
# Absolute path of the directory containing this script, independent of CWD.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Runs on every script exit, normal or error; there is currently nothing to
# tear down, so it only logs completion.
cleanup() {
    log "Done!"
}
trap cleanup EXIT
# Ensure a local Python virtual environment exists; on first run, create it
# and install the project requirements into it.
check_venv() {
    log "checking for venv..."
    if [ -d "venv" ]; then
        log "venv already exists - skipping"
        return
    fi
    log "could not find a venv"
    log "creating one with version: $(python3 --version)"
    python3 -m venv venv
    venv/bin/pip install --upgrade pip
    venv/bin/pip install -r "$DIR/../requirements.txt"
}
# ---------------------------------------------------------------------------
# MAIN SCRIPT EXECUTION
# The script's only job (for now) is to guarantee the venv exists.
check_venv
# ---------------------------------------------------------------------------
# CLEAN EXIT
exit 0
|
package com.ulfy.master.application.cm;
import com.ulfy.android.mvvm.IView;
import com.ulfy.master.application.base.BaseCM;
import com.ulfy.master.ui.cell.List5ChildCell;
/**
 * Cell model for a child entry of list 5; carries the display name and binds
 * to {@link List5ChildCell} for rendering in the MVVM framework.
 */
public class List5ChildCM extends BaseCM {
    // Display name shown by the bound cell.
    public String name;
    public List5ChildCM(String name) {
        this.name = name;
    }
    /** @return the view class used to render this cell model. */
    @Override public Class<? extends IView> getViewClass() {
        return List5ChildCell.class;
    }
}
import { Icon } from "@chakra-ui/react";
import React from "react";
/**
 * Filled five-pointed star icon (18x15 viewBox).
 *
 * Renders in `currentColor`; any props given are forwarded to the underlying
 * Chakra UI <Icon> (e.g. color, boxSize, aria-label) and may override the
 * default width/height.
 */
export function Star(props) {
  return (
    <Icon viewBox="0 0 18 15" width="18px" height="15px" {...props}>
      <path
        d="M9.07688 12.1735L4.01319 14.8055L5.14396 9.52016L0.882935 5.8615L6.64591 5.22683L9.07688 0.333496L11.5079 5.22683L17.2708 5.8615L13.0098 9.52016L14.1406 14.8055L9.07688 12.1735Z"
        fill="currentColor"
      />
    </Icon>
  );
}
|
def serialize(node):
    """Serialize a binary tree to a comma-separated pre-order string.

    Each node contributes its value; absent children are encoded as "#".
    For example, a root 1 with children 2 and 3 yields "1,2,#,#,3,#,#".

    Args:
        node: the tree root (any object with .val, .left and .right
            attributes), or None for an empty tree.

    Returns:
        The pre-order encoding as a string (no trailing comma).
    """
    if node is None:
        return "#"
    # Pre-order: value first, then the left and right subtrees.
    return str(node.val) + "," + serialize(node.left) + "," + serialize(node.right)
# Build a small sample tree and demonstrate serialization:
#        1
#       / \
#      2   3
#     /
#    4
root = TreeNode(1)
root.left = TreeNode(2)
root.right = TreeNode(3)
root.left.left = TreeNode(4)
# Fixed: the trailing note used "//", which is not a Python comment and made
# this line a SyntaxError; the claimed output also had a spurious trailing comma.
print(serialize(root))  # prints "1,2,4,#,#,#,3,#,#"
<gh_stars>0
/********************************************
* 文件名称: HistoricalCalVaR.java
* 系统名称: 外汇资金风险管理
* 模块名称: var计算
* 软件版权:
* 功能说明: 历史模拟法var计算处理类
* 系统版本: 2.0.0.1
* 开发人员: daijy
* 开发时间:
* 审核人员:
* 相关文档:
* 修改记录: 修改日期 修改人员 修改说明
* 20190125 daijy 使用历史法的mVaR计算
* 20190314 daijy 金额按目标币种进行四舍五入
*********************************************/
package com.wangxy.exoskeleton.risk.calvar;
import java.util.List;
import org.springframework.stereotype.Service;
import com.wangxy.exoskeleton.risk.exception.BizBussinessException;
import com.wangxy.exoskeleton.risk.exception.IErrMsg;
import com.wangxy.exoskeleton.risk.util.BigDecimalUtil;
import com.wangxy.exoskeleton.risk.var.HistoricalVaR;
import com.wangxy.exoskeleton.risk.var.VaRParam;
import com.wangxy.exoskeleton.risk.var.VaRResult;
@Service
public class HistoricalCalVaR extends AbstractCalVaR{
    /**
     * Computes VaR using the historical-simulation method.
     *
     * @param paramlist      positions/market data inputs for the VaR computation
     * @param percentile     confidence level percentile
     * @param simulatedtimes number of simulations (unused by the historical
     *                       method; kept for the common AbstractCalVaR contract)
     * @param days           holding-period length in days
     * @return result holding the VaR ratio and the rounded VaR amount
     * @throws BizBussinessException wrapping any failure of the underlying computation
     */
    @Override
    public VaRResult calVar(List<VaRParam> paramlist, double percentile, int simulatedtimes, int days) throws BizBussinessException {
        double[] result;
        try {
            result = HistoricalVaR.getVaR(paramlist, percentile, days);
        } catch (Exception e) {
            throw new BizBussinessException(IErrMsg.ERR_COMMERR,e);
        }
        VaRResult vaRResult = new VaRResult();
        vaRResult.setVar(result[0]);
        // 20190314 daijy: get the target currency's decimal places and round the amount.
        int decimalPoint = getDecimalPoint(paramlist);
        vaRResult.setVarAmt(BigDecimalUtil.round(result[1],decimalPoint));
        return vaRResult;
    }
    // 20190125 daijy: mVaR calculation using the historical method.
    protected double[] getMVaR(List<VaRParam> paramlist, double percentile, int days,VaRResult varResult) throws Exception {
        return HistoricalVaR.getMVaR(paramlist, percentile, days);
    }
}
|
#!/bin/bash
# START CONFIGURATION ##############################################

# NOTE(review): QATRACK_DIR, BACKUP_DIR and DATABASE below are left empty and
# must be filled in before this script is usable -- confirm deployment docs.

# set to absolute path of main directory of your QATrack+ deployment. Don't forget trailing slash
QATRACK_DIR=

# set to absolute path of where you want to keep your backups. Don't forget
# the trailing slash! Ideally this should be a remote/network disk rather
# than a local one!
BACKUP_DIR=

# database name to backup
DATABASE=
HOSTNAME=localhost

# username/PWD to use to access the database
USERNAME=qatrack
PASSWORD=qatrackpass

# backup uploaded files as well as the database ("yes" enables it)
ENABLE_FILE_BACKUP=yes

# Which day to take the weekly backup from (1-7 = Monday-Sunday)
DAY_OF_WEEK_TO_KEEP=7

# Number of days to keep daily backups
DAYS_TO_KEEP=7

# How many weeks to keep weekly backups
WEEKS_TO_KEEP=5

# How many months to keep monthly backups
MONTHS_TO_KEEP=12

# END CONFIGURATION ################################################
# perform_backups SUFFIX
#
# Create a dated backup directory named <YYYY-MM-DD><SUFFIX>/ under
# BACKUP_DIR, dump the configured MySQL database into it as a gzip file,
# and (optionally) archive the QATrack+ uploads directory and site CSS.
# Exits the script on unrecoverable errors (cannot create directory).
function perform_backups()
{
    SUFFIX=$1
    FINAL_BACKUP_DIR=$BACKUP_DIR"`date +\%Y-\%m-\%d`$SUFFIX/"

    echo "Making backup directory in $FINAL_BACKUP_DIR"

    if ! mkdir -p "$FINAL_BACKUP_DIR"; then
        echo "Cannot create backup directory in $FINAL_BACKUP_DIR." 1>&2
        exit 1;
    fi;

    echo "Custom backup of $DATABASE"

    # Dump to an .in_progress file first, then rename on success, so a
    # half-written dump is never mistaken for a valid backup.
    # Check mysqldump's own exit status via PIPESTATUS: without pipefail,
    # "if ! mysqldump | gzip" only tests gzip and a failed dump would be
    # silently accepted.
    MYSQL_PWD=$PASSWORD mysqldump -h "$HOSTNAME" -u "$USERNAME" --databases "$DATABASE" | gzip > "$FINAL_BACKUP_DIR$DATABASE".gz.in_progress
    if [ "${PIPESTATUS[0]}" -ne 0 ]; then
        echo "[!!ERROR!!] Failed to produce backup database $DATABASE"
    else
        mv "$FINAL_BACKUP_DIR$DATABASE".gz.in_progress "$FINAL_BACKUP_DIR$DATABASE".gz
    fi

    if [ "$ENABLE_FILE_BACKUP" = "yes" ]
    then
        FILE_BACKUP_PATH=$FINAL_BACKUP_DIR"uploads.tar.gz"
        UPLOADS_DIR=$QATRACK_DIR"qatrack/media/uploads/"
        CSS_PATH=$QATRACK_DIR"qatrack/static/qatrack_core/css/site.css"

        echo "Backup of QATrack+ Uploads"
        tar cvf - "$UPLOADS_DIR" | gzip -9 - > "$FILE_BACKUP_PATH"
        # Site CSS is optional; ignore a missing file.
        cp "$CSS_PATH" "$FINAL_BACKUP_DIR" 2>/dev/null
    fi

    echo -e "\nDatabase backups complete!"
}
# ---------------------------------------------------------------------------
# Backup rotation: monthly on the 1st of the month, weekly on the configured
# weekday, daily otherwise. Each tier first prunes its own expired backup
# directories, then takes a fresh backup and exits.
# ---------------------------------------------------------------------------

# MONTHLY BACKUPS
DAY_OF_MONTH=$(date +%d)
EXPIRED_MONTH_DAYS=$(( (MONTHS_TO_KEEP * 30) + 1 ))

if [ "$DAY_OF_MONTH" -eq 1 ]; then
    # Delete all expired monthly directories
    find "$BACKUP_DIR" -maxdepth 1 -mtime +"$EXPIRED_MONTH_DAYS" -name "*-monthly" -exec rm -rf '{}' ';'
    perform_backups "-monthly"
    exit 0;
fi

# WEEKLY BACKUPS
DAY_OF_WEEK=$(date +%u) # 1-7 (Monday-Sunday)
EXPIRED_DAYS=$(( (WEEKS_TO_KEEP * 7) + 1 ))

if [ "$DAY_OF_WEEK" = "$DAY_OF_WEEK_TO_KEEP" ]; then
    # Delete all expired weekly directories
    find "$BACKUP_DIR" -maxdepth 1 -mtime +"$EXPIRED_DAYS" -name "*-weekly" -exec rm -rf '{}' ';'
    perform_backups "-weekly"
    exit 0;
fi

# DAILY BACKUPS
# Delete daily backups DAYS_TO_KEEP days old or more
find "$BACKUP_DIR" -maxdepth 1 -mtime +"$DAYS_TO_KEEP" -name "*-daily" -exec rm -rf '{}' ';'
perform_backups "-daily"
|
#!/bin/bash
# Environment report + TensorRT/Keras benchmarks, run inside the
# tensorflow_p36 conda environment.

source activate tensorflow_p36

# Print a banner so each report section is easy to spot in the log.
section() {
    echo -e "\n****************\n $1 \n****************\n"
}

section "CPU Info"
lscpu

section "NVIDIA GPU Info"
nvidia-smi

section "Memory Info"
free -h

section "GCC Info"
gcc -v

section "Python Info"
python3 --version

section "Pip Packages"
pip3 list

section "Benchmarking InceptionV3"
python tensorrt_keras_inceptionv3-benchmark.py

section "Benchmarking 2D U-Net"
python tensorrt_keras_2dunet-decatlon-benchmark.py
|
<filename>src/app/services/global.mnemonickeypad.service.ts
import { Injectable } from '@angular/core';
import { ModalController } from '@ionic/angular';
import { Subject } from 'rxjs';
import { MnemonicKeypadComponent } from '../components/mnemonic-keypad/mnemonic-keypad.component';
import { GlobalThemeService } from './global.theme.service';
import { ChineseMnemonicSuggestionProvider } from './mnemonickeypad/chinese.provider';
import { EnglishMnemonicSuggestionProvider } from './mnemonickeypad/english.provider';
import { FrenchMnemonicSuggestionProvider } from './mnemonickeypad/french.provider';
import { ItalianMnemonicSuggestionProvider } from './mnemonickeypad/italian.provider';
/**
 * Generic key/value preference entry.
 * NOTE(review): not referenced anywhere in this file -- presumably imported
 * from here by other modules; confirm before removing.
 */
export type Preference<T> = {
  key: string;
  value: T;
}

/** A mnemonic input language: short language code plus its flag icon asset path. */
export type MnemonicLanguage = {
  code: string;
  icon: string;
}
/**
 * Service managing the custom on-screen mnemonic keypad, used so users can
 * enter mnemonic words without going through the system keyboard (which
 * could leak words). Word and paste events are published through the
 * typedMnemonicWords and pastedContent subjects while a keypad is open.
 */
@Injectable({
  providedIn: 'root'
})
export class GlobalMnemonicKeypadService {
  public static instance: GlobalMnemonicKeypadService; // Convenient way to get this service from non-injected classes

  // One BIP39 suggestion provider per language. Keys must match the codes
  // returned by getSupportedLanguages(), since getSuggestedMnemonicWords()
  // indexes this object directly with the caller-supplied code.
  private suggestionProviders = {
    en: new EnglishMnemonicSuggestionProvider(),
    fr: new FrenchMnemonicSuggestionProvider(),
    zh: new ChineseMnemonicSuggestionProvider(),
    it: new ItalianMnemonicSuggestionProvider()
  }

  // Currently displayed keypad modal, or null when no keypad is open.
  private activeMnemonicModal: HTMLIonModalElement = null;

  /** List of mnemonic words typed by the user during a single keypad session (open/close) */
  public typedMnemonicWords: Subject<string[]> = new Subject();

  /** Clipboard content pasted from the clipboard dialog */
  public pastedContent: Subject<string> = new Subject();

  constructor(private theme: GlobalThemeService, private modalCtrl: ModalController) {
    // Stored so non-injected classes can reach this singleton via .instance.
    GlobalMnemonicKeypadService.instance = this;
  }

  /** Languages selectable on the keypad, each paired with its flag icon asset. */
  public getSupportedLanguages(): MnemonicLanguage[] {
    return [
      { code: "en", icon: "assets/components/mnemonic-keypad/icons/flags/england_200_120.png" },
      { code: "zh", icon: "assets/components/mnemonic-keypad/icons/flags/china_200_120.png" },
      { code: "fr", icon: "assets/components/mnemonic-keypad/icons/flags/france_200_120.png" },
      { code: "it", icon: "assets/components/mnemonic-keypad/icons/flags/italy_200_120.png" }
    ];
  }

  /**
   * List of languages codes supported for mnemonic input.
   */
  public getSupportedLanguageCodes(): string[] {
    return this.getSupportedLanguages().map(l => l.code);
  }

  /**
   * Display the special mnemonic keypad that lets users enter their mnemonic words without
   * typing them in the system keyboard (that could leak words somehow).
   *
   * Subscribe to the typedMnemonicWords subject to get typed words.
   *
   * Resolves when the popup is closed (input ended, or cancelled)
   *
   * @param numberOfExpectedWords The keypad auto closes once user chose this number of words.
   * @param wordInputCb Called with the full word list each time it changes.
   * @param pasteCb Called with raw text pasted from the clipboard dialog.
   * @param modalShownCb Optional; called after the modal is presented and the DOM adjusted.
   */
  public promptMnemonic(numberOfExpectedWords: number, wordInputCb: (words: string[]) => void, pasteCb: (pasted: string) => void, modalShownCb?: () => void): Promise<void> {
    // eslint-disable-next-line @typescript-eslint/no-misused-promises, no-async-promise-executor
    return new Promise(async resolve => {
      // Close any keypad left open by a previous session before starting a new one.
      await this.dismissMnemonicPrompt();

      this.activeMnemonicModal = await this.modalCtrl.create({
        component: MnemonicKeypadComponent,
        componentProps: {},
        backdropDismiss: true, // Closeable
        showBackdrop: false,
        cssClass: !this.theme.darkMode ? "identity-mnemonickeypad-component" : 'identity-mnemonickeypad-component-dark'
      });

      // Presumably the last <ion-content> in the document is the screen
      // content sitting just behind the keypad -- TODO confirm this holds
      // with nested modals.
      let backContents = document.getElementsByTagName("ion-content");
      let justBehindScreenContent = backContents[backContents.length - 1];

      // Forward typed words to the caller; auto-close once the expected
      // number of words has been entered.
      let wordsSub = this.typedMnemonicWords.subscribe(words => {
        wordInputCb(words);
        if (words && words.length === numberOfExpectedWords) {
          void this.dismissMnemonicPrompt();
        }
      });
      let pasteSub = this.pastedContent.subscribe(pasteCb);

      void this.activeMnemonicModal.onDidDismiss().then((params) => {
        // Restore background content original size
        justBehindScreenContent.style.cssText = "";
        // Drop subscriptions so a later session starts clean.
        wordsSub.unsubscribe();
        pasteSub.unsubscribe();
        resolve();
      });

      await this.activeMnemonicModal.present();

      // Reduce the main visible content area to go over the keypad and thus be scrollable
      justBehindScreenContent.style.cssText = "--padding-bottom : " + this.activeMnemonicModal.getElementsByTagName("ion-content")[0].clientHeight + "px !important";

      // Remove modal backdrop to make the background content user scrollable
      // On android and ios, <ion-backgrop> is at different positions in the shadow root container so
      // we have to search it.
      // Also, depending on different angular versions, shadow root is used or not.
      let children = this.activeMnemonicModal.shadowRoot ? this.activeMnemonicModal.shadowRoot.children : this.activeMnemonicModal.children;
      for (let i = 0; i < children.length; i++) {
        let c = children[i];
        if (c.tagName.toLowerCase() === "ion-backdrop") {
          c.remove();
          break;
        }
      }

      modalShownCb?.();
    });
  }

  /** Dismiss the keypad modal if one is open; safe to call when none is. */
  public async dismissMnemonicPrompt(): Promise<void> {
    if (this.activeMnemonicModal)
      await this.activeMnemonicModal.dismiss();
    this.activeMnemonicModal = null;
  }

  /**
   * From a given sequence of typed letters (letters), finds suggestions in existing BIP39
   * word lists.
   *
   * NOTE(review): an unsupported "lang" makes suggestionProviders[lang]
   * undefined and this call throws -- callers are presumably expected to
   * pass only supported codes; confirm.
   */
  public getSuggestedMnemonicWords(lang: string, letters: string): string[] {
    if (!letters)
      return [];

    return this.suggestionProviders[lang].getSuggestions(letters);
  }
}
|
<gh_stars>0
package model
/** Something that can produce one or more resolved content strings. */
trait Executable {
  /** Resolved content strings produced by this executable. */
  def contents: List[String]

  /** Replace every characteristic's key with its value inside contentBase. */
  def resolveContent(contentBase: String, characList: List[Characteristic]): String =
    characList.foldLeft(contentBase) { (resolved, characteristic) =>
      resolved.replaceAll(characteristic.key, characteristic.value)
    }
}
// An Operation may eventually expand into several resolved content strings;
// today the 0-until-1 loop below runs exactly once, so the result is a
// single-element list. The loop is kept as the author's placeholder for a
// future randomness factor (see inline comment).
case class Operation (val contentBase: String, val characteristics: List[Characteristic])
  extends Executable {
  def contents: List[String] = {
    for (i <- 0 until 1) //Represents some factor of randomness
      yield resolveContent(contentBase, characteristics)
  } toList
}
/** A Validation resolves its content template exactly once. */
case class Validation (val contentBase: String, val characteristics: List[Characteristic])
  extends Executable {
  def contents: List[String] = resolveContent(contentBase, characteristics) :: Nil
}
# Minimal example record describing a person.
person_data = dict(
    first_name="John",
    last_name="Doe",
    age=28,
)
#!/bin/bash -f
#
# This script reconciles the elasticsearch indexes against what is loaded
# into stardog. The --noconfig flag is for running in the dev environment
# where the setenv.sh file does not exist. The --force flag is used
# to recompute indexes that already exist rather than skipping them.
#

# Flag defaults: read config from setenv.sh, do not force reindexing.
config=1
force=0

# Consume recognized flags; anything unrecognized accumulates in "arr"
# and triggers the usage message below.
while [[ "$#" -gt 0 ]]; do case $1 in
  --noconfig) config=0;;
  --force) force=1;;
  *) arr=( "${arr[@]}" "$1" );;
esac; shift; done

if [ ${#arr[@]} -ne 0 ]; then
  echo "Usage: $0 [--noconfig] [--force]"
  echo " e.g. $0"
  echo " e.g. $0 --noconfig"
  echo " e.g. $0 --force"
  exit 1
fi

# Set up ability to format json: prefer jq, fall back to python's json.tool.
jq --help >> /dev/null 2>&1
if [[ $? -eq 0 ]]; then
  jq="jq ."
else
  jq="python -m json.tool"
fi
echo "--------------------------------------------------"
echo "Starting ...`/bin/date`"
echo "--------------------------------------------------"
echo ""

# Setup configuration: either source setenv.sh (deployed mode) or require
# the connection settings to already be in the environment (--noconfig mode).
echo "  Setup configuration"
if [[ $config -eq 1 ]]; then
  APP_HOME=/local/content/evsrestapi
  # NOTE(review): APP_NAME is never assigned in this script; presumably it is
  # exported by the calling environment. If unset, CONFIG_DIR collapses to
  # ${APP_HOME}//config -- confirm.
  CONFIG_DIR=${APP_HOME}/${APP_NAME}/config
  CONFIG_ENV_FILE=${CONFIG_DIR}/setenv.sh
  echo "    config = $CONFIG_ENV_FILE"
  . $CONFIG_ENV_FILE
  if [[ $? -ne 0 ]]; then
    echo "ERROR: $CONFIG_ENV_FILE does not exist or has a problem"
    exit 1
  fi
elif [[ -z $STARDOG_HOST ]]; then
  echo "ERROR: STARDOG_HOST is not set"
  exit 1
elif [[ -z $STARDOG_PORT ]]; then
  echo "ERROR: STARDOG_PORT is not set"
  exit 1
elif [[ -z $STARDOG_USERNAME ]]; then
  echo "ERROR: STARDOG_USERNAME is not set"
  exit 1
elif [[ -z $STARDOG_PASSWORD ]]; then
  echo "ERROR: STARDOG_PASSWORD is not set"
  exit 1
elif [[ -z $ES_SCHEME ]]; then
  echo "ERROR: ES_SCHEME is not set"
  exit 1
elif [[ -z $ES_HOST ]]; then
  echo "ERROR: ES_HOST is not set"
  exit 1
elif [[ -z $ES_PORT ]]; then
  echo "ERROR: ES_PORT is not set"
  exit 1
fi

# ES_CLEAN=true implies a forced rebuild even without --force.
if [[ $force -eq 1 ]]; then
  echo "    force = 1"
elif [[ $ES_CLEAN == "true" ]]; then
  echo "    force = 1 (ES_CLEAN=true)"
  force=1
fi

# List the stardog databases; the perl one-liner extracts the quoted names
# from the JSON array returned by the admin endpoint.
curl -s -g -u "${STARDOG_USERNAME}:$STARDOG_PASSWORD" \
  "http://${STARDOG_HOST}:${STARDOG_PORT}/admin/databases" |\
  $jq | perl -ne 's/\r//; $x=0 if /\]/; if ($x) { s/.* "//; s/",?$//; print "$_"; };
  $x=1 if/\[/;' > /tmp/db.$$.txt
if [[ $? -ne 0 ]]; then
  echo "ERROR: unexpected problem listing databases"
  exit 1
fi
echo "    databases = " `cat /tmp/db.$$.txt`
ct=`cat /tmp/db.$$.txt | wc -l`
if [[ $ct -eq 0 ]]; then
  echo "ERROR: no stardog databases, this is unexpected"
  exit 1
fi
# Prep query to read all version info. The heredoc body is the literal
# "query=<sparql>" form-encoded payload sent to each database below.
echo "  Lookup version info for latest terminology in stardog"
cat > /tmp/x.$$.txt << EOF
query=PREFIX owl:<http://www.w3.org/2002/07/owl#>
PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
PREFIX xsd:<http://www.w3.org/2001/XMLSchema#>
PREFIX dc:<http://purl.org/dc/elements/1.1/>
PREFIX xml:<http://www.w3.org/2001/XMLSchema>
select ?graphName ?version where {
  graph ?graphName {
    ?source a owl:Ontology .
    ?source owl:versionInfo ?version .
    ?source dc:date ?date .
    ?source rdfs:comment ?comment .
  }
}
EOF
query=`cat /tmp/x.$$.txt`

# Run the query against each of the databases, collecting "version|db"
# lines in /tmp/y.$$.txt. The perl one-liner pulls the "value" field that
# follows each "version" binding in the SPARQL JSON results.
/bin/rm -f /tmp/y.$$.txt
touch /tmp/y.$$.txt
for db in `cat /tmp/db.$$.txt`; do
  curl -s -g -u "${STARDOG_USERNAME}:$STARDOG_PASSWORD" \
    http://${STARDOG_HOST}:${STARDOG_PORT}/$db/query \
    --data-urlencode "$query" -H "Accept: application/sparql-results+json" |\
    $jq | perl -ne 'chop; $x=1 if /"version"/; $x=0 if /\}/; if ($x && /"value"/) {
    s/.* "//; s/".*//; print "$_|'$db'\n"; } ' >> /tmp/y.$$.txt
  if [[ $? -ne 0 ]]; then
    echo "ERROR: unexpected problem obtaining $db versions from stardog"
    exit 1
  fi
done

# Sort by version then reverse by DB (NCIT2 goes before CTRP)
# this is because we need "monthly" to be indexed from the "monthlyDb"
# defined in ncit.json
/bin/sort -t\| -k 1,1 -k 2,2r -o /tmp/y.$$.txt /tmp/y.$$.txt
cat /tmp/y.$$.txt | sed 's/^/    version = /;'
# When ES_CLEAN=true, start from a fresh evs_metadata index.
if [[ $ES_CLEAN == "true" ]]; then
  echo "  Remove and recreate evs_metadata index"
  curl -s -X DELETE "$ES_SCHEME://$ES_HOST:$ES_PORT/evs_metadata" >> /dev/null
  if [[ $? -ne 0 ]]; then
    echo "ERROR: unexpected error deleting evs_metadata index"
    exit 1
  fi
  curl -s -X PUT "$ES_SCHEME://$ES_HOST:$ES_PORT/evs_metadata" >> /dev/null
  if [[ $? -ne 0 ]]; then
    echo "ERROR: unexpected error creating evs_metadata index"
    exit 1
  fi
fi

# set the max number of fields higher
# we can probably remove this when we figure a better answer
echo "  Set index.mapping.total_fields.limit = 5000"
curl -s -X PUT "$ES_SCHEME://$ES_HOST:$ES_PORT/evs_metadata/_settings" \
  -H "Content-type: application/json" -d '{ "index.mapping.total_fields.limit": 5000 }' >> /dev/null
if [[ $? -ne 0 ]]; then
  echo "ERROR: unexpected error setting index.mapping.total_fields in evs_metadata"
  exit 1
fi

# For each DB|version, check whether indexes already exist for that version
echo ""
export PATH="/usr/local/jdk1.8/bin/:$PATH"

# Handle the local setup: in dev mode (--noconfig), use the "local" Spring
# profile and the jar built under build/libs instead of the deployed jar.
local=""
jar="../lib/evsrestapi.jar"
if [[ $config -eq 0 ]]; then
  local="-Dspring.profiles.active=local"
  jar=build/libs/`ls build/libs/ | grep evsrestapi | grep jar | head -1`
fi
export EVS_SERVER_PORT="8083"
# Main loop: for each "version|db" pair, check whether the expected indexes
# exist and (re)build them when missing or when a rebuild is forced.
for x in `cat /tmp/y.$$.txt`; do
  echo "  Check indexes for $x"
  version=`echo $x | cut -d\| -f 1`
  # cv = version with dots removed, as used in index names.
  cv=`echo $version | perl -pe 's/\.//;'`
  db=`echo $x | cut -d\| -f 2`

  # if previous version and current version match, then skip
  # this is a monthly that's in both NCIT2 and CTRP databases
  if [[ $cv == $pv ]]; then
    echo "    SEEN $cv, continue"
    continue
  fi

  # An index set is complete only if all three expected indexes exist.
  exists=1
  for y in `echo "evs_metadata concept_ncit_$cv evs_object_ncit_$cv"`; do
    # Check for index
    curl -s -o /tmp/x.$$.txt ${ES_SCHEME}://${ES_HOST}:${ES_PORT}/_cat/indices
    if [[ $? -ne 0 ]]; then
      echo "ERROR: unexpected problem attempting to list indexes"
      exit 1
    fi
    # handle the no indexes case
    ct=`grep $y /tmp/x.$$.txt | wc -l`
    if [[ $ct -eq 0 ]]; then
      echo "    MISSING $y index"
      exists=0
    fi
  done

  if [[ $exists -eq 1 ]] && [[ $force -eq 0 ]]; then
    echo "    FOUND indexes for $version, continue"
  else
    if [[ $exists -eq 1 ]] && [[ $force -eq 1 ]]; then
      echo "    FOUND indexes for $version, force reindex anyway"
      # Remove if this already exists
      # NOTE(review): "version" is reassigned here from cv by stripping up to
      # the last underscore -- presumably to match remove.sh's expected
      # argument form; confirm.
      version=`echo $cv | perl -pe 's/.*_//;'`
      echo "    Remove indexes for ncit $version"
      DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
      $DIR/remove.sh ncit $version > /tmp/x.$$ 2>&1
      if [[ $? -ne 0 ]]; then
        cat /tmp/x.$$ | sed 's/^/    /'
        echo "ERROR: removing ncit $version indexes"
        exit 1
      fi
    fi

    # Run reindexing process (choose a port other than the one that it runs on)
    export STARDOG_DB=$db
    export EVS_SERVER_PORT="8083"
    echo "    Generate indexes for $STARDOG_DB $version"
    echo "java $local -Xmx4096M -jar $jar --terminology ncit_$version --realTime --forceDeleteIndex" | sed 's/^/    /'
    java $local -Xmx4096M -jar $jar --terminology ncit_$version --realTime --forceDeleteIndex
    if [[ $? -ne 0 ]]; then
      echo "ERROR: unexpected error building indexes"
      exit 1
    fi

    # Set the indexes to have a larger max_result_window
    echo "    Set max result window to 150000 for concept_ncit_$cv"
    curl -s -X PUT "$ES_SCHEME://$ES_HOST:$ES_PORT/concept_ncit_$cv/_settings" \
      -H "Content-type: application/json" -d '{ "index" : { "max_result_window" : 150000 } }' >> /dev/null
    if [[ $? -ne 0 ]]; then
      echo "ERROR: unexpected error setting max_result_window"
      exit 1
    fi
  fi

  # track previous version, if next one is the same, don't index again.
  pv=$cv
done
# Stale indexes are automatically cleaned up by the indexing process
# It checks against stardog and reconciles everything and updates latest flags
# regardless of whether there was new data
echo "  Reconcile stale indexes and update flags"
echo "    java $local -jar $jar --terminology ncit --skipConcepts"
java $local -jar $jar --terminology ncit --skipConcepts
if [[ $? -ne 0 ]]; then
  echo "ERROR: unexpected error building indexes"
  exit 1
fi

# Cleanup temporary work files created above.
/bin/rm -f /tmp/[xy].$$.txt /tmp/db.$$.txt /tmp/x.$$
echo ""
echo "--------------------------------------------------"
echo "Finished ...`/bin/date`"
echo "--------------------------------------------------"
|
<reponame>victorzottmann/dice-game<gh_stars>0
import { rollDice } from "./utils.js";

describe('rollDice', () => {
  it('returns a number from 1-6', () => {
    // A single sample could pass by luck, and the range checks alone would
    // accept fractional values; sample several rolls and require integers.
    for (let i = 0; i < 20; i++) {
      const number = rollDice();
      expect(Number.isInteger(number)).toBe(true);
      expect(number).toBeGreaterThan(0);
      expect(number).toBeLessThan(7);
    }
  })
})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.