text stringlengths 1 1.05M |
|---|
from .strategies import strategies, param_space
|
import React, { Component } from 'react';
import { AppRegistry, StyleSheet, Text, View } from 'react-native';
import { NavigationComponent } from 'react-native-material-bottom-navigation';
import { TabNavigator } from 'react-navigation';
import Icon from 'react-native-vector-icons/MaterialIcons';
import Main from '../Main';
import PastCalls from '../PastCalls';
/**
 * react-navigation TabNavigator wired to the material-design bottom bar.
 * Routes: `Main` and `PastCalls`; the bar sits at the bottom and swaps its
 * background color depending on the active tab.
 */
const TabNav = TabNavigator(
  {
    Main: { screen: Main },
    PastCalls: { screen: PastCalls }
  },
  {
    // Use the material bottom-navigation component instead of the default tab bar.
    tabBarComponent: NavigationComponent,
    tabBarPosition: 'bottom',
    tabBarOptions: {
      bottomNavigationOptions: {
        labelColor: 'white',
        rippleColor: 'white',
        tabs: {
          Main: {
            barBackgroundColor: '#0aa0d9'
          },
          PastCalls: {
            barBackgroundColor: '#ff564b'
          }
        }
      }
    }
  }
);
/**
 * Root screen component: renders the bottom-tab navigator defined above.
 */
export default class NewMain extends Component {
  render() {
    return <TabNav />;
  }
}
|
// Barrel/route file: re-export the BlockPage feature page as the default export.
import { BlockPage } from '../../features/blocks/ui/pages/BlockPage';
export default BlockPage;
|
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Generates the jekyll API docs for the Composer modules: extracts JSDoc JSON
# with the parsejs tool, then renders each class through nunjucks templates.

# Exit on first error, print all commands.
set -ev

# Grab the Composer directory.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"

# start index higher to allow for extras such as a class index.
INDEX=1210

# rely on the parsejs tool (that is used to check the external API) to get the suitable classes
# NOTE: all expansions are quoted so paths containing spaces do not word-split.
node "${DIR}/../composer-common/lib/codegen/parsejs.js" --format JSON --inputDir "${DIR}/../composer-client/lib" --outputDir "${DIR}/jsondata"
node "${DIR}/../composer-common/lib/codegen/parsejs.js" --format JSON --inputDir "${DIR}/../composer-admin/lib" --outputDir "${DIR}/jsondata"
node "${DIR}/../composer-common/lib/codegen/parsejs.js" --format JSON --inputDir "${DIR}/../composer-runtime/lib/api" --outputDir "${DIR}/jsondata"
node "${DIR}/../composer-common/lib/codegen/parsejs.js" --format JSON --inputDir "${DIR}/../composer-common/lib" --outputDir "${DIR}/jsondata"
node "${DIR}/../composer-common/lib/codegen/parsejs.js" --format JSON --single "${DIR}/../composer-runtime/lib/api.js" --outputDir "${DIR}/jsondata"

cd "${DIR}"
node ./scripts/merge.js

# for each json file process using the class template
for file in "${DIR}"/jsondata/*.json
do
    echo "${file}"
    BASENAME="$(basename -s .json "${file}")"
    ((INDEX++))
    "${DIR}/apigen-opus/bin/cli1.js" -i "jsondata/${BASENAME}" -t class.njk -o "${DIR}/jekylldocs/api" --context "{\"index\":\"${INDEX}\"}"
done

# TODO, create the template a class index.
# This can be done by merging the json data from each class, and using a different template
"${DIR}/apigen-opus/bin/cli1.js" -i allData -t classindex.njk -o "${DIR}/jekylldocs/api" --context "{\"index\":\"1205\"}"

# Copy the Main index doc into place
cp "${DIR}/scripts/api-doc-index.md.tpl" "${DIR}/jekylldocs/api/api-doc-index.md"
# all done
<reponame>Hiswe/schools-out<filename>so-api/models/lessons.js
'use strict'
const Sequelize = require('sequelize')
const dayjs = require('dayjs')
const { sequelize } = require('../services')
// Build a dayjs instance for today whose hour/minute come from an "HH:mm"
// string. `~~` reproduces the original truncate-to-integer coercion
// (non-numeric parts become 0, matching the previous behavior).
function getHour(stringTime) {
  const parts = stringTime.split(`:`)
  const hours = ~~parts[0]
  const minutes = ~~parts[1]
  return dayjs().set(`h`, hours).set(`m`, minutes)
}
// Week-day names indexed by the stored `day` integer (0 = sunday).
// Frozen so the lookup table cannot be mutated at runtime.
const days = Object.freeze([
  `sunday`,
  `monday`,
  `tuesday`,
  `wednesday`,
  `thursday`,
  `friday`,
  `saturday`,
])
// Sequelize model for a recurring lesson slot. Times are stored as "HH:mm"
// strings plus a duration in hours; end time, day name and the display
// schedule are derived VIRTUAL fields (never persisted).
const Lesson = sequelize.define(
  `lesson`,
  {
    id: {
      type: Sequelize.UUID,
      defaultValue: Sequelize.UUIDV4,
      primaryKey: true,
    },
    name: {
      type: Sequelize.STRING,
      allowNull: false,
    },
    // Start time as an "HH:mm" string; parsed via getHour() above.
    startHour: {
      type: Sequelize.STRING,
      allowNull: false,
    },
    // Derived: startHour + duration, formatted "HH:mm".
    endHour: {
      type: Sequelize.VIRTUAL(Sequelize.STRING),
      get() {
        const start = this.getDataValue(`startHour`)
        const startTime = getHour(start)
        const duration = this.getDataValue(`duration`)
        return startTime
          .clone()
          .add(duration, `h`)
          .format(`HH:mm`)
      },
    },
    // Lesson length in hours; FLOAT allows fractions (e.g. 1.5).
    duration: {
      type: Sequelize.FLOAT,
      allowNull: false,
    },
    // day of the week — used as an index into `days` (0 = sunday)
    day: {
      type: Sequelize.INTEGER,
      allowNull: false,
    },
    // Derived: English name for `day`, or "–" when out of range.
    dayName: {
      type: Sequelize.VIRTUAL(Sequelize.STRING),
      get() {
        const day = this.getDataValue(`day`)
        return days[day] || `–`
      },
    },
    // Derived: "start–end" display string, e.g. "10:00–11:30".
    schedule: {
      type: Sequelize.VIRTUAL(Sequelize.STRING),
      get() {
        const start = this.getDataValue(`startHour`)
        const startTime = getHour(start)
        const duration = this.getDataValue(`duration`)
        const endTime = startTime.clone().add(duration, `h`)
        return `${startTime.format(`HH:mm`)}–${endTime.format(`HH:mm`)}`
      },
    },
    // course start/end dates (French: "début/fin des cours")
    startAt: {
      type: Sequelize.DATEONLY,
    },
    endAt: {
      type: Sequelize.DATEONLY,
    },
    // can put “required skill” here (beginner…)
    info: {
      type: Sequelize.JSON,
      defaultValue: {},
    },
  },
  {
    timestamps: false,
  },
)
module.exports = Lesson
|
/*
 * Declarations for the memory test suite. Each check returns an int status
 * code (convention defined by the test framework — presumably 0/nonzero for
 * pass/fail; confirm against the implementations).
 */
#ifndef MEMORY_CHECK_H
#define MEMORY_CHECK_H

int memory_creation_test();   /* construction of a memory object */
int memory_destroy_check();   /* destruction / cleanup */
int memory_owner_check();     /* ownership semantics */
int memory_compare_check();   /* comparison between memory objects */
int memory_set_check();       /* set/fill operations */
int memory_move_check();      /* move semantics */
int memory_copy_check();      /* copy semantics */

#endif // MEMORY_CHECK_H
|
package com.android_group10.needy.ui.InNeed;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.Spinner;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.ViewModelProvider;
import androidx.navigation.Navigation;
import androidx.recyclerview.widget.DefaultItemAnimator;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.android_group10.needy.Post;
import com.android_group10.needy.PostAdapter;
import com.android_group10.needy.R;
import com.android_group10.needy.ServiceType;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import java.util.ArrayList;
import java.util.Objects;
/**
 * Fragment listing open "in need" posts from the Firebase "Posts" node in a
 * RecyclerView, with optional filtering by service type and by address
 * (city substring or exact zip code).
 */
public class InNeedFragment extends Fragment {

    private InNeedViewModel inNeedViewModel;
    private View root;
    // Backing list for the adapter.
    // NOTE(review): static mutable state is shared across fragment instances — confirm intended.
    public static ArrayList<Post> dataList = new ArrayList<>();
    private final FirebaseDatabase db = FirebaseDatabase.getInstance();
    private RecyclerView recyclerView;
    // Placeholder text shown when the list is empty.
    private TextView textView;
    private Spinner spinner;
    // Hidden view used to stash the selected spinner position as text.
    private TextView hiddenText;
    private EditText filterText;
    private ImageButton applyFilters;
    private ImageButton clearFilters;

    public View onCreateView(@NonNull LayoutInflater inflater,
            ViewGroup container, Bundle savedInstanceState) {
        inNeedViewModel =
                new ViewModelProvider(this).get(InNeedViewModel.class);
        root = inflater.inflate(R.layout.fragment_in_need, container, false);

        // Look up all views once.
        recyclerView = root.findViewById(R.id.postRecyclerView_in_need);
        textView = root.findViewById(R.id.text_default2);
        filterText = root.findViewById(R.id.address_filter);
        applyFilters = root.findViewById(R.id.btn_set_filter);
        clearFilters = root.findViewById(R.id.btn_clear_filter);

        recyclerView.setHasFixedSize(true);
        recyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
        recyclerView.setItemAnimator(new DefaultItemAnimator());

        // Tapping a post navigates to its detail screen, passing the post along.
        PostAdapter myPostAdapter = new PostAdapter(requireContext(), dataList, position -> {
            Post clickedItem = dataList.get(position);
            //Create action (from generated code by safeargs plugin) and navigate using it while passing the clicked post
            InNeedFragmentDirections.ActionInNeedToOpenPost action = InNeedFragmentDirections.actionInNeedToOpenPost(clickedItem);
            Navigation.findNavController(root).navigate(action);
        });
        recyclerView.setAdapter(myPostAdapter);

        textView.setText(inNeedViewModel.getText().getValue());
        hiddenText = root.findViewById(R.id.hidden_textView1);
        spinner = root.findViewById(R.id.spinner1);

        // First entry is a "no filter" placeholder; spinner position N maps to
        // ServiceType ordinal N-1 (see addData()).
        String[] items = {"Select a Service type here (optional)", ServiceType.WALK_A_DOG.toString(), ServiceType.SHOPPING.toString(), ServiceType.TRANSPORTATION.toString(), ServiceType.CLEANING.toString(), ServiceType.OTHER.toString()};
        ArrayAdapter<String> adapter = new ArrayAdapter<>(getContext(), R.layout.simple_spinner_item, items);
        // NOTE(review): the dropdown also uses simple_spinner_item — confirm
        // simple_spinner_dropdown_item was not intended.
        adapter.setDropDownViewResource(android.R.layout.simple_spinner_item);
        spinner.setAdapter(adapter);
        spinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view,
                    int position, long id) {
                // Stash the selection so the filter click-listeners can read it later.
                hiddenText.setText(String.valueOf(parent.getItemIdAtPosition(position)));
            }

            @Override
            public void onNothingSelected(AdapterView<?> parent) {
            }
        });

        // Keep the unfiltered list in sync with the database.
        ValueEventListener listListener = new ValueEventListener() {
            @Override
            public void onDataChange(@NonNull DataSnapshot snapshot) {
                addData(snapshot, false);
            }

            @Override
            public void onCancelled(@NonNull DatabaseError error) {
                // Errors are silently ignored; the previous list stays on screen.
            }
        };
        db.getReference().child("Posts").addValueEventListener(listListener);
        return root;
    }

    @Override
    public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);

        // Re-query with filtering enabled when at least one filter is set.
        applyFilters.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                int selectedServiceType = Integer.parseInt(hiddenText.getText().toString());
                String addressFilter = filterText.getText().toString();
                if (!(addressFilter.isEmpty() && selectedServiceType == 0)) {
                    ValueEventListener listListener = new ValueEventListener() {
                        @Override
                        public void onDataChange(@NonNull DataSnapshot snapshot) {
                            addData(snapshot, true);
                        }

                        @Override
                        public void onCancelled(@NonNull DatabaseError error) {
                        }
                    };
                    // NOTE(review): every click registers a new persistent listener —
                    // confirm addListenerForSingleValueEvent was not intended.
                    db.getReference().child("Posts").addValueEventListener(listListener);
                }
            }
        });

        // Reset both filter inputs and reload the unfiltered list.
        clearFilters.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                ValueEventListener listListener = new ValueEventListener() {
                    @Override
                    public void onDataChange(@NonNull DataSnapshot snapshot) {
                        filterText.setText("");
                        spinner.setSelection(0);
                        addData(snapshot, false);
                    }

                    @Override
                    public void onCancelled(@NonNull DatabaseError error) {
                    }
                };
                db.getReference().child("Posts").addValueEventListener(listListener);
            }
        });
    }

    /**
     * Rebuilds {@link #dataList} from a "Posts" snapshot and refreshes the
     * adapter. Only posts with status 1 are shown; when {@code filters} is
     * true, the currently entered address and/or service-type filters apply.
     */
    private void addData(DataSnapshot snapshot, boolean filters) {
        if (snapshot.hasChildren()) {
            int count = 0;
            if (snapshot.getChildrenCount() != count) {
                dataList.clear();
                for (DataSnapshot child : snapshot.getChildren()) {
                    Post object = child.getValue(Post.class);
                    assert object != null;
                    if (object.getPostStatus() == 1) {
                        object.setAuthorUID(String.valueOf(child.child("author").getValue()));
                        if (!filters) {
                            dataList.add(object);
                        } else {
                            int selectedServiceType = Integer.parseInt(hiddenText.getText().toString());
                            String addressFilter = filterText.getText().toString();
                            if (!addressFilter.isEmpty()) {
                                // Address filter set: match city substring or exact zip,
                                // then optionally narrow by service type.
                                if (object.getCity().contains(addressFilter) || object.getZipCode().equals(addressFilter)) {
                                    if (selectedServiceType > 0) {
                                        if (object.getServiceType() == (selectedServiceType - 1)) {
                                            dataList.add(object);
                                        }
                                    } else dataList.add(object);
                                }
                            } else if (selectedServiceType > 0) {
                                // Service-type filter only. (addressFilter is empty on this
                                // branch, so the nested address check always falls through
                                // to the else.)
                                if (object.getServiceType() == (selectedServiceType - 1)) {
                                    if (!addressFilter.isEmpty()) {
                                        if (object.getCity().contains(addressFilter) || object.getZipCode().equals(addressFilter)) {
                                            dataList.add(object);
                                        }
                                    } else dataList.add(object);
                                }
                            }
                        }
                    }
                    count++;
                }
                if (dataList.size() != 0) {
                    textView.setText("");
                } else textView.setText(inNeedViewModel.getText().getValue());
                //Update recyclerview adapter
                if (recyclerView != null) {
                    PostAdapter postAdapter = (PostAdapter) recyclerView.getAdapter();
                    if (postAdapter != null) {
                        postAdapter.updateData(dataList);
                    }
                }
            }
        }
    }
}
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.gui;
import java.io.File;
import javax.swing.table.AbstractTableModel;
import org.fhwa.c2cri.gui.components.TestCaseCreationListener;
import org.fhwa.c2cri.gui.components.TestCaseEditJButton;
import org.fhwa.c2cri.gui.components.TestCaseViewJButton;
import org.fhwa.c2cri.testmodel.TestCase;
import org.fhwa.c2cri.testmodel.TestCases;
/**
 * The Class TestCasesTableModel.
 *
 * Swing table model exposing one row per {@link TestCase} with columns for
 * the title, the custom data source, and Edit/View buttons.
 *
 * @author TransCore ITS, LLC
 * Last Updated: 1/8/2014
 */
public class TestCasesTableModel extends AbstractTableModel implements TestCaseCreationListener {

    /** The Constant Title_Col (column index of the test-case name). */
    public static final int Title_Col = 0;

    /** The Constant Source (column index of the data-source path). */
    public static final int Source = 1;

    /** The Constant Edit (column index of the edit button). */
    public static final int Edit = 2;

    /** The Constant View (column index of the view button). */
    public static final int View = 3;

    /** The test cases backing this model. */
    private TestCases testCases;

    /** The column names (button columns have empty headers). */
    private String[] columnNames = {"Test Case",
        "Source","",""};

    /**
     * Instantiates a new test cases table model.
     *
     * Private: the model is unusable without test cases, so this only blocks
     * accidental no-arg construction.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     */
    private TestCasesTableModel() {
    }

    /**
     * Instantiates a new test cases table model.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param testCases the test cases
     */
    public TestCasesTableModel(TestCases testCases) {
        super();
        this.testCases = testCases;
    }

    /* (non-Javadoc)
     * @see javax.swing.table.TableModel#getColumnCount()
     */
    public int getColumnCount() {
        return columnNames.length;
    }

    /* (non-Javadoc)
     * @see javax.swing.table.TableModel#getRowCount()
     */
    public int getRowCount() {
        // System.out.println(" The number of rows was = "+projectRequirements.requirements.size());
        return testCases.testCases.size();
    }

    /* (non-Javadoc)
     * @see javax.swing.table.AbstractTableModel#getColumnName(int)
     */
    public String getColumnName(int col) {
        return columnNames[col];
    }

    /* (non-Javadoc)
     * @see javax.swing.table.TableModel#getValueAt(int, int)
     *
     * Button cells are created fresh on every call; the table renderer is
     * expected to display them as components.
     */
    public Object getValueAt(int row, int col) {
        final TestCase testCase = testCases.testCases.get(row);
        switch (col) {
            case Title_Col:
                return testCase.getName();
            case Source:
                // "default" marks a test case with no custom data location set.
                if ((testCase.getCustomDataLocation() == null)
                        || (testCase.getCustomDataLocation().equals(""))) {
                    return "default";
                } else {
                    return testCase.getCustomDataLocation();
                }
            case Edit:
                final TestCaseEditJButton editButton = new TestCaseEditJButton(testCase, this, row);
                editButton.setEnabled(true);
                return editButton;
            case View:
                final TestCaseViewJButton viewButton = new TestCaseViewJButton(testCase);
                viewButton.setEnabled(true);
                return viewButton;
        }
        throw new IllegalArgumentException("Illegal column: "
                + col);
    }

    /*
     * JTable uses this method to determine the default renderer/editor for
     * each cell. The class is derived from the first row's values so the
     * button columns get component renderers rather than plain text.
     */
    /* (non-Javadoc)
     * @see javax.swing.table.AbstractTableModel#getColumnClass(int)
     */
    public Class getColumnClass(int c) {
        return getValueAt(0, c).getClass();
    }

    /*
     * Only the Source and button columns are editable; the title is read-only.
     */
    /* (non-Javadoc)
     * @see javax.swing.table.AbstractTableModel#isCellEditable(int, int)
     */
    public boolean isCellEditable(int row, int col) {
        //Note that the data/cell address is constant,
        //no matter where the cell appears onscreen.
        if (col < 1) {
            return false;
        } else {
            return true;
        }
    }

    /*
     * Accepts a new Source value only when it is an existing file path, or
     * the empty string (which resets the test case to the default source).
     */
    /* (non-Javadoc)
     * @see javax.swing.table.AbstractTableModel#setValueAt(java.lang.Object, int, int)
     */
    public void setValueAt(Object value, int row, int col) {
        if ((row > -1) && (col == Source)) {
            if ((new File((String) value).exists())) {
                testCases.testCases.get(row).setCustomDataLocation((String) value);
                System.out.println(" Firing Row " + row);
                fireTableCellUpdated(row, col);
            } else if (((String) value).equals("")) {
                testCases.testCases.get(row).setCustomDataLocation((String) value);
                fireTableCellUpdated(row, col);
            }
        }
    }

    @Override
    /**
     * Refreshes the Source cell of the row whose test case was just created.
     */
    public void testCaseCreatedUpdate(int row) {
        fireTableCellUpdated(row,Source);
    }
}
|
<reponame>BrunoGrisci/EngineeringDesignusingMultiObjectiveEvolutionaryAlgorithms
/* Copyright 2009-2015 <NAME>
*
* This file is part of the MOEA Framework.
*
* The MOEA Framework is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* The MOEA Framework is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the MOEA Framework. If not, see <http://www.gnu.org/licenses/>.
*/
package org.moeaframework.algorithm;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.io.StringReader;
import org.junit.Assert;
import org.moeaframework.Analyzer;
import org.moeaframework.Executor;
import org.moeaframework.core.spi.AlgorithmFactory;
/**
 * Methods for comparing two algorithm implementations statistically.
 */
public abstract class AlgorithmTest {

    /**
     * Tests if two algorithms are statistically indifferent. The default
     * {@link AlgorithmFactory} is used to create instances.
     *
     * @param problem the name of the problem to test
     * @param algorithm1 the name of the first algorithm to test
     * @param algorithm2 the name of the second algorithm to test
     * @throws IOException should not occur
     */
    public void test(String problem, String algorithm1, String algorithm2)
            throws IOException {
        test(problem, algorithm1, algorithm2, AlgorithmFactory.getInstance());
    }

    /**
     * Tests if two algorithms are statistically indifferent.
     *
     * @param problem the name of the problem to test
     * @param algorithm1 the name of the first algorithm to test
     * @param algorithm2 the name of the second algorithm to test
     * @param factory the factory used to construct the algorithms
     * @throws IOException should not occur
     */
    public void test(String problem, String algorithm1, String algorithm2,
            AlgorithmFactory factory) throws IOException {
        Analyzer analyzer = new Analyzer()
                .withProblem(problem)
                .includeAllMetrics()
                .showAggregate()
                .showStatisticalSignificance();

        Executor executor = new Executor()
                .withProblem(problem)
                .usingAlgorithmFactory(factory)
                .withMaxEvaluations(10000)
                .distributeOnAllCores();

        // Run each algorithm for 10 seeds and feed the results to the analyzer.
        analyzer.addAll(algorithm1,
                executor.withAlgorithm(algorithm1).runSeeds(10));
        analyzer.addAll(algorithm2,
                executor.withAlgorithm(algorithm2).runSeeds(10));

        // try-with-resources replaces the manual null-checked close in the
        // finally block; behavior is unchanged.
        try (ByteArrayOutputStream output = new ByteArrayOutputStream()) {
            analyzer.printAnalysis(new PrintStream(output));

            // Require at least 5 indifferent metrics for the algorithms to be
            // considered statistically equivalent.
            Assert.assertTrue(countIndifferences(output.toString(), algorithm1)
                    >= 5);
        }
    }

    /**
     * Counts the number of indifferences in the statistical output by counting
     * the number of lines matching
     * <pre>
     *   Indifferent: [&lt;algorithmName&gt;]
     * </pre>
     *
     * @param output the statistical output from
     *        {@link Analyzer#printAnalysis(PrintStream)}
     * @param algorithmName the name of one of the algorithms being tested
     * @return the number of indifferences in the statistical output
     * @throws IOException should not occur
     */
    public int countIndifferences(String output, String algorithmName)
            throws IOException {
        int count = 0;

        try (BufferedReader reader = new BufferedReader(new StringReader(output))) {
            String line = null;

            while ((line = reader.readLine()) != null) {
                if (line.matches("^\\s*Indifferent:\\s*\\[" + algorithmName +
                        "\\]\\s*$")) {
                    count++;
                }
            }
        }

        return count;
    }

}
|
#! /bin/bash
# Selects a 32- or 64-bit wine-staging environment based on the name this
# script was invoked under (e.g. a symlink ending in 32 or 64), then reports
# the resulting prefix paths.

set -e

# Resolve the directory containing the real script (follows symlinks).
# Expansions are quoted so paths with spaces survive word-splitting.
base=$( dirname "$( readlink -f "$0" )" )
# The invoked name (without .sh) decides the bitness below.
name=$(basename "$0" .sh)
cd "$base"

case "$name" in
  *64)
    echo "use 64-bit wine"
    source wine-staging-64.rc
    ;;
  *32)
    echo "use 32-bit wine"
    source wine-staging-32.rc
    ;;
  *)
    echo "UNKNOWN BITNESS from name \"$name\"" 1>&2
    exit 1
    ;;
esac

# WINEPREFIX and drive_c are expected to be set by the sourced rc file.
echo "WINEPREFIX=$WINEPREFIX"
echo "DRIVE_C=$drive_c"
|
#!/bin/sh
# Unpack the Windows primesieve console build and generate a small wrapper
# that forwards all arguments and redirects stdout to $LOG_FILE.
unzip -o primesieve-6.2-win64-console.zip

# "\$@" and "\$LOG_FILE" are quoted in the generated wrapper so arguments and
# log paths containing spaces are not word-split when the wrapper runs.
echo "#!/bin/sh
./primesieve.exe \"\$@\" > \"\$LOG_FILE\"" > primesieve-test
chmod +x primesieve-test
|
# Termux build recipe for libexpat (the Expat XML parsing C library).
TERMUX_PKG_HOMEPAGE=https://libexpat.github.io/
TERMUX_PKG_DESCRIPTION="XML parsing C library"
TERMUX_PKG_LICENSE="BSD"
TERMUX_PKG_VERSION=2.2.6
TERMUX_PKG_REVISION=1
TERMUX_PKG_SHA256=17b43c2716d521369f82fc2dc70f359860e90fa440bea65b3b85f0b246ea81f2
# Upstream release tags use underscores (R_2_2_6), hence the ${VERSION//./_} substitution.
TERMUX_PKG_SRCURL=https://github.com/libexpat/libexpat/releases/download/R_${TERMUX_PKG_VERSION//./_}/expat-$TERMUX_PKG_VERSION.tar.bz2
# The xmlwf tool and docbook documentation are not packaged.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="--without-xmlwf --without-docbook"
|
const SpotifyArtistLib = require('../lib/spotify/SpotifyArtistLib');
const { tokenExpiredHandler } = require('../utils/spotify/error-handlers');
/**
 * Thin service layer over SpotifyArtistLib. Every call is retried once
 * through tokenExpiredHandler when Spotify answers 401 (expired token);
 * any other failure is rethrown wrapped in an Error, matching the
 * original per-method behavior.
 */
class ArtistService {
  constructor() {
    this.spotifyArtistLib = new SpotifyArtistLib();
  }

  /**
   * Run a Spotify request with single-retry-on-401 semantics.
   * Extracted because all five public methods previously duplicated this
   * exact try/catch block.
   *
   * @param {Function} request - zero-arg function performing the lib call.
   * @returns {Promise<*>} the lib's response.
   * @throws {Error} wrapping the original error for non-401 failures.
   */
  async _requestWithTokenRetry(request) {
    try {
      return await request();
    } catch (error) {
      const { status } = error;
      if (status === 401) {
        // Token expired: refresh it and replay the same request.
        return tokenExpiredHandler(request);
      }
      throw new Error(error);
    }
  }

  async getMultipleArtists(ids) {
    return this._requestWithTokenRetry(() => this.spotifyArtistLib.getMultipleArtists(ids));
  }

  async getArtistById(id) {
    return this._requestWithTokenRetry(() => this.spotifyArtistLib.getArtistById(id));
  }

  async getArtistAlbums(id) {
    return this._requestWithTokenRetry(() => this.spotifyArtistLib.getArtistAlbums(id));
  }

  async getArtistTopTracks({ id, market }) {
    return this._requestWithTokenRetry(() =>
      this.spotifyArtistLib.getArtistTopTracks({ id, market })
    );
  }

  async getArtistRelatedArtists(id) {
    return this._requestWithTokenRetry(() => this.spotifyArtistLib.getArtistRelatedArtists(id));
  }
}

module.exports = ArtistService;
|
# Remove a whole word from a sentence.
sentence = "He is working on it"
word = "working"
# Filtering tokens (rather than str.replace) avoids the stray double space
# that a plain replace leaves where the word used to be.
sentence = " ".join(w for w in sentence.split() if w != word)
print(sentence)
# Dev bootstrap: export mail/API settings, then start the Flask manager.
# SECURITY NOTE(review): real-looking credentials are hard-coded and committed
# here; move them to an untracked .env file and rotate these values.
export MAIL_USERNAME='philipiaeveline13@gmail.com'
export MAIL_PASSWORD='eveline3434'
# Source of random quotes consumed by the app.
export API_BASE_URL='http://quotes.stormconsultancy.co.uk/random.json'
export SECRET_KEY='evel'
python3.7 manage.py server
/*
Package cmn provides common functions for chaincode.
*/
package cmn
import (
"crypto/sha512"
"encoding/base64"
"encoding/hex"
"encoding/json"
"errors"
"github.com/hyperledger/fabric/core/chaincode/shim"
. "github.com/kenmazsyma/soila/chaincode/log"
)
// Put is a function for putting data into the ledger as JSON.
// parameters :
//   stub - object for accessing ledgers from chaincode
//   key  - key of target data
//   val  - value to store; must be non-nil and JSON-serializable
// returns :
//   - whether error object or nil
func Put(stub shim.ChaincodeStubInterface, key string, val interface{}) error {
	D("check parameter")
	if val == nil {
		return errors.New("invalid param")
	}
	D("convert parameter to json")
	jsVal, err := json.Marshal(val)
	if err != nil {
		return err
	}
	D("put data to ledger:%s", key)
	// json.Marshal already returns []byte; the former []byte(jsVal)
	// conversion was redundant.
	err = stub.PutState(key, jsVal)
	return err
}
// Delete is a function for deleting data from the ledger.
// parameters :
//   stub - object for accessing ledgers from chaincode
//   key - key of target data
// returns :
//   - whether error object or nil
func Delete(stub shim.ChaincodeStubInterface, key string) (err error) {
	// Thin wrapper: DelState's result (error or nil) is returned directly.
	return stub.DelState(key)
}
// FuncGenKey generates a ledger key from the given arguments.
type FuncGenKey func(shim.ChaincodeStubInterface, interface{}) (string, error)

// VerifyForRegistration is a function for verifying if parameters are valid
// before registering: it generates the key and fails when data already
// exists under it.
// parameters :
//   stub - object for accessing ledgers from chaincode
//   genkey - function for generating key
//   args - target parameters for verify
// returns :
//   key - generated key
//   err - whether error object or nil
func VerifyForRegistration(stub shim.ChaincodeStubInterface, genkey FuncGenKey, args interface{}) (key string, err error) {
	D("generate key")
	key, err = genkey(stub, args)
	if err != nil {
		return
	}
	D("check if data already exists")
	val, err := stub.GetState(key)
	if err != nil {
		return
	}
	// A non-nil value means the key is already taken.
	if val != nil {
		err = errors.New("data already exists.")
		return
	}
	return
}
// VerifyForUpdate is a function for verifying if parameters are valid before
// updating: it checks the argument count and that data exists for the key in
// args[0].
// parameters :
//   stub - object for accessing ledgers from chaincode
//   args - target parameters for verify (args[0] is the ledger key)
//   nofElm - expected number of args
// returns :
//   ret - data got from ledger
//   err - whether error object or nil
func VerifyForUpdate(stub shim.ChaincodeStubInterface, args []string, nofElm int) (ret []byte, err error) {
	D("check parameter")
	if err = CheckParam(args, nofElm); err != nil {
		return
	}
	D("check if data exists:%s", args[0])
	ret, err = stub.GetState(args[0])
	if err != nil {
		return
	}
	// Empty result means there is nothing to update.
	if len(ret) == 0 {
		err = errors.New("data not found.")
		return
	}
	return
}
// Get is a function for getting data from the ledger.
// parameters :
//   stub - object for accessing ledgers from chaincode
//   key - key of target data
// returns :
//   ret - pair of {key as []byte, raw data} when found
//   err - whether error object or nil
func Get(stub shim.ChaincodeStubInterface, key string) (ret []interface{}, err error) {
	D("get data from ledger:%s", key)
	data, err := stub.GetState(key)
	if err != nil {
		return
	}
	// Empty result means no entry exists under this key.
	if len(data) == 0 {
		err = errors.New("data not found.")
		return
	}
	ret = []interface{}{[]byte(key), data}
	return
}
// Sha512 returns the hex-encoded SHA-512 digest of the given string.
// parameters :
//   v - target string
// returns :
//   - sha512 hash (hex string)
func Sha512(v string) string {
	digest := sha512.Sum512([]byte(v))
	return hex.EncodeToString(digest[:])
}
// Sha512Ar returns the hex-encoded SHA-512 digest of the concatenation of
// all strings in v (writing parts sequentially is equivalent to hashing the
// joined string).
// parameters :
//   v - target strings
// returns :
//   - sha512 hash (hex string)
func Sha512Ar(v []string) string {
	hasher := sha512.New()
	for i := range v {
		hasher.Write([]byte(v[i]))
	}
	return hex.EncodeToString(hasher.Sum(nil))
}
// Sha512B returns the hex-encoded SHA-512 digest of the given binary data.
// parameters :
//   v - target bytes
// returns :
//   - sha512 hash (hex string)
func Sha512B(v []byte) string {
	digest := sha512.Sum512(v)
	return hex.EncodeToString(digest[:])
}
// ToJSON serializes the target object to a JSON string.
// parameters :
//   o - target object
// returns :
//   - json string ("" on failure)
//   - whether error object or nil
func ToJSON(o interface{}) (string, error) {
	raw, err := json.Marshal(o)
	if err != nil {
		return "", err
	}
	return string(raw), nil
}
// CheckParam verifies that prm holds exactly validlen entries, returning an
// error otherwise.
func CheckParam(prm []string, validlen int) error {
	if len(prm) == validlen {
		return nil
	}
	return errors.New("number of parameter is not valid.")
}
// DecodeBase64 decodes a standard-alphabet base64 string into bytes.
func DecodeBase64(src string) ([]byte, error) {
	return base64.StdEncoding.DecodeString(src)
}
// EncodeBase64 encodes bytes with the standard base64 alphabet.
func EncodeBase64(src []byte) string {
	return base64.StdEncoding.EncodeToString(src)
}
|
import React from 'react';
/**
 * Bookmark-shaped SVG icon component (Sketch 49.1 export).
 *
 * @param {object} props
 * @param {string} [props.fill='#000'] - fill color applied to the mask and path.
 * @param {string} [props.height='100%'] - rendered height.
 * @param {string} [props.width='100%'] - rendered width.
 * @param {string} [props.className=''] - extra CSS class for the <svg> root.
 * @param {string} [props.viewBox='0 0 16 16'] - SVG viewBox.
 */
const SVG = ({
  fill = '#000',
  height = '100%',
  width = '100%',
  className = '',
  viewBox = '0 0 16 16',
}) => (
  <svg
    className={className}
    focusable="false"
    height={height}
    version="1.1"
    viewBox={viewBox}
    width={width}
    x="0px"
    xmlSpace="preserve"
    xmlns="http://www.w3.org/2000/svg"
    xmlnsXlink="http://www.w3.org/1999/xlink"
    y="0px"
  >
    {/* Generator: Sketch 49.1 (51147) - http://www.bohemiancoding.com/sketch */}
    <title>Page 1</title>
    <desc>Created with Sketch.</desc>
    <defs>
      <polygon id="path-1" points="0 0.0002 12 0.0002 12 16 0 16" />
    </defs>
    <g fill="none" fillRule="evenodd" id="Style" stroke="none" strokeWidth="1">
      <g
        id="1---Tanner-UI_Iconography"
        transform="translate(-824.000000, -671.000000)"
      >
        <g id="Page-1" transform="translate(824.000000, 671.000000)">
          <mask fill={fill} id="mask-2">
            <use xlinkHref="#path-1" />
          </mask>
          <g id="Clip-2" />
          <path
            d="M10,12.7642 L6,10.7642 L2.001,12.7632 L2.006,2.0002 L10,2.0002 L10,12.7642 Z M10,0.0002 L2,0.0002 C0.9,0.0002 0,0.9002 0,2.0002 L0,16.0002 L6,13.0002 L12,16.0002 L12,2.0002 C12,0.9002 11.1,0.0002 10,0.0002 Z"
            fill={fill}
            id="Fill-1"
            mask="url(#mask-2)"
          />
        </g>
      </g>
    </g>
  </svg>
);
export default SVG;
|
# Get current work dir
WORK_DIR=$(pwd)

# Import global variables
# Quoted so a work dir containing spaces does not break the source/exec lines.
source "$WORK_DIR/scripts/config/env.sh"

# Run the figure-7 PipeSwitch/Inception-v3 data collection with the repo
# prepended to PYTHONPATH and the server list passed as the only argument.
PYTHONPATH="$PYTHONPATH:$WORK_DIR" python scripts/figures/figure7/pipeswitch_inception_v3/host_run_data.py "$WORK_DIR/scripts/config/servers.txt"
<filename>app/src/test/java/org/apache/taverna/mobile/ui/anouncements/AnnouncementPresenterTest.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.taverna.mobile.ui.anouncements;
import org.apache.taverna.mobile.FakeRemoteDataSource;
import org.apache.taverna.mobile.R;
import org.apache.taverna.mobile.data.DataManager;
import org.apache.taverna.mobile.data.model.Announcements;
import org.apache.taverna.mobile.data.model.DetailAnnouncement;
import org.apache.taverna.mobile.utils.RxSchedulersOverrideRule;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import java.util.HashMap;
import java.util.Map;
import io.reactivex.Observable;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
/**
 * Unit tests for {@code AnnouncementPresenter}: mocks the data layer and the
 * MVP view, and verifies which view callbacks fire for success, empty-result
 * and error paths.
 */
public class AnnouncementPresenterTest {

    // Forces RxJava schedulers onto the test thread so presenter calls run synchronously.
    @Rule
    public final RxSchedulersOverrideRule rxSchedulersOverrideRule = new
            RxSchedulersOverrideRule();

    @Mock
    DataManager dataManager;

    @Mock
    AnnouncementMvpView announcementMvpView;

    Announcements announcements;

    DetailAnnouncement announcement;

    private AnnouncementPresenter announcementPresenter;

    // Query options the presenter is expected to pass to the data manager.
    private Map<String, String> option;

    @Before
    public void setUp() {
        announcementPresenter = new AnnouncementPresenter(dataManager);
        announcementPresenter.attachView(announcementMvpView);
        announcements = FakeRemoteDataSource.getAnnouncements();
        announcement = FakeRemoteDataSource.getAnnouncement();
        option = new HashMap<>();
        option.put("order", "reverse");
        option.put("page", String.valueOf(1));
    }

    @After
    public void tearDown() {
        announcementPresenter.detachView();
    }

    // Successful load: the view shows the list and no error/empty callbacks fire.
    @Test
    public void loadAllAnnouncement_validAnnouncementsData_ReturnsResults() {
        when(dataManager.getAllAnnouncement(option)).thenReturn(
                Observable.just(announcements));
        announcementPresenter.loadAllAnnouncement(1);
        verify(announcementMvpView, never()).showSnackBar(R.string.no_more_announcement_available);
        verify(announcementMvpView, never()).removeLoadMoreProgressBar();
        verify(announcementMvpView).showAllAnnouncement(announcements);
        verify(announcementMvpView, never()).showSnackBar(R.string.failed_to_fetch_announcement);
    }

    // Empty payload: the "no more" snackbar shows and the load-more spinner is removed.
    @Test
    public void loadAllAnnouncement_NULLAnnouncementsData_RemoveLoadMore() {
        Announcements announcements = new Announcements();
        when(dataManager.getAllAnnouncement(option)).thenReturn(
                Observable.just(announcements));
        announcementPresenter.loadAllAnnouncement(1);
        verify(announcementMvpView).showSnackBar(R.string.no_more_announcement_available);
        verify(announcementMvpView).removeLoadMoreProgressBar();
        verify(announcementMvpView, never()).showAllAnnouncement(announcements);
        verify(announcementMvpView, never()).showSnackBar(R.string.failed_to_fetch_announcement);
    }

    // Stream error: only the failure snackbar fires.
    @Test
    public void loadAllAnnouncement_RuntimeError_ShowError() {
        when(dataManager.getAllAnnouncement(option)).thenReturn(
                Observable.<Announcements>error(new RuntimeException()));
        announcementPresenter.loadAllAnnouncement(1);
        verify(announcementMvpView, never()).showSnackBar(R.string.no_more_announcement_available);
        verify(announcementMvpView, never()).removeLoadMoreProgressBar();
        verify(announcementMvpView, never()).showAllAnnouncement(announcements);
        verify(announcementMvpView).showSnackBar(R.string.failed_to_fetch_announcement);
    }

    // Successful detail load: the detail view shows, no error snackbar.
    @Test
    public void loadAnnouncementDetails_validAnnouncementData_ReturnsResults() {
        when(dataManager.getAnnouncementDetail("1")).thenReturn(Observable.just(announcement));
        announcementPresenter.loadAnnouncementDetails("1");
        verify(announcementMvpView).showAnnouncementDetail(announcement);
        verify(announcementMvpView, never()).showSnackBar(R.string.failed_to_fetch_announcement);
    }

    // Detail stream error: only the failure snackbar fires.
    @Test
    public void loadAnnouncementDetails_RuntimeError_ShowError() {
        DetailAnnouncement detailAnnouncement = new DetailAnnouncement();
        when(dataManager.getAnnouncementDetail("1")).thenReturn(Observable
                .<DetailAnnouncement>error(new RuntimeException()));
        announcementPresenter.loadAnnouncementDetails("1");
        verify(announcementMvpView, never()).showAnnouncementDetail(detailAnnouncement);
        verify(announcementMvpView).showSnackBar(R.string.failed_to_fetch_announcement);
    }
}
import React, { Component } from 'react';
import axios from 'axios';
class App extends Component {
state = {
posts: [],
postText: ''
};
componentDidMount() {
axios.get('/posts').then(res => {
this.setState({ posts: res.data });
});
}
handleChange = e => {
this.setState({ postText: e.target.value });
};
handleSubmit = e => {
e.preventDefault();
const newPost = {
text: this.state.postText
};
axios.post('/posts', newPost).then(res => {
this.setState({
posts: res.data,
postText: ''
});
});
};
render() {
return (
<div>
<h1>View Posts</h1>
{this.state.posts.map(post => (
<div key={post.id}>{post.text}</div>
))}
<hr />
<h1>Create Post</h1>
<form onSubmit={this.handleSubmit}>
<input value={this.state.postText} onChange={this.handleChange} />
<button>Submit</button>
</form>
</div>
);
}
}
export default App; |
// <gh_stars>0
'use strict';

require('dotenv').config();

// Application Dependencies
const express = require('express');
const cors = require('cors');
const pg = require('pg');
const superagent = require('superagent');
const methodOverride = require('method-override');

const PORT = process.env.PORT || 3000;
const app = express();
app.use(cors());

// Configure Database
const client = new pg.Client(process.env.DATABASE_URL);
// BUG FIX: pg clients emit 'error', not 'err' — the old handler never fired.
client.on('error', err => console.error(err));

// Application Middleware
app.use(express.urlencoded({ extended: true }));
app.use(express.static('public'));

// Post Method Override (in order to use PUT and DELETE)
// https://github.com/expressjs/method-override
app.use(methodOverride((request, response) => {
  if (request.body && typeof request.body === 'object' && '_method' in request.body) {
    // Pull the intended HTTP verb out of the form body and strip it so
    // downstream handlers only see real form fields.
    let method = request.body._method;
    delete request.body._method;
    return method;
  }
}));

// Set the view engine for server-side templating
app.set('view engine', 'ejs');

// API Routes
app.get('/', getBooks);
app.get('/searches/new', searchForm);
app.post('/searches', searchResults);
app.post('/books', saveBook);
app.get('/books/:id', getDetails);
app.put('/books/:id', updateBook);
app.delete('/books/:id', deleteBook);

// Catch-all 404 for any unknown route.
app.get('*', (request, response) => response.status(404).send('This route does not exist'));
// Normalizes a Google Books API `volumeInfo` object into the record shape
// stored in the database; every missing field falls back to a placeholder.
function Book(info) {
  const placeholderImage = 'https://i.imgur.com/J5LVHEL.jpg';
  const insecureScheme = /^(http:\/\/)/g;
  const identifiers = info.industryIdentifiers;

  this.title = info.title || 'No title available';
  this.author = info.authors ? info.authors[0] : 'No author available';
  this.isbn = identifiers ? `ISBN_13 ${identifiers[0].identifier}` : 'No ISBN available';
  // Force https on thumbnail URLs so the image loads on a secure page.
  this.image_url = info.imageLinks
    ? info.imageLinks.thumbnail.replace(insecureScheme, 'https://')
    : placeholderImage;
  this.description = info.description || 'No description available';
  this.id = identifiers ? `${identifiers[0].identifier}` : '';
}
// Fetches every saved book and renders the home page with the collection
// and its count.
function getBooks(request, response) {
  return client
    .query('SELECT * FROM books;')
    .then(({ rows }) =>
      response.render('pages/index', { result: rows, count: rows.length })
    )
    .catch(err => handleError(err, response));
}
// Displays the book search form page (searches/new.ejs).
function searchForm(_request, response) {
  response.render('pages/searches/new');
}
// Queries the Google Books API for the user's title/author search and renders
// the matching volumes on the results page (searches/show.ejs).
function searchResults(request, response) {
  const [term, field] = request.body.search;
  let url = 'https://www.googleapis.com/books/v1/volumes?q=';
  // Encode the user-supplied term so spaces/special characters cannot break
  // (or inject into) the query string.
  if (field === 'title') { url += `+intitle:${encodeURIComponent(term)}`; }
  if (field === 'author') { url += `+inauthor:${encodeURIComponent(term)}`; }
  superagent.get(url)
    // `items` is absent when the API finds nothing; treat that as an empty
    // result set instead of crashing on `.map` of undefined.
    .then(apiResponse => (apiResponse.body.items || []).map(bookResult => new Book(bookResult.volumeInfo)))
    .then(results => response.render('pages/searches/show', { searchResults: results }))
    .catch(err => handleError(err, response));
}
// Inserts a user-selected book into the database, then redirects to the new
// book's detail page.
function saveBook(request, response) {
  const { title, author, isbn, image_url, description } = request.body;
  const lcBookshelf = request.body.bookshelf.toLowerCase(); // normalize shelf names
  const SQL = 'INSERT INTO books (title, author, isbn, image_url, description, bookshelf) VALUES ($1, $2, $3, $4, $5, $6) RETURNING id;';
  const values = [title, author, isbn, image_url, description, lcBookshelf];
  return client.query(SQL, values)
    .then(result => response.redirect(`/books/${result.rows[0].id}`))
    // BUG FIX: pass `response` along — `.catch(handleError)` called the
    // handler with no response object, crashing the error path itself.
    .catch(err => handleError(err, response));
}
// Renders the detail page for one book, together with the list of distinct
// bookshelves (used to populate the shelf selector on the page).
function getDetails(request, response) {
  return getBookshelves()
    .then(shelves => {
      const SQL = 'SELECT * FROM books WHERE id=$1;';
      const values = [request.params.id];
      // BUG FIX: return the inner query so its rejection reaches the outer
      // .catch — previously it was a floating promise.
      return client.query(SQL, values)
        .then(result => {
          response.render('pages/books/show', { book: result.rows[0], bookshelves: shelves.rows });
        });
    })
    // BUG FIX: forward `response` so handleError can render the error page.
    .catch(err => handleError(err, response));
}
// Returns a promise for the distinct bookshelf names, alphabetized.
function getBookshelves() {
  return client.query('SELECT DISTINCT bookshelf FROM books ORDER BY bookshelf;');
}
// Applies user edits to a book row, then redirects back to its detail page.
function updateBook(request, response) {
  const { title, author, isbn, image_url, description, bookshelf } = request.body;
  const SQL = 'UPDATE books SET title=$1, author=$2, isbn=$3, image_url=$4, description=$5, bookshelf=$6 WHERE id=$7';
  const values = [title, author, isbn, image_url, description, bookshelf, request.params.id];
  return client.query(SQL, values)
    // BUG FIX: `.then(response.redirect(...))` redirected immediately, before
    // (and regardless of whether) the UPDATE finished; wrap it in a callback.
    .then(() => response.redirect(`/books/${request.params.id}`))
    // BUG FIX: forward `response` so handleError can render the error page.
    .catch(err => handleError(err, response));
}
// Removes a book row from the database, then redirects to the home page.
function deleteBook(request, response) {
  const SQL = 'DELETE FROM books WHERE id=$1;';
  const values = [request.params.id];
  return client.query(SQL, values)
    // BUG FIX: redirect must run after the DELETE resolves, not at call time.
    .then(() => response.redirect('/'))
    // BUG FIX: forward `response` so handleError can render the error page.
    .catch(err => handleError(err, response));
}
// Central error handler: logs the failure server-side and renders the error
// page with a 500 status (previously the page went out as a 200 and the
// error was never logged).
function handleError(error, response) {
  console.error(error);
  response.status(500).render('pages/error', { error: error });
}
client.connect()
.then( ()=> {
app.listen(PORT, ()=> {
console.log('server and db are up, listening on port ', PORT);
});
});
|
import re
def parse_function_declaration(declaration):
    """Parse a (possibly multi-line) C-style function declaration.

    Args:
        declaration: Text such as ``"int add(\n int a,\n int b\n)"``.
            Leading/trailing whitespace is ignored.

    Returns:
        A dict with keys ``function_name``, ``return_type`` and
        ``arguments`` (a list of raw argument strings), or ``None`` when
        the text does not look like a function declaration.

    Note:
        The previous regex required the opening line to end exactly in
        ``(\n`` and therefore never matched the documented example; the
        name/return-type extraction also swapped fields. This version
        matches the whole declaration and labels the parts correctly.
    """
    pattern = r"""
        ^(?P<ret>[\w\s\*&:<>,\[\]]+?)   # return type (lazy, up to the name)
        \s+(?P<name>[\w:~]+)            # function name
        \s*\(\s*(?P<args>.*?)\s*\)      # argument list between parentheses
        \s*;?\s*$                       # optional trailing semicolon
    """
    match = re.match(pattern, declaration.strip(), re.VERBOSE | re.DOTALL)
    if match is None:
        return None
    arguments = [arg.strip() for arg in match.group('args').split(',') if arg.strip()]
    return {
        "function_name": match.group('name'),
        "return_type": match.group('ret').strip(),
        "arguments": arguments,
    }
# Example usage: parse a multi-line C-style declaration and print the result.
declaration = """
int add(
int a,
int b
)
"""
result = parse_function_declaration(declaration)
print(result)
# Expected parse: {'function_name': 'add', 'return_type': 'int', 'arguments': ['int a', 'int b']}
# NOTE(review): verify the regex actually matches this input — with the
# pattern as written above, the leading newline can make this print None.
def majority_element(arr):
    """Return the majority element of ``arr``, or ``None`` if there is none.

    A majority element occurs strictly more than ``len(arr) // 2`` times.
    The previous implementation returned the *most frequent* element and
    never returned ``None`` for a non-empty input, contradicting this
    documented contract; it now verifies the majority condition.

    Args:
        arr: Sequence of hashable elements (may be empty).

    Returns:
        The majority element, or ``None`` when no element has a majority.
    """
    # Count occurrences of each element.
    counts = {}
    for elem in arr:
        counts[elem] = counts.get(elem, 0) + 1

    # An element qualifies only if it appears in more than half the slots.
    for elem, freq in counts.items():
        if freq > len(arr) // 2:
            return elem
    return None
def count_vowels(s):
    """Count the vowel characters (a, e, i, o, u) in *s*, case-insensitively."""
    return sum(1 for ch in s.lower() if ch in 'aeiou')
print(count_vowels('hello world'))  # expected output: 3
class WebsiteScraper(object):
    """Scrapes every ``<div>`` element from a single web page.

    NOTE(review): this class relies on ``requests`` and ``BeautifulSoup``
    being imported elsewhere in the module — confirm those imports exist.
    """

    def __init__(self, url):
        # URL of the page to scrape.
        self.url = url

    def scrapedata(self):
        """Fetch ``self.url`` and return one dict per ``<div>`` element.

        Returns:
            list[dict]: each dict has ``name`` (the div's ``title``
            attribute, or ``None`` when absent) and ``value`` (the div's
            full text content).
        """
        response = requests.get(self.url)
        # Name the parser explicitly: without it BeautifulSoup emits a
        # warning and picks whichever parser happens to be installed,
        # which can vary between environments.
        soup = BeautifulSoup(response.text, 'html.parser')
        scraped_list = []
        for item in soup.find_all('div'):
            data = {
                'name': item.get('title'),
                'value': item.get_text()
            }
            scraped_list.append(data)
        return scraped_list
import { Controller, Get, Param, ParseIntPipe, Query } from '@nestjs/common'
import { OraclePriceAggregated, OraclePriceAggregatedMapper } from '@src/module.model/oracle.price.aggregated'
import { OracleTokenCurrencyMapper } from '@src/module.model/oracle.token.currency'
import { ApiPagedResponse } from '@src/module.api/_core/api.paged.response'
import { PaginationQuery } from '@src/module.api/_core/api.query'
import { PriceTicker, PriceTickerMapper } from '@src/module.model/price.ticker'
import { PriceOracle } from '@whale-api-client/api/prices'
import { OraclePriceFeedMapper } from '@src/module.model/oracle.price.feed'
import { OraclePriceAggregatedInterval, OraclePriceAggregatedIntervalMapper } from '@src/module.model/oracle.price.aggregated.interval'
/**
 * REST controller exposing oracle price data under the `/prices` route.
 * All list endpoints are cursor-paginated via PaginationQuery; each
 * ApiPagedResponse uses the item's `sort` field (or oracleId) as cursor.
 */
@Controller('/prices')
export class PriceController {
  constructor (
    protected readonly oraclePriceAggregatedMapper: OraclePriceAggregatedMapper,
    protected readonly oracleTokenCurrencyMapper: OracleTokenCurrencyMapper,
    protected readonly priceTickerMapper: PriceTickerMapper,
    protected readonly priceFeedMapper: OraclePriceFeedMapper,
    protected readonly oraclePriceAggregatedIntervalMapper: OraclePriceAggregatedIntervalMapper
  ) {
  }

  /** GET /prices — paginated list of all price tickers. */
  @Get()
  async list (
    @Query() query: PaginationQuery
  ): Promise<ApiPagedResponse<PriceTicker>> {
    const items = await this.priceTickerMapper.query(query.size, query.next)
    return ApiPagedResponse.of(items, query.size, item => {
      return item.sort
    })
  }

  /** GET /prices/:key — a single price ticker, or undefined when unknown. */
  @Get('/:key')
  async get (
    @Param('key') key: string
  ): Promise<PriceTicker | undefined> {
    return await this.priceTickerMapper.get(key)
  }

  /** GET /prices/:key/feed — paginated aggregated price points for a key. */
  @Get('/:key/feed')
  async getFeed (
    @Param('key') key: string,
    @Query() query: PaginationQuery
  ): Promise<ApiPagedResponse<OraclePriceAggregated>> {
    const items = await this.oraclePriceAggregatedMapper.query(key, query.size, query.next)
    return ApiPagedResponse.of(items, query.size, item => {
      return item.sort
    })
  }

  /**
   * GET /prices/:key/feed/interval/:interval — aggregated price points
   * bucketed by interval; the interval mapper is keyed on `<key>-<interval>`.
   */
  @Get('/:key/feed/interval/:interval')
  async getFeedWithInterval (
    @Param('key') key: string,
    @Param('interval', ParseIntPipe) interval: number,
    @Query() query: PaginationQuery
  ): Promise<ApiPagedResponse<OraclePriceAggregatedInterval>> {
    const priceKey = `${key}-${interval}`
    const items = await this.oraclePriceAggregatedIntervalMapper.query(priceKey, query.size, query.next)
    return ApiPagedResponse.of(items, query.size, item => {
      return item.sort
    })
  }

  /**
   * GET /prices/:key/oracles — oracles publishing the given price key, each
   * enriched with its latest feed entry (limit-1 lookup per oracle).
   */
  @Get('/:key/oracles')
  async listPriceOracles (
    @Param('key') key: string,
    @Query() query: PaginationQuery
  ): Promise<ApiPagedResponse<PriceOracle>> {
    const items: PriceOracle[] = await this.oracleTokenCurrencyMapper.query(key, query.size, query.next)
    // TODO(fuxingloh): need to index PriceOracle, this is not performant due to random read
    for (const item of items) {
      const feeds = await this.priceFeedMapper.query(`${key}-${item.oracleId}`, 1)
      item.feed = feeds.length > 0 ? feeds[0] : undefined
    }
    return ApiPagedResponse.of(items, query.size, item => {
      return item.oracleId
    })
  }
}
|
/**
 * Finds the row of a 2-D numeric array with the largest element sum.
 * @param {number[][]} arr - Array of numeric rows.
 * @returns {string} Human-readable description of the winning row (1-based).
 */
function find_row_with_largest_sum(arr) {
  if (!Array.isArray(arr) || arr.length === 0) {
    return "No rows to compare";
  }
  // Seed with -Infinity so rows whose sums are all negative are still
  // considered (the original `0` seed mis-reported such inputs as row 1).
  let largestSum = -Infinity;
  let rowNumber = 0;
  for (let i = 0; i < arr.length; i++) {
    let sum = 0;
    for (let j = 0; j < arr[i].length; j++) {
      sum += arr[i][j];
    }
    if (sum > largestSum) {
      largestSum = sum;
      rowNumber = i;
    }
  }
  return "Row " + (rowNumber + 1) + " has the largest sum, which is " + largestSum;
}
// NOTE(review): `arr` is not defined anywhere in this snippet — define it
// before this call, otherwise this line throws a ReferenceError.
find_row_with_largest_sum(arr);
def get_fully_qualified_class_name(config: dict, component_name: str) -> str:
    """Look up a component's dotted ``module.Class`` path in *config*.

    Args:
        config: Mapping of component names to dicts with 'MODULE' and
            'CLASS' entries.
        component_name: Key to look up in *config*.

    Returns:
        The fully qualified class name, or an explanatory message when the
        component is not present in the configuration.
    """
    if component_name not in config:
        return "Component not found in the configuration dictionary."
    entry = config[component_name]
    return f"{entry['MODULE']}.{entry['CLASS']}"
from typing import List
def generate_healthcheck_targets(fcgi_targets: List[str]) -> List[str]:
    """Derive healthcheck target names from FCGI target names.

    Targets containing ".py" are rewritten by inserting "_py_" after the
    first underscore-delimited segment of the basename, then re-appending
    the final underscore-delimited piece of the original target; all other
    targets pass through unchanged.

    NOTE(review): a ".py" target without any underscore would raise
    IndexError on the ``split('_', 1)[1]`` access — confirm all inputs
    contain at least one underscore.
    """
    healthcheck_targets = []
    for target in fcgi_targets:
        if ".py" in target:
            # Basename: everything before the first dot.
            modified_target = target.split('.')[0]
            # Insert "_py_" between the first segment and the remainder.
            modified_target = modified_target.split('_')[0] + "_py_" + modified_target.split('_', 1)[1]
            # Re-append the original target's last underscore segment
            # (which still carries the ".py" extension).
            healthcheck_targets.append(modified_target + "_" + target.split('_')[-1])
        else:
            healthcheck_targets.append(target)
    return healthcheck_targets
// sources/VS/ThirdParty/wxWidgets/tests/controls/notebooktest.cpp
///////////////////////////////////////////////////////////////////////////////
// Name: tests/controls/notebooktest.cpp
// Purpose: wxNotebook unit test
// Author: <NAME>
// Created: 2010-07-02
// Copyright: (c) 2010 <NAME>
///////////////////////////////////////////////////////////////////////////////
#include "testprec.h"
#if wxUSE_NOTEBOOK
#ifndef WX_PRECOMP
#include "wx/app.h"
#include "wx/panel.h"
#endif // WX_PRECOMP
#include "wx/notebook.h"
#include "wx/scopedptr.h"
#include "bookctrlbasetest.h"
#include "testableframe.h"
// Test case exercising wxNotebook via the shared wxBookCtrlBase test suite,
// plus notebook-specific checks (row count, no events during destruction).
class NotebookTestCase : public BookCtrlBaseTestCase, public CppUnit::TestCase
{
public:
    NotebookTestCase() { m_notebook = NULL; m_numPageChanges = 0; }

    virtual void setUp() wxOVERRIDE;
    virtual void tearDown() wxOVERRIDE;

private:
    // Accessors required by the shared BookCtrlBaseTestCase machinery.
    virtual wxBookCtrlBase *GetBase() const wxOVERRIDE { return m_notebook; }

    virtual wxEventType GetChangedEvent() const wxOVERRIDE
        { return wxEVT_NOTEBOOK_PAGE_CHANGED; }

    virtual wxEventType GetChangingEvent() const wxOVERRIDE
        { return wxEVT_NOTEBOOK_PAGE_CHANGING; }

    CPPUNIT_TEST_SUITE( NotebookTestCase );
        wxBOOK_CTRL_BASE_TESTS();
        CPPUNIT_TEST( Image );
        CPPUNIT_TEST( RowCount );
        CPPUNIT_TEST( NoEventsOnDestruction );
    CPPUNIT_TEST_SUITE_END();

    void RowCount();
    void NoEventsOnDestruction();

    // Handler bound in NoEventsOnDestruction(); counts PAGE_CHANGED events.
    void OnPageChanged(wxNotebookEvent&) { m_numPageChanges++; }

    wxNotebook *m_notebook;   // control under test, recreated for each test
    int m_numPageChanges;

    wxDECLARE_NO_COPY_CLASS(NotebookTestCase);
};

// register in the unnamed registry so that these tests are run by default
CPPUNIT_TEST_SUITE_REGISTRATION( NotebookTestCase );

// also include in its own registry so that these tests can be run alone
CPPUNIT_TEST_SUITE_NAMED_REGISTRATION( NotebookTestCase, "NotebookTestCase" );
// Creates a fresh notebook as a child of the test frame and populates it
// with the standard panels used by the base-class tests.
void NotebookTestCase::setUp()
{
    m_notebook = new wxNotebook(wxTheApp->GetTopWindow(), wxID_ANY,
                                wxDefaultPosition, wxSize(400, 200));
    AddPanels();
}
// Destroys the notebook (no-op if a test already destroyed and nulled it).
void NotebookTestCase::tearDown()
{
    wxDELETE(m_notebook);
}
// Verifies the default single-row layout, then (MSW only, where the
// wxNB_MULTILINE path is exercised) that many pages wrap onto more rows.
void NotebookTestCase::RowCount()
{
    CPPUNIT_ASSERT_EQUAL(1, m_notebook->GetRowCount());

#ifdef __WXMSW__
    // Recreate the control with the multiline style and add enough pages
    // that they cannot fit in one row.
    wxDELETE(m_notebook);
    m_notebook = new wxNotebook(wxTheApp->GetTopWindow(), wxID_ANY,
                                wxDefaultPosition, wxSize(400, 200),
                                wxNB_MULTILINE);

    for( unsigned int i = 0; i < 10; i++ )
    {
        m_notebook->AddPage(new wxPanel(m_notebook), "Panel", false, 0);
    }

    CPPUNIT_ASSERT( m_notebook->GetRowCount() != 1 );
#endif
}
// Regression test: destroying the notebook must not fire page-changed events.
void NotebookTestCase::NoEventsOnDestruction()
{
    // We can't use EventCounter helper here as it doesn't deal with the window
    // it's connected to being destroyed during its life-time, so do it
    // manually.
    m_notebook->Bind(wxEVT_NOTEBOOK_PAGE_CHANGED,
                     &NotebookTestCase::OnPageChanged, this);

    // Normally deleting a page before the selected one results in page
    // selection changing and the corresponding event.
    m_notebook->DeletePage(static_cast<size_t>(0));
    CHECK( m_numPageChanges == 1 );

    // But deleting the entire control shouldn't generate any events, yet it
    // used to do under GTK+ 3 when a page different from the first one was
    // selected.
    m_notebook->ChangeSelection(1);
    m_notebook->Destroy();
    // Null the member so tearDown() doesn't double-delete the control.
    m_notebook = NULL;
    CHECK( m_numPageChanges == 1 );
}
// Verifies which AddPage() calls generate selection-change events:
// only adding a page with select=true on a non-empty notebook should.
TEST_CASE("wxNotebook::AddPageEvents", "[wxNotebook][AddPage][event]")
{
    wxNotebook* const
        notebook = new wxNotebook(wxTheApp->GetTopWindow(), wxID_ANY,
                                  wxDefaultPosition, wxSize(400, 200));
    wxScopedPtr<wxNotebook> cleanup(notebook);

    CHECK( notebook->GetSelection() == wxNOT_FOUND );

    // EventCounter (from testableframe.h) tallies events of one type.
    EventCounter countPageChanging(notebook, wxEVT_NOTEBOOK_PAGE_CHANGING);
    EventCounter countPageChanged(notebook, wxEVT_NOTEBOOK_PAGE_CHANGED);

    // Add the first page, it is special.
    notebook->AddPage(new wxPanel(notebook), "Initial page");

    // The selection should have been changed.
    CHECK( notebook->GetSelection() == 0 );

    // But no events should have been generated.
    CHECK( countPageChanging.GetCount() == 0 );
    CHECK( countPageChanged.GetCount() == 0 );

    // Add another page without selecting it.
    notebook->AddPage(new wxPanel(notebook), "Unselected page");

    // Selection shouldn't have changed.
    CHECK( notebook->GetSelection() == 0 );

    // And no events should have been generated, of course.
    CHECK( countPageChanging.GetCount() == 0 );
    CHECK( countPageChanged.GetCount() == 0 );

    // Finally add another page and do select it.
    notebook->AddPage(new wxPanel(notebook), "Selected page", true);

    // It should have become selected.
    CHECK( notebook->GetSelection() == 2 );

    // And events for the selection change should have been generated.
    CHECK( countPageChanging.GetCount() == 1 );
    CHECK( countPageChanged.GetCount() == 1 );
}
#endif //wxUSE_NOTEBOOK
|
package org.quark.microapidemo.utility;
import io.jsonwebtoken.*;
import org.quark.microapidemo.RunnerContext;
import org.quark.microapidemo.config.GlobalAppSettingsProperties;
import org.quark.microapidemo.config.GlobalConfig;
import javax.crypto.spec.SecretKeySpec;
import javax.xml.bind.DatatypeConverter;
import java.security.Key;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * Helper for creating and validating the application's HS256-signed JWTs.
 * The base64-encoded signing secret is fetched from application settings
 * via {@link #getSecret()}.
 */
public class JWTUtility {

    /**
     * Creates a signed JWT.
     *
     * @param id        token id (jti) distinguishing access vs refresh tokens
     * @param userId    user id, stored in the JOSE header
     * @param role      user role, stored in the JOSE header
     * @param ttlMillis time-to-live in milliseconds; negative means no expiry
     * @return the compact, URL-safe serialized token
     */
    public static String createJWT(String id, long userId, String role,
                                   long ttlMillis) {
        // The JWT signature algorithm we will be using to sign the token
        SignatureAlgorithm signatureAlgorithm = SignatureAlgorithm.HS256;

        long nowMillis = System.currentTimeMillis();
        Date now = new Date(nowMillis);

        // We will sign our JWT with our ApiKey secret
        byte[] apiKeySecretBytes = DatatypeConverter.parseBase64Binary(getSecret());
        Key signingKey = new SecretKeySpec(apiKeySecretBytes, signatureAlgorithm.getJcaName());

        // Let's set the JWT Claims
        JwtBuilder builder = Jwts.builder().setId(id)
                .setIssuedAt(now)
                .setSubject(GlobalConfig.JWTConfig.SUBJECT)
                .setIssuer(GlobalConfig.JWTConfig.IISUSER)
                .signWith(signatureAlgorithm, signingKey);

        // if it has been specified, let's add the expiration
        if (ttlMillis >= 0) {
            long expMillis = nowMillis + ttlMillis;
            Date exp = new Date(expMillis);
            builder.setExpiration(exp);
        }

        // NOTE(review): userId/role travel in the JOSE header rather than in
        // the claims body — confirm downstream readers expect them there.
        Map<String, Object> header = new HashMap<String, Object>();
        header.put(GlobalConfig.WebConfig.CLAIMS_USER_KEY, userId);
        header.put(GlobalConfig.WebConfig.CLAIMS_ROLE_KEY, role);
        builder.setHeader(header);

        // Builds the JWT and serializes it to a compact, URL-safe string
        return builder.compact();
    }

    /**
     * Parses and validates a refresh token (requires the refresh jti).
     * Throws if the token is not a correctly signed, matching JWS.
     */
    public static Jws<Claims> parseRefreshJWT(String jwt) {
        return parseWithRequiredId(jwt, GlobalConfig.JWTConfig.JWTREFRESHID);
    }

    /**
     * Parses and validates an access token (requires the access jti).
     * Throws if the token is not a correctly signed, matching JWS.
     */
    public static Jws<Claims> parseJWT(String jwt) {
        return parseWithRequiredId(jwt, GlobalConfig.JWTConfig.JWTID);
    }

    // Shared validation for both token kinds: the two public parse methods
    // previously duplicated this chain and differed only in the required id.
    private static Jws<Claims> parseWithRequiredId(String jwt, String requiredId) {
        // This line will throw an exception if it is not a signed JWS (as expected)
        return Jwts.parser()
                .requireId(requiredId)
                .requireSubject(GlobalConfig.JWTConfig.SUBJECT)
                .requireIssuer(GlobalConfig.JWTConfig.IISUSER)
                .setSigningKey(DatatypeConverter.parseBase64Binary(getSecret()))
                .parseClaimsJws(jwt);
    }

    // Fetches the base64-encoded signing secret from application settings.
    private static String getSecret() {
        GlobalAppSettingsProperties globalAppSettingsProperties =
                RunnerContext.getBean("GlobalAppSettings", GlobalAppSettingsProperties.class);
        return globalAppSettingsProperties.getPrivateKey();
    }
}
|
// languages/sr.go
package languages
// SR - Serbian transliteration data.
var SR = map[rune]string{
	0x110: "Dj", // U+0110 LATIN CAPITAL LETTER D WITH STROKE (Đ)
	0x111: "dj", // U+0111 LATIN SMALL LETTER D WITH STROKE (đ)
}
|
const { getInfo } = require('./getInfo.js');
// Demo host API: logs the received argument and acknowledges via callback.
const testHostApi = (payload, callback) => {
  const ack = 'From Host: OK!';
  console.log('Test Host Api has been successfully called!', payload);
  callback(ack);
};
// Public surface of this host-API module.
module.exports = {
  getInfo,      // re-exported from ./getInfo.js
  testHostApi,  // demo call for exercising the host API wiring
};
|
#!/bin/bash
# NODE_INDEX is the final character of the hostname (tail -c 2 returns the
# last character plus the trailing newline byte).
NODE_INDEX=$(hostname | tail -c 2)
NODE_NAME=$(hostname)
# Clears password/account expiry limits for the admin user so the account
# never locks out, then prints the resulting aging info.
configureAdminUser(){
chage -E -1 -I -1 -m 0 -M 99999 "${ADMINUSER}"
chage -l "${ADMINUSER}"
}
{{- if EnableHostsConfigAgent}}
# Enables and starts the reconcile-private-hosts service (presumably keeps
# private-cluster host entries reconciled — see the unit definition).
configPrivateClusterHosts() {
systemctlEnableAndStart reconcile-private-hosts || exit $ERR_SYSTEMCTL_START_FAIL
}
{{- end}}
# Enables and starts the RPC services (rpcbind + rpc-statd, commonly
# required for NFS mounts).
ensureRPC() {
systemctlEnableAndStart rpcbind || exit $ERR_SYSTEMCTL_START_FAIL
systemctlEnableAndStart rpc-statd || exit $ERR_SYSTEMCTL_START_FAIL
}
{{- if ShouldConfigTransparentHugePage}}
# Applies the templated transparent-hugepage settings both immediately
# (via /sys) and persistently (via /etc/sysfs.conf). Empty template values
# leave the corresponding setting untouched.
configureTransparentHugePage() {
ETC_SYSFS_CONF="/etc/sysfs.conf"
THP_ENABLED={{GetTransparentHugePageEnabled}}
if [[ "${THP_ENABLED}" != "" ]]; then
# Take effect now...
echo "${THP_ENABLED}" > /sys/kernel/mm/transparent_hugepage/enabled
# ...and persist across reboots.
echo "kernel/mm/transparent_hugepage/enabled=${THP_ENABLED}" >> ${ETC_SYSFS_CONF}
fi
THP_DEFRAG={{GetTransparentHugePageDefrag}}
if [[ "${THP_DEFRAG}" != "" ]]; then
echo "${THP_DEFRAG}" > /sys/kernel/mm/transparent_hugepage/defrag
echo "kernel/mm/transparent_hugepage/defrag=${THP_DEFRAG}" >> ${ETC_SYSFS_CONF}
fi
}
{{- end}}
{{- if ShouldConfigSwapFile}}
# Creates, enables and persists (via /etc/fstab) a swap file on the temp
# disk, sized by the template; fails if the disk lacks free space.
configureSwapFile() {
SWAP_SIZE_KB=$(expr {{GetSwapFileSizeMB}} \* 1000)
# Free space (KB) on the temp-disk partition.
DISK_FREE_KB=$(df /dev/sdb1 | sed 1d | awk '{print $4}')
if [[ ${DISK_FREE_KB} -gt ${SWAP_SIZE_KB} ]]; then
SWAP_LOCATION=/mnt/swapfile
retrycmd_if_failure 24 5 25 fallocate -l ${SWAP_SIZE_KB}K ${SWAP_LOCATION} || exit $ERR_SWAP_CREAT_FAIL
chmod 600 ${SWAP_LOCATION}
retrycmd_if_failure 24 5 25 mkswap ${SWAP_LOCATION} || exit $ERR_SWAP_CREAT_FAIL
retrycmd_if_failure 24 5 25 swapon ${SWAP_LOCATION} || exit $ERR_SWAP_CREAT_FAIL
# NOTE(review): the grep filters the retrycmd pipeline's output rather than
# running inside the retry — confirm this verification behaves as intended.
retrycmd_if_failure 24 5 25 swapon --show | grep ${SWAP_LOCATION} || exit $ERR_SWAP_CREAT_FAIL
echo "${SWAP_LOCATION} none swap sw 0 0" >> /etc/fstab
else
echo "Insufficient disk space creating swap file: request ${SWAP_SIZE_KB} free ${DISK_FREE_KB}"
exit $ERR_SWAP_CREAT_INSUFFICIENT_DISK_SPACE
fi
}
{{- end}}
{{- if ShouldConfigureHTTPProxy}}
# Persists the templated proxy settings into /etc/environment, writing both
# upper- and lower-case variants since tools read either spelling.
configureEtcEnvironment() {
{{- if HasHTTPProxy }}
echo 'HTTP_PROXY="{{GetHTTPProxy}}"' >> /etc/environment
echo 'http_proxy="{{GetHTTPProxy}}"' >> /etc/environment
{{- end}}
{{- if HasHTTPSProxy }}
echo 'HTTPS_PROXY="{{GetHTTPSProxy}}"' >> /etc/environment
echo 'https_proxy="{{GetHTTPSProxy}}"' >> /etc/environment
{{- end}}
{{- if HasNoProxy }}
echo 'NO_PROXY="{{GetNoProxy}}"' >> /etc/environment
echo 'no_proxy="{{GetNoProxy}}"' >> /etc/environment
{{- end}}
}
{{- end}}
{{- if ShouldConfigureHTTPProxyCA}}
# Converts the provisioned proxy CA from .pem to .crt and installs it into
# the system trust store via update-ca-certificates.
configureHTTPProxyCA() {
openssl x509 -outform pem -in /usr/local/share/ca-certificates/proxyCA.pem -out /usr/local/share/ca-certificates/proxyCA.crt || exit $ERR_HTTP_PROXY_CA_CONVERT
rm -f /usr/local/share/ca-certificates/proxyCA.pem
update-ca-certificates || exit $ERR_HTTP_PROXY_CA_UPDATE
}
{{- end}}
# Generates a 2048-bit RSA key and a self-signed serving certificate for the
# kubelet (CN = node hostname, valid 7300 days).
configureKubeletServerCert() {
KUBELET_SERVER_PRIVATE_KEY_PATH="/etc/kubernetes/certs/kubeletserver.key"
KUBELET_SERVER_CERT_PATH="/etc/kubernetes/certs/kubeletserver.crt"
openssl genrsa -out $KUBELET_SERVER_PRIVATE_KEY_PATH 2048
openssl req -new -x509 -days 7300 -key $KUBELET_SERVER_PRIVATE_KEY_PATH -out $KUBELET_SERVER_CERT_PATH -subj "/CN=${NODE_NAME}"
}
# Writes the node's core Kubernetes config files: the apiserver public key,
# the azure.json cloud-provider config (with the service-principal secret
# JSON-escaped and the SP file removed afterwards), the kubelet serving
# certificate, and — when templated on — the custom-cloud environment file
# and the kubelet config file.
configureK8s() {
APISERVER_PUBLIC_KEY_PATH="/etc/kubernetes/certs/apiserver.crt"
touch "${APISERVER_PUBLIC_KEY_PATH}"
chmod 0644 "${APISERVER_PUBLIC_KEY_PATH}"
chown root:root "${APISERVER_PUBLIC_KEY_PATH}"
AZURE_JSON_PATH="/etc/kubernetes/azure.json"
touch "${AZURE_JSON_PATH}"
# azure.json contains the SP secret, so keep it readable by root only.
chmod 0600 "${AZURE_JSON_PATH}"
chown root:root "${AZURE_JSON_PATH}"
SP_FILE="/etc/kubernetes/sp.txt"
wait_for_file 1200 1 /etc/kubernetes/certs/client.key || exit $ERR_FILE_WATCH_TIMEOUT
wait_for_file 1200 1 "$SP_FILE" || exit $ERR_FILE_WATCH_TIMEOUT
# Disable command tracing while secrets are held in shell variables.
set +x
echo "${APISERVER_PUBLIC_KEY}" | base64 --decode > "${APISERVER_PUBLIC_KEY_PATH}"
{{/* Perform the required JSON escaping */}}
SERVICE_PRINCIPAL_CLIENT_SECRET="$(cat "$SP_FILE")"
SERVICE_PRINCIPAL_CLIENT_SECRET=${SERVICE_PRINCIPAL_CLIENT_SECRET//\\/\\\\}
SERVICE_PRINCIPAL_CLIENT_SECRET=${SERVICE_PRINCIPAL_CLIENT_SECRET//\"/\\\"}
rm "$SP_FILE" # unneeded after reading from disk.
cat << EOF > "${AZURE_JSON_PATH}"
{
{{- if IsAKSCustomCloud}}
"cloud": "AzureStackCloud",
{{- else}}
"cloud": "{{GetTargetEnvironment}}",
{{- end}}
"tenantId": "${TENANT_ID}",
"subscriptionId": "${SUBSCRIPTION_ID}",
"aadClientId": "${SERVICE_PRINCIPAL_CLIENT_ID}",
"aadClientSecret": "${SERVICE_PRINCIPAL_CLIENT_SECRET}",
"resourceGroup": "${RESOURCE_GROUP}",
"location": "${LOCATION}",
"vmType": "${VM_TYPE}",
"subnetName": "${SUBNET}",
"securityGroupName": "${NETWORK_SECURITY_GROUP}",
"vnetName": "${VIRTUAL_NETWORK}",
"vnetResourceGroup": "${VIRTUAL_NETWORK_RESOURCE_GROUP}",
"routeTableName": "${ROUTE_TABLE}",
"primaryAvailabilitySetName": "${PRIMARY_AVAILABILITY_SET}",
"primaryScaleSetName": "${PRIMARY_SCALE_SET}",
"cloudProviderBackoffMode": "${CLOUDPROVIDER_BACKOFF_MODE}",
"cloudProviderBackoff": ${CLOUDPROVIDER_BACKOFF},
"cloudProviderBackoffRetries": ${CLOUDPROVIDER_BACKOFF_RETRIES},
"cloudProviderBackoffExponent": ${CLOUDPROVIDER_BACKOFF_EXPONENT},
"cloudProviderBackoffDuration": ${CLOUDPROVIDER_BACKOFF_DURATION},
"cloudProviderBackoffJitter": ${CLOUDPROVIDER_BACKOFF_JITTER},
"cloudProviderRateLimit": ${CLOUDPROVIDER_RATELIMIT},
"cloudProviderRateLimitQPS": ${CLOUDPROVIDER_RATELIMIT_QPS},
"cloudProviderRateLimitBucket": ${CLOUDPROVIDER_RATELIMIT_BUCKET},
"cloudProviderRateLimitQPSWrite": ${CLOUDPROVIDER_RATELIMIT_QPS_WRITE},
"cloudProviderRateLimitBucketWrite": ${CLOUDPROVIDER_RATELIMIT_BUCKET_WRITE},
"useManagedIdentityExtension": ${USE_MANAGED_IDENTITY_EXTENSION},
"userAssignedIdentityID": "${USER_ASSIGNED_IDENTITY_ID}",
"useInstanceMetadata": ${USE_INSTANCE_METADATA},
"loadBalancerSku": "${LOAD_BALANCER_SKU}",
"disableOutboundSNAT": ${LOAD_BALANCER_DISABLE_OUTBOUND_SNAT},
"excludeMasterFromStandardLB": ${EXCLUDE_MASTER_FROM_STANDARD_LB},
"providerVaultName": "${KMS_PROVIDER_VAULT_NAME}",
"maximumLoadBalancerRuleCount": ${MAXIMUM_LOADBALANCER_RULE_COUNT},
"providerKeyName": "k8s",
"providerKeyVersion": ""
}
EOF
set -x
# In backoff mode v2, strip the exponent/jitter fields from azure.json.
if [[ "${CLOUDPROVIDER_BACKOFF_MODE}" = "v2" ]]; then
sed -i "/cloudProviderBackoffExponent/d" /etc/kubernetes/azure.json
sed -i "/cloudProviderBackoffJitter/d" /etc/kubernetes/azure.json
fi
configureKubeletServerCert
{{- if IsAKSCustomCloud}}
# Custom-cloud environments additionally need an endpoint-suffix file.
set +x
AKS_CUSTOM_CLOUD_JSON_PATH="/etc/kubernetes/{{GetTargetEnvironment}}.json"
touch "${AKS_CUSTOM_CLOUD_JSON_PATH}"
chmod 0600 "${AKS_CUSTOM_CLOUD_JSON_PATH}"
chown root:root "${AKS_CUSTOM_CLOUD_JSON_PATH}"
cat << EOF > "${AKS_CUSTOM_CLOUD_JSON_PATH}"
{
"name": "{{GetTargetEnvironment}}",
"managementPortalURL": "{{AKSCustomCloudManagementPortalURL}}",
"publishSettingsURL": "{{AKSCustomCloudPublishSettingsURL}}",
"serviceManagementEndpoint": "{{AKSCustomCloudServiceManagementEndpoint}}",
"resourceManagerEndpoint": "{{AKSCustomCloudResourceManagerEndpoint}}",
"activeDirectoryEndpoint": "{{AKSCustomCloudActiveDirectoryEndpoint}}",
"galleryEndpoint": "{{AKSCustomCloudGalleryEndpoint}}",
"keyVaultEndpoint": "{{AKSCustomCloudKeyVaultEndpoint}}",
"graphEndpoint": "{{AKSCustomCloudGraphEndpoint}}",
"serviceBusEndpoint": "{{AKSCustomCloudServiceBusEndpoint}}",
"batchManagementEndpoint": "{{AKSCustomCloudBatchManagementEndpoint}}",
"storageEndpointSuffix": "{{AKSCustomCloudStorageEndpointSuffix}}",
"sqlDatabaseDNSSuffix": "{{AKSCustomCloudSqlDatabaseDNSSuffix}}",
"trafficManagerDNSSuffix": "{{AKSCustomCloudTrafficManagerDNSSuffix}}",
"keyVaultDNSSuffix": "{{AKSCustomCloudKeyVaultDNSSuffix}}",
"serviceBusEndpointSuffix": "{{AKSCustomCloudServiceBusEndpointSuffix}}",
"serviceManagementVMDNSSuffix": "{{AKSCustomCloudServiceManagementVMDNSSuffix}}",
"resourceManagerVMDNSSuffix": "{{AKSCustomCloudResourceManagerVMDNSSuffix}}",
"containerRegistryDNSSuffix": "{{AKSCustomCloudContainerRegistryDNSSuffix}}",
"cosmosDBDNSSuffix": "{{AKSCustomCloudCosmosDBDNSSuffix}}",
"tokenAudience": "{{AKSCustomCloudTokenAudience}}",
"resourceIdentifiers": {
"graph": "{{AKSCustomCloudResourceIdentifiersGraph}}",
"keyVault": "{{AKSCustomCloudResourceIdentifiersKeyVault}}",
"datalake": "{{AKSCustomCloudResourceIdentifiersDatalake}}",
"batch": "{{AKSCustomCloudResourceIdentifiersBatch}}",
"operationalInsights": "{{AKSCustomCloudResourceIdentifiersOperationalInsights}}",
"storage": "{{AKSCustomCloudResourceIdentifiersStorage}}"
}
}
EOF
set -x
{{end}}
{{- if IsKubeletConfigFileEnabled}}
# Write the templated kubelet config file when file-based config is enabled.
set +x
KUBELET_CONFIG_JSON_PATH="/etc/default/kubeletconfig.json"
touch "${KUBELET_CONFIG_JSON_PATH}"
chmod 0644 "${KUBELET_CONFIG_JSON_PATH}"
chown root:root "${KUBELET_CONFIG_JSON_PATH}"
cat << EOF > "${KUBELET_CONFIG_JSON_PATH}"
{{GetKubeletConfigFileContent}}
EOF
set -x
{{- end}}
}
# Loads and persists the br_netfilter kernel module, then installs the CNI
# configuration via configureCNIIPTables.
configureCNI() {
{{/* needed for the iptables rules to work on bridges */}}
retrycmd_if_failure 120 5 25 modprobe br_netfilter || exit $ERR_MODPROBE_FAIL
echo -n "br_netfilter" > /etc/modules-load.d/br_netfilter.conf
configureCNIIPTables
}
# For the Azure CNI plugin: moves its conflist into the CNI config dir and,
# for Calico network policy or transparent network mode, rewrites the CNI
# mode from "bridge" to "transparent".
configureCNIIPTables() {
if [[ "${NETWORK_PLUGIN}" = "azure" ]]; then
mv $CNI_BIN_DIR/10-azure.conflist $CNI_CONFIG_DIR/
chmod 600 $CNI_CONFIG_DIR/10-azure.conflist
if [[ "${NETWORK_POLICY}" == "calico" ]]; then
sed -i 's#"mode":"bridge"#"mode":"transparent"#g' $CNI_CONFIG_DIR/10-azure.conflist
elif [[ "${NETWORK_POLICY}" == "" || "${NETWORK_POLICY}" == "none" ]] && [[ "${NETWORK_MODE}" == "transparent" ]]; then
sed -i 's#"mode":"bridge"#"mode":"transparent"#g' $CNI_CONFIG_DIR/10-azure.conflist
fi
# Dump the current NAT ebtables rules (diagnostic output only).
/sbin/ebtables -t nat --list
fi
}
# On Ubuntu 18.04 (when templated on), points /etc/resolv.conf at the
# upstream resolv.conf maintained by systemd-resolved instead of the stub
# file, leaving the resolved service itself running. Always dumps the
# current resolv.conf for diagnostics.
disable1804SystemdResolved() {
ls -ltr /etc/resolv.conf
cat /etc/resolv.conf
{{- if Disable1804SystemdResolved}}
UBUNTU_RELEASE=$(lsb_release -r -s)
if [[ ${UBUNTU_RELEASE} == "18.04" ]]; then
# Fixed typo in the log message ("Ingorings" -> "Ignoring").
echo "Ignoring systemd-resolved query service but using its resolv.conf file"
echo "This is the simplest approach to workaround resolved issues without completely uninstall it"
[ -f /run/systemd/resolve/resolv.conf ] && sudo ln -sf /run/systemd/resolve/resolv.conf /etc/resolv.conf
ls -ltr /etc/resolv.conf
cat /etc/resolv.conf
fi
{{- else}}
echo "Disable1804SystemdResolved is false. Skipping."
{{- end}}
}
{{- if NeedsContainerd}}
# Waits for the containerd config drop-ins to be provisioned, applies the
# containerd sysctls, stops docker if running (the runtimes are mutually
# exclusive), then enables and starts containerd.
ensureContainerd() {
{{- if TeleportEnabled}}
ensureTeleportd
{{- end}}
wait_for_file 1200 1 /etc/systemd/system/containerd.service.d/exec_start.conf || exit $ERR_FILE_WATCH_TIMEOUT
wait_for_file 1200 1 /etc/containerd/config.toml || exit $ERR_FILE_WATCH_TIMEOUT
wait_for_file 1200 1 /etc/sysctl.d/11-containerd.conf || exit $ERR_FILE_WATCH_TIMEOUT
retrycmd_if_failure 120 5 25 sysctl --system || exit $ERR_SYSCTL_RELOAD
systemctl is-active --quiet docker && (systemctl_disable 20 30 120 docker || exit $ERR_SYSTEMD_DOCKER_STOP_FAIL)
systemctlEnableAndStart containerd || exit $ERR_SYSTEMCTL_START_FAIL
}
{{- if and IsKubenet (not HasCalicoNetworkPolicy)}}
# Kubenet without Calico: start the service that prevents duplicate packets
# on the promiscuous bridge once its script and unit file exist.
ensureNoDupOnPromiscuBridge() {
wait_for_file 1200 1 /opt/azure/containers/ensure-no-dup.sh || exit $ERR_FILE_WATCH_TIMEOUT
wait_for_file 1200 1 /etc/systemd/system/ensure-no-dup.service || exit $ERR_FILE_WATCH_TIMEOUT
systemctlEnableAndStart ensure-no-dup || exit $ERR_SYSTEMCTL_START_FAIL
}
{{- end}}
{{- if TeleportEnabled}}
# Enable and start the teleportd daemon once its systemd unit is delivered.
ensureTeleportd() {
wait_for_file 1200 1 /etc/systemd/system/teleportd.service || exit $ERR_FILE_WATCH_TIMEOUT
systemctlEnableAndStart teleportd || exit $ERR_SYSTEMCTL_START_FAIL
}
{{- end}}
{{- else}}
# Docker runtime path: wait for docker's systemd drop-ins and for a non-empty,
# parseable /etc/docker/daemon.json, stop containerd if active, then enable
# and start docker.
ensureDocker() {
DOCKER_SERVICE_EXEC_START_FILE=/etc/systemd/system/docker.service.d/exec_start.conf
wait_for_file 1200 1 $DOCKER_SERVICE_EXEC_START_FILE || exit $ERR_FILE_WATCH_TIMEOUT
usermod -aG docker ${ADMINUSER}
DOCKER_MOUNT_FLAGS_SYSTEMD_FILE=/etc/systemd/system/docker.service.d/clear_mount_propagation_flags.conf
wait_for_file 1200 1 $DOCKER_MOUNT_FLAGS_SYSTEMD_FILE || exit $ERR_FILE_WATCH_TIMEOUT
DOCKER_JSON_FILE=/etc/docker/daemon.json
# Poll up to 1200s for daemon.json to be non-empty AND valid JSON (jq exits
# non-zero on a parse failure), so a partially-written file never wins the race.
for i in $(seq 1 1200); do
if [ -s $DOCKER_JSON_FILE ]; then
jq '.' < $DOCKER_JSON_FILE && break
fi
if [ $i -eq 1200 ]; then
exit $ERR_FILE_WATCH_TIMEOUT
else
sleep 1
fi
done
systemctl is-active --quiet containerd && (systemctl_disable 20 30 120 containerd || exit $ERR_SYSTEMD_CONTAINERD_STOP_FAIL)
systemctlEnableAndStart docker || exit $ERR_DOCKER_START_FAIL
}
{{- if NeedsContainerd}}
# Start the containerd health monitor via its systemd timer (not the service
# directly) so the first run is deferred after boot.
ensureMonitorService() {
{{/* Delay start of containerd-monitor for 30 mins after booting */}}
CONTAINERD_MONITOR_SYSTEMD_TIMER_FILE=/etc/systemd/system/containerd-monitor.timer
wait_for_file 1200 1 $CONTAINERD_MONITOR_SYSTEMD_TIMER_FILE || exit $ERR_FILE_WATCH_TIMEOUT
CONTAINERD_MONITOR_SYSTEMD_FILE=/etc/systemd/system/containerd-monitor.service
wait_for_file 1200 1 $CONTAINERD_MONITOR_SYSTEMD_FILE || exit $ERR_FILE_WATCH_TIMEOUT
systemctlEnableAndStart containerd-monitor.timer || exit $ERR_SYSTEMCTL_START_FAIL
}
{{- else}}
# Docker variant of the health-monitor bootstrap; mirrors the containerd one.
ensureMonitorService() {
{{/* Delay start of docker-monitor for 30 mins after booting */}}
DOCKER_MONITOR_SYSTEMD_TIMER_FILE=/etc/systemd/system/docker-monitor.timer
wait_for_file 1200 1 $DOCKER_MONITOR_SYSTEMD_TIMER_FILE || exit $ERR_FILE_WATCH_TIMEOUT
DOCKER_MONITOR_SYSTEMD_FILE=/etc/systemd/system/docker-monitor.service
wait_for_file 1200 1 $DOCKER_MONITOR_SYSTEMD_FILE || exit $ERR_FILE_WATCH_TIMEOUT
systemctlEnableAndStart docker-monitor.timer || exit $ERR_SYSTEMCTL_START_FAIL
}
{{- end}}
{{if IsIPv6DualStackFeatureEnabled}}
# Dual-stack clusters: bring up the DHCPv6 client service and load ip6_tables
# so IPv6 rules can be programmed.
ensureDHCPv6() {
wait_for_file 3600 1 {{GetDHCPv6ServiceCSEScriptFilepath}} || exit $ERR_FILE_WATCH_TIMEOUT
wait_for_file 3600 1 {{GetDHCPv6ConfigCSEScriptFilepath}} || exit $ERR_FILE_WATCH_TIMEOUT
systemctlEnableAndStart dhcpv6 || exit $ERR_SYSTEMCTL_START_FAIL
retrycmd_if_failure 120 5 25 modprobe ip6_tables || exit $ERR_MODPROBE_FAIL
}
{{end}}
ensureKubelet() {
# Wait for kubelet's environment file, its credentials (bootstrap kubeconfig
# when TLS bootstrapping is enabled, otherwise a full kubeconfig) and the
# runtime config script, then enable and start kubelet. For Antrea/Flannel,
# additionally block until the CNI conflist appears so the node does not
# come up without a working CNI.
KUBELET_DEFAULT_FILE=/etc/default/kubelet
wait_for_file 1200 1 $KUBELET_DEFAULT_FILE || exit $ERR_FILE_WATCH_TIMEOUT
{{if IsKubeletClientTLSBootstrappingEnabled -}}
BOOTSTRAP_KUBECONFIG_FILE=/var/lib/kubelet/bootstrap-kubeconfig
wait_for_file 1200 1 $BOOTSTRAP_KUBECONFIG_FILE || exit $ERR_FILE_WATCH_TIMEOUT
{{- else -}}
KUBECONFIG_FILE=/var/lib/kubelet/kubeconfig
wait_for_file 1200 1 $KUBECONFIG_FILE || exit $ERR_FILE_WATCH_TIMEOUT
{{- end}}
KUBELET_RUNTIME_CONFIG_SCRIPT_FILE=/opt/azure/containers/kubelet.sh
wait_for_file 1200 1 $KUBELET_RUNTIME_CONFIG_SCRIPT_FILE || exit $ERR_FILE_WATCH_TIMEOUT
{{- if ShouldConfigureHTTPProxy}}
configureEtcEnvironment
{{- end}}
systemctlEnableAndStart kubelet || exit $ERR_KUBELET_START_FAIL
{{if HasAntreaNetworkPolicy}}
while [ ! -f /etc/cni/net.d/10-antrea.conf ]; do
sleep 3
done
{{end}}
{{if HasFlannelNetworkPlugin}}
while [ ! -f /etc/cni/net.d/10-flannel.conf ]; do
sleep 3
done
{{end}}
}
ensureMigPartition(){
# Enable and start the MIG GPU-partitioning service; abort provisioning if
# it cannot be started.
systemctlEnableAndStart mig-partition || exit ${ERR_SYSTEMCTL_START_FAIL}
}
ensureSysctl() {
# Wait for the AKS sysctl drop-in to be delivered, then apply all sysctl
# settings system-wide (retried; failure here is non-fatal).
SYSCTL_CONFIG_FILE="/etc/sysctl.d/999-sysctl-aks.conf"
wait_for_file 1200 1 "${SYSCTL_CONFIG_FILE}" || exit $ERR_FILE_WATCH_TIMEOUT
retrycmd_if_failure 24 5 25 sysctl --system
}
ensureJournal() {
# Configure journald for persistent, size-capped storage with forwarding to
# syslog, then (re)start it.
# NOTE(review): this appends on every invocation, so re-running provisioning
# duplicates these lines in journald.conf — confirm whether that is intended.
{
echo "Storage=persistent"
echo "SystemMaxUse=1G"
echo "RuntimeMaxUse=1G"
echo "ForwardToSyslog=yes"
} >> /etc/systemd/journald.conf
systemctlEnableAndStart systemd-journald || exit $ERR_SYSTEMCTL_START_FAIL
}
ensureK8sControlPlane() {
# Skip the reachability check when a reboot is pending or the node has no
# outbound connectivity; otherwise poll the API server via kubectl
# cluster-info until it answers, or exit with a timeout error.
if $REBOOTREQUIRED || [ "$NO_OUTBOUND" = "true" ]; then
return
fi
retrycmd_if_failure 120 5 25 $KUBECTL 2>/dev/null cluster-info || exit $ERR_K8S_RUNNING_TIMEOUT
}
createKubeManifestDir() {
# Ensure the static-pod manifest directory expected by kubelet exists.
KUBEMANIFESTDIR="/etc/kubernetes/manifests"
mkdir -p "${KUBEMANIFESTDIR}"
}
writeKubeConfig() {
# Render the admin kubeconfig for $ADMINUSER from the cluster certificate
# environment variables. xtrace is disabled while the secret-bearing file
# content is echoed, and permissions are restricted to the owner.
KUBECONFIGDIR=/home/$ADMINUSER/.kube
KUBECONFIGFILE=$KUBECONFIGDIR/config
mkdir -p $KUBECONFIGDIR
touch $KUBECONFIGFILE
chown $ADMINUSER:$ADMINUSER $KUBECONFIGDIR
chown $ADMINUSER:$ADMINUSER $KUBECONFIGFILE
chmod 700 $KUBECONFIGDIR
chmod 600 $KUBECONFIGFILE
set +x
echo "
---
apiVersion: v1
clusters:
- cluster:
certificate-authority-data: \"$CA_CERTIFICATE\"
server: $KUBECONFIG_SERVER
name: \"$MASTER_FQDN\"
contexts:
- context:
cluster: \"$MASTER_FQDN\"
user: \"$MASTER_FQDN-admin\"
name: \"$MASTER_FQDN\"
current-context: \"$MASTER_FQDN\"
kind: Config
users:
- name: \"$MASTER_FQDN-admin\"
user:
client-certificate-data: \"$KUBECONFIG_CERTIFICATE\"
client-key-data: \"$KUBECONFIG_KEY\"
" > $KUBECONFIGFILE
set -x
}
configClusterAutoscalerAddon() {
# Render the cluster-autoscaler addon manifest by substituting base64-encoded
# cluster credentials and identifiers into its placeholder tokens.
CLUSTER_AUTOSCALER_ADDON_FILE=/etc/kubernetes/addons/cluster-autoscaler-deployment.yaml
wait_for_file 1200 1 $CLUSTER_AUTOSCALER_ADDON_FILE || exit $ERR_FILE_WATCH_TIMEOUT
# 'echo -n' keeps a trailing newline out of the encoded value, and
# 'base64 -w 0' prevents long secrets from wrapping across lines (wrapped
# output would break the single-line sed substitution). This matches the
# encoding style already used by configACIConnectorAddon.
sed -i "s|<clientID>|$(echo -n "$SERVICE_PRINCIPAL_CLIENT_ID" | base64 -w 0)|g" $CLUSTER_AUTOSCALER_ADDON_FILE
sed -i "s|<clientSec>|$(echo -n "$SERVICE_PRINCIPAL_CLIENT_SECRET" | base64 -w 0)|g" $CLUSTER_AUTOSCALER_ADDON_FILE
sed -i "s|<subID>|$(echo -n "$SUBSCRIPTION_ID" | base64 -w 0)|g" $CLUSTER_AUTOSCALER_ADDON_FILE
sed -i "s|<tenantID>|$(echo -n "$TENANT_ID" | base64 -w 0)|g" $CLUSTER_AUTOSCALER_ADDON_FILE
sed -i "s|<rg>|$(echo -n "$RESOURCE_GROUP" | base64 -w 0)|g" $CLUSTER_AUTOSCALER_ADDON_FILE
}
configACIConnectorAddon() {
# Render the ACI connector (virtual-kubelet) addon manifest: build a
# base64-encoded service-principal credential blob, mint a self-signed
# 10-year cert/key pair, and substitute them into the manifest placeholders.
ACI_CONNECTOR_CREDENTIALS=$(printf "{\"clientId\": \"%s\", \"clientSecret\": \"%s\", \"tenantId\": \"%s\", \"subscriptionId\": \"%s\", \"activeDirectoryEndpointUrl\": \"https://login.microsoftonline.com\",\"resourceManagerEndpointUrl\": \"https://management.azure.com/\", \"activeDirectoryGraphResourceId\": \"https://graph.windows.net/\", \"sqlManagementEndpointUrl\": \"https://management.core.windows.net:8443/\", \"galleryEndpointUrl\": \"https://gallery.azure.com/\", \"managementEndpointUrl\": \"https://management.core.windows.net/\"}" "$SERVICE_PRINCIPAL_CLIENT_ID" "$SERVICE_PRINCIPAL_CLIENT_SECRET" "$TENANT_ID" "$SUBSCRIPTION_ID" | base64 -w 0)
openssl req -newkey rsa:4096 -new -nodes -x509 -days 3650 -keyout /etc/kubernetes/certs/aci-connector-key.pem -out /etc/kubernetes/certs/aci-connector-cert.pem -subj "/C=US/ST=CA/L=virtualkubelet/O=virtualkubelet/OU=virtualkubelet/CN=virtualkubelet"
ACI_CONNECTOR_KEY=$(base64 /etc/kubernetes/certs/aci-connector-key.pem -w0)
ACI_CONNECTOR_CERT=$(base64 /etc/kubernetes/certs/aci-connector-cert.pem -w0)
ACI_CONNECTOR_ADDON_FILE=/etc/kubernetes/addons/aci-connector-deployment.yaml
wait_for_file 1200 1 $ACI_CONNECTOR_ADDON_FILE || exit $ERR_FILE_WATCH_TIMEOUT
sed -i "s|<creds>|$ACI_CONNECTOR_CREDENTIALS|g" $ACI_CONNECTOR_ADDON_FILE
sed -i "s|<rgName>|$RESOURCE_GROUP|g" $ACI_CONNECTOR_ADDON_FILE
sed -i "s|<cert>|$ACI_CONNECTOR_CERT|g" $ACI_CONNECTOR_ADDON_FILE
sed -i "s|<key>|$ACI_CONNECTOR_KEY|g" $ACI_CONNECTOR_ADDON_FILE
}
configAzurePolicyAddon() {
# Point the Azure Policy addon manifest at this cluster's resource-group scope.
# NOTE(review): unlike the other addon helpers, this does not wait_for_file on
# the manifest before sed'ing it — confirm delivery ordering makes that safe.
AZURE_POLICY_ADDON_FILE=/etc/kubernetes/addons/azure-policy-deployment.yaml
sed -i "s|<resourceId>|/subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP|g" $AZURE_POLICY_ADDON_FILE
}
{{if IsNSeriesSKU}}
# Run the NVIDIA .run driver installer for the current kernel. Exported and
# invoked via 'bash -c' by setupGpuRunfileInstall so it can be retried whole.
installGPUDriversRun() {
{{- /* if there is no file under the DKMS module folder, a previous installation
failed, so clean up the dirty directory; when upgrading the GPU driver version,
please check whether the retry-installation issue is gone, and if so remove
this clean-up logic as well */}}
set -x
MODULE_NAME="nvidia"
NVIDIA_DKMS_DIR="/var/lib/dkms/${MODULE_NAME}/${GPU_DV}"
KERNEL_NAME=$(uname -r)
# Remove any stale DKMS registration for this driver version; prefer dkms
# itself when available, otherwise delete the tree directly.
if [ -d "${NVIDIA_DKMS_DIR}" ]; then
if [ -x "$(command -v dkms)" ]; then
dkms remove -m ${MODULE_NAME} -v ${GPU_DV} -k ${KERNEL_NAME}
else
rm -rf "${NVIDIA_DKMS_DIR}"
fi
fi
{{- /* we need to append the date to the end of the file because the retry will override the log file */}}
local log_file_name="/var/log/nvidia-installer-$(date +%s).log"
if [ ! -f "${GPU_DEST}/nvidia-drivers-${GPU_DV}" ]; then
downloadGPUDrivers
fi
# Silent install against the running kernel, with utilities/libGL redirected
# into GPU_DEST; propagate the installer's exit code.
sh $GPU_DEST/nvidia-drivers-$GPU_DV -s \
-k=$KERNEL_NAME \
--log-file-name=${log_file_name} \
-a --no-drm --dkms --utility-prefix="${GPU_DEST}" --opengl-prefix="${GPU_DEST}"
exit $?
}
# Full GPU stack setup: nouveau blacklist, NVIDIA apt repo, container runtime
# integration, then the kernel driver itself, verified via nvidia-smi.
configGPUDrivers() {
blacklistNouveau
addNvidiaAptRepo
installNvidiaContainerRuntime "${NVIDIA_CONTAINER_RUNTIME_VERSION}"
installNvidiaDocker "${NVIDIA_DOCKER_VERSION}"
# tidy
rm -rf $GPU_DEST/tmp
# reload containerd/dockerd
{{if NeedsContainerd}}
retrycmd_if_failure 120 5 25 pkill -SIGHUP containerd || exit $ERR_GPU_DRIVERS_INSTALL_TIMEOUT
{{else}}
retrycmd_if_failure 120 5 25 pkill -SIGHUP dockerd || exit $ERR_GPU_DRIVERS_INSTALL_TIMEOUT
{{end}}
# install gpu driver
setupGpuRunfileInstall
retrycmd_if_failure 120 5 25 nvidia-modprobe -u -c0 || exit $ERR_GPU_DRIVERS_START_FAIL
retrycmd_if_failure 120 5 25 nvidia-smi || exit $ERR_GPU_DRIVERS_START_FAIL
retrycmd_if_failure 120 5 25 ldconfig || exit $ERR_GPU_DRIVERS_START_FAIL
}
setupGpuRunfileInstall() {
# Overlay-mount ${GPU_DEST}/lib64 over /usr/lib/x86_64-linux-gnu so the .run
# installer's library writes land in GPU_DEST, run the installer (up to 3
# attempts), publish the binaries and library path, then detach the overlay.
mkdir -p $GPU_DEST/lib64 $GPU_DEST/overlay-workdir
retrycmd_if_failure 120 5 25 mount -t overlay -o lowerdir=/usr/lib/x86_64-linux-gnu,upperdir=${GPU_DEST}/lib64,workdir=${GPU_DEST}/overlay-workdir none /usr/lib/x86_64-linux-gnu || exit $ERR_GPU_DRIVERS_INSTALL_TIMEOUT
export -f installGPUDriversRun
retrycmd_if_failure 3 1 600 bash -c installGPUDriversRun || exit $ERR_GPU_DRIVERS_START_FAIL
mv ${GPU_DEST}/bin/* /usr/bin
echo "${GPU_DEST}/lib64" > /etc/ld.so.conf.d/nvidia.conf
retrycmd_if_failure 120 5 25 ldconfig || exit $ERR_GPU_DRIVERS_START_FAIL
umount -l /usr/lib/x86_64-linux-gnu
}
validateGPUDrivers() {
# Verify the NVIDIA driver is loadable and nvidia-smi works; fall back to a
# full driver (re)configuration if the module cannot be loaded.
if [[ $(isARM64) == 1 ]]; then
# no GPU on ARM64
return
fi
retrycmd_if_failure 24 5 25 nvidia-modprobe -u -c0 && echo "gpu driver loaded" || configGPUDrivers || exit $ERR_GPU_DRIVERS_START_FAIL
# Use nvidia-smi from PATH when present, otherwise the GPU_DEST copy.
which nvidia-smi
if [[ $? == 0 ]]; then
SMI_RESULT=$(retrycmd_if_failure 24 5 25 nvidia-smi)
else
SMI_RESULT=$(retrycmd_if_failure 24 5 25 $GPU_DEST/bin/nvidia-smi)
fi
SMI_STATUS=$?
if [[ $SMI_STATUS != 0 ]]; then
# infoROM corruption gets a dedicated exit code so it can be triaged apart
# from generic driver start failures.
if [[ $SMI_RESULT == *"infoROM is corrupted"* ]]; then
exit $ERR_GPU_INFO_ROM_CORRUPTED
else
exit $ERR_GPU_DRIVERS_START_FAIL
fi
else
echo "gpu driver working fine"
fi
}
ensureGPUDrivers() {
# Entry point for GPU driver setup: full configure when requested, otherwise
# (pre-baked GPU VHDs) refresh only the container runtime bits and validate.
if [[ $(isARM64) == 1 ]]; then
# no GPU on ARM64
return
fi
if [[ "${CONFIG_GPU_DRIVER_IF_NEEDED}" = true ]]; then
configGPUDrivers
else
# needs to happen even on gpu vhd because newer containerd/runc broke old
# nvidia-container-runtime. containerd [1.5.9, 1.4.12] + runc 1.0.2 don't work with
# old nvidia-container-runtime like 2.0.0, only new like 3.6.0
installNvidiaContainerRuntime "${NVIDIA_CONTAINER_RUNTIME_VERSION}"
installNvidiaDocker "${NVIDIA_DOCKER_VERSION}"
validateGPUDrivers
fi
systemctlEnableAndStart nvidia-modprobe || exit $ERR_GPU_DRIVERS_START_FAIL
}
{{end}}
#EOF
|
#!/usr/bin/env bash
# Build the xtdb uberjar with Maven.
# Overridable via environment: XTDB_VERSION, XTDB_ARTIFACT_VERSION, UBERJAR_NAME.
# Any extra script arguments are forwarded to mvn.
set -x
# Quote "$@" and the -D expansions so values containing whitespace are passed
# through intact instead of being word-split (unquoted $@ was a latent bug).
mvn package \
  -Dxtdb.xtdb-version="${XTDB_VERSION:-xtdb-git-version}" \
  -Dxtdb.artifact-version="${XTDB_ARTIFACT_VERSION:-xtdb-git-version}" \
  -Dxtdb.uberjar-name="${UBERJAR_NAME:-xtdb.jar}" "$@"
|
#!/bin/sh
#
# Copyright (c) 2010 Johan Herland
#
test_description='Test notes merging with manual conflict resolution'
. ./test-lib.sh
# Set up a notes merge scenario with different kinds of conflicts
test_expect_success 'setup commits' '
test_commit 1st &&
test_commit 2nd &&
test_commit 3rd &&
test_commit 4th &&
test_commit 5th
'
# Cache the five commit SHA-1s so later expectation files can reference them.
commit_sha1=$(git rev-parse 1st^{commit})
commit_sha2=$(git rev-parse 2nd^{commit})
commit_sha3=$(git rev-parse 3rd^{commit})
commit_sha4=$(git rev-parse 4th^{commit})
commit_sha5=$(git rev-parse 5th^{commit})
# verify_notes <ref>: assert that the notes in refs/notes/<ref> match the
# pre-built "expect_notes_<ref>" file (sorted "note-object commit" pairs) and
# that "git log" annotated with that ref matches "expect_log_<ref>".
verify_notes () {
notes_ref="$1"
git -c core.notesRef="refs/notes/$notes_ref" notes |
sort >"output_notes_$notes_ref" &&
test_cmp "expect_notes_$notes_ref" "output_notes_$notes_ref" &&
git -c core.notesRef="refs/notes/$notes_ref" log --format="%H %s%n%N" \
>"output_log_$notes_ref" &&
test_cmp "expect_log_$notes_ref" "output_log_$notes_ref"
}
# Expected notes object/commit pairs and log output for the merge base ref x.
cat <<EOF | sort >expect_notes_x
6e8e3febca3c2bb896704335cc4d0c34cb2f8715 $commit_sha4
e5388c10860456ee60673025345fe2e153eb8cf8 $commit_sha3
ceefa674873670e7ecd131814d909723cce2b669 $commit_sha2
EOF
cat >expect_log_x <<EOF
$commit_sha5 5th
$commit_sha4 4th
x notes on 4th commit
$commit_sha3 3rd
x notes on 3rd commit
$commit_sha2 2nd
x notes on 2nd commit
$commit_sha1 1st
EOF
test_expect_success 'setup merge base (x)' '
git config core.notesRef refs/notes/x &&
git notes add -m "x notes on 2nd commit" 2nd &&
git notes add -m "x notes on 3rd commit" 3rd &&
git notes add -m "x notes on 4th commit" 4th &&
verify_notes x
'
# Local branch y diverges from x: add on 1st, remove on 2nd, change 3rd/4th.
cat <<EOF | sort >expect_notes_y
e2bfd06a37dd2031684a59a6e2b033e212239c78 $commit_sha4
5772f42408c0dd6f097a7ca2d24de0e78d1c46b1 $commit_sha3
b0a6021ec006d07e80e9b20ec9b444cbd9d560d3 $commit_sha1
EOF
cat >expect_log_y <<EOF
$commit_sha5 5th
$commit_sha4 4th
y notes on 4th commit
$commit_sha3 3rd
y notes on 3rd commit
$commit_sha2 2nd
$commit_sha1 1st
y notes on 1st commit
EOF
test_expect_success 'setup local branch (y)' '
git update-ref refs/notes/y refs/notes/x &&
git config core.notesRef refs/notes/y &&
git notes add -f -m "y notes on 1st commit" 1st &&
git notes remove 2nd &&
git notes add -f -m "y notes on 3rd commit" 3rd &&
git notes add -f -m "y notes on 4th commit" 4th &&
verify_notes y
'
# Remote branch z diverges from x differently: add 1st, change 2nd/4th, remove 3rd.
cat <<EOF | sort >expect_notes_z
cff59c793c20bb49a4e01bc06fb06bad642e0d54 $commit_sha4
283b48219aee9a4105f6cab337e789065c82c2b9 $commit_sha2
0a81da8956346e19bcb27a906f04af327e03e31b $commit_sha1
EOF
cat >expect_log_z <<EOF
$commit_sha5 5th
$commit_sha4 4th
z notes on 4th commit
$commit_sha3 3rd
$commit_sha2 2nd
z notes on 2nd commit
$commit_sha1 1st
z notes on 1st commit
EOF
test_expect_success 'setup remote branch (z)' '
git update-ref refs/notes/z refs/notes/x &&
git config core.notesRef refs/notes/z &&
git notes add -f -m "z notes on 1st commit" 1st &&
git notes add -f -m "z notes on 2nd commit" 2nd &&
git notes remove 3rd &&
git notes add -f -m "z notes on 4th commit" 4th &&
verify_notes z
'
# At this point, before merging z into y, we have the following status:
#
# commit | base/x | local/y | remote/z | diff from x to y/z
# -------|---------|---------|----------|---------------------------
# 1st | [none] | b0a6021 | 0a81da8 | added / added (diff)
# 2nd | ceefa67 | [none] | 283b482 | removed / changed
# 3rd | e5388c1 | 5772f42 | [none] | changed / removed
# 4th | 6e8e3fe | e2bfd06 | cff59c7 | changed / changed (diff)
# 5th | [none] | [none] | [none] | [none]
# All four divergent commits are expected to show up as conflicts.
cat <<EOF | sort >expect_conflicts
$commit_sha1
$commit_sha2
$commit_sha3
$commit_sha4
EOF
# Expected per-commit contents of .git/NOTES_MERGE_WORKTREE: add/add and
# change/change produce conflict markers; one-sided edits keep the edit.
cat >expect_conflict_$commit_sha1 <<EOF
<<<<<<< refs/notes/m
y notes on 1st commit
=======
z notes on 1st commit
>>>>>>> refs/notes/z
EOF
cat >expect_conflict_$commit_sha2 <<EOF
z notes on 2nd commit
EOF
cat >expect_conflict_$commit_sha3 <<EOF
y notes on 3rd commit
EOF
cat >expect_conflict_$commit_sha4 <<EOF
<<<<<<< refs/notes/m
y notes on 4th commit
=======
z notes on 4th commit
>>>>>>> refs/notes/z
EOF
# A failed merge must leave m untouched, i.e. still equal to y.
cp expect_notes_y expect_notes_m
cp expect_log_y expect_log_m
git rev-parse refs/notes/y > pre_merge_y
git rev-parse refs/notes/z > pre_merge_z
# The manual resolver should stop, populate NOTES_MERGE_WORKTREE, and leave m alone.
test_expect_success 'merge z into m (== y) with default ("manual") resolver => Conflicting 3-way merge' '
git update-ref refs/notes/m refs/notes/y &&
git config core.notesRef refs/notes/m &&
test_must_fail git notes merge z >output &&
# Output should point to where to resolve conflicts
grep -q "\\.git/NOTES_MERGE_WORKTREE" output &&
# Inspect merge conflicts
ls .git/NOTES_MERGE_WORKTREE >output_conflicts &&
test_cmp expect_conflicts output_conflicts &&
( for f in $(cat expect_conflicts); do
test_cmp "expect_conflict_$f" ".git/NOTES_MERGE_WORKTREE/$f" ||
exit 1
done ) &&
# Verify that current notes tree (pre-merge) has not changed (m == y)
verify_notes y &&
verify_notes m &&
test "$(git rev-parse refs/notes/m)" = "$(cat pre_merge_y)"
'
# Append to z's note on 4th while the previous merge is still unresolved.
cat <<EOF | sort >expect_notes_z
00494adecf2d9635a02fa431308d67993f853968 $commit_sha4
283b48219aee9a4105f6cab337e789065c82c2b9 $commit_sha2
0a81da8956346e19bcb27a906f04af327e03e31b $commit_sha1
EOF
cat >expect_log_z <<EOF
$commit_sha5 5th
$commit_sha4 4th
z notes on 4th commit
More z notes on 4th commit
$commit_sha3 3rd
$commit_sha2 2nd
z notes on 2nd commit
$commit_sha1 1st
z notes on 1st commit
EOF
test_expect_success 'change notes in z' '
git notes --ref z append -m "More z notes on 4th commit" 4th &&
verify_notes z
'
# A conflicting merge must refuse to start while NOTES_MERGE_* state exists.
test_expect_success 'cannot do merge w/conflicts when previous merge is unfinished' '
test -d .git/NOTES_MERGE_WORKTREE &&
test_must_fail git notes merge z >output 2>&1 &&
# Output should indicate what is wrong
grep -q "\\.git/NOTES_MERGE_\\* exists" output
'
# Setup non-conflicting merge between x and new notes ref w
cat <<EOF | sort >expect_notes_w
ceefa674873670e7ecd131814d909723cce2b669 $commit_sha2
f75d1df88cbfe4258d49852f26cfc83f2ad4494b $commit_sha1
EOF
cat >expect_log_w <<EOF
$commit_sha5 5th
$commit_sha4 4th
$commit_sha3 3rd
$commit_sha2 2nd
x notes on 2nd commit
$commit_sha1 1st
w notes on 1st commit
EOF
# w shares the note on 2nd with x, so merging x into w cannot conflict.
test_expect_success 'setup unrelated notes ref (w)' '
git config core.notesRef refs/notes/w &&
git notes add -m "w notes on 1st commit" 1st &&
git notes add -m "x notes on 2nd commit" 2nd &&
verify_notes w
'
# Expected state of w after a clean merge of x into it.
cat <<EOF | sort >expect_notes_w
6e8e3febca3c2bb896704335cc4d0c34cb2f8715 $commit_sha4
e5388c10860456ee60673025345fe2e153eb8cf8 $commit_sha3
ceefa674873670e7ecd131814d909723cce2b669 $commit_sha2
f75d1df88cbfe4258d49852f26cfc83f2ad4494b $commit_sha1
EOF
cat >expect_log_w <<EOF
$commit_sha5 5th
$commit_sha4 4th
x notes on 4th commit
$commit_sha3 3rd
x notes on 3rd commit
$commit_sha2 2nd
x notes on 2nd commit
$commit_sha1 1st
w notes on 1st commit
EOF
test_expect_success 'can do merge without conflicts even if previous merge is unfinished (x => w)' '
test -d .git/NOTES_MERGE_WORKTREE &&
git notes merge x &&
verify_notes w &&
# Verify that other notes refs has not changed (x and y)
verify_notes x &&
verify_notes y
'
# Expected final state of m after the pending conflicts are hand-resolved.
cat <<EOF | sort >expect_notes_m
021faa20e931fb48986ffc6282b4bb05553ac946 $commit_sha4
5772f42408c0dd6f097a7ca2d24de0e78d1c46b1 $commit_sha3
283b48219aee9a4105f6cab337e789065c82c2b9 $commit_sha2
0a59e787e6d688aa6309e56e8c1b89431a0fc1c1 $commit_sha1
EOF
cat >expect_log_m <<EOF
$commit_sha5 5th
$commit_sha4 4th
y and z notes on 4th commit
$commit_sha3 3rd
y notes on 3rd commit
$commit_sha2 2nd
z notes on 2nd commit
$commit_sha1 1st
y and z notes on 1st commit
EOF
# --commit, --abort and --strategy are mutually exclusive.
test_expect_success 'do not allow mixing --commit and --abort' '
test_must_fail git notes merge --commit --abort
'
test_expect_success 'do not allow mixing --commit and --strategy' '
test_must_fail git notes merge --commit --strategy theirs
'
test_expect_success 'do not allow mixing --abort and --strategy' '
test_must_fail git notes merge --abort --strategy theirs
'
# Resolve both marker conflicts in the worktree, commit the merge, and check
# the resulting merge commit's parents, message and tree.
test_expect_success 'finalize conflicting merge (z => m)' '
# Resolve conflicts and finalize merge
cat >.git/NOTES_MERGE_WORKTREE/$commit_sha1 <<EOF &&
y and z notes on 1st commit
EOF
cat >.git/NOTES_MERGE_WORKTREE/$commit_sha4 <<EOF &&
y and z notes on 4th commit
EOF
git notes merge --commit &&
# No .git/NOTES_MERGE_* files left
test_might_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
test_cmp /dev/null output &&
# Merge commit has pre-merge y and pre-merge z as parents
test "$(git rev-parse refs/notes/m^1)" = "$(cat pre_merge_y)" &&
test "$(git rev-parse refs/notes/m^2)" = "$(cat pre_merge_z)" &&
# Merge commit mentions the notes refs merged
git log -1 --format=%B refs/notes/m > merge_commit_msg &&
grep -q refs/notes/m merge_commit_msg &&
grep -q refs/notes/z merge_commit_msg &&
# Merge commit mentions conflicting notes
grep -q "Conflicts" merge_commit_msg &&
( for sha1 in $(cat expect_conflicts); do
grep -q "$sha1" merge_commit_msg ||
exit 1
done ) &&
# Verify contents of merge result
verify_notes m &&
# Verify that other notes refs has not changed (w, x, y and z)
verify_notes w &&
verify_notes x &&
verify_notes y &&
verify_notes z
'
# The 4th-commit conflict now includes z's appended line.
cat >expect_conflict_$commit_sha4 <<EOF
<<<<<<< refs/notes/m
y notes on 4th commit
=======
z notes on 4th commit
More z notes on 4th commit
>>>>>>> refs/notes/z
EOF
cp expect_notes_y expect_notes_m
cp expect_log_y expect_log_m
git rev-parse refs/notes/y > pre_merge_y
git rev-parse refs/notes/z > pre_merge_z
test_expect_success 'redo merge of z into m (== y) with default ("manual") resolver => Conflicting 3-way merge' '
git update-ref refs/notes/m refs/notes/y &&
git config core.notesRef refs/notes/m &&
test_must_fail git notes merge z >output &&
# Output should point to where to resolve conflicts
grep -q "\\.git/NOTES_MERGE_WORKTREE" output &&
# Inspect merge conflicts
ls .git/NOTES_MERGE_WORKTREE >output_conflicts &&
test_cmp expect_conflicts output_conflicts &&
( for f in $(cat expect_conflicts); do
test_cmp "expect_conflict_$f" ".git/NOTES_MERGE_WORKTREE/$f" ||
exit 1
done ) &&
# Verify that current notes tree (pre-merge) has not changed (m == y)
verify_notes y &&
verify_notes m &&
test "$(git rev-parse refs/notes/m)" = "$(cat pre_merge_y)"
'
# --abort must clear all merge state and leave every notes ref untouched.
test_expect_success 'abort notes merge' '
git notes merge --abort &&
# No .git/NOTES_MERGE_* files left
test_might_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
test_cmp /dev/null output &&
# m has not moved (still == y)
test "$(git rev-parse refs/notes/m)" = "$(cat pre_merge_y)" &&
# Verify that other notes refs has not changed (w, x, y and z)
verify_notes w &&
verify_notes x &&
verify_notes y &&
verify_notes z
'
# Re-create the conflicted merge state for the add/remove finalization test.
git rev-parse refs/notes/y > pre_merge_y
git rev-parse refs/notes/z > pre_merge_z
test_expect_success 'redo merge of z into m (== y) with default ("manual") resolver => Conflicting 3-way merge' '
test_must_fail git notes merge z >output &&
# Output should point to where to resolve conflicts
grep -q "\\.git/NOTES_MERGE_WORKTREE" output &&
# Inspect merge conflicts
ls .git/NOTES_MERGE_WORKTREE >output_conflicts &&
test_cmp expect_conflicts output_conflicts &&
( for f in $(cat expect_conflicts); do
test_cmp "expect_conflict_$f" ".git/NOTES_MERGE_WORKTREE/$f" ||
exit 1
done ) &&
# Verify that current notes tree (pre-merge) has not changed (m == y)
verify_notes y &&
verify_notes m &&
test "$(git rev-parse refs/notes/m)" = "$(cat pre_merge_y)"
'
# Expected m after resolving by deleting two notes and adding a new one.
cat <<EOF | sort >expect_notes_m
304dfb4325cf243025b9957486eb605a9b51c199 $commit_sha5
283b48219aee9a4105f6cab337e789065c82c2b9 $commit_sha2
0a59e787e6d688aa6309e56e8c1b89431a0fc1c1 $commit_sha1
EOF
cat >expect_log_m <<EOF
$commit_sha5 5th
new note on 5th commit
$commit_sha4 4th
$commit_sha3 3rd
$commit_sha2 2nd
z notes on 2nd commit
$commit_sha1 1st
y and z notes on 1st commit
EOF
test_expect_success 'add + remove notes in finalized merge (z => m)' '
# Resolve one conflict
cat >.git/NOTES_MERGE_WORKTREE/$commit_sha1 <<EOF &&
y and z notes on 1st commit
EOF
# Remove another conflict
rm .git/NOTES_MERGE_WORKTREE/$commit_sha4 &&
# Remove a D/F conflict
rm .git/NOTES_MERGE_WORKTREE/$commit_sha3 &&
# Add a new note
echo "new note on 5th commit" > .git/NOTES_MERGE_WORKTREE/$commit_sha5 &&
# Finalize merge
git notes merge --commit &&
# No .git/NOTES_MERGE_* files left
test_might_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
test_cmp /dev/null output &&
# Merge commit has pre-merge y and pre-merge z as parents
test "$(git rev-parse refs/notes/m^1)" = "$(cat pre_merge_y)" &&
test "$(git rev-parse refs/notes/m^2)" = "$(cat pre_merge_z)" &&
# Merge commit mentions the notes refs merged
git log -1 --format=%B refs/notes/m > merge_commit_msg &&
grep -q refs/notes/m merge_commit_msg &&
grep -q refs/notes/z merge_commit_msg &&
# Merge commit mentions conflicting notes
grep -q "Conflicts" merge_commit_msg &&
( for sha1 in $(cat expect_conflicts); do
grep -q "$sha1" merge_commit_msg ||
exit 1
done ) &&
# Verify contents of merge result
verify_notes m &&
# Verify that other notes refs has not changed (w, x, y and z)
verify_notes w &&
verify_notes x &&
verify_notes y &&
verify_notes z
'
# One more conflicted merge, to be invalidated by moving m underneath it.
cp expect_notes_y expect_notes_m
cp expect_log_y expect_log_m
test_expect_success 'redo merge of z into m (== y) with default ("manual") resolver => Conflicting 3-way merge' '
git update-ref refs/notes/m refs/notes/y &&
test_must_fail git notes merge z >output &&
# Output should point to where to resolve conflicts
grep -q "\\.git/NOTES_MERGE_WORKTREE" output &&
# Inspect merge conflicts
ls .git/NOTES_MERGE_WORKTREE >output_conflicts &&
test_cmp expect_conflicts output_conflicts &&
( for f in $(cat expect_conflicts); do
test_cmp "expect_conflict_$f" ".git/NOTES_MERGE_WORKTREE/$f" ||
exit 1
done ) &&
# Verify that current notes tree (pre-merge) has not changed (m == y)
verify_notes y &&
verify_notes m &&
test "$(git rev-parse refs/notes/m)" = "$(cat pre_merge_y)"
'
# Move m while the merge is pending; --commit must then refuse to finalize.
cp expect_notes_w expect_notes_m
cp expect_log_w expect_log_m
test_expect_success 'reset notes ref m to somewhere else (w)' '
git update-ref refs/notes/m refs/notes/w &&
verify_notes m &&
test "$(git rev-parse refs/notes/m)" = "$(git rev-parse refs/notes/w)"
'
test_expect_success 'fail to finalize conflicting merge if underlying ref has moved in the meantime (m != NOTES_MERGE_PARTIAL^1)' '
# Resolve conflicts
cat >.git/NOTES_MERGE_WORKTREE/$commit_sha1 <<EOF &&
y and z notes on 1st commit
EOF
cat >.git/NOTES_MERGE_WORKTREE/$commit_sha4 <<EOF &&
y and z notes on 4th commit
EOF
# Fail to finalize merge
test_must_fail git notes merge --commit >output 2>&1 &&
# .git/NOTES_MERGE_* must remain
test -f .git/NOTES_MERGE_PARTIAL &&
test -f .git/NOTES_MERGE_REF &&
test -f .git/NOTES_MERGE_WORKTREE/$commit_sha1 &&
test -f .git/NOTES_MERGE_WORKTREE/$commit_sha2 &&
test -f .git/NOTES_MERGE_WORKTREE/$commit_sha3 &&
test -f .git/NOTES_MERGE_WORKTREE/$commit_sha4 &&
# Refs are unchanged
test "$(git rev-parse refs/notes/m)" = "$(git rev-parse refs/notes/w)" &&
test "$(git rev-parse refs/notes/y)" = "$(git rev-parse NOTES_MERGE_PARTIAL^1)" &&
test "$(git rev-parse refs/notes/m)" != "$(git rev-parse NOTES_MERGE_PARTIAL^1)" &&
# Mention refs/notes/m, and its current and expected value in output
grep -q "refs/notes/m" output &&
grep -q "$(git rev-parse refs/notes/m)" output &&
grep -q "$(git rev-parse NOTES_MERGE_PARTIAL^1)" output &&
# Verify that other notes refs has not changed (w, x, y and z)
verify_notes w &&
verify_notes x &&
verify_notes y &&
verify_notes z
'
test_expect_success 'resolve situation by aborting the notes merge' '
git notes merge --abort &&
# No .git/NOTES_MERGE_* files left
test_might_fail ls .git/NOTES_MERGE_* >output 2>/dev/null &&
test_cmp /dev/null output &&
# m has not moved (still == w)
test "$(git rev-parse refs/notes/m)" = "$(git rev-parse refs/notes/w)" &&
# Verify that other notes refs has not changed (w, x, y and z)
verify_notes w &&
verify_notes x &&
verify_notes y &&
verify_notes z
'
cat >expect_notes <<EOF
foo
bar
EOF
# Finalizing from inside the merge worktree must still work.
test_expect_success 'switch cwd before committing notes merge' '
git notes add -m foo HEAD &&
git notes --ref=other add -m bar HEAD &&
test_must_fail git notes merge refs/notes/other &&
(
cd .git/NOTES_MERGE_WORKTREE &&
echo "foo" > $(git rev-parse HEAD) &&
echo "bar" >> $(git rev-parse HEAD) &&
git notes merge --commit
) &&
git notes show HEAD > actual_notes &&
test_cmp expect_notes actual_notes
'
test_done
|
(function (global, factory) {
  // UMD wrapper: CommonJS first, then AMD, otherwise attach to the global object.
  if (typeof exports === 'object' && typeof module !== 'undefined') {
    module.exports = factory();
  } else if (typeof define === 'function' && define.amd) {
    define(factory);
  } else {
    global.TextTracking24 = factory();
  }
}(this, (function () { 'use strict';

  // Descriptor for the 24px "text--tracking" icon: a 32x32-viewBox SVG
  // rendered at 24x24 with a single path element.
  var descriptor = {
    elem: 'svg',
    attrs: {
      xmlns: 'http://www.w3.org/2000/svg',
      viewBox: '0 0 32 32',
      width: 24,
      height: 24,
    },
    content: [
      {
        elem: 'path',
        attrs: {
          d:
            'M25.29 19.29l-1.41 1.42L26.17 23H5.83l2.29-2.29-1.41-1.42L2 24l4.71 4.71 1.41-1.42L5.83 25h20.34l-2.29 2.29 1.41 1.42L30 24l-4.71-4.71zM26 17h2L23 3h-2l-5 14h2l1-3h6zm-6.33-5L22 5l2.33 7zM14 3l-4 12L6 3H4l5 14h2l5-14h-2z',
        },
      },
    ],
    name: 'text--tracking',
    size: 24,
  };

  return descriptor;
})));
|
#!/bin/bash
# Docker entrypoint: when the container is started with the logstash binary
# as the first argument, run it as an agent with the baked-in config file;
# any other command line is executed verbatim.
set -e

case "$1" in
  /opt/logstash/bin/logstash)
    exec "$1" agent -f /opt/conf/logstash.conf
    ;;
  *)
    exec "$@"
    ;;
esac
<filename>models/chims_models/states.js
const mongoose = require('mongoose');

// States Schema
// Mongoose model for state/region records: a numeric StateID plus a short
// code, human-readable description, and owning country.
// (`let` changed to `const`: neither binding is ever reassigned.)
const statesSchema = mongoose.Schema({
  StateID: {
    type: Number,
  },
  StateCode: {
    type: String,
  },
  StateDesc: {
    type: String,
  },
  Country: {
    type: String,
  },
});

// Export the compiled model; also kept in a module-local for reuse below.
const States = (module.exports = mongoose.model('States', statesSchema));
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.backward_codecs.lucene91;
import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.hnsw.HnswGraph;
import org.apache.lucene.util.hnsw.NeighborQueue;
/**
 * An {@link HnswGraph} where all nodes and connections are held in memory. This class is used to
 * construct the HNSW graph before it's written to the index.
 */
public final class Lucene91OnHeapHnswGraph extends HnswGraph {

  private final int maxConn; // max connections kept per node on each level
  private int numLevels; // the current number of levels in the graph
  private int entryNode; // the current graph entry node on the top level

  // Nodes by level expressed as the level 0's nodes' ordinals.
  // As level 0 contains all nodes, nodesByLevel.get(0) is null.
  private final List<int[]> nodesByLevel;

  // graph is a list of graph levels.
  // Each level is represented as List<NeighborArray> – nodes' connections on this level.
  // Each entry in the list has the top maxConn neighbors of a node. The nodes correspond to vectors
  // added to HnswBuilder, and the node values are the ordinals of those vectors.
  // Thus, on all levels, neighbors expressed as the level 0's nodes' ordinals.
  private final List<List<Lucene91NeighborArray>> graph;

  // KnnGraphValues iterator members
  private int upto; // cursor into cur's neighbor list, advanced by nextNeighbor()
  private Lucene91NeighborArray cur; // neighbor list selected by the last seek()
  /**
   * Creates a graph containing a single node (ordinal 0) present on levels
   * {@code 0..levelOfFirstNode}; node 0 starts out as the entry point.
   *
   * @param maxConn maximum number of connections kept per node on each level
   * @param levelOfFirstNode the top level assigned to the first inserted node
   */
  Lucene91OnHeapHnswGraph(int maxConn, int levelOfFirstNode) {
    this.maxConn = maxConn;
    this.numLevels = levelOfFirstNode + 1;
    this.graph = new ArrayList<>(numLevels);
    this.entryNode = 0;
    for (int i = 0; i < numLevels; i++) {
      graph.add(new ArrayList<>());
      // Typically with diversity criteria we see nodes not fully occupied;
      // average fanout seems to be about 1/2 maxConn.
      // There is some indexing time penalty for under-allocating, but saves RAM
      graph.get(i).add(new Lucene91NeighborArray(Math.max(32, maxConn / 4)));
    }
    this.nodesByLevel = new ArrayList<>(numLevels);
    nodesByLevel.add(null); // we don't need this for 0th level, as it contains all nodes
    for (int l = 1; l < numLevels; l++) {
      nodesByLevel.add(new int[] {0});
    }
  }
  /**
   * Returns the {@link Lucene91NeighborArray} of connections for the given node. (The previous
   * javadoc said {@link NeighborQueue}, which is not this method's return type.)
   *
   * @param level level of the graph
   * @param node the node whose neighbors are returned, represented as an ordinal on the level 0.
   */
  public Lucene91NeighborArray getNeighbors(int level, int node) {
    if (level == 0) {
      return graph.get(level).get(node);
    }
    // On upper levels, node ordinals are sparse: translate the level-0 ordinal
    // into this level's dense index via binary search over nodesByLevel.
    int nodeIndex = Arrays.binarySearch(nodesByLevel.get(level), 0, graph.get(level).size(), node);
    assert nodeIndex >= 0;
    return graph.get(level).get(nodeIndex);
  }
@Override
public int size() {
return graph.get(0).size(); // all nodes are located on the 0th level
}
/**
* Add node on the given level
*
* @param level level to add a node on
* @param node the node to add, represented as an ordinal on the level 0.
*/
public void addNode(int level, int node) {
if (level > 0) {
// if the new node introduces a new level, add more levels to the graph,
// and make this node the graph's new entry point
if (level >= numLevels) {
for (int i = numLevels; i <= level; i++) {
graph.add(new ArrayList<>());
nodesByLevel.add(new int[] {node});
}
numLevels = level + 1;
entryNode = node;
} else {
// Add this node id to this level's nodes
int[] nodes = nodesByLevel.get(level);
int idx = graph.get(level).size();
if (idx < nodes.length) {
nodes[idx] = node;
} else {
nodes = ArrayUtil.grow(nodes);
nodes[idx] = node;
nodesByLevel.set(level, nodes);
}
}
}
graph.get(level).add(new Lucene91NeighborArray(maxConn + 1));
}
@Override
public void seek(int level, int targetNode) {
cur = getNeighbors(level, targetNode);
upto = -1;
}
@Override
public int nextNeighbor() {
if (++upto < cur.size()) {
return cur.node[upto];
}
return NO_MORE_DOCS;
}
/**
* Returns the current number of levels in the graph
*
* @return the current number of levels in the graph
*/
@Override
public int numLevels() {
return numLevels;
}
/**
* Returns the graph's current entry node on the top level shown as ordinals of the nodes on 0th
* level
*
* @return the graph's current entry node on the top level
*/
@Override
public int entryNode() {
return entryNode;
}
@Override
public NodesIterator getNodesOnLevel(int level) {
if (level == 0) {
return new NodesIterator(size());
} else {
return new NodesIterator(nodesByLevel.get(level), graph.get(level).size());
}
}
}
|
CODE_DIR=/Users/dennisleon/code
alias java_ls='/usr/libexec/java_home -V 2>&1 | grep -E "\d.\d.\d[,_]" | cut -d , -f 1 | colrm 1 4 | grep -v Home'
function java_use() {
export JAVA_HOME=$(/usr/libexec/java_home -v $1)
export PATH=$JAVA_HOME/bin:$PATH
java -version
}
p()
{
local project_looking_for project_found
project_looking_for=$1
project_found=$(find $CODE_DIR -name "$project_looking_for" -type d -maxdepth 1)
cd $project_found
}
_p() { _files -W $CODE_DIR -/; }
compdef _p p
idea () {
local project_looking_for project_found
project_looking_for=$1
idea_version=15
if [[ -e `pwd`/$project_looking_for ]]
then
open -a IntelliJ\ IDEA\ $idea_version -e `pwd`/$project_looking_for
fi
if [[ -e `pwd`/pom.xml && $project_looking_for = "pom.xml" ]]
then
open -a IntelliJ\ IDEA\ $idea_version -e `pwd`/pom.xml
fi
project_found=$(find $CODE_DIR -name "$project_looking_for" -type d -maxdepth 1)
if [ -e $project_found/pom.xml ]
then
open -a IntelliJ\ IDEA\ $idea_version -e $project_found/pom.xml
else
idea_proj_file="`basename $project_found`.ipr"
open -a IntelliJ\ IDEA\ $idea_version -e $project_found/$idea_proj_file
fi
}
_idea() { _files -W $CODE_DIR -/; }
compdef _idea idea
|
<filename>test/testMakeDirs.py
# -*-coding:utf-8 -*-
"""
@author: yansheng
@file: testMakeDirs.py
@time: 2019/9/14
"""
import os
# dirpath = "./nihao";
# os.mkdir(dirpath);
def mkdirs(path):
    """Create directory *path* (and any missing parents), after sanitizing
    characters that are invalid in Windows file names.

    Replacements (documented invalid set: \\ / : * ? " < > |):
      ``: ? | *`` are replaced with ``-``; ``" < >`` are replaced with ``'``.
    ``\\`` and ``/`` are left alone since they act as path separators.

    Returns True if the directory was created, False if it already existed.
    """
    # Local import kept to match the original script's style.
    import os
    # Strip surrounding whitespace and any trailing backslashes.
    path = path.strip()
    path = path.rstrip("\\")
    # Replace characters Windows forbids in file names.
    # Fixed: '*' was listed as invalid in the original comment but was never
    # actually replaced.
    path = path.replace(":", "-").replace("?", "-").replace("|", "-").replace("*", "-")
    path = path.replace("<", "'").replace(">", "'").replace("\"", "'")
    # Create only if it does not already exist.
    isExists = os.path.exists(path)
    if not isExists:
        os.makedirs(path)
        print('文件夹\''+path + '\'创建成功!')
        return True
    else:
        print('文件夹\'' + path + '\'目录已存在!')
        return False
# Directory to create; the name deliberately contains characters that are
# invalid on Windows, exercising the sanitizing logic in mkdirs().
mkpath = "./你好/23#|"
# Create it (prints the outcome).
mkdirs(mkpath)
|
<gh_stars>1-10
package com.singularitycoder.folkdatabase.auth.model;
import com.google.firebase.firestore.Exclude;
/**
 * Model for one pending-approval user row, backed by a Firestore document.
 *
 * <p>All fields are stored as strings (including the timestamps). {@code docId} is the Firestore
 * document id and is excluded from serialization via {@link Exclude}. The no-arg constructor is
 * required by Firestore's automatic data mapping.
 */
public class AuthUserApprovalItem {
    @Exclude
    private String docId; // Firestore document id — not persisted inside the document itself
    private String zone;
    private String memberType;
    private String directAuthority;
    private String email;
    private String shortName;
    private String fullName;
    private String profileImageUrl;
    private String signUpStatus;
    private String redFlagStatus;
    private String approveRequestTimeStamp;      // human-readable timestamp (string)
    private String approveRequestEpochTimeStamp; // epoch timestamp, stored as a string
    // Required by Firestore deserialization.
    public AuthUserApprovalItem() {
    }
    public AuthUserApprovalItem(String docId, String zone, String memberType, String directAuthority, String email, String shortName, String fullName, String profileImageUrl, String signUpStatus, String redFlagStatus, String approveRequestTimeStamp, String approveRequestEpochTimeStamp) {
        this.docId = docId;
        this.zone = zone;
        this.memberType = memberType;
        this.directAuthority = directAuthority;
        this.email = email;
        this.shortName = shortName;
        this.fullName = fullName;
        this.profileImageUrl = profileImageUrl;
        this.signUpStatus = signUpStatus;
        this.redFlagStatus = redFlagStatus;
        this.approveRequestTimeStamp = approveRequestTimeStamp;
        this.approveRequestEpochTimeStamp = approveRequestEpochTimeStamp;
    }
    public void setDocId(String docId) {
        this.docId = docId;
    }
    public String getDocId() {
        return docId;
    }
    public String getZone() {
        return zone;
    }
    public String getMemberType() {
        return memberType;
    }
    public String getDirectAuthority() {
        return directAuthority;
    }
    public String getEmail() {
        return email;
    }
    public String getShortName() {
        return shortName;
    }
    public String getFullName() {
        return fullName;
    }
    public String getProfileImageUrl() {
        return profileImageUrl;
    }
    public String getSignUpStatus() {
        return signUpStatus;
    }
    public String getRedFlagStatus() {
        return redFlagStatus;
    }
    public String getApproveRequestTimeStamp() {
        return approveRequestTimeStamp;
    }
    public void setZone(String zone) {
        this.zone = zone;
    }
    public void setMemberType(String memberType) {
        this.memberType = memberType;
    }
    public void setDirectAuthority(String directAuthority) {
        this.directAuthority = directAuthority;
    }
    public void setEmail(String email) {
        this.email = email;
    }
    public void setShortName(String shortName) {
        this.shortName = shortName;
    }
    public void setFullName(String fullName) {
        this.fullName = fullName;
    }
    public void setProfileImageUrl(String profileImageUrl) {
        this.profileImageUrl = profileImageUrl;
    }
    public void setSignUpStatus(String signUpStatus) {
        this.signUpStatus = signUpStatus;
    }
    public void setRedFlagStatus(String redFlagStatus) {
        this.redFlagStatus = redFlagStatus;
    }
    public void setApproveRequestTimeStamp(String approveRequestTimeStamp) {
        this.approveRequestTimeStamp = approveRequestTimeStamp;
    }
    public String getApproveRequestEpochTimeStamp() {
        return approveRequestEpochTimeStamp;
    }
    public void setApproveRequestEpochTimeStamp(String approveRequestEpochTimeStamp) {
        this.approveRequestEpochTimeStamp = approveRequestEpochTimeStamp;
    }
}
|
unsorted_list = [14, 5, 6, 2, 8, 1, 10, 15, 9, 0, 4, 3, 11, 12, 7]
# In-place selection sort: each pass finds the smallest remaining element
# and swaps it into the next position of the sorted prefix.
n = len(unsorted_list)
for pos in range(n - 1):
    smallest = pos
    for candidate in range(pos + 1, n):
        if unsorted_list[candidate] < unsorted_list[smallest]:
            smallest = candidate
    unsorted_list[pos], unsorted_list[smallest] = (
        unsorted_list[smallest],
        unsorted_list[pos],
    )
# Show the result.
print("Sorted list:", unsorted_list)
# Output: Sorted list: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 14, 15]
#!/bin/bash
# Bioconda post-link script: download the data package from a list of mirrors,
# verify its md5, then install it into the conda R library.
FN="HIVcDNAvantWout03_1.26.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.10/data/experiment/src/contrib/HIVcDNAvantWout03_1.26.0.tar.gz"
  "https://bioarchive.galaxyproject.org/HIVcDNAvantWout03_1.26.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-hivcdnavantwout03/bioconductor-hivcdnavantwout03_1.26.0_src_all.tar.gz"
)
MD5="06ee9224f92da911dc82dd179091f5dc"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$STAGING"
TARBALL="$STAGING/$FN"
SUCCESS=0
for URL in "${URLS[@]}"; do
  curl "$URL" > "$TARBALL"
  [[ $? == 0 ]] || continue
  # Platform-specific md5sum checks.
  # Fixed: the Darwin branch used `else if ... fi fi` instead of `elif`,
  # which worked only by accident of fi-matching; variables are now quoted.
  if [[ $(uname -s) == "Linux" ]]; then
    if md5sum -c <<<"$MD5 $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done
if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi
# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
#! /bin/sh
# Copyright (C) 2011-2017 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Check that 'acconfig.h' is *not* automatically distributed when
# placed in a subdirectory.
# Related to automake bug#7819.
. test-init.sh
# Add a sub-directory Makefile so sub/acconfig.h sits below the top level.
cat >> configure.ac <<END
AC_CONFIG_FILES([sub/Makefile])
AC_OUTPUT
END
# Top-level makefile: a trap rule that fails if sub/acconfig.h is ever built,
# plus a check that the dist tree does not include it.
cat > Makefile.am <<'END'
SUBDIRS = sub
sub/acconfig.h:
	echo target $@ should not be built >&2; exit 1
check-local: distdir
	ls -l $(distdir)/sub
	test ! -f $(distdir)/sub/acconfig.h
END
mkdir sub
# Sub-directory makefile: same trap rule, plus checks that acconfig.h is not
# listed among the distributed files.
cat > sub/Makefile.am <<'END'
acconfig.h:
	echo target $@ should not be built >&2; exit 1
check-local:
	echo $(DISTFILES) | grep 'acconfig\.h' && exit 1; :
	echo $(DIST_COMMON) | grep 'acconfig\.h' && exit 1; :
END
: > sub/acconfig.h
$ACLOCAL
$AUTOMAKE
$AUTOCONF
./configure
$MAKE check
:
|
<gh_stars>1-10
#ifndef INCLUDED_ENGINE_ITEMS_SHOTGUN_WEAPON_SUB_SYSTEM_H
#define INCLUDED_ENGINE_ITEMS_SHOTGUN_WEAPON_SUB_SYSTEM_H
#include "engine/items/common_sub_system_includes.h"
namespace engine {
// Sub-system implementing shotgun-type weapon behavior. Also a SubSystemHolder,
// so it can own nested sub-systems of its own.
class ShotgunWeaponSubSystem : public SubSystem, public SubSystemHolder
{
public:
    DEFINE_SUB_SYSTEM_BASE( ShotgunWeaponSubSystem )
    ShotgunWeaponSubSystem();
    virtual void Init();
    // Per-frame update for the given actor; DeltaTime is the elapsed time step.
    virtual void Update( Actor& actor, double DeltaTime );
private:
    // References resolved elsewhere (presumably in the constructor/Init — confirm).
    Scene& mScene;
    Opt<WeaponItemSubSystem> mWeaponItemSubSystem;
    ActorFactory& mActorFactory;
};
} // namespace engine
#endif//INCLUDED_ENGINE_ITEMS_SHOTGUN_WEAPON_SUB_SYSTEM_H
|
#include <iostream>
// Declaration of the base class for reference counted objects
// Base class for reference counted objects. Newly constructed objects start
// with a reference count of 1 (the creator's reference).
struct cef_base_ref_counted_t {
  int ref_count;
  cef_base_ref_counted_t() : ref_count(1) {}
  virtual ~cef_base_ref_counted_t() {
    std::cout << "Object deleted" << std::endl;
  }
};
// Increment the reference count of an object (no-op on null).
extern "C" void add_ref(cef_base_ref_counted_t* base) {
  if (base) {
    base->ref_count++;
  }
}
// Decrement the reference count and destroy the object when it reaches zero.
// Fixed: the original example had no release counterpart and deleted the
// object directly while its count was still 3, contradicting the
// "zero reference count" comment.
extern "C" void release(cef_base_ref_counted_t* base) {
  if (base && --base->ref_count == 0) {
    delete base;
  }
}
int main() {
  // Example usage: one reference from construction plus two add_ref calls,
  // balanced by three release calls.
  cef_base_ref_counted_t* obj = new cef_base_ref_counted_t();
  add_ref(obj);  // Increment reference count (now 2)
  add_ref(obj);  // Increment reference count again (now 3)
  release(obj);
  release(obj);
  release(obj);  // Object deleted here, when the count reaches zero
  return 0;
}
import { LocalizationService } from './services/localizationservice';
import { OwnerService } from './services/ownerservice';
import { RouterConfiguration, Router } from 'aurelia-router';
import { autoinject } from 'aurelia-framework';
@autoinject
export class App {
  private router: Router;
  /**
   * @param owner - service exposing the CV owner's details (used in the route title)
   * @param localize - localization lookup service
   */
  constructor(private owner:OwnerService, private localize: LocalizationService) {
  }
  /** Aurelia router hook: sets the localized document title and the route map. */
  configureRouter(config: RouterConfiguration, router: Router): void {
    this.router = router;
    config.title = this.localize.Get('cv-title');
    config.map([
      { route: ['', 'home'], name: 'home', moduleId: 'home/home', title: `${this.owner.Get().Name}` }
    ]);
  }
}
|
class DatasetProcessor:
    """Bundle describing a dataset: a reader, a decoder, the sample count,
    per-item descriptions, and arbitrary extra keyword options.

    ``kwargs['data_sources']`` is always present: it starts as an empty list
    and is filled in via :meth:`add_data_sources`.
    """

    def __init__(self, reader, decoder, num_samples, items_to_descriptions, **kwargs):
        self.reader = reader
        self.decoder = decoder
        self.num_samples = num_samples
        self.items_to_descriptions = items_to_descriptions
        kwargs['data_sources'] = []
        self.kwargs = kwargs

    def add_data_sources(self, data_sources):
        """Record the dataset's list of data sources."""
        self.kwargs['data_sources'] = data_sources
# Example usage
# NOTE(review): DataDecoder is not defined in this file — running this example
# as-is raises NameError; presumably it is imported elsewhere. Confirm.
data_processor = DatasetProcessor(reader='TextLineReader', decoder=DataDecoder(), num_samples=1000, items_to_descriptions={'images': 'Image data', 'labels': 'Class labels'})
data_processor.add_data_sources(['source1', 'source2'])
print(data_processor.reader) # Output: TextLineReader
print(data_processor.kwargs['data_sources']) # Output: ['source1', 'source2']
# from prefect import Flow, task
# from prefect.serialization.flow import FlowSchema
# @task
# def print_something():
# print('ok')
# f = Flow("ex", tasks=[print_something])
# f.run() # prints ok
# s = FlowSchema()
# f2 = s.load(f.serialize())
# f2.tasks # has print_something task
# f2.run() # doesn't print
import os
# Fall back to the standard local Docker daemon socket when DOCKER_HOST is
# unset. Fixed: the default was previously wrapped in stray angle brackets
# ("<unix://...>"), which is not a valid socket URI.
bob = os.environ.get("DOCKER_HOST", "unix://var/run/docker.sock")
print(bob)
//
// UIButtonExtension.h
// UIButtonExtension
//
// Created by <NAME> on 9/28/20.
//
#import <Foundation/Foundation.h>
//! Project version number for UIButtonExtension.
FOUNDATION_EXPORT double UIButtonExtensionVersionNumber;
//! Project version string for UIButtonExtension.
FOUNDATION_EXPORT const unsigned char UIButtonExtensionVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <UIButtonExtension/PublicHeader.h>
|
#!/bin/bash
# install vimrc file
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
if [ -f "${DIR}/.vimrc" ]
then
    ln -s "${DIR}/.vimrc" "$HOME/.vimrc"
fi
# install pathogen
mkdir -p ~/.vim/autoload ~/.vim/bundle && curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim
# install badwolf and zenburn colorscheme
git clone https://github.com/sjl/badwolf.git ~/.vim/bundle/badwolf
git clone https://github.com/jnurmine/Zenburn.git ~/.vim/bundle/Zenburn
# install ctags and tagbar
sudo apt-get install ctags -y
git clone https://github.com/majutsushi/tagbar.git ~/.vim/bundle/tagbar
# install YouCompleteMe autocomplete
# Fixed: added -y so this call is non-interactive like the apt-get call above.
sudo apt install -y build-essential cmake python3-dev
git clone https://github.com/Valloric/YouCompleteMe.git ~/.vim/bundle/YouCompleteMe
pushd ~/.vim/bundle/YouCompleteMe
git submodule update --init --recursive
python3 install.py
# to get C semantic completion install libclang-7 and modify previous line to: python3 install.py --clang-completer
popd
# install vim-airline
git clone https://github.com/vim-airline/vim-airline ~/.vim/bundle/vim-airline
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
// Guard so initialization runs at most once, even if this file is included twice.
var portfolios_index_callDocumentReady_called = false;
// DataTables instance for #portfolio-table (assigned in createPortfolioTable).
var portfolioTableAjax = "";
// jQuery-UI dialog for portfolio preferences (assigned in bindPreferences).
var portfolioPrefsDialog = "";
$(document).ready(function () {
    if (!portfolios_index_callDocumentReady_called)
    {
        portfolios_index_callDocumentReady_called = true;
        // #as_window holds "true" when the page is embedded as a child window;
        // in that case page initialization is skipped.
        if ($("#as_window").text() == "true")
        {
            // alert("it is a window");
        }
        else
        {
            portfolios_index_callDocumentReady();
        }
    }
});
// One-time page initialization: load shared assets, build the portfolio
// DataTable, and wire up the toolbar links (new portfolio, preferences).
function portfolios_index_callDocumentReady() {
    requireCss("tables.css");
    require("portfolios/shared.js");
    // Required scripts (loaded for this js file)
    //
    // reatePortfolioDialog();
    // $("#loader_progress").show();
    // portfolioTableOld=$('#portfolio-table-old').dataTable({
    // "aLengthMenu": [[-1, 10, 25, 50], ["All", 10, 25, 50]]
    // });
    // Show a spinner while the table is constructed.
    $("#loader_progress").show();
    createPortfolioTable();
    $("#loader_progress").hide();
    //
    // $('#portfolio-table .portfolio-row').bind('click', function(){
    // $(this).addClass('row_selected');
    // portfolioID=$(this).find("#portfolio-id").text().strip();
    // window.location = "/portfolio/edit/"+portfolioID;
    // });
    // $('.delete-portfolio-item').bind('ajax:success', function(xhr, data, status){
    // $("#loader_progress").show();
    // theTarget=this.parentNode.parentNode;
    // var aPos = portfolioTableAjax.fnGetPosition( theTarget );
    // portfolioTableAjax.fnDeleteRow(aPos);
    // portfolioTableAjax.fnDraw();
    // $("#loader_progress").hide();
    // });
    // $('.delete-portfolio-item').bind('ajax:error', function(xhr, data, error){
    // alert("Error:" + JSON.parse(data.responseText)["error"]);
    // $("#loader_progress").hide();
    //
    // });
    // $(".edit_portfolio").bind('ajax:success', function (xhr, data, status) {
    // $('#edit-password-dialog').dialog('close');
    // });
    // createPasswordDialog();
    // createPortfolioDialog();
    // Wire up toolbar actions.
    bindNewPortfolio();
    $("a.button-link").button();
    bindPreferences();
}
/**
 * Confirm with the user, then delete the given portfolio via AJAX and redraw
 * the portfolio table.
 *
 * Fixed: the URL was built as '/portfolios/delete_ajax/?id=' + +portfolio_id —
 * the accidental unary plus turns whitespace-padded ids (as produced by
 * jQuery .text()) into NaN; it now matches the form used in
 * createPortfolioDialog.
 *
 * @param {string|number} portfolio_id - id of the portfolio to delete
 */
function deletePortfolio(portfolio_id)
{
    var answer = confirm('Are you sure you want to delete this?');
    if (answer) {
        $.ajax({
            url: '/portfolios/delete_ajax?id=' + portfolio_id,
            success: function (data)
            {
                setUpPurrNotifier("Notice", "Item Successfully Deleted.");
                portfolioTableAjax.fnDraw();
            }
        });
    }
}
//function editPortfolio(portfolio_id)
//{
// var url = '/portfolios/edit/' + portfolio_id + '?request_type=window&window_type=iframe';
// $('iframe#portfolios-app-id', window.parent.document).attr("src", url);
//}
//function portfolioeditClickBinding(selector) {
// // selectors .edit-portfolio-item, tr.portfolio-row
//
// $(selector).unbind("click").one("click", function () {
// console.log($(this).find('#portfolio-id').text());
// var portfolio_id = $(this).find('#portfolio-id').text();
// var is_iframe = $("application-space").length > 0
//
// var url = '/portfolios/edit/' + portfolio_id + '?request_type=window&window_type=iframe';
// $(this).effect("highlight", {color: "#669966"}, 1000);
// if (is_iframe) {
// $('iframe#portfolios-app-id', window.parent.document).attr("src", url);
// portfolioeditClickBinding(this);
// }
// else
// {
// window.location = url;
//
// }
//
// });
//}
/**
 * Placeholder for loading the portfolio screen.
 *
 * Fixed: the original body was the bare expression `portfolio - action - area`,
 * which throws a ReferenceError whenever the function is called (none of those
 * identifiers exist). Kept as an explicit no-op stub until the intended
 * behavior (presumably populating a "portfolio-action-area" element) is known.
 */
function loadPortfolioScreen() {
    // TODO: implement (see note above).
}
// Configure the (initially closed) #edit-portfolio-dialog jQuery-UI dialog.
// "Delete" removes the portfolio whose id is embedded in the dialog's markup;
// "Ok" closes the dialog and refreshes the table.
function createPortfolioDialog() {
    $('#edit-portfolio-dialog').dialog({
        autoOpen: false,
        width: 455,
        height: 625,
        modal: true,
        buttons: {
            "Delete": function () {
                // The portfolio id lives in a hidden element inside the dialog content.
                portfolio_id = $(".m-content div#portfolio-id").text().trim();
                if (confirm("Are you sure you want to delete this portfolio?"))
                {
                    $(this).dialog("close");
                    $.ajax({
                        url: '/portfolios/delete_ajax?id=' + portfolio_id,
                        success: function (data)
                        {
                            portfolioTableAjax.fnDraw();
                        }
                    });
                }
                else
                {
                }
            },
            "Ok": function () {
                $(this).dialog("close");
                portfolioTableAjax.fnDraw();
            }
        }
    });
}
// Build the server-side DataTable for #portfolio-table, persisting its state
// (page, sorting, filters) in localStorage keyed by the current path.
function createPortfolioTable() {
    portfolioTableAjax = $('#portfolio-table').dataTable({
        "iDisplayLength": 25,
        "aLengthMenu": [[25, 50, 100], [25, 50, 100]],
        "bStateSave": true,
        "fnStateSave": function (oSettings, oData) {
            localStorage.setItem('DataTables_portfolios_' + window.location.pathname, JSON.stringify(oData));
        },
        "fnStateLoad": function (oSettings) {
            return JSON.parse(localStorage.getItem('DataTables_portfolios_' + window.location.pathname));
        },
        "bProcessing": true,
        "bServerSide": true,
        "aaSorting": [[1, "asc"]],
        "sAjaxSource": "/portfolios/portfolio_table",
        "fnRowCallback": function (nRow, aData, iDisplayIndex, iDisplayIndexFull) {
            // Tag every row so click/delete bindings can find it.
            $(nRow).addClass('portfolio-row');
            $(nRow).addClass('gradeA');
            return nRow;
        },
        "fnInitComplete": function () {
            // $(".best_in_place").best_in_place();
        },
        "fnDrawCallback": function () {
            // Re-attach in-place editing and row bindings after every redraw.
            $(".best_in_place").best_in_place();
            //portfolioeditClickBinding(".edit-portfolio-item");
            // NOTE(review): portfolioeditClickBinding is commented out in this
            // file; presumably it is provided by portfolios/shared.js — confirm.
            portfolioeditClickBinding("tr.portfolio-row");
            bindDeletePortfolio();
        }
    });
}
/**
 * Wire up the "new portfolio" link: show a progress cursor while the AJAX
 * request runs, then redraw the table and notify the user of the result.
 *
 * Fixed: both notification messages previously ended with a stray
 * apostrophe ("...!'" instead of "...!").
 */
function bindNewPortfolio() {
    $('a#new-portfolio').unbind().bind('ajax:beforeSend', function (e, xhr, settings) {
        xhr.setRequestHeader('accept', '*/*;q=0.5, text/html, ' + settings.accepts.html);
        $("body").css("cursor", "progress");
    }).bind('ajax:success', function (xhr, data, status) {
        $("body").css("cursor", "default");
        portfolioTableAjax.fnDraw();
        setUpPurrNotifier("Notice", "New Portfolio Created!");
    }).bind('ajax:error', function (evt, xhr, status, error) {
        setUpPurrNotifier("Error", "Portfolio Creation Failed!");
    });
}
//function bindNewPortfolio() {
// $('a#new-portfolio').bind('ajax:beforeSend', function (evt, xhr, settings) {
// // alert("ajax:before");
// console.log('ajax:before');
// console.log(evt);
// console.log(xhr);
// console.log(settings);
//
// $("#loader_progress").show();
//
//
//
// }).bind('ajax:success', function (evt, data, status, xhr) {
// // alert("ajax:success");
// console.log('ajax:success');
// console.log(evt);
// console.log("date:" + data + ":");
//
// $("#loader_progress").show();
// console.log(data.id);
// editPortfolio(data.id);
//
// console.log(status);
// console.log(xhr);
//
// }).bind('ajax:error', function (evt, xhr, status, error) {
// // alert("ajax:failure");
// console.log('ajax:error');
// console.log(evt);
// console.log(xhr);
// console.log(status);
// console.log(error);
//
// alert("Error:" + JSON.parse(data.responseText)["error"]);
// $("#loader_progress").hide();
//
//
// }).bind('ajax:complete', function (evt, xhr, status) {
// // alert("ajax:complete");
// console.log('ajax:complete');
// console.log(evt);
// console.log(xhr);
// // console.log(status);
// $("#loader_progress").hide();
//
//
// });
//
//}
/**
 * Attach click handlers to the per-row delete buttons: each click looks up
 * the row's portfolio id and delegates to deletePortfolio(). Returning false
 * suppresses the default link behavior.
 */
function bindDeletePortfolio() {
    $(".delete-portfolio-item").on("click", function (e) {
        var row = $(this).parent().parent().parent();
        var portfolio_id = row.find('#portfolio-id').text();
        deletePortfolio(portfolio_id);
        return false;
    });
}
// ************************************
//
// Create Edit Dialog Box
//
// ************************************
//function createAppDialog(theContent) {
//
//
// if ($("#app-dialog").length == 0)
// {
// var dialogContainer = "<div id='app-dialog'></div>";
// $("#portfolio").append($(dialogContainer));
// }
// else
// {
// dialogContainer = $("#app-dialog");
// }
// // $('#app-dialog').html(theContent);
// theContent = '<input type="hidden" autofocus="autofocus" />' + theContent
// theAppDialog = $('#app-dialog').dialog({
// autoOpen: false,
// modal: true,
// buttons: {
// "Close": function () {
// // Do what needs to be done to complete
// $(this).dialog("close");
// }
// },
// close: function (event, ui) {
// $('#app-dialog').html("");
// $('#app-dialog').dialog("destroy");
// },
// open: function (event, ui)
// {
// popUpAlertifExists();
// }
//
//
// });
//
// $('#app-dialog').html(theContent);
//
// theHeight = $('#app-dialog #dialog-height').text() || "500";
// theWidth = $('#app-dialog #dialog-width').text() || "500";
// theTitle = $('#app-dialog #dialog-name').text() || "Edit";
//
// theAppDialog.dialog({
// title: theTitle,
// width: theWidth,
// height: theHeight
// });
//
// return(theAppDialog)
//}
/**
 * Wire up the preferences link: fetch the preferences markup via AJAX, show
 * it in a freshly created dialog, and initialize the preferences script.
 *
 * Fixed: the error message previously ended with a stray apostrophe.
 */
function bindPreferences() {
    $('a#portfolio-prefs').unbind().bind('ajax:beforeSend', function (e, xhr, settings) {
        xhr.setRequestHeader('accept', '*/*;q=0.5, text/html, ' + settings.accepts.html);
        $("body").css("cursor", "progress");
    }).bind('ajax:success', function (xhr, data, status) {
        $("body").css("cursor", "default");
        portfolioPrefsDialog = createAppDialog(data, "portfolio-prefs-dialog");
        portfolioPrefsDialog.dialog('open');
        portfolioPrefsDialog.dialog({
            // Tear the dialog down completely on close so it can be rebuilt fresh.
            close: function (event, ui) {
                portfolioPrefsDialog.html("");
                portfolioPrefsDialog.dialog("destroy");
            }
        });
        require("portfolios/portfolio_preferences.js");
        portfolio_preferences_callDocumentReady();
    }).bind('ajax:error', function (evt, xhr, status, error) {
        setUpPurrNotifier("Error", "Prefs could not be opened!");
    });
}
#! /usr/bin/env bash
# Copyright 2014 Uno authors (see AUTHORS)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Start: Resolve Script Directory
SOURCE="${BASH_SOURCE[0]}"
while [[ -h "$SOURCE" ]]; do # resolve $SOURCE until the file is no longer a symlink
   impl="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
   SOURCE="$(readlink "$SOURCE")"
   [[ $SOURCE != /* ]] && SOURCE="$impl/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
impl="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
bin="$( cd -P "$( dirname "$impl" )" && pwd )"
# Stop: Resolve Script Directory
# Determine FLUO_DEV - Use env variable set by user. If none set, calculate using bin dir
FLUO_DEV="${FLUO_DEV:-$( cd -P "${bin}"/.. && pwd )}"
export FLUO_DEV
if [[ -z "$FLUO_DEV" || ! -d "$FLUO_DEV" ]]
then
  echo "FLUO_DEV=$FLUO_DEV is not a valid directory. Please make sure it exists"
  exit 1
fi
# Remember the shell's pre-existing values so we can detect conflicts with env.sh below.
HP=$HADOOP_PREFIX
HC=$HADOOP_CONF_DIR
ZH=$ZOOKEEPER_HOME
SH=$SPARK_HOME
AH=$ACCUMULO_HOME
FH=$FLUO_HOME
# Load env configuration
if [[ -f "$FLUO_DEV/conf/env.sh" ]]; then
  source "$FLUO_DEV"/conf/env.sh
else
  # Fall back to the example config; stay quiet for the "version" and "env" subcommands.
  if [[ ! "version env" =~ $1 ]]; then echo "WARNING: uno is using default configuration at $FLUO_DEV/conf/env.sh.example"; fi
  source "$FLUO_DEV"/conf/env.sh.example
fi
# Confirm that hadoop, accumulo, and zookeeper env variables are not set
if [[ ! "version env" =~ $1 ]]; then
  if [[ -n "$HP" && "$HP" != "$HADOOP_PREFIX" ]]; then
    echo "HADOOP_PREFIX in your shell env '$HP' needs to match your uno env.sh '$HADOOP_PREFIX'"
    exit 1
  fi
  if [[ -n "$HC" && "$HC" != "$HADOOP_CONF_DIR" ]]; then
    echo "HADOOP_CONF_DIR in your shell env '$HC' needs to match your uno env.sh '$HADOOP_CONF_DIR'"
    exit 1
  fi
  if [[ -n "$ZH" && "$ZH" != "$ZOOKEEPER_HOME" ]]; then
    echo "ZOOKEEPER_HOME in your shell env '$ZH' needs to match your uno env.sh '$ZOOKEEPER_HOME'"
    exit 1
  fi
  if [[ -n "$SH" && "$SH" != "$SPARK_HOME" ]]; then
    echo "SPARK_HOME in your shell env '$SH' needs to match your uno env.sh '$SPARK_HOME'"
    exit 1
  fi
  if [[ -n "$AH" && "$AH" != "$ACCUMULO_HOME" ]]; then
    echo "ACCUMULO_HOME in your shell env '$AH' needs to match your uno env.sh '$ACCUMULO_HOME'"
    exit 1
  fi
  if [[ -n "$FH" && "$FH" != "$FLUO_HOME" ]]; then
    echo "FLUO_HOME in your shell env '$FH' needs to match your uno env.sh '$FLUO_HOME'"
    exit 1
  fi
fi
# Confirm that env variables were set correctly
if [[ -n "$FLUO_REPO" && ! -d "$FLUO_REPO" ]]; then
  echo "FLUO_REPO=$FLUO_REPO is not a valid directory. Please make sure it exists"
  exit 1
fi
if [[ -n "$ACCUMULO_REPO" && ! -d "$ACCUMULO_REPO" ]]; then
  echo "ACCUMULO_REPO=$ACCUMULO_REPO is not a valid directory. Please make sure it exists"
  exit 1
fi
if [[ -z "$INSTALL" ]]; then
  echo "INSTALL=$INSTALL needs to be set in env.sh"
  exit 1
fi
if [[ ! -d "$INSTALL" ]]; then
  mkdir -p "$INSTALL"
fi
# Fail fast (with a clear message) if any required env.sh variable is unset.
: "${DATA_DIR:?"DATA_DIR is not set in env.sh"}"
: "${FLUO_VERSION:?"FLUO_VERSION is not set in env.sh"}"
: "${HADOOP_VERSION:?"HADOOP_VERSION is not set in env.sh"}"
: "${ZOOKEEPER_VERSION:?"ZOOKEEPER_VERSION is not set in env.sh"}"
: "${ACCUMULO_VERSION:?"ACCUMULO_VERSION is not set in env.sh"}"
: "${DOWNLOADS:?"DOWNLOADS is not set in env.sh"}"
: "${ACCUMULO_TARBALL:?"ACCUMULO_TARBALL is not set in env.sh"}"
: "${FLUO_TARBALL:?"FLUO_TARBALL is not set in env.sh"}"
: "${HADOOP_TARBALL:?"HADOOP_TARBALL is not set in env.sh"}"
: "${ZOOKEEPER_TARBALL:?"ZOOKEEPER_TARBALL is not set in env.sh"}"
: "${FLUO_HOME:?"FLUO_HOME is not set in env.sh"}"
: "${ZOOKEEPER_HOME:?"ZOOKEEPER_HOME is not set in env.sh"}"
: "${HADOOP_PREFIX:?"HADOOP_PREFIX is not set in env.sh"}"
: "${ACCUMULO_HOME:?"ACCUMULO_HOME is not set in env.sh"}"
: "${ACCUMULO_INSTANCE:?"ACCUMULO_INSTANCE is not set in env.sh"}"
: "${ACCUMULO_USER:?"ACCUMULO_USER is not set in env.sh"}"
: "${ACCUMULO_PASSWORD:?"ACCUMULO_PASSWORD is not set in env.sh"}"
: "${START_SPARK_HIST_SERVER:?"START_SPARK_HIST_SERVER is not set in env.sh"}"
: "${LOGS_DIR:?"LOGS_DIR is not set in env.sh"}"
: "${ACCUMULO_LOG_DIR:?"ACCUMULO_LOG_DIR is not set in env.sh"}"
: "${HADOOP_LOG_DIR:?"HADOOP_LOG_DIR is not set in env.sh"}"
: "${YARN_LOG_DIR:?"YARN_LOG_DIR is not set in env.sh"}"
: "${ZOO_LOG_DIR:?"ZOO_LOG_DIR is not set in env.sh"}"
# Required tools.
hash shasum 2>/dev/null || { echo >&2 "shasum must be installed & on PATH. Aborting."; exit 1; }
hash sed 2>/dev/null || { echo >&2 "sed must be installed & on PATH. Aborting."; exit 1; }
# BSD sed (macOS) needs an explicit backup suffix for in-place edits.
if [[ "$OSTYPE" == "darwin"* ]]; then
  export SED="sed -i .bak"
else
  export SED="sed -i"
fi
|
#include <iostream>
#include <ctime>
#include <cstdlib>
/*
 * Print one pseudo-random integer in [1, 10000], seeding the generator
 * from the current time so each run differs.
 */
int main()
{
    std::srand(std::time(NULL));
    const int randomNumber = std::rand() % 10000 + 1;
    std::cout << randomNumber << std::endl;
    return 0;
}
<reponame>bigint/lute-drop<filename>hardhat.config.ts
// Fixed: "@nomiclabs/hardhat-waffle" was imported twice.
import "@nomiclabs/hardhat-etherscan";
import "@nomiclabs/hardhat-waffle";
import "@nomiclabs/hardhat-ethers";
import "@typechain/hardhat";
import "hardhat-gas-reporter";
import "solidity-coverage";
import dotenv from "dotenv";
import { task } from "hardhat/config";
import { deployLocal, deployTestnet, deployMainnet } from "./scripts/deploy";

// Load environment variables (RPC URLs, mnemonics, API keys) from .env.
dotenv.config();

// `npx hardhat accounts` — print the configured signer addresses.
task("accounts", "Prints the list of accounts", async (args, hre) => {
  const accounts = await hre.ethers.getSigners();
  for (const account of accounts) {
    console.log(account.address);
  }
});
// Deployment entry points, one per target environment.
task("deploy:local", "Deploys contracts", async (args, hre) => {
  await deployLocal(hre.ethers);
});
task("deploy:testnet", "Deploys contracts", async (args, hre) => {
  await deployTestnet(hre.ethers);
});
task("deploy:mainnet", "Deploys contracts", async (args, hre) => {
  await deployMainnet(hre.ethers);
});
/**
* @type import('hardhat/config').HardhatUserConfig
*/
module.exports = {
  solidity: {
    version: "0.8.9",
    settings: {
      // Optimize for a moderate number of runs (deploy-cost / call-cost tradeoff).
      optimizer: {
        enabled: true,
        runs: 150,
      },
    },
  },
  networks: {
    hardhat: {
      initialBaseFeePerGas: 0,
      forking: {
        enabled: false,
        // NOTE(review): used as the forking RPC URL, but the variable name
        // suggests it holds only an API key — confirm it contains a full URL.
        url: process.env.ALCHEMY_API_KEY,
      },
    },
    // Remote networks: RPC URL + mnemonic-derived accounts, all from .env.
    rinkeby: {
      url: process.env.RINKEBY_URL || "",
      accounts: { mnemonic: process.env.RINKEBY_MNEMONIC },
    },
    mumbai: {
      url: process.env.MUMBAI_URL || "",
      accounts: { mnemonic: process.env.MUMBAI_MNEMONIC },
    },
    polygon: {
      url: process.env.POLYGON_URL || "",
      accounts: { mnemonic: process.env.POLYGON_MNEMONIC },
    },
  },
  // Gas reporting is opt-in via the REPORT_GAS env var.
  gasReporter: {
    enabled: process.env.REPORT_GAS !== undefined,
    currency: "USD",
  },
  etherscan: {
    apiKey: process.env.ETHERSCAN_API_KEY,
  },
};
|
# Prepare the PostgreSQL data directory with the ownership/permissions initdb requires.
mkdir -p /pgdata/data
chown postgres:postgres /pgdata/data
chmod 700 /pgdata/data
# Initialize the cluster as the postgres user, then overlay our config files.
/scripts/su-exec postgres /usr/bin/initdb -D /pgdata/data
/bin/cp -f /scripts/config/* /pgdata/data
chown -R postgres:postgres /pgdata/data
# NOTE(review): `su - postgres` starts an interactive shell; the exports below
# only run after that shell exits (i.e. as the original user) — confirm intended.
su - postgres
export POD_NAME="citus-0"
export POD_NAMESPACE="default"
export POD_GROUP="citus"
export SET_SIZE="2"
# Run the citus container with the same pod-identity environment.
docker run -e POD_NAME="citus-0" \
    -e POD_NAMESPACE="default" \
    -e POD_GROUP="citus" \
    -e SET_SIZE="2" \
    jberkus/citus:0.4
|
#!/bin/bash
#------------------------------------------------------------------------
# Utility methods
#
# Print an error to stderr and abort the script.
fatal()
{
  echo "credentials-local.sh: fatal: $1" 1>&2
  exit 1
}
# Print an informational message to stderr.
info()
{
  echo "credentials-local.sh: info: $1" 1>&2
}
# Both Nexus credentials must be supplied by the environment.
if [ -z "${NYPL_NEXUS_USER}" ]
then
  fatal "NYPL_NEXUS_USER is not defined"
fi
if [ -z "${NYPL_NEXUS_PASSWORD}" ]
then
  fatal "NYPL_NEXUS_PASSWORD is not defined"
fi
#------------------------------------------------------------------------
# Copy credentials into place.
#
info "installing keystore"
cp -v ".ci/credentials/APK Signing/nypl-keystore.jks" \
  "release.jks" || exit 1
#------------------------------------------------------------------------
# Add the NYPL nexus properties to the project properties.
#
mkdir -p "${HOME}/.gradle" ||
  fatal "could not create ${HOME}/.gradle"
cat ".ci/credentials/APK Signing/nypl-keystore.properties" >> "${HOME}/.gradle/gradle.properties" ||
  fatal "could not read keystore properties"
# Resolve per-app credential directories and verify they exist.
CREDENTIALS_PATH=$(realpath ".ci/credentials") ||
  fatal "could not resolve credentials path"
SIMPLYE_CREDENTIALS="${CREDENTIALS_PATH}/SimplyE/Android"
OPENEBOOKS_CREDENTIALS="${CREDENTIALS_PATH}/OpenEbooks/Android"
if [ ! -d "${SIMPLYE_CREDENTIALS}" ]
then
  fatal "${SIMPLYE_CREDENTIALS} does not exist, or is not a directory"
fi
if [ ! -d "${OPENEBOOKS_CREDENTIALS}" ]
then
  fatal "${OPENEBOOKS_CREDENTIALS} does not exist, or is not a directory"
fi
# Both app modules get a copy of the Play Store API key.
cp "${CREDENTIALS_PATH}/PlayStore/play_store_api_key.json" "simplified-app-simplye/play_store_api_key.json" ||
  fatal "could not copy Play Store key"
cp "${CREDENTIALS_PATH}/PlayStore/play_store_api_key.json" "simplified-app-openebooks/play_store_api_key.json" ||
  fatal "could not copy Play Store key"
# Append the build properties Gradle needs (Nexus auth, asset locations).
cat >> "${HOME}/.gradle/gradle.properties" <<EOF
org.librarysimplified.drm.enabled=true
org.librarysimplified.nexus.depend=true
org.librarysimplified.nexus.username=${NYPL_NEXUS_USER}
org.librarysimplified.nexus.password=${NYPL_NEXUS_PASSWORD}
org.librarysimplified.app.assets.openebooks=${OPENEBOOKS_CREDENTIALS}
org.librarysimplified.app.assets.simplye=${SIMPLYE_CREDENTIALS}
EOF
#------------------------------------------------------------------------
# Addding slack webhook to environment
SLACK_WEBHOOK_URL=$(<.ci/credentials/SimplyE/slack-webhook.url) ||
  fatal "Slack Webhook url not found."
cat >> ".env" <<EOF
SLACK_WEBHOOK_URL="${SLACK_WEBHOOK_URL}"
EOF
|
# Imports
import numpy as np
import tensorflow as tf

tf.random.set_random_seed(42)

# Input: batches of 28x28 grayscale images.
x = tf.placeholder(tf.float32, shape=[None, 28, 28])

# tf.layers.conv2d requires a rank-4 NHWC tensor; the original code fed the
# rank-3 placeholder directly, which raises a shape error at graph build
# time. Add the missing channel dimension explicitly.
x_image = tf.reshape(x, [-1, 28, 28, 1])

# Network
conv1 = tf.layers.conv2d(x_image, filters=32, kernel_size=3, strides=2,
                         padding='same', activation=tf.nn.relu)
pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=2, strides=2)
flatten = tf.layers.flatten(inputs=pool1)
d1 = tf.layers.dense(flatten, units=128, activation=tf.nn.relu)
logits = tf.layers.dense(d1, units=2)  # two-class classifier head

# Output: per-class probabilities and the argmax class index.
probabilities = tf.nn.softmax(logits)
predictions = tf.argmax(probabilities, axis=1)

# Loss and training: sparse cross-entropy over integer labels.
labels = tf.placeholder(tf.int64, shape=[None])
loss = tf.reduce_mean(
    tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=labels,
        logits=logits))
optimizer = tf.train.AdamOptimizer()
train_op = optimizer.minimize(loss)

# Evaluation: fraction of predictions matching the labels.
metric_accuracy = tf.reduce_mean(
    tf.cast(tf.equal(labels, predictions), tf.float32))
# Extract the millisecond timings ("; time ... : NNNms") from the log file
# given as $1 and report their sum, count and average.
x=$(grep "; time" "$1" | sed 's/^.*: //' | sed 's/ms.*//')

# Initialise the accumulators explicitly: the original relied on an unset
# $sum, which breaks under "set -u" and prints empty when nothing matches.
sum=0
counter=0

for i in $x
do
  # Only accept purely numeric tokens; anything else is skipped.
  if [[ "$i" =~ ^[0-9]+$ ]]
  then
    ((sum += i))
    ((counter += 1))
  fi
done

echo "sum of all time: $sum ms"
echo "number of requests: $counter"

# Guard against division by zero when the log contained no timings.
if [[ $counter -gt 0 ]]
then
  echo "average latency: $((sum / counter)) ms"
else
  echo "average latency: n/a (no requests found)"
fi
|
using System;
// Partial implementation of the event handling class
// Minimal multicast event dispatcher: handlers subscribe via AddHandler and
// are invoked (in subscription order) by TriggerEvent.
public class EventHandler
{
    // Signature shared by all subscribed handlers.
    public delegate void EventHandlerDelegate(object sender, EventArgs args);

    // Multicast delegate holding every subscribed handler; null when empty.
    private EventHandlerDelegate _handler;

    // Subscribe a handler; the same handler may be added more than once.
    public void AddHandler(EventHandlerDelegate handler)
    {
        _handler += handler;
    }

    // Invoke all subscribed handlers; a no-op when nothing is subscribed.
    public void TriggerEvent(object sender, EventArgs args)
    {
        _handler?.Invoke(sender, args);
    }
}
// Event payload carrying the name of the song that triggered the event.
public class SongEventArgs : EventArgs
{
    // Name of the song associated with this event.
    public string SongName { get; set; }

    public SongEventArgs(string songName) => SongName = songName;
}
// Demo entry point: wires a handler to the dispatcher and fires one event.
public class Program
{
    public static void Main()
    {
        var eventHandler = new EventHandler();

        // Subscribe, then raise the event once with a sample payload.
        eventHandler.AddHandler(OnSongChanged);
        eventHandler.TriggerEvent(null, new SongEventArgs("New Song"));
    }

    // Handler: downcasts the generic args to SongEventArgs and prints it.
    public static void OnSongChanged(object sender, EventArgs args)
    {
        var songArgs = (SongEventArgs)args;
        Console.WriteLine("Now playing: " + songArgs.SongName);
    }
}
/*
* @Author: dang
* @Date: 2021-08-11 09:36:00
* @LastEditTime: 2021-09-09 21:39:05
* @LastEditors: Please set LastEditors
 * @Description: User and role management API request wrappers
* @FilePath: \iot_gxhy_reservoirdam_web\src\views\systemManagement\user\api.js
*/
import request from "@/utils/request";
const prod = process.env.VUE_APP_BASE_API_6;
// Update a user's basic profile information.
export function updateUser(query) {
  const options = { url: "/updateUser", baseURL: prod, method: "post", data: query };
  return request(options);
}

// Fetch the full list of roles.
export function getList() {
  const options = { url: "role/getAllRoles", baseURL: prod, method: "get" };
  return request(options);
}

// Search roles by name. (NOTE: the "seachByName" typo is kept — callers
// import the function under this exact name.)
export function seachByName(query) {
  const options = { url: "role/getAllRolesByName", baseURL: prod, method: "post", data: query };
  return request(options);
}

// Create or update a role.
export function submitForm(query) {
  const options = { url: "role/save", baseURL: prod, method: "post", data: query };
  return request(options);
}

// Delete a single role.
export function doDelete(query) {
  const options = { url: "role/delete", baseURL: prod, method: "post", data: query };
  return request(options);
}

// Delete several roles at once.
export function doDeleteMultiple(query) {
  const options = { url: "role/doDeleteMultiple", baseURL: prod, method: "post", data: query };
  return request(options);
}

// Load the permission (menu) tree for the rights editor.
export function getRightTreeList(query) {
  const options = { url: "menu/getRightTree", baseURL: prod, method: "get", params: query };
  return request(options);
}

// Persist the rights selected for a role.
export function saveRight(query) {
  const options = { url: "role/saveRight", baseURL: prod, method: "post", data: query };
  return request(options);
}

// Fetch the permissions attached to one role.
export function getRolePermissions(id) {
  const options = { url: "role/getRolePermissions/" + id, baseURL: prod, method: "get" };
  return request(options);
}

// Fetch all roles. (Same endpoint as getList; the query argument is
// accepted but not sent, matching the original behaviour.)
export function getAllRoles(query) {
  const options = { url: "role/getAllRoles", baseURL: prod, method: "get" };
  return request(options);
}

// Add a subsystem record.
export function addSystem(params) {
  const options = { url: "/addSystem", baseURL: prod, method: "POST", data: params };
  return request(options);
}

// Update a subsystem record.
export function updateSystem(params) {
  const options = { url: "/updateSystem", baseURL: prod, method: "POST", data: params };
  return request(options);
}

// Delete a subsystem record.
export function delSystem(params) {
  const options = { url: "/delSystem", baseURL: prod, method: "POST", data: params };
  return request(options);
}

// Fetch role assignments for the role-set screen.
export function roleSet(params) {
  const options = { url: "/roleSet", baseURL: prod, method: "GET", params: params };
  return request(options);
}

// Load the organisation tree.
export function orgTree(params) {
  const options = { url: "/orgTree", baseURL: prod, method: "GET", params: params };
  return request(options);
}

// List organisations visible to the current user's organisation.
export function orgListByCurrentUserOrgId(params) {
  const options = { url: "/orgList", baseURL: prod, method: "POST", data: params };
  return request(options);
}

// Paged user listing.
export function userList(params) {
  const options = { url: "/userList", baseURL: prod, method: "POST", data: params };
  return request(options);
}

// Register a new user account.
export function register(params) {
  const options = { url: "/register", baseURL: prod, method: "POST", data: params };
  return request(options);
}

// Bind a user account to a role.
export function bindUserRole(params) {
  const options = { url: "/bindUserRole", baseURL: prod, method: "POST", data: params };
  return request(options);
}

// Lock a user account.
export function lockAccount(params) {
  const options = { url: "/lockAccount", baseURL: prod, method: "POST", data: params };
  return request(options);
}

// Unlock a user account.
export function unlockAccount(params) {
  const options = { url: "/unlockAccount", baseURL: prod, method: "POST", data: params };
  return request(options);
}

// Fetch a CAPTCHA image (binary response).
export function verifyCode(params) {
  const options = { url: "/verifyCode", baseURL: prod, method: "GET", responseType: "blob", params: params };
  return request(options);
}
|
import Axios from 'axios';
import database from '../firebase';
const apiUrl = 'https://jsonplaceholder.typicode.com/photos';
// Action creator: signals that the book list was fetched successfully.
export const fetchBooksSuccess = (books) => ({
  type: 'FETCH_BOOKS_SUCCESS',
  books
});
// Action creator: signals that a single book was created successfully.
export const createBookSuccess = (book) => ({
  type: 'CREATE_BOOK_SUCCESS',
  book
});
// Thunk: loads the whole book list from Firebase and dispatches it.
export const fetchBooks = () => (dispatch) =>
  database
    .ref('/')
    .once('value', (snapshot) => {
      dispatch(fetchBooksSuccess(snapshot.val()));
    })
    .catch((error) => {
      throw error;
    });
// Thunk: POSTs the new book to the API, then dispatches the created record.
export const createBook = (book) => (dispatch) =>
  Axios.post(apiUrl, book)
    .then((response) => {
      dispatch(createBookSuccess(response.data));
    })
    .catch((error) => {
      throw error;
    });
// export const createBook = (book) =>{
// return {
// type: 'CREATE_BOOK',
// book: book
// }
// };
|
/*
* Copyright (c) 2020. <NAME>, Partners Healthcare and members of Forome Association
*
* Developed by <NAME> and <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.forome.astorage.pastorage.schema.blocker.codec;
import net.minidev.json.JSONArray;
import net.minidev.json.JSONObject;
import org.forome.astorage.pastorage.schema.blocker.ADataDecodeEnv;
import org.forome.astorage.pastorage.schema.blocker.CodecData;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Codec for a "group" aggregate: decodes a packed JSON array of per-member
 * rows into a JSON object keyed by the member names declared in the
 * schema's "group" list.
 */
public class CodecAGroup extends Codec {

    // Ordered member names; element 0 of each encoded row indexes into this.
    private final List<String> mGroup;

    // Human-readable group name ("group-name" in the schema).
    private String mGroupName;

    // Child codecs for the per-member fields, in row order.
    private final List<Codec> mItemCodecs;

    public CodecAGroup(CodecData codecData, JSONObject schema_instr) {
        super(codecData, schema_instr);
        this.mGroup = ((JSONArray) schema_instr.get("group")).stream().map(o -> (String) o).collect(Collectors.toList());
        this.mGroupName = schema_instr.getAsString("group-name");
        this.mItemCodecs = new ArrayList<>();
        // Build one child codec per "items" entry in the schema.
        for (Object o : (JSONArray) schema_instr.get("items")) {
            this.mItemCodecs.add(codecData.create((JSONObject) o));
        }
    }

    @Override
    public boolean isAggregate() {
        return true;
    }

    /**
     * Decodes the encoded group. Each row has the shape
     * [memberIndex, field0, field1, ...]; trailing fields that are absent
     * from a row decode to null. Nested aggregate item codecs are not
     * supported and raise RuntimeException.
     */
    @Override
    public Object decode(Object group_obj, ADataDecodeEnv dataDecodeEnv) {
        JSONObject ret = new JSONObject();
        for (Object oint_obj : ((JSONArray) group_obj)) {
            JSONArray int_obj = (JSONArray) oint_obj;
            // Element 0 selects the member name; fields start at element 1.
            String name = mGroup.get((Integer) int_obj.get(0));
            JSONObject grp_obj = new JSONObject();
            for (int idx = 0; idx < mItemCodecs.size(); idx++) {
                Codec it = mItemCodecs.get(idx);
                Number it_obj = null;
                if (idx + 1 < int_obj.size()) {
                    it_obj = (Number) int_obj.get(idx + 1);
                }
                if (it.isAggregate()) {
                    throw new RuntimeException("Not implemented");
                } else {
                    if (it_obj != null) {
                        grp_obj.put(it.getName(), it.decode(it_obj, dataDecodeEnv));
                    } else {
                        grp_obj.put(it.getName(), null);
                    }
                }
            }
            ret.put(name, grp_obj);
        }
        return ret;
    }
}
|
<filename>src/js/_router.js
// Hash-bang router for pikaDeck: parses the location hash/search into
// route/path/query, seeds the store, draws the shell and dispatches to the
// matching controller (falling back to the index controller).
pikaDeck.router = {};
(function() {
    "use strict";

    // Entry point, called once on page load.
    this.init = function() {
        // hash[0] is "#!route/path...", hash[1] is an optional query string.
        var hash = window.location.hash.split('?');
        var search = window.location.search.replace('?', '');
        var rawQuery = _getQuery(hash[1], search);
        var query = _queryToObject(rawQuery);
        var route = _getRoute(hash[0]);
        var path = _getPath(hash[0]);

        // Publish the parsed routing state before any controller runs.
        pikaDeck.store.push('route', route);
        pikaDeck.store.push('path', path);
        pikaDeck.store.push('rawQuery', rawQuery);
        pikaDeck.store.push('query', query);

        pikaDeck.hb.drawShell();

        // Restore the persisted deck list, if any.
        var deckList = localStorage.getItem('deckList');
        if (deckList) {
            deckList = deckList.split(',');
            if (deckList.length) {
                pikaDeck.store.push('deckList', deckList);
            }
        }

        // Unknown or malformed routes fall back to the index controller.
        if (!(_inRoutes(hash[0]))) {
            pikaDeck.ctrl.index.init();
            return;
        }
        pikaDeck.ctrl[route].init();
    };

    // First hash segment is the route name; empty means "index".
    var _getRoute = function (hash) {
        return _splitHash(hash)[0] || 'index';
    };

    // Second hash segment is the path argument passed to the controller.
    var _getPath = function (hash) {
        return _splitHash(hash)[1] || '';
    };

    // Prefer the query embedded in the hash over the real location search.
    var _getQuery = function (hash, search) {
        return (hash) ? hash : ((search) ? search : '');
    };

    // True when the hash names a controller exposing an init() method.
    var _inRoutes = function (hash) {
        hash = hash || '';
        var route = _splitHash(hash)[0];
        var ctrl = pikaDeck.ctrl;
        return (_isRoute(hash) && ctrl && typeof ctrl[route] === 'object' && typeof ctrl[route].init === 'function') ? true : false;
    };

    // Parse "a=1&b=x|y" into { a: ['1'], b: ['x', 'y'] } (values sorted).
    var _queryToObject = function (query) {
        // TODO: Handle ~NUM passed in via query here.
        var queryObj = {};
        if (query === '') {
            return queryObj;
        }
        query = _stripQuestionMark(query);
        query = query.split('&');
        for (var i = 0; i < query.length; i++) {
            var item = query[i].split('=');
            var name = item[0];
            var value = decodeURIComponent(item[1]);
            value = value.split('|').sort();
            queryObj[name] = value;
        }
        return queryObj;
    };

    var _stripQuestionMark = function (query) {
        return (query.indexOf('?') === 0) ? query.replace('?', '') : query;
    };

    // Routes must use the "#!" hash-bang prefix.
    var _isRoute = function (hash) {
        return (hash && hash.indexOf('#!') === 0) ? true : false;
    };

    // "#!a/b" -> ['a', 'b']; falsy input yields '' (still indexable).
    var _splitHash = function (hash) {
        return (hash) ? hash.toLowerCase().replace('#!','').split('/') : '';
    };

    // Start - For Unit Testing Only
    // this._inRoutes = _inRoutes;
    // this._isRoute = _isRoute;
    // this._splitHash = _splitHash;
    // this._getPath = _getPath;
    // End - For Unit Testing Only
}).apply(pikaDeck.router);
|
#!/bin/bash
# Pull in the shared deploy helpers (defines the "publish" function).
source scripts/deploy-common.sh;

echo "--- :gcloud: Publishing to Artifact Registry...";
publish gcloud;
echo "Publish to Artifact Registry complete.";
|
import { useEffect, useState } from "react";
import Moment from 'react-moment';
import "./style.css";
function DataDisplay(props) {
// const [sortedNames, setSortedNames] = useState(null);
// const firstName = props.results;
// let sortedFirstNames = [firstName];
// console.log("this is what you want", sortedFirstNames);
// if (sortedNames !== null) {
// sortedNames.sort((a, b) => {
// if (a[sortedFirstNames] < b[sortedFirstNames]){
// return 1;
// }
// if (a[sortedFirstNames] > b[sortedFirstNames]){
// return -1;
// }
// return 0;
// });
// };
return (
<div>
<table className="emp-list" >
<thead>
<tr>
<th scope="col">Image</th>
<th scope="col">
Name
</th>
<th scope="col">Phone</th>
<th scope="col">Email</th>
<th scope="col">DOB</th>
</tr>
</thead>
<tbody>
{props.results.map((results, index) => (
<tr className="employee-data" >
<td className="emp-list-img" key={results.index}>
<img alt={results.title} className="img.fluid" src={results.picture.medium} />
</td>
<td className="emp-list-name" key={results.index}>
<p>{results.name.first} {results.name.last}</p>
</td>
<td className="emp-list-phone" key={results.index}>
<p>{results.cell}</p>
</td>
<td className="emp-list-email" key={results.index}>
<p>{results.email}</p>
</td>
<td className="emp-list-bday" key={results.index}>
<Moment date={results.dob.date} format= "MM/DD/YYYY"/>
</td>
</tr>
))}
</tbody>
</table>
</div>
);
}
export default DataDisplay;
|
# Point the Jekyll build at this CircleCI build's artifact URL:
# line 63 of _config.yml sets the output destination, line 6 the baseurl.
BASEURL=https://${CIRCLE_BUILD_NUM}-41881188-gh.circle-artifacts.com/0/vsoch.github.io
sed -i "63 s,.*,destination: ./_site,g" "_config.yml"
sed -i "6 s,.*,baseurl: $BASEURL,g" "_config.yml"
|
#!/bin/bash
# Helpers for detecting a single Xilinx / 1d22:2011 FPGA on the PCI bus.
# String results are returned by eval-assigning to the variable name the
# caller passes as $1 (bash functions cannot return strings directly).

# Returns 0 (success) when any matching FPGA device is present on the bus.
function is_fpga_installed {
    lspci | grep -E -q "Xilinx|(1d22:2011)" && return 0 || return 1
}

# Sets $1 to the board flavour based on the PCI vendor:device id
# (cnn / dev / rsa), or to "unknown" when no known id matches.
function detect_fpga_type {
    (lspci -d 1d22:2011 -nn | grep -q "") && eval "$1='cnn'" && return
    (lspci -d 10ee:9038 -nn | grep -q "") && eval "$1='dev'" && return
    (lspci -d 10ee:8038 -nn | grep -q "") && eval "$1='rsa'" && return
    eval "$1='unknown'" && return
}

# Sets $1 to the FPGA's bus:device.function, or to 'error' unless exactly
# one matching device is found.
function detect_fpga_bdf {
    local local_fpga_cnt=$(lspci | grep -E -c "Xilinx|(1d22:2011)")
    if [[ $local_fpga_cnt -ne 1 ]]; then
        eval "$1='error'" && return
    fi
    local local_fpga_bdf=$(lspci | grep -E "Xilinx|(1d22:2011)" | awk '{print $1}')
    eval "$1=$local_fpga_bdf" && return
}

# Sets $1 to the resolved sysfs path for the device whose BDF is $2, or to
# 'error' unless exactly one sysfs entry matches.
function detect_fpga_sysfs_path {
    local local_fpga_sysfs_cnt=$(find /sys/bus/pci/devices -name "*$2" | wc -l)
    if [[ $local_fpga_sysfs_cnt -ne 1 ]]; then
        eval "$1='error'" && return
    fi
    local local_fpga_sysfs_path=$(readlink -f $(find /sys/bus/pci/devices -name "*$2"))
    eval "$1=$local_fpga_sysfs_path" && return
}
|
/**
 * Returns true when `str` reads the same forwards and backwards, ignoring
 * case and any non-alphanumeric characters.
 */
function isPalindrome(str) {
  // Strip punctuation/whitespace first, then normalise case.
  const cleaned = str.replace(/[^a-zA-Z0-9]/g, '').toLowerCase();

  // Two-pointer scan from both ends toward the middle.
  let left = 0;
  let right = cleaned.length - 1;
  while (left < right) {
    if (cleaned[left] !== cleaned[right]) {
      return false;
    }
    left += 1;
    right -= 1;
  }
  return true;
}

// Test cases
console.log(isPalindrome("A man, a plan, a canal, Panama")); // Output: true
console.log(isPalindrome("race a car")); // Output: false
SELECT TOP 3 name FROM customers ORDER BY COUNT(purchase) DESC; |
#!/bin/bash
# Run the scikit-learn test suite on Windows CI.
# $1: Python version (e.g. "36"), $2: architecture bitness ("32"/"64").

set -e
set -x

PYTHON_VERSION=$1
BITNESS=$2

if [[ "$PYTHON_VERSION" == "36" || "$BITNESS" == "32" ]]; then
    # For Python 3.6 and 32-bit architecture use the regular
    # test command (outside of the minimal Docker container)
    cp $CONFTEST_PATH $CONFTEST_NAME
    pytest --pyargs sklearn
    python -m threadpoolctl -i sklearn
else
    # Otherwise run the same checks inside the minimal Windows container,
    # capping BLAS/OpenMP threads and skipping network-dependent tests.
    docker container run -e SKLEARN_SKIP_NETWORK_TESTS=1 \
        -e OMP_NUM_THREADS=2 \
        -e OPENBLAS_NUM_THREADS=2 \
        --rm scikit-learn/minimal-windows \
        powershell -Command "pytest --pyargs sklearn"

    docker container run --rm scikit-learn/minimal-windows \
        powershell -Command "python -m threadpoolctl -i sklearn"
fi
|
package models
import "gopkg.in/mgo.v2/bson"
/*
User Model

Represents a User; the bson tags tell the mgo driver how to name the
corresponding properties in the MongoDB document, and the json tags control
the API serialization.
*/
type User struct {
	ID        bson.ObjectId `bson:"_id" json:"id"`
	FirstName string        `bson:"first_name" json:"first_name"`
	LastName  string        `bson:"last_name" json:"last_name"`
}
|
from flask import Flask, render_template, request, session

app = Flask(__name__)
app.secret_key = 'secretkey'  # FIXME: load from config/env, not source code


@app.route('/signup', methods=['GET', 'POST'])
def signup():
    """Render the signup form on GET; store submitted credentials on POST.

    SECURITY: the original stored the plaintext password in the session.
    Flask sessions are signed but NOT encrypted, so the password is readable
    by the client. Hash passwords and persist them server-side instead.
    """
    if request.method == 'GET':
        return render_template('signup.html')

    email = request.form['email']
    password = request.form['password']
    session['email'] = email
    session['password'] = password  # FIXME: never store plaintext passwords
    return render_template('index.html')


if __name__ == '__main__':
    app.run(debug=True)

# templates/signup.html (reference copy; in the original file this trailing
# markup was bare HTML, which is a Python syntax error — it must live in a
# comment or in the template file itself):
#
# <html>
# <head>
#     <title>Sign up</title>
# </head>
# <body>
#     <form action='/signup' method='post'>
#         Email: <input type='text' name='email'>
#         Password: <input type='password' name='password'>
#         <input type='submit' value='Sign up'>
#     </form>
# </body>
# </html>
#!/bin/sh
# Recolour figures in-place: map the default black/white/primary RGB values
# emitted by the plotting tool to a dark-theme palette.
# Usage: script FILE...  (every file given is edited with sed -i).
sed -i \
	-e 's/rgb(0%,0%,0%)/#3c3c3c/g' \
	-e 's/rgb(100%,100%,100%)/#d4d4d4/g' \
	-e 's/rgb(50%,0%,0%)/#3c3c3c/g' \
	-e 's/rgb(0%,50%,0%)/#97bf60/g' \
	-e 's/rgb(0%,50.196078%,0%)/#97bf60/g' \
	-e 's/rgb(50%,0%,50%)/#4c4c4c/g' \
	-e 's/rgb(50.196078%,0%,50.196078%)/#4c4c4c/g' \
	-e 's/rgb(0%,0%,50%)/#d4d4d4/g' \
	"$@"
|
import re

# Sample input and the target word ("code", matched as a whole word only).
text = "This is a text about code"
pattern = r"\bcode\b"

# Compile once, then collect every non-overlapping whole-word match.
word_re = re.compile(pattern)
result = word_re.findall(text)
print(result)
<gh_stars>10-100
import React, { useState } from 'react';
import { Link } from 'react-router-dom';
import Navlink from './Navlink/Navlink';
import Sidebar from './Sidebar/Sidebar';
import routes from '../../../shared/routes';
import classes from './navbar.module.css';
import logo from '../../../assets/logo.svg';
import { auth } from '../../../shared/firebase';
// Navbar: top navigation bar with logo, auth-aware links and a hamburger
// toggle that opens/closes the mobile Sidebar.
export default function Navbar() {
  // Whether the hamburger menu / sidebar is currently open.
  const [isMenuOpen, setIsMenuOpen] = useState(false);

  // Signs the current user out of Firebase auth.
  const signOutHandler = () => {
    auth.signOut();
  };

  // Toggles the hamburger menu open/closed.
  const onMenuClick = () => {
    setIsMenuOpen(!isMenuOpen);
  };

  // NOTE(review): auth.currentUser is read at render time; presumably a
  // parent re-renders this component on auth state changes — confirm.
  const isAuthenticated = auth.currentUser !== null;

  return (
    <>
      <nav className={classes.nav}>
        <Link to="/" className={classes.logo}>
          <img src={logo} alt="crypto-crowdfund-logo" />
        </Link>
        <div
          className={`${classes.navLinkContainer} ${
            isAuthenticated && classes.threeByThree
          }`}
        >
          <Navlink to={routes.CAMPAIGNS}>Campaigns</Navlink>
          {isAuthenticated && (
            <Navlink to={`${routes.ACCOUNT}?uid=${auth.currentUser!.uid}`}>
              Account
            </Navlink>
          )}
          {isAuthenticated ? (
            <Navlink to={routes.HOME} onClick={signOutHandler} shouldOverride>
              Sign Out
            </Navlink>
          ) : (
            <Navlink to={routes.SIGN_IN}>Sign In</Navlink>
          )}
        </div>
        <button className={classes.toggler} onClick={onMenuClick}>
          <div
            className={`${classes.bar1} ${isMenuOpen && classes.crossBar1}`}
          />
          <div
            className={`${classes.bar2} ${isMenuOpen && classes.crossBar2}`}
          />
          <div
            className={`${classes.bar3} ${isMenuOpen && classes.crossBar3}`}
          />
        </button>
      </nav>
      <Sidebar
        isAuthenticated={isAuthenticated}
        isOpen={isMenuOpen}
        onClose={() => setIsMenuOpen(false)}
      />
    </>
  );
}
|
import argparse
from .face_detect import sort_faces
def argParser():
    """Command-line entry point: sort the faces found under ``path``.

    Parses a single positional argument (the image folder) and hands it to
    ``sort_faces``. Errors are reported on stdout rather than raised, so the
    CLI never crashes with a traceback.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "path",
        type=str,
        help="Path to the folder where all your images are stored.",
    )
    args = parser.parse_args()

    try:
        sort_faces(args.path)
    except Exception as exc:  # best-effort CLI: report, do not crash
        print(exc)
|
package main
import (
"context"
"errors"
"fmt"
"strings"
"testing"
"github.com/google/go-github/github"
)
// TestCreateDeployment verifies that createDeployment derives the PR head
// ref, the environment and the repository URL from the incoming event.
func TestCreateDeployment(t *testing.T) {
	repoName := "testowner/testrepo"
	client := newTestGitHubClient()

	deployment, err := createDeployment(
		client,
		PullRequestEvent{
			Repository:  GitHubRepository{FullName: repoName},
			PullRequest: GitHubPullRequest{Number: 123},
		},
		"test",
	)
	if err != nil {
		t.Error("error running createDeployment:", err)
	}
	// The ref must point at the pull request's head.
	if *deployment.Ref != "pull/123/head" {
		t.Errorf("createDeployment did not format ref properly. Expected %s, received %s", "pull/123/head", *deployment.Ref)
	}
	if *deployment.Environment != "test" {
		t.Errorf("environment did not match. Expected %s, received %s", "test", *deployment.Environment)
	}
	if *deployment.RepositoryURL != fmt.Sprintf("https://www.github.com/%s", repoName) {
		t.Errorf("createDeployment did not format owner/repo properly. Expected %s, received %s", fmt.Sprintf("https://www.github.com/%s", repoName), *deployment.RepositoryURL)
	}
}
type testRepoClient struct{}
// TestCreateDeploymentStatus verifies that createDeploymentStatus forwards
// the state and environment URL through to the GitHub API request.
func TestCreateDeploymentStatus(t *testing.T) {
	repoName := "testowner/testrepo"
	client := newTestGitHubClient()

	deploymentStatus, err := createDeploymentStatus(
		client,
		123,
		PullRequestEvent{
			Repository:  GitHubRepository{FullName: repoName},
			PullRequest: GitHubPullRequest{Number: 123},
		},
		"test",
		"success",
		"https://www.example.com",
	)
	if err != nil {
		t.Error("error running createDeploymentStatus:", err)
	}
	// mainly checking that github.CreateDeploymentStatus was properly called with the values
	// passed into this package's createDeploymentStatus function
	if deploymentStatus.GetDeploymentURL() != "https://www.example.com" {
		t.Errorf("environment URL did not match. Expected %s, received %s", "https://www.example.com", deploymentStatus.GetDeploymentURL())
	}
	if deploymentStatus.GetState() != "success" {
		t.Errorf("state did not match. Expected %s, received %s", "success", deploymentStatus.GetState())
	}
}
// newTestGitHubClient builds a GitHubClient backed by the stub repo client.
func newTestGitHubClient() GitHubClient {
	stub := testRepoClient{}
	return GitHubClient{Repositories: stub}
}
// CreateDeployment does some basic checking to make sure the method is called with the expected params, according to
// https://developer.github.com/v3/repos/deployments/
func (c testRepoClient) CreateDeployment(ctx context.Context, owner string, repo string, req *github.DeploymentRequest) (*github.Deployment, *github.Response, error) {
	// check for valid owner and repo names (basically, test that they were split properly)
	if a := strings.Split(owner, "/"); len(a) != 1 {
		return &github.Deployment{}, &github.Response{}, errors.New("owner name invalid (contains slash)")
	}
	if b := strings.Split(repo, "/"); len(b) != 1 {
		return &github.Deployment{}, &github.Response{}, errors.New("repo name invalid (contains slash)")
	}
	// Echo the request back so the tests can assert on what was passed in.
	return &github.Deployment{
		Ref:           github.String(req.GetRef()),
		ID:            github.Int64(123),
		RepositoryURL: github.String(fmt.Sprintf("https://www.github.com/%s/%s", owner, repo)),
		Environment:   req.Environment,
	}, &github.Response{}, nil
}
// CreateDeploymentStatus mirrors the stub above: it validates the split
// owner/repo and echoes the request's URL and state back to the caller.
func (c testRepoClient) CreateDeploymentStatus(ctx context.Context, owner string, repo string, deployment int64, req *github.DeploymentStatusRequest) (*github.DeploymentStatus, *github.Response, error) {
	// check for valid owner and repo names
	if a := strings.Split(owner, "/"); len(a) != 1 {
		return &github.DeploymentStatus{}, &github.Response{}, errors.New("owner name invalid (contains slash)")
	}
	if b := strings.Split(repo, "/"); len(b) != 1 {
		return &github.DeploymentStatus{}, &github.Response{}, errors.New("repo name invalid (contains slash)")
	}
	return &github.DeploymentStatus{
		DeploymentURL: req.EnvironmentURL,
		State:         req.State,
	}, &github.Response{}, nil
}
|
# Author : Sharmo , Sarita
# Ships numbered data files (<n>.txt) from this host to the EC2 slaves
# listed in /tmp/hostEntry.txt (format: "<name> <ip>"), one file per host.

ip=$(cat /tmp/hostEntry.txt | cut -d' ' -f2-)
path=/tmp
fileNumber=0
suffix=".txt"
keyValue=KEY-VALUE

for line in $ip
do
  # Stage the next numbered file in a fresh directory before copying.
  rm -rf "$path/filename"
  mkdir "$path/filename"
  mv "$path/$fileNumber$suffix" "$path/filename/."

  # The original used `echo "moving " + $var`, printing a literal "+"
  # (bash is not JavaScript); plain interpolation is what was intended.
  echo "moving $path/$fileNumber$suffix"

  scp -i "$path/$keyValue" -o StrictHostKeyChecking=no -r "$path/filename" "ec2-user@$line:/tmp/."
  sleep 3
  fileNumber=$((fileNumber + 1))
done

echo 0
|
<reponame>nilslice/crates.io
-- Relax the versions.features column: drop the NOT NULL constraint and the
-- default so the column becomes optional, then store it as plain text.
ALTER TABLE versions ALTER COLUMN features DROP NOT NULL;
ALTER TABLE versions ALTER COLUMN features DROP DEFAULT;
ALTER TABLE versions ALTER COLUMN features SET DATA TYPE text;
|
var fs = require('fs');
var path = require('path');
var basename = path.basename(module.filename);
module.exports = function(app) {
fs
.readdirSync(__dirname)
.filter(function(file) {
return (file.indexOf('.') !== 0) && (file !== basename);
})
.forEach(function(file) {
if (file.slice(-3) !== '.js') return;
var controller = require(path.join(__dirname, file));
app.use(controller.routes());
});
};
|
# Base development setup: terminal, editor, shell and toolchain packages.
sudo apt-get install terminator
sudo apt-get install vim
sudo apt-get install zsh
sudo apt-get install git
sudo apt-get install g++

# Install oh-my-zsh by running the upstream installer script.
sh -c "$(wget https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh -O -)"
|
// 15815. 천재 수학자 성필
// 2021.11.10
// 자료구조, 스택
#include<iostream>
#include<stack>
#include<string>
using namespace std;
int main()
{
int ans = 0;
stack<int> st;
string s;
cin >> s;
int a, b;
for (int i = 0; i < s.size(); i++)
{
switch (s[i])
{
case '+':
a = st.top();
st.pop();
b = st.top();
st.pop();
st.push(b + a);
break;
case '-':
a = st.top();
st.pop();
b = st.top();
st.pop();
st.push(b - a);
break;
case '*':
a = st.top();
st.pop();
b = st.top();
st.pop();
st.push(b * a);
break;
case '/':
a = st.top();
st.pop();
b = st.top();
st.pop();
st.push(b / a);
break;
default:
st.push(s[i] - '0');
break;
}
}
cout << st.top() << endl;
return 0;
}
|
# Single-process (GPU 0) launch of the 3DMM StyleGAN2 trainer through
# torch.distributed. iter=3 / batch=1 / n_sample=1 are smoke-test values
# under the "debug" run name, not real training settings.
CUDA_VISIBLE_DEVICES=0 \
python -m torch.distributed.launch \
--nproc_per_node=1 \
--master_port=1717 \
train_3DMM_v4.py \
--name debug \
--path /glab2/Users/ljiayi/Semantic_Face/Generative_Model/3DMM/stylegan2-pytorch/lmdbs/com_LS_TG_2 \
--arch stylegan2 \
--iter 3 \
--batch 1 \
--n_sample 1 \
--size 512 \
--r1 10.0 \
--path_regularize 2.0 \
--path_batch_shrink 2 \
--d_reg_every 16 \
--g_reg_every 4 \
--mixing 0.9 \
--lr 0.002 \
--channel_multiplier 2 \
--local_rank 0 \
|
# Start bot
# Launches the compiled console application from the current directory.
./ConsoleApp1
<filename>index.js
// Minimal static file server: exposes ./src at the web root.
var express = require('express');

var app = express();

// Everything under /src is served as the site root.
app.use('/', express.static(__dirname + '/src'));

// Honour the platform-assigned port, defaulting to 5000 locally.
app.set('port', process.env.PORT || 5000);
app.listen(app.get('port'));
|
<reponame>qngapparat/soak-js
const amazon = require('./amazon');
const google = require('./google');
const { getExecutingPlatform } = require('./utils');
/**
 * Wraps a user function so it can run unchanged on either AWS Lambda or
 * Google Cloud Functions.
 *
 * @param {Function} func The userfunction to run
 * @param {SoakConfig} config Optional config
 * @returns {Function} A handler that detects the platform per invocation.
 */
function universalSoak(func, config = {}) {
  // Function factory: the returned handler decides at call time which
  // cloud is invoking it, based on the shape of the platform arguments.
  return (first, second, ...rest) => {
    const platform = getExecutingPlatform(first, second, ...rest);

    switch (platform) {
      case 'google':
        return google(func, config)(first, second, ...rest);
      case 'amazon':
        return amazon(func, config)(first, second, ...rest);
      default:
        return `Support for ${platform} not implemented yet ://`;
    }
  };
}

module.exports = universalSoak;
<filename>tests/test_runstatus_page_maintenance.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pytest
from exoscale.api.runstatus import *
from datetime import timezone
class TestRunstatusPageMaintenance:
    """Integration tests for Runstatus page maintenances: fixtures are
    created through raw API calls, then the Maintenance model is exercised."""

    def test_add_event(self, exo, runstatus_page):
        """Events are returned newest-first and keep the last explicit status."""
        page = Page._from_rs(exo.runstatus, runstatus_page())
        test_maintenance_title = "Database server upgrade"
        test_maintenance_description = (
            "We're upgrading the database server hardware to add more memory"
        )
        test_maintenance_service = "db"
        test_maintenance_start_date = datetime(2019, 8, 2, 4, 0)
        test_maintenance_end_date = datetime(2019, 8, 2, 5, 0)

        # The referenced service must exist before creating the maintenance.
        exo.runstatus._post(
            url="/pages/{p}/services".format(p=page.name),
            json={"name": test_maintenance_service},
        )
        exo.runstatus._post(
            url="/pages/{p}/maintenances".format(p=page.name),
            json={
                "title": test_maintenance_title,
                "description": test_maintenance_description,
                "start_date": test_maintenance_start_date.isoformat(),
                "end_date": test_maintenance_end_date.isoformat(),
                "services": [test_maintenance_service],
            },
        )
        res = exo.runstatus._get(
            url="/pages/{p}/maintenances".format(p=page.name)
        ).json()
        maintenance = Maintenance._from_rs(exo.runstatus, res["results"][0], page)

        maintenance.add_event(description="Stopping server", status="in-progress")
        maintenance.add_event(description="Upgrading memory")
        maintenance.add_event(description="Restarting server")

        res = exo.runstatus._get(
            url="/pages/{p}/maintenances/{i}/events".format(
                p=page.name, i=maintenance.id
            )
        ).json()
        # Events come back in reverse chronological order; the status set on
        # the first event carries over to the events added without one.
        assert len(res["results"]) == 3
        assert res["results"][0]["text"] == "Restarting server"
        assert res["results"][0]["status"] == "in-progress"
        assert res["results"][1]["text"] == "Upgrading memory"
        assert res["results"][1]["status"] == "in-progress"
        assert res["results"][2]["text"] == "Stopping server"
        assert res["results"][2]["status"] == "in-progress"

    def test_update(self, exo, runstatus_page):
        """Maintenance.update() pushes edited fields to the API and mirrors
        them on the local model."""
        page = Page._from_rs(exo.runstatus, runstatus_page())
        test_maintenance_title = "Database server upgrade"
        test_maintenance_title_edited = "Database server upgrade (edited)"
        test_maintenance_description = (
            "We're upgrading the database server hardware to add more memory"
        )
        test_maintenance_description_edited = (
            "We're upgrading the database server hardware to add more memory (edited)"
        )
        test_maintenance_service = "db1"
        test_maintenance_service_edited = "db2"
        test_maintenance_start_date = datetime(2019, 8, 2, 4, 0)
        test_maintenance_start_date_edited = datetime(2019, 8, 2, 14, 0)
        test_maintenance_end_date = datetime(2019, 8, 2, 5, 0)
        test_maintenance_end_date_edited = datetime(2019, 8, 2, 15, 0)

        # Register both services so the maintenance can be switched over.
        for i in [test_maintenance_service, test_maintenance_service_edited]:
            exo.runstatus._post(
                url="/pages/{p}/services".format(p=page.name), json={"name": i}
            )
        exo.runstatus._post(
            url="/pages/{p}/maintenances".format(p=page.name),
            json={
                "title": test_maintenance_title,
                "description": test_maintenance_description,
                "start_date": test_maintenance_start_date.isoformat(),
                "end_date": test_maintenance_end_date.isoformat(),
                "services": [test_maintenance_service],
            },
        )
        res = exo.runstatus._get(
            url="/pages/{p}/maintenances".format(p=page.name)
        ).json()
        maintenance = Maintenance._from_rs(exo.runstatus, res["results"][0], page)

        maintenance.update(
            title=test_maintenance_title_edited,
            description=test_maintenance_description_edited,
            start_date=test_maintenance_start_date_edited,
            end_date=test_maintenance_end_date_edited,
            services=[test_maintenance_service_edited],
        )

        # Verify both the API resource and the in-memory model were updated.
        res = exo.runstatus._get(
            url="/pages/{p}/maintenances/{m}".format(p=page.name, m=maintenance.id)
        ).json()
        assert res["title"] == test_maintenance_title_edited
        assert maintenance.title == test_maintenance_title_edited
        assert res["description"] == test_maintenance_description_edited
        assert maintenance.description == test_maintenance_description_edited
        assert (
            rstime_to_datetime(res["start_date"]) == test_maintenance_start_date_edited
        )
        assert maintenance.start_date == test_maintenance_start_date_edited
        assert rstime_to_datetime(res["end_date"]) == test_maintenance_end_date_edited
        assert maintenance.end_date == test_maintenance_end_date_edited
        assert res["services"] == [test_maintenance_service_edited]
        assert maintenance.services == [test_maintenance_service_edited]

    def test_close(self, exo, runstatus_page):
        """Maintenance.close() records a final event with status "completed"."""
        page = Page._from_rs(exo.runstatus, runstatus_page())
        test_maintenance_title = "Database server upgrade"
        test_maintenance_description = (
            "We're upgrading the database server hardware to add more memory"
        )
        test_maintenance_service = "db"
        test_maintenance_start_date = datetime(2019, 8, 2, 4, 0)
        test_maintenance_end_date = datetime(2019, 8, 2, 5, 0)

        exo.runstatus._post(
            url="/pages/{p}/services".format(p=page.name),
            json={"name": test_maintenance_service},
        )
        exo.runstatus._post(
            url="/pages/{p}/maintenances".format(p=page.name),
            json={
                "title": test_maintenance_title,
                "description": test_maintenance_description,
                "start_date": test_maintenance_start_date.isoformat(),
                "end_date": test_maintenance_end_date.isoformat(),
                "services": [test_maintenance_service],
            },
        )
        res = exo.runstatus._get(
            url="/pages/{p}/maintenances".format(p=page.name)
        ).json()
        maintenance = Maintenance._from_rs(exo.runstatus, res["results"][0], page)

        maintenance.close("We're done here")

        res = exo.runstatus._get(
            url="/pages/{p}/maintenances/{i}/events".format(
                p=page.name, i=maintenance.id
            )
        ).json()
        assert len(res["results"]) == 1
        assert res["results"][0]["text"] == "We're done here"
        assert res["results"][0]["status"] == "completed"

    def test_properties(self, exo, runstatus_page):
        """The .events property yields the maintenance's events newest-first."""
        page = Page._from_rs(exo.runstatus, runstatus_page())
        test_maintenance_title = "Database server upgrade"
        test_maintenance_description = (
            "We're upgrading the database server hardware to add more memory"
        )
        test_maintenance_services = ["db1", "db2"]
        test_maintenance_start_date = datetime(2019, 8, 2, 4, 0)
        test_maintenance_end_date = datetime(2019, 8, 2, 5, 0)

        for i in test_maintenance_services:
            exo.runstatus._post(
                url="/pages/{p}/services".format(p=page.name), json={"name": i}
            )
        exo.runstatus._post(
            url="/pages/{p}/maintenances".format(p=page.name),
            json={
                "title": test_maintenance_title,
                "description": test_maintenance_description,
                "start_date": test_maintenance_start_date.isoformat(),
                "end_date": test_maintenance_end_date.isoformat(),
                "services": [test_maintenance_services[0]],
            },
        )
        res = exo.runstatus._get(
            url="/pages/{p}/maintenances".format(p=page.name)
        ).json()
        maintenance = Maintenance._from_rs(exo.runstatus, res["results"][0], page)

        # Seed three events through the raw API, then read them back via the
        # model property.
        for i in [1, 2, 3]:
            exo.runstatus._post(
                url="/pages/{p}/maintenances/{i}/events".format(
                    p=page.name, i=maintenance.id
                ),
                json={"text": "Update #{}".format(i), "status": maintenance.status},
            )

        maintenance_events = list(maintenance.events)
        assert len(maintenance_events) == 3
        assert maintenance_events[0].description == "Update #3"
        assert maintenance_events[1].description == "Update #2"
        assert maintenance_events[2].description == "Update #1"
|
<gh_stars>1-10
# Generated by Django 3.2.12 on 2022-02-11 14:06
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated initial migration for app01 (Django 3.2.12).
    # Creates the custom user model plus blog/navigation/site models.
    # Generated code: do not hand-edit field definitions; make a new
    # migration instead.

    initial = True

    dependencies = [
        ('auth', '0012_alter_user_first_name_max_length'),
    ]

    operations = [
        # Custom user model: standard AbstractUser columns plus
        # site-specific fields (nickname, sign-up channel, phone,
        # points, API token).
        migrations.CreateModel(
            name='UserInfo',
            fields=[
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('nick_name', models.CharField(max_length=16, verbose_name='昵称')),
                ('sign_status', models.IntegerField(choices=[(0, '用户名注册'), (1, '手机号注册'), (2, '邮箱注册'), (3, 'QQ注册')], default=0, verbose_name='注册方式')),
                ('tel', models.CharField(blank=True, max_length=12, null=True, verbose_name='手机号')),
                ('integral', models.IntegerField(default=20, verbose_name='用户积分')),
                ('token', models.CharField(blank=True, max_length=64, null=True, verbose_name='TOKEN')),
            ],
            options={
                'verbose_name_plural': '用户',
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        # Advertising banner entries shown on the site.
        migrations.CreateModel(
            name='Advert',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('title', models.CharField(max_length=32, null=True, verbose_name='产品名称')),
                ('href', models.URLField(verbose_name='跳转链接')),
                ('img', models.FileField(blank=True, help_text='单图', null=True, upload_to='advert/', verbose_name='图片地址')),
                ('img_list', models.TextField(blank=True, help_text='上传图片请用线上地址,使用;隔开多张图片', null=True, verbose_name='图片组')),
                ('is_show', models.BooleanField(default=False, verbose_name='是否展示')),
                ('author', models.CharField(blank=True, max_length=32, null=True, verbose_name='广告主')),
                ('abstract', models.CharField(blank=True, max_length=128, null=True, verbose_name='产品简介')),
            ],
            options={
                'verbose_name_plural': '广告',
            },
        ),
        # Blog articles with denormalized counters (views, comments, diggs,
        # collections).
        migrations.CreateModel(
            name='Articles',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('title', models.CharField(blank=True, max_length=32, null=True, verbose_name='标题')),
                ('abstract', models.CharField(blank=True, max_length=128, null=True, verbose_name='文章简介')),
                ('content', models.TextField(blank=True, null=True, verbose_name='文章内容')),
                ('create_date', models.DateTimeField(auto_now_add=True, null=True, verbose_name='文章发布日期')),
                ('change_date', models.DateTimeField(auto_now=True, null=True, verbose_name='文章修改日期')),
                ('status', models.IntegerField(choices=[(0, '未发布'), (1, '已发布')], verbose_name='文章保存状态')),
                ('recommend', models.BooleanField(default=True, verbose_name='是否上推荐')),
                ('look_count', models.IntegerField(default=0, verbose_name='文章阅读量')),
                ('comment_count', models.IntegerField(default=0, verbose_name='文章评论量')),
                ('digg_count', models.IntegerField(default=0, verbose_name='文章点赞量')),
                ('collects_count', models.IntegerField(default=0, verbose_name='文章收藏数')),
                ('category', models.IntegerField(blank=True, choices=[(0, '前端'), (1, '后端')], null=True, verbose_name='文章分类')),
                ('author', models.CharField(blank=True, max_length=16, null=True, verbose_name='作者')),
                ('source', models.CharField(blank=True, max_length=32, null=True, verbose_name='来源')),
            ],
            options={
                'verbose_name_plural': '文章',
            },
        ),
        # Uploaded user avatar images.
        migrations.CreateModel(
            name='Avatars',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('url', models.FileField(upload_to='avatars/', verbose_name='用户头像地址')),
            ],
            options={
                'verbose_name_plural': '用户头像',
            },
        ),
        # Article cover images (linked from Articles via AddField below).
        migrations.CreateModel(
            name='Cover',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('url', models.FileField(upload_to='article_img/', verbose_name='文章封面地址')),
            ],
            options={
                'verbose_name_plural': '文章封面',
            },
        ),
        # Visitor feedback plus the admin's reply/processing state.
        migrations.CreateModel(
            name='Feedback',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('email', models.EmailField(max_length=254, verbose_name='邮箱')),
                ('content', models.TextField(verbose_name='反馈信息')),
                ('status', models.BooleanField(default=False, verbose_name='是否处理')),
                ('processing_content', models.TextField(blank=True, null=True, verbose_name='回复的内容')),
            ],
            options={
                'verbose_name_plural': '用户反馈',
            },
        ),
        # Timeline/memoir entries with optional image groups.
        migrations.CreateModel(
            name='History',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('title', models.CharField(max_length=32, verbose_name='事件名称')),
                ('content', models.TextField(verbose_name='事件内容')),
                ('create_date', models.DateField(null=True, verbose_name='创建时间')),
                ('drawing', models.TextField(blank=True, null=True, verbose_name='配图组,以;隔开')),
            ],
            options={
                'verbose_name_plural': '回忆录',
            },
        ),
        # Background images used by the site banner (see Menu.menu_url M2M).
        migrations.CreateModel(
            name='MenuImg',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('url', models.FileField(upload_to='site_bg/', verbose_name='图片地址')),
            ],
            options={
                'verbose_name_plural': '站点背景图',
            },
        ),
        # Site owner's profile card (contact info and social links).
        migrations.CreateModel(
            name='MyInfo',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=32, verbose_name='名字')),
                ('job', models.CharField(max_length=128, verbose_name='工作')),
                ('email', models.EmailField(max_length=64, verbose_name='邮箱')),
                ('site_url', models.CharField(max_length=32, verbose_name='网站链接')),
                ('addr', models.CharField(max_length=16, verbose_name='地址')),
                ('bilibili_url', models.URLField(verbose_name='哔哩哔哩链接')),
                ('github_url', models.URLField(verbose_name='GitHub链接')),
                ('wechat_img', models.FileField(upload_to='my_info/', verbose_name='微信图片')),
                ('qq_img', models.FileField(upload_to='my_info/', verbose_name='QQ图片')),
            ],
            options={
                'verbose_name_plural': '个人信息',
            },
        ),
        # Categories for the link-navigation page (referenced by Navs).
        migrations.CreateModel(
            name='NavCategory',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('title', models.CharField(max_length=16, verbose_name='分类标题')),
                ('icon', models.CharField(max_length=32, verbose_name='分类图标')),
            ],
            options={
                'verbose_name_plural': '导航分类',
            },
        ),
        # Marker rows for news-crawl runs (only a creation timestamp).
        migrations.CreateModel(
            name='New',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='获取时间')),
            ],
            options={
                'verbose_name_plural': '新闻爬取',
            },
        ),
        # Global site metadata (title, SEO keywords, ICP record, version).
        migrations.CreateModel(
            name='Site',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('title', models.CharField(max_length=32, verbose_name='网站标题')),
                ('abstract', models.CharField(max_length=128, verbose_name='网站简介')),
                ('key_words', models.CharField(max_length=128, verbose_name='网站关键字')),
                ('record', models.CharField(max_length=32, verbose_name='网站备案号')),
                ('create_date', models.DateTimeField(verbose_name='建站日期')),
                ('version', models.CharField(max_length=16, verbose_name='网站版本号')),
                ('icon', models.FileField(upload_to='site_icon/', verbose_name='网站图标')),
            ],
            options={
                'verbose_name_plural': '网站信息',
            },
        ),
        # Article tags (linked from Articles via AddField below).
        migrations.CreateModel(
            name='Tags',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('title', models.CharField(max_length=16, verbose_name='标签名字')),
            ],
            options={
                'verbose_name_plural': '文章标签',
            },
        ),
        # Submitted navigation links with a moderation status workflow.
        migrations.CreateModel(
            name='Navs',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('icon_href', models.URLField(blank=True, help_text='在线链接', null=True, verbose_name='图标链接')),
                ('icon', models.FileField(blank=True, help_text='文件优先级大于在线链接', null=True, upload_to='site_icon/', verbose_name='网站图标')),
                ('title', models.CharField(max_length=32, verbose_name='网站标题')),
                ('abstract', models.CharField(max_length=128, null=True, verbose_name='网站简介')),
                ('create_date', models.DateTimeField(auto_now=True, verbose_name='创建时间')),
                ('href', models.URLField(verbose_name='网站链接')),
                ('status', models.IntegerField(choices=[(0, '待审核'), (1, '已通过'), (2, '被驳回')], default=0, verbose_name='导航状态')),
                ('nav_category', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='app01.navcategory', verbose_name='网站导航的分类')),
            ],
            options={
                'verbose_name_plural': '网站导航',
            },
        ),
        # Short "mood" posts with optional images and counters.
        migrations.CreateModel(
            name='Moods',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=16, verbose_name='发布人')),
                ('create_date', models.DateTimeField(auto_now=True, verbose_name='发布时间')),
                ('content', models.TextField(verbose_name='心情内容')),
                ('drawing', models.TextField(blank=True, null=True, verbose_name='配图组,以;隔开')),
                ('comment_count', models.IntegerField(default=0, verbose_name='评论数')),
                ('digg_count', models.IntegerField(default=0, verbose_name='点赞数')),
                ('avatar', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='app01.avatars', verbose_name='心情的发布头像')),
            ],
            options={
                'verbose_name_plural': '心情',
            },
        ),
        # Comments on mood posts.
        migrations.CreateModel(
            name='MoodComment',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=16, null=True, verbose_name='评论人')),
                ('content', models.TextField(verbose_name='评论内容')),
                ('digg_count', models.IntegerField(default=0, verbose_name='点赞数')),
                ('create_date', models.DateTimeField(auto_now=True, verbose_name='评论时间')),
                ('avatar', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='app01.avatars', verbose_name='心情的发布头像')),
                ('mood', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='app01.moods', verbose_name='评论的心情')),
            ],
            options={
                'verbose_name_plural': '心情评论',
            },
        ),
        # Home-page banner configuration: slogan rotation and background
        # image carousel (many-to-many to MenuImg).
        migrations.CreateModel(
            name='Menu',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('menu_title', models.CharField(max_length=16, null=True, verbose_name='菜单名称')),
                ('menu_title_en', models.CharField(max_length=32, null=True, verbose_name='菜单英文名称')),
                ('title', models.CharField(max_length=32, null=True, verbose_name='slogan')),
                ('abstract', models.TextField(help_text='多个之间按分号区分', null=True, verbose_name='slogan介绍')),
                ('abstract_time', models.IntegerField(default=8, help_text='单位秒,默认是8秒', verbose_name='slogan切换时间')),
                ('rotation', models.BooleanField(default=True, verbose_name='是否轮播slogan介绍')),
                ('menu_rotation', models.BooleanField(default=False, help_text='多选默认会轮播', verbose_name='是否轮播banner图')),
                ('menu_time', models.IntegerField(default=8, help_text='单位秒,默认是8秒', verbose_name='背景图切换时间')),
                ('menu_url', models.ManyToManyField(help_text='可以多选,多选就会轮播', to='app01.MenuImg', verbose_name='菜单图片')),
            ],
            options={
                'verbose_name_plural': '站点背景',
            },
        ),
        # Article comments; self-referential FK supports threaded replies.
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('nid', models.AutoField(primary_key=True, serialize=False)),
                ('digg_count', models.IntegerField(default=0, verbose_name='点赞')),
                ('content', models.TextField(verbose_name='评论内容')),
                ('comment_count', models.IntegerField(default=0, verbose_name='子评论数')),
                ('drawing', models.TextField(blank=True, null=True, verbose_name='配图')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('article', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app01.articles', verbose_name='评论文章')),
                ('parent_comment', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='app01.comment', verbose_name='是否是父评论')),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='评论者')),
            ],
            options={
                'verbose_name_plural': '评论',
            },
        ),
        # Cross-model links added after all tables exist (FK/M2M targets
        # were created above).
        migrations.AddField(
            model_name='articles',
            name='cover',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='app01.cover', verbose_name='文章封面'),
        ),
        migrations.AddField(
            model_name='articles',
            name='tag',
            field=models.ManyToManyField(blank=True, to='app01.Tags', verbose_name='文章标签'),
        ),
        migrations.AddField(
            model_name='userinfo',
            name='avatar',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='app01.avatars', verbose_name='用户头像'),
        ),
        migrations.AddField(
            model_name='userinfo',
            name='collects',
            field=models.ManyToManyField(to='app01.Articles', verbose_name='收藏的文章'),
        ),
        migrations.AddField(
            model_name='userinfo',
            name='groups',
            field=models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups'),
        ),
        migrations.AddField(
            model_name='userinfo',
            name='user_permissions',
            field=models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions'),
        ),
    ]
|
<reponame>planetsolutions/pa-front
import {Component, HostListener, OnDestroy, OnInit} from '@angular/core';
import {ActivatedRoute, Router} from '@angular/router';
import {animate, state, style, transition, trigger} from '@angular/animations';
import { Search, SearchComposition, Application, ResultMaster, ResultMasterColumnTypes, SortOptions,
ResultMasterPanelTabColumn, Platforms, CmisConstants, SearchResultRow, SearchResultRowColumn,
CmisObject, PagedList, XForm, Doc } from '../../index';
import 'rxjs/add/operator/first';
import 'rxjs/add/observable/forkJoin';
import 'rxjs/add/observable/interval';
import {Observable} from 'rxjs/Observable';
import {ApiService} from '../../services/api/api.service';
import {CommunicationService} from '../../services/communication.service';
import {SipService} from '../../sip/sip.service';
import {TranslateService} from '@ngx-translate/core';
import {SearchFormService} from './search-form/search-form.service';
import {ObjectsListSetupService} from '../../objects-list/setup/objects-list-setup.service';
import {ExportService, ExportTypes} from './export/export.service';
import {AlertsService} from '../../alerts/alerts.service';
import {Subscription} from 'rxjs/Subscription';
import {PagingTypes} from '../../objects-list/objects-list.component';
import {DisplayTypes} from '../../objects-list/objects-list.component';
import {PreviewService} from '../../sip/preview/preview.service';
import {environment} from '../../../environments/environment';
// Main search screen: hosts the saved-search list, the folder tree and
// full-text search, and renders results with paging, sorting and export.
@Component({
  selector: 'app-searches',
  templateUrl: './searches.component.html',
  styleUrls: ['./searches.component.css'],
  animations: [
    // Left panel collapses from full width down to a 30px rail.
    trigger('panelWidthTrigger', [
      state('expanded', style({ width: '220px' })),
      state('collapsed', style({ width: '30px' })),
      transition('collapsed => expanded', animate('200ms ease-in')),
      transition('expanded => collapsed', animate('200ms 200ms ease-out'))
    ]),
    // Content margin tracks the panel width so the body never overlaps it.
    trigger('bodyMarginTrigger', [
      state('expanded', style({ marginLeft: '230px' })),
      state('collapsed', style({ marginLeft: '40px' })),
      transition('collapsed => expanded', animate('200ms ease-in')),
      transition('expanded => collapsed', animate('200ms 200ms ease-out'))
    ])
  ]
})
export class SearchesComponent implements OnInit, OnDestroy {
  public application: Application;                 // application resolved from the route
  public loadingSearches = true;                   // true while saved searches stream in
  public searches: Search[] = [];                  // saved searches for this application
  public selectedSearch: Search;                   // currently active saved search (null in folder/FT mode)
  public lastSelectedSearch: Search;               // deep copy of the last activated search
  public selectedFolder: CmisObject;               // currently open folder (null in search/FT mode)
  private selectedFolderPath: CmisObject[];        // folder chain, deepest first
  public lastSelectedFolderPath: CmisObject[];     // deep copy of the last opened folder chain
  private selectedSearchComposition: SearchComposition;
  public xForm: XForm;                             // search form definition for the active search
  public setupDisp = null;                         // display settings from ObjectsListSetupService
  public dataRows: SearchResultRow[];              // rows currently shown in the grid
  public dataTotalCount = -1;                      // -1 means "not yet known"
  public dataTotalCountLoading = false;            // true while the count query is in flight
  public rootId: string = null;                    // id of the root folder document
  public platforms = Platforms;                    // exposed for the template
  public isLoading = false;
  public tab1Active = true;
  public tab2Active = false;
  private folderData: CmisObject[];                // raw CMIS children of the open folder
  private pageNum = 1;
  private currentSearchRequest: string;            // last executed search query string
  private currentSearchFormData: any;              // form values to restore when reopening the search dialog
  private rootFolderColumns: ResultMasterPanelTabColumn[];
  private sortOptions: SortOptions;
  private defaultSortOptions: SortOptions;         // default sorting configured on the root folder
  private selectedRowsIds: string[];               // grid selection, used by export
  private onColChangeSubs: Subscription;           // settings-change subscription (torn down in ngOnDestroy)
  private pagingType: string;                      // PagingTypes.PAGES or CONTINUATION
  private displayType: string;
  private displayTypeRoot: string;                 // display type configured on the root folder
  private isLastPage = false;
  private autoRefreshTask: Subscription = null;    // periodic folder refresh (see onItemsPageChange)
  private rootFolder: Doc;
  breadCrumbs: { name: string, id: string, func?: any }[] = [];
  pageSize = 0;
  expanded = true;                                 // left panel expansion flag
  expandedState = 'expanded';                      // drives the animation triggers above
  menuMaxHeight = 0;
  ftQuery = '';                                    // current full-text query (bound from the template)
  ftAggregations: any;                             // facet aggregations from the last FT search
  constructor(private apiService: ApiService, private exportService: ExportService, private alertService: AlertsService,
              private route: ActivatedRoute, private listSetupService: ObjectsListSetupService,
              private sipService: SipService, private searchFormService: SearchFormService,
              private communicationService: CommunicationService, private translate: TranslateService,
              private previewService: PreviewService) {
  }
  /**
   * Resolves the application from the route, streams in its saved searches
   * (auto-selecting the first one), collapses the panel on narrow screens,
   * and subscribes to display-settings changes from the list-setup service.
   */
  ngOnInit() {
    const retrieveAppId = this.route.params.first()
      .map((params) => params['app_uuid']);
    retrieveAppId.flatMap((appId: string) => this.apiService.getApplicationInfo(appId))
      .subscribe((application: Application) => {
        this.application = application;
        this.communicationService.set('application', this.application);
        // NOTE(review): retrieveAppId is subscribed a second time here,
        // re-reading route params — confirm this double subscription is intended.
        retrieveAppId.flatMap((appId: string) =>
          this.apiService.getApplicationSearches(appId)
        ).subscribe(
          (search) => {
            this.searches.push(search);
            this.loadingSearches = false;
          },
          null,
          () => {
            // Stream completed: select the first search without loading content.
            this.loadingSearches = false;
            if (this.searches.length > 0) {
              this.selectSearch(this.searches[0], true);
            }
          }
        );
      });
    // Start with the panel collapsed on small screens.
    if (window.innerWidth < environment.minScreenSize) {
      this.expanded = false;
      this.expandedState = 'collapsed';
    }
    this.calcHeight();
    // Display settings changed in the setup dialog: pick the effective
    // display type (dialog > root-folder config > computed default) and reload.
    this.onColChangeSubs = this.listSetupService.getDispColumns().subscribe((setup) => {
      this.setupDisp = setup;
      if (setup.displayType && setup.displayType !== '') {
        this.displayType = setup.displayType;
      } else if (this.displayTypeRoot && this.displayTypeRoot !== '') {
        this.displayType = this.displayTypeRoot;
      } else {
        this.displayType = this.calcDisplayType();
      }
      this.onItemsPageChange(1);
    });
  }
  /** Tears down the display-settings subscription to avoid a leak. */
  ngOnDestroy(): void {
    if (this.onColChangeSubs) {
      this.onColChangeSubs.unsubscribe();
    }
  }
toggleExpandedState() {
this.expandedState = this.expanded ? 'collapsed' : 'expanded';
this.expanded = !this.expanded;
}
  /**
   * Activates a saved search: resets folder/full-text state, then loads the
   * search composition, its XForm (which decides the paging mode) and the
   * result master whose visible columns are pushed to the list-setup service.
   * @param search    the search to activate; no-op if already selected
   * @param noContent when true, skip the (currently disabled) initial load
   */
  selectSearch(search: Search, noContent = false) {
    if (this.selectedSearch && this.selectedSearch.uuid === search.uuid) { return; }
    // Deep copy so later mutation of `search` does not leak into the snapshot.
    this.lastSelectedSearch = JSON.parse(JSON.stringify(search));
    this.ftQuery = '';
    this.selectedFolder = null;
    this.selectedFolderPath = null;
    this.xForm = null;
    this.currentSearchFormData = null;
    this.dataRows = null;
    this.dataTotalCount = -1;
    this.pageNum = 1;
    this.sortOptions = undefined;
    this.isLoading = true;
    // this.communicationService.set('search-form-xform', Observable.of(null));
    this.selectedSearch = search;
    this.currentSearchRequest = null;
    this.currentSearchFormData = null;
    this.apiService.getSearchCompositions(search.uuid).first()
      .subscribe((sc: SearchComposition) => {
        this.selectedSearchComposition = sc;
        // A composition without an XForm cannot be queried — stop here.
        if (sc.xformUUID === null) {
          this.isLoading = false;
          return Observable.empty();
        }
        this.apiService.getXform(sc.xformUUID).subscribe((form: XForm) => {
          this.xForm = form;
          // The XForm decides whether results page classically or via
          // "load more" continuation.
          if (form.searchData.pagination === false) {
            this.pagingType = PagingTypes.CONTINUATION;
          } else {
            this.pagingType = PagingTypes.PAGES;
          }
          this.apiService.getResultMaster(sc.resultMasterUUID)
            .subscribe((v: ResultMaster) => {
              // Only non-hidden columns of the first main-panel tab are shown.
              const cols = v.getMainPanel().tabs[0].columns
                .filter((c: ResultMasterPanelTabColumn) => !c.hidden);
              this.initColumnsLinks(cols);
              this.listSetupService.setColumns(cols);
              if (!noContent) {
                // this.loadSearchContents();
              }
            });
        });
      });
  }
  /**
   * Switches the view to a folder selected in the tree. Resets search and
   * full-text state, restores the root-folder columns, and rebuilds the
   * breadcrumb trail (deepest folder is folders[0]); each crumb except the
   * current one gets a click handler that navigates back up the chain.
   * @param folders folder path, current folder first, ancestors after it
   */
  public onTreeFolderSelected(folders: CmisObject[]) {
    this.lastSelectedFolderPath = JSON.parse(JSON.stringify(folders));
    this.selectedSearch = null;
    this.selectedFolder = folders[0];
    this.selectedFolderPath = folders;
    this.dataRows = null;
    this.folderData = [];
    this.dataTotalCount = -1;
    this.pageNum = 1;
    this.sortOptions = undefined;
    this.isLoading = true;
    this.listSetupService.setColumns(this.rootFolderColumns);
    this.ftQuery = '';
    this.ftAggregations = null;
    this.pagingType = PagingTypes.PAGES;
    this.breadCrumbs = [];
    // Capture `this` for the crumb click closures below.
    const that = this;
    // Walk ancestors-first so crumbs render root → current folder.
    for (let i = folders.length - 1; i >= 0; i--) {
      this.putBreadCrumb({
        name: folders[i].name, id: folders[i].id, func: (i === 0 ? null : function (index: number) {
          // Clicking a crumb re-selects the sub-path ending at that folder.
          const bc = that.breadCrumbs[index];
          const newFolders = that.selectedFolderPath.slice(folders.length - index - 1);
          that.onTreeFolderSelected(newFolders);
        })
      }, true);
    }
  }
  /**
   * Called when the tree resolves the root folder. Captures default sorting
   * and display type configured on the root document, and determines the
   * folder-view columns: either those configured in rootDoc.data.fields, or
   * a translated fallback set (id, name, type, modified).
   * @param rootDoc the root folder document
   */
  public onRootFolderLoaded(rootDoc: Doc) {
    this.rootId = rootDoc.id;
    this.rootFolder = rootDoc;
    if (rootDoc.data && rootDoc.data.defaultSorting && rootDoc.data.defaultSorting.colName) {
      this.defaultSortOptions = rootDoc.data.defaultSorting;
    }
    if (rootDoc.data && rootDoc.data.displayType) {
      this.displayTypeRoot = rootDoc.data.displayType;
    }
    if (rootDoc.data && rootDoc.data.fields) {
      // Columns configured on the root folder take precedence.
      this.rootFolderColumns = rootDoc.data.fields;
      this.initColumnsLinks(this.rootFolderColumns);
    } else {
      // Wait for translations to be ready before building the fallback columns.
      this.translate.get('title').subscribe(() => {
        this.rootFolderColumns = [
          <ResultMasterPanelTabColumn>{
            name: CmisConstants.CMIS_PROP_ID,
            label: this.translate.instant('search.data.id'),
            hidden: false,
            type: ResultMasterColumnTypes.STRING,
            sortable: true
          },
          <ResultMasterPanelTabColumn>{
            name: CmisConstants.CMIS_PROP_NAME,
            label: this.translate.instant('search.data.title'),
            hidden: false,
            type: ResultMasterColumnTypes.STRING,
            sortable: true,
            link: true
          },
          <ResultMasterPanelTabColumn>{
            name: CmisConstants.CMIS_PROP_TYPE,
            label: this.translate.instant('search.data.type'),
            hidden: false,
            type: ResultMasterColumnTypes.STRING,
            sortable: true
          },
          <ResultMasterPanelTabColumn>{
            name: CmisConstants.CMIS_PROP_MODIFIED,
            label: this.translate.instant('search.data.modified'),
            hidden: false,
            type: ResultMasterColumnTypes.DATETIME,
            sortable: true
          }
        ]
      })
    }
  }
private initColumnsLinks(columns: ResultMasterPanelTabColumn[]): void {
if (this.application.platform !== Platforms.IA) {
// add link to columns if none were assigned in composition
const links = columns.filter((c: ResultMasterPanelTabColumn) => c.link);
if (links.length === 0) {
columns.filter((c: ResultMasterPanelTabColumn) => {
if (c.dataType !== ResultMasterColumnTypes.ICON) {
c.link = true;
}
return true;
});
}
} else {
columns.filter((c: ResultMasterPanelTabColumn) => {
if (c.type === ResultMasterColumnTypes.CONTENT) {
c.link = true;
}
return true;
});
}
}
  /**
   * Executes the current search and, on the first page, a parallel count
   * query. The two responses race: whichever piece of information is missing
   * when the other arrives is filled in from the data response or from the
   * XForm's configured default count.
   */
  private loadSearchContents() {
    let totalCount = -1;
    let obsCountFinished = false;
    const query = this.currentSearchRequest;
    let dtResponse = new Date();
    this.apiService.executeSearch(this.selectedSearchComposition.uuid, query, this.pageNum, this.pageSize, this.sortOptions)
      .subscribe((page: PagedList<SearchResultRow>) => {
        dtResponse = new Date();
        this.isLoading = false;
        // Page mode (or a fresh first page) replaces rows; continuation
        // mode appends them.
        if (this.pagingType === PagingTypes.PAGES || !this.dataRows || this.pageNum === 1) {
          this.dataRows = page.data;
        } else {
          this.dataRows = this.dataRows.concat(page.data);
        }
        this.isLastPage = page.isLast;
        if (totalCount < 0 ) {
          totalCount = page.total;
          if (obsCountFinished) { // if no count yet but the count query already finished, take it from the data response
            this.dataTotalCount = totalCount;
            this.dataTotalCountLoading = false;
            if (!query || query === '') { // empty search query: fall back to the configured default count
              if (this.xForm.searchData.default_count) {
                this.dataTotalCount = this.xForm.searchData.default_count;
              }
            }
          }
        }
        this.putBreadCrumb({name: this.selectedSearch.name, id: null});
      });
    // Only the first page triggers the (potentially expensive) count query.
    if (!this.pageNum || this.pageNum === 1) {
      this.dataTotalCountLoading = true;
      this.dataTotalCount = 0;
      this.apiService.getSearchResultsCount(this.selectedSearchComposition.uuid, query)
        .subscribe((count) => {
          this.dataTotalCount = count;
          totalCount = count;
          this.dataTotalCountLoading = false;
          obsCountFinished = true;
        }, (err) => {
          console.log(err)
          if (totalCount > -1) { // if the data already came back, take the count from there
            this.dataTotalCount = totalCount;
            this.dataTotalCountLoading = false;
          }
          if (!query || query === '') { // empty search query: fall back to the configured default count
            if (this.xForm.searchData.default_count) {
              this.dataTotalCount = this.xForm.searchData.default_count;
            }
          }
          totalCount = this.dataTotalCount;
          obsCountFinished = true;
        });
    }
  }
private getPageSize(): number {
let pageSize = this.pageSize;
if (this.displayType === DisplayTypes.TILES) {
pageSize = 30;
} else if (this.pagingType === PagingTypes.CONTINUATION) {
pageSize = 100;
} else {
pageSize = this.setupDisp.pageSize;
}
return pageSize;
}
  /**
   * Runs the full-text search, optionally narrowed by facet selections, and
   * maps the hits into grid rows; facet aggregations from the response are
   * kept for the facet panel.
   * @param filter optional facet terms as {name: column label, value}
   */
  private loadFTSearchContents(filter?: {name: string, value: string}[]) {
    const params = {
      query: {
      },
      aggregations: {}
    };
    if (!filter) {
      // No facets selected: plain match over all indexed fields.
      params.query['match'] = {
        _all : this.ftQuery
      }
    } else {
      // Facets selected: bool query so facet terms can be ANDed in below.
      params.query['bool'] = {
        must: {
          match: {
            _all: this.ftQuery
          }
        },
        filter: {
          bool: {
            must: []
          }
        }
      }
    }
    if (this.rootFolderColumns) {
      this.rootFolderColumns.filter( (col: ResultMasterPanelTabColumn) => {
        if (col.faceted) {
          // Request a terms aggregation for every faceted column.
          params.aggregations[col.label] = {
            terms: {field: 'data.' + col.name + '.keyword'}
          }
          if (filter) {
            // Translate the selected facet values into terms filters.
            for (let i = 0; i < filter.length; i++) {
              if (col.label === filter[i].name) {
                const obj = {terms: {}};
                obj.terms['data.' + col.name + '.keyword'] = [filter[i].value];
                params.query['bool'].filter.bool.must.push(obj);
              }
            }
          }
        }
      })
    }
    let obs: Observable<any>;
    if (this.application.useFTSearch === 'direct') {
      // Direct mode sends the raw query DSL to the search backend.
      obs = this.apiService.executeFTSearchDirect(params, this.pageNum, this.pageSize,
        this.sortOptions && this.sortOptions.colName ? this.sortOptions : null);
    } else {
      // Proxy mode: query text and rootId travel as separate fields,
      // with the DSL demoted to a filter.
      if (filter) {
        params['filter'] = params.query;
      }
      params['query'] = this.ftQuery;
      params['rootId'] = this.rootId;
      obs = this.apiService.executeFTSearch(params, this.pageNum, this.pageSize,
        this.sortOptions && this.sortOptions.colName ? this.sortOptions : null);
    }
    obs.subscribe((result) => {
      this.isLoading = false;
      this.dataTotalCount = result.docs.total;
      // Each hit is converted Doc -> CmisObject -> SearchResultRow.
      this.dataRows = result.docs.data.map((obj: Doc) => {
        const cmis = this.mapDocToCmisObj(obj);
        return this.mapCmisObjectToResultSet(cmis);
      }
      );
      if (result.aggregations) {
        this.ftAggregations = result.aggregations;
        this.tab1Active = true;
        this.tab2Active = false;
      } else {
        this.ftAggregations = null;
      }
    });
  }
  /** Opens the SIP dialog to create a document in the current folder
   * (or at the root when no folder is selected); refreshes on success. */
  public createDoc(): void {
    this.sipService.create(this.selectedFolder ? this.selectedFolder.id : null, this.application).subscribe((id: string) => {
      if (id) {
        this.refresh();
      }
    });
  }
  /** Same as createDoc but creates a 'folder' object. */
  public createFolder(): void {
    this.sipService.create(this.selectedFolder ? this.selectedFolder.id : null, this.application, null, 'folder')
      .subscribe((id: string) => {
        if (id) {
          this.refresh();
        }
      });
  }
  /** Opens the currently selected folder in the SIP dialog for editing;
   * refreshes when the dialog reports a change. */
  public editFolder(): void {
    this.sipService.open(this.selectedFolder.id, this.application, this.selectedFolder.type, null, true )
      .subscribe((result) => {
        if (result) {
          this.refresh()
        }
      });
  }
  /**
   * Projects a CmisObject onto the configured root-folder columns, producing
   * a grid row. Column values come from the object itself, then its data
   * map; folders get a synthetic 'folder' value for mime/filename columns.
   * A 'type' column is always present so row actions can dispatch on it.
   * @param obj the CMIS object to convert
   */
  private mapCmisObjectToResultSet(obj: CmisObject): SearchResultRow {
    const row: any = {id: obj.id};
    row.columns = [];
    this.rootFolderColumns.map((col: ResultMasterPanelTabColumn) => {
      if (obj[col.name]) {
        row.columns.push({name: col.name, value: obj[col.name]});
      } else if (obj.data && obj.data[col.name]) {
        row.columns.push({name: col.name, value: obj.data[col.name]});
      } else if (col.name === 'fileMimeType' || col.name === 'cmis:contentStreamFileName') {
        // Folders have no content stream; show them as 'folder' instead.
        if (obj.type === 'folder' || obj.baseType === 'cmis:folder') {
          row.columns.push({name: col.name, value: 'folder'});
        }
      }
    });
    const result = new SearchResultRow(row);
    if (!result.columns.has('type')) {
      result.columns.set('type', new SearchResultRowColumn({name: 'type', value: obj.type, rows: null}));
    }
    return result;
  }
private mapDocToCmisObj(doc: Doc): CmisObject {
const cmis = new CmisObject();
cmis.id = doc.id;
cmis.type = doc.type;
cmis.name = doc.title;
cmis.baseType = doc.baseType;
cmis.description = doc.description;
cmis.lastModified = doc.lastModified;
cmis.data = doc.data;
cmis.data[CmisConstants.CMIS_PROP_FILE_NAME] = doc.fileName;
cmis.data[CmisConstants.CMIS_PROP_NAME] = doc.title;
cmis.data[CmisConstants.CMIS_PROP_BASETYPE] = doc.baseType;
cmis.data[CmisConstants.CMIS_PROP_TYPE] = doc.type;
cmis.data[CmisConstants.CMIS_PROP_ID] = doc.id;
cmis.data[CmisConstants.CMIS_PROP_MODIFIED] = doc.lastModified;
return cmis;
}
  /**
   * Loads one page of children of the selected folder via CMIS. Uses the
   * explicit sort options when set, otherwise the root folder's default
   * sorting; keeps the raw CmisObjects for onItemOpen's folder detection.
   */
  private loadFolderContents(): void {
    let sortOptions = (this.sortOptions && this.sortOptions.colName ? this.sortOptions : null);
    if (!sortOptions && this.defaultSortOptions && this.defaultSortOptions.colName) {
      sortOptions = this.defaultSortOptions;
    }
    this.apiService.getCmisData(this.selectedFolder.id, '', this.pageSize, this.pageNum, sortOptions)
      .subscribe((page: PagedList<CmisObject>) => {
        this.isLoading = false;
        this.folderData = page.data;
        this.dataTotalCount = page.total;
        this.dataTotalCountLoading = false;
        this.dataRows = page.data.map((obj: CmisObject) =>
          this.mapCmisObjectToResultSet(obj)
        );
      });
  }
public refresh(): void {
this.onItemsPageChange(this.pageNum)
}
public onItemOpen(link: {row: SearchResultRow, col: ResultMasterPanelTabColumn, preview?: boolean}) {
if (this.application.platform !== Platforms.IA) {
if (link.preview) {
const fileName = link.row.get(link.col.name).value;
this.previewService.launch(link.row.id, fileName);
return;
}
if (this.folderData && this.folderData.length) {
for (let i = 0; i < this.folderData.length; i++) {
if (this.folderData[i].id === link.row.id) {
if (this.folderData[i].baseType === CmisConstants.CMIS_TYPE_FOLDER) {
let foldersPath = [this.folderData[i]];
if (this.selectedFolderPath && this.selectedFolderPath.length > 0) {
foldersPath = foldersPath.concat(this.selectedFolderPath);
}
this.onTreeFolderSelected(foldersPath);
} else {
this.sipService.open(this.folderData[i].id, this.application, this.folderData[i].type)
.subscribe((result) => {
if (result) {
this.refresh()
}
});
}
return;
}
}
};
this.sipService.open(link.row.id, this.application,
link.row.columns.has('type') ? link.row.columns.get('type').value : null
)
.subscribe((result) => {
if (result) {
this.refresh()
}
});
} else {
this.apiService.downloadContent(this.application.uuid, link.row.get(link.col.name).value);
}
}
  /**
   * Inline-edit handler: fetches the document behind the edited row, writes
   * the new value onto the matching top-level property (or into its data
   * map otherwise), saves it, and refreshes the list on success.
   * @param eventData edited row, edited column, and the new cell value
   */
  public onItemColumnEdit(eventData: {row: SearchResultRow, col: ResultMasterPanelTabColumn, newValue: string}) {
    const id = eventData.row.id;
    const type = eventData.row.columns.get('type').value;
    const that = this;
    this.apiService.getDocument(id, type).subscribe((doc: Doc) => {
      // Prefer a real top-level property; fall back to the data map.
      if (doc.hasOwnProperty(eventData.col.name)) {
        doc[eventData.col.name] = eventData.newValue
      } else {
        doc.data[eventData.col.name] = eventData.newValue
      }
      that.apiService.saveDocumentData(doc, that.selectedFolder ? that.selectedFolder.id : null).subscribe((result) => {
        if (result) {
          that.refresh()
        }
      })
    });
  }
public onSort(options: SortOptions) {
if (this.dataTotalCount === 0) { return }
this.sortOptions = options;
this.onItemsPageChange(1);
}
  /**
   * Central reload entry point: clears selection, recomputes page size,
   * cancels any pending auto-refresh, then dispatches to the loader that
   * matches the current mode (folder > saved search > full-text).
   * @param pageNum 1-based page to load
   */
  public onItemsPageChange(pageNum: number): void {
    this.selectedRowsIds = [];
    this.pageNum = pageNum;
    this.pageSize = this.getPageSize();
    if (!!this.autoRefreshTask) { this.autoRefreshTask.unsubscribe(); }
    if (this.selectedFolder) {
      this.loadFolderContents();
      // Folder view re-arms periodic auto-refresh.
      this.initAutoRefresh();
    } else if (this.selectedSearch) {
      this.loadSearchContents();
    } else if (this.ftQuery !== '') {
      this.loadFTSearchContents()
    }
  }
  /**
   * Opens the search form dialog for the active search, pre-filled with the
   * previous form data; on submit, stores the query/form state, collapses
   * the panel, and loads page one of the results.
   */
  public openSearch(): void {
    this.searchFormService.open(this.xForm, this.selectedSearch.name, this.currentSearchFormData)
      .subscribe((query) => {
        this.isLoading = true;
        this.dataRows = null;
        this.dataTotalCount = -1;
        this.pageNum = 1;
        this.currentSearchRequest = query.query;
        // Keep the raw form values so reopening the dialog restores them.
        this.currentSearchFormData = query.formData;
        if (this.expanded) {
          this.toggleExpandedState();
        }
        this.loadSearchContents();
      });
  }
/**
 * Enter full-text search mode: drop any folder / saved-search context,
 * reset paging and sorting, restore the root columns and push the query
 * onto the breadcrumb trail.
 * NOTE(review): the breadcrumb reads this.ftQuery rather than the `query`
 * argument — presumably both are bound to the same input field; verify.
 */
public ftSearch(query: string) {
  if (query && query !== '') {
    this.selectedFolder = null;
    this.selectedFolderPath = null;
    this.selectedSearch = null;
    this.isLoading = true;
    this.dataRows = null;
    this.dataTotalCount = -1;
    this.pageNum = 1;
    this.sortOptions = undefined;
    this.pagingType = PagingTypes.PAGES;
    this.listSetupService.setColumns(this.rootFolderColumns);
    this.putBreadCrumb({name: this.ftQuery, id: null });
  }
}
/** Open the list column settings dialog; continuation paging alters the options shown. */
public setupColumns() {
  this.listSetupService.changeSettings(this.pagingType === PagingTypes.CONTINUATION);
}
/**
 * Open the export dialog for the current context.
 * @param selected when true export only the checked rows, otherwise export
 *        the whole active folder / saved search / full-text result set.
 */
public export(selected = false) {
  const params = {application: this.application, type: null, data: null, dataName: null};
  if (selected) {
    if (this.selectedRowsIds && this.selectedRowsIds.length > 0) {
      params.type = ExportTypes.EXPORT_SELECTED;
      // Saved searches export against their composition; folders against the root id.
      params.data = {compositionId: this.selectedSearch ? this.selectedSearchComposition.uuid : this.rootId, selectedIds: this.selectedRowsIds};
    } else {
      // Nothing checked: tell the user instead of opening an empty export.
      this.alertService.info({text: 'export.nothing', title: 'export.title'})
      return;
    }
  } else {
    if (this.selectedFolder) {
      params.type = ExportTypes.EXPORT_FOLDER;
      params.data = this.selectedFolder.id;
      params.dataName = this.selectedFolder.name;
    } else if (this.selectedSearch) {
      params.type = ExportTypes.EXPORT_SEARCH;
      params.data = {compositionId: this.selectedSearchComposition.uuid, query: this.currentSearchRequest, sortOptions: this.sortOptions};
      params.dataName = this.selectedSearch.name;
    } else if (this.ftQuery !== '') {
      params.type = ExportTypes.EXPORT_FTSEARCH;
      params.data = this.ftQuery;
      params.dataName = this.ftQuery;
    } else {
      // No active context at all — nothing to export.
      this.alertService.info({text: 'export.nothing', title: 'export.title'})
      return
    }
  }
  this.exportService.openDialog(params)
}
/** Remember which result rows are currently checked. */
public onRowsSelected(selectedIds) {
  this.selectedRowsIds = selectedIds;
}
/** Re-run the full-text search restricted to the chosen aggregations (facets). */
public onAggregationSelected(agr: {name: string, value: string}[]) {
  this.loadFTSearchContents(agr);
}
/**
 * Re-open the folder the user last visited, lazily loading the tree root
 * on first use.
 * @throws Error when neither a last path, a root id, nor a loaded root
 *         folder is available.
 */
public openLastFolder() {
  if (this.lastSelectedFolderPath) {
    this.onTreeFolderSelected(this.lastSelectedFolderPath);
  } else if (!this.rootId) {
    console.log('loading root folder...');
    // Arrow callbacks capture `this`, so no `that = this` aliasing is needed.
    this.apiService.getApplicationTreeRoot(this.application.uuid).subscribe(rootId => {
      this.apiService.getDocument(rootId).subscribe((rootDoc: Doc) => {
        this.onRootFolderLoaded(rootDoc);
        const root = this.mapDocToCmisObj(rootDoc);
        root.hasChildren = true;
        this.onTreeFolderSelected([root]);
      });
    });
  } else if (this.rootFolder) {
    const root = this.mapDocToCmisObj(this.rootFolder);
    root.hasChildren = true;
    this.onTreeFolderSelected([root]);
  } else {
    // Fixed: throw a real Error (not a bare string) so stack traces and
    // `instanceof Error` checks work for callers.
    throw new Error('Root data is unavailable!');
  }
}
/** Re-open the most recently used saved search, if any. */
public openLastSearch() {
  if (this.lastSelectedSearch) {
    this.selectSearch(this.lastSelectedSearch);
  }
}
/** Recompute the menu's maximum height from the window height, floored at 200px. */
private calcHeight() {
  this.menuMaxHeight = Math.max(200, window.innerHeight - 160);
}
/**
 * Put a crumb on the breadcrumb trail.
 * @param crumb  the entry to add
 * @param append when true extend the existing trail, otherwise start a new one
 */
private putBreadCrumb(crumb: {name: string, id: string, func?: any}, append = false ) {
  if (!append) {
    this.breadCrumbs = [];
  }
  this.breadCrumbs.push(crumb);
}
/**
 * Start periodic reloading of the current folder. Only active on the first
 * page and when an auto-refresh interval (in seconds) is configured.
 */
private initAutoRefresh() {
  if (this.pageNum === 1 && this.setupDisp.autoRefresh > 0) {
    this.autoRefreshTask = Observable.interval(this.setupDisp.autoRefresh * 1000)
      .subscribe(
        () => {
          this.loadFolderContents();
        },
        error => {console.log(error)});
  }
}
/** Choose the tile layout on narrow screens, the full table otherwise. */
private calcDisplayType(): string {
  return window.innerWidth < environment.minScreenSize
    ? DisplayTypes.TILES
    : DisplayTypes.TABLE;
}
/** Keep the menu height in sync with the window size. */
@HostListener('window:resize', ['$event'])
onResize(event) {
  this.calcHeight();
}
}
|
<reponame>ocamler/expense-www<gh_stars>1-10
import $ from 'jquery';
import React, { Component } from 'react';
import { connect } from 'react-redux';
// Connected, anonymous component: pulls the current location name from the
// redux store and renders the "store for later" / "next" action buttons.
@connect(
  state => ({
    location_name: state.location_name
  })
)
export default class extends Component {
  render() {
    const { location_name } = this.props;
    // Buttons are disabled while the location name is blank;
    // $.trim tolerates null/undefined.
    const disabled = !$.trim(location_name);
    return (
      <p className="text-center">
        <input id="noGPS" type="checkbox" />
        {' '}
        <label htmlFor="noGPS">{'Don\'t track GPS for this entry'}</label><br />
        <button id="geotag"
          className="btn btn-lg btn-default"
          type="button"
          disabled={disabled} >
          <i className="fa fa-floppy-o fa-lg no-fa" aria-hidden="true"></i>
          {/* non-breaking spaces keep the label on one line */}
          {' Store for later'.replace(/ /g, "\u00a0")}
        </button>
        <button id="next"
          className="btn btn-lg btn-default"
          type="button"
          disabled={disabled} >
          {'Next '.replace(/ /g, "\u00a0")}
          <i className="fa fa-caret-right fa-lg no-fa" aria-hidden="true"></i>
        </button>
      </p>
    )
  }
}
|
<reponame>seawindnick/javaFamily<gh_stars>1-10
package com.java.study.algorithm.zuo.cadvanced.advanced_class_03;

/**
 * Morris traversal: pre-order, in-order and post-order binary-tree traversal
 * in O(N) time with O(1) extra space (no stack, no recursion).
 * TODO: implementation not yet written.
 */
public class Code_01_MorrisTraversal {
}
def knapsack(weights, values, max_weight):
    """Greedy knapsack heuristic: repeatedly take the item with the best
    value/weight ratio that still fits.

    Note: this is a heuristic, not an exact 0/1-knapsack solver.

    Fixes over the original version:
    - the caller's ``weights``/``values`` lists are no longer mutated;
    - ``bag`` now holds indices into the *original* lists (the old code
      recorded indices into a list it was deleting from, and shrank the
      candidate window from the end instead of discarding the item that
      was too heavy).

    :param weights: item weights (must be positive)
    :param values: item values, parallel to ``weights``
    :param max_weight: knapsack capacity
    :return: ``(bag, value)`` — chosen original indices and their total value
    """
    # Work on (original_index, weight, value) triples so removals do not
    # invalidate the reported indices.
    items = [(i, w, v) for i, (w, v) in enumerate(zip(weights, values))]
    bag = []
    value = 0
    remaining = max_weight
    while remaining > 0 and items:
        # Position (within `items`) of the best value/weight ratio.
        best = max(range(len(items)), key=lambda k: items[k][2] / items[k][1])
        idx, w, v = items.pop(best)
        if w > remaining:
            # Too heavy to fit: discard this item and keep looking.
            continue
        bag.append(idx)
        value += v
        remaining -= w
    return bag, value
# Example run of the greedy knapsack heuristic.
# (Also removes the stray dataset-residue "|" that was fused to the last
# line and made the module a syntax error.)
weights = [4, 2, 3, 2]
values = [10, 2, 4, 7]
max_weight = 7
bag, value = knapsack(weights, values, max_weight)
<filename>src/pages/index.js
import Link from 'gatsby-link';
import PropTypes from 'prop-types';
import React, { PureComponent } from 'react';
import { css } from 'glamor';
import { COLORS } from 'theme';
// Glamor style objects for the index-page post grid.

// Outer flex wrapper centring the post grid.
const container = css({
  marginTop: '30px',
  display: 'flex',
  justifyContent: 'center',
  width: '100%',
});
// Constrained, wrapping row of post cards.
const postWrapper = css({
  maxWidth: '1170px',
  display: 'flex',
  flexWrap: 'wrap',
  justifyContent: 'center',
  width: 'inherit',
  padding: '0 15px',
  // '@media only screen and (max-width: 768px)': {
  //   flexDirection: 'column'
  // }
});
// A single post card column.
const postContainer = css({
  minWidth: '260px',
  maxWidth: '360px',
  width: '100%',
  display: 'flex',
  flexDirection: 'column',
  padding: '0 15px',
  marginBottom: '30px',
  // '@media only screen and (max-width: 768px)': {
  //   maxWidth: '100%'
  // }
});
// Thumbnail area; reveals the overlay child on hover.
const featuredImage = css({
  height: '360px',
  width: '100%',
  backgroundRepeat: 'no-repeat',
  backgroundSize: 'cover',
  backgroundPosition: 'center top',
  '&:hover > div': {
    opacity: 1
  }
});
// Dark hover overlay shown on top of the thumbnail.
const featuredImageOverlay = css({
  display: 'flex',
  justifyContent: 'center',
  alignItems: 'center',
  backgroundColor: 'rgba(0,0,0,0.7)',
  width: 'inherit',
  height: 'inherit',
  opacity: 0,
  transition: 'opacity 300ms',
});
// Chevron icon inside the hover overlay.
const featuredImageIcon = css({
  color: 'white'
});
// Card body below the thumbnail.
const contentWrapper = css({
  backgroundColor: '#EEEEED',
  padding: '20px 40px 40px 40px'
});
// Post date line.
const dateStyles = css({
  fontSize: '12px',
  textTransform: 'uppercase',
  lineHeight: '33px',
  letterSpacing: '2px',
  fontFamily: 'Montserrat, sans-serif'
});
// Post title.
const headingStyles = css({
  fontFamily: 'Prata, serif',
  fontWeight: '400',
  fontSize: '26px',
  lineHeight: '48px',
  color: 'black',
});
// Post description / excerpt text.
const descriptionStyles = css({
  fontFamily: 'Prata, serif',
  fontWeight: '400',
  fontSize: '15px',
  color: 'rgb(102, 102, 102)',
  marginBottom: '1em',
  paddingTop: '10px',
  lineHeight: '28px'
});
// Styles for the "READ MORE" link.
const linkStyles = css({
  // Fixed: 'uppercase' is not a valid text-decoration value (browsers were
  // ignoring the declaration); textTransform matches the intent and the
  // sibling dateStyles. The link text is already upper-case, so rendering
  // is unchanged.
  textTransform: 'uppercase',
  fontFamily: 'Montserrat, sans-serif',
  lineHeight: '30px',
  letterSpacing: '2px',
  fontSize: '12px',
  color: COLORS.LANA_GREEN,
  marginTop: '20px',
  display: 'block',
  minHeight: '42px'
});
/**
 * Index page: renders every markdown post with templateKey 'blog-post'
 * as a linked card (thumbnail, date, title, description).
 */
class Posts extends PureComponent {
  static propTypes = {
    data: PropTypes.shape({
      allMarkdownRemark: PropTypes.shape({
        edges: PropTypes.array
      })
    })
  };
  // Render one post card from a query edge. Does not read `this`, so
  // passing it unbound to .map() in render() is safe.
  // NOTE(review): `excerpt` is destructured but never used — confirm
  // whether it was meant to back a missing description.
  mapPostItem (item) {
    const { node: post } = item;
    const {
      id,
      fields: {
        slug
      },
      frontmatter: {
        title,
        date,
        description,
        thumbnail
      },
      excerpt
    } = post;
    /*
    post = {
      id,
      fields: {
        slug
      },
      frontmatter: {
        title,
        date
      },
      excerpt,
    }
    */
    return (
      <div key={id} {...postContainer}>
        <Link to={slug}>
          <div {...featuredImage} style={{ backgroundImage: `url(${thumbnail})` }}>
            <div {...featuredImageOverlay}>
              <div {...featuredImageIcon}>{'>'}</div>
            </div>
          </div>
        </Link>
        <div {...contentWrapper}>
          <div {...dateStyles}>{date}</div>
          <div {...headingStyles}>{title}</div>
          <div {...descriptionStyles}>{description}</div>
          <Link to={slug} {...linkStyles}>READ MORE</Link>
        </div>
      </div>
    );
  }
  render () {
    const { data } = this.props;
    const { edges: posts } = data.allMarkdownRemark;
    return (
      <div {...container}>
        <div {...postWrapper}>
          {/*<div className="content">*/}
          {/*<h1 className="has-text-weight-bold is-size-2">Latest Stories</h1>*/}
          {/*</div>*/}
          {posts
            .filter(post => post.node.frontmatter.templateKey === 'blog-post')
            .map(this.mapPostItem)}
        </div>
      </div>
    )
  }
}
// Gatsby page query (v1-style: `graphql` is injected as a build-time global,
// hence no import). Supplies `data.allMarkdownRemark` to the page component,
// newest posts first.
export const pageQuery = graphql`
  query IndexQuery {
    allMarkdownRemark(sort: { order: DESC, fields: [frontmatter___date] }) {
      edges {
        node {
          excerpt(pruneLength: 400)
          id
          fields {
            slug
          }
          frontmatter {
            title
            description
            thumbnail
            templateKey
            date(formatString: "MMMM DD, YYYY")
          }
        }
      }
    }
  }
`;
|
#!/bin/bash
set -e  # exit on any error

# Headless VirtualBox 4.3 install, condensed from:
# http://www.howtoforge.com/vboxheadless-running-virtual-machines-with-virtualbox-4.1-on-a-headless-ubuntu-12.04-server

EXTENSION_PACK_URL='http://download.virtualbox.org/virtualbox/4.3.4/Oracle_VM_VirtualBox_Extension_Pack-4.3.4-91027.vbox-extpack'

# Register Oracle's apt repository and its signing key.
echo 'deb http://download.virtualbox.org/virtualbox/debian precise contrib' | sudo tee --append /etc/apt/sources.list
wget -q http://download.virtualbox.org/virtualbox/debian/oracle_vbox.asc -O- | sudo apt-key add -
sudo apt-get update
sudo apt-get --no-install-recommends --yes install linux-headers-$(uname -r) build-essential dkms virtualbox-4.3

# Install the matching extension pack (USB2, RDP, ...).
cd /tmp
wget "$EXTENSION_PACK_URL"
sudo VBoxManage extpack install "$(basename "$EXTENSION_PACK_URL")"

# Fixed: `id -u` prints the numeric UID, but adduser expects a login name;
# `id -un` adds the current user to the vboxusers group as intended.
sudo adduser "$(id -un)" vboxusers
|
#!/bin/bash
# Basic ufw setup for a LAMP host: limited SSH from the local network,
# open HTTP/HTTPS, and MySQL only from one trusted address.

# Prompt colors (normalized to \033 — `echo -e` does not expand \uXXXX
# escapes on older bash versions, so the old GREEN/YELLOW printed literally).
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
NC='\033[0m'

# Check if user is root.
if [ "$EUID" -ne 0 ]
  then echo -e "${RED}-Run as Root-${NC}"
  exit 1
fi

# Print out IP addresses.
ifconfig

# Prompt user for IP and network address.
echo "Enter your ip address:"
read -r ip_address
echo "Enter your network address:"
read -r net_address

# Allow connections (22, 80, 443, 3306).
ufw limit from "$net_address" to any app "OpenSSH"
ufw allow "Apache Full"
ufw allow from "$ip_address" to any port 3306

# Default policies (deny in, allow out).
ufw default deny incoming
# Fixed: "outoing" was a typo, so the outgoing default policy was never set.
ufw default allow outgoing

echo -e "${GREEN}-Firewall Set-${NC}"
|
<gh_stars>10-100
/**
 * Created by FDD on 2017/10/12.
 * @desc Custom overview ("bird's-eye") map control.
 */
import ol from 'openlayers';
import { BASE_CLASS_NAME, OVERVIEWMAP } from '../constants';
import * as htmlUtils from '../utils/dom';
import * as Events from '../utils/events';
/**
 * Custom overview ("bird's-eye") map control.
 * @param {Object} options collapsed/collapsible flags, className, view,
 *        optional layers/render callback and target element.
 * @constructor
 * @extends {ol.control.Control}
 */
ol.control.OverviewMapH = function (options = {}) {
  /**
   * Whether the control starts collapsed.
   * @type {boolean}
   * @private
   */
  this.collapsed_ = options.collapsed !== undefined ? options.collapsed : true;
  /**
   * Whether the control can be collapsed at all.
   * @private
   * @type {boolean}
   */
  this.collapsible_ =
    options.collapsible !== undefined ? options.collapsible : true;
  if (!this.collapsible_) {
    this.collapsed_ = false;
  }
  let className =
    options.className !== undefined ? options.className : 'hmap-overview-map';
  let element = htmlUtils.create(
    'div',
    className + ' ' + BASE_CLASS_NAME.CLASS_UNSELECTABLE
  );
  /**
   * Container the mini-map renders into.
   * @type {Element}
   * @private
   */
  this.ovmapDiv_ = htmlUtils.create('div', 'hmap-overview-map-target', element);
  /**
   * Collapse/expand toggle button (only created when collapsible).
   * @type {Element}
   * @private
   */
  if (this.collapsible_) {
    this.collapsElement_ = htmlUtils.create(
      'div',
      'hmap-overview-map-button',
      element
    );
    Events.listen(this.collapsElement_, 'click', this.handleClick_, this);
  }
  /**
   * The mini-map itself, with no default controls or interactions.
   * @type {ol.Map}
   * @private
   */
  this.ovmap_ = new ol.Map({
    controls: new ol.Collection(),
    interactions: new ol.Collection(),
    view: options.view
  });
  let render = options.render ? options.render : ol.control.OverviewMapH.render;
  ol.control.Control.call(this, {
    element: element,
    render: render,
    target: options.target
  });
  this.addBoxControl_();
};
ol.inherits(ol.control.OverviewMapH, ol.control.Control);
/**
 * Add the layers supplied in `options` to the overview map.
 * @param options
 * @private
 */
ol.control.OverviewMapH.prototype.addOptionLayers_ = function (options) {
let ovmap = this.ovmap_;
if (options.layers) {
options.layers.forEach(function (layer) {
ovmap.addLayer(layer);
}, this);
}
};
/**
 * Offset a mouse position so the dragged extent box stays centred under the cursor.
 * @param mousePosition
 * @returns {{clientX: number, clientY: *}}
 */
ol.control.OverviewMapH.computeDesiredMousePosition = function (
mousePosition,
overlayBox
) {
return {
clientX: mousePosition.clientX - overlayBox.offsetWidth / 2,
clientY: mousePosition.clientY + overlayBox.offsetHeight / 2
};
};
/**
 * Handle drag-move of the extent box: follow the cursor on the mini-map.
 * @private
 */
ol.control.OverviewMapH.prototype.move_ = function (event) {
const overlayBox = this.boxOverlay_.getElement();
let coordinates = this.ovmap_.getEventCoordinate(
ol.control.OverviewMapH.computeDesiredMousePosition(event, overlayBox)
);
this.boxOverlay_.setPosition(coordinates);
};
/**
 * Finish a drag: recenter the host map on the drop point and detach the
 * temporary window listeners.
 * @param event
 * @private
 */
ol.control.OverviewMapH.prototype.endMoving_ = function (event) {
let coordinates = this.ovmap_.getEventCoordinate(event);
this.getMap()
.getView()
.setCenter(coordinates);
Events.unListen(window, 'mousemove', this.move_, this);
Events.unListen(window, 'mouseup', this.endMoving_, this);
};
/**
 * Attach window-level mousemove/mouseup listeners while the box is dragged.
 * @private
 */
ol.control.OverviewMapH.prototype.addEvent_ = function () {
Events.listen(window, 'mousemove', this.move_, this);
Events.listen(window, 'mouseup', this.endMoving_, this);
};
/**
 * Create the draggable extent-box overlay shown on the mini-map.
 * @private
 */
ol.control.OverviewMapH.prototype.addBoxControl_ = function () {
let box = htmlUtils.create('div', 'hmap-overview-map-box');
Events.listen(box, 'mousedown', this.addEvent_, this);
this.boxOverlay_ = new ol.Overlay({
position: [0, 0],
positioning: 'bottom-left',
element: box
});
this.ovmap_.addOverlay(this.boxOverlay_);
};
/**
 * Attach the control to a map (or detach by passing a falsy value).
 * Unbinds the previous map/view, re-targets the mini-map and, when the
 * view is ready, sizes it and fits the extent.
 * @param {ol.Map} map the host map.
 */
ol.control.OverviewMapH.prototype.setMap = function (map) {
  let oldMap = this.getMap();
  if (map === oldMap) {
    return;
  }
  if (oldMap) {
    let oldView = oldMap.getView();
    if (oldView) {
      this.unbindView_(oldView);
    }
    this.ovmap_.setTarget(null);
  }
  ol.control.Control.prototype.setMap.call(this, map);
  if (map) {
    this.ovmap_.setTarget(this.ovmapDiv_);
    Events.listen(map, 'propertychange', this.handleMapPropertyChange_, this);
    // Reuse the host map's layers unless layers were supplied explicitly.
    if (this.ovmap_.getLayers().getLength() === 0) {
      this.ovmap_.setLayerGroup(map.getLayerGroup());
    }
    let view = map.getView();
    if (view) {
      this.bindView_(view);
      if (this.isDef(view)) {
        this.ovmap_.updateSize();
        this.resetExtent_();
      }
    }
  }
};
/**
 * Whether the view has both a center and a resolution defined.
 * @param view
 * @returns {boolean}
 */
ol.control.OverviewMapH.prototype.isDef = function (view) {
return !!view.getCenter() && view.getResolution() !== undefined;
};
/**
 * React to the host map's `view` property changing by rebinding the
 * rotation listeners to the new view.
 * @param event
 * @private
 */
ol.control.OverviewMapH.prototype.handleMapPropertyChange_ = function (event) {
if (event.key === 'view') {
let oldView = event.oldValue;
if (oldView) {
this.unbindView_(oldView);
}
let newView = this.getMap().getView();
this.bindView_(newView);
}
};
/**
 * Start listening for rotation changes on the given view.
 * @param view
 * @private
 */
ol.control.OverviewMapH.prototype.bindView_ = function (view) {
Events.listen(view, 'change:rotation', this.handleRotationChanged_, this);
};
/**
 * Stop listening for rotation changes on the given view.
 * @param view
 * @private
 */
ol.control.OverviewMapH.prototype.unbindView_ = function (view) {
Events.unListen(view, 'change:rotation', this.handleRotationChanged_, this);
};
/**
 * Mirror the host view's rotation onto the overview map.
 * @private
 */
ol.control.OverviewMapH.prototype.handleRotationChanged_ = function () {
this.ovmap_.getView().setRotation(
this.getMap()
.getView()
.getRotation()
);
};
/**
 * Render hook invoked by the map: validate the extent and redraw the box.
 * @param mapEvent
 */
ol.control.OverviewMapH.render = function (mapEvent) {
this.validateExtent_();
this.updateBox_();
};
/**
 * Re-fit or recenter the overview whenever the extent box would be too
 * large, too small, or outside the mini-map.
 * @private
 */
ol.control.OverviewMapH.prototype.validateExtent_ = function () {
let map = this.getMap();
let ovmap = this.ovmap_;
let mapSize = /** @type {ol.Size} */ (map.getSize());
let view = map.getView();
let extent = view.calculateExtent(mapSize);
let ovmapSize = /** @type {ol.Size} */ (ovmap.getSize());
let ovview = ovmap.getView();
let ovextent = ovview.calculateExtent(ovmapSize);
let topLeftPixel = ovmap.getPixelFromCoordinate(ol.extent.getTopLeft(extent));
let bottomRightPixel = ovmap.getPixelFromCoordinate(
ol.extent.getBottomRight(extent)
);
let boxWidth = Math.abs(topLeftPixel[0] - bottomRightPixel[0]);
let boxHeight = Math.abs(topLeftPixel[1] - bottomRightPixel[1]);
let ovmapWidth = ovmapSize[0];
let ovmapHeight = ovmapSize[1];
if (
boxWidth < ovmapWidth * OVERVIEWMAP.MIN_RATIO ||
boxHeight < ovmapHeight * OVERVIEWMAP.MIN_RATIO ||
boxWidth > ovmapWidth * OVERVIEWMAP.MAX_RATIO ||
boxHeight > ovmapHeight * OVERVIEWMAP.MAX_RATIO
) {
this.resetExtent_();
} else if (!ol.extent.containsExtent(ovextent, extent)) {
this.recenter_();
}
};
/**
 * Reset the overview view so the host extent fits at a comfortable ratio.
 * @private
 */
ol.control.OverviewMapH.prototype.resetExtent_ = function () {
if (OVERVIEWMAP.MAX_RATIO === 0 || OVERVIEWMAP.MIN_RATIO === 0) {
return;
}
let map = this.getMap();
let ovmap = this.ovmap_;
let mapSize = /** @type {ol.Size} */ (map.getSize());
let view = map.getView();
let extent = view.calculateExtent(mapSize);
let ovview = ovmap.getView();
let steps =
Math.log(OVERVIEWMAP.MAX_RATIO / OVERVIEWMAP.MIN_RATIO) / Math.LN2;
let ratio = 1 / (Math.pow(2, steps / 2) * OVERVIEWMAP.MIN_RATIO);
this.scaleFromCenter(extent, ratio);
ovview.fit(extent);
};
/**
 * Scale an extent in place about its own center by the given factor.
 * @param extent
 * @param value
 */
ol.control.OverviewMapH.prototype.scaleFromCenter = function (extent, value) {
let deltaX = ((extent[2] - extent[0]) / 2) * (value - 1);
let deltaY = ((extent[3] - extent[1]) / 2) * (value - 1);
extent[0] -= deltaX;
extent[2] += deltaX;
extent[1] -= deltaY;
extent[3] += deltaY;
};
/**
 * Recenter the overview view on the host map's center.
 * @private
 */
ol.control.OverviewMapH.prototype.recenter_ = function () {
let map = this.getMap();
let ovmap = this.ovmap_;
let view = map.getView();
let ovview = ovmap.getView();
ovview.setCenter(view.getCenter());
};
/**
* Update the box using the main map extent
* @private
*/
ol.control.OverviewMapH.prototype.updateBox_ = function () {
let map = this.getMap();
let ovmap = this.ovmap_;
let mapSize = /** @type {ol.Size} */ (map.getSize());
let view = map.getView();
let ovview = ovmap.getView();
let rotation = view.getRotation();
let overlay = this.boxOverlay_;
let box = this.boxOverlay_.getElement();
let extent = view.calculateExtent(mapSize);
let ovresolution = ovview.getResolution();
let bottomLeft = ol.extent.getBottomLeft(extent);
let topRight = ol.extent.getTopRight(extent);
let rotateBottomLeft = this.calculateCoordinateRotate_(rotation, bottomLeft);
overlay.setPosition(rotateBottomLeft);
if (box) {
box.style.width =
Math.abs((bottomLeft[0] - topRight[0]) / ovresolution) + 'px';
box.style.height =
Math.abs((topRight[1] - bottomLeft[1]) / ovresolution) + 'px';
}
};
/**
 * Rotate a coordinate about the current map center by the given angle.
 * @param rotation
 * @param coordinate
 * @returns {*}
 * @private
 */
ol.control.OverviewMapH.prototype.calculateCoordinateRotate_ = function (
rotation,
coordinate
) {
let coordinateRotate;
let map = this.getMap();
let view = map.getView();
let currentCenter = view.getCenter();
if (currentCenter) {
coordinateRotate = [
coordinate[0] - currentCenter[0],
coordinate[1] - currentCenter[1]
];
ol.coordinate.rotate(coordinateRotate, rotation);
ol.coordinate.add(coordinateRotate, currentCenter);
}
return coordinateRotate;
};
/**
 * Handle a click on the collapse button.
 * @param event
 * @private
 */
ol.control.OverviewMapH.prototype.handleClick_ = function (event) {
event.preventDefault();
this.handleToggle_();
};
/**
 * Toggle the overview map between its collapsed and expanded states:
 * swaps the toggle button's sprite, resizes the control element and, when
 * expanding, refreshes the mini-map's size and extent box.
 * @private
 */
ol.control.OverviewMapH.prototype.handleToggle_ = function () {
  // Fixed: use the control's own button element instead of the implicit
  // global `event` (non-standard, and undefined when this method is invoked
  // programmatically via setCollapsed()/setCollapsible()).
  const button = this.collapsElement_;
  if (this.collapsed_) {
    this.collapsed_ = false;
    if (button) {
      button.style.backgroundPosition = '-40px -405px';
    }
    // NOTE(review): 17px on expand / 120px on collapse looks inverted, but
    // matches the original sprite pairing — confirm against the CSS sprite
    // sheet before changing.
    this.element.style.width = '17px';
    this.element.style.height = '17px';
  } else {
    this.collapsed_ = true;
    if (button) {
      button.style.backgroundPosition = '-40px -386px';
    }
    this.element.style.width = '120px';
    this.element.style.height = '120px';
  }
  let ovmap = this.ovmap_;
  // Fixed: the original guard was `!this.collapsed_ && !ovmap`, which could
  // only ever run against a null map and throw. Refresh when expanded and
  // the mini-map exists.
  if (!this.collapsed_ && ovmap) {
    ovmap.updateSize();
    this.resetExtent_();
    Events.listenOnce(ovmap, 'postrender', this.updateBox_, this);
  }
};
/**
 * Whether the overview control can be collapsed.
 * @returns {*|boolean}
 */
ol.control.OverviewMapH.prototype.getCollapsible = function () {
return this.collapsible_;
};
/**
 * Set whether the overview control can be collapsed.
 * @param collapsible
 */
ol.control.OverviewMapH.prototype.setCollapsible = function (collapsible) {
if (this.collapsible_ === collapsible) {
return;
}
this.collapsible_ = collapsible;
if (!collapsible && this.collapsed_) {
this.handleToggle_();
}
};
/**
 * Set the collapsed state (no-op when not collapsible or unchanged).
 * @param collapsed
 */
ol.control.OverviewMapH.prototype.setCollapsed = function (collapsed) {
if (!this.collapsible_ || this.collapsed_ === collapsed) {
return;
}
this.handleToggle_();
};
/**
 * Whether the overview control is currently collapsed.
 * @returns {boolean|*}
 */
ol.control.OverviewMapH.prototype.getCollapsed = function () {
return this.collapsed_;
};
/**
 * Return the internal overview ol.Map.
 * @returns {ol.Map}
 */
ol.control.OverviewMapH.prototype.getOverviewMap = function () {
return this.ovmap_;
};
const olControlOverviewMap = ol.control.OverviewMapH;
export default olControlOverviewMap;
|
<filename>core/src/test/java/org/hisp/dhis/android/core/program/internal/ProgramEndpointCallShould.java
/*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.program.internal;
import org.hisp.dhis.android.core.arch.api.executors.internal.APIDownloader;
import org.hisp.dhis.android.core.arch.api.executors.internal.APIDownloaderImpl;
import org.hisp.dhis.android.core.arch.api.fields.internal.Fields;
import org.hisp.dhis.android.core.arch.api.filters.internal.Filter;
import org.hisp.dhis.android.core.arch.api.payload.internal.Payload;
import org.hisp.dhis.android.core.arch.handlers.internal.Handler;
import org.hisp.dhis.android.core.common.BaseCallShould;
import org.hisp.dhis.android.core.program.Program;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.internal.util.collections.Sets;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import io.reactivex.Single;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
// Verifies that ProgramCall delegates program downloads to the APIDownloader
// and queries ProgramService with the expected fields, uid filter and
// access-level filter.
@RunWith(JUnit4.class)
public class ProgramEndpointCallShould extends BaseCallShould {

    @Mock
    private ProgramService programService;

    @Mock
    private Handler<Program> programHandler;

    @Captor
    private ArgumentCaptor<Fields<Program>> fieldsCaptor;

    @Captor
    private ArgumentCaptor<Filter<Program, String>> filterCaptor;

    @Captor
    private ArgumentCaptor<String> accessDataReadFilter;

    // An empty payload suffices: only the delegation, not the parsing, is under test.
    private Single<Payload<Program>> apiCall = Single.just(Payload.emptyPayload());

    @Mock
    private APIDownloader mockedApiDownloader;

    private Single<List<Program>> programCallResult = Single.just(Collections.emptyList());

    private Set<String> programUids = Sets.newSet("programUid");

    @Before
    @SuppressWarnings("unchecked")
    public void setUp() throws Exception {
        super.setUp();
        when(mockedApiDownloader.downloadPartitioned(same(programUids), anyInt(), any(Handler.class), any())).thenReturn(programCallResult);
        when(programService.getPrograms(any(Fields.class), any(Filter.class), anyString(),
                anyBoolean())).thenReturn(apiCall);
    }

    @Test
    public void call_api_downloader() {
        // The mocked downloader must receive exactly the requested uids.
        new ProgramCall(programService, programHandler, mockedApiDownloader).download(programUids).blockingGet();
        verify(mockedApiDownloader).downloadPartitioned(same(programUids), anyInt(), any(Handler.class), any());
    }

    @Test
    public void call_service_for_real_api_downloader() {
        // With a real APIDownloaderImpl, inspect the fields/filter/access
        // arguments the service call is made with.
        when(programService.getPrograms(
                fieldsCaptor.capture(), filterCaptor.capture(), accessDataReadFilter.capture(), anyBoolean())
        ).thenReturn(apiCall);
        new ProgramCall(programService, programHandler, new APIDownloaderImpl(resourceHandler)).download(programUids).blockingGet();
        assertThat(fieldsCaptor.getValue()).isEqualTo(ProgramFields.allFields);
        assertThat(filterCaptor.getValue().values().iterator().next()).isEqualTo("programUid");
        assertThat(accessDataReadFilter.getValue()).isEqualTo("access.data.read:eq:true");
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.