text stringlengths 1 1.05M |
|---|
<reponame>wujia28762/Tmate<gh_stars>0
package com.honyum.elevatorMan.net;
import com.honyum.elevatorMan.net.base.Response;
import com.honyum.elevatorMan.net.base.ResponseBody;
public class GetDistanceResponse extends Response {

    private GetDistanceBody body;

    public GetDistanceBody getBody() {
        return body;
    }

    public void setBody(GetDistanceBody body) {
        this.body = body;
    }

    /**
     * Body of the get-distance response.
     *
     * <p>BUG FIX: declared {@code static}. A non-static inner class carries a
     * hidden reference to the enclosing instance and cannot be instantiated by
     * reflection-based JSON mappers (Gson/Jackson) through a no-arg
     * constructor, which is presumably how {@code parseFromJson} builds it —
     * TODO confirm against the {@code Response.parseFromJson} implementation.
     */
    public static class GetDistanceBody extends ResponseBody {

        // Distance to the destination; unit not visible here — presumably meters (TODO confirm)
        private long reachDistance;

        public long getReachDistance() {
            return reachDistance;
        }

        public void setReachDistance(long reachDistance) {
            this.reachDistance = reachDistance;
        }
    }

    /**
     * Parses the given JSON text into a {@link GetDistanceResponse}.
     *
     * @param json raw JSON response text
     * @return the parsed response
     */
    public static GetDistanceResponse getGetDistance(String json) {
        return (GetDistanceResponse) parseFromJson(GetDistanceResponse.class, json);
    }
}
|
// Event subject (message-bus topic) names published/consumed by this app.
// Values follow the "<entity>:<action>" naming convention.
export enum Subjects {
ItemCreated = 'item:created',
ItemUpdated = 'item:updated',
OrderCreated = 'order:created',
OrderCancelled = 'order:cancelled',
}
|
from typing import List
def extract_information(code: str) -> List[str]:
    """Extract the text between the first '(' and the next ')' on each line.

    Blank lines and lines starting with '#' (comments) are skipped.

    Args:
        code: Source text to scan, one candidate per line.

    Returns:
        A list of the stripped substrings found between parentheses, in line
        order. Lines without a '(' followed by a ')' are ignored.
    """
    extracted_elements = []
    for line in code.split('\n'):
        line = line.strip()
        # Skip blanks and comment lines.
        if not line or line.startswith('#'):
            continue
        start_index = line.find('(')
        if start_index == -1:
            continue
        # BUG FIX: search for the closing paren *after* the opening one.
        # The original used line.find(')'), which could match a ')' located
        # before the '(' and silently append a bogus (empty) slice.
        end_index = line.find(')', start_index + 1)
        if end_index != -1:
            extracted_elements.append(line[start_index + 1:end_index].strip())
    return extracted_elements
# Test the function with the provided example.
code = '''
# (
# acc_to_join[np.newaxis].T
# ,self.ClassRecall[np.newaxis].T
'''
# NOTE(review): every line inside `code` starts with '#', so
# extract_information skips them all and this actually prints [] —
# the originally claimed output
# ['acc_to_join[np.newaxis].T', 'self.ClassRecall[np.newaxis].T'] is wrong.
print(extract_information(code))
<filename>stats/pitching.py
import pandas as pd
import matplotlib.pyplot as plt
from data import games  # project-local dataset of retrosheet-style game events

# Keep only play-by-play rows (the raw `games` frame mixes record types).
plays = games[games['type'] == 'play']
# Rows whose event string contains 'K' are treated as strikeouts.
# NOTE(review): substring match may also hit other event codes containing
# 'K' — confirm against the event-code vocabulary in the data source.
strike_outs = plays[plays['event'].str.contains('K')]
# Count strikeouts per (year, game).
strike_outs = strike_outs.groupby(['year', 'game_id']).size()
# Flatten the grouped Series back into a DataFrame with a named count column.
strike_outs = strike_outs.reset_index(name='strike_outs')
# Coerce both plotted columns to numeric dtypes in one pass.
strike_outs = strike_outs.loc[:, ['year', 'strike_outs']].apply(pd.to_numeric)
# Scatter plot of strikeouts per game over the years.
strike_outs.plot(x='year', y='strike_outs', kind='scatter').legend(['Strike Outs'])
plt.show()
|
# Inverts the user<->address association: adds a `user_id` reference column
# to `addresses` and back-fills it from the existing `users.address_id`.
# `down` restores `users.address_id` before dropping the column.
class AddUserIdToAddress < ActiveRecord::Migration[6.0]
  def up
    add_reference :addresses, :user
    # Back-fill: copy each user's address_id link onto the address row itself.
    # find_each batches rows to keep memory bounded on large tables.
    User.find_each do |user|
      address = Address.find_by(id: user.address_id)
      address.update!(user_id: user.id) if address
    end
  end

  def down
    # Restore the old direction of the association before removing the column.
    Address.find_each do |address|
      user = User.find_by(id: address.user_id)
      user.update!(address_id: address.id) if user
    end
    remove_reference :addresses, :user
  end
end
|
<reponame>ShubhamCanMakeCommit/excalikey
// Barrel file: re-export the Island component as this module's default export
// so consumers can import it from the directory path.
import Island from "./Island";
export default Island;
|
<filename>LuceneEvaluation/src/main/java/Indexer.java<gh_stars>1-10
import at.ac.tuwien.ifs.query.BM25SimilarityLossless;
import org.apache.commons.cli.*;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.en.EnglishAnalyzer;
import org.apache.lucene.benchmark.byTask.feeds.*;
import org.apache.lucene.benchmark.byTask.utils.Config;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Properties;
/**
* Main class, is used to index TREC-8 (given path) into a lucene index
*/
public class Indexer {
private static CommandLine parsedArgs;
public static void main(String[] args) {
Options options = new Options();
options.addRequiredOption("o","out-index-dir",true,
"directory for the index");
options.addRequiredOption("d","data-folder",true,
"where the to be indexed files are");
options.addRequiredOption("t","type",true,
"lossless/1-byte - document length");
options.addRequiredOption("a","analyzer",true,
"english/stop-lower-only");
CommandLineParser parser = new DefaultParser();
try {
parsedArgs = parser.parse( options, args );
}catch (ParseException e){
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp( "indexer", options );
return;
}
try {
File indexFolder = new File(parsedArgs.getOptionValue("o"));
if(!indexFolder.exists()){
indexFolder.mkdirs();
}
Directory dir = FSDirectory.open(Paths.get(parsedArgs.getOptionValue("o")));
Analyzer analyzer=null;
switch (parsedArgs.getOptionValue("a")){
case "english":
analyzer = new EnglishAnalyzer(StopWords.nltkStopWords());
break;
case "stop-lower-only":
analyzer = new StopAnalyzer(StopWords.nltkStopWords());
break;
default:
throw new RuntimeException("analyzer not supported: "+parsedArgs.getOptionValue("a"));
}
IndexWriterConfig iwc = new IndexWriterConfig(analyzer);
iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
iwc.setRAMBufferSizeMB(256.0 * 4);
switch (parsedArgs.getOptionValue("t")){
case "lossless":
iwc.setSimilarity(new BM25SimilarityLossless(1.2f,0.6f));
break;
case "1-byte":
iwc.setSimilarity(new BM25Similarity(1.2f,0.6f));
break;
default:
throw new RuntimeException("type not supported: "+parsedArgs.getOptionValue("t"));
}
IndexWriter writer = new IndexWriter(dir, iwc);
indexWithTrecContentSource(parsedArgs.getOptionValue("d"),writer);
writer.close();
} catch (IOException e) {
System.out.println("Error: " + e.getMessage());
e.printStackTrace();
}
}
private static void indexWithTrecContentSource(String dataDir, IndexWriter index) throws IOException {
final long tStart = System.currentTimeMillis();
int docCount = 0;
TrecContentSource tcs = createTrecSource(dataDir);
System.out.println("Start indexing ...");
while (true) {
DocData dd = new DocData();
try {
dd = tcs.getNextDocData(dd);
} catch (Exception e) {
if (e instanceof NoMoreDataException) {
break;
} else {
System.err.println("Failed: " + e.getMessage());
continue;
}
}
Document doc = getDocumentFromDocData(dd);
if (doc == null) {
break;
}
docCount++;
if ((docCount % 10000) == 0) {
System.out.println("Total MB: " + tcs.getTotalBytesCount()/1000000 + " \t Docs: " + docCount + " (" + (System.currentTimeMillis() - tStart) / 1000.0 + " sec)");
}
index.addDocument(doc);
}
System.out.println("----- Fnished ---- (" + (System.currentTimeMillis() - tStart) / 1000.0 + " sec)");
System.out.println("Total MB: " + tcs.getTotalBytesCount()/1000000);
System.out.println("Total items: " + tcs.getTotalItemsCount());
}
// from https://github.com/lintool/IR-Reproducibility/blob/master/systems/lucene/ingester/src/main/java/luceneingester/TrecIngester.java
private static TrecContentSource createTrecSource(String dataDir) {
System.out.println("Gather files ... (this might take a long while)");
TrecContentSource tcs = new TrecContentSource();
Properties props = new Properties();
props.setProperty("print.props", "false");
props.setProperty("content.source.verbose", "false");
props.setProperty("content.source.excludeIteration", "true");
props.setProperty("docs.dir", dataDir);
props.setProperty("trec.doc.parser", "org.apache.lucene.benchmark.byTask.feeds.TrecParserByPath");
props.setProperty("content.source.forever", "false");
tcs.setConfig(new Config(props));
try {
tcs.resetInputs();
} catch (IOException e) {
e.printStackTrace();
}
return tcs;
}
private static Document getDocumentFromDocData(DocData dd) {
Document doc = new Document();
doc.add(new StringField("docname", dd.getName(), Field.Store.YES));
doc.add(new TextField("body",dd.getTitle() + " " + dd.getBody(), Field.Store.NO));
return doc;
}
}
|
#!/bin/bash

# Terminal colours: green for the success message, reset to restore defaults.
green=$(tput setaf 2)
reset=$(tput sgr0)

# printMessage: print "Ok!" in green and return success (0).
printMessage() {
    printf '%s\n' "${green}Ok!${reset}"
    return 0
}

# Run it.
printMessage
#!/bin/bash
#
# Downloads all scripts/templates needed for the HANA install from S3
# into /root/install.

usage() {
    cat <<EOF
Usage: $0 [options]
-h print usage
-b Bucket where scripts/templates are stored
-c Bucket where storage configuration (storage.json) is stored
EOF
    exit 1
}

# BUG FIX: usage previously documented only -b although -c is parsed below.
while getopts ":b:c:" o; do
    case "${o}" in
        b)
            BUILD_BUCKET=${OPTARG}
            ;;
        c)
            STORAGE_BUCKET=${OPTARG}
            ;;
        *)
            usage
            ;;
    esac
done
shift $((OPTIND-1))
[[ $# -gt 0 ]] && usage

DOWNLOADLINK=https://s3.amazonaws.com/${BUILD_BUCKET}
DOWNLOADSTORAGE=https://s3.amazonaws.com/${STORAGE_BUCKET}

# ------------------------------------------------------------------
# Download all the scripts needed for HANA install
# ------------------------------------------------------------------
# One loop replaces ~35 copy-pasted wget lines; add a filename here to
# have it fetched into /root/install.
scripts=(
    cluster-watch-engine.sh
    install-prereq.sh
    install-prereq-sles.sh
    install-prereq-rhel.sh
    install-aws.sh
    install-master.sh
    install-hana-master.sh
    install-worker.sh
    install-hana-worker.sh
    reconcile-ips.py
    reconcile-ips.sh
    wait-for-master.sh
    wait-for-workers.sh
    config.sh
    cleanup.sh
    fence-cluster.sh
    signal-complete.sh
    signal-failure.sh
    interruptq.sh
    os.sh
    validate-install.sh
    signalFinalStatus.sh
    writeconfig.sh
    create-attach-volume.sh
    configureVol.sh
    create-attach-single-volume.sh
    check-hana-version.sh
    sap-hana-tmpfs.sh
    sap-hana-tmpfs.service
    download_media.py
    extract.sh
    get_advancedoptions.py
    postprocess.py
    signal-precheck-failure.sh
    signal-precheck-status.sh
    signal-precheck-success.sh
    build_storage.py
)
for f in "${scripts[@]}"; do
    wget "${DOWNLOADLINK}/scripts/${f}" --output-document="/root/install/${f}"
done

# storage.json lives in the (separate) storage bucket.
wget "${DOWNLOADSTORAGE}/storage.json" --output-document=/root/install/storage.json
|
<reponame>rbernalber/archimedes-js
import { Injectable } from '@angular/core'
import { EvictCache, Query } from '@archimedes/arch'
/** Example query whose execution always resolves to the constant 42. */
@EvictCache
@Injectable({ providedIn: 'root' })
export class BazQry extends Query<number> {
  async internalExecute(param: void): Promise<number> {
    const answer = 42
    return answer
  }
}
|
#include "simple_lib.h"

/* Entry point: delegates to the library's helloWorld() routine
 * (declared in simple_lib.h) and exits successfully. */
int main (void)
{
helloWorld();
return 0;
}
|
<reponame>benoitc/pypy<filename>pypy/objspace/std/kwargsdict.py
## ----------------------------------------------------------------------------
## dict strategy (see dictmultiobject.py)
from pypy.rlib import rerased, jit
from pypy.objspace.std.dictmultiobject import (DictStrategy,
IteratorImplementation,
ObjectDictStrategy,
StringDictStrategy)
class KwargsDictStrategy(DictStrategy):
    """Dict strategy used for **kwargs dicts (see dictmultiobject.py).

    Storage is a pair of parallel lists ``(keys, values_w)`` erased into
    ``dstorage``.  Lookups are linear scans, which is fast for the small
    sizes typical of keyword arguments; once a dict grows past 16 keys the
    strategy switches to the string strategy, and any non-string key
    degrades it to the generic object strategy.
    """
    erase, unerase = rerased.new_erasing_pair("kwargsdict")
    erase = staticmethod(erase)
    unerase = staticmethod(unerase)

    def wrap(self, key):
        return self.space.wrap(key)

    def unwrap(self, wrapped):
        return self.space.str_w(wrapped)

    def get_empty_storage(self):
        d = ([], [])
        return self.erase(d)

    def is_correct_type(self, w_obj):
        # Only plain strings may be used as keys with this strategy.
        space = self.space
        return space.is_w(space.type(w_obj), space.w_str)

    def _never_equal_to(self, w_lookup_type):
        return False

    def iter(self, w_dict):
        return KwargsDictIterator(self.space, self, w_dict)

    def setitem(self, w_dict, w_key, w_value):
        if self.is_correct_type(w_key):
            self.setitem_str(w_dict, self.unwrap(w_key), w_value)
            return
        else:
            # Non-string key: degrade to the generic object strategy.
            self.switch_to_object_strategy(w_dict)
            w_dict.setitem(w_key, w_value)

    def setitem_str(self, w_dict, key, w_value):
        self._setitem_str_indirection(w_dict, key, w_value)

    @jit.look_inside_iff(lambda self, w_dict, key, w_value:
                         jit.isconstant(self.length(w_dict)) and jit.isconstant(key))
    def _setitem_str_indirection(self, w_dict, key, w_value):
        # cleanup: the original also built an unused `result = []` list here
        keys, values_w = self.unerase(w_dict.dstorage)
        for i in range(len(keys)):
            if keys[i] == key:
                values_w[i] = w_value
                break
        else:
            # limit the size so that the linear searches don't become too long
            if len(keys) >= 16:
                self.switch_to_string_strategy(w_dict)
                w_dict.setitem_str(key, w_value)
            else:
                keys.append(key)
                values_w.append(w_value)

    def setdefault(self, w_dict, w_key, w_default):
        # XXX could do better, but is it worth it?
        self.switch_to_object_strategy(w_dict)
        return w_dict.setdefault(w_key, w_default)

    def delitem(self, w_dict, w_key):
        # XXX could do better, but is it worth it?
        self.switch_to_object_strategy(w_dict)
        return w_dict.delitem(w_key)

    def length(self, w_dict):
        return len(self.unerase(w_dict.dstorage)[0])

    def getitem_str(self, w_dict, key):
        return self._getitem_str_indirection(w_dict, key)

    @jit.look_inside_iff(lambda self, w_dict, key: jit.isconstant(self.length(w_dict)) and jit.isconstant(key))
    def _getitem_str_indirection(self, w_dict, key):
        # cleanup: removed the original's dead `result = []` local
        keys, values_w = self.unerase(w_dict.dstorage)
        for i in range(len(keys)):
            if keys[i] == key:
                return values_w[i]
        return None

    def getitem(self, w_dict, w_key):
        space = self.space
        if self.is_correct_type(w_key):
            return self.getitem_str(w_dict, self.unwrap(w_key))
        elif self._never_equal_to(space.type(w_key)):
            return None
        else:
            self.switch_to_object_strategy(w_dict)
            return w_dict.getitem(w_key)

    def w_keys(self, w_dict):
        # BUG FIX: the class previously defined w_keys twice; the first
        # definition (space.newlist over wrapped keys) was silently shadowed
        # by this one, so only the effective newlist_str variant is kept.
        l = self.unerase(w_dict.dstorage)[0]
        return self.space.newlist_str(l[:])

    def values(self, w_dict):
        return self.unerase(w_dict.dstorage)[1][:]  # to make non-resizable

    def items(self, w_dict):
        space = self.space
        keys, values_w = self.unerase(w_dict.dstorage)
        result = []
        for i in range(len(keys)):
            result.append(space.newtuple([self.wrap(keys[i]), values_w[i]]))
        return result

    def popitem(self, w_dict):
        keys, values_w = self.unerase(w_dict.dstorage)
        key = keys.pop()
        w_value = values_w.pop()
        return (self.wrap(key), w_value)

    def clear(self, w_dict):
        w_dict.dstorage = self.get_empty_storage()

    def switch_to_object_strategy(self, w_dict):
        # Re-home all entries into a wrapped-key dict under ObjectDictStrategy.
        strategy = self.space.fromcache(ObjectDictStrategy)
        keys, values_w = self.unerase(w_dict.dstorage)
        d_new = strategy.unerase(strategy.get_empty_storage())
        for i in range(len(keys)):
            d_new[self.wrap(keys[i])] = values_w[i]
        w_dict.strategy = strategy
        w_dict.dstorage = strategy.erase(d_new)

    def switch_to_string_strategy(self, w_dict):
        # Re-home all entries into a plain str-keyed dict under StringDictStrategy.
        strategy = self.space.fromcache(StringDictStrategy)
        keys, values_w = self.unerase(w_dict.dstorage)
        storage = strategy.get_empty_storage()
        d_new = strategy.unerase(storage)
        for i in range(len(keys)):
            d_new[keys[i]] = values_w[i]
        w_dict.strategy = strategy
        w_dict.dstorage = storage

    def view_as_kwargs(self, w_dict):
        return self.unerase(w_dict.dstorage)
class KwargsDictIterator(IteratorImplementation):
    # Iterator over a kwargs-strategy dict. The underlying key/value lists
    # are captured at construction time, so the iterator keeps them alive.
    def __init__(self, space, strategy, dictimplementation):
        IteratorImplementation.__init__(self, space, strategy, dictimplementation)
        keys, values_w = strategy.unerase(self.dictimplementation.dstorage)
        # Iterate by index so keys and values stay paired.
        self.iterator = iter(range(len(keys)))
        # XXX this potentially leaks
        self.keys = keys
        self.values_w = values_w

    def next_entry(self):
        # note that this 'for' loop only runs once, at most
        for i in self.iterator:
            return self.space.wrap(self.keys[i]), self.values_w[i]
        else:
            # Exhausted: signal end-of-iteration with a (None, None) pair.
            return None, None
|
// Auto-generated Doxygen navigation data for armnn::profiling::IPacketBuffer.
// Each entry is [member name, documentation page + anchor, child entries].
var classarmnn_1_1profiling_1_1_i_packet_buffer =
[
[ "~IPacketBuffer", "classarmnn_1_1profiling_1_1_i_packet_buffer.xhtml#a9ded593bdc39f70c3e135e649ab3e42e", null ],
[ "Commit", "classarmnn_1_1profiling_1_1_i_packet_buffer.xhtml#a4586c90cbeb7804b32dad8c1bd6ae242", null ],
[ "GetWritableData", "classarmnn_1_1profiling_1_1_i_packet_buffer.xhtml#a0dd0b11d60c79e8c89ed1b851a45030d", null ],
[ "Release", "classarmnn_1_1profiling_1_1_i_packet_buffer.xhtml#a4dc7ecb0d192d6cccc9287daaf0eca0f", null ]
];
import { Router } from 'express'
import LogController from '../controllers/logs.controller'
// Express router exposing the application-log endpoints.
const router: Router = Router()

// GET /logs — return all log entries.
router.get('/logs', LogController.getAllLogs)
// DELETE /logs — bulk-delete log entries.
router.delete('/logs', LogController.bulkDeleteLogs)

export default router
|
<filename>packages/languages/command-core/src/ja-jp/info.ts
import {
Context,
DescriptionData,
getPrefixWithContext,
} from "protocol_command-schema-core";
// Builds the Japanese description data for the "help" command.
// `command` documents the command itself, `key` documents its argument.
export const commandHelp = (
ctx: Context
): Record<"command" | "key", DescriptionData> => {
// Name of the currently running command; falls back to "help".
const cmd: string = (ctx.runningCommand && ctx.runningCommand[0]) ?? "help";
// Command prefix configured for this context (e.g. "!" or "/").
const prefix = getPrefixWithContext(ctx);
return {
command: {
summary: [
"ヘルプを表示するコマンドです。",
`${prefix}${cmd}で全体のhelpを確認。`,
`${prefix}${cmd} \`\`カテゴリ名\`\`でカテゴリについて詳細を確認。`,
`${prefix}${cmd} \`\`コマンド名\`\`でコマンドについて詳細を確認。`,
"一部のコマンドは追加のヘルプを持ちます。詳細は各コマンドのfooterを参照してください。",
].join("\n"),
},
key: {
summary:
"何についてのヘルプを表示するか指定します。\nコマンドの他にもカテゴリ等を指定できます。\n省略可能です。",
},
};
};
// "info" command: shows information about this bot.
export const commandInfo: Record<"command", DescriptionData> = {
command: {
summary: "このbotについての情報を表示します。",
},
};
// "invite" command: shows the bot's invite link and support-server link.
export const commandInvite: Record<"command", DescriptionData> = {
command: {
summary: "このbotの招待リンクとサポートサーバーの招待リンクを表示します。",
},
};
// "ping" command: checks the bot's response time.
export const commandPing: Record<"command", DescriptionData> = {
command: {
summary: "botの応答時間を確認します。",
},
};
// "stats" command: shows the bot's statistics.
export const commandStats: Record<"command", DescriptionData> = {
command: {
summary: "botの統計情報を表示します。",
},
};
|
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using Newtonsoft.Json;
using NodaTime;
using NUnit.Framework;
using QuantConnect.Securities;
using QuantConnect.Util;
// NOTE(review): this block appears to be a partially machine-translated,
// Java-flavoured copy of QuantConnect LEAN's C# test file
// MarketHoursDatabaseJsonConverterTests and does NOT compile as written:
//   - `package ... { }` should be a C# `namespace` declaration;
//   - local variables are declared without a type or `var`;
//   - lambdas use Java's `->` instead of C#'s `=>`;
//   - Java APIs (ImmutableMap, Map, ZoneId, Duration.ofHours, StringUtils,
//     String.split, .equals) replace the original .NET equivalents;
//   - several `if(...)` conditions are missing a closing parenthesis
//     (e.g. `if( tz.equals( "UTC") return ...`), and
//     `csv = line Extensions.toCsv( );` is not a valid statement.
// Code left byte-for-byte unchanged pending a decision on whether to restore
// the upstream C# source or complete the port; comments only annotate intent.
package com.quantconnect.lean.Tests.Common.Util
{
    [TestFixture]
    public class MarketHoursDatabaseJsonConverterTests
    {
        // Round-trips the market-hours database through JSON and verifies
        // time zones, holidays and each day's market-hour segments survive.
        [Test]
        public void HandlesRoundTrip() {
            database = MarketHoursDatabase.FromDataFolder();
            result = JsonConvert.SerializeObject(database, Formatting.Indented);
            deserializedDatabase = JsonConvert.DeserializeObject<MarketHoursDatabase>(result);
            originalListing = database.ExchangeHoursListing.ToDictionary();
            foreach (kvp in deserializedDatabase.ExchangeHoursListing) {
                original = originalListing[kvp.Key];
                Assert.AreEqual(original.DataTimeZone, kvp.Value.DataTimeZone);
                CollectionAssert.AreEqual(original.ExchangeHours.Holidays, kvp.Value.ExchangeHours.Holidays);
                foreach (value in Enum.GetValues(typeof(DayOfWeek))) {
                    day = (DayOfWeek) value;
                    o = original.ExchangeHours.MarketHours[day];
                    d = kvp.Value.ExchangeHours.MarketHours[day];
                    foreach (pair in o.Segments.Zip(d.Segments, Tuple.Create)) {
                        Assert.AreEqual(pair.Item1.State, pair.Item2.State);
                        Assert.AreEqual(pair.Item1.Start, pair.Item2.Start);
                        Assert.AreEqual(pair.Item1.End, pair.Item2.End);
                    }
                }
            }
        }
        // One-off helper converting a legacy market-hours-database.csv into
        // the JSON format; normally skipped via [Ignore].
        [Test, Ignore( "This is provided to make it easier to convert your own market-hours-database.csv to the new format")]
        public void ConvertMarketHoursDatabaseCsvToJson() {
            directory = Path.Combine(Globals.DataFolder, "market-hours");
            input = Path.Combine(directory, "market-hours-database.csv");
            output = Path.Combine(directory, Path.GetFileNameWithoutExtension(input) + ".json");
            allHolidays = Directory.EnumerateFiles(Path.Combine(Globals.DataFolder, "market-hours"), "holidays-*.csv").Select(x =>
            {
                dates = new HashSet<DateTime>();
                market = Path.GetFileNameWithoutExtension(x).Replace( "holidays-", string.Empty);
                foreach (line in File.ReadAllLines(x).Skip(1).Where(l -> !l.StartsWith( "#"))) {
                    csv = line Extensions.toCsv( );
                    dates.Add(new DateTime(int.Parse(csv[0]), int.Parse(csv[1]), int.Parse(csv[2])));
                }
                return new KeyValuePair<String, IEnumerable<DateTime>>(market, dates);
            }).ToDictionary();
            database = FromCsvFile(input, allHolidays);
            File.WriteAllText(output, JsonConvert.SerializeObject(database, Formatting.Indented));
        }
        #region These methods represent the old way of reading MarketHoursDatabase from csv and are left here to allow users to convert
        /**
         * Creates a new instance of the <see cref="MarketHoursDatabase"/> class by reading the specified csv file
         */
        * @param file The csv file to be read
        * @param holidaysByMarket The holidays for each market in the file, if no holiday is present then none is used
        @returns A new instance of the <see cref="MarketHoursDatabase"/> class representing the data in the specified file
        public static MarketHoursDatabase FromCsvFile( String file, ImmutableMap<String, IEnumerable<DateTime>> holidaysByMarket) {
            exchangeHours = new Map<SecurityDatabaseKey, MarketHoursDatabase.Entry>();
            if( !File.Exists(file)) {
                throw new FileNotFoundException( "Unable to locate market hours file: " + file);
            }
            // skip the first header line, also skip #'s as these are comment lines
            foreach (line in File.ReadLines(file).Where(x -> !x.StartsWith( "#")).Skip(1)) {
                SecurityDatabaseKey key;
                hours = FromCsvLine(line, holidaysByMarket, out key);
                if( exchangeHours.ContainsKey(key)) {
                    throw new Exception( "Encountered duplicate key while processing file: " + file + ". Key: " + key);
                }
                exchangeHours[key] = hours;
            }
            return new MarketHoursDatabase(exchangeHours);
        }
        /**
         * Creates a new instance of <see cref="SecurityExchangeHours"/> from the specified csv line and holiday set
         */
        * @param line The csv line to be parsed
        * @param holidaysByMarket The holidays this exchange isn't open for trading by market
        * @param key The key used to uniquely identify these market hours
        @returns A new <see cref="SecurityExchangeHours"/> for the specified csv line and holidays
        private static MarketHoursDatabase.Entry FromCsvLine( String line,
            ImmutableMap<String, IEnumerable<DateTime>> holidaysByMarket,
            out SecurityDatabaseKey key) {
            csv = line.split(',');
            marketHours = new List<LocalMarketHours>(7);
            // timezones can be specified using Tzdb names (America/New_York) or they can
            // be specified using offsets, UTC-5
            dataTimeZone = ParseTimeZone(csv[0]);
            exchangeTimeZone = ParseTimeZone(csv[1]);
            //market = csv[2];
            //symbol = csv[3];
            //type = csv[4];
            symbol = StringUtils.isEmpty(csv[3]) ? null : csv[3];
            key = new SecurityDatabaseKey(csv[2], symbol, (SecurityType)Enum.Parse(typeof(SecurityType), csv[4], true));
            int csvLength = csv.Length;
            for (int i = 1; i < 8; i++) // 7 days, so < 8
            {
                // the 4 here is because 4 times per day, ex_open,open,close,ex_close
                if( 4*i + 4 > csvLength - 1) {
                    break;
                }
                hours = ReadCsvHours(csv, 4*i + 1, (DayOfWeek) (i - 1));
                marketHours.Add(hours);
            }
            IEnumerable<DateTime> holidays;
            if( !holidaysByMarket.TryGetValue(key.Market, out holidays)) {
                holidays = Enumerable.Empty<DateTime>();
            }
            exchangeHours = new SecurityExchangeHours(exchangeTimeZone, holidays, marketHours.ToDictionary(x -> x.DayOfWeek));
            return new MarketHoursDatabase.Entry(dataTimeZone, exchangeHours);
        }
        private static ZoneId ParseTimeZone( String tz) {
            // handle UTC directly
            if( tz.equals( "UTC") return Global.UTC_ZONE_ID;
            // if it doesn't start with UTC then it's a name, like America/New_York
            if( !tz.StartsWith( "UTC")) return ZoneIdProviders.Tzdb[tz];
            // it must be a UTC offset, parse the offset as hours
            // define the time zone as a constant offset time zone in the form: 'UTC-3.5' or 'UTC+10'
            millisecondsOffset = (int) Duration.ofHours(double.Parse(tz.Replace( "UTC", string.Empty))).TotalMilliseconds;
            return ZoneId.ForOffset(Offset.FromMilliseconds(millisecondsOffset));
        }
        private static LocalMarketHours ReadCsvHours( String[] csv, int startIndex, DayOfWeek dayOfWeek) {
            ex_open = csv[startIndex];
            if( ex_open.equals( "-") {
                return LocalMarketHours.ClosedAllDay(dayOfWeek);
            }
            if( ex_open.equals( "+") {
                return LocalMarketHours.OpenAllDay(dayOfWeek);
            }
            open = csv[startIndex + 1];
            close = csv[startIndex + 2];
            ex_close = csv[startIndex + 3];
            ex_open_time = ParseHoursToTimeSpan(ex_open);
            open_time = ParseHoursToTimeSpan(open);
            close_time = ParseHoursToTimeSpan(close);
            ex_close_time = ParseHoursToTimeSpan(ex_close);
            // All four marks at zero means the exchange is closed that day.
            if( ex_open_time == Duration.ZERO
                && open_time == Duration.ZERO
                && close_time == Duration.ZERO
                && ex_close_time == Duration.ZERO) {
                return LocalMarketHours.ClosedAllDay(dayOfWeek);
            }
            return new LocalMarketHours(dayOfWeek, ex_open_time, open_time, close_time, ex_close_time);
        }
        private static Duration ParseHoursToTimeSpan( String ex_open) {
            return Duration.ofHours(double.Parse(ex_open, CultureInfo.InvariantCulture));
        }
        #endregion
    }
}
|
import uuid  # BUG FIX: uuid was used below but never imported

# Generate one random UUID string per input record.
ids = [str(uuid.uuid4()) for data in [10, 20, 30]]
package debug
import (
"log"
"strconv"
)
// Debugging levels.
const (
Level0 = 0
Level1 = 1
Level2 = 2
)
// Level defines the debugging level
//nolint:gochecknoglobals
var Level = 0
// Print displays a debug message according to its level.
func Print(level int, logger *log.Logger, msg string, v ...interface{}) {
if level <= Level {
levelString := strconv.Itoa(level)
prefix := "[DEBUG-" + levelString + "]"
logger.Printf(prefix+" "+msg, v...)
}
}
|
#!/usr/bin/env bash
# NOTE: requires GDAL to be installed
# Exports GNAF address tables from PostGIS to FlatGeobuf files, then
# optionally uploads them to S3 with public-read access.
# NOTE(review): database credentials are hard-coded in the connection
# strings below — consider PGPASSWORD/.pgpass or an env var instead.
# set this to taste - NOTE: you can't use "~" for your home folder
output_folder="/Users/$(whoami)/tmp"
# full addresses
ogr2ogr -f FlatGeobuf ${output_folder}/address-principals-202108.fgb \
PG:"host=localhost dbname=geo user=postgres password=password port=5432" "gnaf_202108.address_principals(geom)"
# just GNAF PIDs and point geometries (reprojected to WGS84)
ogr2ogr -f FlatGeobuf ${output_folder}/address-principals-lite-202102.fgb \
PG:"host=localhost dbname=geo user=postgres password=password port=5432" -sql "select gnaf_pid, ST_Transform(geom, 4326) as geom from gnaf_202102.address_principals"
# display locality boundaries
# NOTE(review): this output filename repeats address-principals-202108.fgb
# and therefore overwrites the first export — confirm intended name.
ogr2ogr -f FlatGeobuf ${output_folder}/address-principals-202108.fgb \
PG:"host=localhost dbname=geo user=postgres password=password port=5432" "admin_bdys_202108.locality_bdys_display(geom)"
# OPTIONAL - copy files to AWS S3 and allow public read access (requires AWSCLI installed and your AWS credentials setup)
cd ${output_folder}
for f in *-202108.fgb;
do
aws --profile=default s3 cp --storage-class REDUCED_REDUNDANCY ./${f} s3://minus34.com/opendata/geoscape-202108/flatgeobuf/${f};
aws --profile=default s3api put-object-acl --acl public-read --bucket minus34.com --key opendata/geoscape-202108/flatgeobuf/${f}
echo "${f} uploaded to AWS S3"
done
|
<gh_stars>1-10
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.core5.http2.impl.io;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import org.apache.hc.core5.http.ConnectionClosedException;
import org.apache.hc.core5.http2.H2ConnectionException;
import org.apache.hc.core5.http2.H2CorruptFrameException;
import org.apache.hc.core5.http2.H2Error;
import org.apache.hc.core5.http2.H2TransportMetrics;
import org.apache.hc.core5.http2.frame.FrameConsts;
import org.apache.hc.core5.http2.frame.FrameFlag;
import org.apache.hc.core5.http2.frame.RawFrame;
import org.apache.hc.core5.http2.impl.BasicH2TransportMetrics;
import org.apache.hc.core5.util.Args;
/**
* Frame input buffer for HTTP/2 blocking connections.
*
* @since 5.0
*/
public final class FrameInputBuffer {

    private final BasicH2TransportMetrics metrics;
    private final int maxFramePayloadSize;
    private final byte[] buffer;

    // Window of buffered, not-yet-consumed bytes: [off, off + dataLen).
    private int off;
    private int dataLen;

    FrameInputBuffer(final BasicH2TransportMetrics metrics, final int bufferLen, final int maxFramePayloadSize) {
        Args.notNull(metrics, "HTTP2 transport metrics"); // typo fix: "metrcis"
        Args.positive(maxFramePayloadSize, "Maximum payload size");
        this.metrics = metrics;
        this.maxFramePayloadSize = maxFramePayloadSize;
        this.buffer = new byte[bufferLen];
        this.dataLen = 0;
    }

    public FrameInputBuffer(final BasicH2TransportMetrics metrics, final int maxFramePayloadSize) {
        this(metrics, FrameConsts.HEAD_LEN + maxFramePayloadSize, maxFramePayloadSize);
    }

    public FrameInputBuffer(final int maxFramePayloadSize) {
        this(new BasicH2TransportMetrics(), maxFramePayloadSize);
    }

    boolean hasData() {
        return this.dataLen > 0;
    }

    /**
     * Reads from the stream until at least {@code requiredLen} bytes are
     * buffered, compacting the buffer first when partially consumed.
     *
     * @throws H2CorruptFrameException if EOF arrives mid-frame
     * @throws ConnectionClosedException if EOF arrives on a frame boundary
     */
    void fillBuffer(final InputStream inStream, final int requiredLen) throws IOException {
        while (dataLen < requiredLen) {
            if (off > 0) {
                System.arraycopy(buffer, off, buffer, 0, dataLen);
                off = 0;
            }
            final int bytesRead = inStream.read(buffer, off + dataLen, buffer.length - dataLen);
            if (bytesRead == -1) {
                if (dataLen > 0) {
                    throw new H2CorruptFrameException("Corrupt or incomplete HTTP2 frame");
                }
                throw new ConnectionClosedException();
            }
            dataLen += bytesRead;
            this.metrics.incrementBytesTransferred(bytesRead);
        }
    }

    /**
     * Reads and returns the next HTTP/2 frame from the stream.
     *
     * @throws H2ConnectionException on oversized payload or invalid padding
     */
    public RawFrame read(final InputStream inStream) throws IOException {
        fillBuffer(inStream, FrameConsts.HEAD_LEN);
        final int payloadOff = FrameConsts.HEAD_LEN;
        final int payloadLen = (buffer[off] & 0xff) << 16 | (buffer[off + 1] & 0xff) << 8 | (buffer[off + 2] & 0xff);
        final int type = buffer[off + 3] & 0xff;
        final int flags = buffer[off + 4] & 0xff;
        // BUG FIX: mask the reserved high bit of the stream identifier with
        // 0x7f (RFC 7540 section 4.1) instead of a pointless Math.abs, and
        // parenthesize (buffer[off + 6] & 0xff) << 16 correctly — the
        // original "buffer[off + 6] & 0xff << 16" masked with (0xff << 16)
        // due to operator precedence, corrupting the parsed stream id.
        final int streamId = (buffer[off + 5] & 0x7f) << 24 | (buffer[off + 6] & 0xff) << 16 | (buffer[off + 7] & 0xff) << 8 | (buffer[off + 8] & 0xff);
        if (payloadLen > maxFramePayloadSize) {
            throw new H2ConnectionException(H2Error.FRAME_SIZE_ERROR, "Frame size exceeds maximum");
        }
        final int frameLen = payloadOff + payloadLen;
        fillBuffer(inStream, frameLen);
        if ((flags & FrameFlag.PADDED.getValue()) > 0) {
            if (payloadLen == 0) {
                throw new H2ConnectionException(H2Error.PROTOCOL_ERROR, "Inconsistent padding");
            }
            final int padding = buffer[off + FrameConsts.HEAD_LEN] & 0xff;
            if (payloadLen < padding + 1) {
                throw new H2ConnectionException(H2Error.PROTOCOL_ERROR, "Inconsistent padding");
            }
        }
        final ByteBuffer payload = payloadLen > 0 ? ByteBuffer.wrap(buffer, off + payloadOff, payloadLen) : null;
        final RawFrame frame = new RawFrame(type, flags, streamId, payload);
        off += frameLen;
        dataLen -= frameLen;
        this.metrics.incrementFramesTransferred();
        return frame;
    }

    public H2TransportMetrics getMetrics() {
        return metrics;
    }
}
|
#!/usr/bin/bash
# Synthesize the iob_timer design with Intel Quartus 18.0, driven through the
# NIOS II command shell so the Quartus environment is configured correctly.
# Usage: <script> <tcl-arg1> <tcl-arg2> <tcl-arg3>
export ALTERAPATH=/home/iobundle/Intel/Altera_full/18.0
export LM_LICENSE_FILE=1801@localhost:$ALTERAPATH/../1-MVXX5H_License.dat
# Reuse $ALTERAPATH instead of repeating the absolute installation path.
nios=$ALTERAPATH/nios2eds/nios2_command_shell.sh
TOP_MODULE="iob_timer"
# Project setup (tcl), synthesis, fitting, database merge, and partition export.
$nios quartus_sh -t ../timer.tcl "$1" "$2" "$3"
$nios quartus_map --read_settings_files=on --write_settings_files=off "$TOP_MODULE" -c "$TOP_MODULE"
$nios quartus_fit --read_settings_files=off --write_settings_files=off "$TOP_MODULE" -c "$TOP_MODULE"
$nios quartus_cdb --read_settings_files=off --write_settings_files=off "$TOP_MODULE" -c "$TOP_MODULE" --merge=on
$nios quartus_cdb "$TOP_MODULE" -c "$TOP_MODULE" --incremental_compilation_export="${TOP_MODULE}_0.qxp" --incremental_compilation_export_partition_name=Top --incremental_compilation_export_post_synth=on --incremental_compilation_export_post_fit=off --incremental_compilation_export_routing=on --incremental_compilation_export_flatten=on
|
<gh_stars>1000+
-- Migration: switch primary keys to auto-incrementing 64-bit integers.
alter table question modify id bigint auto_increment not null;
-- `user` is a reserved word in MySQL, hence the backtick quoting.
-- The statement terminator was missing here, which breaks multi-statement
-- execution when further statements are appended to this script.
alter table `user` modify id bigint auto_increment not null;
<reponame>togiter/RRWallet
package com.renrenbit.rrwallet.utils;
import rx.Observable;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;
/**
 * Static helpers applying the conventional RxJava threading pattern:
 * do the work on the I/O scheduler, deliver results on the Android main thread.
 */
public class ThreadScheduleUtils {

    private ThreadScheduleUtils() {
        // Static utility class; not meant to be instantiated.
    }

    /** Delivers emissions of {@code obs} on the Android main (UI) thread. */
    public static <T> Observable<T> observeOnMainThread(Observable<T> obs) {
        return obs.observeOn(AndroidSchedulers.mainThread());
    }

    /** Subscribes {@code obs} on the shared I/O scheduler. */
    public static <T> Observable<T> subscribeOnIoThread(Observable<T> obs) {
        return obs.subscribeOn(Schedulers.io());
    }

    /** Combines both: work on the I/O scheduler, results on the main thread. */
    public static <T> Observable<T> simpleScheduleThread(Observable<T> obs) {
        return subscribeOnIoThread(observeOnMainThread(obs));
    }
}
|
<filename>elasta-composer/src/main/java/elasta/composer/respose/generator/impl/JsonObjectResponseGeneratorImpl.java
package elasta.composer.respose.generator.impl;
import elasta.composer.respose.generator.JsonObjectResponseGenerator;
import io.vertx.core.json.JsonObject;
/**
* Created by sohan on 6/30/2017.
*/
/**
 * Response generator that assumes the upstream handler already produced a
 * {@link JsonObject} and simply narrows the generic result to that type.
 *
 * @param <T> runtime type of the handler result; must actually be a
 *            {@code JsonObject}, otherwise a {@code ClassCastException}
 *            propagates to the caller
 */
final public class JsonObjectResponseGeneratorImpl<T> implements JsonObjectResponseGenerator<T> {
    @Override
    public JsonObject apply(T value) throws Throwable {
        // No conversion is performed here; this implementation is a pure cast.
        return (JsonObject) value;
    }
}
|
#!/bin/bash
# macOS development environment bootstrap: installs Homebrew (if missing),
# the native build dependencies, the Python toolchain, and a locally built
# casadi inside the project's pipenv virtualenv.
set -e
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
ROOT="$(cd $DIR/../ && pwd)"
ARCH=$(uname -m)
# Pick the shell rc file to append environment setup to; unset if the user
# runs neither zsh nor bash.
if [[ $SHELL == "/bin/zsh" ]]; then
  RC_FILE="$HOME/.zshrc"
elif [[ $SHELL == "/bin/bash" ]]; then
  RC_FILE="$HOME/.bashrc"
fi
# Install brew if required
if [[ $(command -v brew) == "" ]]; then
  echo "Installing Hombrew"
  /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
  echo "[ ] installed brew t=$SECONDS"
  # make brew available now
  # Homebrew's prefix differs between Intel (/usr/local) and Apple Silicon
  # (/opt/homebrew), so the shellenv hook path depends on $ARCH.
  if [[ $ARCH == "x86_64" ]]; then
    echo 'eval "$(/usr/local/homebrew/bin/brew shellenv)"' >> $RC_FILE
    eval "$(/usr/local/homebrew/bin/brew shellenv)"
  else
    echo 'eval "$(/opt/homebrew/bin/brew shellenv)"' >> $RC_FILE
    eval "$(/opt/homebrew/bin/brew shellenv)"
  fi
fi
# TODO: remove protobuf,protobuf-c,swig when casadi can be pip installed
# NOTE: the heredoc below is consumed by `brew bundle` as an inline Brewfile.
brew bundle --file=- <<-EOS
brew "cmake"
brew "cppcheck"
brew "git-lfs"
brew "zlib"
brew "bzip2"
brew "capnp"
brew "coreutils"
brew "eigen"
brew "ffmpeg"
brew "glfw"
brew "libarchive"
brew "libusb"
brew "libtool"
brew "llvm"
brew "openssl"
brew "pyenv"
brew "qt@5"
brew "zeromq"
brew "protobuf"
brew "protobuf-c"
brew "swig"
cask "gcc-arm-embedded"
EOS
echo "[ ] finished brew install t=$SECONDS"
BREW_PREFIX=$(brew --prefix)
# archive backend tools for pip dependencies
export LDFLAGS="$LDFLAGS -L${BREW_PREFIX}/opt/zlib/lib"
export LDFLAGS="$LDFLAGS -L${BREW_PREFIX}/opt/bzip2/lib"
export CPPFLAGS="$CPPFLAGS -I${BREW_PREFIX}/opt/zlib/include"
export CPPFLAGS="$CPPFLAGS -I${BREW_PREFIX}/opt/bzip2/include"
# pycurl curl/openssl backend dependencies
export LDFLAGS="$LDFLAGS -L${BREW_PREFIX}/opt/openssl@3/lib"
export CPPFLAGS="$CPPFLAGS -I${BREW_PREFIX}/opt/openssl@3/include"
export PYCURL_SSL_LIBRARY=openssl
# openpilot environment
# Skipped on CI and when the env is already set up or no rc file was detected.
if [ -z "$OPENPILOT_ENV" ] && [ -n "$RC_FILE" ] && [ -z "$CI" ]; then
  echo "source $ROOT/tools/openpilot_env.sh" >> $RC_FILE
  source "$ROOT/tools/openpilot_env.sh"
  echo "Added openpilot_env to RC file: $RC_FILE"
fi
# install python dependencies
$ROOT/update_requirements.sh
eval "$(pyenv init --path)"
echo "[ ] installed python dependencies t=$SECONDS"
# install casadi
# Built from source against the pyenv Python and installed into the pipenv
# virtualenv; skipped when the headers are already present there.
VENV=`pipenv --venv`
PYTHON_VER=3.8
PYTHON_VERSION=$(cat $ROOT/.python-version)
if [ ! -f "$VENV/include/casadi/casadi.hpp" ]; then
  echo "-- casadi manual install"
  cd /tmp/ && curl -L https://github.com/casadi/casadi/archive/refs/tags/ge6.tar.gz --output casadi.tar.gz
  tar -xzf casadi.tar.gz
  cd casadi-ge6/ && mkdir -p build && cd build
  cmake .. \
    -DWITH_PYTHON=ON \
    -DWITH_EXAMPLES=OFF \
    -DCMAKE_INSTALL_PREFIX:PATH=$VENV \
    -DPYTHON_PREFIX:PATH=$VENV/lib/python$PYTHON_VER/site-packages \
    -DPYTHON_LIBRARY:FILEPATH=$HOME/.pyenv/versions/$PYTHON_VERSION/lib/libpython$PYTHON_VER.dylib \
    -DPYTHON_EXECUTABLE:FILEPATH=$HOME/.pyenv/versions/$PYTHON_VERSION/bin/python \
    -DPYTHON_INCLUDE_DIR:PATH=$HOME/.pyenv/versions/$PYTHON_VERSION/include/python$PYTHON_VER \
    -DCMAKE_CXX_FLAGS="-ferror-limit=0" -DCMAKE_C_FLAGS="-ferror-limit=0"
  # NOTE(review): `nproc` is a GNU coreutils command; on macOS it is only
  # available via the brew coreutils package installed above -- confirm.
  CFLAGS="-ferror-limit=0" make -j$(nproc) && make install
else
  echo "---- casadi found in venv. skipping build ----"
fi
echo
echo "---- OPENPILOT SETUP DONE ----"
echo "Open a new shell or configure your active shell env by running:"
echo "source $RC_FILE"
|
package com.vidviz.back;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import java.io.File;
/**
 * Spring MVC configuration that maps the standard classpath locations for
 * static web assets and additionally exposes the local {@code videos}
 * directory on disk under the {@code /videos/**} URL path.
 */
@Configuration
//@EnableWebMvc
public class MvcConfig implements WebMvcConfigurer {

    @Override
    public void addResourceHandlers(ResourceHandlerRegistry registry) {
        // Serve the conventional classpath locations for static content.
        registry.addResourceHandler("/**")
                .addResourceLocations("classpath:/static/")
                .addResourceLocations("classpath:/public/")
                .addResourceLocations("classpath:/resources/");
        // Map /videos/** onto the "videos" folder relative to the working
        // directory, resolved to an absolute file: URL at startup.
        registry
                .addResourceHandler("/videos/**")
                .addResourceLocations("file:///" + new File("videos").getAbsolutePath() + "/");
    }
}
|
/**
 * For every row of a 2-D array, find its largest element.
 *
 * @param {number[][]} arr - matrix given as an array of rows
 * @returns {number[]} the per-row maxima, in row order
 */
function findMaxElementInRow(arr) {
    return arr.map((row) => {
        // Start from the first element and keep the biggest value seen.
        let biggest = row[0];
        for (const value of row) {
            if (value > biggest) {
                biggest = value;
            }
        }
        return biggest;
    });
}

findMaxElementInRow([[2, 4, 6], [8, 10, 12], [14, 16, 18]]) // [6, 12, 18]
#!/bin/sh
# Generate the Go lexer/parser for the Mixed grammar into ./parser using the
# bundled ANTLR 4.7.2 jar (requires a Java runtime on PATH).
java -jar ./jars/antlr-4.7.2-complete.jar -Dlanguage=Go -o parser Mixed.g
|
// Full English month names, indexed 0-11 (January = 0) to line up with
// Date#getMonth().
export const Month_Names_Full = [
  'January',
  'February',
  'March',
  'April',
  'May',
  'June',
  'July',
  'August',
  'September',
  'October',
  'November',
  'December',
];
// Three-letter month abbreviations, same 0-based indexing as above.
export const Month_Names_Short = [
  'Jan',
  'Feb',
  'Mar',
  'Apr',
  'May',
  'Jun',
  'Jul',
  'Aug',
  'Sep',
  'Oct',
  'Nov',
  'Dec',
];
// Three-letter weekday abbreviations, indexed 0-6 (Sunday = 0) to line up
// with Date#getDay().
export const Weekday_Names_Short = [
  'Sun',
  'Mon',
  'Tue',
  'Wed',
  'Thu',
  'Fri',
  'Sat',
];
|
<reponame>abricos/abricos-mod-uprofile
// Abricos UI component bootstrap for the user-profile module. Template
// placeholders of the form {C#MODNAME} / {C#MODNAMEURI} are substituted with
// the actual module name by the Abricos template engine before delivery.
var Component = new Brick.Component();
Component.requires = {
    mod: [
        {name: 'sys', files: ['application.js']},
        {name: '{C#MODNAME}', files: ['base.js', 'model.js']}
    ]
};
Component.entryPoint = function(NS){
    var COMPONENT = this,
        SYS = Brick.mod.sys,
        UID = Brick.env.user.id;
    // Role thresholds for this module; higher numbers imply more privileges.
    NS.roles = new Brick.AppRoles('{C#MODNAME}', {
        isAdmin: 50,
        isWrite: 30,
        isView: 10
    });
    SYS.Application.build(COMPONENT, {}, {
        // Add every user of the list to the shared user cache (replacing
        // stale entries by id).
        _addUsersToCache: function(userList){
            if (!userList){
                return;
            }
            userList.each(function(user){
                this._addUserToCache(user);
            }, this);
        },
        // Insert (or refresh) a single user in the shared cache.
        _addUserToCache: function(user){
            if (!user){
                return;
            }
            var cacheUserList = this.get('userList'),
                userid = user.get('id');
            if (cacheUserList.getById(userid)){
                cacheUserList.removeById(userid);
            }
            cacheUserList.add(user);
        },
        // App start-up: load roles and, for a logged-in user, prefetch their
        // own user record before signalling readiness.
        initializer: function(){
            NS.roles.load(function(){
                if (UID > 0){
                    this.user(UID, function(err, result){
                        this.initCallbackFire();
                    }, this);
                } else {
                    this.initCallbackFire();
                }
            }, this);
        }
    }, [], {
        APPS: {
            urating: {}
        },
        ATTRS: {
            isLoadAppStructure: {value: true},
            User: {value: NS.User},
            UserList: {value: NS.UserList},
            Profile: {value: NS.Profile},
            // Lazily-created caches shared by the request definitions below.
            profileList: {
                readOnly: true,
                getter: function(){
                    if (!this._profileList){
                        this._profileList = new NS.ProfileList({appInstance: this});
                    }
                    return this._profileList;
                }
            },
            userList: {
                readOnly: true,
                getter: function(){
                    if (!this._userList){
                        this._userList = new NS.UserList({appInstance: this});
                    }
                    return this._userList;
                }
            }
        },
        REQS: {
            profile: {
                args: ['userid'],
                attribute: false,
                type: 'model:Profile',
                cache: function(userid){
                    return this.get('profileList').getById(userid);
                },
                onResponse: function(profile){
                    if (!profile){
                        return;
                    }
                    var userid = profile.get('id'),
                        profileList = this.get('profileList');
                    if (profileList.getById(userid)){
                        profileList.removeById(userid);
                    }
                    profileList.add(profile);
                    return profile;
                }
            },
            profileSave: {
                args: ['profile']
            },
            passwordSave: {
                args: ['password']
            },
            avatarRemove: {
                args: ['userid']
            },
            friendList: {
                attribute: true,
                type: 'modelList:UserList',
                onResponse: function(userList){
                    this._addUsersToCache(userList);
                    return userList;
                }
            },
            userSearch: {
                args: ['search'],
                type: 'modelList:UserList',
                onResponse: function(userList){
                    this._addUsersToCache(userList);
                    return userList;
                }
            },
            user: {
                args: ['userid'],
                type: 'model:User',
                cache: function(userid){
                    return this.get('userList').getById(userid | 0);
                },
                onResponse: function(user){
                    this._addUserToCache(user);
                    return user;
                }
            },
            // Two-phase lookup: requestDataHandle splits the requested ids
            // into cache hits (stashed in _tempCacheUsers) and misses (sent
            // to the server); cache/onResponse then merge both sets back
            // into a single result list.
            userListByIds: {
                args: ['userids'],
                type: 'modelList:UserList',
                requestDataHandle: function(rData){
                    var userList = this.get('userList'),
                        orig = rData.userids,
                        userids = [],
                        cacheUsers = new NS.UserList({appInstance: this});
                    for (var i = 0, userid, user; i < orig.length; i++){
                        userid = orig[i] | 0;
                        if (userid === 0){
                            continue;
                        }
                        user = userList.getById(userid);
                        if (user){
                            cacheUsers.add(user);
                        } else {
                            userids[userids.length] = userid;
                        }
                    }
                    this._tempCacheUsers = cacheUsers;
                    rData.userids = userids;
                    return rData;
                },
                cache: function(userids){
                    var retUserList = this._tempCacheUsers;
                    if (userids.length === 0){
                        this._tempCacheUsers = null;
                        return retUserList;
                    }
                    return null;
                },
                onResponse: function(userList){
                    this._addUsersToCache(userList);
                    if (this._tempCacheUsers){
                        this._tempCacheUsers.each(function(user){
                            userList.add(user);
                        });
                    }
                    this._tempCacheUsers = null;
                    return userList;
                }
            }
        },
        // Widget URL builders for the module workspace.
        URLS: {
            ws: "#app={C#MODNAMEURI}/wspace/ws/",
            profile: {
                view: function(userid){
                    return this.getURL('ws') + 'profile/ProfileWidget/' + (userid | 0) + '/';
                }
            },
            config: {
                subscribe: function(){
                    return this.getURL('ws') + 'subscribe/SubscribeConfigWidget/';
                },
                publicity: function(){
                    return this.getURL('ws') + 'publicity/PublicityConfigWidget/';
                },
            }
        }
    });
};
// @flow
import './rule'; // break cyclical dependency deadlock – #87
import Container from './container';
import LazyResult from './lazy-result';
import Processor from './processor';
import warnOnce from './warn-once';
/**
* Represents a CSS file and contains all its parsed nodes.
*
* @extends Container
*
* @example
* const root = postcss.parse('a{color:black} b{z-index:2}');
* root.type //=> 'root'
* root.nodes.length //=> 2
*/
class Root extends Container {

    constructor(defaults) {
        super(defaults);
        this.type = 'root';
        // A root always exposes a nodes array, even when parsed from empty input.
        if (!this.nodes) this.nodes = [];
    }

    // Removing the first child transfers its `raws.before` (the whitespace
    // that preceded it) to the new first child, so stringification stays
    // byte-equal to the input.
    removeChild(child) {
        child = this.index(child);
        if (child === 0 && this.nodes.length > 1) {
            this.nodes[1].raws.before = this.nodes[child].raws.before;
        }
        return super.removeChild(child);
    }

    // Adjusts the `raws.before` whitespace of newly inserted nodes so they
    // inherit sensible formatting from their insertion point.
    normalize(child, sample, type) {
        const nodes = super.normalize(child);
        if (sample) {
            if (type === 'prepend') {
                // Prepended nodes copy the spacing of the old first node;
                // if there is none, leave `before` undefined.
                if (this.nodes.length > 1) {
                    sample.raws.before = this.nodes[1].raws.before;
                } else {
                    delete sample.raws.before;
                }
            } else if (this.first !== sample) {
                // Appended/inserted nodes mimic the sample node's spacing.
                nodes.forEach(node => {
                    node.raws.before = sample.raws.before;
                });
            }
        }
        return nodes;
    }

    /**
     * Returns a {@link Result} instance representing the root’s CSS.
     *
     * @param {processOptions} [opts] - options with only `to` and `map` keys
     *
     * @return {Result} result with current root’s CSS
     *
     * @example
     * const root1 = postcss.parse(css1, { from: 'a.css' });
     * const root2 = postcss.parse(css2, { from: 'b.css' });
     * root1.append(root2);
     * const result = root1.toResult({ to: 'all.css', map: true });
     */
    toResult(opts = {}) {
        const lazy = new LazyResult(new Processor(), this, opts);
        return lazy.stringify();
    }

    // Deprecated alias kept for backwards compatibility; use removeChild().
    remove(child) {
        warnOnce('Root#remove is deprecated. Use Root#removeChild');
        this.removeChild(child);
    }

    // Deprecated accessor kept for backwards compatibility; read
    // `root.source.input.map` directly instead.
    prevMap() {
        warnOnce('Root#prevMap is deprecated. Use Root#source.input.map');
        return this.source.input.map;
    }

    /**
     * @memberof Root#
     * @member {object} raws - Information to generate byte-to-byte equal
     *                         node string as it was in the origin input.
     *
     * Every parser saves its own properties,
     * but the default CSS parser uses:
     *
     * * `after`: the space symbols after the last child to the end of file.
     * * `semicolon`: is the last child has an (optional) semicolon.
     *
     * @example
     * postcss.parse('a {}\n').raws //=> { after: '\n' }
     * postcss.parse('a {}').raws //=> { after: '' }
     */
}

export default Root;
|
// https://www.codechef.com/APRIL09/problems/B5
#include <iostream>
#include <queue>
#include <tuple>
using namespace std;
// Sliding-window minimum over a matrix: for every k x k submatrix of the
// n x n input, print its minimum. Two passes of the monotonic-deque
// technique: rows first (into x), then columns of x (into y).
typedef tuple<int, int> ii;   // (index, value) pairs kept in the deques
typedef deque<ii> dqii;
// m: input matrix; x: row-wise window minima; y: final k x k minima.
int n, k, m[1001][1001], x[1001][1001], y[1001][1001];
int main() {
  cin >> n >> k;
  for (int i = 0; i < n; i++) for (int j = 0; j < n; j++) cin >> m[i][j];
  // Pass 1: x[i][j] = min of m[i][j .. j+k-1].
  // Invariant: deque values strictly increase front-to-back, so the front
  // always holds the minimum of the current window.
  for (int i = 0; i < n; i++) {
    dqii q;
    for (int j = 0; j < k; j++) {
      // Drop entries >= the incoming value; they can never be a future min.
      while (!q.empty()) {
        int u, v;
        tie(u, v) = q.back();
        if (v < m[i][j]) break;
        q.pop_back();
      }
      q.push_back({j, m[i][j]});
    }
    for (int j = 0; j < n - k; j++) {
      int u, v;
      tie(u, v) = q.front();
      x[i][j] = v;
      // Evict the front once its index slides out of the window.
      if (u <= j) q.pop_front();
      while (!q.empty()) {
        int u, v;
        tie(u, v) = q.back();
        if (v < m[i][j+k]) break;
        q.pop_back();
      }
      q.push_back({j+k, m[i][j+k]});
    }
    x[i][n - k] = get<1>(q.front());
  }
  // Pass 2: same technique down the columns of x, giving the k x k minima.
  for (int j = 0; j < n - k + 1; j++) {
    dqii q;
    for (int i = 0; i < k; i++) {
      while (!q.empty()) {
        int u, v;
        tie(u, v) = q.back();
        if (v < x[i][j]) break;
        q.pop_back();
      }
      q.push_back({i, x[i][j]});
    }
    for (int i = 0; i < n - k; i++) {
      int u, v;
      tie(u, v) = q.front();
      y[i][j] = v;
      if (u <= i) q.pop_front();
      while (!q.empty()) {
        int u, v;
        tie(u, v) = q.back();
        if (v < x[i+k][j]) break;
        q.pop_back();
      }
      q.push_back({i+k, x[i+k][j]});
    }
    y[n - k][j] = get<1>(q.front());
  }
  // Emit the (n-k+1) x (n-k+1) result grid, space-separated.
  for (int i = 0; i < n - k + 1; i++) {
    for (int j = 0; j < n - k + 1; j++) {
      cout << y[i][j];
      if (j < n - k) cout << " ";
    }
    cout << endl;
  }
}
|
#!/usr/bin/env bash
# Regenerate the Go Swagger server and client stubs from swagger.yaml.
# bash (not sh) is required: `set -o pipefail` is not POSIX.
set -euo pipefail

# NOTE: a shell function is used instead of `alias` because aliases are not
# expanded in non-interactive shell scripts.
goswagger() {
  docker run --rm -it --user "$(id -u):$(id -g)" -e GOPATH="$GOPATH:/go" -v "$HOME:$HOME" -w "$(pwd)" quay.io/goswagger/swagger:v0.27.0 "$@"
}

# Wipe previously generated code so stale files never linger.
rm -rf server/*
rm -rf client/*

goswagger generate server -f swagger.yaml -t ./server
goswagger generate client -f swagger.yaml -t ./client
|
#!/usr/bin/env bash
# Configure transcrypt from CI plugin settings. PLUGIN_* variables (if set)
# take precedence and are copied into the TRANSCRYPT_* variables transcrypt
# expects; either form must supply both the cipher and the password.
if [ -n "$PLUGIN_CIPHER" ]; then
  export TRANSCRYPT_CIPHER=$PLUGIN_CIPHER
fi

if [ -z "$TRANSCRYPT_CIPHER" ]; then
  echo "The environment variable TRANSCRYPT_CIPHER or PLUGIN_CIPHER are required." >&2
  exit 1
fi

if [ -n "$PLUGIN_PASSWORD" ]; then
  export TRANSCRYPT_PASSWORD=$PLUGIN_PASSWORD
fi

if [ -z "$TRANSCRYPT_PASSWORD" ]; then
  echo "The environment variable TRANSCRYPT_PASSWORD or PLUGIN_PASSWORD are required." >&2
  exit 1
fi

# Quote both values: an unquoted password containing whitespace or glob
# characters would be split/expanded by the shell before reaching transcrypt.
transcrypt -y -c "$TRANSCRYPT_CIPHER" -p "$TRANSCRYPT_PASSWORD"
|
<reponame>enowmbi/hotel_reservation_system
# A category (tier) of hotel room; owns the rooms assigned to it and is
# destroyed together with them.
class RoomCategory < ApplicationRecord
  has_many :rooms, dependent: :destroy

  # Category names must be present and unique; the remaining attributes
  # mirror the original validation set exactly.
  validates :name, presence: true, uniqueness: true
  validates :description, presence: true
  validates :price, numericality: { greater_than_or_equal_to: 1 }
end
|
<reponame>Lanseria/video-downloader
import { dialog } from "electron";
import * as os from "os";
import * as path from "path";
import { Injectable } from "../decorators";
import { mainWindow } from "../index";
// Singleton service wrapping Electron's native open/save dialogs for the
// main window.
@Injectable("FileService")
export class FileService {
  // Singleton slot; the constructor funnels every `new` through it.
  static instance: any;
  constructor() {
    // Enforce the singleton: constructing again returns the first instance.
    if (FileService.instance) {
      return FileService.instance;
    }
    FileService.instance = this;
  }
  // One-time initialisation hook; currently only triggers the file-open dialog.
  public init() {
    this.onOpenFile();
    // this.onOpenFolder();
    // this.reg =
    //   /\.(MP4|WebM|Ogg|mkv|avi|MOV|ASF|WMV|NAVI|3GP|FLV|F4V|RMVB|HDDVD|rm|rmvb|mp3)$/i;
  }
  /**
   * It opens a file dialog and returns the path of the selected file.
   * Filters restrict the selection to the media extensions listed below.
   * @returns The file path of the selected file.
   */
  onOpenFile() {
    return dialog.showOpenDialog(mainWindow, {
      title: "打开文件",
      properties: ["openFile", "showHiddenFiles"],
      message: "打开媒体文件",
      filters: [
        {
          name: "media",
          extensions: [
            "MP4",
            "WebM",
            "Ogg",
            "mkv",
            "avi",
            "MOV",
            "ASF",
            "WMV",
            "NAVI",
            "3GP",
            "FLV",
            "F4V",
            "RMVB",
            "HDDVD",
            "rm",
            "rmvb",
            "MP3",
            "flac",
          ],
        },
      ],
    });
  }
  /**
   * It shows a save dialog box with the title and default path set.
   * The default path is resolved relative to the user's home directory.
   * @param {string} title - The title of the dialog window.
   * @param {string} filePath - The path to the file to be saved.
   * @returns The file path.
   */
  onSaveFile(title: string, filePath: string) {
    return dialog.showSaveDialog(mainWindow, {
      title,
      defaultPath: path.join(os.homedir(), filePath),
    });
  }
  /**
   * It opens a dialog box to select a JSON file to import.
   * @returns The dialog.showOpenDialog() method returns an array of file paths.
   */
  onOpenJsonFile() {
    return dialog.showOpenDialog(mainWindow, {
      title: "导入数据",
      properties: ["openFile", "showHiddenFiles"],
      message: "打开JSON",
      filters: [
        {
          name: "json",
          extensions: ["json"],
        },
      ],
    });
  }
  /**
   * It opens a dialog box that allows the user to select a folder.
   * @returns The dialog.showOpenDialog() method returns an array of file paths.
   */
  onOpenFolder() {
    return dialog.showOpenDialog(mainWindow, {
      title: "打开文件夹",
      properties: ["openDirectory"],
    });
  }
}
// Reset the singleton slot at module load so the first construction wins.
FileService.instance = null;
|
import { Omit, IHoldingVersion } from 'shared/types/app';
import { SidesDisplayMethod } from '../../placeOrder';
import { IGenericVersionedTypes, GetSettingsAssoc, GetFormSettingsAssoc } from '../helpers';
import * as V1 from './v1';
// Version 2 of the widget-settings schema. It reuses the v1 definitions but
// replaces the 'place-order' widget's settings with a reduced shape that only
// carries the sides display method.
interface IPlaceOrderFormSettings {
  sidesDisplayMethod: SidesDisplayMethod;
}

// In v2 the persisted settings and the form settings are identical.
type IPlaceOrderSettings = IPlaceOrderFormSettings;

// v1 widget->settings map with the 'place-order' entry overridden.
interface IWidgetsSettingsAssoc extends Omit<GetSettingsAssoc<V1.IVersionedTypes>, 'place-order'> {
  'place-order': IPlaceOrderSettings;
}

// v1 widget->form-settings map with the 'place-order' entry overridden.
interface IWidgetsFormSettingsAssoc extends Omit<GetFormSettingsAssoc<V1.IVersionedTypes>, 'place-order'> {
  'place-order': IPlaceOrderFormSettings;
}

export type IVersionedTypes = IGenericVersionedTypes<
  V1.IVersionedTypes['WidgetKind'],
  IWidgetsSettingsAssoc,
  IWidgetsFormSettingsAssoc
>;

// User configuration stamped with schema version 2; presets are re-typed to
// the v2 preset shape while every other v1 field is carried over.
export interface IUserConfig extends Omit<V1.IUserConfig, 'presets' | 'version'>, IHoldingVersion<2> {
  presets: Array<IVersionedTypes['IPreset']>;
}
|
class CloudManager:
    """In-memory bookkeeping for cloud instances, volumes and public IPs."""

    def __init__(self):
        # instance id -> list of attached volume ids
        self.all_instances = {}
        # instance id -> public IP address
        self.instance_public_ip = {}
        # volume id -> volume record
        self.all_volumes = {}
        # accumulated results of setup steps
        self.setup_results = {}

    def create_instance(self, instance_id):
        """Register a new instance with no volumes attached yet."""
        self.all_instances[instance_id] = []

    def attach_volume(self, instance_id, volume_id):
        """Attach ``volume_id`` to an existing instance.

        Raises:
            ValueError: if ``instance_id`` was never created.
        """
        volumes = self.all_instances.get(instance_id)
        if volumes is None:
            raise ValueError("Instance not found")
        volumes.append(volume_id)

    def associate_public_ip(self, instance_id, public_ip):
        """Record the public IP for ``instance_id`` (no existence check)."""
        self.instance_public_ip[instance_id] = public_ip

    def display_setup_results(self):
        """Return the mapping of recorded setup results."""
        return self.setup_results
def dyn_2d_up_operation(x, lf_2d, k_sz, sf=2):
    """
    Dynamic 2d upsampling.

    Tiles the 2-D local filter ``lf_2d`` by a factor of ``sf`` along both
    spatial axes (each filter tap is repeated ``sf`` times, nearest-neighbour
    style) and applies the result to ``x`` via a strided 2-D convolution.

    Args:
        x: input NNabla variable to convolve.
        lf_2d: local filter variable; assumes a 3-D shape -- TODO confirm
            the exact axis meaning (likely (channels, H, W)) against callers.
        k_sz (int): kernel size, used only to derive the padding (k_sz // 2).
        sf (int): upsampling / stride factor (default 2).

    Returns:
        The output of F.convolution_2d using the tiled filter as weights.
    """
    with nn.parameter_scope("Dynamic_2D_Upsampling"):
        y = []  # NOTE(review): dead initialiser -- immediately overwritten below
        sz = lf_2d.shape
        # Insert singleton axes after each spatial dim, tile them sf times,
        # then flatten back: (C, H, W) -> (C, H*sf, W*sf) with each original
        # filter value repeated in sf x sf blocks.
        lf_2d_new = F.reshape(lf_2d, (sz[0], sz[1], 1, sz[2], 1))
        lf_2d_new = F.tile(lf_2d_new, (1, 1, sf, 1, sf))
        lf_2d_new = F.reshape(lf_2d_new, (sz[0], sz[1]*sf, sz[2]*sf))
        y = F.convolution_2d(x, lf_2d_new, stride=(sf, sf), pad=(k_sz//2, k_sz//2))
    return y
<filename>src/main/java/com/mammb/code/jpostal/source/PostalSourceFetcher.java<gh_stars>1-10
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mammb.code.jpostal.source;
import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/**
* PostalSourceFetcher.
*
* @author naotsugu
*/
public class PostalSourceFetcher {

    private final Path baseDir;
    private final boolean recycle;
    private String sourceUrl;

    private PostalSourceFetcher(Path baseDir, String sourceUrl, boolean recycle) {
        this.baseDir = baseDir;
        this.recycle = recycle;
        this.sourceUrl = sourceUrl;
    }

    /**
     * Create the {@code PostalSourceFetcher} instance.
     * @param postalSource the source of postal
     * @return the {@code PostalSourceFetcher} instance
     */
    public static PostalSourceFetcher of(PostalSource postalSource) {
        return new PostalSourceFetcher(Paths.get("./"), postalSource.url(), false);
    }

    /**
     * Create the {@code PostalSourceFetcher} instance that reuses a
     * previously downloaded archive when present.
     * @param postalSource the source of postal
     * @return the {@code PostalSourceFetcher} instance
     */
    public static PostalSourceFetcher recycleOf(PostalSource postalSource) {
        return new PostalSourceFetcher(Paths.get("./"), postalSource.url(), true);
    }

    /**
     * Fetch the postal dictionary csv.
     * Downloads the zip archive (unless recycling an existing copy), unzips
     * it into a temp directory and returns the contained csv file.
     * @return the path of fetched file
     */
    public Path fetch() {
        Path zipPath = baseDir.resolve(fetchedPath(sourceUrl));
        if (!(recycle && Files.exists(zipPath))) {
            zipPath = fetch(sourceUrl, zipPath);
        }
        return unzip(zipPath);
    }

    /** Derives the local file name from the last segment of the url. */
    private static Path fetchedPath(String url) {
        final String fileName = url.substring(url.lastIndexOf("/") + 1);
        return Paths.get(fileName);
    }

    /** Downloads {@code url} into {@code toPath}, overwriting any prior copy. */
    private static Path fetch(String url, Path toPath) {
        try (ReadableByteChannel rbc = Channels.newChannel(new URL(url).openStream());
             FileOutputStream os = new FileOutputStream(toPath.toFile())) {
            os.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return toPath;
    }

    /** Unzips the archive into a temp directory and returns its first csv entry. */
    private static Path unzip(final Path zipPath) {
        try {
            Path tempDir = Files.createTempDirectory(PostalSource.class.getSimpleName() + ".");
            unzip(zipPath, tempDir, StandardCharsets.UTF_8);
            // Files.list returns a stream backed by an open directory handle;
            // it must be closed, so wrap it in try-with-resources.
            try (Stream<Path> entries = Files.list(tempDir)) {
                return entries
                        .filter(p -> p.getFileName().toString().toLowerCase().endsWith(".csv"))
                        .findFirst()
                        .orElseThrow(() -> new IOException("No csv entry found in " + zipPath));
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /** Extracts every entry of the archive beneath {@code unzipLocation}. */
    private static void unzip(
            final Path zipPath,
            final Path unzipLocation,
            final Charset charset) throws IOException {
        if (!Files.exists(unzipLocation)) {
            Files.createDirectories(unzipLocation);
        }
        try (ZipInputStream zipInputStream = new ZipInputStream(
                Files.newInputStream(zipPath),
                charset)) {
            ZipEntry entry = zipInputStream.getNextEntry();
            while (entry != null) {
                // Guard against "Zip Slip": a crafted entry name such as
                // "../../evil" must never escape the extraction directory.
                Path filePath = unzipLocation.resolve(entry.getName()).normalize();
                if (!filePath.startsWith(unzipLocation.normalize())) {
                    throw new IOException("Zip entry outside of target directory: " + entry.getName());
                }
                if (!entry.isDirectory()) {
                    unzipFiles(zipInputStream, filePath);
                } else {
                    Files.createDirectories(filePath);
                }
                zipInputStream.closeEntry();
                entry = zipInputStream.getNextEntry();
            }
        }
    }

    /** Copies the current zip entry's bytes into {@code unzipFilePath}. */
    private static void unzipFiles(
            final ZipInputStream zipInputStream,
            final Path unzipFilePath) throws IOException {
        try (BufferedOutputStream bos = new BufferedOutputStream(
                new FileOutputStream(unzipFilePath.toAbsolutePath().toString()))) {
            byte[] bytesIn = new byte[1024 * 5];
            int read;
            while ((read = zipInputStream.read(bytesIn)) != -1) {
                bos.write(bytesIn, 0, read);
            }
        }
    }
}
|
<filename>app.py
from flask import Flask, request, jsonify
from wabot import WABot
import json
# Minimal webhook entry point: a single POST endpoint that hands the incoming
# JSON payload to the WhatsApp bot and returns whatever the bot produces.
app = Flask(__name__)


@app.route('/', methods=['POST'])
def home():
    """Dispatch an incoming webhook request to the bot and return its reply."""
    if request.method == 'POST':
        return WABot(request.json).processing()


if __name__ == '__main__':
    app.run()
|
#!/bin/bash
# Build (and optionally test or push) the mhus/apache-karaf Docker image.
# Usage: build.sh [clean|test|push]
VERSION=4.2.11.1

# Refuse to run outside the directory that holds the Dockerfile.
if [ ! -f Dockerfile ]; then
    echo "not a docker configuration"
    # `exit` (not `return`): `return` outside a function is an error in an
    # executed script and would NOT stop execution here.
    exit 1
fi

# Remove any previous container/image so the rebuild starts clean.
docker stop karaf
docker rm karaf
docker rmi "mhus/apache-karaf:$VERSION"

if [ "$1" = "clean" ]; then
    DOCKER_BUILDKIT=0 docker build --no-cache -t "mhus/apache-karaf:$VERSION" .
else
    DOCKER_BUILDKIT=0 docker build --progress plain -t "mhus/apache-karaf:$VERSION" .
fi

if [ "$1" = "test" ]; then
    docker run -it -v ~/.m2:/home/user/.m2 -p 15005:5005 -p 18181:8181 --name karaf "mhus/apache-karaf:$VERSION" debug
fi

if [ "$1" = "push" ]; then
    docker push "mhus/apache-karaf:$VERSION"
fi
|
<filename>src/icons/PhLinkSimpleHorizontalBreak.js<gh_stars>1-10
/* GENERATED FILE */
import { html, svg, define } from "hybrids";
const PhLinkSimpleHorizontalBreak = {
color: "currentColor",
size: "1em",
weight: "regular",
mirrored: false,
render: ({ color, size, weight, mirrored }) => html`
<svg
xmlns="http://www.w3.org/2000/svg"
width="${size}"
height="${size}"
fill="${color}"
viewBox="0 0 256 256"
transform=${mirrored ? "scale(-1, 1)" : null}
>
${weight === "bold" &&
svg`<path d="M103.99316,175.99609h-40a48,48,0,0,1,0-96h40" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>
<path d="M152,175.99609h40a48,48,0,0,0,0-96H152" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>`}
${weight === "duotone" &&
svg`<path d="M103.99316,175.99609h-40a48,48,0,0,1,0-96h40" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
<path d="M152,175.99609h40a48,48,0,0,0,0-96H152" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
${weight === "fill" &&
svg`<g>
<path d="M103.99316,167.99609h-40a40,40,0,0,1,0-80h40a8,8,0,0,0,0-16h-40a56,56,0,0,0,0,112h40a8,8,0,1,0,0-16Z"/>
<path d="M192,71.99609H152a8,8,0,0,0,0,16h40a40,40,0,0,1,0,80H152a8,8,0,0,0,0,16h40a56,56,0,0,0,0-112Z"/>
</g>`}
${weight === "light" &&
svg`<path d="M103.99316,175.99609h-40a48,48,0,0,1,0-96h40" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>
<path d="M152,175.99609h40a48,48,0,0,0,0-96H152" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>`}
${weight === "thin" &&
svg`<path d="M103.99316,175.99609h-40a48,48,0,0,1,0-96h40" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>
<path d="M152,175.99609h40a48,48,0,0,0,0-96H152" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>`}
${weight === "regular" &&
svg`<path d="M103.99316,175.99609h-40a48,48,0,0,1,0-96h40" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
<path d="M152,175.99609h40a48,48,0,0,0,0-96H152" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
</svg>
`,
};
define("ph-link-simple-horizontal-break", PhLinkSimpleHorizontalBreak);
export default PhLinkSimpleHorizontalBreak;
|
<filename>test.js
import test from 'ava';
import isPublicRepo from '.';
// Live integration checks: both tests hit the real GitHub API, so they need
// network access and assume the fixture repositories keep their visibility
// (ink is public, ava-next is private).
test('public', async t => {
	const isPublic = await isPublicRepo('vadimdemedes/ink');
	t.true(isPublic);
});

test('private', async t => {
	const isPublic = await isPublicRepo('vadimdemedes/ava-next');
	t.false(isPublic);
});
|
const express = require('express');
const router = express.Router();
const preset_commands = require('../common/preset_commands');
/* GET home page. */
// Renders the remote-group selection screen. Each configured command group
// from preset_commands becomes one selectable bar on the page.
router.get('/', function(req, res, next) {
  const group_names = Object.keys(preset_commands);
  // The .group_bar CSS is generated per request because the bar height
  // depends on the number of groups: the viewport is split evenly, capped
  // at 4 rows in portrait and 2 rows in landscape.
  res.render('index', {
    title: 'Which Remote?',
    group_names,
    portrait_css: `.group_bar {
      height: calc(100%/${Math.min(4, group_names.length)});
      line-height: calc(100vh/${Math.min(4, group_names.length)});
    }`,
    landscape_css: `.group_bar {
      height: calc(100%/${Math.min(2, group_names.length)});
      line-height: calc(100vh/${Math.min(2, group_names.length)});
    }`,
  });
});

module.exports = router;
|
import numpy as np
class ObservationProcessor:
    """Slices generalized position coordinates (qpos) out of observations."""

    def __init__(self, model):
        # The model only needs to expose ``nq`` (number of position coords)
        # for extract_qpos to work.
        self.model = model

    def extract_qpos(self, obs):
        """
        Extracts the qpos from the given observation.

        A leading 0.0 is prepended to the observation (restoring a coordinate
        that the environment drops), then the first ``model.nq`` entries are
        returned.

        Args:
            obs (np.ndarray): The observation array.

        Returns:
            np.ndarray: The qpos extracted from the observation.
        """
        nq = self.model.nq
        state = np.insert(obs, 0, 0.0)
        return state[:nq]
import random
def main():
    """Run the self-checks and report the outcome.

    Uses the print() function (valid in both Python 2 and 3) instead of the
    Python-2-only ``print`` statement, which is a syntax error under Python 3.
    """
    print(correctness_test())
def poker(hands):
    """Return a list of the winning hands: poker([hand,...]) => [hand,...]

    Every hand tied for the best rank is returned, so the result has one
    element for a clear winner and several on a split pot.
    """
    return allmax(hands, key=hand_rank)
def allmax(iterable, key=None):
    """Return a list of every item whose key equals the maximum key.

    ``key`` defaults to the identity function; ties are returned in the
    order they appear in ``iterable``. An empty iterable yields [].
    """
    keyfn = key if key is not None else (lambda item: item)
    best, best_key = [], None
    for item in iterable:
        item_key = keyfn(item)
        if not best or item_key > best_key:
            # New champion: restart the result list.
            best, best_key = [item], item_key
        elif item_key == best_key:
            # Tie with the current champion: keep both.
            best.append(item)
    return best
def deal(num_hands, n=5, deck=None):
    """Deal ``num_hands`` disjoint poker hands of ``n`` cards each.

    Args:
        num_hands: number of hands to deal.
        n: cards per hand (default 5).
        deck: optional sequence of cards to deal from; defaults to a fresh
            standard 52-card deck. The input is copied, so the caller's
            list is never shuffled in place (the previous mutable default
            argument was shuffled and shared across calls).

    Returns:
        A list of ``num_hands`` lists of ``n`` cards each.

    Raises:
        ValueError: if the deck cannot supply num_hands * n distinct cards
            (previously this silently produced short/empty hands).
    """
    if deck is None:
        deck = [r + s for r in '23456789TJQKA' for s in 'SHDC']
    else:
        deck = list(deck)
    if num_hands * n > len(deck):
        raise ValueError('not enough cards to deal %d hands of %d' % (num_hands, n))
    random.shuffle(deck)
    return [deck[n*i:n*(i+1)] for i in range(num_hands)]
def hand_rank(hand):
    """Return a value indicating the rank of a hand.

    The result is a tuple: first element is the hand category
    (8 = straight flush down to 0 = high card), the rest are tie-breakers.
    Tuples compare lexicographically, so a higher category always beats a
    lower one and equal categories fall through to the kickers.
    """
    ranks = card_ranks(hand)
    # NOTE: branch order matters -- each test assumes every stronger
    # category above it has already been ruled out.
    if straight(ranks) and flush(hand):
        return (8, max(ranks))  # straight flush: high card breaks ties
    elif kind(4, ranks):
        return (7, kind(4, ranks), kind(1,ranks))  # quads, then the kicker
    elif kind(3, ranks) and kind(2, ranks):
        return (6, kind(3, ranks), kind(2, ranks))  # full house
    elif flush(hand):
        return (5, ranks)  # flush: compare all five cards high-to-low
    elif straight(ranks):
        return (4, max(ranks))  # straight
    elif kind(3, ranks):
        return (3, kind(3, ranks), ranks)  # trips, then the whole hand
    elif two_pair(ranks):
        return (2, two_pair(ranks), ranks)  # (high pair, low pair), then hand
    elif kind(2, ranks):
        return (1, kind(2, ranks), ranks)  # one pair
    else:
        return (0, ranks)  # high card
def card_ranks(hand):
    """Return the numeric ranks of ``hand``, sorted highest first.

    An ace normally counts as 14, but the wheel (A-2-3-4-5) is remapped to
    [5, 4, 3, 2, 1] so the ace plays low in that straight.
    """
    # '--' pads the string so '2' sits at index 2 and 'A' at index 14.
    order = '--23456789TJQKA'
    ranks = sorted((order.index(card[0]) for card in hand), reverse=True)
    if ranks == [14, 5, 4, 3, 2]:
        return [5, 4, 3, 2, 1]
    return ranks
def straight(ranks):
    """Return True if ``ranks`` are five distinct consecutive values."""
    distinct = set(ranks)
    # Five different ranks spanning exactly 4 means they are consecutive.
    return len(distinct) == 5 and max(distinct) - min(distinct) == 4
def flush(hand):
    """Return True if every card in ``hand`` carries the same suit."""
    # Collapse the suit characters into a set; a flush leaves exactly one.
    return len({suit for _rank, suit in hand}) == 1
def kind(n, ranks):
    """Return the first rank occurring exactly ``n`` times, else None.

    Because ``ranks`` is conventionally sorted high-to-low, the first match
    is also the highest such rank.
    """
    return next((rank for rank in ranks if ranks.count(rank) == n), None)
def two_pair(ranks):
    """Return (high pair, low pair) when two distinct pairs exist, else None.

    Scanning the reversed ranks finds the lowest pair; if it differs from
    the highest pair there really are two of them.
    """
    hi_pair = kind(2, ranks)
    lo_pair = kind(2, list(reversed(ranks)))
    if hi_pair and lo_pair != hi_pair:
        return (hi_pair, lo_pair)
    return None
def correctness_test():
    """Test cases for our poker program"""
    # Fixed hands covering every major category plus tricky edge cases
    # (ace-low straight, tied full houses).
    sf = "6C 7C 8C 9C TC".split() # straight flush
    fk = "9D 9H 9S 9C 7D".split() # four of a kind
    fh = "TD TC TH 7C 7D".split() # full house
    tp = "5S 5D 9H 9C 6S".split() # two pair
    s1 = "AS 2S 3S 4S 5C".split() # straight, ace low
    s2 = "2C 3C 4C 5S 6S".split() # straight
    ah = "AS 2S 3S 4S 6C".split() # ace high
    sh = "2S 3S 4S 6C 7D".split() # 7 high
    assert card_ranks(sf) == [10,9,8,7,6]
    assert card_ranks(fk) == [9,9,9,9,7]
    assert card_ranks(fh) == [10,10,10,7,7]
    fkranks = card_ranks(fk)
    tpranks = card_ranks(tp)
    # kind(n, ranks) returns the rank appearing exactly n times, else None.
    assert kind(4, fkranks) == 9
    assert kind(3, fkranks) == None
    assert kind(2, fkranks) == None
    assert kind(1, fkranks) == 7
    assert two_pair(fkranks) == None
    assert two_pair(tpranks) == (9,5)
    assert straight([9,8,7,6,5]) == True
    assert straight([9,8,8,6,5]) == False
    assert flush(sf) == True
    assert flush(fk) == False
    # poker() returns the list of winning hand(s), including all ties.
    assert poker([sf, fk, fh]) == [sf]
    assert poker([fk, fh]) == [fk]
    assert poker([fh, fh]) == [fh, fh]
    assert poker([sf]) == [sf]
    assert poker([fh]*99 + [sf]) == [sf]
    assert poker([s1, ah, sh]) == [s1]
    # hand_rank tuples: (category, tiebreakers...).
    assert hand_rank(sf) == (8,10)
    assert hand_rank(fk) == (7,9,7)
    assert hand_rank(fh) == (6,10,7)
    return "tests passed"
def statistical_test(n=700):
    """Sample n random hands and print a table of percentages for each type of hand.

    Implements the previously empty stub: deals 10 disjoint hands per
    shuffled deck, tallies hand_rank categories, and prints the observed
    frequency of each category from best to worst.
    """
    # Category names indexed by hand_rank(hand)[0] (0 = high card .. 8 = straight flush).
    names = ["high card", "pair", "two pair", "three of a kind", "straight",
             "flush", "full house", "four of a kind", "straight flush"]
    counts = [0] * len(names)
    dealt = 0
    while dealt < n:
        for hand in deal(10):
            counts[hand_rank(hand)[0]] += 1
            dealt += 1
            if dealt == n:
                break
    for rank in range(len(names) - 1, -1, -1):
        print("%16s: %6.3f %%" % (names[rank], 100.0 * counts[rank] / n))
# Script entry point; main() is expected to be defined earlier in this
# module (outside the visible chunk) — TODO confirm.
if __name__ == '__main__':
    main()
|
package server
import (
"crypto/tls"
"log"
"time"
"github.com/aukbit/pluto/v6/common"
"github.com/aukbit/pluto/v6/discovery"
"github.com/aukbit/pluto/v6/server/router"
"github.com/rs/zerolog"
"google.golang.org/grpc"
)
// Option is used to set options for the server.
type Option interface {
	apply(*Server)
}

// optionFunc wraps a func so it satisfies the Option interface.
type optionFunc func(*Server)

// apply invokes the wrapped function on the server being configured.
func (f optionFunc) apply(s *Server) {
	f(s)
}
// ID sets the server id.
func ID(id string) Option {
	return optionFunc(func(s *Server) {
		s.cfg.ID = id
	})
}

// Name sets the server name, sanitized via common.SafeName with
// DefaultName as the fallback.
func Name(n string) Option {
	return optionFunc(func(s *Server) {
		s.cfg.Name = common.SafeName(n, DefaultName)
	})
}

// Description sets a human-readable server description.
func Description(d string) Option {
	return optionFunc(func(s *Server) {
		s.cfg.Description = d
	})
}

// Addr sets the server listen address.
func Addr(a string) Option {
	return optionFunc(func(s *Server) {
		s.cfg.Addr = a
	})
}

// Mux sets the server request multiplexer (router).
func Mux(m *router.Router) Option {
	return optionFunc(func(s *Server) {
		s.cfg.Mux = m
	})
}

// TLSConfig loads the given certificate/key pair and configures the
// server for HTTPS (TLS 1.2+, server cipher-suite preference).
// NOTE(review): a failure to load the key pair is only logged and the
// option silently leaves the server without TLS — confirm this
// best-effort behavior is intended.
func TLSConfig(certFile, keyFile string) Option {
	return optionFunc(func(s *Server) {
		cer, err := tls.LoadX509KeyPair(certFile, keyFile)
		if err != nil {
			log.Printf("ERROR tls.LoadX509KeyPair %v", err)
			return
		}
		s.cfg.TLSConfig = &tls.Config{
			MinVersion: tls.VersionTLS12,
			CurvePreferences: []tls.CurveID{tls.CurveP521, tls.CurveP384, tls.CurveP256},
			PreferServerCipherSuites: true,
			Certificates: []tls.Certificate{cer},
		}
		s.cfg.Format = "https"
	})
}
// GRPCRegister stores the gRPC service registration callback and
// switches the server format to "grpc".
func GRPCRegister(fn GRPCRegisterServiceFunc) Option {
	return optionFunc(func(s *Server) {
		s.cfg.GRPCRegister = fn
		s.cfg.Format = "grpc"
	})
}

// Middlewares appends router.Middleware values to the server config
// (mutex-guarded, so options can be applied concurrently).
func Middlewares(m ...router.Middleware) Option {
	return optionFunc(func(s *Server) {
		s.cfg.mu.Lock()
		defer s.cfg.mu.Unlock()
		s.cfg.Middlewares = append(s.cfg.Middlewares, m...)
	})
}

// UnaryServerInterceptors appends grpc.UnaryServerInterceptor values.
func UnaryServerInterceptors(i ...grpc.UnaryServerInterceptor) Option {
	return optionFunc(func(s *Server) {
		s.cfg.mu.Lock()
		defer s.cfg.mu.Unlock()
		s.cfg.UnaryServerInterceptors = append(s.cfg.UnaryServerInterceptors, i...)
	})
}

// StreamServerInterceptors appends grpc.StreamServerInterceptor values.
func StreamServerInterceptors(i ...grpc.StreamServerInterceptor) Option {
	return optionFunc(func(s *Server) {
		s.cfg.mu.Lock()
		defer s.cfg.mu.Unlock()
		s.cfg.StreamServerInterceptors = append(s.cfg.StreamServerInterceptors, i...)
	})
}

// Discovery sets the service discovery implementation.
func Discovery(d discovery.Discovery) Option {
	return optionFunc(func(s *Server) {
		s.cfg.Discovery = d
	})
}

// Logger sets a shallow copy from an input logger
func Logger(l zerolog.Logger) Option {
	return optionFunc(func(s *Server) {
		s.logger = l
	})
}

// ReadTimeout is used by the http server to set a maximum duration before
// timing out read of the request. The default timeout is 10 seconds.
func ReadTimeout(t time.Duration) Option {
	return optionFunc(func(s *Server) {
		s.cfg.ReadTimeout = t
	})
}

// WriteTimeout is used by the http server to set a maximum duration before
// timing out write of the response. The default timeout is 10 seconds.
func WriteTimeout(t time.Duration) Option {
	return optionFunc(func(s *Server) {
		s.cfg.WriteTimeout = t
	})
}
|
#!/bin/bash
cd "$(dirname "${BASH_SOURCE[0]}")"
set -ex

# Fail fast with a clear message when VERSION is not provided; otherwise
# the image would be tagged "sourcegraph/alpine:" and the push would fail
# confusingly.
: "${VERSION:?VERSION must be set}"

docker tag sourcegraph/alpine:"$VERSION" sourcegraph/alpine:latest
docker push sourcegraph/alpine:"$VERSION"
docker push sourcegraph/alpine:latest
|
CREATE TABLE cars (
    car_id INT AUTO_INCREMENT PRIMARY KEY,
    make VARCHAR(20) NOT NULL,
    model VARCHAR(20) NOT NULL,
    year INTEGER NOT NULL,
    color VARCHAR(20) NOT NULL,
    -- Bare DECIMAL defaults to DECIMAL(10,0) in MySQL, silently
    -- truncating cents; give the price an explicit precision/scale.
    price DECIMAL(10, 2) NOT NULL
);
<reponame>Melgo4/ICS4U<gh_stars>0
/**
 * Immutable (name, score) pair ordered by descending score, with ties
 * broken by ascending name.
 */
public class PersonsScore implements Comparable<PersonsScore> {
    private final String name;
    private final int score;

    public PersonsScore(String name, int score) {
        this.name = name;
        this.score = score;
    }

    /** Higher scores sort first; equal scores fall back to name order. */
    @Override
    public int compareTo(PersonsScore other) {
        int byScore = Integer.compare(other.score, this.score);
        return byScore != 0 ? byScore : this.name.compareTo(other.name);
    }

    /**
     * Equal to another PersonsScore with the same name and score.
     * Also reports equality against a bare String matching the name — a
     * deliberate (and asymmetric) shortcut the linked-list code relies on.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o instanceof PersonsScore) {
            PersonsScore other = (PersonsScore) o;
            return this.score == other.score && this.name.equals(other.name);
        }
        if (o instanceof String) {
            //Hack to make the linked list work.
            return this.name.equals(o);
        }
        return false;
    }

    @Override
    public int hashCode() {
        return this.name.hashCode() * 31 + this.score;
    }

    @Override
    public String toString() {
        return "PersonsScore{" + "name='" + this.name + '\'' + ", score=" + this.score + '}';
    }
}
|
# Train and evaluate a person re-ID model on Market-1501 using a GhostNet
# backbone with ABD attention modules (cam/pam) and a shallow CAM branch.
# Loss: hard-triplet (weight 0.1, margin 1.2) plus label-smoothed softmax,
# with orthogonality regularizers (--use-of / --use-ow). Adam, lr 3e-4,
# stepped at epochs 20/40, 80 epochs total; eval every epoch with
# flip-averaged features.
python train.py -s market1501 -t market1501 \
    --flip-eval --eval-freq 1 \
    --label-smooth \
    --criterion htri \
    --lambda-htri 0.1  \
    --data-augment crop random-erase \
    --margin 1.2 \
    --train-batch-size 64 \
    --height 384 \
    --width 128 \
    --optim adam --lr 0.0003 \
    --stepsize 20 40 \
    --gpu-devices 0 \
    --max-epoch 80 \
    --save-dir path/to/dir \
    --arch ghostnet \
    --use-of \
    --abd-dan cam pam \
    --abd-np 2 \
    --shallow-cam \
    --use-ow
import glob
import os
import os.path as osp

import cv2
import numpy as np

# For every light_* directory under main_xml/scene*, count the mask*.png
# files that are completely black (all-zero images). Track and report the
# maximum such count seen, printing each directory that sets a new record.
scenes = glob.glob('main_xml/scene*')
maxNum = 0
for scene in scenes:
    for lightDir in glob.glob(osp.join(scene, 'light_*')):
        emptyCount = 0
        for maskName in glob.glob(osp.join(lightDir, 'mask*.png')):
            mask = cv2.imread(maskName).astype(np.float32)
            if np.sum(mask) == 0:
                emptyCount += 1
        if emptyCount > maxNum:
            maxNum = emptyCount
            print(lightDir, emptyCount)
print(maxNum)
|
# Start the Elasticsearch systemd service (requires sudo privileges).
sudo systemctl start elasticsearch.service
|
#!/bin/bash
source set_main_dir.sh

PROCESS_PID_FILE="temp.pid"
rm -f $PROCESS_PID_FILE
mkdir -p log

# run_case DISPLAY_NAME LOG_BASE CMD...
# Records the start timestamp in log/<LOG_BASE>_startup.txt, launches
# CMD in the background, waits for the service to answer via pinger.sh,
# then kills the background process and removes the pid file.
# (Factored out of six copy-pasted case arms.)
run_case() {
    local display_name="$1"
    local log_file="log/$2_startup.txt"
    shift 2
    echo "Testing ${display_name} startup time..."
    echo "Start:" `(date +"%T.%3N")` >> "$log_file"
    "$@" & echo $! > $PROCESS_PID_FILE
    ./pinger.sh "$log_file"
    PID=$(cat $PROCESS_PID_FILE)
    kill $PID
    rm -f $PROCESS_PID_FILE
}

case $1 in
    quarkus)
        run_case "Quarkus" "quarkus" java -jar $MAIN_DIR/runs/quarkus-app/quarkus-run.jar
        ;;
    quarkus_native)
        run_case "Quarkus native" "quarkus_native" $MAIN_DIR/runs/quarkusrest_native
        ;;
    micronaut)
        run_case "Micronaut" "micronaut" java -jar $MAIN_DIR/runs/micronautrest.jar
        ;;
    micronaut_native)
        run_case "Micronaut native" "micronaut_native" $MAIN_DIR/runs/micronautrest_native
        ;;
    spring)
        run_case "Spring" "spring" java -jar $MAIN_DIR/runs/springrest.jar
        ;;
    spring_jetty)
        run_case "Spring_jetty" "spring_jetty" java -jar $MAIN_DIR/runs/springrest_jetty.jar
        ;;
    *)
        echo "Choose among: quarkus, quarkus_native, micronaut, micronaut_native, spring, spring_jetty"
        ;;
esac
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ha;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
import org.apache.hadoop.ha.HealthMonitor.State;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
import org.apache.hadoop.test.MultithreadedTestUtil.TestingThread;
import org.apache.zookeeper.KeeperException.NoNodeException;
import org.apache.zookeeper.data.Stat;
import org.apache.zookeeper.server.ZooKeeperServer;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Ints;
/**
* Harness for starting two dummy ZK FailoverControllers, associated with
* DummyHAServices. This harness starts two such ZKFCs, designated by
* indexes 0 and 1, and provides utilities for building tests around them.
*/
public class MiniZKFCCluster {
  /** Shared multithreaded-test context for all ZKFC threads. */
  private final TestContext ctx;
  /** In-process ZooKeeper server backing the elections. */
  private final ZooKeeperServer zks;

  /** The two dummy HA services, indexes 0 and 1. */
  private DummyHAService svcs[];
  /** ZKFC threads for the two services; null until start() is called. */
  private DummyZKFCThread thrs[];
  private Configuration conf;

  /** Resource both services contend for; used to detect split-brain. */
  private DummySharedResource sharedResource = new DummySharedResource();

  private static final Log LOG = LogFactory.getLog(MiniZKFCCluster.class);

  public MiniZKFCCluster(Configuration conf, ZooKeeperServer zks) {
    this.conf = conf;
    // Fast check interval so tests run faster
    conf.setInt(CommonConfigurationKeys.HA_HM_CHECK_INTERVAL_KEY, 50);
    conf.setInt(CommonConfigurationKeys.HA_HM_CONNECT_RETRY_INTERVAL_KEY, 50);
    conf.setInt(CommonConfigurationKeys.HA_HM_SLEEP_AFTER_DISCONNECT_KEY, 50);
    svcs = new DummyHAService[2];
    svcs[0] = new DummyHAService(HAServiceState.INITIALIZING,
        new InetSocketAddress("svc1", 1234));
    svcs[0].setSharedResource(sharedResource);
    svcs[1] = new DummyHAService(HAServiceState.INITIALIZING,
        new InetSocketAddress("svc2", 1234));
    svcs[1].setSharedResource(sharedResource);

    this.ctx = new TestContext();
    this.zks = zks;
  }

  /**
   * Set up two services and their failover controllers. svc1 is started
   * first, so that it enters ACTIVE state, and then svc2 is started,
   * which enters STANDBY
   */
  public void start() throws Exception {
    // Format the base dir, should succeed
    thrs = new DummyZKFCThread[2];
    thrs[0] = new DummyZKFCThread(ctx, svcs[0]);
    assertEquals(0, thrs[0].zkfc.run(new String[]{"-formatZK"}));
    ctx.addThread(thrs[0]);
    thrs[0].start();

    LOG.info("Waiting for svc0 to enter active state");
    waitForHAState(0, HAServiceState.ACTIVE);

    LOG.info("Adding svc1");
    thrs[1] = new DummyZKFCThread(ctx, svcs[1]);
    thrs[1].start();
    waitForHAState(1, HAServiceState.STANDBY);
  }

  /**
   * Stop the services. Safe to call even if {@link #start()} was never
   * invoked (previously this threw a NullPointerException because
   * {@code thrs} was still null).
   * @throws Exception if either of the services had encountered a fatal error
   */
  public void stop() throws Exception {
    if (thrs != null) {
      for (DummyZKFCThread thr : thrs) {
        if (thr != null) {
          thr.interrupt();
        }
      }
    }
    if (ctx != null) {
      ctx.stop();
    }
    sharedResource.assertNoViolations();
  }

  /**
   * @return the TestContext implementation used internally. This allows more
   * threads to be added to the context, etc.
   */
  public TestContext getTestContext() {
    return ctx;
  }

  public DummyHAService getService(int i) {
    return svcs[i];
  }

  public ActiveStandbyElector getElector(int i) {
    return thrs[i].zkfc.getElectorForTests();
  }

  public DummyZKFC getZkfc(int i) {
    return thrs[i].zkfc;
  }

  public void setHealthy(int idx, boolean healthy) {
    svcs[idx].isHealthy = healthy;
  }

  public void setFailToBecomeActive(int idx, boolean doFail) {
    svcs[idx].failToBecomeActive = doFail;
  }

  public void setFailToBecomeStandby(int idx, boolean doFail) {
    svcs[idx].failToBecomeStandby = doFail;
  }

  public void setFailToFence(int idx, boolean doFail) {
    svcs[idx].failToFence = doFail;
  }

  public void setUnreachable(int idx, boolean unreachable) {
    svcs[idx].actUnreachable = unreachable;
  }

  /**
   * Wait for the given HA service to enter the given HA state.
   * This is based on the state of ZKFC, not the state of HA service.
   * There could be difference between the two. For example,
   * When the service becomes unhealthy, ZKFC will quit ZK election and
   * transition to HAServiceState.INITIALIZING and remain in that state
   * until the service becomes healthy.
   */
  public void waitForHAState(int idx, HAServiceState state)
      throws Exception {
    DummyZKFC svc = getZkfc(idx);
    while (svc.getServiceState() != state) {
      ctx.checkException();
      Thread.sleep(50);
    }
  }

  /**
   * Wait for the ZKFC to be notified of a change in health state.
   */
  public void waitForHealthState(int idx, State state)
      throws Exception {
    ZKFCTestUtil.waitForHealthState(thrs[idx].zkfc, state, ctx);
  }

  /**
   * Wait for the given elector to enter the given elector state.
   * @param idx the service index (0 or 1)
   * @param state the state to wait for
   * @throws Exception if it times out, or an exception occurs on one
   * of the ZKFC threads while waiting.
   */
  public void waitForElectorState(int idx,
      ActiveStandbyElector.State state) throws Exception {
    ActiveStandbyElectorTestUtil.waitForElectorState(ctx,
        getElector(idx), state);
  }

  /**
   * Expire the ZK session of the given service. This requires
   * (and asserts) that the given service be the current active.
   * @throws NoNodeException if no service holds the lock
   */
  public void expireActiveLockHolder(int idx)
      throws NoNodeException {
    Stat stat = new Stat();
    byte[] data = zks.getZKDatabase().getData(
        DummyZKFC.LOCK_ZNODE, stat, null);

    assertArrayEquals(Ints.toByteArray(svcs[idx].index), data);
    long session = stat.getEphemeralOwner();
    LOG.info("Expiring svc " + idx + "'s zookeeper session " + session);
    zks.closeSession(session);
  }

  /**
   * Wait for the given HA service to become the active lock holder.
   * If the passed svc is null, waits for there to be no active
   * lock holder.
   */
  public void waitForActiveLockHolder(Integer idx)
      throws Exception {
    DummyHAService svc = idx == null ? null : svcs[idx];
    ActiveStandbyElectorTestUtil.waitForActiveLockData(ctx, zks,
        DummyZKFC.SCOPED_PARENT_ZNODE,
        (idx == null) ? null : Ints.toByteArray(svc.index));
  }

  /**
   * Expires the ZK session associated with service 'fromIdx', and waits
   * until service 'toIdx' takes over.
   * @throws Exception if the target service does not become active
   */
  public void expireAndVerifyFailover(int fromIdx, int toIdx)
      throws Exception {
    Preconditions.checkArgument(fromIdx != toIdx);

    getElector(fromIdx).preventSessionReestablishmentForTests();
    try {
      expireActiveLockHolder(fromIdx);

      waitForHAState(fromIdx, HAServiceState.STANDBY);
      waitForHAState(toIdx, HAServiceState.ACTIVE);
    } finally {
      getElector(fromIdx).allowSessionReestablishmentForTests();
    }
  }

  /**
   * Test-thread which runs a ZK Failover Controller corresponding
   * to a given dummy service.
   */
  private class DummyZKFCThread extends TestingThread {
    private final DummyZKFC zkfc;

    public DummyZKFCThread(TestContext ctx, DummyHAService svc) {
      super(ctx);
      this.zkfc = new DummyZKFC(conf, svc);
    }

    @Override
    public void doWork() throws Exception {
      try {
        assertEquals(0, zkfc.run(new String[0]));
      } catch (InterruptedException ie) {
        // Interrupted by main thread, that's OK.
      }
    }
  }

  static class DummyZKFC extends ZKFailoverController {
    private static final String DUMMY_CLUSTER = "dummy-cluster";
    /** Parent znode for this dummy cluster's election data. */
    public static final String SCOPED_PARENT_ZNODE =
        ZKFailoverController.ZK_PARENT_ZNODE_DEFAULT + "/" +
        DUMMY_CLUSTER;
    private static final String LOCK_ZNODE =
        SCOPED_PARENT_ZNODE + "/" + ActiveStandbyElector.LOCK_FILENAME;
    private final DummyHAService localTarget;

    public DummyZKFC(Configuration conf, DummyHAService localTarget) {
      super(conf, localTarget);
      this.localTarget = localTarget;
    }

    @Override
    protected byte[] targetToData(HAServiceTarget target) {
      return Ints.toByteArray(((DummyHAService)target).index);
    }

    @Override
    protected HAServiceTarget dataToTarget(byte[] data) {
      int index = Ints.fromByteArray(data);
      return DummyHAService.getInstance(index);
    }

    @Override
    protected void loginAsFCUser() throws IOException {
    }

    @Override
    protected String getScopeInsideParentNode() {
      return DUMMY_CLUSTER;
    }

    @Override
    protected void checkRpcAdminAccess() throws AccessControlException {
    }

    @Override
    protected InetSocketAddress getRpcAddressToBindTo() {
      return new InetSocketAddress(0);
    }

    @Override
    protected void initRPC() throws IOException {
      super.initRPC();
      localTarget.zkfcProxy = this.getRpcServerForTests();
    }

    @Override
    protected PolicyProvider getPolicyProvider() {
      return null;
    }
  }
}
|
#!/bin/bash
#########################################################################
# File Name: only-hdmi.sh
# Created Time: 2020-12-12, Saturday, 19:09:41
#########################################################################
# Enable the external HDMI-1 output at its preferred mode and turn off
# the built-in LVDS-1 panel (laptop screen).
xrandr --output HDMI-1 --auto --output LVDS-1 --off
|
<filename>include/core/util/pointer_align.h
#pragma once
#include <core/types.h>
namespace core
{
    // Round p up to the next multiple of align (identity when already
    // aligned). align must be non-zero; it need not be a power of two.
    inline void* align_up(void* p, size_t align)
    {
        const uintptr_t addr = uintptr_t(p);
        const uintptr_t rem = addr % align;
        return (void*)(rem ? addr + (align - rem) : addr);
    }

    // Round p down to the previous multiple of align (identity when
    // already aligned). align must be non-zero.
    inline void* align_down(void* p, size_t align)
    {
        const uintptr_t addr = uintptr_t(p);
        return (void*)(addr - addr % align);
    }
}
|
/**
 * Return the input array with duplicate values removed, preserving the
 * order of first occurrence.
 *
 * Uses strict (===) comparison: the previous loose in_array() check
 * could merge unrelated values such as "a" and 0 on PHP < 8.
 *
 * @param array $arr input values
 * @return array values of $arr with later duplicates dropped
 */
function removeDuplicates($arr)
{
    // Accumulates the first occurrence of each value, in original order.
    $unique = array();
    foreach ($arr as $value)
    {
        // Third argument enables strict comparison.
        if (!in_array($value, $unique, true))
        {
            $unique[] = $value;
        }
    }
    return $unique;
}
// Sample usage: prints Array ( [0] => 2 [1] => 3 [2] => 4 [3] => 5 )
$arr = [2, 3, 4, 2, 5, 3];
$unique_arr = removeDuplicates($arr);
print_r($unique_arr);
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// THIS IS A GENERATED FILE. DO NOT MODIFY MANUALLY. @see scripts/compile-icons.js
import * as React from 'react';
// Accessibility props added by SVGR: `title` renders an SVG <title>
// element and `titleId` links it to the root via aria-labelledby.
interface SVGRProps {
  title?: string;
  titleId?: string;
}
// 16x16 "string" type icon; all other SVG props are spread onto the root.
const EuiIconString = ({
  title,
  titleId,
  ...props
}: React.SVGProps<SVGSVGElement> & SVGRProps) => (
  <svg
    width={16}
    height={16}
    viewBox="0 0 16 16"
    xmlns="http://www.w3.org/2000/svg"
    aria-labelledby={titleId}
    {...props}
  >
    {title ? <title id={titleId}>{title}</title> : null}
    <path
      fillRule="evenodd"
      d="M9.297 3L8.93 5.102h1.351l-.32 1.828H8.609l-.656 3.883c-.036.265-.02.466.05.601.071.135.247.208.528.219.11.005.334-.008.672-.04L9.016 13.5a4.16 4.16 0 01-1.383.195c-.797-.01-1.393-.244-1.79-.703-.395-.458-.557-1.08-.484-1.867l.688-4.195H5l.313-1.828h1.046L6.727 3h2.57z"
    />
  </svg>
);
export const icon = EuiIconString;
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-3012-1
#
# Security announcement date: 2016-06-20 00:00:00 UTC
# Script generation date: 2017-01-01 21:05:31 UTC
#
# Operating System: Ubuntu 14.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - wget:1.15-1ubuntu1.14.04.2
#
# Last versions recommended by security team:
# - wget:1.15-1ubuntu1.14.04.2
#
# CVE List:
# - CVE-2016-4971
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade wget to the patched build listed above (fix for CVE-2016-4971).
sudo apt-get install --only-upgrade wget=1.15-1ubuntu1.14.04.2 -y
|
#!/bin/bash
# //TODO Needs testing
# Shebang changed from /bin/sh: the ${i/_/.} substitution below is a
# bashism and fails under dash.
# Abort when config.d is missing instead of running the rm -rf loop
# from the wrong working directory.
cd config.d || exit 1
for i in _*
do
    # Skip the literal "_*" produced when the glob matches nothing.
    [ -e "$i" ] || continue
    # Map e.g. _bashrc -> ~/.bashrc and remove it.
    rm -rf "${HOME}/${i/_/.}"
done
# //TODO Restore backups? Note, this depends on the backup process preserving directory structure
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function  # py2-compatible print(); enables running under Python 3


def printMax(a, b):
    """Print whichever of a and b is larger (b wins ties)."""
    if a > b:
        print(a, 'is maximum')
    else:
        print(b, 'is maximum')

#printMax(3, 4)

x = 5
y = 7

printMax(x, y)
# Migration: adds a free-form JSON column for per-post metadata.
class AddMetadataToPosts < ActiveRecord::Migration[5.0]
  def change
    add_column :posts, :metadata, :json
  end
end
|
// javascript: (function () { var d = document, s = d.createElement("script") s.setAttribute("src", "http://localhost:8000/embed.js") d.body.appendChild(s) })()
// Bootstrap: inject jQuery into the host page before the bar code runs.
// NOTE(review): these are implicit globals (no var/let) — presumably
// intentional for a bookmarklet, but they will clobber page globals.
d = document
s = d.createElement("script")
s.setAttribute("src", "https://code.jquery.com/jquery-1.12.4.js")
d.body.appendChild(s)
// Slide the panel into view and switch the toggle button to "close" mode.
function openBar() {
    const content = document.getElementById('takcobar-content')
    const btn = document.getElementById('takcobar-btn')
    content.style.marginTop = '35px'
    btn.innerHTML = ' ▼ 🌮'
    btn.onclick = closeBar
}
// Slide the panel up by its own height, restore the "open" toggle, and
// reset the tab highlight and panel content.
function closeBar() {
    const offset = String($('#takcobar-content').height());
    const content = document.getElementById('takcobar-content')
    const btn = document.getElementById('takcobar-btn')
    content.style.marginTop = `-${offset}px`
    btn.innerHTML = ' ꠵ 🌮'
    btn.onclick = openBar
    $("#takcobar-tabs > a").removeClass("tab-active")
    $('#takcobar-content').empty()
}
// Reveal the spinner, if the bar has been built.
function showLoader() {
    const spinner = document.getElementById('takcobar-loader')
    if (spinner) spinner.style.display = 'inline'
}
// Hide the spinner, if the bar has been built.
function hideLoader() {
    const spinner = document.getElementById('takcobar-loader')
    if (spinner) spinner.style.display = 'none'
}
// GET url, parse the JSON response, and call success(data); calls the
// optional error callback on HTTP/network failure. Shows the loader for
// the duration of the request.
function getJSON(url, success, error) {
    showLoader()
    var request = new XMLHttpRequest()
    request.open('GET', url, true)
    request.onload = function() {
        hideLoader()
        if (this.status >= 200 && this.status < 400) {
            success(JSON.parse(this.response))
        } else {
            if (error) { error() }
        }
    }
    request.onerror = function() {
        hideLoader()
        // Bug fix: previously called error(error), passing the callback
        // to itself as its own argument.
        if (error) { error() }
    }
    request.send()
}
// POST payload to url, parse the JSON response, and call success(data);
// calls the optional error callback on HTTP/network failure.
function postJSON(url, payload, success, error) {
    showLoader()
    var request = new XMLHttpRequest()
    request.open('POST', url, true)
    request.onload = function() {
        hideLoader()
        if (this.status >= 200 && this.status < 400) {
            success(JSON.parse(this.response))
        } else {
            if (error) { error() }
        }
    }
    request.onerror = function() {
        hideLoader()
        // Bug fix: previously called error(error), passing the callback
        // to itself as its own argument.
        if (error) { error() }
    }
    request.send(payload)
}
// Fill an HTML form's controls from a {name: value} object, handling
// <select>, text/hidden/textarea inputs, and radio/checkbox groups.
function populateForm(frm, data) {
    $.each(data, function(key, value){
        var $ctrl = $('[name='+key+']', frm);
        if($ctrl.is('select')){
            // Select the <option> whose value matches.
            $("option",$ctrl).each(function(){
                if (this.value==value) { this.selected=true; }
            });
        }
        else {
            switch($ctrl.attr("type")) {
                case "text" : case "hidden": case "textarea":
                    $ctrl.val(value);
                    break;
                case "radio" : case "checkbox":
                    // Check the control in the group whose value matches.
                    $ctrl.each(function(){
                        if($(this).attr('value') == value) { $(this).attr("checked",value); } });
                    break;
            }
        }
    });
}
// Advance to the next table, wrapping around to the first.
function next() {
    window.curTableIndex = (window.curTableIndex + 1) % window.allTables.length
    highlightTable(window.curTableIndex)
}
// Step back to the previous table, wrapping around to the last.
function prev() {
    const total = window.allTables.length
    window.curTableIndex = (window.curTableIndex - 1 + total) % total
    highlightTable(window.curTableIndex)
}
// Scroll the current table into view with a pulse animation; any
// previously pulsing table animates out. Also closes the bar and
// updates the 1-based table counter.
function highlightTable() {
    closeBar()
    $('#curTableIndex').text(window.curTableIndex + 1)
    el = $(window.allTables[window.curTableIndex]);
    highlighted = $('.shadow-pulse').addClass('shadow-pulse-reverse')
    el.addClass('shadow-pulse');
    // Disable pointer events during the scroll so stray hovers don't
    // interfere with the animation; classes are cleaned up on completion.
    $('html, body').addClass("no-pointer-events").animate({
        scrollTop: el.offset().top - 200
    }, {
        duration: 500,
        complete: function () {
            highlighted.removeClass('shadow-pulse').removeClass('shadow-pulse-reverse');
        }
    });
}
// Tab name -> renderer; each renderer fills #takcobar-content for its tab.
tabs = {
    "Reshape": () => {
        // Page context shown to the user and sent with the table payload.
        context = {
            "Page Title": '<a href="#">' + $('h1')[0].innerText + '</a>',
            "Header": $(el).prevAll(':header').first().children()[0].innerText,
        }
        rows = $.map(context, (v,k) => `<tr><th>${k}</th><td>${v}</td></tr>`).join('')
        $('#takcobar-content').append(
            $(`<div><h4>Context</h4><table>${rows}</table></div>`)
        )
        $('#takcobar-content').append(
            $(`<div>
            <h4>Heuristics</h4>
            <form>
            <input type="checkbox" />
            <select>
            <option>Regex</option>
            </select>
            <input type="text" value="foobar" />
            </form>
            </div>`)
        )
        // Bug fix: the closing tag above was the malformed "</form".
        el = window.allTables[window.curTableIndex]
        payload = {
            outerHTML: el.outerHTML,
            classList: el.classList,
            context: context
        }
        postJSON('http://localhost:5000/reshape', JSON.stringify(payload), (data)=>{
            $('#takcobar-content').append(
                $(`<div></div>`).text(JSON.stringify(data.context))
            )
        }, (e)=>{
            console.error(e);
        })
    },
    "Cluster": () => {
        $('#takcobar-content').append(
            $(`<div><h4>Matchers</h4></div>`)
        )
    },
    "Link": () => {
        $('#takcobar-content').append(
            $(`<div><h4>Label Index</h4></div>`)
        )
        $('#takcobar-content').append(
            $(`<div><h4>KG</h4></div>`)
        )
    },
    "Extract": () => {
        $('#takcobar-content').append(
            $(`<div><table><tr><th>Entity</th><th>Statements</th></tr></table</div>`)
        )
    },
}
// Build the sidebar UI and wire up navigation/tab handlers. Only shown
// when the page contains at least one .wikitable.
function addBar() {
    var sidebar = Object.assign(document.createElement('sidebar'), {
        id: 'takcobar',
        innerHTML: `
        <div id="takcobar-top">
        <a href="javascript:void(0)" id="takcobar-btn" onclick="closeBar()"> </a>
        <input type="button" onclick="prev()" value="👈" />
        Table <span id="curTableIndex"></span> / <span id="totalTables"></span>
        <input type="button" onclick="next()" value="👉" />
        <span id="takcobar-tabs">
        <a href="javascript:void(0)">Reshape</a>
        <a href="javascript:void(0)">Cluster</a>
        <a href="javascript:void(0)">Link</a>
        <a href="javascript:void(0)">Extract</a>
        </span>
        <img id="takcobar-loader" src="http://localhost:5000/load.gif" height="100%"/>
        </div>
        <div id="takcobar-content">
        </div>
        `,
    })
    window.allTables = $('.wikitable');
    if (window.allTables.length) {
        document.getElementsByTagName('body')[0].appendChild(sidebar)
        // Clicking the active tab closes the bar; clicking another tab
        // opens it and runs that tab's renderer (keyed by link text).
        $("#takcobar-tabs > a").click((event) => {
            const e = $(event.target)
            if (e.hasClass("tab-active")) {
                closeBar()
            } else {
                $("#takcobar-tabs > a").removeClass("tab-active")
                $('#takcobar-content').empty()
                e.addClass("tab-active")
                openBar()
                tabs[e.text()]()
            }
        })
        window.curTableIndex = 0;
        $('#totalTables').text(window.allTables.length)
        $('#curTableIndex').text(window.curTableIndex + 1)
        highlightTable()
    }
}
// Remove any previous bar instance (re-running the bookmarklet), then
// load the stylesheet and build the bar once the CSS is ready.
bar = document.getElementById('takcobar')
if (bar) { bar.remove() }
var style = Object.assign(document.createElement('link'), {
    type: "text/css",
    rel: "stylesheet",
    href: "http://localhost:5000/bar.css?" + Math.random(), // prevent cache
    onload: addBar,
})
document.getElementsByTagName('head')[0].appendChild(style)
package io.opensphere.core.preferences;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;

import javax.xml.bind.JAXBException;

import org.apache.log4j.Logger;

import io.opensphere.core.util.XMLUtilities;
import io.opensphere.core.util.lang.StringUtilities;
import io.opensphere.core.util.security.CipherFactory;
/**
* Loads preferences from the classpath.
*/
/**
 * Loads read-only preferences from the classpath. Only {@link #load} is
 * supported; delete/save, compression, and encryption are all rejected.
 */
public class ClasspathPreferencesPersistenceManager implements PreferencesPersistenceManager
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(ClasspathPreferencesPersistenceManager.class);

    @Override
    public void delete(String topic)
    {
        throw new UnsupportedOperationException(
                ClasspathPreferencesPersistenceManager.class.getName() + " does not support delete.");
    }

    @Override
    public InternalPreferencesIF load(String topic, CipherFactory cipherFactory, boolean compressed)
    {
        if (cipherFactory != null)
        {
            throw new UnsupportedOperationException(getClass().getName() + " does not support encryption.");
        }
        if (compressed)
        {
            throw new UnsupportedOperationException(getClass().getName() + " does not support compression.");
        }

        String resourceName = getResourcePath(topic);
        URL url = Thread.currentThread().getContextClassLoader().getResource(resourceName);
        if (url == null)
        {
            if (LOGGER.isDebugEnabled())
            {
                LOGGER.debug("Classpath preferences for topic [" + topic + "] not found at url [" + resourceName + "]");
            }
            return null;
        }
        // try-with-resources: the previous version leaked the stream
        // returned by url.openStream() on every load.
        try (InputStream stream = url.openStream())
        {
            if (LOGGER.isDebugEnabled())
            {
                LOGGER.debug("Loading preferences for topic [" + topic + "] from URL [" + url + "]");
            }
            PreferencesImpl pref = XMLUtilities.readXMLObject(stream, PreferencesImpl.class);
            // Classpath-backed preferences are read-only.
            pref.setPreferencesSaveable(false);
            return pref;
        }
        catch (JAXBException | IOException e)
        {
            LOGGER.error("Error loading preferences file from URL [" + url + "]: " + e, e);
            return null;
        }
    }

    @Override
    public void save(Preferences preferences, CipherFactory cipherFactory, boolean compression) throws IOException, JAXBException
    {
        throw new UnsupportedOperationException(
                ClasspathPreferencesPersistenceManager.class.getName() + " does not support save.");
    }

    @Override
    public boolean supportsCompression()
    {
        return false;
    }

    @Override
    public boolean supportsEncryption()
    {
        return false;
    }

    @Override
    public boolean supportsSave()
    {
        return false;
    }

    /**
     * Get the classpath resource name for a preferences topic.
     *
     * @param topic The preferences topic.
     * @return The resource name (whitespace stripped, ".xml" appended when missing).
     */
    protected String getResourceName(String topic)
    {
        String name = topic.replaceAll(" ", "").replaceAll("\n", "");
        if (!name.endsWith(".xml"))
        {
            name = StringUtilities.concat(name, ".xml");
        }
        return name;
    }

    /**
     * Get the classpath resource path for a preferences topic.
     *
     * @param topic The preferences topic.
     * @return The resource path under "prefs/".
     */
    protected String getResourcePath(String topic)
    {
        return StringUtilities.concat("prefs/", getResourceName(topic));
    }
}
|
/// Connection settings parsed from a comma-separated configuration file.
struct Config {
    refresh_token: String,
    client_id: String,
    secret_key: String,
    grpc_host: String,
}

/// Parse "token, client_id, secret, host" into a `Config`.
/// Whitespace around each field is trimmed; any field count other than
/// exactly four yields an error.
fn parse_config(file_content: &str) -> Result<Config, String> {
    let fields: Vec<String> = file_content
        .split(',')
        .map(|field| field.trim().to_string())
        .collect();
    match fields.as_slice() {
        [refresh_token, client_id, secret_key, grpc_host] => Ok(Config {
            refresh_token: refresh_token.clone(),
            client_id: client_id.clone(),
            secret_key: secret_key.clone(),
            grpc_host: grpc_host.clone(),
        }),
        _ => Err("Invalid number of fields in the configuration file".to_string()),
    }
}
// Doxygen-generated navigation-tree data for the DAP_Config Timestamp group;
// each entry is [label, target html anchor, children].
var group__DAP__Config__Timestamp__gr =
[
    [ "TIMESTAMP_GET", "group__DAP__Config__Timestamp__gr.html#gaf9bdc40d3a256fc2cc4d26b295993d9c", null ]
];
#!/usr/bin/env bash
# Load the character-prompt seed SQL into the dayi4 SQLite database.
# Feed the file via input redirection instead of the needless 'cat |' pipe.
sqlite3 dayi4.db < get-char_prompts.sql
#!/bin/bash
# shellcheck disable=SC1090,SC2034
#===============================================================================
#
# FILE: bin/send_file.sh
#
USAGE='send_file.sh [-h|--help] "CHAT[ID]" "file|URL" "caption ...." [type] [debug]'
#
# DESCRIPTION: send a file to the given user/group
#
# OPTIONS: CHAT[ID] - ID number of CHAT or BOTADMIN to send to yourself
# file - local file to send, must be an absolute path or relative to pwd
# Note: must not contain .. or . and located below BASHBOT_ETC
# URL - send an URL instead local file
#
# caption - message to send with file
# type - photo, video, sticker, voice, document (optional)
#
# -h - display short help
# --help - this help
#
# Set BASHBOT_HOME to your installation directory
#
# LICENSE: WTFPLv2 http://www.wtfpl.net/txt/copying/
# AUTHOR: KayM (gnadelwartz), kay@rrr.de
# CREATED: 25.12.2020 20:24
#
#### $$VERSION$$ v1.40-0-gf9dab50
#===============================================================================
####
# parse args
# Name of the bashbot helper that performs the actual send (invoked below).
SEND="send_file"

# set bashbot environment
source "${0%/*}/bashbot_env.inc.sh" "${5:-debug}" # $5 debug
print_help "$1"

####
# ready, do stuff here -----
# Resolve the destination: the literal "BOTADMIN" is an alias for the
# configured bot admin id; anything else is used as the chat id verbatim.
if [ "$1" == "BOTADMIN" ]; then
CHAT="${BOT_ADMIN}"
else
CHAT="$1"
fi
FILE="$2"
# convert to absolute path if not start with / or http://
# (URLs and telegram file_id:// references are passed through untouched)
[[ ! ( "$2" == "/"* || "$2" =~ ^https*:// || "$2" == "file_id://"*) ]] && FILE="${PWD}/$2"

# send message in selected format
"${SEND}" "${CHAT}" "${FILE}" "$3" "$4"

# output send message result
print_result
<filename>verification/util/tests/int_set_test.cpp
/*
* Copyright (c) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "util/int_set.h"
#ifdef PANDA_CATCH2
#include <rapidcheck/catch.h>
#include "util/tests/environment.h"
#endif
using namespace panda::verifier;
namespace panda::verifier::test {
#ifdef PANDA_CATCH2
namespace {
// Test-environment options, read from the VERIFIER_TEST variable.
const EnvOptions Options {"VERIFIER_TEST"};
using T = size_t;
// to actually get to the threshold in tests
constexpr size_t THRESHOLD = 32;
using StdSetT = std::set<T>;
using IntSetT = IntSet<T, THRESHOLD>;

// Assert the IntSet under test contains exactly the model set's elements.
void AssertSetsEqual(const StdSetT &model, const IntSetT &sut)
{
    RC_ASSERT(model.size() == sut.Size());
    for (auto x : model) {
        RC_ASSERT(sut.Contains(x));
    }
    // Tag the case by which internal representation the size implies
    // (below THRESHOLD a sorted vector is used, above it a bitvector).
    RC_TAG(sut.Size() < THRESHOLD ? "sorted vector" : "bitvector");
}

// Drain a lazy element stream (terminated by an invalid Index) and assert it
// yields exactly the model set's elements.
template <typename StreamT>
void AssertLazySetsEqual(const StdSetT &model, StreamT &&sut)
{
    Index<size_t> tmp = sut();
    size_t size = 0;
    while (tmp.IsValid()) {
        RC_ASSERT(model.find(tmp) != model.end());
        size++;
        tmp = sut();
    }
    RC_ASSERT(model.size() == size);
}

// Build an IntSet holding the same elements as the given std::set.
IntSetT MakeIntSet(const StdSetT &model)
{
    IntSetT result;
    for (T x : model) {
        result.Insert(x);
    }
    return result;
}
} // namespace
// CODECHECK-NOLINTNEXTLINE(C_RULE_ID_FUNCTION_SIZE)
// Property-based tests: IntSet must behave like std::set for single and bulk
// insertion, intersection and union, across both internal representations.
TEST_CASE("Test IntSet behaves like std::set", "verifier_IntSetT")
{
    T max_value = 2048;
    auto value_gen = rc::gen::inRange<T>(0, max_value);
    // Inserting one value (sometimes already present) matches std::set.
    rc::prop("Insert", [&]() {
        StdSetT set = *rc::gen::container<StdSetT>(value_gen);
        bool pick_from_set = *rc::gen::arbitrary<bool>();
        T value = pick_from_set ? *rc::gen::elementOf(set) : *value_gen;
        RC_PRE(Index(value).IsValid());
        RC_TAG(set.find(value) == set.end() ? "value not in set" : "value in set");
        IntSetT int_set {MakeIntSet(set)};
        set.insert(value);
        int_set.Insert(value);
        AssertSetsEqual(set, int_set);
    });
    // Bulk insertion — both the sorted fast path and the generic one.
    rc::prop("InsertMany", [&]() {
        StdSetT set = *rc::gen::container<StdSetT>(value_gen);
        auto values = *rc::gen::container<std::vector<T>>(value_gen);
        bool sorted = *rc::gen::arbitrary<bool>();
        IntSetT int_set {MakeIntSet(set)};
        set.insert(values.begin(), values.end());
        if (sorted) {
            std::sort(values.begin(), values.end());
            int_set.Insert<true>(values.begin(), values.end());
        } else {
            int_set.Insert(values.begin(), values.end());
        }
        AssertSetsEqual(set, int_set);
    });
    // Intersection (&, &=, lazy stream) matches std::set_intersection; the
    // injected common elements guarantee non-trivial overlaps.
    rc::prop("Intersect/IntersectionSize", [&]() {
        StdSetT set1 = *rc::gen::container<StdSetT>(value_gen), set2 = *rc::gen::container<StdSetT>(value_gen);
        size_t num_common_elems = *rc::gen::inRange<size_t>(0, 2 * THRESHOLD);
        std::vector<T> common_elems = *rc::gen::unique<std::vector<T>>(num_common_elems, value_gen);
        for (T value : common_elems) {
            set1.insert(value);
            set2.insert(value);
        }
        IntSetT int_set1 {MakeIntSet(set1)}, int_set2 {MakeIntSet(set2)};
        StdSetT std_intersection;
        std::set_intersection(set1.begin(), set1.end(), set2.begin(), set2.end(),
                              std::inserter(std_intersection, std_intersection.begin()));
        IntSetT int_set_intersection = int_set1 & int_set2;
        AssertSetsEqual(std_intersection, int_set_intersection);
        AssertLazySetsEqual(std_intersection, int_set1.LazyIntersect(int_set2));
        int_set1 &= int_set2;
        AssertSetsEqual(std_intersection, int_set1);
    });
    // Union (|, |=) matches std::set_union.
    rc::prop("Union", [&]() {
        StdSetT set1 = *rc::gen::container<StdSetT>(value_gen), set2 = *rc::gen::container<StdSetT>(value_gen);
        size_t num_common_elems = *rc::gen::inRange<size_t>(0, 2 * THRESHOLD);
        std::vector<T> common_elems = *rc::gen::unique<std::vector<T>>(num_common_elems, value_gen);
        for (T value : common_elems) {
            set1.insert(value);
            set2.insert(value);
        }
        IntSetT int_set1 {MakeIntSet(set1)}, int_set2 {MakeIntSet(set2)};
        StdSetT std_union;
        std::set_union(set1.begin(), set1.end(), set2.begin(), set2.end(), std::inserter(std_union, std_union.begin()));
        IntSetT int_set_union = int_set1 | int_set2;
        AssertSetsEqual(std_union, int_set_union);
        int_set1 |= int_set2;
        AssertSetsEqual(std_union, int_set1);
    });
}
#endif // !PANDA_CATCH2
} // namespace panda::verifier::test
|
/*
* Copyright © 2019 <NAME>.
*/
package chat
import (
"context"
"fmt"
"github.com/golang/protobuf/proto"
"github.com/gorilla/websocket"
"github.com/hedzr/cmdr/plugin/daemon"
"github.com/hedzr/voxr-api/api"
"github.com/hedzr/voxr-api/api/v10"
"github.com/hedzr/voxr-common/vxconf/gwk"
"github.com/hedzr/voxr-lite/core/impl/grpc"
"github.com/hedzr/voxr-lite/core/impl/grpc/coco-client/coco"
"github.com/hedzr/voxr-lite/internal/scheduler"
"github.com/sirupsen/logrus"
"io"
"strconv"
"strings"
"time"
)
// PostBinaryMsg queues a binary frame for delivery on the ppttSend channel.
// It is the exported counterpart of _postBinMsg and now delegates to it so
// the exited-check lives in exactly one place (the bodies were duplicated).
func (c *WsClient) PostBinaryMsg(message []byte) {
	c._postBinMsg(message)
}
// _postBinMsg queues message on the binary (ppttSend) channel unless the
// client has been marked exited.
func (c *WsClient) _postBinMsg(message []byte) {
	if !c.exited {
		c.ppttSend <- message
	}
}

// _postTxtMsg queues message on the text channel unless the client has been
// marked exited.
func (c *WsClient) _postTxtMsg(message []byte) {
	if !c.exited {
		c.textSend <- message
	}
}

// _writeBack queues a string reply on the text channel.
func (c *WsClient) _writeBack(msg string) {
	if !c.exited {
		c.textSend <- []byte(msg)
		// _ = from.conn.WriteMessage(1, []byte(msg))
	}
}

// _writeBackBytes queues a byte-slice reply on the text channel.
func (c *WsClient) _writeBackBytes(msg []byte) {
	if !c.exited {
		c.textSend <- msg
		// _ = from.conn.WriteMessage(1, []byte(msg))
	}
}
// onTxtMsg dispatches an inbound websocket text message.
//
// Recognized messages:
//   - "ping"                          -> replies "pong"
//   - "shutdown:shutdown:shutdown..." -> stops the daemon
//   - "<xx>:<payload>"                -> test trigger looked up in txtMsgFuncMaps
//
// It reports whether the message was handled here. The large block of
// commented-out legacy handlers that used to live in this function was
// removed; the map-based handlers in txtMsgFuncMaps supersede it.
func (c *WsClient) onTxtMsg(msg string) (handled bool) {
	if strings.EqualFold(msg, "ping") {
		// _ = c.conn.WriteMessage(1, []byte("pong"))
		c._writeBack("pong")
		handled = true
		return
	}
	if strings.HasPrefix(msg, "shutdown:shutdown:shutdown") {
		logrus.Infof("SHUTDOWN from websocket text command.")
		daemon.StopSelf()
		handled = true
		return
	}

	defer func() {
		if r := recover(); r != nil {
			err, _ := r.(error)
			logrus.Errorln("Websocket.onTxtMsg() error:", err)
		}
	}()

	// Guard the slicing below: messages shorter than "xx:" cannot carry a
	// test-trigger prefix (previously this sliced unconditionally and relied
	// on the recover above to swallow the resulting panic).
	if len(msg) < 3 {
		return
	}

	// Test triggers: a two-letter prefix, a colon, then the payload.
	prefix := msg[0:2]
	if fn, ok := txtMsgFuncMaps[prefix]; ok && msg[2:3] == ":" {
		handled = fn(c, msg[3:])
		return
	}
	return
}
// handler is the signature of a two-letter text-command handler; msg is the
// payload with the leading "xx:" prefix already stripped by onTxtMsg.
type handler func(c *WsClient, msg string) (handled bool)

// txtMsgFuncMaps routes a two-letter command prefix to its handler.
var txtMsgFuncMaps = map[string]handler{
	"ux": ux,
	"ud": func(c *WsClient, msg string) (handled bool) { c.simulateCoreLoginDirectly(); handled = true; return },
	"gc": func(c *WsClient, msg string) (handled bool) { c.simulateGetContact(msg); handled = true; return },
	"lc": func(c *WsClient, msg string) (handled bool) { c.simulateListContacts(msg); handled = true; return },
	"sm": func(c *WsClient, msg string) (handled bool) { c.simulateSendMsg(msg); handled = true; return },
	"ts": ts,
}
// ux simulates a gRPC Login request against the vx-core service and writes
// the resulting user-info token back over the websocket.
//
// msg is the payload AFTER the "ux:" prefix — onTxtMsg strips the prefix
// before dispatching, so the payload is passed to the client verbatim.
func ux(c *WsClient, msg string) (handled bool) {
	// BUG FIX: the old body sliced msg[3:] again, silently dropping the first
	// three characters of the payload. The sibling handlers (gc/lc/sm) use
	// the already-stripped msg directly; ux now does the same.
	coco.ClientSend(msg, func(cc *coco.GrpcClient, uit *v10.UserInfoToken) {
		c._writeBack(uit.String())
		// _ = c.conn.WriteMessage(1, []byte(ret))
		// cc.RequestClose()
	})
	handled = true
	return
}
// ts is a diagnostics trigger ("ts:..."): it logs the service records the
// registrar resolves for the core grpc service, then exercises
// scheduler.Invoke with the canned demo Login request and logs the outcome.
func ts(c *WsClient, msg string) (handled bool) {
	// test for SvrRecordResolverAll()
	var r = gwk.ThisConfig.Registrar
	if r.IsOpen() {
		var records = r.SvrRecordResolverAll(api.GrpcCore, "grpc")
		for ix, rec := range records {
			logrus.Infof("%3d. id:%v, ip:%v, port:%v, what:%v", ix, rec.ID, rec.IP, rec.Port, rec.What)
		}
	} else {
		logrus.Warn("store is NOT open.")
	}
	// test for scheduler.Invoke()
	scheduler.Invoke(api.GrpcAuth, api.GrpcAuthPackageName, "UserAction", "/inx.im.user.UserAction/Login",
		&coco.DemoLoginReq, nil, func(e error, input *scheduler.Input, out proto.Message) {
			if r, ok := out.(*v10.UserInfoToken); ok {
				logrus.Debugf(">> Input: %v\n<< Output: %v", input, r)
			} else if e != nil {
				logrus.Errorf(" invoke failed, err: %v", e)
			} else {
				logrus.Warnf(">> Input: %v\nhas error??? output: %v", input, out)
			}
		})
	handled = true
	return
}
// simulateCoreLoginDirectly issues a Login request by calling the in-process
// vx-core grpc implementation directly (no network hop) with the canned demo
// credentials, and writes the result — or the error — back to the client.
func (c *WsClient) simulateCoreLoginDirectly() {
	// Call the service implementation in-process rather than via a client.
	if ret, err := grpc.Instance.Login(context.Background(), &v10.AuthReq{
		Oneof: &v10.AuthReq_Req{Req: &coco.DemoLoginReq},
	}); err == nil {
		// b, _ := json.Marshal(res)
		c._writeBack(ret.String())
	} else {
		c._writeBack(fmt.Sprintf("ERR: %v", err))
	}
}
// prepareListContactsReq builds a ListContacts request for the given owner
// uid, stamped with the sequence number following seq.
func prepareListContactsReq(seq uint32, cid int64) *v10.ListContactsReq {
	req := v10.ListContactsReq{
		ProtoOp:  v10.Op_ListContacts,
		Seq:      seq + 1,
		UidOwner: cid,
	}
	return &req
}
// simulateListContacts parses msg as an owner uid and issues a ListContacts
// call against the in-process grpc service, echoing the result (or the
// error) back over the websocket, then bumps the client sequence number.
// NOTE(review): on a parse failure this only logs a warning and proceeds
// with cid == 0 — confirm that fall-through is intended.
func (c *WsClient) simulateListContacts(msg string) {
	cid, err := strconv.ParseInt(msg, 10, 64)
	if err != nil {
		logrus.Warnf("Err: %v", err)
	}
	if res, err := grpc.Instance.ListContacts(context.Background(), prepareListContactsReq(c.seq, cid)); err == nil {
		c._writeBack(res.String())
	} else {
		c._writeBack(fmt.Sprintf("ERR: %v", err))
	}
	c.seq++
}
// prepareGetContactReq builds a GetContact request for owner uid 1 and the
// given friend uid, stamped with the sequence number following seq.
func prepareGetContactReq(seq uint32, cid int64) *v10.GetContactReq {
	req := v10.GetContactReq{
		ProtoOp:   v10.Op_GetContact,
		Seq:       seq + 1,
		UidOwner:  1,
		UidFriend: cid,
	}
	return &req
}
// simulateGetContact parses msg as a friend uid and issues a GetContact call
// (for owner uid 1) against the in-process grpc service, echoing the result
// or error back to the client, then bumps the client sequence number.
// NOTE(review): a parse failure is only logged; the call proceeds with
// cid == 0 — confirm that fall-through is intended.
func (c *WsClient) simulateGetContact(msg string) {
	cid, err := strconv.ParseInt(msg, 10, 64)
	if err != nil {
		logrus.Warnf("Err: %v", err)
	}
	if res, err := grpc.Instance.GetContact(context.Background(), prepareGetContactReq(c.seq, cid)); err == nil {
		c._writeBack(res.String())
	} else {
		c._writeBack(fmt.Sprintf("ERR: %v", err))
	}
	c.seq++
}
// prepareSendMsgReq builds a SendMsg request (from user 1 to user 2) whose
// body embeds the next sequence number and the caller-supplied text.
func prepareSendMsgReq(seq uint32, msg string) *v10.SendMsgReq {
	next := seq + 1
	body := v10.SaveMessageRequest{
		GroupId:    0,
		FromUser:   1,
		ToUser:     2,
		MsgContent: fmt.Sprintf("自然而然 %v - %v", next, msg),
		MsgType:    0,
	}
	return &v10.SendMsgReq{
		ProtoOp: v10.Op_SendMsg,
		Seq:     next,
		Body:    &body,
	}
}
// simulateSendMsg pushes a canned chat message through the in-process grpc
// service, writes the response body (or the error) back to the websocket
// client, then bumps the client sequence number.
func (c *WsClient) simulateSendMsg(msg string) {
	if res, err := grpc.Instance.SendMsg(context.Background(), prepareSendMsgReq(c.seq, msg)); err == nil {
		c._writeBack(res.Body.String())
	} else {
		c._writeBack(fmt.Sprintf("ERR: %v", err))
	}
	c.seq++
}
// grpc.ClientSend(msg[3:], func(ret string) {
// c.conn.WriteMessage(1, []byte(ret))
// })
//
// scheduler.Invoke(api.GrpcAuth, "UserAction", "Login", &loginReq, func(e error, input *s.Input, out interface{}) {
// if r, ok := out.(*user.UserInfoToken); ok {
// logrus.Debugf(">> Input: %v\n<< Output: %v", input, r)
// } else {
// logrus.Warnf(">> Input: %v\nhas error??? output: %v", input, out)
// }
// })
// // encode sth.
// token := &user.UserInfoToken{
// UserInfo: &user.UserInfo{
// 8, "uid-1", 3, "realname", "nickname",
// "13801234567", "avatar://xxx", "510214xxxxxxxxxxxx",
// 100, 1, "<EMAIL>", "<PASSWORD>",
// 9,
// struct{}{}, []byte{}, 0,
// },
// Token: "<PASSWORD>",
// }
// data, err := proto.Marshal(token)
// if err != nil {
// logrus.Fatalf("encode failed: ", err)
// }
//
// // decode sth.
// var target user.UserInfoToken
// err = proto.Unmarshal(data, &target)
// if err != nil {
// logrus.Fatalf("encode failed: ", err)
// }
// logrus.Debugf("userInfoToken = %v", target)
// func randomUserInfo() *user.UserInfo {
// user_login_res := user.UserInfo{Id: 1,
// Uid: "op_ajkhsajk98217hjsbnp",
// UType: 0,
// UNickname: "David",
// UPhone: "13323977614",
// UAvatar: "https://www.ajskja.com",
// UIdcard: "500129199602293301",
// UAge: 23,
// USex: 1,
// UEmail: "<EMAIL>",
// URealname: "佚名",
// UPass: "<PASSWORD>"}
// return &user_login_res
// }
// writeN writes one outbound frame of the given websocket type. ok mirrors
// the channel-receive flag: when false the hub closed the send channel, so a
// close frame is emitted instead of data. Any text messages still queued on
// c.textSend are appended (newline-separated) into the same frame.
func (c *WsClient) writeN(ok bool, type_ int, message []byte) (err error) {
	if err = c.conn.SetWriteDeadline(time.Now().Add(writeWait)); err != nil {
		return
	}
	if !ok {
		// The hub closed the channel.
		if err = c.conn.WriteMessage(websocket.CloseMessage, []byte{}); err != nil {
			// NOTE(review): the close-frame write error is deliberately
			// discarded — confirm callers really expect nil here.
			err = nil
		}
		return
	}
	var w io.WriteCloser
	w, err = c.conn.NextWriter(type_)
	if err != nil {
		return
	}
	if _, err = w.Write(message); err != nil {
		return
	}
	// Add queued chat messages to the current websocket message.
	n := len(c.textSend)
	for i := 0; i < n; i++ {
		_, err = w.Write(newline)
		if err == nil {
			_, err = w.Write(<-c.textSend)
		}
		if err != nil {
			return
		}
	}
	err = w.Close()
	return
}
// writeB sends a single websocket frame of the given type, logging (and
// returning) any deadline-setting or write error.
func (c *WsClient) writeB(type_ int, data []byte) (err error) {
	err = c.conn.SetWriteDeadline(time.Now().Add(writeWait))
	if err != nil {
		logrus.Warnf("error occurs at ws SetWriteDeadline/writeWait: %v", err)
		return
	}
	err = c.conn.WriteMessage(type_, data)
	if err != nil {
		logrus.Warnf("error occurs at ws WriteMessage/%d: %v", type_, err)
	}
	return
}
|
# GPU index exposed to the process and the segmentation config to use.
gpus=3
config=unet_baidulane_320x180.yaml
export CUDA_VISIBLE_DEVICES=${gpus}
# Training invocation, kept for reference:
# python pdseg/train.py \
# --cfg configs/${config} \
# --use_gpu \
# Run PaddleSeg visualization/inference at 1280x720 on the selected GPU.
python pdseg/vis.py \
--cfg configs/${config} \
--use_gpu \
--width 1280 \
--height 720
def generate_fibonacci(length):
    """Print the first ``length`` Fibonacci numbers on one line.

    Keeps the original CLI-style behavior: a non-positive ``length`` prints
    an error message (rather than raising), ``length == 1`` prints a single
    ``0`` with a newline, and longer sequences are space-separated with a
    trailing space and no newline.
    """
    n1, n2 = 0, 1
    if length <= 0:
        print('Invalid length. Positive numbers only.')
    elif length == 1:
        print(n1)
    else:
        print(n1, n2, end=' ')
        # Tuple assignment advances the pair in one step: the next value is
        # the sum of the previous two (replaces the explicit n3 temporary).
        for _ in range(2, length):
            n1, n2 = n2, n1 + n2
            print(n2, end=' ')


# Show output
generate_fibonacci(6)
# Output: 0 1 1 2 3 5
#!/bin/sh
# This file exists to document the proper way to initialize autotools,
# and so that those used to the presence of bootstrap.sh or autogen.sh
# will have an easier time.
# -i installs missing auxiliary files, -s symlinks them instead of copying;
# all warnings are enabled except for overrides of standard targets.
exec autoreconf -is -Wall,no-override
#!/usr/bin/env bash
# Set bash to 'debug' mode, it will exit on :
# -e 'error', -u 'undefined variable', -o ... 'error in pipeline', -x 'print commands',
set -e
set -u
set -o pipefail

# Data splits consumed by the recipe: training, validation and evaluation.
train_set="train_960"
valid_set="dev"
test_sets="test_clean test_other dev_clean dev_other"

# Model, language-model and decoding configuration files.
asr_config=conf/train_asr_conformer.yaml
lm_config=conf/tuning/train_lm_transformer2.yaml
inference_config=conf/decode_asr.yaml

# Delegate to the shared asr.sh pipeline; the trailing "$@" lets the caller
# override or append any option.
./asr.sh \
    --lang en \
    --ngpu 4 \
    --nbpe 5000 \
    --max_wav_duration 30 \
    --speed_perturb_factors "0.9 1.0 1.1" \
    --asr_config "${asr_config}" \
    --lm_config "${lm_config}" \
    --inference_config "${inference_config}" \
    --train_set "${train_set}" \
    --valid_set "${valid_set}" \
    --test_sets "${test_sets}" \
    --lm_train_text "data/${train_set}/text data/local/other_text/text" \
    --bpe_train_text "data/${train_set}/text" "$@"
#!/bin/sh
#
# Naive implementation of key management.
# it generates, encrypt and decrypt key file.
#

# This naive backend needs no external packages, hence the empty list.
key_management_dependencies(){
echo ""
}

# Write a dummy key into the given file.
# NOTE(review): '>>' appends — if the file already exists the key is added
# to it rather than replacing its contents; confirm that is intended.
key_management_generate_key_file(){
decrypted_key_file_path=$1
echo "test key" >> "$decrypted_key_file_path"
}

# "Encrypt" by plain copy (placeholder for a real cipher).
key_management_encrypt_key_file(){
decrypted_key_file_path=$1
encrypted_key_file_path=$2
cp "$decrypted_key_file_path" "$encrypted_key_file_path"
}

# "Decrypt" by plain copy (placeholder for a real cipher).
key_management_decrypt_key_file(){
encrypted_key_file_path=$1
decrypted_key_file_path=$2
cp "$encrypted_key_file_path" "$decrypted_key_file_path"
}
<reponame>lananh265/social-network
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.chart_3_8 = void 0;
// Auto-generated SVG icon description ("chart 3/8" pictogram): a 64x64
// viewBox plus the nested element tree consumed by the icon renderer.
var chart_3_8 = {
  "viewBox": "0 0 64 64",
  "children": [{
    "name": "g",
    "attribs": {
      "id": "CHART__x2F__3_x2F_8_1_",
      "enable-background": "new    "
    },
    "children": [{
      "name": "g",
      "attribs": {
        "id": "CHART__x2F__3_x2F_8"
      },
      "children": [{
        "name": "g",
        "attribs": {
          "id": "CHART__x2F__3_x2F_8"
        },
        "children": [{
          "name": "g",
          "attribs": {},
          "children": [{
            "name": "g",
            "attribs": {},
            "children": [{
              "name": "path",
              "attribs": {
                "d": "M32,0C14.327,0,0,14.327,0,32s14.327,32,32,32s32-14.327,32-32S49.673,0,32,0z M32,58C17.641,58,6,46.359,6,32\r\n\t\t\t\tC6,17.641,17.641,6,32,6v26l18.385,18.385C45.68,55.09,39.18,58,32,58z"
              },
              "children": [{
                "name": "path",
                "attribs": {
                  "d": "M32,0C14.327,0,0,14.327,0,32s14.327,32,32,32s32-14.327,32-32S49.673,0,32,0z M32,58C17.641,58,6,46.359,6,32\r\n\t\t\t\tC6,17.641,17.641,6,32,6v26l18.385,18.385C45.68,55.09,39.18,58,32,58z"
                },
                "children": []
              }]
            }]
          }]
        }]
      }]
    }]
  }]
};
exports.chart_3_8 = chart_3_8;
<gh_stars>1000+
/**
 * Interface that redeclares {@link Object#equals(Object)}. Note that the
 * declaration is satisfied by the inherited {@code Object} implementation,
 * so implementing classes are not actually forced to override it.
 */
interface I {
    boolean equals(Object o);
}
-- Registered application users; user_id is the surrogate primary key.
CREATE TABLE users (
 user_id int NOT NULL,
 username varchar (255) NOT NULL,
 email varchar (255) NOT NULL,
 address varchar (255) NOT NULL,
 PRIMARY KEY (user_id)
);
# Creates a database using Doctrine Migrations and compares it with the original sql
# To use this create a my.cnf with you credentials
#
# NOTE: before running this, change your database name to: 'janus_migrations_test'
# mysql/mysqldump wrappers that pick credentials up from ~/my.cnf.
MYSQL_BIN="mysql --defaults-extra-file=$HOME/my.cnf"
MYSQLDUMP_BIN="mysqldump --defaults-extra-file=$HOME/my.cnf"
# Where to provision the test database from. The LAST assignment wins, so
# the active scenario below is 'live_dump' (the 'local_dump' line above it
# is overridden). Comment/uncomment lines to switch scenarios.
UPDATE_SOURCE=''
# Enable to test updating from original janus schema instead of installing
#UPDATE_SOURCE='janus-1.12'
# Enable to test updating from original janus-surfnet-merge schema instead of installing
#UPDATE_SOURCE='janus-1.13'
# Enable to test updating from current schema instead of installing
UPDATE_SOURCE='local_dump'
# Enable to test updating from production schema instead of installing (requires dump files to be present
UPDATE_SOURCE='live_dump'
# Drop and recreate the scratch database used for the comparison run.
recreateDb() {
    echo "Recreating 'janus_migrations_test' database"
    echo 'drop database janus_migrations_test' | $MYSQL_BIN
    echo 'create database janus_migrations_test CHARSET=utf8 COLLATE=utf8_unicode_ci' | $MYSQL_BIN
}

# Seed the test database according to UPDATE_SOURCE: an original janus
# schema, a dump of the local db, or a pre-imported production copy.
provisionDb() {
    if [ "$UPDATE_SOURCE" == "janus-1.12" ]; then
        echo "importing original janus 1.12 schema into test db"
        # NOTE(review): this path has no leading '/' and resolves relative to
        # the current directory — confirm '/home/...' was not intended.
        $MYSQL_BIN janus_migrations_test < home/lucasvanlierop/janus/tests-for-orm-introduction/compareDatabaseTestResources/janus-1.12.sql
    fi
    if [ "$UPDATE_SOURCE" == "janus-1.13" ]; then
        echo "importing original janus 1.13 schema into test db"
        # NOTE(review): same relative-path concern as above.
        $MYSQL_BIN janus_migrations_test < home/lucasvanlierop/janus/tests-for-orm-introduction/compareDatabaseTestResources/janus-1.13.sql
    fi
    if [ "$UPDATE_SOURCE" == "local_dump" ]; then
        echo 'dumping local db'
        $MYSQLDUMP_BIN --compact --skip-comments serviceregistry > /tmp/serviceregistry-dump.sql
        echo 'importing copy of local db into test db'
        $MYSQL_BIN janus_migrations_test < /tmp/serviceregistry-dump.sql
    fi
    if [ "$UPDATE_SOURCE" == "live_dump" ]; then
        #echo "Recreating 'janus_prod' database"
        #echo 'drop database janus_prod' | $MYSQL_BIN
        #echo 'create database janus_prod CHARSET=utf8 COLLATE=utf8_unicode_ci' | $MYSQL_BIN
        # Uncomment this once to get a copyable db
        #echo "Importing production dump into db for comparison"
        #$MYSQL_BIN -v janus_prod < ~/janus-db-export-prod/db_changelog.sql
        #$MYSQL_BIN -v janus_prod < ~/janus-db-export-prod/janus__blockedEntity.sql
        #$MYSQL_BIN -v janus_prod < ~/janus-db-export-prod/janus__allowedEntity.sql
        #$MYSQL_BIN -v janus_prod < ~/janus-db-export-prod/janus__arp.sql
        #$MYSQL_BIN -v janus_prod < ~/janus-db-export-prod/janus__attribute.sql
        #$MYSQL_BIN -v janus_prod < ~/janus-db-export-prod/janus__disableConsent.sql
        #$MYSQL_BIN -v janus_prod < ~/janus-db-export-prod/janus__entity.sql
        #$MYSQL_BIN -v janus_prod < ~/janus-db-export-prod/janus__hasEntity.sql
        #$MYSQL_BIN -v janus_prod < ~/janus-db-export-prod/janus__metadata.sql
        #$MYSQL_BIN -v janus_prod < ~/janus-db-export-prod/janus__user.sql
        # Copy the raw datadir while mysqld is stopped (fast but crude).
        sudo service mysqld stop
        echo 'Copy mysql prod database the brute force way'
        prodSourceDb='/var/lib/mysql/janus_prod'
        prodTestDb='/var/lib/mysql/janus_migrations_test'
        sudo rm -rf $prodTestDb
        sudo cp -R $prodSourceDb $prodTestDb
        sudo chown -R mysql:mysql $prodTestDb
        sudo service mysqld start
        # Run serviceregistry patches over prod import
        JANUS_DIR="$( cd -P "$( dirname "$0" )" && pwd )"
        $JANUS_DIR/../../../../bin/dbpatch.php update
    fi
}
# Apply all Doctrine migrations to the test db, dump the resulting schema,
# normalize AUTO_INCREMENT counters and prepend a FK-checks-off statement.
migrateUp() {
    # Exec migrations
    ./app/console doctrine:migrations:migrate --no-interaction
    # Dump migrations
    $MYSQLDUMP_BIN --compact --skip-comments --no-data janus_migrations_test > /tmp/janus_migrations_test.sql
    # Remove autoincrement created by data
    sed -i 's/ AUTO_INCREMENT=[0-9]*\b//' /tmp/janus_migrations_test.sql
    # Prefix set foreign ignore statement
    echo "SET FOREIGN_KEY_CHECKS = 0;"|cat - /tmp/janus_migrations_test.sql > /tmp/out && mv /tmp/out /tmp/janus_migrations_test.sql
}

# The migrated schema must match what doctrine:schema:update would generate —
# any diff means the ORM models and the migrations have diverged.
compareWithSchemaTool() {
    echo "Check differences between migrations and schematool, there should be none otherwise the models do not map to the db"
    ./app/console doctrine:schema:update --dump-sql > /tmp/janus_schematool_update.sql
    # fix Doctrine removing quotes...
    # (re-quote reserved words so MySQL accepts the generated DDL)
    sed -i 's/\ update\ /\ `update`\ /' /tmp/janus_schematool_update.sql
    sed -i 's/\ read\ /\ `read`\ /' /tmp/janus_schematool_update.sql
    sed -i 's/\ key\ /\ `key`\ /' /tmp/janus_schematool_update.sql
    echo "Creating test db"
    $MYSQL_BIN -e "drop database janus_schematool_test"
    $MYSQL_BIN -e "create database janus_schematool_test CHARSET=utf8 COLLATE=utf8_unicode_ci"
    echo "loading current db state in test db"
    $MYSQL_BIN janus_schematool_test < /tmp/janus_migrations_test.sql
    echo "Applying the following changes from doctrine schematool update:"
    cat /tmp/janus_schematool_update.sql
    $MYSQL_BIN janus_schematool_test < /tmp/janus_schematool_update.sql
    $MYSQLDUMP_BIN --compact --skip-comments --no-data janus_schematool_test > /tmp/janus_schematool_test_dump.sql
    colordiff -u /tmp/janus_migrations_test.sql /tmp/janus_schematool_test_dump.sql
}
# Diff the migrated schema against the original (pre-surfnet-merge) schema.
compareWithJanus() {
    echo "Importing Janus sql"
    echo 'drop database janus_wayf' | $MYSQL_BIN
    echo 'create database janus_wayf CHARSET=utf8 COLLATE=utf8_unicode_ci' | $MYSQL_BIN
    # NOTE(review): relative path — a leading '/' appears to be missing.
    $MYSQL_BIN janus_wayf < home/lucasvanlierop/janus/tests-for-orm-introduction/compareDatabaseTestResources/pre-surfnet-merge-schema.sql
    $MYSQLDUMP_BIN --compact --skip-comments --no-data janus_wayf > /tmp/janus_wayf.sql
    colordiff -u /tmp/janus_wayf.sql /tmp/janus_migrations_test.sql
}

# Roll every migration back down to version 0.
migrateDown() {
    echo "Test reverse migration"
    # NOTE(review): '../app/console' disagrees with migrateUp's
    # './app/console' and no directory change happens in between —
    # confirm which path is correct.
    ../app/console doctrine:migrations:migrate --no-interaction 0
}

# After a full down-migration the schema should match the original again.
compareWithOriginal() {
    $MYSQLDUMP_BIN --compact --skip-comments --no-data janus_migrations_test > /tmp/janus_migrations_test.sql
    # Remove autoincrement created by data
    sed -i 's/ AUTO_INCREMENT=[0-9]*\b//' /tmp/janus_migrations_test.sql
    $MYSQLDUMP_BIN --compact --skip-comments --no-data janus_wayf > /tmp/janus_wayf.sql
    colordiff -u /tmp/janus_wayf.sql /tmp/janus_migrations_test.sql
}
# Main flow: rebuild, seed, migrate up and verify against the schema tool.
recreateDb
provisionDb
migrateUp
compareWithSchemaTool
#compareWithJanus
# NOTE(review): everything after this 'exit' is dead code — migrateDown and
# compareWithOriginal never run; confirm this is a deliberate toggle.
exit;
migrateDown
compareWithOriginal
<reponame>bakunya/instagram-bot
module.exports = async ({ page, userTarget }) => {
try {
await page.goto(`https://instagram.com/${userTarget}`, { timeout: 30000, waitUntil: 'domcontentloaded' })
await page.waitForSelector('header', { timeout: 30000 })
await page.evaluate(() => {
document.querySelectorAll('main[role=main] header section button').forEach(itm => {
if(itm.textContent === 'Follow') {
itm.click()
return;
}
})
})
await page.waitForTimeout(5000)
return Promise.resolve()
} catch (er) {
return Promise.reject(er)
}
} |
<gh_stars>1-10
package printer
import (
"bytes"
"io/ioutil"
"testing"
. "github.com/onsi/gomega"
)
// TestAddRow verifies that TablePrinter renders added rows as column-aligned,
// newline-terminated text once Flush is called.
func TestAddRow(t *testing.T) {
	g := NewGomegaWithT(t)
	// Table-driven cases: rows in, exact rendered text out.
	testCases := []struct {
		title  string
		rows   [][]string
		output string
	}{
		{
			title:  "one row and one column",
			rows:   [][]string{{"foo"}},
			output: "foo\n",
		},
		{
			title:  "one row and three columns",
			rows:   [][]string{{"foo", "bar", "buz"}},
			output: "foo bar buz\n",
		},
		{
			title: "two rows and three columns",
			rows:  [][]string{{"foo", "bar", "buz"}, {"foo1", "foo2", "foo3"}},
			output: `foo bar buz
foo1 foo2 foo3
`,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.title, func(t *testing.T) {
			buf := &bytes.Buffer{}
			// Arguments are presumably tabwriter-style (minwidth, tabwidth,
			// padding, padchar) — confirm against NewTablePrinter.
			p := NewTablePrinter(buf, 20, 1, 3, ' ')
			for _, row := range tc.rows {
				p.AddRow(row)
			}
			err := p.Flush()
			g.Expect(err).ShouldNot(HaveOccurred())
			data, err := ioutil.ReadAll(buf)
			g.Expect(err).ShouldNot(HaveOccurred())
			g.Expect(string(data)).Should(Equal(tc.output))
		})
	}
}
|
<filename>authenticationService/src/main/java/com/dam/authentication/model/PermissionModel.java<gh_stars>0
package com.dam.authentication.model;
import java.util.List;
import java.util.Optional;
import javax.transaction.Transactional;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.query.Param;
import com.dam.authentication.model.entity.Permission;
/**
 * Spring Data repository for {@code Permission} entities, looked up by
 * role and/or service domain.
 */
@Transactional
public interface PermissionModel extends Repository<Permission, Long>, CrudRepository<Permission, Long> {

/** Look up a permission by its primary key. */
Optional<Permission> findBy_id(Long id);

/** Look up a permission by role name alone. */
Permission findByRole(String role);

/** All permissions declared for the given service domain. */
List<Permission> findByServiceDomain(String domain);

/** The single permission matching both role and service domain. */
@Query("SELECT permission FROM Permission permission where permission.role = :role "
+ "AND permission.serviceDomain = :serviceDomain")
Permission findByRoleDomain(@Param("role") String role, @Param("serviceDomain") String serviceDomain);

/**
 * Update role, service domain and rights of an existing permission in place.
 *
 * @return the number of rows affected (0 or 1).
 */
@Modifying
@Transactional
@Query("Update Permission permission set "
+ "permission.role = :#{#permission.role}, "
+ "permission.serviceDomain = :#{#permission.serviceDomain}, "
+ "permission.rights = :#{#permission.rights} "
+ "Where permission._id = :#{#permission._id} ")
Integer update(@Param("permission") Permission permission);
}
|
-- Demo person table; id is the surrogate primary key.
CREATE TABLE PERSON (
  id integer not null,
  first_name varchar(255) not null,
  last_name varchar(255) not null,
  primary key(id)
);

-- Seed row so the table is non-empty on first boot.
insert into PERSON (id, first_name, last_name) values (1, 'Bright', 'Zheng');
-- alter table student add column remarks varchar(200) null;
#!/usr/bin/env bash
# Copyright 2017-2020 by SDRausty. All rights reserved. 🌎 🌍 🌏 🌐 🗺
# Hosted sdrausty.github.io/TermuxArch courtesy https://pages.github.com
# https://sdrausty.github.io/TermuxArch/README has info about this project.
# https://sdrausty.github.io/TermuxArch/CONTRIBUTORS Thank you for your help.
################################################################################
# To regenerate the start script use 'setupTermuxArch [r[e[fresh]]]'. The command 'setupTermuxArch refresh' will refresh the installation globally, including excecuting keys and locales and backup user configuration files that were refreshed. The command 'setupTermuxArch re' will refresh the installation and update user configuration files and backup user configuration files that were refreshed. While the command 'setupTermuxArch r' will only refresh the installation and update the root user configuration files and backup root user configuration files that were refreshed.
# Appending to the PRoot statement can be accomplished on the fly by creating a .prs file in the var/binds directory. The format is straightforward, 'PROOTSTMNT+="option command "'. The space is required before the last double quote. Commands 'info proot' and 'man proot' have more information about what can be configured in a proot init statement. If more suitable configurations are found, share them at https://github.com/TermuxArch/TermuxArch/issues to improve TermuxArch. PRoot bind usage: PROOTSTMNT+="-b host_path:guest_path " The space before the last double quote is necessary.
_PR00TSTRING_() { # construct the PRoot init statement
    PROOTSTMNT="exec proot "
    # Kernel-release override: host release by default, pinned old release
    # when KID=0, nothing otherwise.
    if [[ -z "${KID:-}" ]]
    then
        PROOTSTMNT+="--kernel-release=$(uname -r)-generic "
    elif [[ "$KID" = 0 ]]
    then
        PROOTSTMNT+="--kernel-release=4.14.15-generic "
    else
        PROOTSTMNT+=""
    fi
    if [[ "$KOE" = 0 ]]
    then
        PROOTSTMNT+="--kill-on-exit "
    fi
    PROOTSTMNT+="--link2symlink -0 -r $INSTALLDIR "
    # file var/binds/fbindexample.prs has a few more examples
    if [[ -n "$(ls -A "$INSTALLDIR"/var/binds/*.prs)" ]]
    then
        for PRSFILES in "$INSTALLDIR"/var/binds/*.prs
        do
            . "$PRSFILES"
        done
    fi
    [[ "$SYSVER" -ge 10 ]] && PROOTSTMNT+="-b /apex:/apex "
    # The bind lists below use a BASH associative array; see
    # https://www.gnu.org/software/bash/manual/html_node/Arrays.html
    declare -A PRSTARR # associative array
    # populate writable binds
    PRSTARR=([/dev/ashmem]=/dev/ashmem [/dev/shm]=/dev/shm)
    # FIX: quote "${!PRSTARR[@]}" in all three loops — keys built from $HOME
    # or $EXTERNAL_STORAGE may contain spaces, and the unquoted expansion
    # would word-split them into bogus bind paths.
    for PRBIND in "${!PRSTARR[@]}"
    do
        if [[ -w "$PRBIND" ]] # is writable
        then # add proot bind
            PROOTSTMNT+="-b $PRBIND:$PRBIND "
        fi
    done
    # populate readable binds
    PRSTARR=([/dev/]=/dev/ [/dev/urandom]=/dev/random ["$EXTERNAL_STORAGE"]="$EXTERNAL_STORAGE" ["$HOME"]="$HOME" ["$PREFIX"]="$PREFIX" [/proc/]=/proc/ [/proc/self/fd]=/dev/fd [/proc/self/fd/0]=/dev/stdin [/proc/self/fd/1]=/dev/stdout [/proc/self/fd/2]=/dev/stderr [/proc/stat]=/proc/stat [/property_contexts]=/property_contexts [/storage/]=/storage/ [/sys/]=/sys/ [/system/]=/system/ [/vendor/]=/vendor/)
    for PRBIND in "${!PRSTARR[@]}"
    do
        if [[ -r "$PRBIND" ]] # is readable
        then # add proot bind
            PROOTSTMNT+="-b $PRBIND:${PRSTARR[$PRBIND]} "
        fi
    done
    # populate NOT readable binds
    PRSTARR=([/dev/]=/dev/ [/dev/ashmem]="$INSTALLDIR/tmp" [/dev/shm]="$INSTALLDIR/tmp" [/proc/stat]="$INSTALLDIR/var/binds/fbindprocstat" [/sys/]=/sys/ [/proc/uptime]="$INSTALLDIR/var/binds/fbindprocuptime")
    for PRBIND in "${!PRSTARR[@]}"
    do
        if [[ ! -r "$PRBIND" ]] # is not readable
        then # add proot bind
            PROOTSTMNT+="-b ${PRSTARR[$PRBIND]}:$PRBIND "
        fi
    done
    PROOTSTMNT+="-w \"\$PWD\" /usr/bin/env -i HOME=/root TERM=\"\$TERM\" TMPDIR=/tmp ANDROID_DATA=/data " # create PRoot user string
}
# Build the PRoot statement as soon as this file is sourced.
_PR00TSTRING_
# uncomment the next line to test function _PR00TSTRING_
# printf "%s\\n" "$PROOTSTMNT" && exit
# knownconfigurations.bash EOF
|
#!/bin/bash
# Source the installer so it runs in this shell (its env changes persist).
. install.sh
# Then remove the working copy one level up.
# NOTE(review): 'rm -rf Gwords' is destructive and unconditional — confirm
# the directory reached by 'cd ..' is the intended parent.
cd ..
rm -rf Gwords
|
# -----------------------------------------------------------------------------
#
# Package : node-notifier
# Version : 5.4.3
# Source repo : https://github.com/mikaelbr/node-notifier
# Tested on : RHEL 8.3
# Script License: Apache License, Version 2 or later
# Maintainer : BulkPackageSearch Automation <sethp@us.ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
PACKAGE_NAME=node-notifier
PACKAGE_VERSION=5.4.3
PACKAGE_URL=https://github.com/mikaelbr/node-notifier

# Install build/test prerequisites (Node.js toolchain, compilers, a browser
# for the notifier's UI-facing tests).
yum -y update && yum install -y yum-utils nodejs nodejs-devel nodejs-packaging npm python38 python38-devel ncurses git gcc gcc-c++ libffi libffi-devel ncurses git jq make cmake
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/appstream/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/baseos/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/7Server/ppc64le/optional/
yum install -y firefox liberation-fonts xdg-utils && npm install n -g && n latest && npm install -g npm@latest && export PATH="$PATH" && npm install --global yarn grunt-bump xo testem acorn

# Pretty OS name used in the report lines below.
OS_NAME=`python3 -c "os_file_data=open('/etc/os-release').readlines();os_info = [i.replace('PRETTY_NAME=','').strip() for i in os_file_data if i.startswith('PRETTY_NAME')];print(os_info[0])"`
HOME_DIR=`pwd`

if ! git clone $PACKAGE_URL $PACKAGE_NAME; then
	echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/clone_fails
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails" > /home/tester/output/version_tracker
	exit 0
fi

cd $HOME_DIR/$PACKAGE_NAME
git checkout $PACKAGE_VERSION
PACKAGE_VERSION=$(jq -r ".version" package.json)

# run the test command from test.sh
# FIX: group install + audit so '!' negates the whole pipeline. The original
# 'if ! npm install && npm audit fix ...' parsed as '(! npm install) && ...',
# which ran the audit steps only when the install FAILED and skipped them
# entirely when it succeeded.
if ! { npm install && npm audit fix && npm audit fix --force; }; then
	echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails"
	exit 0
fi

cd $HOME_DIR/$PACKAGE_NAME
if ! npm test; then
	echo "------------------$PACKAGE_NAME:install_success_but_test_fails---------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_success_but_test_Fails"
	exit 0
else
	echo "------------------$PACKAGE_NAME:install_&_test_both_success-------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Pass | Both_Install_and_Test_Success"
	exit 0
fi
#!/bin/bash
# Regression checks: run the curvature filter on a tiny volume and compare
# against stored baseline images.
shapeworks readimage --name $DATA/1x2x2.nrrd curvature compareimage --name $DATA/curvature1.nrrd
if [[ $? != 0 ]]; then exit -1; fi
# Second case: 5 iterations. No explicit status check is needed here — the
# exit status of this final command becomes the script's exit status.
shapeworks readimage --name $DATA/1x2x2.nrrd curvature --iterations 5 compareimage --name $DATA/curvature2.nrrd
<gh_stars>0
# An address record kept in a simple in-memory, class-level store.
class Mailing
  attr_reader :address, :city, :state, :zip_code

  @@mailings = []

  # attributes: hash with the required keys :address, :city, :state, :zip_code.
  # KeyError is raised (by Hash#fetch) when any of them is missing.
  def initialize(attributes)
    @address  = attributes.fetch(:address)
    @city     = attributes.fetch(:city)
    @state    = attributes.fetch(:state)
    @zip_code = attributes.fetch(:zip_code)
    @id = @@mailings.length + 1
  end

  # Every mailing saved so far.
  def self.all
    @@mailings
  end

  # Append this mailing to the shared store.
  def save
    @@mailings << self
  end

  # Empty the shared store (useful for test isolation).
  def self.clear
    @@mailings = []
  end
end
|
<reponame>divolgin/replicated-2<gh_stars>0
package cmd
import (
"github.com/replicatedhq/replicated/cli/print"
"github.com/spf13/cobra"
)
// channelLsCmd represents the channelLs command
// NOTE(review): no Run/RunE handler is attached to this command here;
// (*runners).channelList below looks like the intended handler — confirm it
// is wired up elsewhere (e.g. in root command setup), otherwise `ls` is a no-op.
var channelLsCmd = &cobra.Command{
    Use:   "ls",
    Short: "List all channels in your app",
    Long:  "List all channels in your app",
}
// init registers the `ls` subcommand under the parent `channel` command.
func init() {
    channelCmd.AddCommand(channelLsCmd)
}
// channelList fetches every channel that belongs to the configured app and
// renders them to r.w. Any API error is returned to the caller unchanged.
func (r *runners) channelList(cmd *cobra.Command, args []string) error {
    channels, err := r.api.ListChannels(r.appID)
    if err == nil {
        return print.Channels(r.w, channels)
    }
    return err
}
|
"""
Odwroc napis.
"""
# Version 1: extended slice with a negative step.
def odwroc_napis_v1(napis):
    """Return a reversed copy of the string ``napis``."""
    odwrocony = napis[::-1]
    return odwrocony
# Version 2: recursion, peeling one character off the end each call.
def odwroc_napis_v2(napis):
    """Return a reversed copy of ``napis``, built recursively."""
    if len(napis) <= 1:
        return napis
    return napis[-1] + odwroc_napis_v2(napis[:-1])
# Version 3: the built-in reversed() iterator joined back into a string.
def odwroc_napis_v3(napis):
    """Return a reversed copy of ``napis`` using reversed() + str.join."""
    odwrocone_znaki = reversed(napis)
    return "".join(odwrocone_znaki)
# Correctness tests: all three implementations must agree on a known reversal.
napis = "abc"
wynik = "cba"
assert odwroc_napis_v1(napis) == wynik
assert odwroc_napis_v2(napis) == wynik
assert odwroc_napis_v3(napis) == wynik
# Edge cases the original suite missed: empty string and a single character
# are their own reversals, and a two-character string swaps.
for funkcja in (odwroc_napis_v1, odwroc_napis_v2, odwroc_napis_v3):
    assert funkcja("") == ""
    assert funkcja("x") == "x"
    assert funkcja("ab") == "ba"
|
<reponame>marlonsupetran/ChromeExtensions
import { Substitutes } from '../constants';
const domHelper = {
    // Return el[0].innerHTML, Substitutes.ERROR when the element is missing,
    // or Substitutes.EMPTY when the markup is empty.
    sanitizeAndGetInnerHtml: el => {
        if (!el || !el[0]) return Substitutes.ERROR; // Element not found
        const html = el[0].innerHTML;
        return html.length ? html : Substitutes.EMPTY; // Empty or blank
    },
    // Return el.text(), Substitutes.ERROR when the element is missing,
    // or Substitutes.EMPTY when the text is empty/blank.
    // Fix: the old boolean chain could never yield EMPTY — an empty string is
    // falsy, so `el.text() && !el.text().length` was always false and the
    // function returned the literal `false` for blank elements.
    sanitizeAndGetText: el => {
        if (!el) return Substitutes.ERROR; // Element not found
        const text = el.text();
        return text && text.length ? text : Substitutes.EMPTY; // Empty or blank
    }
};

export default domHelper;
package app
import "appengine/datastore"
import "time"
// JUser is the datastore/JSON model for an application user.
// The datastore key doubles as the JSON `id` and is not stored in the entity.
type JUser struct {
    Key *datastore.Key `json:"id" datastore:"-"`
    Username string `json:"username"`
    // NOTE(review): the password is serialized to JSON and stored as-is here;
    // confirm hashing/redaction happens elsewhere before persisting.
    Password string `json:"password"`
    Email string `json:"email"`
    Phone string `json:"phone"`
    Admin bool `json:"admin"`
    Date time.Time `json:"date"`
}
// JUsersList is the JSON wrapper around a page of users.
type JUsersList struct {
    Items []*JUser `json:"items"`
}
// JUsersListReq carries list-users request options (endpoints default limit 10).
type JUsersListReq struct {
    Limit int `json:"limit" endpoints:"d=10"`
}
// JUsersAuth is the authentication response: the matched user plus a flag.
type JUsersAuth struct {
    Items *JUser `json:"items"`
    Auth bool `json:"auth"`
}
// JUsersAuthReq carries the credentials for an authentication request.
type JUsersAuthReq struct {
    //Limit int `json:"limit" endpoints:"d=10"`
    Username string `json:"username"`
    Password string `json:"password"`
}
|
import React from 'react';
const List = (props) => {
return (
<div>
{props.items.map(item => (
<div>{item}</div>
))}
</div>
)
};
export default List; |
<filename>tests/common/Common.cpp
//
// Copyright (c) 2015-2020 Microsoft Corporation and Contributors.
// SPDX-License-Identifier: Apache-2.0
//
#ifndef _CRT_SECURE_NO_WARNINGS
#define _CRT_SECURE_NO_WARNINGS
#endif
#include "Common.hpp"
#include "zlib.h"
#include "utils/Utils.hpp"
#ifdef _WIN32
#include <windows.h>
#include <stdio.h>
#include <psapi.h>
#include <processthreadsapi.h>
#elif defined(linux)
#include <malloc.h>
#include <unistd.h>
#endif
namespace testing {
MATSDK_LOG_INST_COMPONENT_NS("Testing", "Unit testing helpers");
    // Wrap a std::string in a CsProtocol::Value.
    // NOTE(review): unlike the overloads below, `type` is left at its default
    // here — confirm the default-constructed type denotes a string value.
    CsProtocol::Value toCsProtocolValue(const std::string& val)
    {
        CsProtocol::Value temp;
        temp.stringValue = val;
        return temp;
    }
    // Wrap a bool: stored in longValue, tagged ValueBool.
    CsProtocol::Value toCsProtocolValue(bool val)
    {
        CsProtocol::Value temp;
        temp.type = CsProtocol::ValueBool;
        temp.longValue = val;
        return temp;
    }
    // Wrap a double, tagged ValueDouble.
    CsProtocol::Value toCsProtocolValue(double val)
    {
        CsProtocol::Value temp;
        temp.type = CsProtocol::ValueDouble;
        temp.doubleValue = val;
        return temp;
    }
    // Wrap a signed 64-bit integer, tagged ValueInt64.
    CsProtocol::Value toCsProtocolValue(int64_t val)
    {
        CsProtocol::Value temp;
        temp.type = CsProtocol::ValueInt64;
        temp.longValue = val;
        return temp;
    }
    // Wrap an unsigned 64-bit integer.
    // NOTE(review): the type tag is the *array* variant (ValueArrayUInt64)
    // although a single scalar is stored — confirm this is intentional and
    // not a typo for ValueUInt64.
    CsProtocol::Value toCsProtocolValue(uint64_t val)
    {
        CsProtocol::Value temp;
        temp.type = CsProtocol::ValueArrayUInt64;
        temp.longValue = val;
        return temp;
    }
    // Wrap an EventLatency enum as its integer value.
    // NOTE(review): tagged with the array variant ValueArrayInt32 for a
    // scalar — confirm vs ValueInt32.
    CsProtocol::Value toCsProtocolValue(MAT::EventLatency val)
    {
        CsProtocol::Value temp;
        temp.type = CsProtocol::ValueArrayInt32;
        temp.longValue = (int)val;
        return temp;
    }
/// <summary>
/// Compress buffer from source to dest.
/// </summary>
/// <param name="source"></param>
/// <param name="sourceLen"></param>
/// <param name="dest"></param>
/// <param name="destLen"></param>
/// <param name="prependSize"></param>
/// <returns></returns>
    // Deflate `source` (zlib, Z_BEST_SPEED) into a newly-allocated buffer.
    // On success *dest owns the new[] buffer (caller frees with delete[]) and
    // destLen is its length; on failure returns false with *dest == NULL.
    // When prependSize is true, the uncompressed size (truncated to 32 bits)
    // is stored in the first uint32_t of the output, ahead of the deflate data.
    bool Compress(const char* source, size_t sourceLen, char** dest, size_t& destLen, bool prependSize)
    {
        if ((!source) || (!sourceLen))
            return false;
        *dest = NULL;
        destLen = 0;
        // Compressing variables
        // Worst-case deflated size for this input, per zlib's compressBound().
        uLong compSize = compressBound((uLong)sourceLen);
        // Allocate memory for the new compressed buffer
        // One extra uint32_t up front when the caller wants the size stored.
        size_t reserved = ((unsigned)prependSize * sizeof(uint32_t));
        char* compBody = new char[std::max(compSize, ((uLong)sourceLen)) + reserved];
        // NOTE(review): plain operator new throws std::bad_alloc rather than
        // returning NULL, so this check (and the trailing "OOM" path) looks
        // dead — confirm no nothrow/custom allocator is in play.
        if (compBody != NULL)
        {
            if (prependSize)
            {
                // Remember source uncompressed size if requested
                uint32_t *s = (uint32_t*)(compBody);
                (*s) = (uint32_t)sourceLen; // truncate this to 32-bit, we do not support 3+ TB blobs
            }
            // Deflate
            int res = compress2((Bytef *)(compBody + reserved), &compSize, (Bytef *)source, (uLong)sourceLen, Z_BEST_SPEED);
            if (res != Z_OK)
            {
                LOG_ERROR("Compression failed, error=%u", res);
                delete[] compBody;
                compBody = NULL;
                return false;
            }
            else
            {
                *dest = compBody;
                destLen = compSize + reserved;
                return true;
            }
        }
        // OOM
        return false;
    }
/// <summary>
/// Expand buffer from source to dest.
/// </summary>
/// <param name="source"></param>
/// <param name="sourceLen"></param>
/// <param name="dest"></param>
/// <param name="destLen"></param>
/// <param name="sizeAtZeroIndex"></param>
/// <returns></returns>
bool Expand(const char* source, size_t sourceLen, char** dest, size_t& destLen, bool sizeAtZeroIndex)
{
if (!(source) || !(sourceLen))
return false;
*dest = NULL;
unsigned reserved = (unsigned)sizeAtZeroIndex * sizeof(uint32_t);
// Get uncompressed size at zero offset.
if (sizeAtZeroIndex)
{
uint32_t s32 = *((uint32_t*)(source));
uint64_t s64 = (sourceLen >= sizeof(uint64_t)) ? *((uint64_t*)(source)) : 0;
// If we are reading 64-bit generated legacy DB, step 32-bit forward to
// skip zero-padding in most-significant DWORD on Intel architecture
if ((s64 - s32) == 0)
reserved += sizeof(uint32_t);
destLen = s32;
}
// Allocate memory for the new uncompressed buffer
if (destLen > 0)
{
try {
char* decompBody = new char[destLen];
if (source != NULL)
{
// Inflate
uLongf len = (uLongf)destLen;
int res = uncompress((Bytef *)decompBody, &len, (const Bytef *)(source + reserved), (uLong)(sourceLen - reserved));
if ((res != Z_OK) || (len != destLen))
{
LOG_ERROR("Decompression failed, error=%d, len=%z, destLen=%z", res, len, (unsigned int)destLen);
delete[] decompBody;
return false;
}
*dest = decompBody;
destLen = len;
return true;
}
}
catch (std::bad_alloc&) {
LOG_ERROR("Decompression failed (out of memory): destLen=%u", destLen);
dest = NULL;
destLen = 0;
}
}
// OOM
return false;
}
    // Build an EventProperties object that exercises every supported property
    // flavor (PII kinds, strings, integers, doubles, bools, GUIDs, timestamps)
    // so tests can round-trip a representative event.
    // name: event name; prio: priority stamped onto the returned event.
    EventProperties CreateSampleEvent(const char *name, EventPriority prio)
    {
#ifdef _WIN32
        /* Test for Win32 GUID type, specific to Windows only */
        GUID win_guid;
        win_guid.Data1 = 0;
        win_guid.Data2 = 1;
        win_guid.Data3 = 2;
        for (uint8_t i = 0; i < 8; i++)
        {
            win_guid.Data4[i] = i;
        }
#endif
        // GUID constructor from byte[16]
        const uint8_t guid_b[16] = {
            0x03, 0x02, 0x01, 0x00,
            0x05, 0x04,
            0x07, 0x06,
            0x08, 0x09,
            0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F };
        GUID_t guid_c(
            0x00010203,
            0x0405,
            0x0607,
            { 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F }
        );
        // NOTE(review): guid_d is never referenced below — dead local? confirm.
        const GUID_t guid_d;
        // Prepare current time in UTC (seconds precision)
        std::time_t t = std::time(nullptr);
        // NOTE(review): std::gmtime's return value is discarded, so this call
        // does not modify `t` — presumably only `t` itself is used below.
        std::gmtime(&t);
        /* C++11 constructor for Visual Studio 2015: this is the most JSON-lookalike syntax that makes use of C++11 initializer lists. */
        EventProperties props(name,
            {
#ifdef _MSC_VER
                { "_MSC_VER", _MSC_VER },
#endif
                { "piiKind.None",               EventProperty("jackfrost",  PiiKind_None) },
                { "piiKind.DistinguishedName",  EventProperty("/CN=<NAME>,OU=PIE,DC=REDMOND,DC=COM",  PiiKind_DistinguishedName) },
                { "piiKind.GenericData",        EventProperty("jackfrost",  PiiKind_GenericData) },
                { "piiKind.IPv4Address",        EventProperty("127.0.0.1", PiiKind_IPv4Address) },
                { "piiKind.IPv6Address",        EventProperty("2001:0db8:85a3:0000:0000:8a2e:0370:7334", PiiKind_IPv6Address) },
                { "piiKind.MailSubject",        EventProperty("RE: test",  PiiKind_MailSubject) },
                { "piiKind.PhoneNumber",        EventProperty("+1-613-866-6960", PiiKind_PhoneNumber) },
                { "piiKind.QueryString",        EventProperty("a=1&b=2&c=3", PiiKind_QueryString) },
                { "piiKind.SipAddress",         EventProperty("sip:<EMAIL>", PiiKind_SipAddress) },
                { "piiKind.SmtpAddress",        EventProperty("<NAME> <<EMAIL>>", PiiKind_SmtpAddress) },
                { "piiKind.Identity",           EventProperty("<NAME>", PiiKind_Identity) },
                { "piiKind.Uri",                EventProperty("http://www.microsoft.com", PiiKind_Uri) },
                { "piiKind.Fqdn",               EventProperty("www.microsoft.com", PiiKind_Fqdn) },
                { "strKey",   "hello" },
                { "strKey2",  "hello2" },
                { "int64Key", (int64_t)1L },
                { "dblKey",   3.14 },
                { "boolKey",  false },
                { "guidKey0", GUID_t("00000000-0000-0000-0000-000000000000") },
                { "guidKey1", GUID_t("<KEY>") },
                { "guidKey2", GUID_t(guid_b) },
                { "guidKey3", GUID_t("<KEY>") },
                { "guidKey4", GUID_t(guid_c) },
                { "timeKey1",  time_ticks_t((uint64_t)0) },     // ticks   precision
                { "timeKey2",  time_ticks_t(&t) }               // seconds precision
            });
#ifdef _WIN32
        props.SetProperty("win_guid", GUID_t(win_guid));
#endif
        props.SetPriority(prio);
        props.SetLevel(DIAG_LEVEL_REQUIRED);
        return props;
    }
std::string GetUniqueDBFileName()
{
std::string fname = std::to_string(MAT::GetCurrentProcessId());
fname.insert(0, "file_");
fname.append(".db");
return fname;
}
    // Print the process heap/working-set usage prefixed with a caller label.
    // Compiles to a no-op unless DEBUG_PERF is defined; implementation is
    // platform-specific (PSAPI on Windows, mallinfo on Linux).
    void LogMemUsage(const char* label)
    {
#ifdef DEBUG_PERF
#ifdef _WIN32
        DWORD processID = ::GetCurrentProcessId();
        HANDLE hProcess;
        PROCESS_MEMORY_COUNTERS pmc;
        // Open a handle to our own process with the rights PSAPI requires.
        hProcess = OpenProcess(PROCESS_QUERY_INFORMATION |
            PROCESS_VM_READ,
            FALSE, processID);
        if (NULL == hProcess)
            return;
        if (GetProcessMemoryInfo(hProcess, &pmc, sizeof(pmc))) {
            printf("Heap Usage- %s ... %ld\n", label, pmc.WorkingSetSize);
        }
        CloseHandle(hProcess);
#elif defined(linux)
        // uordblks: bytes in in-use malloc chunks; hblkhd: mmap'd allocations.
        struct mallinfo mem = mallinfo();
        printf("Heap Usage- %s ... %ld\n", label, mem.uordblks + mem.hblkhd);
#else
        UNREFERENCED_PARAMETER(label);
#endif
#else
        UNREFERENCED_PARAMETER(label);
#endif
    }
    // Print the elapsed milliseconds since the previous LogCpuUsage call,
    // prefixed with a caller label. Compiles to a no-op unless DEBUG_PERF.
    // NOTE(review): the function-local static makes this stateful across
    // calls and not thread-safe — acceptable for single-threaded test timing.
    void LogCpuUsage(const char* label)
    {
#ifdef DEBUG_PERF
        static int64_t lastTime = GetUptimeMs();
        int64_t currTime = GetUptimeMs();
        printf("Time taken- %s: ... %lld\n", label, (currTime - lastTime));
        lastTime = currTime;
#else
        UNREFERENCED_PARAMETER(label);
#endif
    }
} // namespace testing
|
// $Header: /nfs/slac/g/glast/ground/cvs/CalibData/CalibData/Acd/AcdHighRange.h,v 1.1 2007/10/09 18:15:21 echarles Exp $
#ifndef CalibData_AcdHighRange_h
#define CalibData_AcdHighRange_h
#include "CalibData/Acd/AcdCalibObj.h"
#include "CalibData/CalibModel.h"
#include "CalibData/Acd/AcdCalibEnum.h"
namespace CalibData {
/**
* @class AcdHighRangeFitDesc
*
* @brief Description of an ACD calibration for high range readout
*
* This calibration consists of:
* - pedestal = High range pedestal in PHA counts
* - slope = Mips / PHA count near pedestal
* - saturation = Electronics saturation point in PHA counts
*
* @author <NAME>
* $Header: /nfs/slac/g/glast/ground/cvs/AcdDigi/src/AcdDigiUtil.h,v 1.14 2007/12/21 22:54:30 echarles Exp $
*/
  class AcdHighRangeFitDesc : public AcdCalibDescription {
  public:
    /// Get this description
    static const AcdHighRangeFitDesc& instance() {
      // Meyers singleton: constructed once on first use, never destroyed early.
      static const AcdHighRangeFitDesc desc;
      return desc;
    }
  public:
    /// Trivial D'ctor
    virtual ~AcdHighRangeFitDesc(){;};
  private:
    /// This is a singleton
    // Registers the three calibration variables in fit order:
    // [0] pedestal, [1] slope, [2] saturation.
    AcdHighRangeFitDesc()
      :AcdCalibDescription(AcdCalibData::HIGH_RANGE,"ACD_HighRange"){
      addVarName("pedestal");
      addVarName("slope");
      addVarName("saturation");
    }
  };
/**
* @class AcdHighRange
*
* @brief An ACD calibration for high range readout for 1 PMT.
*
* This calibration consists of:
* - pedestal = High range pedestal in PHA counts
* - slope = Mips / PHA count near pedestal
* - saturation = Electronics saturation point in PHA counts
*
* @author <NAME>
* $Header: /nfs/slac/g/glast/ground/cvs/AcdDigi/src/AcdDigiUtil.h,v 1.14 2007/12/21 22:54:30 echarles Exp $
*/
  class AcdHighRange : public AcdCalibObj {
  public:
    /// For gaudi
    static const CLID& calibCLID() {
      return CLID_Calib_ACD_HighRange;
    }
    /// Define the type of calibration
    static AcdCalibData::CALTYPE calibType() {
      return AcdCalibData::HIGH_RANGE;
    }
  public:
    /// Build from description and a set of values
    /// (vals must follow AcdHighRangeFitDesc order: pedestal, slope, saturation)
    AcdHighRange(const AcdCalibDescription& desc, const std::vector<float>& vals, STATUS status=NOFIT) :
      AcdCalibObj(status,vals,desc){
      assert( desc.calibType() == calibType() );
      setVals(vals,status);
    }
    /// Build from individual values
    AcdHighRange(float pedestal, float slope, float saturation, STATUS status) :
      AcdCalibObj(status,AcdHighRangeFitDesc::instance()){
      setVals(pedestal,slope,saturation,status);
    }
    /// Trivial d'tor
    virtual ~AcdHighRange() {}
    // Provide access to the values (indices follow AcdHighRangeFitDesc order)
    /// High range pedestal in PHA counts
    float getPedestal() const { return (*this)[0];}
    /// Mips / PHA count near pedestal
    float getSlope() const { return (*this)[1]; }
    /// Electronics saturation point in PHA counts
    float getSaturation() const { return (*this)[2]; }
  };
}
#endif
|
# Recreate the local-env Docker secrets from $HOME/.secrets.
# Fix: `docker secret rm` fails (non-zero, noisy) when a secret does not exist
# yet, e.g. on first run — suppress its stderr so the script is idempotent.
# Creation errors still surface normally.
for secret in SRCKEY AURORA_USER AURORA_PASS settings.php; do
    docker secret rm "$secret" 2>/dev/null
    docker secret create --label env=local "$secret" "$HOME/.secrets/$secret"
done
|
package com.yoga.admin.shiro;
import com.yoga.core.exception.BusinessException;
import com.yoga.logging.service.LoggingService;
import com.yoga.operator.user.model.User;
import com.yoga.operator.user.service.UserService;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authc.AuthenticationInfo;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.SimpleAuthenticationInfo;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
import org.apache.shiro.realm.AuthorizingRealm;
import org.apache.shiro.session.Session;
import org.apache.shiro.subject.PrincipalCollection;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;
import java.util.HashSet;
import java.util.Set;
@Component
public class OperatorRealm extends AuthorizingRealm {

    @Autowired
    @Lazy
    private SuperAdminUser superAdminUser;

    @Autowired
    @Lazy
    private UserService userService;

    /** Role name shared by every operator principal. */
    public final static String OperatorRole = "operator";

    /** The single role granted to operators. */
    private final static Set<String> operatorRoles = new HashSet<>();
    static {
        // Static initializer replaces the old double-brace anonymous-subclass
        // idiom; reuses the OperatorRole constant instead of a duplicate literal.
        operatorRoles.add(OperatorRole);
    }

    /**
     * Returns the authorization info that was cached on the principal at
     * login time, or null for unrecognized principal types.
     */
    @Override
    protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principalCollection) {
        Object principal = super.getAvailablePrincipal(principalCollection);
        if (principal instanceof OperatorPrincipal) {
            return ((OperatorPrincipal) principal).getAuthorizationInfo();
        }
        return null;
    }

    /** This realm only authenticates operator tokens. */
    @Override
    public boolean supports(AuthenticationToken token) {
        return token instanceof OperatorToken;
    }

    /**
     * Authenticates an operator: the super admin is checked against its own
     * password store, every other user goes through UserService. On success
     * the user and permissions are cached on the Shiro session and the login
     * is recorded in the audit log.
     *
     * @throws AuthenticationException (BusinessException) on bad credentials
     */
    @Override
    protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken authenticationToken) throws AuthenticationException {
        OperatorToken token = (OperatorToken) authenticationToken;
        User user;
        SimpleAuthorizationInfo info;
        if (superAdminUser.isAdmin(token.getUsername())) {
            // Fix: reject when the password does NOT match. The previous
            // check was inverted and threw on a correct super-admin password,
            // making admin login impossible.
            if (!superAdminUser.passwordMatches(String.valueOf(token.getPassword()))) {
                throw new BusinessException("用户名不存在或者密码错误!");
            }
            user = superAdminUser.getAdminInfo();
            info = new SimpleAuthorizationInfo();
            info.setRoles(operatorRoles);
            info.setStringPermissions(superAdminUser.getPermissions());
        } else {
            user = userService.login(token.getTenantId(), token.getUsername(), String.valueOf(token.getPassword()));
            info = new SimpleAuthorizationInfo();
            info.setRoles(operatorRoles);
            info.setStringPermissions(userService.getPrivileges(user.getTenantId(), user.getId()));
        }
        // Cache the user and permissions on the session for downstream use.
        Session session = SecurityUtils.getSubject().getSession();
        session.setAttribute("user", user);
        session.setAttribute("permissions", info.getStringPermissions());
        LoggingService.add(user.getId(), UserService.ModuleName, "管理员登录", user.getId());
        return new SimpleAuthenticationInfo(new OperatorPrincipal(user.getTenantId(), user.getId(), info), token.getPassword(), this.getName());
    }
}
|
#!/bin/bash
# Build a 200 MiB FAT32 disk image and install the UEFI boot loader onto it.
# Fix: abort on any failure — without set -e a failed `mount` would silently
# make the subsequent copies land in the host ./mnt directory instead of the image.
set -eu

qemu-img create -f raw disk.img 200M
mkfs.fat -n 'MIKAN OS' -s 2 -f 2 -R 32 -F 32 disk.img
mkdir -p mnt
sudo mount -o loop disk.img mnt
sudo mkdir -p mnt/EFI/BOOT
#sudo cp Loader.efi mnt/EFI/BOOT/BOOTX64.EFI
sudo cp Loader_memmap.efi mnt/EFI/BOOT/BOOTX64.EFI
sudo umount mnt
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.