text
stringlengths 27
775k
|
|---|
-module(flower_arp).
%% API
-export([make_arp/8, make_arp/6, op/1]).
%% --------------------------------------------------------------------
%% Include files
%% --------------------------------------------------------------------
-include("flower_packet.hrl").
-include("flower_flow.hrl").
%%--------------------------------------------------------------------
%% @doc
%% Make an ethernet ARP packet
%%
%% @end
%%--------------------------------------------------------------------
%% @doc Map between numeric ARP opcodes and their symbolic atoms.
%% Works in both directions: the opcode macro values (from the included
%% headers) become the atoms request/reply/revrequest/revreply, and those
%% atoms map back to the opcode values. Any other integer is passed
%% through unchanged so unknown opcodes survive a round trip.
op(?ARP_OP_REQUEST) -> request;
op(?ARP_OP_REPLY) -> reply;
op(?ARP_OP_REVREQUEST) -> revrequest;
op(?ARP_OP_REVREPLY) -> revreply;
%% Fall-through for unrecognised numeric opcodes: returned as-is.
op(X) when is_integer(X) -> X;
op(request) -> ?ARP_OP_REQUEST;
op(reply) -> ?ARP_OP_REPLY;
op(revrequest) -> ?ARP_OP_REVREQUEST;
op(revreply) -> ?ARP_OP_REVREPLY.
%% @doc Build an Ethernet II header from destination/source MAC addresses.
%% When a VLAN tag {PCP, VID} is supplied, an 802.1Q tag (TPID 16#8100) is
%% inserted between the source address and the EtherType; the single bit
%% between PCP and VID (the DEI/CFI bit) is always 0.
-spec ether_hdr(binary(), binary(), vlan_tci(), integer()) -> binary().
ether_hdr(DlDst, DlSrc, undefined, EthType) ->
    <<DlDst:?ETH_ADDR_LEN/bytes-unit:8, DlSrc:?ETH_ADDR_LEN/bytes-unit:8, EthType:16>>;
ether_hdr(DlDst, DlSrc, {PCP, VID}, EthType) ->
    <<DlDst:?ETH_ADDR_LEN/bytes-unit:8, DlSrc:?ETH_ADDR_LEN/bytes-unit:8, 16#8100:16, PCP:3, 0:1, VID:12, EthType:16>>.
%% @doc Build a complete Ethernet ARP frame.
%% DlDst/DlSrc are the Ethernet destination/source MACs; Sha/Spa and
%% Tha/Tpa are the sender and target hardware/protocol addresses of the
%% ARP payload. The fixed payload fields are: hardware type 1 (Ethernet),
%% protocol type IPv4, hardware address length ?ETH_ADDR_LEN, protocol
%% address length 4.
-spec make_arp(integer(), vlan_tci(), binary(), binary(), binary(), binary(), binary(), binary()) -> binary().
make_arp(Op, TCI, DlDst, DlSrc, Sha, Spa, Tha, Tpa) ->
    Ether = ether_hdr(DlDst, DlSrc, TCI, flower_packet:eth_type(arp)),
    Arp = <<1:16, ?ETH_TYPE_IP:16, ?ETH_ADDR_LEN:8, 4:8,
            Op:16, Sha:?ETH_ADDR_LEN/bytes-unit:8, Spa:4/bytes-unit:8, Tha:?ETH_ADDR_LEN/bytes-unit:8, Tpa:4/bytes-unit:8>>,
    list_to_binary([Ether, Arp]).
%% @doc Convenience wrapper that derives the Ethernet addresses from the
%% ARP payload: destination MAC = target hardware address (Tha), source
%% MAC = sender hardware address (Sha).
-spec make_arp(integer(), vlan_tci(), binary(), binary(), binary(), binary()) -> binary().
make_arp(Op, TCI, Sha, Spa, Tha, Tpa) ->
    make_arp(Op, TCI, Tha, Sha, Sha, Spa, Tha, Tpa).
|
Simple Waiter trait
===================
[Build Status](https://travis-ci.org/dtantsur/rust-waiter)
[crates.io](https://crates.io/crates/waiter)
[Documentation](https://docs.rs/waiter/)
|
package io.novafoundation.nova.app.root.navigation
import android.annotation.SuppressLint
import androidx.navigation.NavController
import androidx.navigation.fragment.DialogFragmentNavigator
import androidx.navigation.fragment.NavHostFragment
import io.novafoundation.nova.app.R
import io.novafoundation.nova.app.root.navigation.navigators.AddFragmentNavigator
/**
 * [NavHostFragment] that registers extra navigators on the controller: the
 * stock [DialogFragmentNavigator] plus the app's [AddFragmentNavigator]
 * targeting the R.id.navHost container.
 */
class NovaNavHostFragment : NavHostFragment() {

    // NOTE(review): super.onCreateNavController() is deliberately not called
    // (hence the MissingSuperCall suppression) — confirm the navigators the
    // superclass would register are not needed here.
    @SuppressLint("MissingSuperCall")
    override fun onCreateNavController(navController: NavController) {
        navController.navigatorProvider.addNavigator(DialogFragmentNavigator(requireContext(), childFragmentManager))

        val addFragmentNavigator = AddFragmentNavigator(requireContext(), childFragmentManager, R.id.navHost)
        navController.navigatorProvider.addNavigator(addFragmentNavigator)
    }
}
|
// Copyright 2021, Peter Birch, mailto:peter@lightlogic.co.uk
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <filesystem>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include "nexus.hpp"
#include "nxloader.hpp"
namespace py = pybind11;
using namespace NXModel;
// Python bindings for the NXModel simulator: direction enumeration, the five
// node message structs with their pack/unpack helpers, and the simulation
// classes (Nexus, NXMesh, NXNode, NXMessagePipe, NXLoader).
PYBIND11_MODULE(nxmodel, m) {
    // Expose enumerations
    py::enum_<NXConstants::direction_t>(m, "direction_t")
        .value("NORTH", NXConstants::DIRECTION_NORTH)
        .value("EAST", NXConstants::DIRECTION_EAST )
        .value("SOUTH", NXConstants::DIRECTION_SOUTH)
        .value("WEST", NXConstants::DIRECTION_WEST )
        .export_values();

    // Expose structs
    // NOTE(review): the init factories default-construct the structs without
    // zero-initialising them, so field values start indeterminate — confirm
    // callers always assign every field before packing.
    py::class_<NXConstants::node_control_t>(m, "node_control_t")
        .def(py::init([]() { node_control_t _; return _; }));
    py::class_<NXConstants::node_load_t>(m, "node_load_t")
        .def(py::init([]() { node_load_t _; return _; }));
    py::class_<NXConstants::node_signal_t>(m, "node_signal_t")
        .def(py::init([]() { node_signal_t _; return _; }));
    py::class_<NXConstants::node_trace_t>(m, "node_trace_t")
        .def(py::init([]() { node_trace_t _; return _; }));
    py::class_<NXConstants::node_raw_t>(m, "node_raw_t")
        .def(py::init([]() { node_raw_t _; return _; }));

    // Expose packing functions: each serialises a message struct into a
    // 32-bit word through a byte pointer (NOTE(review): the resulting word
    // therefore depends on host byte order — confirm that is intended).
    m.def("pack_node_control", [](NXConstants::node_control_t msg) -> uint32_t {
        uint32_t raw = 0;
        NXConstants::pack_node_control(msg, (uint8_t *)&raw);
        return raw;
    });
    m.def("pack_node_load", [](NXConstants::node_load_t msg) -> uint32_t {
        uint32_t raw = 0;
        NXConstants::pack_node_load(msg, (uint8_t *)&raw);
        return raw;
    });
    m.def("pack_node_signal", [](NXConstants::node_signal_t msg) -> uint32_t {
        uint32_t raw = 0;
        NXConstants::pack_node_signal(msg, (uint8_t *)&raw);
        return raw;
    });
    m.def("pack_node_trace", [](NXConstants::node_trace_t msg) -> uint32_t {
        uint32_t raw = 0;
        NXConstants::pack_node_trace(msg, (uint8_t *)&raw);
        return raw;
    });
    m.def("pack_node_raw", [](NXConstants::node_raw_t msg) -> uint32_t {
        uint32_t raw = 0;
        NXConstants::pack_node_raw(msg, (uint8_t *)&raw);
        return raw;
    });

    // Expose unpacking functions (inverse of the pack_* helpers above)
    m.def("unpack_node_control", [](uint32_t raw) -> NXConstants::node_control_t {
        return NXConstants::unpack_node_control((uint8_t *)&raw);
    });
    m.def("unpack_node_load", [](uint32_t raw) -> NXConstants::node_load_t {
        return NXConstants::unpack_node_load((uint8_t *)&raw);
    });
    m.def("unpack_node_signal", [](uint32_t raw) -> NXConstants::node_signal_t {
        return NXConstants::unpack_node_signal((uint8_t *)&raw);
    });
    m.def("unpack_node_trace", [](uint32_t raw) -> NXConstants::node_trace_t {
        return NXConstants::unpack_node_trace((uint8_t *)&raw);
    });
    m.def("unpack_node_raw", [](uint32_t raw) -> NXConstants::node_raw_t {
        return NXConstants::unpack_node_raw((uint8_t *)&raw);
    });

    // Expose classes (held by shared_ptr so Python and C++ share ownership)
    py::class_<Nexus, std::shared_ptr<Nexus>>(m, "Nexus")
        .def(py::init<uint32_t, uint32_t, uint32_t, uint32_t>())
        .def("get_rows", &Nexus::get_rows )
        .def("get_columns", &Nexus::get_columns )
        .def("get_mesh", &Nexus::get_mesh )
        .def("get_ingress", &Nexus::get_ingress )
        .def("get_egress", &Nexus::get_egress )
        .def("run", &Nexus::run )
        .def("dump_vcd", &Nexus::dump_vcd )
        .def("is_output_available", &Nexus::is_output_available)
        .def("pop_output", &Nexus::pop_output );
    py::class_<NXMesh, std::shared_ptr<NXMesh>>(m, "NXMesh")
        .def(py::init<uint32_t, uint32_t, uint32_t, uint32_t>())
        .def("get_node", &NXMesh::get_node)
        .def("is_idle", &NXMesh::is_idle )
        .def("step", &NXMesh::step );
    py::class_<NXNode, std::shared_ptr<NXNode>>(m, "NXNode")
        .def(py::init<uint32_t, uint32_t, uint32_t, uint32_t>())
        .def("reset", &NXNode::reset )
        .def("attach", &NXNode::attach )
        .def("get_pipe", &NXNode::get_pipe )
        .def("is_idle", &NXNode::is_idle )
        .def("step", &NXNode::step )
        .def("get_memory", &NXNode::get_memory )
        .def("get_instruction_count", &NXNode::get_instruction_count)
        .def("get_output_count", &NXNode::get_output_count )
        .def("get_current_inputs", &NXNode::get_current_inputs )
        .def("get_next_inputs", &NXNode::get_next_inputs )
        .def("get_current_outputs", &NXNode::get_current_outputs );
    // Overloaded enqueue/dequeue bindings need explicit member-pointer casts
    // so pybind11 can tell the five message-type overloads apart.
    py::class_<NXMessagePipe, std::shared_ptr<NXMessagePipe>>(m, "NXMessagePipe")
        .def(py::init<>())
        .def("enqueue", static_cast<void (NXMessagePipe::*)(node_load_t )>(&NXMessagePipe::enqueue))
        .def("enqueue", static_cast<void (NXMessagePipe::*)(node_signal_t )>(&NXMessagePipe::enqueue))
        .def("enqueue", static_cast<void (NXMessagePipe::*)(node_control_t )>(&NXMessagePipe::enqueue))
        .def("enqueue", static_cast<void (NXMessagePipe::*)(node_trace_t )>(&NXMessagePipe::enqueue))
        .def("enqueue", static_cast<void (NXMessagePipe::*)(node_raw_t )>(&NXMessagePipe::enqueue))
        .def("dequeue", static_cast<void (NXMessagePipe::*)(node_load_t &)>(&NXMessagePipe::dequeue))
        .def("dequeue", static_cast<void (NXMessagePipe::*)(node_signal_t &)>(&NXMessagePipe::dequeue))
        .def("dequeue", static_cast<void (NXMessagePipe::*)(node_control_t &)>(&NXMessagePipe::dequeue))
        .def("dequeue", static_cast<void (NXMessagePipe::*)(node_trace_t &)>(&NXMessagePipe::dequeue))
        .def("dequeue", static_cast<void (NXMessagePipe::*)(node_raw_t &)>(&NXMessagePipe::dequeue))
        .def("enqueue_raw", &NXMessagePipe::enqueue_raw)
        .def("dequeue_raw", &NXMessagePipe::dequeue_raw)
        .def("is_idle", &NXMessagePipe::is_idle)
        .def("next_header", &NXMessagePipe::next_header)
        .def("next_type", &NXMessagePipe::next_type);
    py::class_<NXLoader>(m, "NXLoader")
        .def(py::init<Nexus *, std::string>());
}
|
package com.example.yourquiz.screens
import android.graphics.Color
import android.os.Bundle
import androidx.fragment.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.TextView
import android.widget.Toast
import androidx.navigation.NavArgs
import androidx.navigation.Navigation
import androidx.navigation.fragment.navArgs
import com.example.yourquiz.R
import com.example.yourquiz.model.DeckModel
import com.google.android.material.card.MaterialCardView
import com.google.android.material.snackbar.Snackbar
import com.google.gson.Gson
import com.google.gson.reflect.TypeToken
import java.io.File
/**
 * Deck screen: shows the question counts of a deck's three lists
 * ("Tekrar"/repeat, "Doğru"/correct, "Yanlış"/wrong), lets the user select a
 * list, and navigates to the show-question / add-question screens.
 */
class Deck : Fragment() {

    lateinit var showQuestion: View
    lateinit var addQuestion: View
    lateinit var replyCard: MaterialCardView
    lateinit var trueCard: MaterialCardView
    lateinit var falseCard: MaterialCardView
    lateinit var replyListSize: TextView
    lateinit var trueListSize: TextView
    lateinit var falseListSize: TextView
    lateinit var deckName: TextView

    // Size of the currently selected list; refreshed by selectedListFun().
    var listSize = 0

    // All decks parsed from the cached JSON file.
    var deckList = ArrayList<DeckModel>()

    val args: DeckArgs by navArgs()

    // Display name of the active list; also passed to the ShowQuestion screen.
    var selectedList = "Tekrar Listesi"

    override fun onCreateView(
        inflater: LayoutInflater, container: ViewGroup?,
        savedInstanceState: Bundle?
    ): View? {
        val view = inflater.inflate(R.layout.fragment_deck, container, false)

        deckName = view.findViewById(R.id.deckName)
        showQuestion = view.findViewById(R.id.showQuestionBtn)
        addQuestion = view.findViewById(R.id.addQuestionBtn)
        replyCard = view.findViewById(R.id.replyList)
        trueCard = view.findViewById(R.id.trueList)
        falseCard = view.findViewById(R.id.falseList)
        replyListSize = view.findViewById(R.id.replyListSize)
        trueListSize = view.findViewById(R.id.trueListSize)
        falseListSize = view.findViewById(R.id.falseListSize)

        deckName.setText(args.deckName)
        getDecks()

        // Show the current size of each of this deck's three lists.
        val rlSize = deckList.get(args.id).replyList.size
        val tSize = deckList.get(args.id).trueList.size
        val fSize = deckList.get(args.id).falseList.size
        replyListSize.setText(rlSize.toString())
        trueListSize.setText(tSize.toString())
        falseListSize.setText(fSize.toString())

        // Each card highlights itself, dims the others, and records which
        // list it stands for.
        replyCard.setOnClickListener {
            replyCard.setCardBackgroundColor(Color.rgb(28, 75, 130))
            falseCard.setCardBackgroundColor(Color.rgb(221, 107, 77))
            trueCard.setCardBackgroundColor(Color.rgb(221, 107, 77))
            selectedList = "Tekrar Listesi"
        }
        trueCard.setOnClickListener {
            replyCard.setCardBackgroundColor(Color.rgb(221, 107, 77))
            falseCard.setCardBackgroundColor(Color.rgb(221, 107, 77))
            trueCard.setCardBackgroundColor(Color.rgb(28, 75, 130))
            selectedList = "Doğru Listesi"
        }
        falseCard.setOnClickListener {
            replyCard.setCardBackgroundColor(Color.rgb(221, 107, 77))
            falseCard.setCardBackgroundColor(Color.rgb(28, 75, 130))
            trueCard.setCardBackgroundColor(Color.rgb(221, 107, 77))
            selectedList = "Yanlış Listesi"
        }

        showQuestion.setOnClickListener {
            val action = DeckDirections.actionDeckToShowQuestion(args.id)
            action.listName = selectedList
            selectedListFun(selectedList)
            // Only navigate when the selected list actually has questions.
            if (listSize > 0) {
                Navigation.findNavController(view).navigate(action)
            }
        }
        addQuestion.setOnClickListener {
            val action = DeckDirections.actionDeckToAddQuestion(args.id)
            Navigation.findNavController(view).navigate(action)
        }
        return view
    }

    /** Refresh [listSize] from the list whose display name is [name]. */
    fun selectedListFun(name: String) {
        when (name) {
            "Tekrar Listesi" -> {
                listSize = deckList.get(args.id).replyList.size
            }
            "Doğru Listesi" -> {
                listSize = deckList.get(args.id).trueList.size
            }
            else -> {
                listSize = deckList.get(args.id).falseList.size
            }
        }
    }

    /**
     * Read the cached deck JSON from the app cache directory.
     * @return the file contents, or null when the file is missing/unreadable.
     */
    private fun getJsonData(): String? {
        val fileName = requireContext().cacheDir.absolutePath + "/DeckJson.json"
        val jsonString: String
        try {
            jsonString = File(fileName).bufferedReader().use { it.readText() }
        } catch (ex: Exception) {
            ex.printStackTrace()
            return null
        }
        return jsonString
    }

    /**
     * Parse the cached JSON into [deckList]. The file is now read once
     * (previously getJsonData() was called twice: once for the null check and
     * again for the data). [deckList] is left untouched when no cache exists.
     */
    private fun getDecks() {
        val jsonFileString = getJsonData() ?: return
        val gson = Gson()
        val listDeckType = object : TypeToken<ArrayList<DeckModel>>() {}.type
        deckList = gson.fromJson(jsonFileString, listDeckType)
    }
}
|
#include "quasar.h"
/*
* HTTP server load testing tool
* server must provide Content-Length header
* URL: [http://]<host|ip>[:<port>]/object[?|&|_]
* may be appended with ?, &, or _<version>
* nofile: /etc/security/limits.conf
* tee: stdbuf -o L ./quasar 2 20 0 "http://host/qq?" |tee qt
*/
/*
 * Entry point. Parses the command line and target URL, spawns tnum worker
 * threads, then loops forever: once per PERIOD it aggregates per-thread
 * statistics, prints a status line, opens new connections (ramp per period,
 * or up to a fixed total of ramp connections when a 5th "noramp" argument
 * is present), and sleeps until the next period boundary.
 *
 * NOTE(review): vernum, hp, gp, gl, hl, th, scf, sre, coe, wkr, send_req,
 * ECHUNK and PERIOD are globals/macros presumably declared in quasar.h —
 * confirm their types (e.g. vernum must match the "%lu" conversion).
 */
int
main(int argc, char **argv)
{
	int i, tnum, ramp, allcox, connum, k, cfd;
	uint64_t allrps, allbps;
	char *p, hbuf[256], prt[8], erbuf[256];
	pthread_t tid;
	struct timespec tick, tock;
	struct addrinfo hints, *res, *r;
	struct epoll_event ev = {.events = EPOLLIN};

	if (argc < 5) {
meh:		fputs("Usage: ./quasar <#threads> <#connAsecond> <#versions> <URL> [noramp]\n", stderr);
		exit(1);
	}
	if ( sscanf(argv[1], "%d", &tnum) < 1 ||\
	     sscanf(argv[2], "%d", &ramp) < 1 ||\
	     sscanf(argv[3], "%lu", &vernum) < 1) goto meh;
	if (tnum < 1) {
		fputs("At least 1 thread, please!\n", stderr);
		exit(1);
	}
	/* strip an optional "scheme://" prefix; hp points at host[:port]/path */
	if ( (p = strstr(argv[4], "://")) != NULL) hp = p + 3;
	else hp = argv[4];
	if ( (gp = strstr(hp, "/")) == NULL) {
		fputs("URL needs closing slash!\n", stderr);
		exit(1);
	}
	gl = strlen(gp);
	if ( (p = strstr(hp, ":")) != NULL && p < gp) {
		i = gp - p - 1; // length of the port string
		memcpy(prt, p + 1, i);
		prt[i] = '\0';
		*p = '\0'; // terminate the hostname in place (inside argv)
		hl = p - hp;
	} else {
		memcpy(prt, "80\0", 3); // no explicit port: default to 80
		hl = gp - hp;
		if (hl > sizeof(hbuf) - 1) {
			fputs("Hostname is too long!\n", stderr);
			exit(1);
		}
		// Copy the hostname out of argv so it can be NUL-terminated.
		// NOTE(review): this length check exists only on the no-port
		// path; the explicit-port path terminates inside argv instead.
		memcpy(hbuf, hp, hl);
		hbuf[hl] = '\0';
		hp = hbuf;
	}
	/* parsing completed, setting up data structures: */
	bzero(&hints, sizeof(hints));
	hints.ai_family = AF_UNSPEC;
	hints.ai_socktype = SOCK_STREAM;
	if ( (i = getaddrinfo(hp, prt, &hints, &res)) != 0) {
		fputs("getaddrinfo: ", stderr);
		fputs(gai_strerror(i), stderr);
		exit(1);
	}
	// th[tnum] is a structure for the main() thread:
	if ( (th = calloc(tnum + 1, sizeof(struct qstat))) == NULL) {
		fputs("No memory!\n", stderr);
		exit(1);
	}
	// One epoll instance + mutex + worker thread per requested thread.
	for (i = 0; i < tnum; i++) {
		if ( (th[i].efd = epoll_create(ECHUNK)) < 0) {
			perror("epoll_create");
			exit(1);
		}
		pthread_mutex_init(&th[i].mx, NULL);
		if ( (errno = pthread_create(&tid, NULL, wkr, &th[i])) != 0) {
			perror("pthread_create");
			exit(1);
		}
	}
	pthread_mutex_init(&th[tnum].mx, NULL); // dummy mutex for this thread
	/* ramping up: */
	fputs(" #TCP RPS kbps [SRVCLOSE|SNDERROR|CONERROR]\n", stdout);
	clock_gettime(CLOCK_MONOTONIC, &tick);
	tick.tv_sec -= 1; // start one second in the past so the first interval is non-zero
	connum = k = 0; // k is round-robin thread index
	for (; ;) {
		allcox = allrps = allbps = 0;
		if (clock_gettime(CLOCK_MONOTONIC, &tock) < 0) {
			perror("clock_gettime");
			exit(1);
		}
		// Harvest and reset every thread's counters under its mutex
		// (including th[tnum], this thread's own slot).
		for (i = 0; i <= tnum; i++) {
			pthread_mutex_lock(&th[i].mx);
			allcox += th[i].cox;
			allrps += th[i].rps;
			allbps += th[i].bps;
			th[i].cox = 0;
			th[i].rps = 0;
			th[i].bps = 0;
			pthread_mutex_unlock(&th[i].mx);
		}
		connum -= allcox; // if some connections die
		/* tick = tock - tick: */
		tick.tv_sec = tock.tv_sec - tick.tv_sec;
		if ( (tick.tv_nsec = tock.tv_nsec - tick.tv_nsec) < 0) {
			tick.tv_sec--;
			tick.tv_nsec += 1000000000;
		}
		// tv_sec is reused to hold the elapsed interval in milliseconds.
		tick.tv_sec = tick.tv_nsec / 1000000 + tick.tv_sec * 1000; //milliseconds
		allrps = allrps * 1000 / tick.tv_sec; // rps
		allbps = allbps * 8 / tick.tv_sec; // kbps
		/* presentation block (connum rps bps scf sre coe): */
		// One line per elapsed PERIOD (prints extra lines to catch up
		// after a stall; the 500ms term rounds to the nearest period).
		for (i = 0; tick.tv_sec > (i * 1000 + 500) * PERIOD; i++) {
			printf("%*d %*d %*d ", 8, connum, 8, (int) allrps, 8, (int) allbps);
			if (scf) printf(" *SRVCLOSE*");
			if (sre) {
				strerror_r(sre, erbuf, sizeof(erbuf));
				printf(" SND: %s", erbuf);
			}
			if (coe) {
				strerror_r(coe, erbuf, sizeof(erbuf));
				printf(" CON: %s", erbuf);
			}
			printf("\n");
		}
		fflush(stdout);
		coe = sre = scf = 0; // we handle flags off mutex
		/* connection block: */
		for (i = 0; (argc < 6 && i < ramp) || (argc == 6 && connum < ramp); i++) {
			r = res;
			// Try each resolved address until one connects.
			do {
				cfd = socket(r->ai_family, r->ai_socktype, r->ai_protocol);
				if (cfd < 0) continue;
				if (connect(cfd, r->ai_addr, r->ai_addrlen) == 0) break;
				while (close(cfd) < 0 && errno == EINTR);
			} while ( (r = r->ai_next) != NULL);
			if (r == NULL) {
				// connection can't be established
				coe = errno;
				break;
			}
			connum++;
			ev.data.u64 = (uint64_t) cfd;
			// Hand the new socket to a worker thread, round-robin.
			if (epoll_ctl(th[k].efd, EPOLL_CTL_ADD, cfd, &ev) < 0) {
				perror("epoll_ctl_add");
				exit(1);
			}
			if (++k >= tnum) k = 0;
			send_req(cfd, &th[tnum]); // here the dummy mutex fires
		}
		memcpy(&tick, &tock, sizeof(tick));
		tock.tv_sec += PERIOD;
		// Absolute-time sleep keeps the reporting period drift-free.
		while (clock_nanosleep(CLOCK_MONOTONIC, TIMER_ABSTIME, &tock, NULL) < 0 && errno == EINTR);
	}
	return 0;
}
|
#!/bin/bash
# Convert a video to FLV: 320x240, lavc FLV video at 250 kbps, 128 kbps
# ABR MP3 audio resampled to 44.1 kHz.
#
# Usage: $0 <input-file> <output-file>

# Fail early with a clear message instead of letting mencoder error out
# on missing/empty arguments.
if [ "$#" -lt 2 ]; then
    echo "Usage: $(basename "$0") <input-file> <output-file>" >&2
    exit 1
fi

mencoder "$1" -o "$2" \
    -of lavf \
    -vf scale=320:240 \
    -srate 44100 \
    -oac mp3lame -lameopts abr:br=128 \
    -ovc lavc -lavcopts vcodec=flv:vbitrate=250:mbd=2:mv0:trell:v4mv:cbp:last_pred=3
|
print("Congratulations! You're running Maddi's Task List Program.")


class Tasklist():
    # NOTE(review): this class is defined but never used by main() below,
    # which keeps its tasks in a plain list — confirm whether it is intended
    # for future use or should be wired in.

    def __init__(self, arrayOfTasks):
        # arrayOfTasks: the list of task strings managed by this task list.
        self.arrayOfTasks = arrayOfTasks
def main():
    """Run the interactive task-list loop until the user enters 'q'.

    Menu options: "1" prints the tasks, "2" appends a new task, "3" removes
    a task (with a message, instead of crashing, when the task is absent).
    """
    arrayOfTasks = ["learn", "practice", "teach"]

    def _prompt():
        # Single source for the menu text so the two call sites cannot drift.
        return input("What would you like to do next?"
                     "1.List all tasks. "
                     "2.Add a task to the list. "
                     "3.Delete a task. "
                     "Type q to quit the program.")

    userRequest = _prompt()
    while userRequest != 'q':
        if userRequest == "1":
            print(arrayOfTasks)
        elif userRequest == "2":
            addedTask = input("Add a task")
            arrayOfTasks.append(addedTask)
        elif userRequest == "3":
            deleteTask = input("Delete a task")
            # list.remove() raises ValueError on a missing item; guard it so
            # a typo does not abort the program.
            if deleteTask in arrayOfTasks:
                arrayOfTasks.remove(deleteTask)
            else:
                print("Task not found:", deleteTask)
        userRequest = _prompt()


main()
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.tensorflow.cluster.tfrunning;
import org.apache.ignite.Ignite;
/**
 * Utils class that helps to format Python script that starts TensorFlow server.
 */
public class TensorFlowServerScriptFormatter {
    /**
     * Formats TensorFlow server specification so that it's available to be passed into a Python script.
     *
     * @param srv Server specification.
     * @param join Joins server by default or not.
     * @param ignite Ignite instance.
     * @return Formatted TensorFlow server script.
     */
    public String format(TensorFlowServer srv, boolean join, Ignite ignite) {
        StringBuilder builder = new StringBuilder();

        builder.append("from __future__ import absolute_import").append("\n");
        builder.append("from __future__ import division").append("\n");
        builder.append("from __future__ import print_function").append("\n");
        builder.append("from threading import Thread").append("\n");
        builder.append("from time import sleep").append("\n");
        builder.append("import os, signal").append("\n");
        builder.append("\n");
        // Watchdog: poll the parent process id (PPID env var) and raise
        // SIGUSR1 in this process once the parent goes away, so the server
        // does not outlive the process that spawned it.
        builder.append("def check_pid(pid):").append("\n");
        builder.append("    try:").append("\n");
        builder.append("        os.kill(pid, 0)").append("\n");
        builder.append("    except OSError:").append("\n");
        builder.append("        return False").append("\n");
        builder.append("    else:").append("\n");
        builder.append("        return True").append("\n");
        builder.append("\n");
        builder.append("def threaded_function(pid):").append("\n");
        builder.append("    while check_pid(pid):").append("\n");
        builder.append("        sleep(1)").append("\n");
        builder.append("    os.kill(os.getpid(), signal.SIGUSR1)").append("\n");
        builder.append("\n");
        builder.append("Thread(target = threaded_function, args = (int(os.environ['PPID']), )).start()")
            .append("\n");
        builder.append("\n");
        // TensorFlow imports; the contrib imports register the Ignite
        // dataset and IGFS ops as a side effect.
        builder.append("import tensorflow as tf").append('\n');
        builder.append("to_import_contrib_ops = tf.contrib.resampler").append("\n");
        builder.append("from tensorflow.contrib.ignite import IgniteDataset").append("\n");
        builder.append("import tensorflow.contrib.igfs.python.ops.igfs_ops").append("\n");
        builder.append("print('job:%s task:%d' % ('")
            .append(srv.getJobName())
            .append("', ")
            .append(srv.getTaskIdx())
            .append("))")
            .append("\n");
        builder.append("print('IGNITE_DATASET_HOST = ', os.environ.get('IGNITE_DATASET_HOST'))").append("\n");
        builder.append("print('IGNITE_DATASET_PORT = ', os.environ.get('IGNITE_DATASET_PORT'))").append("\n");
        builder.append("print('IGNITE_DATASET_LOCAL = ', os.environ.get('IGNITE_DATASET_LOCAL'))").append("\n");
        // Cluster topology and this server's role are handed over via TF_CONFIG.
        builder.append("os.environ['TF_CONFIG'] = '").append(formatTfConfigVar(srv, ignite)).append("'\n");
        builder.append("server = tf.contrib.distribute.run_standard_tensorflow_server()").append("\n");

        if (join)
            builder.append("server.join()").append('\n');

        return builder.toString();
    }

    /**
     * Formats "TF_CONFIG" variable to be passed into user script.
     *
     * @param srv Server description.
     * @param ignite Ignite instance.
     * @return Formatted "TF_CONFIG" variable to be passed into user script.
     */
    private String formatTfConfigVar(TensorFlowServer srv, Ignite ignite) {
        // Newlines in the cluster spec are replaced so the JSON stays on one
        // line inside the single-quoted Python string literal above.
        return "{\"cluster\" : " +
            srv.getClusterSpec().format(ignite).replace('\n', ' ') +
            ", " +
            "\"task\": {\"type\" : \"" +
            srv.getJobName() +
            "\", \"index\": " +
            srv.getTaskIdx() +
            "}}";
    }
}
|
#!/usr/bin/perl
# Launch the Java Publisher client with the Diamond Java bindings on the
# classpath and the native libraries on LD_LIBRARY_PATH.
#
# Usage: ./run-client.pl config_file [message]
use warnings;
use strict;

my $DIAMOND_SRC="../../..";
my $JAVA_BINDINGS_DIR="$DIAMOND_SRC/platform/bindings/java";
my $classpath=".:$JAVA_BINDINGS_DIR/libs/javacpp.jar:$JAVA_BINDINGS_DIR/target/diamond-1.0-SNAPSHOT.jar";
my $nativePath="$JAVA_BINDINGS_DIR/target/classes/x86-lib:$DIAMOND_SRC/platform/build";
$ENV{LD_LIBRARY_PATH}=$nativePath;

die "usage: ./run-client.pl config_file [message]" unless @ARGV>=1;
my $configFile = $ARGV[0];
my $message = "";
if (@ARGV==2) {
    $message = $ARGV[1];
}

# Use the list form of system() so the config path and message reach java
# verbatim — the previous single-string form let the shell word-split them
# and interpret any special characters they contained.
my @cmd = ("java", "-classpath", $classpath, "-Djava.library.path=$nativePath", "Publisher", $configFile);
push @cmd, $message if $message ne "";
system(@cmd);
|
require 'json'
module Metro
module Views
#
# Provides support for a JSON Representation of a view.
#
class JSONView
#
# Determine if a view exists for this specified format
#
# @param [String] view_path the name of the view to find
# @return a true if the json view exists and false if it does not exist.
#
def self.exists?(view_path)
File.exists? json_view_path(view_path)
end
#
# Parse the contents of the view given the name.
#
# @param [String] view_path the name of the view to read
# @return a Hash that contains the contents of the view.
#
def self.parse(view_path)
JSON.parse File.read json_view_path(view_path)
end
#
# @return the file type format of this view.
#
def self.format
:json
end
#
# Writes the content out to the spcified view path
#
def self.write(view_path,content)
filename = json_view_path(view_path)
json_content = JSON.pretty_generate(content)
File.write(filename,json_content)
end
private
#
# A helper method to generate the name of the json view file. In this case
# it is the view name with the suffix .json.
#
def self.json_view_path(view_path)
File.extname(view_path) == "" ? "#{view_path}.json" : view_path
end
end
end
end
|
param($installPath, $toolsPath, $package, $project)

# NuGet uninstall script: removes the reference to the SQLite for Windows
# Phone extension SDK that the install script added to the project.
$sqliteReference = $project.Object.References.Find("SQLite.WP80, version=3.8.7.2")

# Compare with $null on the left: if Find() ever returned a collection,
# "-eq $null" on the right would filter the collection instead of testing
# for null (PSScriptAnalyzer: PossibleIncorrectComparisonWithNull).
if ($null -eq $sqliteReference) {
    Write-Host "Unable to find a reference to the extension SDK SQLite for Windows Phone."
    Write-Host "Verify that the reference to the extension SDK SQLite for Windows Phone has already been removed."
} else {
    $sqliteReference.Remove()
    Write-Host "Successfully removed the reference to the extension SDK SQLite for Windows Phone."
}
|
Foobar
```j<caret>avascript
console.log("!");
```
Foobar
|
package im.ycz.zrouter.utils;
import android.net.Uri;
import android.support.v4.app.Fragment;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Created by xuanqi on 4/26/16.
 */
public class ZRouteUtils {

    /** Cache of classes already resolved by name. */
    private static Map<String, Class> clazzes = new HashMap<>();

    /**
     * Resolve a {@link Class} by its fully-qualified name, caching classes
     * that have already been looked up.
     *
     * @param className fully-qualified class name
     * @return the Class instance, or null when the class cannot be found
     */
    public static Class getClazz(String className) {
        Class<?> clazz = null;
        if (clazzes.containsKey(className)) {
            clazz = clazzes.get(className);
        } else {
            try {
                clazz = Class.forName(className);
                clazzes.put(className, clazz); // cache the resolved class
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
                return null;
            }
        }
        return clazz;
    }

    /**
     * A URL counts as "relative" here when the Uri is relative AND it does
     * not already start with the default domain.
     */
    private static boolean isRelativeURL(Uri uri, String domain) {
        return uri.isRelative() && !uri.toString().startsWith(domain);
    }

    /**
     * Normalize a link into canonical form:
     * relative address -> absolute address under the default domain;
     * missing scheme -> default scheme prepended.
     *
     * @param url raw link
     * @return normalized link
     */
    public static String wrapURL(String url, String scheme, String domain) {
        Uri uri = Uri.parse(url);
        if (isRelativeURL(uri, domain)) {
            // Relative address not starting with the default domain,
            // e.g. "/static/m/" or "static/m"
            if (url.startsWith("/")) {
                return scheme + "://" + domain + url;
            } else {
                return scheme + "://" + domain + "/" + url;
            }
        } else {
            // Absolute address, or one already carrying the default domain,
            // e.g. http://example.com, local://index.js, example.com
            if (!uri.isAbsolute()) {
                // A bare "example.com"-style address: attach the scheme.
                url = uri.buildUpon().scheme(scheme).toString();
            }
        }
        return url;
    }

    /**
     * Instantiate a Fragment by reflectively invoking its static
     * newInstance(String, boolean) factory method.
     *
     * NOTE(review): when the class cannot be found, getClazz returns null and
     * this method throws an uncaught NullPointerException instead of
     * returning null — confirm callers expect that.
     *
     * @return the Fragment, or null when the reflective call fails
     */
    public static Fragment create(String fragmentName, String url, boolean isStandalone) {
        try {
            Class clazz = getClazz(fragmentName);
            Method method = clazz.getDeclaredMethod("newInstance", String.class, boolean.class);
            Fragment fragment = (Fragment) method.invoke(null, url, isStandalone);
            return fragment;
        } catch (NoSuchMethodException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (InvocationTargetException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Same as {@link #create(String, String, boolean)} but with the Class
     * object supplied directly.
     *
     * @return the Fragment, or null when the reflective call fails
     */
    public static Fragment create(Class fragmentClazz, String url, boolean isStandalone) {
        try {
            Method method = fragmentClazz.getDeclaredMethod("newInstance", String.class, boolean.class);
            Fragment fragment = (Fragment) method.invoke(null, url, isStandalone);
            return fragment;
        } catch (NoSuchMethodException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (InvocationTargetException e) {
            e.printStackTrace();
        }
        return null;
    }
}
|
import {IPluginLoader, ILogger, IConfig, IEventBus, IStatsTarget} from '@homenet/core';
import * as metrics from 'datadog-metrics';
/** Core config extended with the DataDog credentials this plugin requires. */
interface IDatadogConfig extends IConfig {
  datadog: {
    apiKey: string;
    appKey: string;
  }
}
/**
 * Factory that builds the DatadogPluginLoader class with the host's DI
 * annotations applied (the decorators need the `annotate` object, so the
 * class must be declared inside this function).
 *
 * NOTE(review): the class implements IStatsTarget but the declared return
 * type advertises a constructor of IPluginLoader — confirm which contract
 * callers actually rely on.
 */
export function create(annotate: any): { DatadogPluginLoader: new(...args: any[]) => IPluginLoader } {
  @annotate.plugin()
  class DatadogPluginLoader implements IStatsTarget {
    // Underlying datadog-metrics BufferedMetricsLogger instance.
    private metricsLogger: any;

    constructor(
      @annotate.service('IConfig') private config: IDatadogConfig,
      @annotate.service('ILogger') private logger: ILogger,
      @annotate.service('IEventBus') private eventBus: IEventBus
    ) {
      // Metrics are buffered locally and flushed to DataDog every 30s,
      // all under the "homenet." prefix.
      this.metricsLogger = new metrics.BufferedMetricsLogger({
        apiKey: config.datadog.apiKey,
        appKey: config.datadog.appKey,
        // host: 'myhost',
        prefix: 'homenet.',
        flushIntervalSeconds: 30,
        // defaultTags: ['env:staging', 'region:us-east-1']
      });
    }

    /** Subscribe to bus events and forward them to DataDog as metrics. */
    load() : void {
      this.logger.info('Loading DataDog');

      // Every value event becomes a gauge sample.
      this.eventBus.on('value.*.*', '*', e => {
        this.gauge(e.name, e.data);
      });
      // Every trigger firing becomes a counter increment.
      this.eventBus.on('trigger.*.*', 'triggered', e => {
        this.counter(e.name);
      });
    }

    /** Record an instantaneous gauge value under the given metric id. */
    gauge(id: string, value: number) : void {
      this.metricsLogger.gauge(id, value);
    }

    /** Increment a counter (increment is passed through to datadog-metrics). */
    counter(id: string, increment?: number) : void {
      this.metricsLogger.increment(id, increment);
    }
  }

  return { DatadogPluginLoader };
}
|
import 'package:flutter/material.dart';
import 'package:flutter/scheduler.dart';
import 'package:flutter_easyrefresh/easy_refresh.dart';
import 'package:get/get.dart';
import 'package:tin_flutter/app/index.dart';
import 'package:tin_flutter/generated/l10n.dart';
import 'dynamic_page.dart';
import 'home_page.dart';
import 'mine_page.dart';
import 'main_logic.dart';
import 'main_state.dart';
/// Root page hosting the three tab pages (home/dynamic/mine) in a PageView
/// driven by a bottom navigation bar.
class MainPage extends StatefulWidget {
  @override
  _MainPageState createState() => _MainPageState();
}
class _MainPageState extends State<MainPage> {
  final logic = Get.find<MainLogic>();
  final MainState state = Get.find<MainLogic>().state;

  // Fix: the controller used to be re-created inside build(), which leaked
  // the previous instance and reset the page position on every rebuild.
  // One instance per State lifetime, disposed in dispose().
  final PageController _pageController = PageController();

  // The three tab pages, in bottom-bar order.
  List<Widget> pages = [HomePage(), DynamicPage(), MinePage()];

  @override
  Widget build(BuildContext context) {
    // Install the default pull-to-refresh header/footer after this frame.
    SchedulerBinding.instance?.addPostFrameCallback((timeStamp) {
      EasyRefresh.defaultHeader = MaterialHeader();
      EasyRefresh.defaultFooter = MaterialFooter();
    });

    // List<String> tabs = [S.current.home, S.current.activity, S.current.other];
    return Scaffold(
      body: PageView(
        children: pages, // behaves like a ViewPager
        controller: _pageController,
        onPageChanged: (index) {
          state.selectedIndex.value = index;
        },
      ),
      // Obx rebuilds the bar whenever the selected index changes.
      bottomNavigationBar: Obx(() =>
        BottomNavigationBar(
          items: [
            BottomNavigationBarItem(icon: Icon(Icons.home), label: S.of(context).home),
            BottomNavigationBarItem(
              icon: Icon(Icons.dynamic_form), label: S.of(context).activity
            ),
            BottomNavigationBarItem(
              icon: Icon(Icons.supervisor_account), label: S.of(context).other
            ),
          ],
          currentIndex: state.selectedIndex.value,
          fixedColor: Colors.blue,
          type: BottomNavigationBarType.fixed,
          onTap: _onItemTapped,
        )
      ),
    );
  }

  /// Jump the PageView to the tapped tab.
  void _onItemTapped(int index) {
    _pageController.jumpToPage(index);
  }

  @override
  void dispose() {
    _pageController.dispose();
    Get.delete<MainLogic>();
    super.dispose();
  }
}
|
package com.example.core.domain.model
import android.os.Parcelable
import kotlinx.android.parcel.Parcelize
/**
 * Per-country statistics snapshot. Parcelable so it can be passed between
 * screens. NOTE(review): field meanings are inferred from their names —
 * presumably mapped from an API response; confirm against the data source.
 */
@Parcelize
data class Country(
    val active: Int,
    val activePerOneMillion: Double,
    val cases: Int,
    val casesPerOneMillion: Int,
    val continent: String,
    val country: String,
    val critical: Int,
    val criticalPerOneMillion: Double,
    val deaths: Int,
    val deathsPerOneMillion: Double,
    val oneCasePerPeople: Int,
    val oneDeathPerPeople: Int,
    val oneTestPerPeople: Int,
    val population: Int,
    val recovered: Int,
    val recoveredPerOneMillion: Double,
    val tests: Int,
    val testsPerOneMillion: Int,
    val todayCases: Int,
    val todayDeaths: Int,
    val todayRecovered: Int,
    // Timestamp of the last data update.
    val updated: Long,
    val countryInfo: CountryInfo,
    // Mutable: toggled locally, unlike the immutable statistics fields.
    var isFavorite: Boolean
) : Parcelable
/**
 * Country metadata (identifier, flag URL, ISO codes, coordinates). All
 * fields have defaults, so an instance can be created with any subset of
 * values present.
 */
@Parcelize
data class CountryInfo(
    var _id: Int = 0,
    var flag: String = "",
    var iso2: String? = "",
    var iso3: String? = "",
    var lat: Double = 0.0,
    var long: Double = 0.0
) : Parcelable
|
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.client.workbench;
import com.google.gwt.user.client.ui.HasWidgets;
import com.google.gwt.user.client.ui.IsWidget;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwtmockito.GwtMockitoTestRunner;
import org.jboss.errai.common.client.dom.HTMLElement;
import org.jboss.errai.common.client.dom.elemental2.Elemental2DomUtil;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.uberfire.client.mvp.PerspectiveActivity;
import org.uberfire.client.mvp.UIPart;
import org.uberfire.client.workbench.events.PanelFocusEvent;
import org.uberfire.client.workbench.events.PlaceGainFocusEvent;
import org.uberfire.client.workbench.events.PlaceHiddenEvent;
import org.uberfire.client.workbench.events.PlaceLostFocusEvent;
import org.uberfire.client.workbench.events.PlaceMaximizedEvent;
import org.uberfire.client.workbench.events.PlaceMinimizedEvent;
import org.uberfire.client.workbench.events.SelectPlaceEvent;
import org.uberfire.client.workbench.panels.WorkbenchPanelPresenter;
import org.uberfire.client.workbench.panels.WorkbenchPanelView;
import org.uberfire.client.workbench.panels.impl.SimpleWorkbenchPanelPresenter;
import org.uberfire.client.workbench.panels.impl.StaticWorkbenchPanelPresenter;
import org.uberfire.client.workbench.part.WorkbenchPartPresenter;
import org.uberfire.mocks.EventSourceMock;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.mvp.impl.DefaultPlaceRequest;
import org.uberfire.mvp.impl.PathPlaceRequest;
import org.uberfire.workbench.model.CompassPosition;
import org.uberfire.workbench.model.PanelDefinition;
import org.uberfire.workbench.model.PartDefinition;
import org.uberfire.workbench.model.PerspectiveDefinition;
import org.uberfire.workbench.model.impl.PanelDefinitionImpl;
import org.uberfire.workbench.model.impl.PartDefinitionImpl;
import org.uberfire.workbench.model.impl.PerspectiveDefinitionImpl;
import org.uberfire.workbench.model.menu.MenuFactory;
import org.uberfire.workbench.model.menu.Menus;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.refEq;
import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code PanelManagerImpl}. The PanelManager is exercised
 * against a mocked {@link BeanFactory} (which vends panel/part presenters)
 * and stubbed CDI event sources, so the tests can verify part/panel
 * lifecycle behaviour (add, remove, focus, custom panels) without a real
 * workbench.
 */
@RunWith(GwtMockitoTestRunner.class)
public class PanelManagerTest {

    @Mock
    BeanFactory beanFactory;
    @Mock
    StubPlaceGainFocusEvent placeGainFocusEvent;
    @Mock
    StubPlaceLostFocusEvent placeLostFocusEvent;
    @Mock
    StubSelectPlaceEvent selectPlaceEvent;
    @Mock
    StubPanelFocusEvent panelFocusEvent;
    @Mock
    StubPlaceMaximizedEvent placeMaximizedEvent;
    @Mock
    StubPlaceMinimizedEvent placeMinimizedEvent;
    // NOTE(review): field name "placeHidEvent" is inconsistent with its type
    // StubPlaceHiddenEvent — consider renaming to placeHiddenEvent.
    @Mock
    StubPlaceHiddenEvent placeHidEvent;
    @Mock
    SimpleWorkbenchPanelPresenter workbenchPanelPresenter;
    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    LayoutSelection layoutSelection;
    @Mock
    Elemental2DomUtil elemental2DomUtil;

    // System under test; spied so appendWidgetToElement can be disabled.
    private PanelManagerImpl panelManager;

    /**
     * This is the part presenter that will be returned by the mock BeanFactory in response to any newWorkbenchPart()
     * call. Tests that work with more than one part will have to create their own more specific when/then rules.
     */
    private WorkbenchPartPresenter partPresenter;

    /**
     * This perspective is set as the current perspective on the PanelManager before each test is run.
     */
    private PerspectiveDefinition testPerspectiveDef;

    /**
     * This is the Panel Presenter returned by the mock BeanFactory when asked for <tt>newWorkbenchPanel( testPerspectiveDef.getRoot() ) )</tt>.
     */
    private WorkbenchPanelPresenter testPerspectiveRootPanelPresenter;

    @Before
    public void setup() {
        when(layoutSelection.get().getPerspectiveContainer()).thenReturn(mock(HasWidgets.class));

        testPerspectiveDef = new PerspectiveDefinitionImpl(SimpleWorkbenchPanelPresenter.class.getName());
        testPerspectiveRootPanelPresenter = mock(WorkbenchPanelPresenter.class);

        // The BeanFactory hands back the canned root panel presenter for the
        // test perspective's root definition.
        when(beanFactory.newRootPanel(any(),
                                      eq(testPerspectiveDef.getRoot()))).thenReturn(testPerspectiveRootPanelPresenter);
        when(testPerspectiveRootPanelPresenter.getDefinition()).thenReturn(testPerspectiveDef.getRoot());
        when(testPerspectiveRootPanelPresenter.getPanelView()).thenReturn(mock(WorkbenchPanelView.class));
        when(testPerspectiveRootPanelPresenter.getDefaultChildType()).thenReturn(SimpleWorkbenchPanelPresenter.class.getName());

        partPresenter = mock(WorkbenchPartPresenter.class);
        when(beanFactory.newWorkbenchPart(any(),
                                          any(),
                                          any(),
                                          any(),
                                          any())).thenReturn(partPresenter);

        // Every newWorkbenchPanel() call yields a fresh deep-stubbed presenter
        // wired to the panel definition it was created for.
        when(beanFactory.newWorkbenchPanel(any())).thenAnswer(new Answer<WorkbenchPanelPresenter>() {
            @Override
            public WorkbenchPanelPresenter answer(InvocationOnMock invocation) throws Throwable {
                WorkbenchPanelPresenter newPanelPresenter = mock(WorkbenchPanelPresenter.class,
                                                                 RETURNS_DEEP_STUBS);
                when(newPanelPresenter.getDefinition()).thenReturn((PanelDefinition) invocation.getArguments()[0]);
                return newPanelPresenter;
            }
        });

        PerspectiveActivity testPerspectiveActivity = mock(PerspectiveActivity.class);
        panelManager = spy(new PanelManagerImpl(placeGainFocusEvent,
                                                placeLostFocusEvent,
                                                panelFocusEvent,
                                                selectPlaceEvent,
                                                placeMaximizedEvent,
                                                placeMinimizedEvent,
                                                placeHidEvent,
                                                null,
                                                null,
                                                layoutSelection,
                                                beanFactory,
                                                elemental2DomUtil));
        panelManager.setRoot(testPerspectiveActivity,
                             testPerspectiveDef.getRoot());
        // DOM attachment is irrelevant (and impossible) in a unit test.
        doNothing().when(panelManager).appendWidgetToElement(any(HTMLElement.class),
                                                             any(Widget.class));
    }

    @Test
    public void addPartToRootPanelShouldWork() throws Exception {
        PlaceRequest rootPartPlace = new DefaultPlaceRequest("rootPartPlace");
        PartDefinition rootPart = new PartDefinitionImpl(rootPartPlace);
        Menus rootPartMenus = MenuFactory.newContributedMenu("RootPartMenu").endMenu().build();
        UIPart rootUiPart = new UIPart("RootUiPart",
                                       null,
                                       mock(IsWidget.class));
        panelManager.addWorkbenchPart(rootPartPlace,
                                      rootPart,
                                      panelManager.getRoot(),
                                      rootPartMenus,
                                      rootUiPart,
                                      "rootContextId",
                                      100,
                                      200);

        // the presenter should have been created and configured for the rootPart
        verify(partPresenter).setWrappedWidget(rootUiPart.getWidget());
        verify(partPresenter).setContextId("rootContextId");

        // the panel manager should be aware of the place/part mapping for the added part
        assertEquals(rootPart,
                     panelManager.getPartForPlace(rootPartPlace));

        // the panel manager should select the place, firing a general notification
        verify(selectPlaceEvent).fire(refEq(new SelectPlaceEvent(rootPartPlace)));

        // the panel manager should have modified the panel or part definitions (this is the responsibility of the parent panel)
        assertEquals(null,
                     rootPart.getParentPanel());
        assertFalse(panelManager.getRoot().getParts().contains(rootPart));
    }

    @Test
    public void addPartToUnknownPanelShouldFail() throws Exception {
        PlaceRequest partPlace = new DefaultPlaceRequest("partPlace");
        PartDefinition part = new PartDefinitionImpl(partPlace);
        Menus partMenus = MenuFactory.newContributedMenu("PartMenu").endMenu().build();
        UIPart uiPart = new UIPart("uiPart",
                                   null,
                                   mock(IsWidget.class));
        // This panel definition was never registered with the PanelManager.
        PanelDefinition randomUnattachedPanel = new PanelDefinitionImpl(SimpleWorkbenchPanelPresenter.class.getName());
        try {
            panelManager.addWorkbenchPart(partPlace,
                                          part,
                                          randomUnattachedPanel,
                                          partMenus,
                                          uiPart,
                                          "contextId",
                                          null,
                                          null);
            fail();
        } catch (IllegalArgumentException e) {
            assertEquals("Target panel is not part of the layout",
                         e.getMessage());
        }

        // the presenter should not have been created and configured for the rootPart
        verify(partPresenter,
               never()).setWrappedWidget(uiPart.getWidget());
        verify(partPresenter,
               never()).setContextId("rootContextId");

        // the panel manager should not be aware of the place/part mapping for the failed add
        assertEquals(null,
                     panelManager.getPartForPlace(partPlace));

        // the failed part/place should not be selected
        verify(selectPlaceEvent,
               never()).fire(refEq(new SelectPlaceEvent(partPlace)));
    }

    @Test
    public void removingLastPartFromRootPanelShouldLeaveRootPanel() throws Exception {
        // add
        PlaceRequest rootPartPlace = new DefaultPlaceRequest("rootPartPlace");
        PartDefinition rootPart = new PartDefinitionImpl(rootPartPlace);
        Menus rootPartMenus = MenuFactory.newContributedMenu("RootPartMenu").endMenu().build();
        UIPart rootUiPart = new UIPart("RootUiPart",
                                       null,
                                       mock(IsWidget.class));
        panelManager.addWorkbenchPart(rootPartPlace,
                                      rootPart,
                                      panelManager.getRoot(),
                                      rootPartMenus,
                                      rootUiPart,
                                      "rootContextId",
                                      null,
                                      null);

        panelManager.removePartForPlace(rootPartPlace);

        // the panel manager should not know about the part/place mapping anymore
        assertEquals(null,
                     panelManager.getPartForPlace(rootPartPlace));

        // the part's presenter bean should be destroyed
        verify(beanFactory).destroy(partPresenter);

        // the root panel itself, although empty, should remain (because it is the root panel)
        verify(beanFactory,
               never()).destroy(testPerspectiveRootPanelPresenter);
    }

    @Test
    public void addPanelAtRootPositionShouldReturnRootPanel() throws Exception {
        when(beanFactory.newRootPanel(any(PerspectiveActivity.class),
                                      eq(testPerspectiveDef.getRoot()))).thenReturn(testPerspectiveRootPanelPresenter);
        when(testPerspectiveRootPanelPresenter.getDefaultChildType()).thenReturn(null);
        PerspectiveActivity testPerspectiveActivity = mock(PerspectiveActivity.class);
        panelManager.setRoot(testPerspectiveActivity,
                             testPerspectiveDef.getRoot());

        // Asking for a panel at CompassPosition.ROOT should hand back the
        // existing root rather than attaching this definition.
        PanelDefinition notActuallyAdded = new PanelDefinitionImpl(SimpleWorkbenchPanelPresenter.class.getName());
        PanelDefinition result = panelManager.addWorkbenchPanel(testPerspectiveDef.getRoot(),
                                                                notActuallyAdded,
                                                                CompassPosition.ROOT);

        assertSame(result,
                   testPerspectiveDef.getRoot());
    }

    @Test
    public void addedPanelsShouldBeRemembered() throws Exception {
        PanelDefinition subPanel = new PanelDefinitionImpl(SimpleWorkbenchPanelPresenter.class.getName());
        testPerspectiveDef.getRoot().appendChild(CompassPosition.WEST,
                                                 subPanel);
        panelManager.addWorkbenchPanel(panelManager.getRoot(),
                                       subPanel,
                                       CompassPosition.WEST);

        assertTrue(panelManager.mapPanelDefinitionToPresenter.containsKey(subPanel));
    }

    @Test
    public void addedCustomPanelsShouldBeRemembered() throws Exception {
        HasWidgets container = mock(HasWidgets.class);
        PanelDefinition customPanel = panelManager.addCustomPanel(container,
                                                                  StaticWorkbenchPanelPresenter.class.getName());

        assertTrue(panelManager.mapPanelDefinitionToPresenter.containsKey(customPanel));
    }

    @Test
    public void addedCustomPanelsInsideHTMLElementsShouldBeRemembered() throws Exception {
        HTMLElement container = mock(HTMLElement.class);
        PanelDefinition customPanel = panelManager.addCustomPanel(container,
                                                                  StaticWorkbenchPanelPresenter.class.getName());

        assertTrue(panelManager.mapPanelDefinitionToPresenter.containsKey(customPanel));
    }

    @Test
    public void explicitlyRemovedPanelsShouldBeForgotten() throws Exception {
        PanelDefinition subPanel = new PanelDefinitionImpl(SimpleWorkbenchPanelPresenter.class.getName());
        testPerspectiveDef.getRoot().appendChild(CompassPosition.WEST,
                                                 subPanel);
        panelManager.addWorkbenchPanel(panelManager.getRoot(),
                                       subPanel,
                                       CompassPosition.WEST);

        panelManager.removeWorkbenchPanel(subPanel);

        assertFalse(panelManager.mapPanelDefinitionToPresenter.containsKey(subPanel));
    }

    @Test
    public void explicitlyRemovedCustomPanelsShouldBeForgotten() throws Exception {
        HasWidgets container = mock(HasWidgets.class);
        PanelDefinition customPanel = panelManager.addCustomPanel(container,
                                                                  StaticWorkbenchPanelPresenter.class.getName());

        panelManager.removeWorkbenchPanel(customPanel);

        assertFalse(panelManager.mapPanelDefinitionToPresenter.containsKey(customPanel));
    }

    @Test
    public void explicitlyRemovedCustomPanelsInsideHTMLElementsShouldBeForgotten() throws Exception {
        HTMLElement container = mock(HTMLElement.class);
        PanelDefinition customPanel = panelManager.addCustomPanel(container,
                                                                  StaticWorkbenchPanelPresenter.class.getName());

        panelManager.removeWorkbenchPanel(customPanel);

        assertFalse(panelManager.mapPanelDefinitionToPresenter.containsKey(customPanel));
    }

    @Test
    public void explicitlyRemovedCustomPanelsInsideElemental2HTMLElementsShouldBeForgotten() {
        elemental2.dom.HTMLElement container = mock(elemental2.dom.HTMLElement.class);
        PanelDefinition customPanel = panelManager.addCustomPanel(container,
                                                                  StaticWorkbenchPanelPresenter.class.getName());

        panelManager.removeWorkbenchPanel(customPanel);

        assertFalse(panelManager.mapPanelDefinitionToPresenter.containsKey(customPanel));
    }

    @Test
    public void explicitlyRemovingRootPanelShouldFail() throws Exception {
        try {
            panelManager.removeWorkbenchPanel(testPerspectiveDef.getRoot());
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("root"));
        }
    }

    @Test
    public void onSelectPlaceEventFocusesCorrectPresenter() throws Exception {
        // West panel holds a part for a DefaultPlaceRequest...
        PanelDefinition p1 = new PanelDefinitionImpl(SimpleWorkbenchPanelPresenter.class.getName());
        PartDefinition pd1 = new PartDefinitionImpl(new DefaultPlaceRequest());
        p1.addPart(pd1);
        testPerspectiveDef.getRoot().appendChild(CompassPosition.WEST,
                                                 p1);
        panelManager.addWorkbenchPanel(panelManager.getRoot(),
                                       p1,
                                       CompassPosition.WEST);

        // ...east panel holds a part for a PathPlaceRequest.
        PanelDefinition p2 = new PanelDefinitionImpl(SimpleWorkbenchPanelPresenter.class.getName());
        PartDefinition pd2 = new PartDefinitionImpl(new PathPlaceRequest());
        p2.addPart(pd2);
        testPerspectiveDef.getRoot().appendChild(CompassPosition.EAST,
                                                 p2);
        panelManager.addWorkbenchPanel(panelManager.getRoot(),
                                       p2,
                                       CompassPosition.EAST);

        // Selecting the PathPlaceRequest must focus the east panel's presenter.
        SelectPlaceEvent event = new SelectPlaceEvent(new PathPlaceRequest());
        panelManager.onSelectPlaceEvent(event);

        final WorkbenchPanelPresenter partPresenter = panelManager.mapPanelDefinitionToPresenter.get(p2);
        verify(partPresenter,
               times(2)).setFocus(true);
    }

    // After UF-117:
    // TODO test part disposal (not NORTH/SOUTH/EAST/WEST) side effect of AbstractPanelManagerImpl.removePart()
    // TODO test part reattachment (NORTH/SOUTH/EAST/WEST) side effect of AbstractPanelManagerImpl.removePart()

    // Typed event-source stubs so Mockito can inject distinct mocks per event.
    static class StubPlaceGainFocusEvent extends EventSourceMock<PlaceGainFocusEvent> {

    }

    static class StubPlaceLostFocusEvent extends EventSourceMock<PlaceLostFocusEvent> {

    }

    static class StubSelectPlaceEvent extends EventSourceMock<SelectPlaceEvent> {

    }

    static class StubPanelFocusEvent extends EventSourceMock<PanelFocusEvent> {

    }

    static class StubPlaceMaximizedEvent extends EventSourceMock<PlaceMaximizedEvent> {

    }

    static class StubPlaceMinimizedEvent extends EventSourceMock<PlaceMinimizedEvent> {

    }

    static class StubPlaceHiddenEvent extends EventSourceMock<PlaceHiddenEvent> {

    }
}
|
# Michael Fulton's Portfolio and Resume webpage
Site is currently deployed as a github pages site, and can be viewed here:
https://dwareb.github.io/MFultonPortfolio/
## Technologies
* Javascript
* React
* Node / NPM
* CSS
* HTML
## How to build / run / deploy
To run and build this do a git clone first.
```
git clone https://github.com/dwareb/MFultonPortfolio/
```
After that, have npm install the required modules.
```
npm install
```
To run the site for testing you can run the following command
```
npm start
```
After that, the website should be available at `localhost:3000`, and should update live as changes are made.
To build the site run the following:
```
npm run build
```
Finally to deploy the following command:
```
npm run deploy
```
## Additional sources
* I used coolors.co for mapping out my color scheme.
* I used the following youtube guide as inspiration for my slideshow thumbnail bar: https://youtu.be/-VKXgsrLEjw
* The contact page is powered by the formspree api.
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\DB;
class TugasController extends Controller
{
    /**
     * List tasks joined with their assigned employee, paginated 5 per page.
     */
    public function index()
    {
        //DB::table('')->get(); // returns an array of objects [][]
        // fetch rows from the "tugas" (task) table
        //$tugas = DB::table('tugas')->get();
        $tugas = DB::table('tugas')
            ->join('pegawai', 'tugas.IDPegawai', '=', 'pegawai.pegawai_id')
            ->select('tugas.*', 'pegawai.pegawai_nama')
            ->paginate(5);

        // send the task data to the index view
        return view('tugas.index',['tugas' => $tugas]); // more than one value may be passed
    }

    /**
     * Show the "add task" form with employees sorted by name.
     */
    public function tambah()
    {
        $pegawai = DB::table('pegawai')->orderBy('pegawai_nama', 'asc')->get();
        // render the "add" view
        return view('tugas.tambah',['pegawai' => $pegawai]);
    }

    // method that inserts a new row into the "tugas" table
    public function store(Request $request)
    {
        // insert the submitted fields into the "tugas" table
        DB::table('tugas')->insert([
            'IDPegawai' => $request->IDPegawai,
            'Tanggal' => $request->Tanggal,
            'NamaTugas' => $request->NamaTugas,
            'Status' => $request->Status
        ]);
        // redirect back to the task listing page
        return redirect('/tugas');
    }

    // method that shows the edit form for one task
    public function edit($id)
    {
        // fetch the task row matching the chosen id
        $tugas = DB::table('tugas')->where('ID',$id)->get();
        $pegawai = DB::table('pegawai')->orderBy('pegawai_nama', 'asc')->get();
        $Status = "" ;
        // pass the fetched task data to the edit.blade.php view
        return view('tugas.edit',['tugas' => $tugas,'pegawai' => $pegawai,'status' => $Status]);
    }

    // update an existing task row
    public function update(Request $request)
    {
        // overwrite the row whose ID matches the submitted id
        DB::table('tugas')->where('ID',$request->id)->update([
            'IDPegawai' => $request->IDPegawai,
            'Tanggal' => $request->Tanggal,
            'NamaTugas' => $request->NamaTugas,
            'Status' => $request->Status
        ]);
        // redirect back to the task listing page
        return redirect('/tugas');
    }

    // method that deletes a task
    public function hapus($id)
    {
        // delete the task row matching the chosen id
        DB::table('tugas')->where('ID',$id)->delete();
        // redirect back to the task listing page
        return redirect('/tugas');
    }
}
|
#!/bin/bash
function usage {
    echo 'This script takes an input directory and runs convert.py and process.py to run the time domain processing'
    echo ' and plot the raw data'
}

# Defaults, overridable from the command line below.
dir='st'      # input directory holding the .txt captures
exeDir='.'    # directory containing process.py / sensorLog.py
csv=''        # set to ".csv" by -c (currently unused by the loop below)
list=''       # explicit file list from -f; defaults to $dir/*.txt

while getopts "hd:e:f:c" arg; do
    case $arg in
        h)
            usage
            exit
            ;;
        d)
            dir=$OPTARG
            echo "$dir"
            ;;
        e)
            # BUG FIX: this previously assigned "execDir", which nothing read;
            # the loop uses "exeDir", so the -e option silently had no effect.
            exeDir=$OPTARG
            echo "$exeDir"
            ;;
        f)
            list=$OPTARG
            echo "$list"
            ;;
        c)
            csv=".csv"
    esac
done

# BUG FIX: only fall back to the directory glob when -f was not supplied;
# previously this line unconditionally overwrote any -f value.
if [ -z "$list" ]; then
    list=$dir/*.txt
fi

for file1 in $list
do
    echo "$file1"
    filepart=$(basename -s .txt "$file1")
    echo "$filepart"
    truth="$dir/$filepart.interpolated.truth.csv"
    echo "$truth"
    # Start the plotter in the background, replay the sensor log into it,
    # then stop the plotter once the replay finishes.
    "$exeDir"/process.py "$dir/$filepart.txt" --truthFile "$truth" &
    processPid=$!
    "$exeDir"/sensorLog.py "$dir/$filepart"
    kill $processPid
done
|
# Metrics obtained from Vera.ttf by hand using PfaEdit
# version 08:28 11-Jan-2004 (040111).
#
# 268 chars, 266 glyphs
# weight class 400 (Book), width class medium (100%), line gap 410
# styles (SubFamily) 'Roman'
use v6;
use Test;
# 65 standalone checks, 2 per Unicode character, 3 per glyph, plus the
# final glyph-count check.
plan 65 + 256 * 2 + 268 * 3 + 1;
use Font::FreeType;
use Font::FreeType::Raw::Defs;

# Load the Vera Sans face.
my Font::FreeType $ft .= new;

# Load the BDF file.
my $vera = $ft.face: 't/fonts/Vera.ttf';
ok $vera.defined, 'FreeType.face returns an object';
isa-ok $vera, 'Font::FreeType::Face',
    'FreeType.face returns face object';

# Test general properties of the face.
is $vera.num-faces, 1, '$face.num-faces';
is $vera.face-index, 0, '$face.face-index';
is $vera.postscript-name, 'BitstreamVeraSans-Roman', '$face.postscript-name';
is $vera.family-name, 'Bitstream Vera Sans', '$face.family-name';
is $vera.style-name, 'Roman', '$face->style-name';

# Test face flags.
my %expected-flags = (
    :has-glyph-names(True),
    :has-horizontal-metrics(True),
    :has-kerning(True),
    :has-reliable-glyph-names(False),
    :has-vertical-metrics(False),
    :is-bold(False),
    :is-fixed-width(False),
    :is-italic(False),
    :is-scalable(True),
    :is-sfnt(True),
);
for %expected-flags.pairs.sort {
    is-deeply $vera."{.key}"(), .value, "\$face.{.key}";
}

# Some other general properties.
is $vera.num-glyphs, 268, '$face.number-of-glyphs';
is $vera.units-per-EM, 2048, '$face.units-per-em';
my $underline-position = $vera.underline-position;
# BUG FIX: the original condition "<= -213 || >= -284" was true for every
# possible value; the intent is a range check between -284 and -213.
ok -284 <= $underline-position && $underline-position <= -213, 'underline position';
is $vera.underline-thickness, 143, 'underline thickness';
# italic angle 0
is $vera.ascender, 1901, 'ascender';
is $vera.descender, -483, 'descender';
is $vera.height, 2384, 'height';

# Test getting the set of fixed sizes available.
my @fixed-sizes = $vera.fixed-sizes;
is +@fixed-sizes, 0, 'Vera has no fixed sizes';

subtest "charmaps" => {
    plan 2;
    subtest {
        plan 4;
        my $default-cm = $vera.charmap;
        ok $default-cm;
        is $default-cm.platform-id, 3;
        is $default-cm.encoding-id, 1;
        is $default-cm.encoding, FT_ENCODING_UNICODE;
    }, "default charmap";
    my @charmaps = $vera.charmaps;
    is +@charmaps, 2, "available charmaps"
};

subtest "named-info" => {
    my $infos = $vera.named-infos;
    ok $infos;
    # BUG FIX: "ok" treated 22 as the test description, so the element count
    # was never actually checked; "is" performs the intended comparison.
    is $infos.elems, 22;
    my $copy-info = $infos[0];
    like $copy-info.Str, rx/'Copyright'.*'Bitstream, Inc.'/;
    is $copy-info.language-id, 0;
    is $copy-info.platform-id, 1;
    is $copy-info.name-id, 0;
    is $copy-info.encoding-id, 0;
};

subtest "bounding box" => sub {
    my $bb = $vera.bounding-box;
    ok $bb;
    is $bb.x-min, -375, "x-min is correct";
    is $bb.y-min, -483, "y-min is correct";
    is $bb.x-max, 2636, "x-max is correct";
    is $bb.y-max, 1901, "y-max is correct";
};

# Test iterating over all the characters. 256*2 tests.
# Note that this only gets us 256 glyphs, because there are another 10 which
# don't have corresponding Unicode characters and for some reason aren't
# reported by this, and another 2 which have Unicode characters but no glyphs.
# The expected Unicode codes and names of the glyphs are in a text file.

# Set the size to match the em size, so that the values are in font units.
$vera.set-char-size(2048, 2048, 72, 72);

my $character-list-filename = 't/fonts/vera_characters.txt';
my @character-list = $character-list-filename.IO.lines;
my $i = 0;
for $vera.iterate-chars {
    my $line = @character-list[$i++];
    die "not enough characters in listing file '$character-list-filename'"
        unless defined $line;
    my ($unicode, $name) = split /\s+/, $line;
    $unicode = :16($unicode);
    is .char-code, $unicode,
       "glyph $unicode char code in foreach-char()";
    is .name, $name, "glyph $unicode name in foreach-char";
};
is $i, +@character-list, "we aren't missing any glyphs";

# Test metrics on some particular glyphs.
my %glyph-metrics = (
    'A' => { name => 'A', advance => 1401,
             LBearing => 16, RBearing => 17 },
    '_' => { name => 'underscore', advance => 1024,
             LBearing => -20, RBearing => -20 },
    '`' => { name => 'grave', advance => 1024,
             LBearing => 170, RBearing => 375 },
    'g' => { name => 'g', advance => 1300,
             LBearing => 113, RBearing => 186 },
    '|' => { name => 'bar', advance => 690,
             LBearing => 260, RBearing => 260 },
);

# 5*2 tests.
my $chars = %glyph-metrics.keys.sort.join;
$vera.for-glyphs: $chars, -> $glyph {
    my $char = $glyph.Str;
    with %glyph-metrics{$char} {
        is $glyph.name, .<name>,
           "name of glyph '$char'";
        is $glyph.horizontal-advance, .<advance>,
           "advance width of glyph '$char'";
        is $glyph.left-bearing, .<LBearing>,
           "left bearing of glyph '$char'";
        is $glyph.right-bearing, .<RBearing>,
           "right bearing of glyph '$char'";
        is $glyph.width, .<advance> - .<LBearing> - .<RBearing>,
           "width of glyph '$char'";
    }
}

my $glyph-list-filename = 't/fonts/vera_glyphs.txt';
my @glyph-list = $glyph-list-filename.IO.lines;
$i = 0;
for $vera.iterate-glyphs {
    my $line = @glyph-list[$i++];
    die "not enough characters in listing file '$glyph-list-filename'"
        unless defined $line;
    my ($index, $unicode, $name) = split /\s+/, $line;
    is .index, $index, "glyph $index index in iterate-glyphs";
    is .char-code, $unicode,
       "glyph $unicode char code in foreach-char()";
    is .name, $name, "glyph $index name in foreach-glyph";
};
is $i, +@glyph-list, "we aren't missing any glyphs";

is $vera.index-from-glyph-name('G'), 42, 'index-from-glyph-name';
is $vera.glyph-name-from-index(42), 'G', 'glyph-name-from-index';

# Test kerning.
my %kerning = (
    __ => 0,
    AA => 57,
    AV => -131,
    'T.' => -243,
);
for %kerning.keys.sort {
    my ($left, $right) = .comb;
    my $kern = $vera.kerning( $left, $right);
    is $kern.x, %kerning{$_}, "horizontal kerning of '$_'";
    is $kern.y, 0, "vertical kerning of '$_'";
}

lives-ok {$vera.set-pixel-sizes(100, 120)}, 'set pixel sizes';
|
// !CHECK_TYPE
// !DIAGNOSTICS: -UNUSED_EXPRESSION -UNUSED_PARAMETER -UNUSED_VARIABLE
// Enum whose companion object declares valueOf()/values() members returning
// Int, shadowing the names of the compiler-generated enum static members.
enum class Foo {
    FOO;
    companion object {
        fun valueOf(something: String) = 2
        fun values() = 1
    }
}
// Resolution exercise: calls through the enum type itself resolve to the
// synthetic static values()/valueOf(), while explicit Foo.Companion calls
// resolve to the Int-returning companion members.
fun test() {
    Foo.values() checkType { <!UNRESOLVED_REFERENCE!>_<!><Array<Foo>>() }
    Foo.Companion.values() checkType { <!UNRESOLVED_REFERENCE!>_<!><Int>() }
    Foo.valueOf("") checkType { <!UNRESOLVED_REFERENCE!>_<!><Foo>() }
    Foo.Companion.valueOf("") checkType { <!UNRESOLVED_REFERENCE!>_<!><Int>() }
}
|
import React, { useContext } from "react";
import { MainContext } from "../Context/Context";
import ProductCard from "./Product/ProductCard";
import "./ProductsList.scss";
const ProductsList = () => {
const { products, currentPage } = useContext(MainContext);
// sets product count per page and which are showed
const indexOfLastProduct = currentPage * 12;
const indexOfFirstProduct = indexOfLastProduct - 12;
return (
<>
{/* TODO: Productlar buraya gelicek ve map ile dönülüp her biri için ProductCard basılacak */}
<div className="products-main-container" data-testid="product-list">
{products
.slice(indexOfFirstProduct, indexOfLastProduct)
.map((product, index) => {
return <ProductCard key={index} product={product} />;
})}
</div>
{/* <ProductCard /> */}
</>
);
};
export default ProductsList;
|
#!/bin/sh
# Copyright 2017 Paul Twohey. All Rights reserved. See LICENSE file for details
set -e

# Install each required Atom package in turn; set -e aborts the script on the
# first failed install, matching the original one-command-per-line behaviour.
for pkg in atomic-emacs editorconfig atom-ide-ui ide-typescript; do
    apm install "$pkg"
done
|
package de.reebow.loga;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Properties;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * Parses a properties file found on the classpath into a {@code Config},
 * falling back to the default configuration whenever the resource is
 * missing, unreadable, or contains an invalid value.
 */
final class PropertiesConfigParser implements ConfigParser {

    private static final Logger log = LogManager.getLogger();

    /** Classpath-relative name of the properties resource to load. */
    private final String fileName;

    public PropertiesConfigParser(String fileName) {
        this.fileName = fileName;
    }

    @Override
    public Config parseConfig() {
        ClassLoader classLoader = PropertiesConfigParser.class.getClassLoader();
        // BUG FIX: the previous implementation resolved the resource to a URL
        // and opened it via new FileInputStream(url.getFile()), which fails
        // when the resource lives inside a jar or its path contains
        // URL-encoded characters (e.g. spaces). getResourceAsStream() handles
        // both cases and still returns null when the resource is absent.
        try (InputStream inputStream = classLoader.getResourceAsStream(fileName)) {
            if (inputStream == null) {
                return DefaultConfigValues.defaultConfig();
            }
            Properties properties = new Properties();
            properties.load(inputStream);
            LogLevel logLevel = getLogLevel(properties);
            return new Config(logLevel);
        } catch (IOException e) {
            log.error("Could not read properties file, returning default config.", e);
            return DefaultConfigValues.defaultConfig();
        }
    }

    /**
     * Reads the "defaultLogLevel" property. Returns the default level when the
     * key is absent or its value does not name a {@code LogLevel} constant
     * (previously an invalid value escaped as an IllegalArgumentException).
     */
    private LogLevel getLogLevel(Properties properties) {
        String defaultLogLevelString = properties.getProperty("defaultLogLevel");
        if (defaultLogLevelString == null) {
            return DefaultConfigValues.defaultLogLevel();
        }
        try {
            return LogLevel.valueOf(defaultLogLevelString);
        } catch (IllegalArgumentException e) {
            log.error("Unknown defaultLogLevel value, returning default log level.", e);
            return DefaultConfigValues.defaultLogLevel();
        }
    }
}
|
```javascript
// pi :: Float
const pi = Math.PI;
```
|
-- Fixture: a simple table whose "tag" column is indexed with PGroonga using
-- tokenizer 'none', so %% matches whole tag values rather than tokens.
CREATE TABLE memos (
  id integer,
  tag text
);
INSERT INTO memos VALUES (1, 'PostgreSQL');
INSERT INTO memos VALUES (2, 'PostgreSQL Groonga');
INSERT INTO memos VALUES (3, 'Groonga');
CREATE INDEX grnindex ON memos
 USING pgroonga (tag)
  WITH (tokenizer = 'none');
-- Force the planner to use the PGroonga index for the queries below.
SET enable_seqscan = off;
SET enable_indexscan = on;
SET enable_bitmapscan = off;
SELECT id, tag
  FROM memos
 WHERE tag %% 'PostgreSQL';
SELECT id, tag
  FROM memos
 WHERE tag %% 'PostgreSQL Groonga';
DROP TABLE memos;
|
#include <cstdlib>  // for system(); previously relied on transitive inclusion
#include <iostream>
#include <sstream>
#include <string>
using namespace std;
// A movie record: a title plus its release year.
struct m_t {
    string title;  // read as a whole line, so it may contain spaces
    int year;      // parsed from a line of text via stringstream
};
// Prompt for a movie title and year, then echo them back.
int main()
{
    m_t a_movie;
    m_t *p_movie;
    p_movie = & a_movie;
    cout << "enter title: \n";
    getline(cin, p_movie->title);
    cout << "enter year: \n";
    // getline(cin, p_movie->year) error! interger is not ok!!!
    // getline() cannot read directly into an int, so read the whole line
    // into a string and let stringstream do the numeric conversion.
    string mystr;
    getline(cin, mystr);
    // BUG FIX: "year" was never initialized, so a failed extraction left it
    // indeterminate and printing it below was undefined behaviour. Fall back
    // to 0 when the input line is not a number.
    if (!(stringstream (mystr) >> p_movie->year)) {
        p_movie->year = 0;
    }
    cout << "\n you have entered: \n";
    cout << p_movie->title;
    cout << "(" << p_movie->year << ")\n" ;
    system("pause");
    return 0;
}
|
// Factory returning a fresh, empty state object for the store module; a new
// object is built on every call so consumers never share mutable state.
export default function () {
  return {
    data: {},
    status: {},
    date: {},
    nodes: {},
    approval: {},
    scan_ids: {},
    history_items: {}
  }
}
|
import os
import json
import pickle
import numpy as np
import pandas as pd
from scipy.spatial import cKDTree
from progressbar import progressbar
from biopandas.mol2 import split_multimol2, PandasMol2
from sklearn.metrics.pairwise import pairwise_distances
# Seed NumPy's RNG so the target shuffle below is reproducible.
np.random.seed(1)
################################# Variables ####################################
# Map elements to integers.
atom_mapping = {
    "Br": 1,
    "C": 2,
    "Cl": 3,
    "F": 4,
    "H": 5,
    "I": 6,
    "N": 7,
    "N1+": 8,
    "O": 9,
    "O1-": 10,
    "P": 11,
    "S": 12,
    "S1-": 13,
    "Si": 14,
}
# Map ligand bonds to integers. (Need to determine types).
bond_mapping = {"1": 1, "2": 2, "3": 3, "am": 4, "ar": 5}
# Load all of the DUD-E target names.
all_targets = pd.read_csv("dud-e_targets.csv").target_name.tolist()
all_targets = [target.lower() for target in all_targets]
# Remove proteins without bond information.
all_targets.remove("drd3")
all_targets.remove("aa2ar")
all_targets.remove("thrb")
# NOTE(review): only five randomly-chosen targets are kept — presumably a
# sampling/debugging shortcut; confirm before running a full preprocessing pass.
np.random.shuffle(all_targets)
all_targets = all_targets[:5]
################################# Proteins #####################################
## Text
def get_protein_text(target):
    """Read the target's PDB file and return its lines as whitespace-split lists.

    The first five and last three records (header/footer lines) are dropped.
    """
    with open(f"raw/pdb/{target}.pdb", "r") as pdb_file:
        raw_lines = pdb_file.read().split("\n")
    return [line.split() for line in raw_lines][5:-3]
def get_protein_atoms(protein_text):
    """Extract atom coordinates and element attributes from split PDB lines.

    Returns a float (n, 3) array of x/y/z coordinates and a list of
    single-element lists holding each atom's integer element code
    (looked up in the module-level atom_mapping).
    """
    atom_rows = [row[3:] for row in protein_text if row[0] == "ATOM"]
    # After trimming the first three fields, columns 2-4 are the x/y/z
    # coordinates and column 7 is the element symbol.
    atoms = np.array(atom_rows)[:, [2, 3, 4, 7]]
    elements = atoms[:, -1].tolist()
    atom_attribs = [[atom_mapping[element]] for element in elements]
    coords = np.array(atoms[:, [0, 1, 2]]).astype(float)
    return coords, atom_attribs
## Bonds
def get_protein_chemical_bonds(protein_text):
    """Build a directed bond list from the PDB's CONECT records.

    Each bond is emitted once per direction as
    ``[src_index, bond_type, dst_index]`` with 0-based atom indices, where
    bond_type is the code reserved for protein bonds (one past the ligand
    bond codes in bond_mapping).

    BUG FIX: the previous implementation attempted deduplication with
    ``[a, b] not in processed_bonds`` while appending 3-element entries
    ``[a, type, b]``, so the membership test could never match and repeated
    CONECT pairs produced duplicate bonds. A set of (src, dst) tuples is
    used instead, which also replaces the accidental O(n^2) list scan with
    O(1) lookups.
    """
    protein_bond_type = len(bond_mapping.keys()) + 1
    bonds = [entry[1:] for entry in protein_text if entry[0] == "CONECT"]
    # PDB atom serials are 1-based; shift to 0-based indices.
    bonds = [[int(element) - 1 for element in entry] for entry in bonds]
    processed_bonds = []
    seen_pairs = set()
    for record in bonds:
        source = record[0]
        for neighbor in record[1:]:
            # Add both "directions" of the bond, skipping any already seen.
            if (source, neighbor) not in seen_pairs:
                seen_pairs.add((source, neighbor))
                processed_bonds.append([source, protein_bond_type, neighbor])
            if (neighbor, source) not in seen_pairs:
                seen_pairs.add((neighbor, source))
                processed_bonds.append([neighbor, protein_bond_type, source])
    return processed_bonds
def get_bonds_cutoff(atom_coords, bond_cutoff):
    """Get bond list using the proximity-based method.

    Every pair of atoms within ``bond_cutoff`` of each other contributes the
    two directed entries ``[i, type, j]`` and ``[j, type, i]``.

    BUG FIX: the previous version appended BOTH directions while visiting
    BOTH endpoints of a pair, so every directed bond appeared twice. Since
    the neighbor relation is symmetric, emitting only each atom's outgoing
    edges yields every direction exactly once.
    """
    proximity_bond_type = len(bond_mapping.keys()) + 1
    # Create a KD tree out of the atoms - this will help
    # find an atom's nearest neighbors efficiently.
    KD_tree = cKDTree(atom_coords, leafsize=32)
    processed_bonds = []
    # For each atom, find its neighbors within the cutoff,
    # and add the associated "bonds" to processed_bonds.
    for atom_index in range(atom_coords.shape[0]):
        index_atom_atom_coords = atom_coords[atom_index, :]
        neighbor_indices = KD_tree.query_ball_point(index_atom_atom_coords, bond_cutoff)
        # Remove the index of the current atom from the
        # neighbors list, to avoid inducing self-loops.
        neighbor_indices.remove(atom_index)
        processed_bonds += [
            [atom_index, proximity_bond_type, neighbor_index]
            for neighbor_index in neighbor_indices
        ]
    return processed_bonds
## Pocket
def get_ligand_centroid(target):
    """Return the mean (x, y, z) position of the target's crystal ligand."""
    # Parse the mol2 file into a DataFrame; columns 2-4 hold the coordinates.
    mol2_path = f"raw/{target}/crystal_ligand.mol2"
    ligand_df = PandasMol2().read_mol2(mol2_path).df
    coords = ligand_df.iloc[:, 2:5].to_numpy()
    return coords.mean(0)
def get_pocket_indices(target, target_coords, protein_cutoff):
    """Indices of target atoms within ``protein_cutoff`` of the ligand centroid.

    Atoms this close to the crystal ligand's centroid are treated as the
    binding pocket.
    """
    ligand_centroid = get_ligand_centroid(target)

    def dist_from_centroid(target_atom_coords):
        return np.linalg.norm(target_atom_coords - ligand_centroid)

    target_atom_dists = np.apply_along_axis(dist_from_centroid, 1, target_coords)
    # Keep the atoms no farther than the cutoff from the ligand centroid.
    return np.where(target_atom_dists <= protein_cutoff)[0]
def filter_bonds(bonds, pocket_indices):
    """Get the bond list of bonds between atoms in the target's pocket.

    Keeps only bonds whose both endpoints are pocket atoms, and remaps
    endpoint indices to 1-based positions within `pocket_indices`
    (1-based because downstream `combine` subtracts 1 from every
    endpoint when merging graphs).
    """
    # Map each kept original atom index to its 1-based pocket position.
    index_mapping = dict(zip(pocket_indices, range(1, len(pocket_indices) + 1)))
    # Filter and remap in one pass. Dict lookups replace the original
    # O(len(pocket_indices)) membership test per bond endpoint.
    return [
        [index_mapping[entry[0]], entry[1], index_mapping[entry[2]]]
        for entry in bonds
        if entry[0] in index_mapping and entry[2] in index_mapping
    ]
## Put it all together
def process_target(target, bond_mode, protein_cutoff=10, bond_cutoff=5):
    """Get a Data object after processing the target's PDB."""
    protein_text = get_protein_text(target)
    atom_coords, atom_attribs = get_protein_atoms(protein_text)
    # Build the bond list according to the requested bond_mode.
    if bond_mode == "chemical":
        bonds = get_chemical_bonds(protein_text)
    elif bond_mode == "bond_cutoff":
        bonds = get_bonds_cutoff(atom_coords, bond_cutoff)
    else:
        raise Exception("Invalid bond mode.")
    # Restrict everything to atoms inside the binding pocket.
    pocket_indices = get_pocket_indices(target, atom_coords, protein_cutoff)
    pocket_attribs = (np.array(atom_attribs)[pocket_indices, :]).tolist()
    pocket_bonds = filter_bonds(bonds, pocket_indices)
    return {
        "graph": pocket_bonds,
        "node_features": pocket_attribs,
    }
################################## Ligands #####################################
def process_ligand_text(text, num_atoms_target):
    """Get atom attributes and bond information for the current ligand."""
    # Line 2 of a mol2 record carries the atom count as its first field.
    num_atoms = int(text[2].split()[0])
    # Strip the trailing character from every data line past the header.
    cleaned_text = [row[:-1] for row in text[7:]]
    # The first num_atoms cleaned lines are atoms; the rest (after one
    # section-separator line) are bonds.
    atom_rows = cleaned_text[:num_atoms]
    bond_rows = cleaned_text[(num_atoms + 1):]
    # Element symbol is column 6, before any SYBYL subtype suffix ("C.3" -> "C").
    atom_attribs = [
        [atom_mapping[row.split()[5].split(".")[0]]] for row in atom_rows
    ]
    # Encode bonds as integer triples, offsetting endpoint indices so the
    # ligand atoms sit after the target's atoms in the combined graph.
    bonds = []
    for bond in (row.split() for row in bond_rows):
        bonds.append(
            [
                int(bond[1]) + num_atoms_target,
                bond_mapping[bond[3]],
                int(bond[2]) + num_atoms_target,
            ]
        )
    return [atom_attribs, bonds]
def process_ligands(target):
    """Get information for all ligands associated with the target."""
    ligand_list = []
    num_atoms_target = len(protein_dict[target]["node_features"])
    for fname in ["actives_final.mol2", "decoys_final.mol2"]:
        # Actives get label 1, decoys label 0.
        response = int(fname.startswith("a"))
        # split_multimol2 yields (ligand_code, mol2_text_lines) pairs for
        # each molecule in the multi-ligand mol2 file.
        for entry in split_multimol2(f"raw/{target}/{fname}"):
            attribs, bonds = process_ligand_text(entry[1], num_atoms_target)
            ligand_list.append(
                {
                    "id": f"{target}_{entry[0]}",
                    "node_features": attribs,
                    "graph": bonds,
                    "targets": [[response]],
                }
            )
    return ligand_list
################################ Application ###################################
# Build (or load from pickle caches) the protein and ligand dictionaries.
if not os.path.isfile("ligand_dict.pkl") or not os.path.isfile("combined_list.pkl"):
    if not os.path.isfile("protein_dict.pkl"):
        print("Creating protein dictionary")
        protein_dict = {}
        for target in progressbar(all_targets):
            protein_dict[target] = process_target(target, "bond_cutoff")
        with open(r"protein_dict.pkl", "wb") as output_file:
            pickle.dump(protein_dict, output_file)
    else:
        protein_dict = pd.read_pickle("protein_dict.pkl")

if not os.path.isfile("combined_list.pkl"):
    if not os.path.isfile("ligand_dict.pkl"):
        print("Creating ligand dictionary")
        ligand_dict = {}
        for target in progressbar(all_targets):
            ligand_dict[target] = process_ligands(target)
            # (Removed a leftover no-op statement `ligand_dict[target][0]`
            # here — its value was computed and discarded.)
            # Keep all the ligands which are active.
            ligands = [
                ligand
                for ligand in ligand_dict[target]
                if ligand["targets"][0][0]
            ]
            # Create a list of the decoys.
            neg_ligands = [
                ligand
                for ligand in ligand_dict[target]
                if not ligand["targets"][0][0]
            ]
            # Balance classes: sample as many decoys as there are actives.
            ligands += np.random.choice(
                neg_ligands,
                size=len(ligands),
                replace=False,
            ).tolist()
            ligand_dict[target] = ligands
        with open(r"ligand_dict.pkl", "wb") as output_file:
            pickle.dump(ligand_dict, output_file)
    else:
        ligand_dict = pd.read_pickle("ligand_dict.pkl")
def combine(protein, ligand):
    """Merge a protein pocket graph and a ligand graph into one sample.

    Node features are concatenated protein-first; bond endpoints are
    shifted from 1-based to 0-based indexing. The ligand's id and
    targets are carried through unchanged.
    """
    merged_graph = [
        [source - 1, bond_type, destination - 1]
        for source, bond_type, destination in protein["graph"] + ligand["graph"]
    ]
    return {
        "targets": ligand["targets"],
        "id": ligand["id"],
        "graph": merged_graph,
        "node_features": protein["node_features"] + ligand["node_features"],
    }
# Pair every ligand with its target's pocket graph, caching the result.
if not os.path.isfile("combined_list.pkl"):
    print("Combining proteins and ligands")
    combined_list = []
    for target in progressbar(all_targets):
        for ligand in ligand_dict[target]:
            combined_list.append(combine(protein_dict[target], ligand))
    with open(r"combined_list.pkl", "wb") as output_file:
        pickle.dump(combined_list, output_file)
else:
    combined_list = pd.read_pickle("combined_list.pkl")
# Shuffle in place so the train/valid/test splits below are random.
np.random.shuffle(combined_list)
def write_jsonl(name, start_index, end_index):
    """Write a slice of combined_list to `<name>.jsonl`, one JSON object per line."""
    with open(f"{name}.jsonl", "w") as outfile:
        for entry in progressbar(combined_list[start_index:end_index]):
            outfile.write(json.dumps(entry))
            outfile.write('\n')
print(len(combined_list))
# Write the train/validation/test splits. The test split previously used
# an end index of -1, which silently dropped the final entry of
# combined_list; use the full remaining length instead.
if not os.path.isfile("train.jsonl"):
    write_jsonl("train", 0, 2000)
if not os.path.isfile("valid.jsonl"):
    write_jsonl("valid", 2000, 2500)
if not os.path.isfile("test.jsonl"):
    write_jsonl("test", 2500, len(combined_list))
|
import React from "react";
function Loader({ text, isVisible }) {
if (!isVisible) return null;
return <div>{text}</div>;
}
export default Loader;
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Lab09
{
internal class Seat
{
private string Comfortability = "Pleasant";
public string getSeatComfortability()
{
return this.Comfortability;
}
public void setSeatComfortability(string comfort)
{
this.Comfortability = comfort;
}
private bool SeatWarmer = false;
public bool getSeatWarmerStatus()
{
return this.SeatWarmer;
}
public void setSeatWarmer(bool status)
{
this.SeatWarmer = status;
}
}
}
|
import React from 'react';
import { render } from 'react-dom'
import { hashHistory, Route, Router } from 'react-router';
import {App} from './app';
import {ThankYou} from './components/thank-you';
render((
<Router histroy={hashHistory}>
<Route path="/" component={App}></Route>
<Route path="/thank-you" component={ThankYou} />
</Router>
), document.getElementById("dashboard"));
|
import React from "react";
import Personal from '../controllers/Personal';
// Storybook metadata: registers the Personal controller under the
// "Personal wrapper" entry in the stories tree.
export default {
  title: 'Personal wrapper',
  component: Personal
};
// Basic story that renders the Personal controller with no props.
export const Template = () => <Personal />;
|
#include <efsw/DirectorySnapshot.hpp>
#include <efsw/FileSystem.hpp>
namespace efsw {
/// Builds an empty snapshot; callers must invoke init() before scanning.
DirectorySnapshot::DirectorySnapshot()
{
}
/// Builds a snapshot and immediately captures the state of `directory`.
DirectorySnapshot::DirectorySnapshot( std::string directory )
{
	init( directory );
}
/// Nothing to release explicitly; members clean up via their own destructors.
DirectorySnapshot::~DirectorySnapshot()
{
}
/// Captures the directory's own metadata and builds the initial file map.
void DirectorySnapshot::init( std::string directory )
{
	setDirectoryInfo( directory );
	initFiles();
}
/// Returns true while the watched directory still exists on disk.
bool DirectorySnapshot::exists()
{
	return DirectoryInfo.exists();
}
/// Reports every tracked entry as deleted, routing directories and plain
/// files to their respective lists in `Diff`. The Files map itself is left
/// untouched.
void DirectorySnapshot::deleteAll( DirectorySnapshotDiff& Diff )
{
	for ( FileInfoMap::iterator it = Files.begin(); it != Files.end(); it++ )
	{
		const FileInfo& fi = it->second;
		if ( fi.isDirectory() )
		{
			Diff.DirsDeleted.push_back( fi );
		}
		else
		{
			Diff.FilesDeleted.push_back( fi );
		}
	}
}
/// Replaces the cached metadata of the watched directory itself.
void DirectorySnapshot::setDirectoryInfo( std::string directory )
{
	DirectoryInfo = FileInfo( directory );
}
void DirectorySnapshot::initFiles()
{
Files = FileSystem::filesInfoFromPath( DirectoryInfo.Filepath );
FileInfoMap::iterator it = Files.begin();
std::list<std::string> eraseFiles;
/// Remove all non regular files and non directories
for ( ; it != Files.end(); it++ )
{
if ( !it->second.isRegularFile() && !it->second.isDirectory() )
{
eraseFiles.push_back( it->first );
}
}
for ( std::list<std::string>::iterator eit = eraseFiles.begin(); eit != eraseFiles.end(); eit++ )
{
Files.erase( *eit );
}
}
/// Re-reads the watched directory and produces a diff (created, modified,
/// moved, deleted files/dirs) against the previously stored state, updating
/// the internal Files map along the way.
/// NOTE(review): deletion detection only runs when the directory's own
/// FileInfo changed (Diff.DirChanged) — this relies on the directory
/// metadata changing whenever an entry is removed; confirm on all platforms.
DirectorySnapshotDiff DirectorySnapshot::scan()
{
	DirectorySnapshotDiff Diff;
	Diff.clear();
	FileInfo curFI( DirectoryInfo.Filepath );
	/// Detect whether the directory entry itself changed since last scan.
	Diff.DirChanged = DirectoryInfo != curFI;
	if ( Diff.DirChanged )
	{
		DirectoryInfo = curFI;
	}
	/// If the directory was erased, create the events for files and directories deletion
	if ( !curFI.exists() )
	{
		deleteAll( Diff );
		return Diff;
	}
	FileInfoMap files = FileSystem::filesInfoFromPath( DirectoryInfo.Filepath );
	/// Nothing tracked and nothing on disk: no events possible.
	if ( files.empty() && Files.empty() )
	{
		return Diff;
	}
	FileInfo fi;
	FileInfoMap FilesCpy;
	FileInfoMap::iterator it;
	FileInfoMap::iterator fiIt;
	/// FilesCpy tracks entries not re-seen on disk; only needed when the
	/// directory changed, since deletions are only processed in that case.
	if ( Diff.DirChanged )
	{
		FilesCpy = Files;
	}
	for ( it = files.begin(); it != files.end(); it++ )
	{
		fi = it->second;
		/// File existed before?
		fiIt = Files.find( it->first );
		if ( fiIt != Files.end() )
		{
			/// Erase from the file list copy
			FilesCpy.erase( it->first );
			/// File changed?
			if ( (*fiIt).second != fi )
			{
				/// Update the new file info
				Files[ it->first ] = fi;
				/// handle modified event
				if ( fi.isDirectory() )
				{
					Diff.DirsModified.push_back( fi );
				}
				else
				{
					Diff.FilesModified.push_back( fi );
				}
			}
		}
		/// Only add regular files or directories
		else if ( fi.isRegularFile() || fi.isDirectory() )
		{
			/// New file found
			Files[ it->first ] = fi;
			FileInfoMap::iterator fit;
			std::string oldFile = "";
			/// Check if the same inode already existed
			/// (same inode under a different name means a rename/move).
			if ( ( fit = nodeInFiles( fi ) ) != Files.end() )
			{
				oldFile = fit->first;
				/// Avoid firing a Delete event
				FilesCpy.erase( fit->first );
				/// Delete the old file name
				Files.erase( fit->first );
				if ( fi.isDirectory() )
				{
					Diff.DirsMoved.push_back( std::make_pair( oldFile, fi ) );
				}
				else
				{
					Diff.FilesMoved.push_back( std::make_pair( oldFile, fi ) );
				}
			}
			else
			{
				if ( fi.isDirectory() )
				{
					Diff.DirsCreated.push_back( fi );
				}
				else
				{
					Diff.FilesCreated.push_back( fi );
				}
			}
		}
	}
	/// If the directory itself is unchanged, nothing can have been deleted.
	if ( !Diff.DirChanged )
	{
		return Diff;
	}
	/// The files or directories that remains were deleted
	for ( it = FilesCpy.begin(); it != FilesCpy.end(); it++ )
	{
		fi = it->second;
		if ( fi.isDirectory() )
		{
			Diff.DirsDeleted.push_back( fi );
		}
		else
		{
			Diff.FilesDeleted.push_back( fi );
		}
		/// Remove the file or directory from the list of files
		Files.erase( it->first );
	}
	return Diff;
}
/// Looks for a tracked entry that shares fi's inode but has a different
/// path — the signature of a rename/move of the same filesystem node.
/// Returns Files.end() when inodes are unsupported or no match exists.
FileInfoMap::iterator DirectorySnapshot::nodeInFiles( FileInfo& fi )
{
	// Inode comparison is meaningless on platforms without inode support.
	if ( !FileInfo::inodeSupported() )
	{
		return Files.end();
	}
	for ( FileInfoMap::iterator it = Files.begin(); it != Files.end(); it++ )
	{
		if ( it->second.sameInode( fi ) && it->second.Filepath != fi.Filepath )
		{
			return it;
		}
	}
	return Files.end();
}
/// Starts tracking `path`, keyed by its file name within this directory.
void DirectorySnapshot::addFile( std::string path )
{
	std::string name( FileSystem::fileNameFromPath( path ) );
	Files[ name ] = FileInfo( path );
}
/// Stops tracking `path` if it is currently known; no-op otherwise.
void DirectorySnapshot::removeFile( std::string path )
{
	std::string name( FileSystem::fileNameFromPath( path ) );
	FileInfoMap::iterator it = Files.find( name );
	if ( Files.end() != it )
	{
		Files.erase( it );
	}
}
/// Re-keys a tracked file after a rename/move within the directory.
void DirectorySnapshot::moveFile( std::string oldPath, std::string newPath )
{
	removeFile( oldPath );
	addFile( newPath );
}
/// Refreshes the cached FileInfo for `path` (operator[] overwrites in place).
void DirectorySnapshot::updateFile(std::string path)
{
	addFile( path );
}
}
|
/*
@jest-environment jsdom
*/
import {SizedConfig, LineLayerConfig, RectLayerSpec} from '../types'
import {PlotEnv} from './PlotEnv'
import {newTable} from './newTable'
import {COUNT} from '../constants/columnKeys'
import * as lineTransformModule from '../transforms/line'
import * as histogramTransformModule from '../transforms/histogram'
describe('PlotEnv', () => {
  describe('config updates and memoization', () => {
    // Spies let each test assert exactly how often the (expensive)
    // transforms were re-run after config updates.
    let histogramTransformSpy
    let lineTransformSpy
    beforeEach(() => {
      histogramTransformSpy = jest.spyOn(
        histogramTransformModule,
        'histogramTransform'
      )
      lineTransformSpy = jest.spyOn(lineTransformModule, 'lineTransform')
    })
    afterEach(() => {
      histogramTransformSpy.mockRestore()
      lineTransformSpy.mockRestore()
    })
    // Setting a new xDomain must rebuild xScale so that both domain edges
    // map to the padded ends of the pixel range.
    it('updates xScale when xDomain is updated', () => {
      const plotEnv = new PlotEnv()
      const aData = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
      const table = newTable(10).addColumn('a', 'long', 'number', aData)
      const config: SizedConfig = {
        table,
        width: 1000,
        height: 500,
        xDomain: [10, 19],
        onSetXDomain: () => {},
        onResetXDomain: () => {},
        layers: [{type: 'histogram', x: 'a'}],
      }
      plotEnv.config = config
      // Private field access: rangePadding is the pixel inset applied at
      // both ends of the scale's range.
      const rangePadding = plotEnv['rangePadding']
      expect(plotEnv.xScale(10)).toEqual(rangePadding)
      expect(plotEnv.xScale(19)).toEqual(
        1000 - plotEnv.margins.left - plotEnv.margins.right - rangePadding * 2
      )
      plotEnv.config = {...config, xDomain: [10, 28]}
      expect(plotEnv.xScale(10)).toEqual(rangePadding)
      expect(plotEnv.xScale(28)).toEqual(
        1000 - plotEnv.margins.left - plotEnv.margins.right - rangePadding * 2
      )
    })
    // Widening the x domain changes the bins, so the histogram transform
    // must be re-run (once per config assignment).
    it('runs bin stat when x domain changes', () => {
      const plotEnv = new PlotEnv()
      const aData = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
      const table = newTable(10).addColumn('a', 'long', 'number', aData)
      const config: SizedConfig = {
        table,
        width: 1000,
        height: 500,
        xDomain: [10, 19],
        onSetXDomain: () => {},
        onResetXDomain: () => {},
        layers: [{type: 'histogram', x: 'a', binCount: 10}],
      }
      plotEnv.config = config
      // Count of values falling into the first bin of the computed spec.
      const getFirstBinCount = () =>
        (plotEnv.getSpec(0) as RectLayerSpec).table.getColumn(
          COUNT,
          'number'
        )[0]
      expect(getFirstBinCount()).toEqual(1)
      plotEnv.config = {...config, xDomain: [10, 28]}
      expect(getFirstBinCount()).toEqual(2)
      expect(histogramTransformSpy).toHaveBeenCalledTimes(2)
    })
    // Re-mapping x to a different column also requires re-binning.
    it('runs bin stat when histogram layer x mapping changes', () => {
      const plotEnv = new PlotEnv()
      const aData = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
      const bData = [10, 10, 10, 10, 10, 10, 10, 10, 10, 19]
      const table = newTable(10)
        .addColumn('a', 'long', 'number', aData)
        .addColumn('b', 'long', 'number', bData)
      const config: SizedConfig = {
        table,
        width: 1000,
        height: 500,
        layers: [{type: 'histogram', x: 'a', binCount: 10}],
      }
      plotEnv.config = config
      const getFirstBinCount = () =>
        (plotEnv.getSpec(0) as RectLayerSpec).table.getColumn(
          COUNT,
          'number'
        )[0]
      expect(getFirstBinCount()).toEqual(1)
      plotEnv.config = {
        ...config,
        layers: [{type: 'histogram', x: 'b', binCount: 10}],
      }
      expect(getFirstBinCount()).toEqual(9)
      expect(histogramTransformSpy).toHaveBeenCalledTimes(2)
    })
    // A purely cosmetic option (fillOpacity) must hit the memoized result
    // instead of re-running the transform.
    it('does not run bin stat when histogram fillOpacity change', () => {
      const plotEnv = new PlotEnv()
      const aData = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
      const table = newTable(10).addColumn('a', 'long', 'number', aData)
      const config: SizedConfig = {
        table,
        width: 1000,
        height: 500,
        layers: [{type: 'histogram', x: 'a', binCount: 10, fillOpacity: 1}],
      }
      plotEnv.config = config
      // Transform is lazy: it only runs when the spec is first requested.
      expect(histogramTransformSpy).toHaveBeenCalledTimes(0)
      plotEnv.getSpec(0)
      expect(histogramTransformSpy).toHaveBeenCalledTimes(1)
      plotEnv.config = {
        ...config,
        layers: [{type: 'histogram', x: 'a', binCount: 10, fillOpacity: 0.5}],
      }
      plotEnv.getSpec(0)
      expect(histogramTransformSpy).toHaveBeenCalledTimes(1)
    })
    // Changing only line interpolation must keep user-set (uncontrolled)
    // domain state intact.
    it('updating line interpolation should not reset the x domain', () => {
      const plotEnv = new PlotEnv()
      const aData = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
      const bData = [10, 10, 10, 10, 10, 10, 10, 10, 10, 19]
      const table = newTable(10)
        .addColumn('a', 'long', 'number', aData)
        .addColumn('b', 'long', 'number', bData)
      const config: SizedConfig = {
        table,
        width: 1000,
        height: 500,
        layers: [{type: 'line', x: 'b', y: 'a', interpolation: 'linear'}],
      }
      plotEnv.config = config
      plotEnv.xDomain = [12, 15]
      expect(plotEnv.xDomain).toEqual([12, 15])
      plotEnv.config = {
        ...config,
        layers: [
          {...config.layers[0], interpolation: 'monotoneX'} as LineLayerConfig,
        ],
      }
      expect(plotEnv.xDomain).toEqual([12, 15])
    })
    // Unlike histograms, line specs don't depend on the x domain, so a
    // domain change must reuse the memoized transform result.
    it('does not run line stat when x domain changes', () => {
      const plotEnv = new PlotEnv()
      const aData = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
      const bData = [10, 10, 10, 10, 10, 10, 10, 10, 10, 19]
      const table = newTable(10)
        .addColumn('a', 'long', 'number', aData)
        .addColumn('b', 'long', 'number', bData)
      const config: SizedConfig = {
        table,
        width: 1000,
        height: 500,
        xDomain: [10, 19],
        onSetXDomain: () => {},
        onResetXDomain: () => {},
        layers: [{type: 'line', x: 'b', y: 'a'}],
      }
      plotEnv.config = config
      expect(lineTransformSpy).toHaveBeenCalledTimes(0)
      plotEnv.getSpec(0)
      expect(lineTransformSpy).toHaveBeenCalledTimes(1)
      plotEnv.config = {...config, xDomain: [12, 14]}
      plotEnv.getSpec(0)
      expect(lineTransformSpy).toHaveBeenCalledTimes(1)
    })
    // Re-mapping x/y invalidates a user-set domain: it must fall back to
    // the data-derived domain.
    it('resets uncontrolled domain state when x mapping changes', () => {
      const plotEnv = new PlotEnv()
      const aData = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
      const bData = [10, 10, 10, 10, 10, 10, 10, 10, 10, 19]
      const table = newTable(10)
        .addColumn('a', 'long', 'number', aData)
        .addColumn('b', 'long', 'number', bData)
      const config: SizedConfig = {
        table,
        width: 1000,
        height: 500,
        layers: [{type: 'line', x: 'b', y: 'a'}],
      }
      plotEnv.config = config
      expect(plotEnv.xDomain).toEqual([10, 19])
      plotEnv.xDomain = [12, 16]
      expect(plotEnv.xDomain).toEqual([12, 16])
      plotEnv.config = {
        ...config,
        layers: [{type: 'line', x: 'a', y: 'b'}], // Swap mappings
      }
      expect(plotEnv.xDomain).toEqual([10, 19])
    })
  })
})
|
package com.zhuinden.simplestackexamplemvvm.features.statistics
import com.zhuinden.simplestackexamplemvvm.data.tasks.TasksDataSource
/**
 * View model for the statistics screen.
 *
 * @param tasksDataSource source of task data; only its
 *   active/completed task streams are retained (presumably change-aware,
 *   per the `WithChanges` naming — confirm against TasksDataSource).
 */
class StatisticsViewModel(tasksDataSource: TasksDataSource) {
    // Stream of active (incomplete) tasks.
    val activeTasks = tasksDataSource.activeTasksWithChanges
    // Stream of completed tasks.
    val completedTasks = tasksDataSource.completedTasksWithChanges
}
|
; RUN: llc < %s -march=x86-64 -mcpu=core2 -x86-early-ifcvt -enable-misched \
; RUN: -misched=shuffle -misched-bottomup -verify-machineinstrs \
; RUN: | FileCheck %s
; REQUIRES: asserts
;
; Interesting MachineScheduler cases.
;
; FIXME: There should be an assert in the coalescer that we're not rematting
; "not-quite-dead" copies, but that breaks a lot of tests <rdar://problem/11148682>.
declare void @llvm.memcpy.p0i8.p0i8.i64(i8* nocapture, i8* nocapture, i64, i32, i1) nounwind
; From oggenc.
; After coalescing, we have a dead superreg (RAX) definition.
;
; CHECK: xorl %esi, %esi
; CHECK: movl $32, %ecx
; CHECK: rep;movsl
; The 128-byte, 4-aligned memcpy below is what lowers to the rep;movsl
; sequence matched by the CHECK lines above.
define fastcc void @_preextrapolate_helper() nounwind uwtable ssp {
entry:
  br i1 undef, label %for.cond.preheader, label %if.end
for.cond.preheader:                               ; preds = %entry
  call void @llvm.memcpy.p0i8.p0i8.i64(i8* undef, i8* null, i64 128, i32 4, i1 false) nounwind
  unreachable
if.end:                                           ; preds = %entry
  ret void
}
; The machine verifier checks that EFLAGS kill flags are updated when
; the scheduler reorders cmovel instructions.
;
; CHECK: test
; CHECK: cmovel
; CHECK: cmovel
; CHECK: call
; The two phis become cmovel instructions; reordering them exercises the
; EFLAGS kill-flag update path checked by the machine verifier.
define void @foo(i32 %b) nounwind uwtable ssp {
entry:
  %tobool = icmp ne i32 %b, 0
  br i1 %tobool, label %if.then, label %if.end
if.then:                                          ; preds = %entry
  br label %if.end
if.end:                                           ; preds = %if.then, %entry
  %v1 = phi i32 [1, %entry], [2, %if.then]
  %v2 = phi i32 [3, %entry], [4, %if.then]
  call void @bar(i32 %v1, i32 %v2)
  ret void
}
declare void @bar(i32,i32)
|
# Sobre os Dados
Nos links abaixo você encontra descrições detalhadas dos dados que oferecemos via [IPT](http://ipt.jbrj.gov.br/).
- Dados de Coleções
- Coleções Preservadas
- [Herbário RB](http://dadoswiki.jbrj.gov.br/doku.php?id=colecoes:herbario)
- [Banco de DNA](http://dadoswiki.jbrj.gov.br/doku.php?id=colecoes:dna)
- [Carpoteca](http://dadoswiki.jbrj.gov.br/doku.php?id=colecoes:carpoteca)
- [Etnobotânica](http://dadoswiki.jbrj.gov.br/doku.php?id=colecoes:etno)
- [Xiloteca](http://dadoswiki.jbrj.gov.br/doku.php?id=colecoes:xiloteca)
- [Coleção Spirit (em meio líquido)](http://dadoswiki.jbrj.gov.br/doku.php?id=colecoes:spirit)
- Coleções Vivas
- [Coleção Micológica viva](http://dadoswiki.jbrj.gov.br/doku.php?id=colecoes:micologia)
- [Banco de Sementes](http://dadoswiki.jbrj.gov.br/doku.php?id=colecoes:bancosementes)
- Dados de Projetos
- [Lista Vermelha da Flora - CNCFlora](http://dadoswiki.jbrj.gov.br/doku.php?id=projetos:cncflora)
- [Herbário Virtual - Reflora](http://dadoswiki.jbrj.gov.br/doku.php?id=projetos:herbariovirtual)
- [Lista de Espécies da Flora do Brasil](http://dadoswiki.jbrj.gov.br/doku.php?id=projetos:florabr)
- [Flora do Rio de Janeiro](http://dadoswiki.jbrj.gov.br/doku.php?id=projetos:florarj)
## Ainda sobre os dados:
- [Dicionários de Classes](http://dadoswiki.jbrj.gov.br/doku.php?id=classes)
- [Termos e Vocabulários Adotados](http://vocab.jbrj.gov.br/)
- [Padrões de Dados Utilizados](http://dadoswiki.jbrj.gov.br/doku.php?id=padroes)
|
KeysMap file format
====================
The format of the file is similar to, but more flexible than, a Java properties file.
The syntax of each line of the properties file is:
groupId:artifactId:packaging:version=pgpKeyFingerprint
Where
- `groupId` - groupId of Maven artifact, this field is required, but can be `*` for match any
- `artifactId` - artifactId of Maven artifact - optional
- `packaging` - packaging of Maven artifact, eg. `pom`, `jar` - optional
- `version` - version of Maven artifact, this field supports the Maven version range syntax - optional
- `pgpKeyFingerprint` - PGP key fingerprints in hex format which are allowed to sign artifact,
multiple keys can be supplied separated by comma
PGP keys special values
----------------------
`pgpKey` field can contain multiple PGP fingerprints, separated by a comma,
each fingerprint must start with `0x`. Whitespace is allowed in hex fingerprint.
`pgpKey` can also contain special values:
- `*`, `any` - match any key for artifact
- `noSig` - allow artifact without signature
- `badSig` - allow artifact with invalid signature
- `noKey` - allow the artifact's key to be absent from public key servers
The order of items and matching
-------------------------------
The order of items is not important.
In first step items are filtered for matching artifact and then fingerprints or special key value are searched until first is found.
Comments
--------
Everything from `#` (hash sign) to the end of the line is a comment and will be skipped.
Multiline
---------
If a line ends with ` \ ` (backslash), the line break will be removed and the next line will be joined.
Whitespace and comments are allowed after ` \ `.
Examples
--------
match any artifact from group with any packaging and version
test.groupId = 0x1234567890123456789012345678901234567890
---
match any artifact from group and any subgroups with any packaging and version
test.groupId.* = 0x1234567890123456789012345678901234567890
---
match a specific artifact with any packaging and version
test.groupId:artifactId = 0x1234567890123456789012345678901234567890
---
match a specific artifact with packaging and with any version
test.groupId:artifactId:jar = 0x1234567890123456789012345678901234567890
---
match a specific artifact with packaging and version
test.groupId:artifactId:jar:1.0.0 = 0x1234567890123456789012345678901234567890
---
match a specific artifact with packaging and version range
test.groupId:artifactId:jar:[1.0.0,2.0.0) = 0x1234567890123456789012345678901234567890
---
match a specific artifact with the version and any packaging
test.groupId:artifactId:1.0.0 = 0x1234567890123456789012345678901234567890
---
match a specific artifact with any version and packaging and many keys
test.groupId:artifactId = 0x1234567890123456789012345678901234567890, 0x1234567890123456789012345678901234567890, \
0x1234567890123456789012345678901234567890
---
allow bad signature for a specific artifact with version
test.groupId:artifactId = 0x1234567890123456789012345678901234567890
test.groupId:artifactId:pom:1.0.0 = badSig
---
match specific artifact with any packaging and version and allow that signature will not exist
test.groupId:artifactId = 0x1234567890123456789012345678901234567890, noSig
---
define fingerprints for group and one for specific artifact
test.groupId = 0x1111222233334444555566667777888899990000
test.groupId:artifactId = 0x0000999988887777666655554444333322221111
in this case every artifact from group `test.groupId` can be signed by key `0x1111222233334444555566667777888899990000`.
artifact `test.groupId:artifactId` can be signed by `0x0000999988887777666655554444333322221111`
and also by `0x1111222233334444555566667777888899990000`.
---
comments
# my comments
test.groupId:artifactId = \ #
0x1234567890123456789012345678901234567890, \ # first key
0x1234567890123456789012345678901234567890, \ # second key
0x1234567890123456789012345678901234567890 # end
---
External resources
------------------
- [Maven GAV - naming conventions](https://maven.apache.org/guides/mini/guide-naming-conventions.html)
- [Version range syntax](https://maven.apache.org/enforcer/enforcer-rules/versionRanges.html)
|
/*
Copyright 2021 Volt Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use crate::core::utils::{enable_ansi_support, errors::VoltError};
use clap::ArgMatches;
use clap::Parser;
use dirs::home_dir;
use package_spec::{parse_package_spec, PackageSpec};
use sha1::Digest;
use sha2::Sha512;
use ssri::{Algorithm, Integrity};
use std::{env, path::PathBuf};
/// Global CLI options shared by all volt subcommands.
#[derive(Debug, Clone, Parser)]
pub struct VoltConfig {
    /// Path to current working directory
    #[clap(short, long)]
    cwd: Option<PathBuf>,
}
impl VoltConfig {
    pub const OS: &'static str = env::consts::OS;
    pub const VOLT_HOME: &'static str = ".volt";
    pub const VOLT_LOCK: &'static str = "volt.lock";

    /// Path to the user's home directory.
    pub fn home(&self) -> miette::Result<PathBuf> {
        Ok(dirs::home_dir().ok_or(VoltError::GetHomeDirError)?)
    }

    /// Return the current directory (defaults to `.` if not provided)
    pub fn cwd(&self) -> miette::Result<PathBuf> {
        // Only consult the environment when no explicit cwd was given.
        // The previous `unwrap_or({ env::current_dir()? ... })` evaluated
        // its argument eagerly, so a failing `env::current_dir()` returned
        // an error even when `--cwd` had been supplied.
        if let Some(cwd) = &self.cwd {
            return Ok(cwd.clone());
        }

        let cwd = env::current_dir().map_err(|e| VoltError::EnvironmentError {
            env: "CURRENT_DIRECTORY".to_string(),
            source: e,
        })?;

        Ok(cwd)
    }

    /// Path to the volt lockfile (defaults to `./volt.lock`)
    pub fn lockfile(&self) -> miette::Result<PathBuf> {
        Ok(self.cwd()?.join(Self::VOLT_LOCK))
    }

    /// Path to the `node_modules` directory (defaults to `./node_modules`)
    pub fn node_modules(&self) -> miette::Result<PathBuf> {
        Ok(self.cwd()?.join("node_modules"))
    }

    /// Path to the config directory (defaults to `~/.volt`)
    pub fn volt_home(&self) -> miette::Result<PathBuf> {
        Ok(self.home()?.join(Self::VOLT_HOME))
    }

    /// Calculate the hash of a tarball
    ///
    /// ## Examples
    /// ```rs
    /// calc_hash(bytes::Bytes::new(), ssri::Algorithm::Sha1)?;
    /// ```
    /// ## Returns
    /// * Result<String>
    pub fn calc_hash(data: &bytes::Bytes, algorithm: Algorithm) -> miette::Result<String> {
        // `if` as an expression replaces the deferred-initialization
        // `let integrity;` pattern.
        let integrity = if algorithm == Algorithm::Sha1 {
            let hash = ssri::IntegrityOpts::new()
                .algorithm(Algorithm::Sha1)
                .chain(&data)
                .result();

            format!("sha1-{}", hash.to_hex().1)
        } else {
            ssri::IntegrityOpts::new()
                .algorithm(Algorithm::Sha512)
                .chain(&data)
                .result()
                .to_string()
        };

        Ok(integrity)
    }
}
|
using Model;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
// UI element for player unit presentation and base actions
public sealed class UnitPanel : MonoBehaviour {

    [SerializeField]
    private Text unitName; // Set from editor

    [SerializeField]
    private Text lifeCount; // Set from editor

    [SerializeField]
    private Button cancelButton; // Set from editor

    // Removed the unused private field `OnCancelButtonClickCallback`:
    // nothing in this class read or assigned it.

    // Raised when the player presses the cancel button.
    public event Action OnCancelUnitClick;

    // Shows the panel populated with the unit's name and hit points,
    // or hides the whole panel when unit is null.
    public void Show(Unit unit = null) {
        if (unit == null) {
            gameObject.SetActive(false);
            return;
        }

        gameObject.SetActive(true);
        unitName.text = unit.Id;
        lifeCount.text = string.Format("{0}/{1}", unit.HitPoints, unit.MaxHitPoints);
    }

    private void Start () {
        cancelButton.onClick.AddListener(OnCancelButtonClickHandler);
    }

    // InvokeSafe presumably guards against a null event (no subscribers) —
    // confirm against the project's extension method.
    private void OnCancelButtonClickHandler() {
        OnCancelUnitClick.InvokeSafe();
    }
}
|
# Module for handling user-related actions shared across routes (DRY).
module UserHelpers
  # Keeps the submitted user params in the session, e.g. so a failed form
  # can be repopulated on the next request.
  def set_session_create_values
    session[:user] = params[:user]
  end
  # Builds (but does not save) a new User from the posted form params.
  # NOTE(review): password_confirmation is set from the same raw password,
  # so no separate confirmation field is actually validated — confirm intended.
  def process_new
    postuser = params[:user]
    user = {
      first_name: postuser[:first_name], last_name: postuser[:last_name],
      username: postuser[:username], email: postuser[:email],
      password: postuser[:password],
      password_confirmation: postuser[:password],
      biography: postuser[:biography], is_active: true,
      created_at: DateTime.now, updated_at: DateTime.now
    }
    @user = User.new(user)
  end
  # Applies the editable profile fields to the user with the given id.
  def process_update(id)
    @user = User.find_by_id(id)
    updated_values = {
      first_name: params[:user][:first_name],
      last_name: params[:user][:last_name], is_active: params[:user][:is_active],
      biography: params[:user][:biography], updated_at: DateTime.now
    }
    @user.update(updated_values)
  end
  # Grants role_id to user_id; returns false (and sets @error) when the
  # role is already held.
  # NOTE(review): the duplicate check (check_role) runs against
  # session[:user_id], not the user_id passed here — confirm this is
  # intended when granting roles to a user other than the logged-in one.
  def add_role(role_id, user_id)
    if check_role(role_id)
      @error = 'User has role already'
      false
    else
      UserRole.create(
        user_id: user_id, role_id: role_id, is_active: true
      )
      true
    end
  end
  # True when the session user already holds an active role_id.
  def check_role(role_id)
    @user_role = UserRole.where(
      user_id: session[:user_id], role_id: role_id,
      is_active: true
    ).first
    return true if @user_role
    false
  end
  # Loads a UserRole row by primary key into @user_role.
  def get_role_by_id(record_id)
    @user_role = UserRole.where(
      id: record_id
    ).first
  end
  # Re-points an existing UserRole record at a different role.
  def edit_role(role_id, record_id)
    get_role_by_id(record_id)
    @user_role.update(role_id: role_id)
    @user_role.save
  end
  # Persists @user and redirects. On 'Add', also grants the default role
  # (id 3) and sends the welcome mail; on validation failure, flashes the
  # errors and redirects to the error page instead.
  def save_process(typeprocess, page_redirect, page_redirect_error)
    if @user.save
      session[:user_id] = @user.id
      if typeprocess == 'Add'
        add_role(3, @user.id)
        flash[:success] = 'Profile successfully created!'
        construct_new_mail_send(@user.email, @user.first_name, @user.last_name)
      else
        flash[:success] = 'Profile successfully updated!'
      end
      redirect to page_redirect
    else
      flash[:error] = @user.errors.full_messages
      redirect to page_redirect_error
    end
  end
end
|
class AlertLogsController < ApplicationController
  before_action :set_alert_log, only: [:show, :edit, :update, :destroy]

  # GET /alert_logs
  # GET /alert_logs.json
  def index
    # Only logs whose parent alert belongs to the current user.
    # flat_map concatenates every alert's logs in one pass, replacing the
    # manual Array#<< / flatten! accumulation loop.
    @alert_logs = Alert.where(user_id: current_user.id).flat_map(&:alert_logs)
  end

  # GET /alert_logs/1
  # GET /alert_logs/1.json
  def show
  end

  # GET /alert_logs/new
  def new
    @alert_log = AlertLog.new
  end

  # GET /alert_logs/1/edit
  def edit
  end

  # POST /alert_logs
  # POST /alert_logs.json
  def create
    @alert_log = AlertLog.new(alert_log_params)

    respond_to do |format|
      if @alert_log.save
        format.html { redirect_to @alert_log, notice: 'Alert log was successfully created.' }
        format.json { render :show, status: :created, location: @alert_log }
      else
        format.html { render :new }
        format.json { render json: @alert_log.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /alert_logs/1
  # PATCH/PUT /alert_logs/1.json
  def update
    respond_to do |format|
      if @alert_log.update(alert_log_params)
        format.html { redirect_to @alert_log, notice: 'Alert log was successfully updated.' }
        format.json { render :show, status: :ok, location: @alert_log }
      else
        format.html { render :edit }
        format.json { render json: @alert_log.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /alert_logs/1
  # DELETE /alert_logs/1.json
  def destroy
    @alert_log.destroy
    respond_to do |format|
      format.html { redirect_to alert_logs_url, notice: 'Alert log was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  # Intentionally empty action; the view does the work.
  def check
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_alert_log
    @alert_log = AlertLog.find(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  def alert_log_params
    params.require(:alert_log).permit(:message, :status, :recipient, :datetime, :alert_id)
  end
end
|
import React from 'react'
import { SourceIcon } from '../../../icons/FontAwesomeIcons'
import FlexPair from '../../designsystem/FlexPair'
import SourceProps from './SourceProps'
/**
 * Generic source renderer used when no specialised source component applies:
 * shows the source icon next to the raw value text.
 */
export default function FallbackSource(props: SourceProps) {
  const { value } = props

  return (
    <FlexPair data-testid='text-source'>
      <SourceIcon color='white' />
      <span>{value}</span>
    </FlexPair>
  )
}
|
// Copyright 2021 ETH Zurich and University of Bologna.
// Solderpad Hardware License, Version 0.51, see LICENSE for details.
// SPDX-License-Identifier: SHL-0.51
#include <tb_lib.hh>
namespace sim {

// TODO(zarubaf): Auto-generate
// Static boot configuration handed to the simulated cluster at startup.
const BootData BOOTDATA = {
    .boot_addr = 0x1000000,  // address the cores start fetching from
    .core_count = 9,         // number of simulated cores
    .hartid_base = 0,        // hart ID assigned to the first core
    .tcdm_start = 0x100000,  // TCDM address range start
    .tcdm_end = 0x120000,    // TCDM address range end — presumably exclusive; confirm against tb_lib
};

}  // namespace sim
|
import 'package:apklis_direct_payment_checker/apklis_direct_payment_checker.dart';
import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:package_info_plus/package_info_plus.dart';
void main() {
  const MethodChannel channel = MethodChannel('apklis_direct_payment_checker');

  TestWidgetsFlutterBinding.ensureInitialized();

  setUp(() {
    // Stub the native side: every platform call reports an unpaid app
    // owned by user 'example'.
    channel.setMockMethodCallHandler((MethodCall methodCall) async {
      return {
        'paid': false,
        'username': 'example',
      };
    });
    // Provide fake package metadata so the checker can resolve the current
    // package id without touching the real platform.
    PackageInfo.disablePackageInfoPlatformOverride = true;
    PackageInfo.setMockInitialValues(
      appName: 'App Example',
      packageName: 'com.example.nova.prosalud',
      version: '0.0.1',
      buildNumber: '1',
      buildSignature: '',
    );
  });

  tearDown(() {
    // Undo both mocks so other test files see real platform behaviour.
    channel.setMockMethodCallHandler(null);
    PackageInfo.disablePackageInfoPlatformOverride = false;
  });

  test('isPurchased with packageId', () async {
    const packageId = 'com.example.nova.prosalud';
    final status = await ApklisDirectPaymentChecker.isPurchased(packageId);
    expect(status.paid, false);
    expect(status.username, 'example');
  });

  test('isPurchased without packageId', () async {
    // Without an explicit id the checker presumably falls back to the mocked
    // PackageInfo package name — confirm against the plugin implementation.
    final status = await ApklisDirectPaymentChecker.isPurchased();
    expect(status.paid, false);
    expect(status.username, 'example');
  });
}
|
using UnityEngine;
using System.Collections;
/// <summary>
/// Endless-scroller helper: when a piece of scenery passes through this
/// trigger, it is teleported forward by <c>amount</c> so it can be reused.
/// </summary>
public class ScrollScript : MonoBehaviour
{
    public float amount = 54.6f; // amount to move the scenery forward (world units on x)

    void OnTriggerEnter2D(Collider2D other)
    {
        // CompareTag is the recommended Unity idiom: it avoids allocating a
        // string for .tag and logs an error for undefined tags instead of
        // silently comparing unequal.
        if (other.CompareTag("Scenery"))
        {
            // Shift the scenery forward on the x-axis by `amount`.
            Vector3 pos = other.transform.position;
            pos.x += amount;
            other.transform.position = pos;
        }
    }
}
|
# ShadowsocksX
# 下载程序
在用户中心快速配置处点击下载
https://xueyun.club/ssr-download/ShadowsocksX-NG-R8.dmg
# 配置开始
1.复制订阅链接;
[cinwell website](/sublink?type=ssr ':include :type=markdown')
2.复制订阅链接↑,下载完成,运行 ShadowsocksX-NG-R8,点击任务栏托盘图标、点击编辑订阅;

3.将复制的订阅填入「订阅地址」内,并点击ok保存;

4.再次点击任务栏纸飞机图标,选择此处的更新手动订阅,并将打开时自动更新勾勾打上;

5.更新订阅完毕后即会有节点出现,再选中需要连接的线路;

6.推荐选择PAC模式,再单击打开Shadowsocks;

7.即科学上网成功;

|
# Connection settings for the database backing TeamCity.
default[:teamcity][:database][:host]     = '127.0.0.1'
default[:teamcity][:database][:name]     = 'teamcity'
default[:teamcity][:database][:username] = 'teamcity'
|
part of axmvvm.models;
/// Information about the visible view.
/// Information about the visible view, captured once at construction time.
class LayoutInformation {
  Size _deviceSize;
  bool _isDeviceInPortrait;
  MediaQueryData _deviceMediaQuery;
  ThemeData _theme;

  LayoutInformation(BuildContext context) {
    // Resolve the MediaQuery a single time and derive everything from it.
    final MediaQueryData mediaQuery = MediaQuery.of(context);
    _deviceMediaQuery = mediaQuery;
    _deviceSize = mediaQuery.size;
    _isDeviceInPortrait = mediaQuery.orientation == Orientation.portrait;
    _theme = Theme.of(context);
  }

  /// True when the device was in portrait orientation at capture time.
  bool get isDeviceInPortrait => _isDeviceInPortrait;

  /// True on iOS or macOS.
  bool get isDeviceApple => Platform.isIOS || Platform.isMacOS;

  /// Heuristic phone-vs-tablet classification based on the media query.
  bool get isDeviceSmartPhone => SmartphoneDetector.isSmartPhone(_deviceMediaQuery);

  /// Operating system name as reported by dart:io.
  String get deviceOS => Platform.operatingSystem;

  Size get deviceSize => _deviceSize;
  MediaQueryData get deviceMediaQuery => _deviceMediaQuery;
  ThemeData get theme => _theme;
}
|
"""
    @llvmir2svg f(args...) [filepath]

Render the LLVM IR for the call `f(args...)` to an SVG file via
`llvm2graphfile`. Each argument is evaluated exactly once into a hygienic
temporary so side effects are not duplicated by `Base.typesof`.
"""
macro llvmir2svg(expr,filepath="")
    # Fail loudly on unsupported expressions; previously the macro silently
    # expanded to `nothing`.
    expr.head == :call || throw(ArgumentError("@llvmir2svg expects a function call expression, got head :$(expr.head)"))
    assigns = []
    vars = []
    for i in expr.args[2:end]
        var = gensym()
        push!(vars, var)
        push!(assigns, :($var = $(esc(i))))
    end
    return quote
        $(assigns...)
        llvm2graphfile($(esc(expr.args[1])), Base.typesof($(vars...)), $(esc(filepath)), "svg")
    end
end
"""
    @llvmir2html f(args...) [filepath]

Render the LLVM IR for the call `f(args...)` to an HTML file via
`llvm2graphfile`. Each argument is evaluated exactly once into a hygienic
temporary so side effects are not duplicated by `Base.typesof`.
"""
macro llvmir2html(expr,filepath="")
    # Fail loudly on unsupported expressions; previously the macro silently
    # expanded to `nothing`.
    expr.head == :call || throw(ArgumentError("@llvmir2html expects a function call expression, got head :$(expr.head)"))
    assigns = []
    vars = []
    for i in expr.args[2:end]
        var = gensym()
        push!(vars, var)
        push!(assigns, :($var = $(esc(i))))
    end
    return quote
        $(assigns...)
        llvm2graphfile($(esc(expr.args[1])), Base.typesof($(vars...)), $(esc(filepath)), "html")
    end
end
|
-- RedefineTables
-- SQLite cannot add a column with its constraints via ALTER TABLE, so each
-- table is rebuilt: create replacement, copy rows, drop old, rename back.
PRAGMA foreign_keys=OFF;

-- "Directory" gains a "valid" flag; existing rows default to true.
CREATE TABLE "new_Directory" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "name" TEXT NOT NULL,
    "fullPath" TEXT NOT NULL,
    "valid" BOOLEAN NOT NULL DEFAULT true
);
INSERT INTO "new_Directory" ("id", "name", "fullPath") SELECT "id", "name", "fullPath" FROM "Directory";
DROP TABLE "Directory";
ALTER TABLE "new_Directory" RENAME TO "Directory";
CREATE UNIQUE INDEX "autoindex_Directory_1" ON "Directory"("name", "fullPath");
CREATE INDEX "dir_name" ON "Directory"("name");

-- "File" gains the same "valid" flag; the directory FK is preserved.
CREATE TABLE "new_File" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "name" TEXT NOT NULL,
    "fullPath" TEXT NOT NULL,
    "dirId" INTEGER,
    "valid" BOOLEAN NOT NULL DEFAULT true,
    FOREIGN KEY ("dirId") REFERENCES "Directory" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_File" ("id", "name", "fullPath", "dirId") SELECT "id", "name", "fullPath", "dirId" FROM "File";
DROP TABLE "File";
ALTER TABLE "new_File" RENAME TO "File";
CREATE UNIQUE INDEX "autoindex_File_2" ON "File"("name", "dirId");
CREATE UNIQUE INDEX "autoindex_File_1" ON "File"("name", "fullPath");
CREATE INDEX "file_name" ON "File"("name");

-- Validate the rebuilt FKs before re-enabling enforcement.
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
import Prelude
import qualified Control.Exception as E
import Control.Monad
import Data.Aeson
import Data.Aeson.Types (Parser)
import qualified Data.ByteString.Lazy as BL
import Data.Char (isSpace, toLower)
import Data.List (isInfixOf, sort)
import qualified Data.Map as M
import System.Directory
import System.Environment (getArgs)
import System.Exit
import System.FilePath
import System.IO.Temp (withSystemTempDirectory)
import System.Process
import Text.CSL
import Text.CSL.Compat.Pandoc (writeHtmlString)
import Text.CSL.Reference
import Text.CSL.Style hiding (Number)
import Text.Pandoc (Block (..), Format (..), Inline (..),
Pandoc (..), bottomUp, nullMeta)
import qualified Text.Pandoc.UTF8 as UTF8
import Text.Printf
-- | One machine-readable fixture from the citeproc-js test suite.
data TestCase = TestCase{
    testMode          :: Mode          -- mode
  , testBibopts       :: BibOpts       -- bibsection
  , testCitations     :: [CiteObject]  -- citations
  , testCitationItems :: Citations     -- citation-items
  , testCsl           :: Style         -- csl
  , testAbbreviations :: Abbreviations -- abbreviations
  , testReferences    :: [Reference]   -- input
  , testResult        :: String        -- result
  } deriving (Show)

-- | Rendering mode requested by a fixture.
data Mode = CitationMode
          | CitationRTFMode
          | BibliographyMode
          | BibliographyHeaderMode
          | BibliographyNoSortMode
          deriving Show

instance FromJSON Mode where
  parseJSON (String "citation")            = return CitationMode
  parseJSON (String "citation-rtf")        = return CitationRTFMode
  parseJSON (String "bibliography")        = return BibliographyMode
  parseJSON (String "bibliography-header") = return BibliographyHeaderMode
  parseJSON (String "bibliography-nosort") = return BibliographyNoSortMode
  parseJSON _                              = fail "Unknown mode"

instance FromJSON TestCase where
  parseJSON (Object v) = TestCase <$>
              v .:  "mode" <*>
              v .:? "bibsection" .!= Select [] [] <*>
              ((v .: "citations") >>= parseCitations) <*>
              v .:? "citation_items" .!= [] <*>
              (parseCSL <$> (v .: "csl")) <*>
              v .:? "abbreviations" .!= (Abbreviations M.empty) <*>
              v .:  "input" <*>
              v .:  "result"
    -- "citations" may be absent or malformed in some fixtures; treat
    -- anything that is not a JSON array as an empty citation list.
    where parseCitations :: Data.Aeson.Value -> Parser [CiteObject]
          parseCitations x@Array{} = parseJSON x
          parseCitations _         = return []
  parseJSON _ = fail "Could not parse test case"

-- | A single citation cluster from the fixture's "citations" array.
newtype CiteObject =
        CiteObject { unCiteObject :: [Cite] } deriving Show

instance FromJSON CiteObject where
  parseJSON (Array v) =
    -- Fixtures encode a citation as [cluster-object, notes, notes].
    case fromJSON (Array v) of
         Success [Object x, Array _, Array _] ->
                            CiteObject <$> x .: "citationItems"
         Error e         -> fail $ "Could not parse CiteObject: " ++ e
         x               -> fail $ "Could not parse CiteObject" ++ show x
  parseJSON x = fail $ "Could not parse CiteObject " ++ show x

#if MIN_VERSION_aeson(0,10,0)
#else
-- Legacy aeson (< 0.10) lacks the generic list instance.
-- NOTE(review): this branch uses a qualified Data.Vector import (V) that is
-- not visible in this chunk's import list — confirm it exists when building
-- against old aeson.
instance FromJSON [CiteObject] where
  parseJSON (Array v) = mapM parseJSON $ V.toList v
  parseJSON _ = return []
#endif

-- | Outcome of running one fixture.
data TestResult =
    Passed
  | Skipped
  | Failed
  | Errored
  deriving (Show, Eq)
-- | Directory holding the machine-readable citeproc-js test fixtures.
testDir :: FilePath
testDir = "citeproc-test" </> "processor-tests" </> "machines"

-- | Catch any exception raised by a test, report it, and mark it Errored.
handler :: FilePath -> E.SomeException -> IO TestResult
handler path e = do
  putStrLn $ "[ERROR] " ++ path ++ "\n" ++ show e
  return Errored
-- | Run one JSON fixture: decode it, render it with citeproc, and compare
-- against the expected output (after the normalisations defined below).
runTest :: FilePath -> IO TestResult
runTest path = E.handle (handler path) $ do
  raw <- BL.readFile path
  let testCase = either error id $ eitherDecode raw
  let procOpts' = ProcOpts (testBibopts testCase) False
  -- Merge the fixture's abbreviations into the style, then localise it.
  style <- localizeCSL Nothing
           $ (testCsl testCase) { styleAbbrevs = testAbbreviations testCase }
  let refs = testReferences testCase
  let cites = map unCiteObject (testCitations testCase) ++ testCitationItems testCase
  -- With no explicit citations, cite every reference once by its id.
  let cites' = if null cites
                  then [map (\ref -> emptyCite{ citeId = unLiteral $ refId ref}) refs]
                  else cites
  let expected = adjustEntities $ fixBegins $ trimEnd $ testResult testCase
  let mode = testMode testCase
  -- Bibliographies are wrapped in the csl-bib-body/csl-entry divs that the
  -- upstream suite expects; citation output is joined line by line.
  let assemble BibliographyMode xs =
        "<div class=\"csl-bib-body\">\n" ++
        unlines (map (\x -> " <div class=\"csl-entry\">" ++ x ++
                            "</div>") xs) ++ "</div>\n"
      assemble _ xs = unlines xs
  case mode of
       -- These two modes are not implemented; count them as skipped.
       BibliographyHeaderMode -> do
         putStrLn $ "[SKIPPED] " ++ path ++ "\n"
         return Skipped
       BibliographyNoSortMode -> do
         putStrLn $ "[SKIPPED] " ++ path ++ "\n"
         return Skipped
       _ -> do
         let result = assemble mode
                      $ map (inlinesToString . renderPandoc style) $
                        (case mode of {CitationMode -> citations; _ -> bibliography})
                      $ citeproc procOpts' style refs cites'
         if result == expected
            then do
              putStrLn $ "[PASSED] " ++ path ++ "\n"
              return Passed
            else do
              putStrLn $ "[FAILED] " ++ path
              showDiff expected result
              putStrLn ""
              return Failed
-- | Strip trailing whitespace and terminate the string with exactly one
-- newline.
trimEnd :: String -> String
trimEnd s = reverse (dropWhile isSpace (reverse s)) ++ "\n"
-- this is designed to mimic the test suite's output:
-- | Render inlines to an HTML string after normalising spans, links and
-- emphasis via adjustSpans (see below).
inlinesToString :: [Inline] -> String
inlinesToString ils =
  writeHtmlString
  $ bottomUp (concatMap adjustSpans)
  $ Pandoc nullMeta [Plain ils]
-- We want a literal ampersand instead of the numeric entity "&#38;".
adjustEntities :: String -> String
adjustEntities = go
  where
    go ('&':'#':'3':'8':';':rest) = '&' : go rest
    go (c:rest)                   = c : go rest
    go []                         = []
-- citeproc-js test suite expects "citations" to be formatted like
--   .. [0] Smith (2007)
--   >> [1] Jones (2008)
-- To get a meaningful comparison, we remove this prefix on every line.
fixBegins :: String -> String
fixBegins = unlines . map fixLine . lines
  where
    fixLine line = case line of
      ('.':'.':'[':rest) -> stripCounter rest
      ('>':'>':'[':rest) -> stripCounter rest
      _                  -> line
    -- Drop the "N]" counter and the spaces after it.
    stripCounter = dropWhile isSpace . dropWhile (not . isSpace)
-- adjust the spans so we fit what the test suite expects.
-- | Normalise pandoc inlines to the flat HTML the upstream suite emits:
-- unwrap notes/links/no-op spans, turn csl-no-* classes into inline styles,
-- and spell emphasis out as raw HTML tags.
adjustSpans :: Inline -> [Inline]
adjustSpans (Note [Para xs]) = xs
adjustSpans (Link _ ils _) = ils
adjustSpans (Span ("",[],[]) xs) = xs
adjustSpans (Span ("",["nocase"],[]) xs) = xs
adjustSpans (Span ("",["citeproc-no-output"],[]) _) =
  [Str "[CSL STYLE ERROR: reference with no printed form.]"]
adjustSpans (Span (id',classes,kvs) ils) =
  [Span (id',classes',kvs') ils]
    -- csl-no-* classes are rewritten as an explicit style attribute.
    where classes' = filter (`notElem` ["csl-no-emph","csl-no-strong","csl-no-smallcaps"]) classes
          kvs' = if null styles then kvs else (("style", concat styles) : kvs)
          styles = ["font-style:normal;" | "csl-no-emph" `elem` classes]
                ++ ["font-weight:normal;" | "csl-no-strong" `elem` classes]
                ++ ["font-variant:normal;" | "csl-no-smallcaps" `elem` classes]
adjustSpans (Emph xs) =
  RawInline (Format "html") "<i>" : xs ++ [RawInline (Format "html") "</i>"]
adjustSpans (Strong xs) =
  RawInline (Format "html") "<b>" : xs ++ [RawInline (Format "html") "</b>"]
adjustSpans (SmallCaps xs) =
  RawInline (Format "html") "<span style=\"font-variant:small-caps;\">" : xs ++ [RawInline (Format "html") "</span>"]
adjustSpans x = [x]
-- | Write expected/actual to a temp directory and show a unified diff via
-- the external `diff` tool.
showDiff :: String -> String -> IO ()
showDiff expected' result' =
  withSystemTempDirectory "test-pandoc-citeproc-XXX" $ \fp -> do
    let expectedf = fp </> "expected"
    let actualf = fp </> "actual"
    UTF8.writeFile expectedf expected'
    UTF8.writeFile actualf result'
    withDirectory fp $ void $ rawSystem "diff" ["-u","expected","actual"]
-- | Run an action with the working directory temporarily set to @fp@,
-- restoring the original directory even if the action throws.
-- (The previous version skipped the restore on exceptions.)
withDirectory :: FilePath -> IO a -> IO a
withDirectory fp action =
  E.bracket getCurrentDirectory            -- remember where we were
            setCurrentDirectory            -- always change back
            (\_ -> setCurrentDirectory fp >> action)
main :: IO ()
main = do
  args <- getArgs
  -- A fixture matches when any CLI arg (base name, case-insensitive) is a
  -- substring of its name; no args means run everything.
  let matchesPattern x
       | null args = True
       | otherwise = any (`isInfixOf` (map toLower x))
                         (map (map toLower . takeBaseName) args)
  exists <- doesDirectoryExist testDir
  -- First run: clone the upstream suite and "grind" the human-readable
  -- tests into machine-readable JSON.
  unless exists $ do
    putStrLn "Downloading test suite"
    _ <- rawSystem "git" ["clone", "https://github.com/citation-style-language/test-suite.git", "citeproc-test"]
    withDirectory "citeproc-test" $
      void $ rawSystem "python" ["processor.py", "--grind"]
  -- Args containing '/' are treated as explicit file paths.
  testFiles <- if any ('/' `elem`) args
                  then return args
                  else (map (testDir </>) . sort .
                        filter matchesPattern .
                        filter (\f -> takeExtension f == ".json"))
                       <$> getDirectoryContents testDir
  results <- mapM runTest testFiles
  let numpasses = length $ filter (== Passed) results
  let numskipped = length $ filter (== Skipped) results
  let numfailures = length $ filter (== Failed) results
  let numerrors = length $ filter (== Errored) results
  putStrLn $ show numpasses ++ " passed; " ++ show numfailures ++
             " failed; " ++ show numskipped ++ " skipped; " ++
             show numerrors ++ " errored."
  let summary = unlines $ zipWith (\fp res -> printf "%-10s %s" (show res) fp) testFiles results
  when (null args) $ do -- write log if complete test suite run
    ex <- doesFileExist "test-citeproc.log"
    when ex $ do
      putStrLn "Copying existing test-citeproc.log to test-citeproc.log.old"
      copyFile "test-citeproc.log" "test-citeproc.log.old"
    putStrLn "Writing test-citeproc.log."
    UTF8.writeFile "test-citeproc.log" summary
  exitWith $ if numfailures == 0
                then ExitSuccess
                else ExitFailure $ numfailures + numerrors
|
// Sum every number in a 2-D (possibly jagged) array, log the total, and
// return it. `func` is accepted for interface compatibility but unused.
function addArray (array,func) {
  let result = 0;
  for (const row of array) {
    for (const w of row) {
      result += w;
    }
  }
  console.log("addArray: " + result);
  return result;
}
// Wrap an array in a {array: ...} envelope and serialise it to JSON.
function setArray (array) {
  const payload = { array: array };
  return JSON.stringify(payload);
}
// Inverse of setArray: parse the JSON envelope and return the wrapped array.
function getArray (array) {
  // BUG FIX: setArray stores the payload under the key "array";
  // reading ".arr" always returned undefined.
  return JSON.parse(array).array;
}
// Evaluate `func` (a JS expression in x, e.g. "x*x") over [xmin, xmax] at
// unit steps and draw a 2-D line chart into the global PLOTAREA via Plotly.
function create2DPlot (xmin,xmax,func) {
  let inc=1;
  let xa=[],ya=[];
  // SECURITY NOTE: `func` is compiled with new Function and therefore
  // executes arbitrary JavaScript — only pass trusted expressions.
  let thisFunc = new Function('x',"return " + func);
  for (var x=xmin; x<=xmax; x+=inc) {
    xa.push(x);
    ya.push(thisFunc(x));
  }
  Plotly.plot( PLOTAREA, [{
    x: xa,
    y: ya }], {
    margin: { t: 0 } } );
}
// Evaluate `func` (a JS expression in x and y) over the [xmin,xmax]x[ymin,ymax]
// grid at unit steps and draw a 3-D mesh into the global PLOTAREA via Plotly.
function create3DPlot (xmin,xmax,ymin,ymax,func) {
  let inc=1;
  let xa=[],ya=[],za=[];
  // SECURITY NOTE: `func` is compiled with new Function and therefore
  // executes arbitrary JavaScript — only pass trusted expressions.
  let thisFunc = new Function('x','y',"return " + func);
  for (var x=xmin; x<=xmax; x+=inc) {
    for (var y=ymin; y<=ymax; y+=inc) {
      xa.push(x);
      ya.push(y);
      za.push(thisFunc(x,y));
    }
  }
  var data=[
    {
      opacity:0.8,
      color:'rgb(300,100,200)',
      type: 'mesh3d',
      x: xa,
      y: ya,
      z: za,
    }
  ];
  Plotly.newPlot(PLOTAREA, data);
}
// Draw a 3-D mesh from pre-computed coordinate arrays into PLOTAREA.
function plot3D (xa,ya,za) {
  const trace = {
    opacity: 0.8,
    color: 'rgb(300,100,200)',
    type: 'mesh3d',
    x: xa,
    y: ya,
    z: za,
  };
  Plotly.newPlot(PLOTAREA, [trace]);
}
// Draw a 2-D line chart from pre-computed coordinate arrays into PLOTAREA.
function plot2D (xa,ya) {
  const trace = { x: xa, y: ya };
  Plotly.plot(PLOTAREA, [trace], { margin: { t: 0 } });
}
// Draw several traces at once; `plots` is an array of [xArray, yArray] pairs.
function plot2DMulti (plots) {
  const plotData = plots.map(function (pair) {
    return new Plots(pair[0], pair[1]);
  });
  Plotly.newPlot(PLOTAREA, plotData);
}
// Minimal trace container pairing x and y sample arrays for Plotly.
function Plots (xA,yA) {
  this.x = xA;
  this.y = yA;
}
|
Visualize the Universe - CS 594 Final Project Website
=====================================================
This folder contains the web application detailing
the experiment and results for this project. It's a
node.js application that uses the Express framework,
so you'll need node.js and npm installed to run it.
Assuming you have those, running ```npm install```
in this directory should grab all the dependencies.
Then to run it:
```sh
$ PORT=8080 npm start
```
You can omit the port, and by default it will run on 3000.
If you want it on port 80, you'll probably need to run it
as root:
```sh
$ sudo PORT=80 npm start
```
|
import axios from 'axios'
// import fs from 'fs'
// import path from 'path'
import concat from 'concat-stream'
/**
 * Download an image over HTTP(S) and resolve with its bytes as a single
 * Buffer (the response stream is concatenated in memory).
 *
 * @param url - absolute URL of the image to fetch
 * @returns promise resolving to the image bytes; rejects with the stream
 *          error if the download fails
 */
export default async function downloadImage (url: string): Promise<any> {
  // axios image download with response type "stream"
  const response = await axios({
    method: 'GET',
    url: url,
    responseType: 'stream'
  })

  // Pipe the body into a concat stream and resolve once it has buffered
  // the whole payload.
  return new Promise((resolve, reject) => {
    const concatStream = concat((imgBuff) => {
      resolve(imgBuff)
    })
    response.data.pipe(concatStream)
    // BUG FIX: propagate the underlying stream error so callers can see
    // why the download failed (previously rejected with no reason).
    response.data.on('error', (err: Error) => {
      reject(err)
    })
  })
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geode.spark.connector
import org.apache.spark.SparkConf
import org.apache.geode.spark.connector.internal.{DefaultGeodeConnectionManager, LocatorHelper}
/**
 * Stores configuration of a connection to Geode cluster. It is serializable and can
 * be safely sent over network.
 *
 * @param locators Geode locator host:port pairs, the default is (localhost,10334)
 * @param geodeProps The initial geode properties to be used.
 * @param connectionManager GeodeConnectionFactory instance
 */
class GeodeConnectionConf(
   val locators: Seq[(String, Int)],
   val geodeProps: Map[String, String] = Map.empty,
   connectionManager: GeodeConnectionManager = new DefaultGeodeConnectionManager
  ) extends Serializable {

  /** require at least 1 pair of (host,port) */
  require(locators.nonEmpty)

  /** Obtain (or reuse) a connection for this configuration from the manager. */
  def getConnection: GeodeConnection = connectionManager.getConnection(this)
}
object GeodeConnectionConf {

  /**
   * create GeodeConnectionConf object based on locator string and optional GeodeConnectionFactory
   * @param locatorStr Geode cluster locator string
   * @param connectionManager GeodeConnection factory
   */
  def apply(locatorStr: String, geodeProps: Map[String, String] = Map.empty)
           (implicit connectionManager: GeodeConnectionManager = new DefaultGeodeConnectionManager): GeodeConnectionConf = {
    val locators = LocatorHelper.parseLocatorsString(locatorStr)
    new GeodeConnectionConf(locators, geodeProps, connectionManager)
  }

  /**
   * create GeodeConnectionConf object based on SparkConf. Note that implicit can
   * be used to control what GeodeConnectionFactory instance to use if desired
   * @param conf a SparkConf instance
   */
  def apply(conf: SparkConf): GeodeConnectionConf = {
    val locatorStr = conf.getOption(GeodeLocatorPropKey).getOrElse(
      throw new RuntimeException(s"SparkConf does not contain property $GeodeLocatorPropKey"))
    // SparkConf only keeps keys that start with "spark.", so geode settings
    // are stored under "spark.geode.*"; strip that prefix before use.
    val prefix = "spark.geode."
    val geodeProps = conf.getAll.collect {
      case (key, value) if key.startsWith(prefix) && key != GeodeLocatorPropKey =>
        key.substring(prefix.length) -> value
    }.toMap
    apply(locatorStr, geodeProps)
  }
}
|
<footer class="page-footer text-center footer footer-static footer-light white navbar-border navbar-shadow">
    <div class="footer-copyright">
        <div class="container">
            <!-- BUG FIX: href="skyreka.com" was resolved as a relative path;
                 an absolute URL is required. rel="noopener noreferrer" guards
                 the opener window when using target="_blank". -->
            <span>© <?= date('Y') ?> <a href="https://skyreka.com" target="_blank" rel="noopener noreferrer">SKYREKA Cup</a> All rights reserved.</span>
        </div>
    </div>
</footer>
|
#! /bin/bash
# Build the client bundle and assemble the deployable dist/ tree.
# Fail fast on any error, unset variable, or failed pipeline stage so a
# broken build step cannot produce a half-populated dist/.
set -euo pipefail

rm -rf dist/*
npm run build:client
node scripts/build-package-json.js
mkdir -p dist/src
cp -r src/server src/shared src/types dist/src
cp app.yaml tsconfig.json dist
# Generate the token secret directly into the dist tree.
sh scripts/secretgen.sh > dist/.mcs-token-secret.json
|
package edu.mayo.qdm.demographics;
import edu.mayo.qdm.patient.Ethnicity;
import edu.mayo.qdm.patient.Patient;
/**
*/
/**
 * Demographics item that buckets patients under the "ethnicity" statistic,
 * mapping the {@link Ethnicity} enum to a human-readable label.
 */
public class EthnicityDemographicsItem extends AbstractDemographicsItem {

    /** Statistic key under which ethnicity counts are recorded. */
    private static final String ETHNICITY_STATISTIC = "ethnicity";

    protected EthnicityDemographicsItem(String population, Patient patient) {
        super(population, ETHNICITY_STATISTIC, patient);
    }

    @Override
    protected String getLabelFromPatient(Patient patient) {
        Ethnicity ethnicity = patient.getEthnicity();
        // Treat a missing ethnicity as UNKNOWN rather than failing below.
        if (ethnicity == null) {
            ethnicity = Ethnicity.UNKNOWN;
        }

        switch (ethnicity) {
            case HISPANICORLATINO:
                return "Hispanic or Latino";
            case NONHISPANICORLATINO:
                return "Non Hispanic or Latino";
            case UNKNOWN:
                return "Unknown";
            default:
                // A new enum constant was added without a label mapping here;
                // include the value so the failure is diagnosable.
                throw new IllegalStateException("Unhandled ethnicity: " + ethnicity);
        }
    }
}
|
package infrastructures.tweet
import java.time.{ZoneId, ZonedDateTime}
import javax.inject.{Inject, Singleton}
import com.typesafe.config.ConfigFactory
import domains.tweet.{TweetEntity, TwitterApi}
import twitter4j.conf.{Configuration, ConfigurationBuilder}
import twitter4j.{Query, Status, Twitter, TwitterFactory}
import scala.collection.JavaConverters._
@Singleton
class TwitterApiImpl @Inject()(twitterAdapter: TwitterAdapter) extends TwitterApi {

  /** Search Twitter for the hard-coded "#funny" hashtag and map each status
    * into a domain [[TweetEntity]]. */
  override def request(): Seq[TweetEntity] = {
    val query = new Query("#funny")
    val statuses = search(query)
    statuses.map(TweetEntityConverter.convert)
  }

  // Build a fresh twitter4j client per call and delegate through the
  // adapter seam so tests can stub the network.
  private def search(query: Query): Seq[Status] = {
    val twitter = TwitterClientFactory.create()
    twitterAdapter.search(twitter, query)
  }
}
/** Thin seam around twitter4j's search call so tests can stub the network. */
trait TwitterAdapter {
  def search(twitter: Twitter, query: Query): Seq[Status]
}

/** Production adapter: performs the real search and unwraps the Java list. */
@Singleton
class TwitterAdapterImpl extends TwitterAdapter {
  def search(twitter: Twitter, query: Query): Seq[Status] = twitter.search(query).getTweets.asScala
}
private[tweet] object TweetEntityConverter {

  /** Map a twitter4j [[Status]] onto the domain [[TweetEntity]].
    * The first field is None because the entity has not been persisted yet. */
  def convert(status: Status): TweetEntity = {
    val createdAt =
      ZonedDateTime.ofInstant(status.getCreatedAt.toInstant, ZoneId.systemDefault())
    TweetEntity(
      None,
      status.getId,
      status.getUser.getScreenName,
      status.getText,
      status.getRetweetCount,
      status.getFavoriteCount,
      status.getLang,
      createdAt
    )
  }
}
private[tweet] object TwitterClientFactory {

  /** Build a twitter4j client configured from the "twitter4j" config block. */
  def create(): Twitter = {
    new TwitterFactory(configuration()).getInstance()
  }

  // Read debug flag and OAuth credentials from the "twitter4j" section of
  // the Typesafe config and assemble a twitter4j Configuration.
  private def configuration(): Configuration = {
    val config = ConfigFactory.load().getObject("twitter4j").toConfig
    val oauthConfig = config.getObject("oauth").toConfig
    val builder = new ConfigurationBuilder
    builder.setDebugEnabled(config.getBoolean("debug"))
      .setOAuthConsumerKey(oauthConfig.getString("consumerKey"))
      .setOAuthConsumerSecret(oauthConfig.getString("consumerSecret"))
      .setOAuthAccessToken(oauthConfig.getString("accessToken"))
      .setOAuthAccessTokenSecret(oauthConfig.getString("accessTokenSecret"))
      .build()
  }
}
|
import { APICategoryContent } from '../../../apiDefs/schema';
import HealthArgonautDeactivationNotice from './argonautDeactivationNotice.mdx';
import HealthArgonautDeprecationNotice from './argonautDeprecationNotice.mdx';
import ArgonautReleaseNotes from './argonautReleaseNotes.mdx';
import ClinicalHealthReleaseNotes from './clinicalHealthReleaseNotes.mdx';
import CommunityCareReleaseNotes from './communityCareReleaseNotes.mdx';
import FhirApiReleaseNotes from './fhirAPIReleaseNotes.mdx';
import FhirArgonautApiIntro from './fhirArgonautApiIntro.mdx';
import FhirDSTU2ApiIntro from './fhirDSTU2ApiIntro.mdx';
import FHIRMultiOpenAPIIntro from './FHIRMultiOpenAPIIntro.mdx';
import HealthOverview from './healthOverview.mdx';
import HealthQuickstart from './healthQuickstart.mdx';
import PgdReleaseNotes from './pgdReleaseNotes.mdx';
import UrgentCareApiIntro from './urgentCareApiIntro.mdx';
import UrgentCareDeactivationNotice from './urgentCareDeactivationNotice.mdx';
import UrgentCareDeprecationNotice from './urgentCareDeprecationNotice.mdx';
import UrgentCareReleaseNotes from './urgentCareReleaseNotes.mdx';
import ProviderDirectoryReleaseNotes from './providerDirectoryReleaseNotes.mdx';
// Category-level content for the Health APIs documentation section.
const healthContent: APICategoryContent = {
  consumerDocsLinkText: 'Read the consumer onboarding guide for getting production access',
  overview: HealthOverview,
  quickstart: HealthQuickstart,
  shortDescription: 'Use our APIs to build tools that help Veterans manage their health.',
};

// Re-export the category content plus every MDX document so consumers can
// import them all from this single module.
export {
  healthContent,
  ArgonautReleaseNotes,
  ClinicalHealthReleaseNotes,
  CommunityCareReleaseNotes,
  FhirApiReleaseNotes,
  FhirArgonautApiIntro,
  FhirDSTU2ApiIntro,
  FHIRMultiOpenAPIIntro,
  HealthArgonautDeprecationNotice,
  HealthArgonautDeactivationNotice,
  PgdReleaseNotes,
  UrgentCareApiIntro,
  UrgentCareDeprecationNotice,
  UrgentCareReleaseNotes,
  UrgentCareDeactivationNotice,
  ProviderDirectoryReleaseNotes,
};
|
/*
* Copyright 2010-2021 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.project.structure
/**
 * A sequence of all modules of type [M] the current module depends on with a
 * regular dependency.
 *
 * @see KtModule.directRegularDependencies
 */
public inline fun <reified M : KtModule> KtModule.directRegularDependenciesOfType(): Sequence<M> =
    directRegularDependencies.asSequence().filterIsInstance<M>()
/**
 * A sequence of all other modules the current module can depend on: regular,
 * refinement and friend dependencies, in that order.
 *
 * @see KtModule.directRegularDependencies
 * @see KtModule.directRefinementDependencies
 * @see KtModule.directFriendDependencies
 */
public fun KtModule.allDirectDependencies(): Sequence<KtModule> =
    directRegularDependencies.asSequence() +
        directRefinementDependencies.asSequence() +
        directFriendDependencies.asSequence()
/**
 * A sequence of all other modules of type [M] the current module can depend on,
 * across regular, refinement and friend dependencies.
 *
 * @see KtModule.directRegularDependencies
 * @see KtModule.directRefinementDependencies
 * @see KtModule.directFriendDependencies
 */
public inline fun <reified M : KtModule> KtModule.allDirectDependenciesOfType(): Sequence<M> =
    allDirectDependencies().filterIsInstance<M>()
|
// 放在前面,把 http.ClientRequest 先复写
import * as nock from 'nock';
import { Fixture, sleep, request } from '../../TestUtil';
import { HttpServerPatcher, HttpClientPatcher } from '../../../src/patchers';
import * as sinon from 'sinon';
import * as assert from 'assert';
import * as pedding from 'pedding';
import { SPAN_FINISHED } from 'pandora-component-trace';
import { HEADER_TRACE_ID, HEADER_SPAN_ID } from 'pandora-tracer';
/**
 * Fixture verifying the HTTP client patcher: an inbound request with an
 * X-Trace-Id must produce traced spans, while the outgoing call to a third
 * party must NOT leak trace headers.
 */
export default class HttpClientFixture extends Fixture {

  /** Enable both server- and client-side patchers; https is force-patched. */
  config() {
    return {
      patchers: {
        httpServer: {
          enabled: true,
          klass: HttpServerPatcher
        },
        httpClient: {
          enabled: true,
          klass: HttpClientPatcher,
          forcePatchHttps: true
        }
      }
    };
  }

  async case(done) {
    const http = require('http');
    const https = require('https');
    // Two spans are expected to finish (server entry + outgoing client call).
    const _done = pedding(done, 2);
    nock('https://www.taobao.com')
      .get('/')
      .reply(200);
    const stub = sinon.stub(this.componentTrace.traceManager, 'record').callsFake(function(span, isEntry) {
      const context = span.context();
      // Trace id must be taken from the inbound X-Trace-Id header.
      assert(context.traceId === '1234567890');
      span.once(SPAN_FINISHED, (s) => {
        assert(s.duration > 0);
        _done();
      });
    });
    const server = http.createServer(function(req, res) {
      setTimeout(() => {
        request(https, {
          hostname: 'www.taobao.com',
          path: '/',
          method: 'GET'
        }).then((response) => {
          const headers = response[0].req.headers;
          // The outgoing third-party request must not carry trace headers.
          assert(!headers[HEADER_TRACE_ID]);
          assert(!headers[HEADER_SPAN_ID]);
          res.end('OK');
        });
      }, Math.floor(1 + Math.random() * 10) * 100);
    });
    server.listen(0);
    // BUG FIX: the sleep result was discarded, so execution continued
    // immediately and server.address() could run before the port was bound.
    // (sleep from TestUtil presumably returns a promise — confirm.)
    await sleep(1000);
    const port = server.address().port;
    await request(http, {
      hostname: 'localhost',
      port: port,
      path: '/',
      method: 'GET',
      headers: {
        'X-Trace-Id': '1234567890'
      }
    });
    stub.restore();
  }
}
|
import {
Component, OnInit, Input, ChangeDetectionStrategy, Output, EventEmitter, ElementRef,
Directive
} from "@angular/core";
import { DomSanitizer, SafeStyle } from "@angular/platform-browser";
/**
 * Based on https://github.com/eleme/element-angular/blob/master/src/button/button.ts
 * Attribute-selector component; recommended usage style: my-button.
 */
@Component({
  selector: '[trip-button]',
  template: `
  <i class="trip-icon-loading" *ngIf="loading"></i>
  <!--图标-->
  <ng-content></ng-content>
  `,
  host: {
    // Compose the CSS class list from the custom class and the theme type.
    "[class]": "'btn ' + customClass + (themeType ? ' btn-' + themeType : '')",
    // "[class.disabled]": "disabled",
    // "[class.loading]": "loading",
    // "[class.plain]": "plain",
    // "[disabled]": "disabled",
    // "[type]": "nativeType",
    // "[style]": "extendStyles()",
    // "[autofocus]": "autofocus"
  }
})
export class ButtonDirective implements OnInit {
  // Theme variant appended as "btn-<type>" (e.g. primary, danger).
  @Input('type') themeType: string = ''
  // Native button type attribute (currently unused by the host bindings).
  @Input('native-type') nativeType: string = 'button'
  @Input() size: string = ''
  @Input() icon: string = ''
  // @Input() disabled: string = ''
  // When truthy, the loading spinner icon is shown before the content.
  @Input() loading: string = ''
  // @Input() plain: string = ''
  // @Input() autofocus: string = ''
  // Extra CSS classes merged into the host class list.
  @Input('custom-class') customClass: string = ''

  // @Output() mclick: EventEmitter<any> = new EventEmitter<any>()

  constructor(
    private el: ElementRef,
    private sanitizer: DomSanitizer
  ) {
  }

  ngOnInit(): void {
    // removeNgTag(this.el.nativeElement)
  }

  // clickHandle($event: Event): void {
  //   this.click.emit($event)
  // }

  // get extend(): SafeStyle {
  //   return this.sanitizer.bypassSecurityTrustStyle(this.style)
  // }

  // extendStyles(): SafeStyle {
  //   return this.sanitizer.bypassSecurityTrustStyle(this.style)
  // }
}
|
from django.contrib import admin
from mezzanine.core.admin import TabularDynamicInlineAdmin
from mezzanine.pages.admin import PageAdmin
from mezzanine.utils.admin import SingletonAdmin
from .models import (SiteConfiguration,
HomePage, Slide, IconBox,
FAQ, FAQPage)
# Site-wide singleton settings, editable from the admin as a single record.
admin.site.register(SiteConfiguration, SingletonAdmin)


class SlideInline(TabularDynamicInlineAdmin):
    # Carousel slides edited inline on the home page.
    model = Slide


class IconInline(TabularDynamicInlineAdmin):
    # Icon boxes edited inline on the home page.
    model = IconBox


class HomePageAdmin(PageAdmin):
    # Home page admin with its slides and icon boxes attached inline.
    inlines = (SlideInline, IconInline)


admin.site.register(HomePage, HomePageAdmin)


class FAQInline(TabularDynamicInlineAdmin):
    # FAQ entries edited inline on the FAQ page.
    model = FAQ


class FAQPageAdmin(PageAdmin):
    inlines = (FAQInline,)


admin.site.register(FAQPage, FAQPageAdmin)
|
package bart.sampler.tree
import bart.sampler.{ForestPara, HalfNormSampler, SigmaSquareSampler}
import bart.tree.LearningNode
/**
 * One Metropolis–Hastings tree-mutation proposal applied to a single tree.
 * Concrete proposers supply the transition / likelihood / tree-structure
 * log ratios; this trait decides acceptance and applies the move.
 */
trait TreeMutationProposer extends Serializable {
  val topNode: LearningNode            // root of the tree being mutated
  val para: ForestPara                 // forest-level hyper-parameters
  val proposer: Proposer               // candidate mutation (origin + proposed node)
  val treeId: Int
  val sigmaSquareSampler: SigmaSquareSampler
  val tauSampler: HalfNormSampler
  val chainId: Int = para.chainId
  val kind: String                     // human-readable proposal kind

  // All samplers feeding this proposer must belong to the same MCMC chain.
  assert(
    sigmaSquareSampler.chainId == chainId && sigmaSquareSampler.chainId == tauSampler.chainId,
    "sigma, tau and forest sampler should have then same chainId"
  )

  // The three log-ratio components of the MH acceptance probability.
  def logTransitionRatio: Double
  def logLikelihoodRatio: Double
  def logTreeRatio: Double

  /** Total log acceptance ratio of the proposal. */
  def logProbRatio: Double = logTransitionRatio + logLikelihoodRatio + logTreeRatio

  /** MH accept test; unqualified proposals are always rejected. */
  def isAccept: Boolean = proposer.isQualified && (logProbRatio > proposer.logProbAccept)

  /** Node that survives the accept/reject decision. */
  def acceptedNode: LearningNode = if (isAccept) proposer.proposerNode else proposer.originNode

  /** Apply the decision: refresh statistics and residuals, splice the
    * accepted node into the tree, then drop cached data from both nodes. */
  def update(): Unit = {
    updateSuffStat()
    refreshLeafRespAndUpdateResidual(acceptedNode)
    updateTree(acceptedNode)
    thin(proposer.proposerNode)
    thin(proposer.originNode)
  }

  def updateSuffStat(): Unit
/**
 * Resample the leaf response(s) of the accepted node (or of its two
 * children when it is singly internal) and shift each data point's
 * residual by (previous leaf response − new leaf response).
 */
def refreshLeafRespAndUpdateResidual(acceptedNode: LearningNode): Unit = {
  val filteredDf = proposer.filteredDf
  require(acceptedNode.isSinglyInternal || acceptedNode.isLeaf,
    "only for singly internal or leaf node")
  if (acceptedNode.isLeaf) {
    acceptedNode.sufficientStat.refreshLeafResp(sigmaSquareSampler, tauSampler)
    val newLeafResp = acceptedNode.sufficientStat.leafResp
    filteredDf.foreach { point =>
      // getTmpResp holds the leaf response applied before this refresh.
      val leafResp = point.getTmpResp(chainId)
      point.setResidual(point.getResidual(chainId) + leafResp - newLeafResp, chainId)
    }
  } else {
    val left = acceptedNode.leftChild.get.sufficientStat
    val right = acceptedNode.rightChild.get.sufficientStat
    left.refreshLeafResp(sigmaSquareSampler, tauSampler)
    right.refreshLeafResp(sigmaSquareSampler, tauSampler)
    val split = acceptedNode.split.get
    filteredDf.foreach { point =>
      val binnedFeatures = point.binnedFeatures
      // Route the point through the split to pick its new leaf response.
      val newLeafResp = if (split.shouldGoLeft(binnedFeatures)) {
        left.leafResp
      } else {
        right.leafResp
      }
      val leafResp = point.getTmpResp(chainId)
      point.setResidual(point.getResidual(chainId) + leafResp - newLeafResp, chainId)
    }
  }
}
// Writes the accepted node back into the tree at its position and returns
// the (mutated) top node.
def updateTree(acceptedNode: LearningNode): LearningNode = {
val parentNodeId = LearningNode.parentIndex(acceptedNode.id)
if (parentNodeId == 0) {
// Root case: copy structure and statistics onto the existing topNode
// instance so external references to topNode remain valid.
topNode.split = acceptedNode.split
topNode.leftChild = acceptedNode.leftChild
topNode.rightChild = acceptedNode.rightChild
topNode.sufficientStat.rSum = acceptedNode.sufficientStat.rSum
topNode.sufficientStat.leafResp = acceptedNode.sufficientStat.leafResp
return topNode
}
// Non-root: re-attach the accepted node as the proper child of its parent.
val parent = LearningNode.getNode(parentNodeId, topNode)
if (LearningNode.isLeftChild(acceptedNode.id)) {
parent.leftChild = Some(acceptedNode)
} else {
parent.rightChild = Some(acceptedNode)
}
topNode
}
/** Log transition ratio for a grow/prune move pair:
 *  log q(prune back) - log q(grow here), where growing also pays the
 *  probability of picking this particular split. */
def logTransitionRatioForGrowOrPrune(): Double = {
  val logToPrune = math.log(para.probPrune / proposer.numPruneNodesAvailable)
  val logToGrow =
    math.log(para.probGrow / proposer.numLeaves) + proposer.logProbSplitWithinNode
  logToPrune - logToGrow
}
// Log prior ratio of the tree structure for a grow move (negate for prune):
// both new children not splitting, the parent now splitting instead of not,
// plus the probability of the chosen split.
// Note: the local vals hold LOG probabilities despite the "prob*" names.
def logTreeRatioForGrowOrPrune(): Double = {
val depth = LearningNode.indexToLevel(proposer.originNode.id)
val logProbSplitWithinNode = proposer.logProbSplitWithinNode
// The two children sit one level below the grown node.
val probLeftNotSplit = logProbNodeNotSplit(depth + 1)
val probRightNotSplit = logProbNodeNotSplit(depth + 1)
val probParentSplit = logProbNodeSplit(depth)
val probParentNotSplit = logProbNodeNotSplit(depth)
val probSplitChosen = logProbSplitWithinNode
probLeftNotSplit + probRightNotSplit + probParentSplit -
probParentNotSplit + probSplitChosen
}
/** Releases the cached data partition held by `node` and, for a singly
 *  internal node, by both of its children, so the partitions can be GC'd. */
def thin(node: LearningNode): Unit = {
  require(
    node.isSinglyInternal || node.isLeaf, "only for Singly internal or leaf node")
  node.df = None
  if (node.isSinglyInternal) {
    Seq(node.leftChild, node.rightChild).foreach(child => child.get.df = None)
  }
}
// Log prior probability that a node at `depth` splits: alpha * (1+depth)^(-beta).
def logProbNodeSplit(depth: Int): Double = {
math.log(para.alpha * math.pow(1 + depth, -para.beta))
}
// Log prior probability that a node at `depth` does NOT split.
def logProbNodeNotSplit(depth: Int): Double = {
math.log(1.0 - para.alpha * math.pow(1 + depth, -para.beta))
}
// Recomputes the residual-sum statistic (rSum) for `node`.
// Leaf: computed directly over the proposer's filtered data.
// Singly-internal: partitions the data by the node's split (caching the
// partitions on the node and its children) and aggregates the children.
def refreshRSum(node: LearningNode): Unit = {
require(
node.isSinglyInternal || node.isLeaf,
"only for Singly internal or leaf node")
val filteredDf = proposer.filteredDf
if (node.isLeaf) {
node.sufficientStat.refreshRSum(filteredDf)
return
}
val left = node.leftChild.get
val right = node.rightChild.get
// Rebuild the cached partitions only when any of them is missing.
if (node.df.isEmpty || left.df.isEmpty || right.df.isEmpty) {
node.df = Some(filteredDf)
val split = node.split.get
val (leftDf, rightDf) = filteredDf.partition{point => split.shouldGoLeft(point.binnedFeatures)}
left.df = Some(leftDf)
right.df = Some(rightDf)
}
val leftSuff = left.sufficientStat
leftSuff.refreshRSum(left.df.get)
val rightSuff = right.sufficientStat
rightSuff.refreshRSum(right.df.get)
// Parent statistic is the aggregate of the two children.
node.sufficientStat.refreshRSum(leftSuff, rightSuff)
}
// Debug summary of the move: kind, decision, and log-ratio components.
// Labels: R = total log MH ratio, TR = transition, LL = likelihood,
// TreeR = tree-structure prior.
override def toString: String = {
  if (!proposer.isQualified) {
    s"kind: $kind, not valid"
  } else {
    // Bug fix: the tree-ratio component was previously also labelled "TR",
    // colliding with the transition-ratio label in logs.
    s"""kind: $kind, isAccept: $isAccept, R: ${logProbRatio.formatted("%.3f")}, """ +
      s"""TR: ${logTransitionRatio.formatted("%.3f")}, LL: ${logLikelihoodRatio.formatted("%.3f")}, """ +
      s"""TreeR: ${logTreeRatio.formatted("%.3f")}"""
  }
}
}
|
---
ID: 3639
post_title: >
Crime and Punishment, Part 1 (by Fyodor
Dostoyevsky) Audiobook
author: abbie04m553726
post_excerpt: ""
layout: post
permalink: >
https://universalflowuniversity.com/uncategorized/crime-and-punishment-part-1-by-fyodor-dostoyevsky-audiobook/
published: true
post_date: 2017-09-07 11:43:30
---
[embed]https://www.youtube.com/watch?v=gE1sLuUi6Yk[/embed]<br>
<p>Crime and Punishment, Part 1 (by Fyodor Dostoyevsky) Audiobook</p>
|
---
pid: '9648'
label: Studies of Peasants from Life
object_type: Drawing
genre: Study
worktags: Horse|Peasants
iconclass_code:
height_cm: '19.9'
width_cm: '14.5'
diameter_cm:
location_country:
location_city:
location_collection:
accession_nos_and_notes:
private_collection_info: 'England, Devonshire Collection, Chatsworth House, inv. #676'
collection_type: Private
realdate: Before 1605
numeric_date: '1605'
medium: Pen and brown ink, brown wash
support: Paper
support_notes:
signature:
signature_location:
support_marks:
further_inscription:
print_notes:
print_type:
plate_dimensions:
states:
printmaker:
publisher:
series:
collaborators:
collaborator_notes:
collectors_patrons:
our_attribution: Jan Brueghel the Elder
other_attribution_authorities: 'Bailey/Walker cat. #CHAT.DV.1'
bibliography: Berlin 1975, p. 100-1, nr. 121, pl. 224|Ertz 1979, p. 86, fig. 74|London
1993, nr. 157|Essen/Vienna 1997-98, p. 39, fig. 10|Jaffé 2002, vol. 2, p. 179, nr.
1168
biblio_reference:
exhibition_history: Berlin 1975, nr. 121|London 1993, nr. 157|Essen/Vienna 1997-98,
fig. 10
ertz_1979:
ertz_2008:
bailey_walker: CHAT.DV.1
hollstein_no:
bad_copy:
exclude_from_browsing:
provenance: '6681'
provenance_text: Dukes of Devonshire
related_works: 3547|3576
related_works_notes:
copies_and_variants:
curatorial_files: '6530'
general_notes:
discussion:
external_resources_title:
external_resources_url:
thumbnail: "/img/derivatives/simple/9648/thumbnail.jpg"
fullwidth: "/img/derivatives/simple/9648/fullwidth.jpg"
collection: janbrueghel
layout: janbrueghel_item
order: '980'
permalink: "/janbrueghel/studies-of-peasants-from-life"
full:
---
|
import { getConfig } from '../config';
import { MODE, COLLECTED } from '../constants';
import { cloneElements } from './render';
import { refElements } from './hydrate';
/**
 * Scans the document for `input[data-lazy-begin]` marker elements and
 * registers each one's value in the COLLECTED registry, keyed by its
 * `data-lazy-begin` identifier. In RENDER mode the marked elements are
 * cloned; otherwise references to the existing elements are captured.
 */
export const collect = () => {
  const markers = document.querySelectorAll<HTMLInputElement>(
    'input[data-lazy-begin]'
  );
  const { mode } = getConfig();
  for (let i = 0, j = markers.length; i < j; i += 1) {
    // Use the captured element consistently instead of re-indexing markers.
    const el = markers[i];
    const { lazyBegin } = el.dataset || {};
    const value =
      mode === MODE.RENDER
        ? cloneElements(el, lazyBegin)
        : refElements(el, lazyBegin);
    // Single lookup instead of the previous has() + get() pair; values are
    // arrays, so a hit is always truthy.
    const existing = COLLECTED.get(lazyBegin);
    if (existing) {
      existing.push(value);
    } else {
      COLLECTED.set(lazyBegin, [value]);
    }
  }
};
|
const _ = require("lodash");
// Recursively serializes `value` to an XML string.
// - `name` is the element name; calling with a single object argument makes
//   the object's keys element names.
// - Keys prefixed with "@" become attributes on the enclosing element.
// - Arrays produce one element per item, all sharing the same name.
// Bug fix: the escape replacements were identity no-ops (entities had been
// decoded back to the raw characters), and attribute values were not escaped
// at all — a value containing `"`, `&` or `<` produced malformed XML.
function toXML(name, value) {
  if (_.isObject(name)) {
    value = name;
    name = null;
  }
  if (_.isArray(value)) {
    return _.map(value, function (v) { return toXML(name, v); }).join(""); // eslint-disable-line
  }
  const attrs = [];
  const elems = [];
  if (_.isObject(value)) {
    for (let k in value) { // eslint-disable-line
      const v = value[k];
      if (k[0] === "@") {
        k = k.substring(1);
        // Escape attribute values so they cannot break out of the quotes.
        const escaped = String(v)
          .replace(/&/g, "&amp;")
          .replace(/</g, "&lt;")
          .replace(/"/g, "&quot;");
        attrs.push(`${k}="${escaped}"`);
      } else {
        elems.push(toXML(k, v));
      }
    }
    value = elems.join("");
  } else {
    // Escape text content.
    value = String(value)
      .replace(/&/g, "&amp;")
      .replace(/</g, "&lt;")
      .replace(/>/g, "&gt;")
      .replace(/"/g, "&quot;")
      .replace(/'/g, "&#39;");
  }
  const startTag = name ? `<${name}${attrs.length > 0 ? " " + attrs.join(" ") : ""}>` : ""; // eslint-disable-line
  const endTag = name ? `</${name}>` : "";
  return startTag + value + endTag;
}
// Wraps a serialized response in a SOAP 1.1 envelope for the Salesforce
// partner API namespace (urn:partner.soap.sforce.com). `responseName`
// becomes the body element name; `responseBody` is serialized via toXML.
module.exports = (responseName, responseBody) => {
return `<?xml version="1.0" encoding="utf-8"?>
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/"
xmlns="urn:partner.soap.sforce.com">
<soapenv:Body>
${toXML(responseName, responseBody)}
</soapenv:Body>
</soapenv:Envelope>`;
};
|
namespace LykkeApi2.Infrastructure
{
/// <summary>
/// Well-known identifiers for the actor that changed a record.
/// </summary>
public static class RecordChanger
{
/// <summary>The change was made by the client (end user).</summary>
public const string Client = "Client";
}
}
|
# frozen_string_literal: true
# Jbuilder partial rendering a survey unlock-condition as JSON: a localized
# type label, a display description, and the edit/delete URLs the UI uses.
survey_condition = condition
json.type t('course.condition.survey.title')
# The survey title may contain inline markup; format it for display.
json.description format_inline_text(survey_condition.title)
json.edit_url url_for([:edit, course, conditional, survey_condition])
json.delete_url url_for([course, conditional, survey_condition])
|
<?php
namespace Maruamyu\Core\Http\Message;
/**
 * Ordered, multi-valued container for QUERY_STRING data.
 *
 * Unlike PHP's built-in handling, repeated keys are preserved as-is (no
 * "key[]" array convention): each key maps to a list of values.
 */
class QueryString
{
    /** @var string[][] ['key' => ['value1', 'value2', ...], ...] */
    private $data;

    /**
     * @param string|array|self $initValue initial value
     */
    public function __construct($initValue = null)
    {
        $this->data = [];
        if (($initValue instanceof self) || is_array($initValue)) {
            $this->merge($initValue);
        } elseif (is_string($initValue)) {
            $this->data = static::parse($initValue);
        }
    }

    /**
     * @return string QUERY_STRING-formatted string
     * @see toString()
     */
    public function __toString()
    {
        return $this->toString();
    }

    /**
     * @return string QUERY_STRING-formatted string
     *   Note: deliberately NOT PHP's proprietary "key[]" format.
     */
    public function toString()
    {
        return static::build($this->data);
    }

    /**
     * @return string QUERY_STRING-formatted string used for OAuth signatures
     *   Note: deliberately NOT PHP's proprietary "key[]" format.
     */
    public function toOAuthQueryString()
    {
        return static::buildForOAuth1($this->data);
    }

    /**
     * @return string QUERY_STRING in PHP's proprietary "key[]" format
     * @see \http_build_query()
     */
    public function toPHPQueryString()
    {
        return http_build_query($this->toArray());
    }

    /**
     * @param string $boundary boundary
     * @param string $lineEnd line terminator (defaults to "\r\n")
     * @return string multipart/form-data formatted string
     */
    public function toMultiPartFormData($boundary, $lineEnd = null)
    {
        $multipartFormData = '';
        // Bug fix: passing null to strlen() is deprecated as of PHP 8.1;
        // check for a missing/empty terminator explicitly instead.
        if (($lineEnd === null) || ($lineEnd === '')) {
            $lineEnd = "\r\n";
        }
        foreach ($this->data as $key => $values) {
            $escapedKey = str_replace('"', '\\"', $key);
            foreach ($values as $value) {
                $multipartFormData .= '--' . $boundary . $lineEnd;
                $multipartFormData .= 'Content-Disposition: form-data; name="' . $escapedKey . '"' . $lineEnd;
                $multipartFormData .= $lineEnd;
                $multipartFormData .= $value . $lineEnd;
            }
        }
        return $multipartFormData;
    }

    /**
     * @return array single values flattened, multi-values kept as lists
     */
    public function toArray()
    {
        $dst = [];
        foreach ($this->data as $key => $values) {
            if (empty($values)) {
                continue;
            }
            if (count($values) >= 2) {
                $dst[$key] = $values;
            } else {
                $dst[$key] = $values[0];
            }
        }
        return $dst;
    }

    /**
     * Builds a JSON representation of the data.
     *
     * @return string JSON string
     */
    public function toJson()
    {
        return json_encode($this->toArray());
    }

    /**
     * @param string $key key
     * @return bool true if the key exists, false otherwise
     */
    public function has($key)
    {
        return isset($this->data[$key]);
    }

    /**
     * Returns the list of keys.
     *
     * @return string[] keys
     */
    public function keys()
    {
        return array_keys($this->data);
    }

    /**
     * Returns the size (number of distinct keys).
     *
     * @return int number of keys
     */
    public function count()
    {
        return count($this->data);
    }

    /**
     * @return bool true when empty (size 0), false otherwise
     * @see hasAny()
     */
    public function isEmpty()
    {
        return ($this->count() == 0);
    }

    /**
     * @return bool true when not empty, false otherwise
     * @see isEmpty()
     */
    public function hasAny()
    {
        return !($this->isEmpty());
    }

    /**
     * Returns the values stored under a key.
     *
     * @param string $key key
     * @return string[] values for the key (empty when absent)
     * @throws \InvalidArgumentException when the key is empty
     */
    public function get($key)
    {
        $key = static::validateKey($key);
        if (isset($this->data[$key])) {
            return $this->data[$key];
        } else {
            return [];
        }
    }

    /**
     * @param string $key key
     * @param string $glue separator joining multiple values
     * @return string joined values ('' when absent)
     */
    public function getString($key, $glue = '')
    {
        if (isset($this->data[$key])) {
            return join($glue, $this->data[$key]);
        } else {
            return '';
        }
    }

    /**
     * @param string $key key
     * @return string first value for the key ('' when absent)
     */
    public function getFirst($key)
    {
        if (isset($this->data[$key], $this->data[$key][0])) {
            return $this->data[$key][0];
        } else {
            return '';
        }
    }

    /**
     * Sets the value(s) for a key, overwriting any existing values.
     *
     * @param string $key key
     * @param string|string[] $values value(s) for the key
     * @throws \InvalidArgumentException when the key is empty
     */
    public function set($key, $values)
    {
        $key = static::validateKey($key);
        if (is_array($values)) {
            $this->data[$key] = $values;
        } else {
            $this->data[$key] = [$values];
        }
    }

    /**
     * Appends value(s) to a key without discarding existing values.
     *
     * @param string $key key
     * @param string|string[] $values value(s) to append
     * @return int number of values stored under the key afterwards
     * @throws \InvalidArgumentException when the key is empty
     */
    public function add($key, $values)
    {
        $key = static::validateKey($key);
        if (!(isset($this->data[$key]))) {
            $this->data[$key] = [];
        }
        if (is_array($values)) {
            $this->data[$key] = array_merge($this->data[$key], $values);
        } else {
            $this->data[$key][] = $values;
        }
        return count($this->data[$key]);
    }

    /**
     * Removes all values stored under a key.
     *
     * @param string $key key
     * @return mixed[] the removed values (empty when the key was absent)
     * @throws \InvalidArgumentException when the key is empty
     */
    public function delete($key)
    {
        $key = static::validateKey($key);
        $deleted = [];
        if (isset($this->data[$key])) {
            $deleted = $this->data[$key];
            unset($this->data[$key]);
        }
        return $deleted;
    }

    /**
     * Merges data in; existing keys are overwritten by the argument's values.
     *
     * @param array|self $parameters
     * @return int size after merging
     * @throws \InvalidArgumentException when the argument has an invalid type
     */
    public function merge($parameters)
    {
        if ($parameters instanceof self) {
            foreach ($parameters->data as $key => $values) {
                $this->set($key, $values);
            }
        } elseif (is_array($parameters)) {
            foreach ($parameters as $key => $values) {
                $this->set($key, $values);
            }
        } else {
            throw new \InvalidArgumentException('invalid data type.');
        }
        return $this->count();
    }

    /**
     * Appends data; existing values under the same keys are kept.
     *
     * @param array|self $parameters
     * @return int size after appending
     * @throws \InvalidArgumentException when the argument has an invalid type
     */
    public function append($parameters)
    {
        if ($parameters instanceof self) {
            foreach ($parameters->data as $key => $values) {
                $this->add($key, $values);
            }
        } elseif (is_array($parameters)) {
            foreach ($parameters as $key => $values) {
                $this->add($key, $values);
            }
        } else {
            throw new \InvalidArgumentException('invalid data type.');
        }
        return $this->count();
    }

    /**
     * @param string $queryString QUERY_STRING-formatted string
     *   Note: PHP's proprietary "key[]" convention is (deliberately) NOT
     *   interpreted (e.g. "key[]=value" parses as ['key[]' => ['value']]).
     * @return string[][] parsed data
     */
    public static function parse($queryString)
    {
        $parameters = [];
        $kvpairs = explode('&', $queryString);
        foreach ($kvpairs as $kvpair) {
            if (strlen($kvpair) < 1) {
                continue;
            }
            $delimiterPos = strpos($kvpair, '=');
            if ($delimiterPos === false) {
                // A bare token (no '=') is treated as a key with empty value.
                $key = urldecode($kvpair);
                $value = '';
            } else {
                $key = urldecode(substr($kvpair, 0, $delimiterPos));
                $value = urldecode(substr($kvpair, ($delimiterPos + 1)));
            }
            if (strlen($key) < 1) {
                continue;
            }
            if (!isset($parameters[$key])) {
                $parameters[$key] = [];
            }
            $parameters[$key][] = $value;
        }
        return $parameters;
    }

    /**
     * @param string[][]|string[] $parameters
     * @return string QUERY_STRING-formatted string (RFC 3986 percent-encoding)
     */
    public static function build(array $parameters)
    {
        $kvpairs = [];
        foreach ($parameters as $key => $values) {
            $encodedKey = rawurlencode($key);
            if (is_array($values)) {
                foreach ($values as $value) {
                    $kvpairs[] = $encodedKey . '=' . rawurlencode(strval($value));
                }
            } else {
                $kvpairs[] = $encodedKey . '=' . rawurlencode($values);
            }
        }
        return join('&', $kvpairs);
    }

    /**
     * Builds the normalized parameter string for an OAuth 1.0 base string:
     * keys (and values within a key) are sorted byte-wise.
     *
     * @param string[][] $parameters
     * @return string
     */
    public static function buildForOAuth1(array $parameters)
    {
        $kvpairs = [];
        $keys = array_keys($parameters);
        sort($keys, SORT_STRING);
        foreach ($keys as $key) {
            $encodedKey = rawurlencode($key);
            if (is_array($parameters[$key])) {
                $values = $parameters[$key];
                sort($values, SORT_STRING);
                foreach ($values as $value) {
                    $kvpairs[] = $encodedKey . '=' . rawurlencode(strval($value));
                }
            } else {
                $kvpairs[] = $encodedKey . '=' . rawurlencode($parameters[$key]);
            }
        }
        return join('&', $kvpairs);
    }

    /**
     * @param string $key
     * @return string $key
     * @throws \InvalidArgumentException if empty
     */
    protected static function validateKey($key)
    {
        $key = strval($key);
        if (strlen($key) < 1) {
            throw new \InvalidArgumentException('key is empty.');
        }
        return $key;
    }
}
|
import { Directive, ElementRef, HostListener, Input } from '@angular/core';
import { NgControl } from '@angular/forms';
/**
 * Restricts an `<input numbersOnly>` element to digit characters by
 * stripping every non-digit on each input event. When characters were
 * stripped, the input event is stopped so downstream listeners never
 * observe the rejected value.
 */
@Directive({
  // tslint:disable-next-line:directive-selector
  selector: 'input[numbersOnly]'
})
export class NumberDirective {
  constructor(private _el: ElementRef) { }

  @HostListener('input', ['$event']) onInputChange(event) {
    const initialValue = this._el.nativeElement.value;
    // Fix: `[^0-9]*` also matched the empty string at every position,
    // causing pointless empty replacements; `+` strips only real runs
    // of non-digits and yields the identical result.
    this._el.nativeElement.value = initialValue.replace(/[^0-9]+/g, '');
    if (initialValue !== this._el.nativeElement.value) {
      event.stopPropagation();
    }
  }
}
|
using System;
namespace LetsTrace.Util
{
/// <summary>
/// IClock implementation backed by the system clock (DateTime.UtcNow).
/// </summary>
public class Clock : IClock
{
/// <summary>Returns the current UTC time.</summary>
public DateTime UtcNow() => DateTime.UtcNow;
}
}
|
#!/bin/sh
# Signals the erjik wrapper to reload the given configuration file (--hup).
# Usage: $0 <config_filename>
if [ -z "$1" ]; then
echo "Usage: $0 <config_filename>" >&2
exit 1
fi
# Run as the dedicated "erjik" user; -n makes sudo fail immediately instead
# of prompting for a password in non-interactive contexts.
exec sudo -n -u erjik /usr/lib/erjik/erjik-wrapper --hup "$1"
|
#!/bin/bash
# Bootstraps a Python virtualenv (on first run) and applies the GoCD server
# configuration via configuration.py.

# Bug fix: previously a failed virtualenv creation or pip install was
# silently ignored and the script carried on; abort on the first failure.
set -e

export GO_SERVER_URL="http://localhost:8153"
export GO_SERVER_ADMIN_PASSWORD="admin"

# Create the virtualenv and install dependencies only on the first run.
if ! [ -d venv ] ; then
    virtualenv venv
    source venv/bin/activate
    pip install -r requirements.txt
fi

# Activate for every run (first run included; re-activation is harmless).
source venv/bin/activate
python configuration.py
|
module ThreeDragonAnte
  class Game
    class Event
      # Event payload emitted when a player is offered a choice. Records the
      # choice shown (the most recent one), the player it was offered to, and
      # how many choices are still pending.
      class ChoiceOffered < Event::Details
        attr_reader :player, :choice, :choices_pending

        def initialize(current_choices, to:)
          @player = to
          @choice = current_choices.last
          @choices_pending = current_choices.size
        end

        # Compact debug form: [player_id, :offered_choice, choice, :pending, n]
        def inspect
          [player.identifier, :offered_choice, choice, :pending, choices_pending].inspect
        end
      end
    end
  end
end
|
<?php
// Data-access object for the `departments` table.
class Departments{
// Database connection handle (PDO-style: prepare()/execute() are used).
private $conn;
// Department row properties mirroring the table's columns.
public $id;
// NOTE(review): "tittle" appears to mirror a misspelled DB column name
// (the query below orders by `tittle`) — confirm the schema before renaming.
public $tittle;
public $status;
// @param mixed $db open database connection
public function __construct($db){
$this->conn=$db;
}
// Executes a query for all active (status='1') departments ordered by title
// and returns the executed statement for the caller to fetch from.
public function getAllDepartments(){
//query
$query="SELECT *FROM departments WHERE status='1' ORDER BY tittle";
$stmt=$this->conn->prepare($query);
$stmt->execute();
return $stmt;
}
}
?>
|
// Landing-page controller: publishes app-wide paths on $rootScope and
// applies the short layout body class.
app.controller('IndexCtrl', function ($rootScope, $scope) {
$rootScope.baseUrl = _yii_app.baseUrl;
$rootScope.layoutPath = _yii_app.layoutPath;
$rootScope.bodyClass = 'short_main';
$scope.params = {};
});
// Custom-order page controller: loads page info and lets the user select a
// membership level from the available subscriptions.
app.controller('CustomOrderCtrl', function ($rootScope, $scope, $modal, $filter, ssHelper) {
  $rootScope.baseUrl = _yii_app.baseUrl;
  $rootScope.layoutPath = _yii_app.layoutPath;
  $rootScope.bodyClass = '';
  $scope.info = {};
  $scope.level = {};

  ssHelper.get("/page/customorder").then(function(response){
    $scope.info = response.data;
  });

  // Looks up the subscription with the given id and, when found, makes it
  // the currently selected level.
  $scope.getMemberLevelInfo = function(level_id){
    var f = _.findWhere($scope.info.allSubscription, {id: level_id});
    if(typeof f !== 'undefined'){
      $scope.level = f;
    }
  }

  // Change handler for the level selector. Previously duplicated the lookup
  // body; delegate so the logic lives in exactly one place.
  $scope.changeMemberLevel = function(){
    $scope.getMemberLevelInfo($scope.level_id);
  }
});
/** for the remote functionality, we have to make the remote that is able to control every screen, therefore
 * I keep modal controller as top parent scope to any children scope **/
// Opens the remote-control modal; the modal inherits this scope so remote
// signals can reach every demo screen controller.
app.controller('RemoteModalCtrl', function ($scope, $modal, $location, $route, $log) {
$scope.demo_items = ['Button 1', 'Button 2', 'Button 3'];
$scope.demo2_items = ['Button 1', 'Button 2', 'Button 3'];
// The remote buttons are only relevant on the two demo routes.
$scope.isShowButtons = function () {
return $location.path() == '/demo' || $location.path() == '/demo2' ;
};
// Opens the modal with this scope as parent; resolves demo_items into the
// modal's controller.
$scope.open = function (size) {
var modalInstance = $modal.open({
templateUrl: _yii_app.absTemplatePath + '/partials/modal-content-signup.html',
windowTemplateUrl: _yii_app.absTemplatePath + '/partials/modal-window-signup.html',
controller: 'ModalInstanceCtrl',
windowClass: 'app-modal-window',
size: size,
scope: $scope,
backdrop: false,
resolve: {
demo_items: function () {
return $scope.demo_items;
}
}
});
modalInstance.result.then(function (selectedItem) {
$scope.selected = selectedItem;
}, function () {
$log.info('Modal dismissed at: ' + new Date());
});
};
});
// Controller for the remote-control modal. Its buttons broadcast
// change-image signals that the demo screen controllers listen for.
app.controller('ModalInstanceCtrl', function ($scope, $modalInstance, demo_items, SignalService) {
  $scope.demo_items = demo_items;

  $scope.ok = function () {
    $modalInstance.close($scope.selected.item);
  };

  $scope.cancel = function () {
    $modalInstance.dismiss('cancel');
  };

  // Both demo screens use the same broadcast mechanism; only the signal
  // name differs, so share one helper instead of duplicating the body.
  function broadcastIndex(signalName, index) {
    SignalService.broadcast(signalName, index);
  }

  $scope.changeImage = function(index){
    broadcastIndex('remoteSignal_changeImage', index);
  }

  $scope.changeImageDemo2 = function(index){
    broadcastIndex('remoteSignal_changeImageDemo2', index);
  }
});
// Demo screen #1: displays one image from a fixed set; the visible image is
// switched remotely via the 'remoteSignal_changeImage' signal.
app.controller('DemoCtrl', function ($rootScope, $scope, SignalService) {
  $rootScope.baseUrl = _yii_app.baseUrl;
  $rootScope.layoutPath = _yii_app.layoutPath;
  $rootScope.bodyClass = '';

  $scope.images = [
    _yii_app.layoutPath + '/images/lotus-c-01-motorcycle-116.jpg',
    _yii_app.layoutPath + '/images/motorcycle-raid.jpg',
    _yii_app.layoutPath + '/images/business_people_2.png',
  ];
  $scope.displayIndex = 0;

  // Switch the displayed image when the remote broadcasts a new index.
  SignalService.listen('remoteSignal_changeImage', function(event, newIndex){
    $scope.displayIndex = newIndex;
  });
});
// Demo screen #2: mirrors DemoCtrl but with its own image set and its own
// remote signal ('remoteSignal_changeImageDemo2').
app.controller('Demo2Ctrl', function ($rootScope, $scope, SignalService) {
  $rootScope.baseUrl = _yii_app.baseUrl;
  $rootScope.layoutPath = _yii_app.layoutPath;
  $rootScope.bodyClass = '';

  $scope.images = [
    //_yii_app.layoutPath + '/images/music_magoc.jpg',
    _yii_app.layoutPath + '/images/teamwork1.jpg',
    _yii_app.layoutPath + '/images/music-work-shop.jpg'
  ];
  $scope.displayIndex = 0;

  // Switch the displayed image when the remote broadcasts a new index.
  SignalService.listen('remoteSignal_changeImageDemo2', function(event, newIndex){
    $scope.displayIndex = newIndex;
  });
});
|
package github
import (
"context"
"errors"
"fmt"
"github.com/google/go-github/github"
"golang.org/x/oauth2"
"github.com/seabird-chat/seabird-go"
"github.com/seabird-chat/seabird-go/pb"
)
// This is really just a holding type for some configuration.
// Repo identifies one GitHub repository by owner and name.
type Repo struct {
Owner string
Name string
}

// Client couples a seabird chat client with an authenticated GitHub API
// client and the set of repositories (keyed by shorthand) it may act on.
type Client struct {
*seabird.Client

api *github.Client
repos map[string]Repo
}
// NewClient connects to seabird-core and builds a GitHub API client
// authenticated with the given token. repos maps shorthand names to the
// repositories this plugin is allowed to operate on.
func NewClient(seabirdCoreUrl, seabirdCoreToken, githubToken string, repos map[string]Repo) (*Client, error) {
	seabirdClient, err := seabird.NewClient(seabirdCoreUrl, seabirdCoreToken)
	if err != nil {
		return nil, err
	}

	// GitHub API calls authenticate through an oauth2 token source.
	tokenSource := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: githubToken})
	httpClient := oauth2.NewClient(context.TODO(), tokenSource)

	return &Client{
		Client: seabirdClient,
		api:    github.NewClient(httpClient),
		repos:  repos,
	}, nil
}
// replyf sends a formatted message back to the source channel, prefixed
// with the requesting user's display name.
// NOTE(review): the "url/skip" tag presumably suppresses URL handling for
// this message downstream — confirm against the consumer.
func (c *Client) replyf(source *pb.ChannelSource, format string, args ...interface{}) error {
	// Format the body first, then prepend the addressee prefix.
	body := fmt.Sprintf(format, args...)
	msg := fmt.Sprintf("%s: %s", source.GetUser().GetDisplayName(), body)

	_, err := c.Client.Inner.SendMessage(context.TODO(), &pb.SendMessageRequest{
		ChannelId: source.ChannelId,
		Text:      msg,
		Tags:      map[string]string{"url/skip": "1"},
	})
	return err
}
// Run subscribes to seabird command events and dispatches the "issue"
// command until the event stream closes. It always returns a non-nil error.
func (c *Client) Run() error {
	events, err := c.StreamEvents(map[string]*pb.CommandMetadata{
		"issue": {
			Name: "issue",
			// Bug fix: "asignee" was misspelled in the user-visible help text.
			ShortHelp: "[#tag ][@assignee ]<issue title>",
			FullHelp:  "Creates a new seabird issue on GitHub",
		},
	})
	if err != nil {
		return err
	}
	defer events.Close()

	for event := range events.C {
		switch v := event.GetInner().(type) {
		case *pb.Event_Command:
			if v.Command.Command == "issue" {
				c.issueCallback(v.Command.Source, v.Command)
			}
		}
	}

	return errors.New("event stream closed")
}
|
package com.honeybee.controller;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.util.FileCopyUtils;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
import com.honeybee.domain.AttachFileDTO;
import com.honeybee.domain.CodeTableVO;
import com.honeybee.domain.FreeReplyVO;
import com.honeybee.domain.FreeVO;
import com.honeybee.domain.HopeVO;
import com.honeybee.domain.MeetVO;
import com.honeybee.domain.MsgVO;
import com.honeybee.domain.ReplyVO;
import com.honeybee.domain.UserVO;
import com.honeybee.service.CodeTableService;
import com.honeybee.service.EnrollListService;
import com.honeybee.service.FreeReplyService;
import com.honeybee.service.FreeService;
import com.honeybee.service.HopeService;
import com.honeybee.service.MeetReplyService;
import com.honeybee.service.MeetService;
import com.honeybee.service.SubscribeService;
import com.honeybee.service.ThumbService;
import com.honeybee.service.UserService;
import com.honeybee.service.MsgService;
import lombok.AllArgsConstructor;
import lombok.extern.log4j.Log4j;
import net.coobird.thumbnailator.Thumbnailator;
/**
 * Controller for the "/mypage" area: profile, posts, replies, direct
 * messages, uploads and related views.
 * NOTE(review): many handlers hard-code the id "HOHO995@naver.com" instead
 * of using the logged-in user — this looks like debug scaffolding; replace
 * with the session user before release.
 */
@Controller
@RequestMapping("/mypage/*")
@Log4j
@AllArgsConstructor
public class MypageController {
// Collaborating services, injected via Lombok's @AllArgsConstructor.
private UserService service;
private MeetService mservice;
private FreeService fservice;
private EnrollListService eservice;
private ThumbService tservice;
private SubscribeService sservice;
private MsgService msgservice;
private FreeReplyService frservice;
private CodeTableService cservice;
private MeetReplyService mrservice;
private HopeService hservice;
/**
 * Shows the current user's free-board posts.
 * NOTE(review): user id is hard-coded; the commented-out line shows the
 * intended behavior of using the `id` parameter.
 */
@GetMapping("/posted")
public void posted(Model model, String id) {
log.info("posted");
model.addAttribute("list", fservice.getMyList("HOHO995@naver.com"));
// model.addAttribute("list", fservice.getMyList(id)); // fetch the list for the given id
}

/**
 * Shows the current user's sent messages.
 * NOTE(review): hard-coded user id — should come from the session.
 */
@GetMapping("/sendmsg")
public void getsendmsg(Model model) {
model.addAttribute("sendmsg", msgservice.getsendList("HOHO995@naver.com"));
}
/**
 * Shows the current user's replies on free-board posts, with a status label
 * per reply telling whether the parent post still exists.
 * NOTE(review): user id is hard-coded — should come from the session.
 */
@GetMapping("/freply")
public void freply(Model model) {
    List<FreeReplyVO> replies = frservice.getfreereplystatus("HOHO995@naver.com");
    List<String> statusLabels = new ArrayList<>();
    // A null deletion date means the parent post is still visible.
    for (FreeReplyVO reply : replies) {
        statusLabels.add(reply.getDeldt() == null ? "원글 보기▶" : "삭제된 글");
    }
    model.addAttribute("replylist", frservice.readmyfreereply("HOHO995@naver.com"));
    model.addAttribute("replystatus", statusLabels);
}
/**
 * Shows the current user's replies on meet-board posts, with a status label
 * per reply telling whether the parent post still exists.
 * NOTE(review): user id is hard-coded — should come from the session.
 */
@GetMapping("/mreply")
public void mreply(Model model) {
    List<ReplyVO> replies = mrservice.getmeetreplystatus("HOHO995@naver.com");
    List<String> statusLabels = new ArrayList<>();
    // Bug fix: the previous debug log unconditionally read replies.get(0)
    // and threw IndexOutOfBoundsException when the user had no replies.
    if (!replies.isEmpty()) {
        log.info(replies.get(0));
    }
    // A null deletion date means the parent post is still visible.
    for (ReplyVO reply : replies) {
        statusLabels.add(reply.getDelDt() == null ? "원글 보기▶" : "삭제된 글");
    }
    model.addAttribute("replylist", mrservice.readmymeetreply("HOHO995@naver.com"));
    model.addAttribute("replystatus", statusLabels);
}
/**
 * Registration-center view: the user's meets, nickname, and category codes.
 * NOTE(review): hard-coded user id — should come from the session.
 */
@GetMapping("/regCenter")
public void regCenter(Model model, HttpServletRequest request) {
model.addAttribute("meet", mservice.getListTest("HOHO995@naver.com"));
model.addAttribute("nick", mservice.getNick("HOHO995@naver.com"));
model.addAttribute("code", cservice.getCatList());
}

/**
 * My-page home: aggregates the user's meets, profile, enrollments, likes,
 * subscriptions and profile image.
 * NOTE(review): hard-coded user id — should come from the session.
 */
@GetMapping("/home")
public void home(Model model) {
model.addAttribute("meet", mservice.getListTest("HOHO995@naver.com"));
model.addAttribute("user", service.getMyList("HOHO995@naver.com"));
model.addAttribute("enrollStatus", eservice.getEnrollStatus("HOHO995@naver.com"));
model.addAttribute("enrollTitle", eservice.getEnrollTitle("HOHO995@naver.com"));
model.addAttribute("thumbList", tservice.getThumbList("HOHO995@naver.com"));
model.addAttribute("thumbRegDate", tservice.getThumbRegDate("HOHO995@naver.com"));
model.addAttribute("subscribeList", sservice.getSubscribeList("HOHO995@naver.com"));
model.addAttribute("getimg", service.getimg("HOHO995@naver.com"));
}

/**
 * Registers the user and forwards to the modify view.
 * NOTE(review): flash attributes are only carried across a redirect, but
 * this handler returns a view name directly — confirm intended behavior.
 */
@RequestMapping("/register")
public String register(UserVO user, RedirectAttributes rttr) {
service.register(user);
rttr.addFlashAttribute("result", user.getCid());
return "mypage/modify";
}

/**
 * Loads a single user by id into the model.
 */
@GetMapping("/get")
public void get(@RequestParam("id") String id, Model model) {
model.addAttribute("user", service.get(id));
}
/**
 * Deletes the free-board posts whose "mypostcheck" checkboxes were selected,
 * then returns to the my-page home.
 */
@PostMapping("/mypostdelete")
public String mypostdelete(Model model, HttpServletRequest request) {
    String[] checked = request.getParameterValues("mypostcheck");
    // Bug fix: getParameterValues returns null when nothing is checked,
    // which previously caused a NullPointerException.
    if (checked != null) {
        for (String postId : checked) {
            fservice.mypostremove(postId);
        }
    }
    return "redirect:/mypage/home";
}
/**
 * Removes the user account with the given id, flagging success for the view.
 */
@PostMapping("/remove")
public String remove(@RequestParam("id") String id, RedirectAttributes rttr) {
if (service.remove(id)) {
rttr.addFlashAttribute("result", "success");
}
return "redirect:/mypage/home";
}

/**
 * Shows the current user's received messages.
 * NOTE(review): hard-coded user id — should come from the session.
 */
@GetMapping("/rcvmsg")
public void getrcvmsg(Model model) {
model.addAttribute("rcvmsg", msgservice.getrcvList("HOHO995@naver.com"));
}
/**
 * Deletes the received messages whose "rcvmsgcheck" checkboxes were
 * selected, then returns to the my-page home.
 */
@PostMapping("/rcvmsgdelete")
public String rcvmsgdelete(Model model, HttpServletRequest request) {
    String[] checked = request.getParameterValues("rcvmsgcheck");
    // Bug fix: getParameterValues returns null when nothing is checked,
    // which previously caused a NullPointerException.
    if (checked != null) {
        for (String msgId : checked) {
            msgservice.rcvmsgremove(msgId);
        }
    }
    return "redirect:/mypage/home";
}
/**
 * Deletes the sent messages whose "sendmsgcheck" checkboxes were selected,
 * then returns to the my-page home.
 */
@PostMapping("/sendmsgdelete")
public String sendmsgdelete(Model model, HttpServletRequest request) {
    String[] checked = request.getParameterValues("sendmsgcheck");
    // Bug fix: getParameterValues returns null when nothing is checked,
    // which previously caused a NullPointerException.
    if (checked != null) {
        for (String msgId : checked) {
            msgservice.sendmsgremove(msgId);
        }
    }
    return "redirect:/mypage/home";
}
/**
 * Renders the password-confirmation view.
 */
@GetMapping("/pwdcheck")
public void pwdcheck(Model model) {
log.info("발신함~~~~~~~~~~~~~~~~");
}

/**
 * Sends a direct message from the current user to the given receiver.
 * NOTE(review): sender id is hard-coded — should come from the session.
 */
@PostMapping("/sendmsgtest")
public String sendmsg(Model model, HttpServletRequest request, MsgVO msg) {
String receiver = request.getParameter("receiver");
String content = request.getParameter("msgcontent");
msg.setId("HOHO995@naver.com");
msg.setId2(receiver);
msg.setContent(content);
msgservice.sendmsg(msg);
return "redirect:/mypage/sendmsg";
}
// @ResponseBody
// @RequestMapping(value = "/modify", method = RequestMethod.POST)
/**
 * Profile-modification view: current profile, categories, regions, hopes.
 * NOTE(review): hard-coded user id — should come from the session.
 */
@GetMapping("/modify")
public void modify(Model model) {
model.addAttribute("user", service.getMyList("HOHO995@naver.com"));
model.addAttribute("cat", cservice.getCatList());
model.addAttribute("upper", cservice.upperregion());
model.addAttribute("hope", hservice.getList("HOHO995@naver.com"));
}

/**
 * Redirects to a meet detail page.
 * NOTE(review): the meet number is hard-coded (mno=32) and the checked
 * values are read but unused — looks like unfinished code; confirm.
 */
@GetMapping("/readcontent")
public String readcontent(Model model, HttpServletRequest request) {
String[] readtest = request.getParameterValues("mypostcheck");
return "redirect:/meet/get?mno=32";
}

/**
 * Re-renders the registration center filtered by the given category id.
 */
@GetMapping("/redirecttest")
public String redirecttest(Model model, String cid) {
model.addAttribute("meet", mservice.getListWithCategory(cid));
model.addAttribute("nick", mservice.getNick("HOHO995@naver.com"));
model.addAttribute("code", cservice.getCatList());
return "/mypage/regCenter";
}

/**
 * Renders the form-based upload view.
 */
@GetMapping("/uploadForm")
public void uploadForm() {
log.info("upload form");
}

/**
 * Saves each uploaded file under C:\upload using its original file name.
 * NOTE(review): the upload folder is hard-coded and the client-supplied
 * file name is used as-is — consider sanitizing to avoid path traversal.
 */
@PostMapping("/uploadFormAction")
public void uploadFormPost(MultipartFile[] uploadFile, Model model) {
String uploadFolder = "C:\\upload";
for (MultipartFile multipartFile : uploadFile) {
log.info("---------------------------------");
log.info("Upload File Name : " + multipartFile.getOriginalFilename());
log.info("upload File Size : " + multipartFile.getSize());
File saveFile = new File(uploadFolder, multipartFile.getOriginalFilename());
try {
multipartFile.transferTo(saveFile);
} catch (Exception e) {
log.error(e.getMessage());
}
}
}

/**
 * Renders the AJAX upload view.
 */
@GetMapping("/uploadAjax")
public void uploadAjax() {
log.info("upload ajax");
}
/**
 * Deletes the checked ("myreplycheck") free-board replies by reply number.
 */
@PostMapping("/freplydelete")
public void freplydelete(HttpServletRequest request, Long frno) {
    String[] checked = request.getParameterValues("myreplycheck");
    // Bug fix: getParameterValues returns null when nothing is checked,
    // which previously caused a NullPointerException.
    if (checked != null) {
        for (String replyNo : checked) {
            frservice.freplyremove(Long.parseLong(replyNo));
        }
    }
}
/**
 * Deletes the checked ("myreplycheck") meet-board replies by reply number.
 */
@PostMapping("/mreplydelete")
public void mreplydelete(HttpServletRequest request, Long mrno) {
    String[] checked = request.getParameterValues("myreplycheck");
    // Bug fix: getParameterValues returns null when nothing is checked,
    // which previously caused a NullPointerException (the old debug logging
    // has been removed).
    if (checked != null) {
        for (String replyNo : checked) {
            mrservice.mreplyremove(Long.parseLong(replyNo));
        }
    }
}
/**
 * Builds today's date as a nested folder path ("yyyy<sep>MM<sep>dd") using
 * the platform file separator.
 */
private String getFolder() {
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
    String today = dateFormat.format(new Date());
    return today.replace("-", File.separator);
}
/**
 * Returns true when the file's probed MIME type is an image; false on an
 * I/O error or when the type cannot be determined.
 */
private boolean checkImageType(File file) {
    try {
        String contentType = Files.probeContentType(file.toPath());
        // Bug fix: probeContentType may return null when the type cannot be
        // determined, which previously caused a NullPointerException.
        return contentType != null && contentType.startsWith("image");
    } catch (IOException e) {
        e.printStackTrace();
    }
    return false;
}
/**
 * Streams a file from C:\upload as the response body with a probed
 * Content-Type header (used to display uploaded images).
 * NOTE(review): `fileName` is concatenated into the path unchecked — path
 * traversal risk; returns null (HTTP 500 via Spring) on I/O failure.
 */
@GetMapping("/display")
@ResponseBody
public ResponseEntity<byte[]> getFile(String fileName, Model model) {
log.info("fileName: " + fileName);
File file = new File("c:\\upload\\" + fileName);
log.info("file: " + file);
ResponseEntity<byte[]> result = null;
try {
HttpHeaders header = new HttpHeaders();
header.add("Content-Type", Files.probeContentType(file.toPath()));
result = new ResponseEntity<>(FileCopyUtils.copyToByteArray(file), header, HttpStatus.OK);
} catch (IOException e) {
e.printStackTrace();
}
return result;
}
/**
 * AJAX profile-image upload: stores each file under C:\upload\yyyy\MM\dd,
 * records the new image name on the user row, writes a 100x100 thumbnail for
 * images, and returns metadata for every stored file as JSON.
 * NOTE(review): the user id "HOHO995@naver.com" is hard-coded — presumably a
 * placeholder for the logged-in principal; confirm before production use.
 */
@PostMapping(value = "/uploadAjaxAction", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
@ResponseBody
public ResponseEntity<List<AttachFileDTO>> uploadAjaxPost(MultipartFile[] uploadFile, Model model) {
    List<AttachFileDTO> list = new ArrayList<>();
    String uploadFolder = "C:\\upload";
    String uploadFolderPath = getFolder();

    // make Folder----------
    File uploadPath = new File(uploadFolder, uploadFolderPath);
    if (uploadPath.exists() == false) {
        uploadPath.mkdirs();
    }

    for (MultipartFile multipartFile : uploadFile) {
        AttachFileDTO attachDTO = new AttachFileDTO();

        // Stored name is "<user id>.<original extension>".
        String extension = multipartFile.getOriginalFilename()
            .substring(multipartFile.getOriginalFilename().indexOf(".") + 1);
        String uploadFileName = "HOHO995@naver.com" + "." + extension; // TODO: put the logged-in user's id here
        // String path = uploadFolderPath.replace("\\", "/") + "/";
        uploadFileName = uploadFileName.substring(uploadFileName.lastIndexOf("\\") + 1);
        attachDTO.setFileName(uploadFileName);

        // Persist the new image file name on the user record.
        UserVO uvo = new UserVO();
        uvo.setId("HOHO995@naver.com");
        uvo.setImg(uploadFileName);
        service.updateimg(uvo);

        try {
            File saveFile = new File(uploadPath, uploadFileName);
            multipartFile.transferTo(saveFile);
            attachDTO.setUploadPath(uploadFolderPath);

            // For image uploads, also write a thumbnail prefixed with "s_".
            if (checkImageType(saveFile)) {
                attachDTO.setImage(true);
                FileOutputStream thumbnail = new FileOutputStream(new File(uploadPath, "s_" + uploadFileName));
                Thumbnailator.createThumbnail(multipartFile.getInputStream(), thumbnail, 100, 100);
                thumbnail.close();
            }
            list.add(attachDTO);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return new ResponseEntity<>(list, HttpStatus.OK);
}
/**
 * Updates the user's profile (gender, birth date) and rewrites the user's
 * interest rows: all existing "hope" rows are deleted, then one row is
 * inserted per selected category and per selected region.
 * NOTE(review): the user id "HOHO995@naver.com" is hard-coded — presumably a
 * placeholder for the logged-in principal; confirm before production use.
 */
@PostMapping("/myinfomodify") // edit my info
public String myinfomodify(HttpServletRequest request, Model model, UserVO uvo, HopeVO hvo) {
    int gender = Integer.parseInt(request.getParameter("gender"));
    String year = request.getParameter("year");
    String month = request.getParameter("month");
    String day = request.getParameter("day"); // date of birth
    // Categories and regions arrive as single comma-separated strings.
    String forSplitReg = request.getParameter("region");
    String forSplitCat = request.getParameter("category");
    String[] reg = forSplitReg.split(",");
    String[] cat = forSplitCat.split(",");

    uvo.setSex(gender);
    uvo.setBirth(year + month + day);
    uvo.setId("HOHO995@naver.com");
    service.infomodify(uvo);

    // Replace all interest rows: delete first, then re-insert the new selection.
    hservice.delete("HOHO995@naver.com");
    hvo.setId("HOHO995@naver.com");
    for (int i = 0; i < cat.length; i++) {
        hvo.setCid(hservice.cidSearch(cat[i]));
        log.info("set된 값 체크입니다~~" + hvo.getCid());
        hservice.hopeInsert(hvo);
    } // insert the selected interest categories into the db
    for (int i = 0; i < reg.length; i++) {
        hvo.setCid(hservice.cidSearch(reg[i]));
        log.info("set된 지역 체크입니다~~~~" + hvo.getCid());
        hservice.hopeInsert(hvo);
    } // insert the selected interest regions into the db
    return "redirect:/mypage/modify";
}
/**
 * Updates the user's nickname and returns the nickname re-read from the database.
 * NOTE(review): this handler returns a plain String without @ResponseBody, so
 * Spring will interpret the returned nickname as a VIEW NAME rather than a
 * response body — confirm whether a @ResponseBody annotation is missing here.
 * NOTE(review): the user id "HOHO995@naver.com" is hard-coded — confirm it
 * should come from the logged-in principal instead.
 */
@PostMapping("/nickmodify") // edit nickname
public String nickmodify(HttpServletRequest request, Model model, UserVO uvo) {
    String nick = request.getParameter("nick");
    uvo.setNick(nick);
    uvo.setId("HOHO995@naver.com");
    service.nickmodify(uvo);
    // Re-read from the DB so the caller sees the persisted value.
    String afternick = service.getMyList("HOHO995@naver.com").getNick();
    return afternick;
}
/**
 * Nickname-availability check used by the signup/modify forms.
 * Fix: the original queried the service twice (once for the log, once for the
 * return value); now the result is fetched once and reused.
 */
@ResponseBody
@RequestMapping(value = "/nickChk", method = RequestMethod.POST)
public int nickChk(String nick) {
    int result = service.nickChk(nick);
    log.info(result);
    return result;
}
// Returns the detail-region code rows for the given parent code id as JSON.
@ResponseBody
@RequestMapping(value = "/detailregion", method = RequestMethod.POST)
public List<CodeTableVO> detailregion(String cid) {
    return cservice.detailregion(cid);
}
}
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# See the Spack documentation for more information on packaging.
from spack.package import *
class Repeatscout(MakefilePackage):
    """RepeatScout - De Novo Repeat Finder, Price A.L., Jones N.C. and Pevzner
    P.A."""

    homepage = "https://www.repeatmasker.org/RepeatModeler/"
    url = "https://www.repeatmasker.org/RepeatScout-1.0.5.tar.gz"

    version('1.0.5', sha256='bda6f782382f2b7dcb6a004b7da586d5046b3c12429b158e24787be62de6199c')

    # Declared run-time dependencies; none are needed at build time.
    depends_on('perl', type='run')
    depends_on('trf', type='run')
    depends_on('nseg', type='run')

    def edit(self, spec, prefix):
        # Point the Makefile's install directory at this package's bin prefix
        # before the default Makefile build/install steps run.
        filter_file('^INSTDIR.*$', 'INSTDIR=%s' % prefix.bin, 'Makefile')
|
using System;
using System.Collections;
namespace Microsoft.AnalysisServices.AdomdClient
{
    /// <summary>
    /// A non-generic, enumerable collection of data sets, one per axis,
    /// addressed by zero-based index.
    /// </summary>
    internal interface IDSFAxisCollection : ICollection, IEnumerable
    {
        /// <summary>Gets the data set for the axis at the given index.</summary>
        IDSFDataSet this[int index]
        {
            get;
        }
    }
}
|
// Unit tests for getCurrentWeatherByCityName from this directory's index module.
// NOTE(review): the underlying weather API is presumably mocked elsewhere
// (jest setup / __mocks__) — confirm where the mock lives.
import { getCurrentWeatherByCityName } from '.';

// Reset mock call history between tests so assertions don't leak across cases.
afterEach(() => {
  jest.clearAllMocks();
});

// A known location resolves to the expected temperature summary.
test('test getCurrentWeatherByCityName with valid location', async () => {
  expect(await getCurrentWeatherByCityName('US', 'California', 'Los Angeles')).toStrictEqual({
    temperature: 72,
    minimumTemperature: 60,
    maximumTemperature: 80
  });
});

// An unknown city rejects with a "city not found" error.
test('test getCurrentWeatherByCityName with invalid location', async () => {
  await expect(getCurrentWeatherByCityName('US', 'California', 'invalidCityName')).rejects.toThrow(/city not found/);
});
|
// Public API surface of the date-picker library: re-exports each component
// and directive, plus the Angular module that declares them.
export { DatePickerComponent } from './date-picker/date-picker.component';
export { DatePickerDirective } from './date-picker/date-picker.directive';
export { DayCalendarComponent } from './day-calendar/day-calendar.component';
export { DayTimeCalendarComponent } from './day-time-calendar/day-time-calendar.component';
export { TimeSelectComponent } from './time-select/time-select.component';
export { MonthCalendarComponent } from './month-calendar/month-calendar.component';

// NgModule bundling the components above (declaration only; implementation is compiled separately).
export declare class DpDatePickerModule {
}
|
# Factory for Order records used in specs; text is a random Russian first name.
FactoryGirl.define do
  factory :order do
    text { FFaker::NameRU.first_name }
    alphabeth_code :ру
    language_code :ру
    order 'св'
  end
end
|
(ns sixsq.nuvla.db.binding)
(defprotocol Binding
  "This protocol defines the core interface to the underlying database.
   All of the functions accept and return native clojure data structures.
   The functions must handle all necessary conversions for the database.
   For those functions that have a data argument, the id of the document
   is taken from the value of the :id attribute in the data.
   On errors, the functions must throw an ex-info with an error ring
   response. This simplifies the logic and code of the client using this
   protocol."

  (initialize
    [this collection-id options]
    "This function initializes the given collection in the database.")

  (add
    [this data options]
    [this collection-id data options]
    "This function adds the given resource to the database. The resource
     must not already exist in the database.
     The older 4-argument function that includes the collection-id is
     deprecated. The collection id is included in the document id, so
     the separate argument isn't needed.
     On success, the function must return a 201 ring response with the
     relative URL of the new resource as the Location.
     On failure, the function must throw an ex-info containing the error
     ring response. The error must be 409 (conflict) if the resource
     exists already. Other appropriate error codes can also be thrown.")

  (retrieve
    [this id options]
    "This function retrieves the identified resource from the database.
     On success, this returns the clojure map representation of the
     resource. The response must not be embedded in a ring response.
     On failure, this function must throw an ex-info containing the error
     ring response. If the resource doesn't exist, use a 404 status.")

  (edit
    [this data options]
    "This function updates (edits) the given resource in the database.
     The resource must already exist in the database.
     On success, the function returns the data stored in the database.
     This must NOT be embedded in a ring response.
     On failure, the function must throw an ex-info containing the error
     ring response. The error must be 404 (not-found) if the resource
     does not exist. Other appropriate error codes can also be thrown.")

  (delete
    [this data options]
    "This function removes the given resource in the database. Note that
     you can remove a document by id by providing data of the form:
     {:id \"collection/uuid\"}.
     On success, the function must return a 200 ring response with a map
     containing status, message, and resource ID.
     On failure, the function must throw an ex-info containing the error
     ring response. If the resource does not exist, then a 404 response
     should be returned. Other appropriate error codes can also be thrown.")

  (query
    [this collection-id options]
    "This function returns metadata and resources, where the collection-id
     corresponds to the name of a Collection.
     On success, the function must return a two-element tuple. The first
     element is metadata concerning the query (usually with the count and
     aggregations). The second element is a list of the returned resources.
     This list may possibly be empty. The list must not be embedded in a ring
     response.
     On failure, the function must throw an ex-info containing the error
     ring response. If the resource-id does not correspond to a Collection,
     then a 400 (bad-request) response must be returned. Other appropriate
     error codes can also be thrown.")

  (bulk-delete
    [this collection-id options]
    "This function removes the given resources in the database where the
     collection-id corresponds to the name of a Collection.
     On success, the function must return a summary map of what was done
     in the database.
     On failure, the function must throw an ex-info containing the error
     ring response. If the resource-id does not correspond to a Collection,
     then a 400 (bad-request) response must be returned. Other appropriate
     error codes can also be thrown."))
|
/*
* Copyright 2010-2021 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.codegen.inline
import org.jetbrains.kotlin.codegen.optimization.nullCheck.isParameterCheckedForNull
import org.jetbrains.kotlin.resolve.jvm.AsmTypes
import org.jetbrains.org.objectweb.asm.Opcodes
import org.jetbrains.org.objectweb.asm.Type
import org.jetbrains.org.objectweb.asm.commons.InstructionAdapter
import org.jetbrains.org.objectweb.asm.tree.*
/**
 * Decides whether the arguments of an inline function call can be inlined
 * in-place (i.e. the argument-evaluating bytecode substituted directly for the
 * parameter-variable loads inside [methodNode]'s body) without changing the
 * observable evaluation order.
 *
 * Returns false as soon as anything that could break evaluation order is seen:
 * entering a try-catch block, jumps/calls/throwing instructions, writes to
 * argument variables, or argument variables loaded out of order.
 */
fun canInlineArgumentsInPlace(methodNode: MethodNode): Boolean {
    // Usual inline functions are inlined in the following way:
    //      <evaluate argument #1>
    //      <store argument to an argument variable V1>
    //      ...
    //      <evaluate argument #N>
    //      <store argument to an argument variable VN>
    //      <inline function method body with parameter variables Pi remapped to argument variables Vi>
    // If an argument #k is already stored in a local variable W, this variable W is reused.
    // When inlining arguments in-place, we instead replace corresponding variable load instructions in the inline function method body
    // with bytecode for evaluating a given argument.
    // We can do so if such transformation keeps the evaluation order intact, possibly disregarding class initialization.

    val tcbStartLabels = methodNode.tryCatchBlocks.mapTo(HashSet()) { it.start }

    val methodParameterTypes = Type.getArgumentTypes(methodNode.desc)
    val jvmArgumentTypes = ArrayList<Type>(methodParameterTypes.size + 1)
    if (methodNode.access and Opcodes.ACC_STATIC == 0) {
        // Here we don't care much about the exact 'this' type,
        // it's only important to remember that variable slot #0 holds an object reference.
        jvmArgumentTypes.add(AsmTypes.OBJECT_TYPE)
    }
    jvmArgumentTypes.addAll(methodParameterTypes)
    // First local-variable slot past the arguments (longs/doubles occupy two slots).
    val argumentVarEnd = jvmArgumentTypes.sumOf { it.size }

    var expectedArgumentVar = 0
    var lastArgIndex = 0
    var insn = methodNode.instructions.first

    // During arguments evaluation, make sure that all arguments are loaded in expected order
    // and there are no unexpected side effects in-between.
    while (insn != null && expectedArgumentVar < argumentVarEnd) {
        // Entering a try-catch block before all arguments are loaded breaks evaluation order.
        if (insn in tcbStartLabels)
            return false

        // Some instructions break evaluation order.
        if (insn.isProhibitedDuringArgumentsEvaluation())
            return false

        // Allow a limited list of 'GETSTATIC <owner> <name> <desc>' instructions.
        if (insn.opcode == Opcodes.GETSTATIC) {
            val fieldInsn = insn as FieldInsnNode
            val fieldSignature = FieldSignature(fieldInsn.owner, fieldInsn.name, fieldInsn.desc)
            if (fieldSignature !in whitelistedStaticFields)
                return false
        }

        // Writing to or incrementing an argument variable forbids in-place argument inlining.
        if (insn.opcode in Opcodes.ISTORE..Opcodes.ASTORE && (insn as VarInsnNode).`var` < argumentVarEnd)
            return false
        if (insn.opcode == Opcodes.IINC && (insn as IincInsnNode).`var` < argumentVarEnd)
            return false

        // Analyze variable loads.
        if (insn.opcode in Opcodes.ILOAD..Opcodes.ALOAD) {
            // Skip parameter null check: 'aload x; ldc "..."; invokestatic <check>'
            if (insn.opcode == Opcodes.ALOAD && insn.isParameterCheckedForNull()) {
                // Go directly to the instruction after 'invokestatic <check>'
                insn = insn.next.next.next
                continue
            }
            val varInsn = insn as VarInsnNode
            val varIndex = (varInsn).`var`
            if (varIndex == expectedArgumentVar) {
                // Expected argument variable loaded.
                expectedArgumentVar += jvmArgumentTypes[lastArgIndex].size
                ++lastArgIndex
                // Skip a sequence of load instructions referring to the same argument variable
                // (such sequence is present in functions like 'Array.copyOf' and can be replaced with DUP instructions).
                do {
                    insn = insn.next
                } while (insn != null && insn.opcode == varInsn.opcode && (insn as VarInsnNode).`var` == varIndex)
                continue
            } else if (varIndex < argumentVarEnd) {
                // Loaded an argument variable, but not an expected one => broken evaluation order
                return false
            } else {
                // It's OK to load any non-argument variable during argument evaluation.
                insn = insn.next
                continue
            }
        }

        // Anything else is fine.
        insn = insn.next
    }

    // Method body is over, but not all arguments were loaded on stack.
    if (expectedArgumentVar < argumentVarEnd)
        return false

    // After arguments evaluation make sure that argument variables are no longer accessed
    // (we are not going to store anything to those variables anyway).
    while (insn != null) {
        if (insn.opcode in Opcodes.ILOAD..Opcodes.ALOAD || insn.opcode in Opcodes.ISTORE..Opcodes.ASTORE) {
            if ((insn as VarInsnNode).`var` < argumentVarEnd)
                return false
        } else if (insn.opcode == Opcodes.IINC) {
            if ((insn as IincInsnNode).`var` < argumentVarEnd)
                return false
        }
        insn = insn.next
    }

    // Didn't encounter anything suspicious.
    return true
}
/**
 * Identifies a JVM field by owner internal name, field name and descriptor.
 * Used as the lookup key when whitelisting GETSTATIC instructions.
 */
internal data class FieldSignature(
    val owner: String,
    val name: String,
    val desc: String
)
// Static fields whose GETSTATIC reads are permitted during in-place argument
// evaluation (per canInlineArgumentsInPlace, class initialization effects are
// deliberately disregarded).
private val whitelistedStaticFields: Set<FieldSignature> =
    hashSetOf(
        FieldSignature("kotlin/Result", "Companion", "Lkotlin/Result\$Companion;"),
        FieldSignature("kotlin/_Assertions", "ENABLED", "Z")
    )
// True when this instruction must not appear while inline-call arguments are
// still being evaluated. Pseudo-instructions report opcode -1, which falls
// outside the table and therefore yields false.
private fun AbstractInsnNode.isProhibitedDuringArgumentsEvaluation(): Boolean =
    opcodeProhibitedDuringArgumentsEvaluation.getOrElse(opcode) { false }
// Lookup table indexed by JVM opcode: true marks an instruction that is a
// hazard while inline-call arguments are being evaluated.
private val opcodeProhibitedDuringArgumentsEvaluation = BooleanArray(256).apply {
    // Any kind of jump during arguments evaluation is a hazard.
    // This includes all conditional jump instructions, switch instructions, return and throw instructions.
    // Very conservative, but enough for practical cases.
    for (opcode in Opcodes.IFEQ..Opcodes.RETURN) this[opcode] = true
    this[Opcodes.IFNULL] = true
    this[Opcodes.IFNONNULL] = true
    this[Opcodes.ATHROW] = true

    // Instruction with non-trivial side effects is a hazard.
    // NB GETSTATIC is taken care of separately.
    this[Opcodes.PUTSTATIC] = true
    this[Opcodes.PUTFIELD] = true
    this[Opcodes.INVOKEVIRTUAL] = true
    this[Opcodes.INVOKESPECIAL] = true
    this[Opcodes.INVOKESTATIC] = true
    this[Opcodes.INVOKEINTERFACE] = true
    this[Opcodes.INVOKEDYNAMIC] = true
    this[Opcodes.MONITORENTER] = true
    this[Opcodes.MONITOREXIT] = true

    // Integer division instructions can throw exception
    this[Opcodes.IDIV] = true
    this[Opcodes.LDIV] = true
    this[Opcodes.IREM] = true
    this[Opcodes.LREM] = true

    // CHECKCAST can throw exception
    this[Opcodes.CHECKCAST] = true

    // Array creation can throw exception (in case of negative array size)
    this[Opcodes.NEWARRAY] = true
    this[Opcodes.ANEWARRAY] = true
    this[Opcodes.MULTIANEWARRAY] = true

    // Array access instructions can throw exception
    for (opcode in Opcodes.IALOAD..Opcodes.SALOAD) this[opcode] = true
    for (opcode in Opcodes.IASTORE..Opcodes.SASTORE) this[opcode] = true
}
// Pseudo-instruction marker names delimiting an in-place inlined call and each
// of its in-place inlined arguments in the emitted bytecode.
private const val MARKER_INPLACE_CALL_START = "<INPLACE-CALL-START>"
private const val MARKER_INPLACE_ARGUMENT_START = "<INPLACE-ARGUMENT-START>"
private const val MARKER_INPLACE_ARGUMENT_END = "<INPLACE-ARGUMENT-END>"
private const val MARKER_INPLACE_CALL_END = "<INPLACE-CALL-END>"

// Emits a marker as a no-arg static call on the inline-marker class
// (presumably recognized and stripped by the inliner — not visible here).
private fun InstructionAdapter.addMarker(name: String) {
    visitMethodInsn(Opcodes.INVOKESTATIC, INLINE_MARKER_CLASS_NAME, name, "()V", false)
}
// Emitters for the four in-place inlining markers.
fun InstructionAdapter.addInplaceCallStartMarker() = addMarker(MARKER_INPLACE_CALL_START)
fun InstructionAdapter.addInplaceCallEndMarker() = addMarker(MARKER_INPLACE_CALL_END)
fun InstructionAdapter.addInplaceArgumentStartMarker() = addMarker(MARKER_INPLACE_ARGUMENT_START)
fun InstructionAdapter.addInplaceArgumentEndMarker() = addMarker(MARKER_INPLACE_ARGUMENT_END)

// Recognizers for the four in-place inlining markers.
internal fun AbstractInsnNode.isInplaceCallStartMarker() = isInlineMarker(this, MARKER_INPLACE_CALL_START)
internal fun AbstractInsnNode.isInplaceCallEndMarker() = isInlineMarker(this, MARKER_INPLACE_CALL_END)
internal fun AbstractInsnNode.isInplaceArgumentStartMarker() = isInlineMarker(this, MARKER_INPLACE_ARGUMENT_START)
internal fun AbstractInsnNode.isInplaceArgumentEndMarker() = isInlineMarker(this, MARKER_INPLACE_ARGUMENT_END)
|
package com.insightfullogic.honest_profiler.framework.generator;
import static com.insightfullogic.honest_profiler.core.aggregation.grouping.CombinedGrouping.combine;
import static com.insightfullogic.honest_profiler.framework.AggregationUtil.nano;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Optional;
import com.insightfullogic.honest_profiler.core.aggregation.AggregationProfile;
import com.insightfullogic.honest_profiler.core.aggregation.aggregator.FlatProfileAggregator;
import com.insightfullogic.honest_profiler.core.aggregation.filter.FilterSpecification;
import com.insightfullogic.honest_profiler.core.aggregation.grouping.CombinedGrouping;
import com.insightfullogic.honest_profiler.core.aggregation.grouping.FrameGrouping;
import com.insightfullogic.honest_profiler.core.aggregation.grouping.ThreadGrouping;
import com.insightfullogic.honest_profiler.core.aggregation.result.straight.Entry;
import com.insightfullogic.honest_profiler.core.aggregation.result.straight.Flat;
import com.insightfullogic.honest_profiler.core.profiles.lean.LeanProfile;
import com.insightfullogic.honest_profiler.framework.LeanLogCollectorDriver;
/**
 * Test-support generator that aggregates a {@code LeanProfile} into a flat
 * profile and offers assertion helpers over the resulting entries.
 */
public class FlatGenerator extends LeanLogCollectorDriver
{
    // Class Methods

    /** Asserts that the flat aggregation contains exactly {@code size} entries. */
    public static final void assertAggregationSizeEquals(Flat flat, int size)
    {
        assertEquals("Wrong size of the Flat aggregation.", size, flat.getData().size());
    }

    /**
     * Asserts that an entry with the given key exists and carries the expected
     * self/total counts and self/total times.
     */
    public static final void assertContains(Flat flat, String key, int selfCount, int totalCount,
        long selfTime, long totalTime)
    {
        Optional<Entry> result = flat.getData().stream().filter(entry -> key.equals(entry.getKey()))
            .findFirst();
        assertTrue("No entry found with key " + key, result.isPresent());

        Entry entry = result.get();
        assertEquals("Wrong self count for entry " + key, selfCount, entry.getSelfCnt());
        assertEquals("Wrong total count for entry " + key, totalCount, entry.getTotalCnt());
        assertEquals("Wrong self time for entry " + key, selfTime, entry.getSelfTime());
        assertEquals("Wrong total time for entry " + key, totalTime, entry.getTotalTime());
    }

    /** Convenience overload deriving the expected times from the counts via nano(). */
    public static final void assertContains(Flat flat, String key, int selfCount, int totalCount)
    {
        assertContains(flat, key, selfCount, totalCount, nano(selfCount), nano(totalCount));
    }

    /**
     * Returns the entry with the given key.
     * Fix: fails with a descriptive AssertionError when the key is absent,
     * instead of a bare NoSuchElementException from Optional.get().
     */
    public static final Entry getEntry(Flat flat, String key)
    {
        return flat.getData().stream().filter(entry -> key.equals(entry.getKey())).findFirst()
            .orElseThrow(() -> new AssertionError("No entry found with key " + key));
    }

    // Instance Properties

    // Grouping applied when aggregating; fixed at construction time.
    private CombinedGrouping grouping;
    // Most recently aggregated flat profile (updated by accept()).
    private Flat flat;

    // Instance Constructors

    public FlatGenerator(ThreadGrouping threadGrouping, FrameGrouping frameGrouping)
    {
        grouping = combine(threadGrouping, frameGrouping);
        reset();
    }

    // Instance accessors

    public Flat getFlat()
    {
        return flat;
    }

    public Entry getEntry(String key)
    {
        return getEntry(flat, key);
    }

    // Filter delegation

    public Flat filter(FilterSpecification<Entry> filter)
    {
        return flat.filter(filter);
    }

    // LeanProfileLister Implementation

    /** Re-aggregates the received profile into the current flat view. */
    @Override
    public void accept(LeanProfile profile)
    {
        flat = new FlatProfileAggregator().aggregate(new AggregationProfile(profile), grouping);
    }

    // Assertions

    public void assertAggregationSizeEquals(int size)
    {
        assertAggregationSizeEquals(flat, size);
    }

    public void assertContains(String key, int selfCount, int totalCount, long selfTime,
        long totalTime)
    {
        assertContains(flat, key, selfCount, totalCount, selfTime, totalTime);
    }

    public void assertContains(String key, int selfCount, int totalCount)
    {
        assertContains(flat, key, selfCount, totalCount);
    }
}
|
using System.Threading.Tasks;
namespace RavenNest.SDK.Endpoints
{
    /// <summary>
    /// Base class for handlers that asynchronously process a single game packet.
    /// </summary>
    public abstract class GamePacketHandler
    {
        // Game services/state shared with every concrete handler.
        protected readonly GameManager GameManager;

        protected GamePacketHandler(GameManager gameManager)
        {
            GameManager = gameManager;
        }

        /// <summary>Processes the received packet asynchronously.</summary>
        public abstract Task HandleAsync(GamePacket packet);
    }
}
|
# Network transport based implementation

This module provides an implementation of the service built on top of the
network transport library.
|
import java.util.Scanner;
/**
 * Reads two non-negative integers from standard input and prints the value of
 * Ackermann's function A(m, n).
 */
public class AckermannFunction
{
    public static void main(String[] args)
    {
        // try-with-resources closes the scanner (the original leaked it).
        try (Scanner keyboard = new Scanner(System.in))
        {
            int x, y;
            System.out.print("Enter an integer: ");
            x = keyboard.nextInt();
            System.out.print("Enter another integer: ");
            y = keyboard.nextInt();
            System.out.print("Ackermann's function is : " + ackermann(x, y));
        }
    }

    /**
     * Computes the two-argument Ackermann function recursively.
     *
     * @param m first argument, must be non-negative
     * @param n second argument, must be non-negative
     * @return A(m, n)
     * @throws IllegalArgumentException if either argument is negative (the
     *         original recursed until a StackOverflowError in that case)
     */
    public static int ackermann(int m, int n)
    {
        if (m < 0 || n < 0)
            throw new IllegalArgumentException("ackermann is defined only for non-negative arguments");
        if (m == 0)
            return n + 1;
        else if (n == 0)
            return ackermann(m - 1, 1);
        else
            return ackermann(m - 1, ackermann(m, n - 1));
    }
}
|
using System;
namespace Graph.Exceptions
{
    /// <summary>
    /// An exception thrown when a vertex isn't found in a graph.
    /// </summary>
    public class VertexNotFoundException : Exception
    {
        /// <summary>Creates the exception with a default message.</summary>
        public VertexNotFoundException()
        {
        }

        /// <summary>Creates the exception with a descriptive message.</summary>
        public VertexNotFoundException(string message)
            : base(message)
        {
        }

        /// <summary>Creates the exception wrapping an underlying cause.</summary>
        public VertexNotFoundException(string message, Exception innerException)
            : base(message, innerException)
        {
        }
    }
}
|
package com.coinffeine.common.bitcoin
import java.math.BigInteger
import scala.collection.JavaConversions._
import com.google.bitcoin.core.Transaction
import com.google.bitcoin.core.Transaction.SigHash
import com.google.bitcoin.script.ScriptBuilder
import com.google.bitcoin.wallet.WalletTransaction
import com.coinffeine.common._
import com.coinffeine.common.Currency.Implicits._
object Implicits {

  /** Convenience operations on a mutable bitcoin transaction. */
  implicit class PimpMyMutableTransaction(val tx: MutableTransaction) extends AnyVal {

    /** Adds a change output for inputAmount - spentAmount when that difference is positive. */
    def addChangeOutput(inputAmount: BitcoinAmount,
                        spentAmount: BitcoinAmount,
                        changeAddress: Address): Unit = {
      val changeAmount = inputAmount - spentAmount
      require(!changeAmount.isNegative)
      if (changeAmount.isPositive) {
        // Equivalent to changeAmount computed above.
        tx.addOutput((inputAmount - spentAmount).asSatoshi, changeAddress)
      }
    }

    /** Adds an N-of-N multisig output for the given public keys (at least two required). */
    def addMultisignOutput(amount: BitcoinAmount, requiredSignatures: Seq[PublicKey]): Unit = {
      require(requiredSignatures.size > 1, "should have at least two signatures")
      tx.addOutput(
        amount.asSatoshi,
        ScriptBuilder.createMultiSigOutputScript(requiredSignatures.size, requiredSignatures)
      )
    }

    /** Sum of all output values of this transaction, in bitcoin. */
    def outputAmount: BitcoinAmount = Currency.Bitcoin.fromSatoshi(
      tx.getOutputs.foldLeft(BigInteger.ZERO)((a, b) => a.add(b.getValue)))
  }

  /** Convenience operations on a wallet: valuation, fund blocking and release. */
  implicit class PimpMyWallet(val wallet: Wallet) extends AnyVal {

    /** Net value of the transaction from this wallet's point of view. */
    def value(tx: MutableTransaction): BitcoinAmount =
      Currency.Bitcoin.fromSatoshi(tx.getValue(wallet))

    def valueSentFromMe(tx: MutableTransaction): BitcoinAmount =
      Currency.Bitcoin.fromSatoshi(tx.getValueSentFromMe(wallet))

    def valueSentToMe(tx: MutableTransaction): BitcoinAmount =
      Currency.Bitcoin.fromSatoshi(tx.getValueSentToMe(wallet))

    def balance(): BitcoinAmount = Currency.Bitcoin.fromSatoshi(wallet.getBalance)

    /** Commits the transaction to the wallet, marking its inputs as spent. */
    def blockFunds(tx: MutableTransaction): Unit = {
      wallet.commitTx(tx)
    }

    /** Creates, commits and returns a simple send to the given address. */
    def blockFunds(to: Address, amount: BitcoinAmount): MutableTransaction = {
      val tx = wallet.createSend(to, amount.asSatoshi)
      blockFunds(tx)
      tx
    }

    /**
     * Builds, signs and commits a transaction that locks `amount` in a
     * multisig output, sending any surplus back as change.
     */
    def blockMultisignFunds(requiredSignatures: Seq[PublicKey],
                            amount: BitcoinAmount): MutableTransaction = {
      require(amount.isPositive, s"Amount to block must be greater than zero ($amount given)")
      val inputFunds = collectFunds(amount)
      val totalInputFunds = valueOf(inputFunds)
      require(totalInputFunds >= amount,
        "Input funds must cover the amount of funds to commit")

      val tx = new MutableTransaction(wallet.getNetworkParameters)
      inputFunds.foreach(tx.addInput)
      tx.addMultisignOutput(amount, requiredSignatures)
      tx.addChangeOutput(totalInputFunds, amount, wallet.getChangeAddress)

      tx.signInputs(SigHash.ALL, wallet)
      blockFunds(tx)
      tx
    }

    /**
     * Undoes a previous blockFunds: disconnects the transaction's inputs from
     * their parent outputs, moves the parents back to the UNSPENT pool and the
     * transaction itself to the DEAD pool.
     */
    def releaseFunds(tx: Transaction): Unit = {
      tx.getInputs.foreach { input =>
        val parentTx = input.getOutpoint.getConnectedOutput.getParentTransaction
        if (contains(parentTx)) {
          if (!input.disconnect()) {
            throw new IllegalStateException(s"cannot disconnect outputs from $input in $tx")
          }
          moveToPool(parentTx, WalletTransaction.Pool.UNSPENT)
        }
      }
      moveToPool(tx, WalletTransaction.Pool.DEAD)
    }

    /**
     * Greedily selects spend candidates until their accumulated value reaches
     * `amount`; may return less than `amount` if the wallet lacks funds.
     */
    def collectFunds(amount: BitcoinAmount): Set[MutableTransactionOutput] = {
      val inputFundCandidates = wallet.calculateAllSpendCandidates(true)
      val necessaryInputCount =
        inputFundCandidates.view.scanLeft(Currency.Bitcoin.Zero)((accum, output) =>
          accum + Currency.Bitcoin.fromSatoshi(output.getValue))
          .takeWhile(_ < amount)
          .length
      inputFundCandidates.take(necessaryInputCount).toSet
    }

    /** True when the wallet already knows a transaction with this hash. */
    def contains(tx: Transaction): Boolean = Option(wallet.getTransaction(tx.getHash)).isDefined

    private def valueOf(outputs: Traversable[MutableTransactionOutput]): BitcoinAmount =
      outputs.map(funds => Currency.Bitcoin.fromSatoshi(funds.getValue)).reduce(_ + _)

    // Rebuilds the wallet's transaction set with `tx` reassigned to `pool`
    // (the wallet API offers no direct "move to pool" operation).
    private def moveToPool(tx: Transaction, pool: WalletTransaction.Pool): Unit = {
      val wtxs = wallet.getWalletTransactions
      wallet.clearTransactions(0)
      wallet.addWalletTransaction(new WalletTransaction(pool, tx))
      wtxs.foreach { wtx =>
        if (tx.getHash != wtx.getTransaction.getHash) {
          wallet.addWalletTransaction(wtx)
        }
      }
    }
  }
}
|
<?php
use PHPUnit\Framework\TestCase;
use Comhon\Object\Config\Config;
use Comhon\Exception\Config\ConfigFileNotFoundException;
use Comhon\Exception\Config\ConfigMalformedException;
use Comhon\Model\Restriction\RegexCollection;
use Test\Comhon\Data;
use Comhon\Model\Singleton\ModelManager;
/**
 * Exercises Config singleton loading: missing/malformed files, inconsistent
 * referenced paths, and successful loads with and without SQL models.
 * Tests are chained with @depends because Config/ModelManager are singletons.
 */
class ConfigTest extends TestCase
{
    public static function setUpBeforeClass()
    {
        // Start from a clean singleton state for the whole class.
        Config::resetSingleton();
        ModelManager::resetSingleton();
    }

    public function testNotFoundConfig()
    {
        $this->expectException(ConfigFileNotFoundException::class);
        Config::setLoadPath('./config/not-existing-config.json');
    }

    public function testMalformedConfig()
    {
        $this->expectException(ConfigMalformedException::class);
        Config::setLoadPath('./config/malformed-config.json');
        Config::getInstance();
    }

    /**
     * A config referencing a non-existent database file must fail on load.
     * @depends testNotFoundConfig
     * @depends testMalformedConfig
     */
    public function testDatabaseFileNotFoundConfig()
    {
        $this->expectException(ConfigFileNotFoundException::class);
        Config::setLoadPath('./config/inconsistent-config.json');
        Config::getInstance();
    }

    /**
     * The config itself loads, but the regex list file it references is missing.
     * @depends testDatabaseFileNotFoundConfig
     */
    public function testRegexFileNotFoundConfig()
    {
        Config::setLoadPath('./config/inconsistent-2-config.json');
        $config = Config::getInstance();
        $configPath = $config->getDirectory() . '/' . basename(Config::getLoadPath());
        $this->assertTrue(strpos($configPath, 'test/config/inconsistent-2-config.json') !== false);
        $this->expectException(ConfigFileNotFoundException::class);
        RegexCollection::getInstance();
    }

    /**
     * A config extending another must expose inherited and own values.
     * @depends testDatabaseFileNotFoundConfig
     */
    public function testSuccessConfigExtended()
    {
        ModelManager::resetSingleton();
        Config::resetSingleton();
        Config::setLoadPath(__DIR__ . '/../config/config-extended.json');
        $config = Config::getInstance();
        $this->assertEquals('Test\Config', $config->getModel()->getName());
        $this->assertEquals('test', $config->getValue('test_config'));
    }

    /**
     * Without SQL settings, the SQL models must not be registered.
     * @depends testSuccessConfigExtended
     */
    public function testSuccessConfigWithoutSql()
    {
        ModelManager::resetSingleton();
        Config::resetSingleton();
        Config::setLoadPath(__DIR__ . '/../config/config-without-sql.json');
        Config::getInstance();
        $this->assertFalse(ModelManager::getInstance()->hasInstanceModel('Comhon\SqlTable'));
        $this->assertFalse(ModelManager::getInstance()->hasInstanceModel('Comhon\SqlDatabase'));
    }

    /**
     * Full config: SQL models registered and the serialized config matches the
     * expected JSON, adjusted per manifest format (json/xml/yaml).
     * @depends testSuccessConfigWithoutSql
     */
    public function testSuccessConfigWithSql()
    {
        ModelManager::resetSingleton();
        Config::resetSingleton();
        Config::setLoadPath(Data::$config);
        $config = Config::getInstance();
        $configPath = $config->getDirectory() . '/' . basename(Config::getLoadPath());
        $this->assertTrue(strpos($configPath, realpath(Data::$config)) !== false);
        $this->assertTrue(ModelManager::getInstance()->hasInstanceModel('Comhon\SqlTable'));
        $this->assertTrue(ModelManager::getInstance()->hasInstanceModel('Comhon\SqlDatabase'));
        $this->assertTrue(in_array($config->getManifestFormat(), ['xml', 'json', 'yaml']));
        // NOTE: this literal must match Config::__toString() byte for byte.
        $expected = '{
"manifest_format": "json",
"autoload": {
"manifest": {
"Test": "..\/manifests\/test\/manifest",
"Sql": "..\/manifests\/sql\/manifest",
"Binder": "..\/manifests\/binder\/manifest"
},
"serialization": {
"Test": "..\/manifests\/test\/serialization_pgsql",
"Sql": "..\/manifests\/sql\/serialization",
"Binder": "..\/manifests\/binder\/serialization"
},
"options": {
"Test": "..\/manifests\/test\/options"
}
},
"regex_list": ".\/regex.json",
"date_time_format": "c",
"database": {
"charset": "utf8",
"timezone": "UTC"
},
"sql_table": ".\/table",
"sql_database": ".\/database",
"cache_settings": "directory:..\/..\/..\/cache",
"request_collection_limit": 20
}
';
        // Adjust the expectation for the non-json manifest formats.
        if ($config->getManifestFormat() == 'xml') {
            $expected = str_replace(['"json"', 'serialization_pgsql'], ['"xml"', 'serialization'], $expected);
            $expected = str_replace('"directory:..\/..\/..\/cache"', '"memcached:host=127.0.0.1;port=11211"', $expected);
        } elseif ($config->getManifestFormat() == 'yaml') {
            $expected = str_replace('"json"', '"yaml"', $expected);
            $expected = str_replace(',' . PHP_EOL . ' "cache_settings": "directory:..\/..\/..\/cache"', '', $expected);
        }
        $this->assertEquals($expected, $config->__toString());
        RegexCollection::getInstance();
    }
}
|
<?php
/**
 * Simple immutable value holder pairing a Bar with an optional string label.
 */
class Foo
{
    /** @var Bar */
    protected $bar;

    /** @var string */
    protected $what;

    /**
     * @param Bar    $bar
     * @param string $what Optional label; defaults to 'thisiswhat'.
     */
    public function __construct(Bar $bar, $what = 'thisiswhat')
    {
        $this->bar = $bar;
        $this->what = $what;
    }

    /** @return Bar */
    public function getBar()
    {
        return $this->bar;
    }

    /** @return string */
    public function getWhat()
    {
        return $this->what;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.