text stringlengths 1 1.05M |
|---|
#!/bin/sh
# Copyright 2019, Oath Inc.
#
# Licensed under the terms of the Apache 2.0 license. See LICENSE file in https://github.com/yahoo/panoptes_docker/LICENSE for terms.
# Follow all plugin-scheduler logs and interleave their output on stdout.
# Both the flat .log files and the */current files (presumably
# svlogd/runit-managed logs — confirm) are watched; tail prints a
# "==> file <==" header whenever the output source changes.
tail -f /home/panoptes/logs/discovery_plugin_scheduler.log \
/home/panoptes/logs/polling_plugin_scheduler.log \
/home/panoptes/logs/enrichment_plugin_scheduler.log \
/home/panoptes/logs/polling_plugin_scheduler/current \
/home/panoptes/logs/enrichment_plugin_scheduler/current \
/home/panoptes/logs/discovery_plugin_scheduler/current
|
#! /bin/sh
# Pull the scrapy image and run run.sh from the current directory,
# which is bind-mounted at /app inside the container.
IMAGE=allanfann/scrapy
docker pull "${IMAGE}"
# Quote the mount path: an unquoted `pwd` breaks when the working
# directory contains spaces. $(...) is also preferred over backticks.
docker run -v "$(pwd)":/app "${IMAGE}" sh run.sh
|
#include "mqtt.h"
#include <mosquittopp.h>
#include <iostream>
#include <stdio.h>
#include <string.h>
/**
 * Constructs the MQTT client, registers credentials and the last will,
 * and starts a non-blocking connection plus the network thread.
 *
 * @param _id           client identifier passed to mosquittopp
 * @param _host         broker hostname
 * @param _port         broker port
 * @param _username     broker username (may be NULL or empty to skip auth)
 * @param _password     broker password (may be NULL or empty to skip auth)
 * @param _will_topic   LWT topic (NULL to skip the will)
 * @param _will_message LWT payload (NULL to skip the will)
 */
Mqtt::Mqtt(const char * _id, const char * _host, int _port, const char * _username, const char * _password, const char * _will_topic, const char * _will_message) : mosquittopp(_id)
{
    int version = MQTT_PROTOCOL_V311;
    mosqpp::lib_init();
    this->keepalive = 30;
    this->id = _id;
    this->port = _port;
    this->host = _host;
    this->will_topic = _will_topic;
    this->will_message = _will_message;
    // Set version to 3.1.1
    opts_set(MOSQ_OPT_PROTOCOL_VERSION, &version);
    // Set username and password if non-null and non-empty. The NULL checks
    // come first: calling strlen(NULL) is undefined behaviour.
    if (_username != NULL && _password != NULL
            && strlen(_username) > 0 && strlen(_password) > 0) {
        username_pw_set(_username, _password);
    }
    // Set last will and testament (LWT) message; the will must be
    // registered before connecting.
    if (will_topic != NULL && will_message != NULL) {
        // set_will returns true on success (see Mqtt::set_will).
        bool ok = set_will(will_topic, will_message);
        if (ok) {
            std::cout <<">> Mqtt - set LWT message to: " << will_message << std::endl;
        } else {
            std::cout <<">> Mqtt - Failed to set LWT message!" << std::endl;
        }
    }
    // Non-blocking connection request to the broker.
    connect_async(host, port, keepalive);
    // Start the thread managing connection / publish / subscribe.
    loop_start();
}
// Stops the network thread started by the constructor and releases the
// mosquitto library.
// NOTE(review): loop_stop() is called without a prior disconnect(); with a
// live connection a non-forced stop may block — confirm whether
// disconnect() + loop_stop() (or loop_stop(true)) is wanted here.
Mqtt::~Mqtt() {
loop_stop();
mosqpp::lib_cleanup();
}
/**
 * Registers the last-will-and-testament payload on this client.
 * Uses QoS 1 and marks the will as retained.
 *
 * @return true when mosquitto accepted the will, false otherwise.
 */
bool Mqtt::set_will(const char * _topic, const char * _message)
{
    return will_set(_topic, strlen(_message), _message, 1, true) == MOSQ_ERR_SUCCESS;
}
// mosquittopp callback: invoked whenever the broker connection drops.
// rc is the mosquitto disconnect reason code (0 = client-requested).
void Mqtt::on_disconnect(int rc) {
std::cout << ">> Mqtt - disconnected(" << rc << ")" << std::endl;
}
/**
 * mosquittopp callback fired once the asynchronous connect resolves.
 * A result code of 0 signals a successful connection; any other value
 * is a broker refusal / failure code.
 */
void Mqtt::on_connect(int rc)
{
    if ( rc != 0 ) {
        std::cout << ">> Mqtt - failed to connect: (" << rc << ")" << std::endl;
        return;
    }
    std::cout << ">> Mqtt - connected" << std::endl;
}
// mosquittopp callback: invoked when the message with id `mid` has
// completed the publish handshake for its QoS level.
void Mqtt::on_publish(int mid)
{
std::cout << ">> Mqtt - Message (" << mid << ") published " << std::endl;
}
/**
 * Publishes _message on _topic with QoS 1 and the retain flag set.
 * With QoS 1 the mosquitto network thread handles re-transmission.
 *
 * publish() arguments:
 *   - NULL      : message-id out-param (int *); unused, so per-message
 *                 status tracking is not available here
 *   - _topic    : destination topic
 *   - payload length and payload pointer
 *   - 1         : QoS level
 *   - true      : retain the message on the broker for new subscribers
 *
 * @return true when mosquitto queued the message (MOSQ_ERR_SUCCESS).
 */
bool Mqtt::send(const char * _topic, const char * _message)
{
    const int status = publish(NULL, _topic, strlen(_message), _message, 1, true);
    return status == MOSQ_ERR_SUCCESS;
}
|
function ExtensionStorage() {
    // On a rejected storage promise, invoke the caller's callback with no
    // arguments so it observes `undefined` instead of an unhandled rejection.
    function onFailure(callback) {
        return callback();
    }

    // Pick the sync storage area when the browser provides one; otherwise
    // transparently fall back to local storage. Evaluated on every call so
    // late-appearing `browser.storage.sync` is picked up.
    function syncArea() {
        return browser.storage.sync ? browser.storage.sync : browser.storage.local;
    }

    // Read `key` from local storage; `callback` receives the result object.
    this.getLocal = function (key, callback) {
        browser.storage.local.get(key).then(callback, onFailure.bind(this, callback));
    };

    // Write `data` into local storage, then invoke `callback`.
    this.setLocal = function (data, callback) {
        browser.storage.local.set(data).then(callback, onFailure.bind(this, callback));
    };

    // Read `key` from sync storage (or local when sync is unavailable).
    this.getSync = function (key, callback) {
        syncArea().get(key).then(callback, onFailure.bind(this, callback));
    };

    // Write `data` into sync storage (or local when sync is unavailable).
    this.setSync = function (data, callback) {
        syncArea().set(data).then(callback, onFailure.bind(this, callback));
    };
}
|
#!/bin/bash
# Emit a CloudFormation custom-resource "Create" request payload on stdout.
#   $1 - ARN of the Lambda function backing the custom resource
#   $2 - cache id forwarded via ResourceProperties.CacheId
# NOTE(review): ResponseURL/StackId/RequestId are hard-coded sample values
# from an old test stack (the pre-signed URL is long expired) — presumably
# this is only used to hand-invoke the function locally; confirm.
function_arn=$1
cache_id=$2
cat <<EOT
{
"RequestType": "Create",
"ServiceToken": "$function_arn",
"ResponseURL": "https://cloudformation-custom-resource-response-euwest1.s3-eu-west-1.amazonaws.com/arn%3Aaws%3Acloudformation%3Aeu-west-1%3A143044406720%3Astack/test-route53-stack/22b48750-f4b2-11e4-b550-5067141f4d5d%7CRoute53%7Cafd8d7c5-9376-4013-8b3b-307517b8719e?AWSAccessKeyId=AKIAJ7MCS7PVEUOADEEA&Expires=1431008011&Signature=PzIB%2FPeLtpWutgn7QAgUnEKr%2Fgg%3D",
"StackId": "arn:aws:cloudformation:eu-west-1:143044406720:stack/test-vpc-stack/22b48750-f4b2-11e4-b550-5067141f4d5d",
"RequestId": "afd8d7c5-9376-4013-8b3b-307517b8719e",
"LogicalResourceId": "DefaultVpc",
"ResourceType": "Custom::VpcDependency",
"ResourceProperties": {
"ServiceToken": "$function_arn",
"CacheId": "$cache_id"
}
}
EOT
|
<reponame>tomegorny/temp<filename>running-java-within-postgres/simple-java-function/src/main/java/de/rieckpil/blog/Functions.java
package de.rieckpil.blog;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.time.LocalDate;
import java.time.Period;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.postgresql.pljava.annotation.Function;
public class Functions {
@Function
public static String greet(String personName) {
return "Hello World, " + personName + " !";
}
@Function(name = "split_string_by_delimiter")
public static Iterator<String> splitStringByDelimiter(String tagString, String delimiter) {
if (delimiter == null || delimiter.isEmpty()) {
delimiter = ">";
}
List<String> tags = new ArrayList<String>();
for (String currentTag : tagString.split(delimiter)) {
tags.add(currentTag.trim());
}
return tags.iterator();
}
@Function(name = "get_oldest_person")
public static String getOldestPerson() throws SQLException {
try (Statement statement = DriverManager //
.getConnection("jdbc:default:connection") //
.createStatement(); //
ResultSet resultSet = statement.executeQuery("SELECT * FROM persons")) {
List<Person> personList = new ArrayList<>();
while (resultSet.next()) {
Person person = new Person();
person.setId(resultSet.getLong("id"));
person.setFirstName(resultSet.getString("first_name"));
person.setLastName(resultSet.getString("last_name"));
person.setDayOfBirth(resultSet.getDate("day_of_birth").toLocalDate());
personList.add(person);
}
Collections.sort(personList);
Person oldestPerson = personList.get(0);
return String.format("The oldest person is %s, %s with %s years!", oldestPerson.getFirstName(),
oldestPerson.getLastName(), Period.between(oldestPerson.getDayOfBirth(), LocalDate.now()).getYears());
}
}
} |
import test from 'ava';
import deepFreeze from 'deep-freeze';
import buildNavigation from '../buildNavigation';

// buildNavigation should map each category name to a collapsed, zero-width
// navigation entry; deepFreeze guarantees the input array is not mutated.
// (Fixed "navigaiton" typo in the test description.)
test('It takes the categories array and maps into an array of navigation objects', t => {
  const beforeValue = ["foo", "bar"];
  const expected = [
    {
      text: 'foo',
      collapsed: true,
      width: 0
    },
    {
      text: 'bar',
      collapsed: true,
      width: 0
    }
  ];
  deepFreeze(beforeValue);
  const actual = buildNavigation(beforeValue);
  t.deepEqual(expected, actual);
});
|
// Aleth: Ethereum C++ client, tools and libraries.
// Copyright 2014-2019 Aleth Authors.
// Licensed under the GNU General Public License, Version 3.
#pragma once
#include <string>
namespace dev
{
/**
 * @brief temporary directory implementation
 * It creates temporary directory in the given path. On dealloc it removes the directory
 * @throws if the given path already exists, throws an exception
 */
class TransientDirectory
{
public:
/// Creates the directory in a default location (see the .cpp for where).
TransientDirectory();
/// Creates the directory at _path; throws if _path already exists.
TransientDirectory(std::string const& _path);
/// Removes the directory (presumably including its contents — confirm
/// against the implementation file).
~TransientDirectory();
/// Absolute path of the managed directory.
std::string const& path() const { return m_path; }
private:
std::string m_path;  // owned directory path; removed on destruction
};
}
|
# Register the Tor Project apt repositories for this Ubuntu/Debian release.
sudo sh -c 'echo "deb http://deb.torproject.org/torproject.org $(lsb_release -c -s) main" >> /etc/apt/sources.list'
sudo sh -c 'echo "deb-src http://deb.torproject.org/torproject.org $(lsb_release -c -s) main" >> /etc/apt/sources.list'
# Import the Tor Project archive signing key.
# NOTE(review): keys.gnupg.net has been unreliable/retired for years —
# confirm the keyserver still resolves before relying on this.
sudo gpg --keyserver keys.gnupg.net --recv 886DDD89
sudo gpg --export A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89 | sudo apt-key add -
sudo apt-get update
sudo apt-get -y install tor deb.torproject.org-keyring vim curl tor-arm python-pip
sudo pip install stem
# Fetch experiment helper scripts.
wget https://raw.githubusercontent.com/tfukui95/tor-experiment/master/utilities/exit-relay.py
# NOTE(review): "list-circuits.p" looks like a truncated "list-circuits.py"
# (compare the sibling download above) — confirm the remote filename.
wget https://raw.githubusercontent.com/tfukui95/tor-experiment/master/utilities/list-circuits.p
sudo /etc/init.d/tor stop
# Print this relay's fingerprint without joining the real network
# (dummy dirserver on localhost, ORPort 1).
sudo -u debian-tor tor --list-fingerprint --orport 1 \
--dirserver "x 127.0.0.1:1 ffffffffffffffffffffffffffffffffffffffff" \
--datadirectory /var/lib/tor/
# Pull the base torrc from the experiment's directory server, then append
# this host's nickname and its 192.168.x address.
sudo wget -O /etc/tor/torrc http://directoryserver/client.conf
HOSTNAME=$(hostname -s)
echo "Nickname $HOSTNAME" | sudo tee -a /etc/tor/torrc
ADDRESS=$(hostname -I | tr " " "\n" | grep "192.168")
echo "Address $ADDRESS" | sudo tee -a /etc/tor/torrc
sudo cat /etc/tor/torrc
sudo /etc/init.d/tor restart
|
<filename>rofl/utils/memory.py
from rofl.functions.const import *
from rofl.functions.functions import rnd
from rofl.functions.dicts import mergeDicts
from rofl.functions.torch import array2Tensor, list2Tensor
def itemsSeq(low, high):
    """
    Generator for the range [low, high)
    """
    yield from range(low, high)
def itemsRnd(low, high, nItems):
    """
    Generator for nItems in the interval [low, high]
    """
    for _ in range(nItems):
        yield rnd.randint(low, high)
class simpleMemory():
    """
    Circular (ring-buffer) experience memory.

    By default always looks for observation, reward, and
    done as the most basic form of an experience.

    parameters
    ----------
    - config: dict
    - additionalKeys: tuple(str, Any dtype)
        Specify what additional keys are required to process after
        gathering a sample to a torch.tensor and its data type.
        eg. ('return', torch.float16)
    """
    _exp_ = None
    memType = 'simple'
    # Keys every experience dict is expected to carry, with target dtypes
    # used by createSample when converting to tensors.
    __keysDeft__ = [("reward", F_TDTYPE_DEFT), ("done", B_TDTYPE_DEFT)]

    def __init__(self, config, *additionalKeys):
        self.size = config["agent"].get("memory_size", DEFT_MEMORY_SIZE)
        self.gamma = config["agent"]["gamma"]
        self.gae = config["agent"]["gae"]
        self.lmbda = config["agent"]["lambda"]
        # _i_ is the next write position; the buffer itself is allocated
        # lazily in reset(). _li_ is extra bookkeeping kept for compatibility.
        self._mem_, self._i_, self._li_ = None, 0, 0
        self.fillOnce = False
        self._keys_ = self.__keysDeft__.copy()
        for key in additionalKeys:
            self._keys_.append(key)

    def reset(self,):
        """Clears the buffer; returns self so calls can be chained."""
        self._mem_, self._i_, self._li_ = [None] * self.size, 0, 0
        self.fillOnce = False
        return self

    def add(self, infoDict):
        """Stores one experience dict, overwriting the oldest when full."""
        self._mem_[self._i_] = infoDict
        self._i_ = (self._i_ + 1) % self.size
        if self._i_ == 0:
            # Wrapped around: the whole buffer is valid from now on.
            self.fillOnce = True

    @property
    def last(self,):
        # Index of the most recently written item. Before the first add this
        # is -1, which (in Python) indexes the final slot once data wraps.
        return self._i_ - 1

    def sample(self, size, device = DEVICE_DEFT, keys = None):
        """
        Standard method to sample the memory. This is
        intended to be used as the main method to interface
        the memory.

        Passing size < 0 (or exactly the stored count) returns everything.

        returns
        --------
        obsDict
        """
        memSize = len(self)
        if size > memSize:
            raise ValueError("Not enough data to generate sample")
        if size == memSize or size < 0:
            return self.createSample(self.gatherMem(), device, keys)
        return self.createSample(self.gatherSample(size), device, keys)

    def gatherMem(self):
        """
        Returns all the items from memory

        returns
        -------
        index generator
        """
        if self.fillOnce:
            return itemsSeq(0, self.size)
        return itemsSeq(0, self._i_)

    def gatherSample(self, size):
        """
        Gathers the indexes from memory for the sample
        generation.

        returns
        -------
        index generator
        """
        if self.fillOnce:
            return itemsRnd(0, self.size - 1, size)
        return itemsRnd(0, self.last, size)

    def createSample(self, genSample, device, keys) -> dict:
        """
        Generates and process the sample from the gatherSample method.
        This could be done per item or in bulk. Either way is expected to
        return a single obsDict.

        returns
        --------
        obsDict
        """
        sample = mergeDicts(*[self[i] for i in genSample], targetDevice = device, keys = keys)
        # Convert the registered keys to tensors of their configured dtype.
        for key, dtype in self._keys_:
            aux = sample.get(key)
            if aux is None: # When the key in memory was not asked in keys for the merge result
                continue
            if isinstance(aux, TENSOR):
                continue
            elif isinstance(aux, list):
                sample[key] = list2Tensor(aux, device, dtype)
            elif isinstance(aux, ARRAY):
                sample[key] = array2Tensor(aux, device, dtype, batch=True)
            elif isinstance(aux, (int, float)): # when sample[N] = 1, then some elements could raise expception
                sample[key] = list2Tensor([aux], device, dtype)
            else:
                raise NotImplementedError('This wasnt expected yet... oopsy')
        return sample

    def copyMemory(self, memory):
        """
        Resets and copies the target memory's stored item references into
        this memory's buffer. This does not deep-copy any object.
        Modifies the memory state to match the target without changing
        the memory configuration.
        """
        # Validate before wiping our own state so a failed check does not
        # destroy the current contents (previously reset() ran first).
        self._assertSize_(memory)
        self.reset()
        items = len(memory)
        # Copy only the stored portion: assigning the raw _mem_ list (which
        # always has memory.size slots, None-padded) into a shorter slice
        # would grow this buffer past self.size.
        self._mem_[:items] = memory._mem_[:items]
        # Restore the write index / fill flag so len(self) == len(memory)
        # (previously _i_ stayed 0 and the copied data looked empty).
        self._i_ = items % self.size
        if items == self.size:
            self.fillOnce = True

    def addMemory(self, memory):
        """
        Add the experiences from a memory to another, wrapping around this
        circular buffer when needed.
        """
        self._assertSize_(memory)
        lTarget = len(memory)
        # Use only the stored portion of the source buffer; its raw _mem_
        # list always has memory.size slots (None-padded when not full), so
        # slicing to lTarget keeps the slice-assignment lengths consistent.
        src = memory._mem_[:lTarget]
        if (lTarget + self._i_) > self.size:
            underThat = self.size - self._i_
            overThis = (lTarget + self._i_) % self.size
            self._mem_[self._i_:] = src[:underThat]
            self._mem_[:overThis] = src[underThat:]
            self._i_, self.fillOnce = overThis, True
        else:
            self._mem_[self._i_:self._i_ + lTarget] = src
            self._i_ = (self._i_ + lTarget) % self.size
            if self._i_ == 0:
                self.fillOnce = True

    def _assertSize_(self, memory):
        # Raises when the target holds more items than this buffer's capacity.
        lTarget = len(memory)
        if lTarget > self.size:
            # Report this memory's capacity (previously reported the current
            # item count, which reads as 0 right after a reset).
            raise ValueError('That memory (%d) is bigger than this (%d)' % (lTarget, self.size))

    def __len__(self):
        if self.fillOnce:
            return self.size
        return self._i_

    def __getitem__(self, i):
        return self._mem_[i]

    def __repr__(self) -> str:
        s = 'Memory %s with %d capacity, %d items stored'%(self.memType, self.size, len(self))
        return s
class episodicMemory(simpleMemory):
    """
    Meant to store and process one episode at a time.
    Registers a 'return' key and fills it backwards whenever a terminal
    experience (done=True) is added.
    """
    memType = 'episodic simple'

    def __init__(self, config, *additionalKeys):
        extraKeys = [("return", F_TDTYPE_DEFT)]
        super().__init__(config, *extraKeys, *additionalKeys)

    def reset(self):
        super().reset()
        # Index of the last step of the previously resolved episode.
        self._lastEpisode_ = -1

    def add(self, infoDict):
        super().add(infoDict)
        if infoDict["done"]:
            self.resolveReturns()

    def resolveReturns(self):
        """Walks the finished episode backwards, writing discounted returns."""
        previousEnd = self._lastEpisode_
        self._lastEpisode_ = episodeEnd = self.last
        discount = self.gamma
        # Seed with a bootstrap value when one was stored on the last step.
        runningReturn = self[episodeEnd].get('bootstrapping', 0.0)
        if isinstance(runningReturn, TENSOR):
            runningReturn = runningReturn.cpu().item()
        # Use the base __getitem__ on purpose: subclasses may post-process
        # items in their own __getitem__.
        for step in range(episodeEnd, previousEnd, -1):
            stepDict = super().__getitem__(step)
            runningReturn = stepDict['return'] = stepDict["reward"] + discount * runningReturn

    def getEpisode(self, device = DEVICE_DEFT, keys = None):
        if self._lastEpisode_ == -1:
            raise AttributeError("Memory does not have an episode ready!")
        return self.createSample(self.gatherMem(), device, keys)
class multiMemory:
    """
    Manages one per-worker memory unit, keyed by the experience dict's 'id'.
    Units are created lazily up to config['agent']['workers'].
    """
    memType = 'multi episodic'

    def __init__(self, config, *additionalKeys):
        self.n = config['agent']['workers']
        self.config = config
        self._addKeys = additionalKeys
        self._hasInit = False
        self._memories, self._memList = {}, []
        # Resolve the per-worker memory class by name from this module;
        # falls back to episodicMemory when the name is absent/unknown.
        import rofl.utils.memory as memories
        self.unitMemType = getattr(memories, config['agent'].get('memory_type', ''), episodicMemory)

    def reset(self):
        for mem in self._memList:
            mem.reset()

    def __getitem__(self, i):
        """Returns worker i's memory, creating it on first use."""
        memories = self._memories
        mem = memories.get(i)
        if mem is not None:
            return mem
        elif mem is None and len(memories) < self.n:
            new = self.unitMemType(self.config, *self._addKeys)
            new.reset()
            self._memList.append(new)
            memories[i] = new
            return new
        elif len(memories) >= self.n:
            # Fixed message typo ("crated" -> "created").
            raise ValueError('New memories cannot be created, already at max capacity.')

    def __repr__(self) -> str:
        # Fixed: previously read the undefined attribute self.name, which
        # raised AttributeError; memType is the class label used elsewhere.
        s = 'Memory %s, managing %d units.' % (self.memType, len(self._memories))
        return s

    def add(self, *obsDict):
        """Routes each experience dict to its worker's memory by 'id'."""
        for dict_ in obsDict:
            iD = dict_['id']
            mem = self[iD]
            mem.add(dict_)

    def getEpisodes(self, device = DEVICE_DEFT, keys = None, forceResolve = True):
        """Collects one processed episode per managed unit."""
        episodes = []
        for mem in self._memList:
            if forceResolve: mem.resolveReturns()
            episode = mem.getEpisode(device, keys)
            episodes.append(episode)
        return episodes

    def getSamples(self, size, device = DEVICE_DEFT, keys = None):
        """Collects one sample of `size` items per managed unit."""
        samples = []
        for mem in self._memList:
            # Fixed: the unit memories expose sample(), not getSample().
            sample = mem.sample(size, device, keys)
            samples.append(sample)
        return samples
class dqnMemory(simpleMemory):
    """
    Works like simpleMemory but expects the agent to use the
    dqnStepv0 while using lHistObsProcess to process
    the observations. As this memory composes the observations in a
    similar manner.

    Sacrifices time complexity instead of space complexity.
    Use simpleMemory instead when not composing an lhist observation
    from saved frames. Incurring in a greater memory cost but less
    time to generate a sample.
    """
    memType = 'dqn v0'

    def __init__(self, config, *additionalKeys):
        super().__init__(config, *additionalKeys)
        # lhist = number of past frames stacked into one network input;
        # channels = frames (or channel groups) per stored observation.
        self.channels = config['agent'].get('channels', 1)
        self.lhist = config['agent']['lhist']
        assert self.lhist > 0, 'Lhist needs to be at least 1'

    def __getitem__(self, i):
        # Rebuilds the lhist-stacked observation (and next_observation) for
        # item i from the single frames stored in the ring buffer.
        lHist, channels = self.lhist, self.channels
        item = super().__getitem__(i).copy() # shallow, the original will keep the ndarray references
        obs, nObs = item['observation'], item['next_observation']
        # NOTE(review): assumes obs is an ndarray; the stack is built along a
        # new leading axis. For channels > 1 the jL:jU slices below can exceed
        # newObs' first axis (length lHist) — looks suspicious; confirm the
        # intended shapes against lHistObsProcess.
        newObs = np.zeros_like(obs, shape = (lHist, *obs.shape)) # (4,4,36,36) when using channels
        newNObs = np.zeros_like(newObs)
        # Slot 0 carries the newest frame of each stack.
        newObs[0:channels], newNObs[0:channels] = obs, nObs
        for j in range(1, lHist):
            prevItem = super().__getitem__(i - j)
            jL = j * channels
            jU = jL + channels
            # next_observation at depth j equals observation at depth j-1,
            # i.e. the current `obs` before it is rewound below.
            newNObs[jL:jU] = obs
            if prevItem is None or prevItem["done"]:
                # Episode boundary or empty slot: leave zero padding.
                break
            newObs[jL:jU] = obs = prevItem['observation']
        item['observation'], item['next_observation'] = newObs, newNObs
        return item

    def gatherSample(self, size):
        # Start at index 1 while the buffer has not wrapped, so __getitem__
        # can always look at least one step back for frame history.
        if self.fillOnce:
            return itemsRnd(0, self.size - 1, size)
        return itemsRnd(1, self._i_ - 1, size)
class episodicMemoryFrames(dqnMemory, episodicMemory):
    # Combines dqnMemory's frame-stacking __getitem__/gatherSample with
    # episodicMemory's return resolution (MRO puts dqnMemory first).
    pass
|
#!/bin/bash
# MIT License
#
# Copyright (c) 2020 Dmitrii Ustiugov, Shyam Jesalpura and EASE lab
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

# Directory containing this script. Renamed from PWD: assigning to the
# shell-special PWD variable desynchronizes it from the real working dir.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
"$SCRIPT_DIR/setup_system.sh"
"$SCRIPT_DIR/create_devmapper.sh"

# install Protocol Buffer Compiler
PROTO_VERSION=3.11.4
if [ ! -f "protoc-$PROTO_VERSION-linux-x86_64.zip" ]; then
wget -c "https://github.com/google/protobuf/releases/download/v$PROTO_VERSION/protoc-$PROTO_VERSION-linux-x86_64.zip"
sudo unzip -u "protoc-$PROTO_VERSION-linux-x86_64.zip" -d /usr/local
rm "protoc-$PROTO_VERSION-linux-x86_64.zip"
fi

# Necessary for containerd as container runtime but not docker
sudo modprobe overlay
sudo modprobe br_netfilter

# Set up required sysctl params, these persist across reboots.
sudo tee /etc/sysctl.d/99-kubernetes-cri.conf <<EOF
net.bridge.bridge-nf-call-iptables = 1
net.ipv4.ip_forward = 1
net.bridge.bridge-nf-call-ip6tables = 1
EOF
sudo sysctl --system
sudo swapoff -a

# we want the command (expected to be systemd) to be PID1, so exec to it
exec "$@"
|
#!/bin/bash
# CI step: run the vSphere UPI terraform install and monitor the cluster.
set -o nounset
set -o errexit
set -o pipefail
# Kill background jobs (installer/monitor loops) on TERM so the step stops cleanly.
trap 'CHILDREN=$(jobs -p); if test -n "${CHILDREN}"; then kill ${CHILDREN} && wait; fi' TERM
export HOME=/tmp
export SSH_PRIV_KEY_PATH=${CLUSTER_PROFILE_DIR}/ssh-privatekey
export SSH_PUB_KEY_PATH=${CLUSTER_PROFILE_DIR}/ssh-publickey
export OPENSHIFT_INSTALL_INVOKER=openshift-internal-ci/${JOB_NAME_SAFE}/${BUILD_ID}
export AWS_SHARED_CREDENTIALS_FILE=/var/run/vault/vsphere-aws/.awscred
export AWS_DEFAULT_REGION=us-east-1
echo "$(date -u --rfc-3339=seconds) - sourcing context from vsphere_context.sh..."
source "${SHARED_DIR}/vsphere_context.sh"
cluster_name=$(<"${SHARED_DIR}"/clustername.txt)
installer_dir=/tmp/installer
echo "$(date -u --rfc-3339=seconds) - Copying config from shared dir..."
mkdir -p "${installer_dir}/auth"
pushd ${installer_dir}
# Stage the install-config, metadata, terraform inputs and ignition configs
# produced by earlier steps.
cp -t "${installer_dir}" \
"${SHARED_DIR}/install-config.yaml" \
"${SHARED_DIR}/metadata.json" \
"${SHARED_DIR}/terraform.tfvars" \
"${SHARED_DIR}/secrets.auto.tfvars" \
"${SHARED_DIR}/bootstrap.ign" \
"${SHARED_DIR}/worker.ign" \
"${SHARED_DIR}/master.ign"
cp -t "${installer_dir}/auth" \
"${SHARED_DIR}/kubeadmin-password" \
"${SHARED_DIR}/kubeconfig"
# Copy sample UPI files
cp -rt "${installer_dir}" \
/var/lib/openshift-install/upi/"${CLUSTER_TYPE}"/*
export KUBECONFIG="${installer_dir}/auth/kubeconfig"
# On bootstrap failure: capture a console screenshot of every cluster VM,
# resolve their IPs, and run `openshift-install gather bootstrap`.
function gather_console_and_bootstrap() {
# shellcheck source=/dev/null
source "${SHARED_DIR}/govc.sh"
# list all the virtual machines in the folder/rp
clustervms=$(govc ls "/${GOVC_DATACENTER}/vm/${cluster_name}")
GATHER_BOOTSTRAP_ARGS=()
for ipath in $clustervms; do
# split on /
# shellcheck disable=SC2162
IFS=/ read -a ipath_array <<< "$ipath";
hostname=${ipath_array[-1]}
# create png of the current console to determine if a virtual machine has a problem
echo "$(date -u --rfc-3339=seconds) - capture console image"
govc vm.console -vm.ipath="$ipath" -capture "${ARTIFACT_DIR}/${hostname}.png"
# based on the virtual machine name create variable for each
# with ip addresses as the value
# wait 1 minute for an ip address to become available
# shellcheck disable=SC2140
declare "${hostname//-/_}_ip"="$(govc vm.ip -wait=1m -vm.ipath="$ipath" | awk -F',' '{print $1}')"
done
# The dynamically declared *_ip variables above are consumed here
# (bootstrap_0_ip, control_plane_{0,1,2}_ip).
GATHER_BOOTSTRAP_ARGS+=('--bootstrap' "${bootstrap_0_ip}")
GATHER_BOOTSTRAP_ARGS+=('--master' "${control_plane_0_ip}" '--master' "${control_plane_1_ip}" '--master' "${control_plane_2_ip}")
# 4.5 and prior used the terraform.tfstate for gather bootstrap. This causes an error with:
# state snapshot was created by Terraform v0.12.24, which is newer than current v0.12.20; upgrade to Terraform v0.12.24 or greater to work with this state"
# move the state temporarily
mv "${installer_dir}/terraform.tfstate" "${installer_dir}/terraform.tfstate.backup"
openshift-install --log-level debug --dir="${installer_dir}" gather bootstrap --key "${SSH_PRIV_KEY_PATH}" "${GATHER_BOOTSTRAP_ARGS[@]}"
mv "${installer_dir}/terraform.tfstate.backup" "${installer_dir}/terraform.tfstate"
}
# Background loop: approve pending node CSRs every 15s until the sentinel
# file /tmp/install-complete appears.
function approve_csrs() {
# The cluster won't be ready to approve CSR(s) yet anyway
sleep 30
echo "$(date -u --rfc-3339=seconds) - Approving the CSR requests for nodes..."
while true; do
oc get csr -ojson | jq -r '.items[] | select(.status == {} ) | .metadata.name' | xargs --no-run-if-empty oc adm certificate approve || true
sleep 15
if [[ -f "/tmp/install-complete" ]]; then
return 0
fi
done
}
# Background task: once the imageregistry operator config exists, patch it
# to Managed with emptyDir storage (no persistent backing in CI).
function update_image_registry() {
sleep 30
echo "$(date -u --rfc-3339=seconds) - Waiting for imageregistry config to be available"
while true; do
oc get configs.imageregistry.operator.openshift.io/cluster > /dev/null && break
sleep 15
done
echo "$(date -u --rfc-3339=seconds) - Patching image registry configuration..."
oc patch configs.imageregistry.operator.openshift.io cluster --type merge --patch '{"spec":{"managementState":"Managed","storage":{"emptyDir":{}}}}'
}
# Applies a MachineConfig that rewrites /etc/containers/registries.conf on
# workers so docker.io pulls go through the local e2e mirror (the base64
# blob is the registries.conf content), then waits for the rollout.
function setE2eMirror() {
oc create -f - <<EOF
apiVersion: machineconfiguration.openshift.io/v1
kind: MachineConfig
metadata:
labels:
machineconfiguration.openshift.io/role: worker
name: 98-e2e-registry-mirror
spec:
config:
ignition:
version: 3.1.0
storage:
files:
- contents:
source: data:text/plain;charset=utf-8;base64,dW5xdWFsaWZpZWQtc2VhcmNoLXJlZ2lzdHJpZXMgPSBbInJlZ2lzdHJ5LmFjY2Vzcy5yZWRoYXQuY29tIiwgImRvY2tlci5pbyJdCgpbW3JlZ2lzdHJ5XV0KcHJlZml4ID0gImRvY2tlci5pbyIKbG9jYXRpb24gPSAiZG9ja2VyLmlvIgoKW1tyZWdpc3RyeS5taXJyb3JdXQpsb2NhdGlvbiA9ICJlMmUtY2FjaGUudm1jLWNpLmRldmNsdXN0ZXIub3BlbnNoaWZ0LmNvbTo1MDAwIgo=
mode: 0544
overwrite: true
path: /etc/containers/registries.conf
EOF
echo "Waiting for machineconfig to begin rolling out"
oc wait --for=condition=Updating mcp/worker --timeout=5m
echo "Waiting for machineconfig to finish rolling out"
oc wait --for=condition=Updated mcp/worker --timeout=30m
}
# inject proxy information into haproxy.service if deploying with proxy configuration
if proxy_info="$(cat install-config.yaml | grep -oP 'httpProxy\s*:\s*\K.*')" ; then
echo "$(date -u --rfc-3339=seconds) - inject proxy env into haproxy service..."
sed -i "/TimeoutStartSec=0/a Environment=HTTPS_PROXY=${proxy_info}" ./lb/haproxy.service
fi
date +%s > "${SHARED_DIR}/TEST_TIME_INSTALL_START"
echo "$(date -u --rfc-3339=seconds) - terraform init..."
# Run terraform in the background and wait, so the TERM trap can kill it.
terraform init -input=false -no-color &
wait "$!"
date "+%F %X" > "${SHARED_DIR}/CLUSTER_INSTALL_START_TIME"
echo "$(date -u --rfc-3339=seconds) - terraform apply..."
terraform apply -auto-approve -no-color &
wait "$!"
# The terraform state could be larger than the maximum 1mb
# in a secret
tar -Jcf "${SHARED_DIR}/terraform_state.tar.xz" terraform.tfstate
## Monitor for `bootstrap-complete`
echo "$(date -u --rfc-3339=seconds) - Monitoring for bootstrap to complete"
openshift-install --dir="${installer_dir}" wait-for bootstrap-complete &
set +e
wait "$!"
ret="$?"
set -e
if [ $ret -ne 0 ]; then
set +e
# Attempt to gather bootstrap logs.
echo "$(date -u --rfc-3339=seconds) - Bootstrap failed, attempting to gather bootstrap logs..."
gather_console_and_bootstrap
# Redact passwords before exporting the installer log as an artifact.
sed 's/password: .*/password: REDACTED/' "${installer_dir}/.openshift_install.log" >>"${ARTIFACT_DIR}/.openshift_install.log"
echo "$(date -u --rfc-3339=seconds) - Copy log-bundle to artifacts directory..."
cp --verbose "${installer_dir}"/log-bundle-*.tar.gz "${ARTIFACT_DIR}"
set -e
exit "$ret"
fi
## Approving the CSR requests for nodes
approve_csrs &
## Configure image registry
update_image_registry &
## Monitor for cluster completion
echo "$(date -u --rfc-3339=seconds) - Monitoring for cluster completion..."
# When using line-buffering there is a potential issue that the buffer is not filled (or no new line) and this waits forever
# or in our case until the four hour CI timer is up.
openshift-install --dir="${installer_dir}" wait-for install-complete 2>&1 | stdbuf -o0 grep -v password &
set +e
wait "$!"
ret="$?"
set -e
date +%s > "${SHARED_DIR}/TEST_TIME_INSTALL_END"
date "+%F %X" > "${SHARED_DIR}/CLUSTER_INSTALL_END_TIME"
# Sentinel consumed by approve_csrs to stop its loop.
touch /tmp/install-complete
if test "${ret}" -eq 0 ; then
touch "${SHARED_DIR}/success"
# Save console URL in `console.url` file so that ci-chat-bot could report success
echo "https://$(env KUBECONFIG=${installer_dir}/auth/kubeconfig oc -n openshift-console get routes console -o=jsonpath='{.spec.host}')" > "${SHARED_DIR}/console.url"
fi
sed 's/password: .*/password: REDACTED/' "${installer_dir}/.openshift_install.log" >>"${ARTIFACT_DIR}/.openshift_install.log"
cp -t "${SHARED_DIR}" \
"${installer_dir}/auth/kubeconfig"
# Maps e2e images on dockerhub to locally hosted mirror
if [[ "$JOB_NAME" == *"4.6-e2e"* ]]; then
echo "Remapping dockerhub e2e images to local mirror for 4.6 e2e vSphere jobs"
setE2eMirror
fi
exit "$ret"
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-05-10 16:45
from __future__ import unicode_literals
from django.db import migrations
def copy_locationgroups(apps, schema_editor):
    """Copies every LegacyLocationGroup row into the new LocationGroup model."""
    LegacyLocationGroup = apps.get_model('mapdata', 'LegacyLocationGroup')
    LocationGroup = apps.get_model('mapdata', 'LocationGroup')
    for legacy in LegacyLocationGroup.objects.all():
        group = LocationGroup()
        # Move the slug over, then blank it on the legacy slug row so the
        # slug value stays unique across both models.
        group.slug = legacy.locationslug_ptr.slug
        slug_row = legacy.locationslug_ptr
        slug_row.slug = None
        slug_row.save()
        group.titles = legacy.titles
        group.can_search = legacy.can_search
        group.can_describe = legacy.can_describe
        group.color = legacy.color
        group.public = legacy.public
        group.compiled_room = legacy.compiled_room
        group.save()
        # M2M links can only be attached after the group has a primary key.
        group.arealocations.add(*legacy.arealocations.all())
class Migration(migrations.Migration):
    # Data migration copying legacy location groups into the new model.
    # NOTE(review): RunPython has no reverse_code here, so this migration
    # cannot be unapplied.

    dependencies = [
        ('mapdata', '0082_auto_20170510_1644'),
    ]

    operations = [
        migrations.RunPython(copy_locationgroups),
    ]
|
#!/bin/bash
# Force-remove the container whose name is exported as NAME by env.sh.
source scripts/env.sh
# Quote the expansion: an empty or space-containing NAME would otherwise
# be word-split into a malformed docker command.
docker rm -f "$NAME"
|
import IAchievementsRepository from 'modules/dashboard/domain/repositories/IAchievementsRepository';
import IAchievement from 'modules/dashboard/domain/entities/IAchievement';
/** Result envelope returned by ListAchievementsService.execute. */
interface IExecute {
  /** Achievements fetched from the repository (present on success). */
  achievements?: IAchievement[];
  /** Error message (present on failure). */
  error?: string;
  /** Whether the caller should force a logout (propagated from the error). */
  shouldLogout?: boolean;
}
export default class ListAchievementsService {
  constructor(private achievementsRepository: IAchievementsRepository) {}

  /**
   * Fetches every achievement from the repository.
   * Never throws: failures are reported via the returned object's
   * `error` / `shouldLogout` fields instead.
   */
  public async execute(): Promise<IExecute> {
    try {
      return { achievements: await this.achievementsRepository.findAll() };
    } catch (error) {
      const { message, shouldLogout } = error;
      return { error: message, shouldLogout };
    }
  }
}
|
<filename>src/apps/admin/src/client/com/sun/j2ee/blueprints/admin/client/About.java
/*
* Copyright 2002 Sun Microsystems, Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any
* kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
* WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
* EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES
* SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR
* DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN
* OR ITS LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR
* FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR
* PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY OF
* LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that Software is not designed, licensed or intended
* for use in the design, construction, operation or maintenance of
* any nuclear facility.
*/
package com.sun.j2ee.blueprints.admin.client;
import javax.swing.*;
import javax.swing.border.*;
import java.awt.*;
import java.awt.event.*;
/**
* About dialog that displays a short string of info
* and a scrolling list of names...with a waving Duke!
*
* @author <NAME>
*/
public class About extends JDialog {
    // Animated credits panel; its timer runs only while the dialog is visible.
    private AboutPanel panel;

    /**
     * Creates the about dialog.
     *
     * @param title   dialog window title
     * @param display short info string shown at the top of the panel
     * @param names   names cycled through the animated credits list
     * @param owner   parent frame
     * @param modal   whether the dialog blocks input to the owner
     */
    public About(String title, String display, String[] names, Frame owner,
        boolean modal) {
        super(owner, title, modal);
        panel = new AboutPanel(names);
        panel.setLayout(new BorderLayout());
        panel.add(new TranLabel(display), BorderLayout.NORTH);
        setContentPane(panel);
        setSize(320, 150);
        setResizable(false);
    }

    /**
     * Shows or hides the dialog, starting the animation timer when shown
     * and stopping it when hidden so no CPU is burned while invisible.
     */
    public void setVisible(boolean b) {
        if (b) {
            panel.start();
        } else {
            panel.stop();
        }
        super.setVisible(b);
    }
}
/**
 * Panel that paints a gradient background, a waving-Duke animation and a
 * scrolling, fading credits name driven by a Swing Timer.
 */
class AboutPanel extends JPanel implements ActionListener {
    // Ten Duke animation frames, loaded once from classpath resources.
    private static final ImageIcon[] icons = new ImageIcon[10];
    static {
        for (int i = 0; i < icons.length; i++) {
            icons[i] = new ImageIcon(About.class.getResource("/resources/duke"
                + i + ".gif"));
        }
    }
    private static final Color bgColorA = Color.white;
    private static final Color bgColorB = new Color(120, 120, 255);
    private static final Font nameFont = new Font("Dialog", Font.BOLD, 25);
    private static final RenderingHints hints =
        new RenderingHints(RenderingHints.KEY_ANTIALIASING,
            RenderingHints.VALUE_ANTIALIAS_ON);
    // Current Duke frame; starting past the end keeps frame 0 displayed
    // until the first fade begins (see the clamp in paintComponent).
    private int iconIndex = icons.length;
    private String[] names;
    // Animation tick: one repaint every 80 ms (~12.5 fps).
    private Timer timer = new Timer(80, this);
    private boolean fading = false;
    private int nameIndex = 0;
    // Vertical position of the scrolling name; starts just above the panel.
    private int nameY = -10;
    private float fade = 1.0f;
    // Cached gradient background, rebuilt only when the panel is resized.
    private int cachedWidth = 0;
    private int cachedHeight = 0;
    private GradientPaint bgPaint = null;

    public AboutPanel(String[] names) {
        this.names = names;
        setOpaque(true);
        setBorder(BorderFactory.createLoweredBevelBorder());
    }

    public void paintComponent(Graphics g) {
        Graphics2D g2d = (Graphics2D)g.create();
        int xloc;
        int yloc;
        int width = getWidth();
        int height = getHeight();
        // Rebuild the cached gradient only on size change.
        if (cachedWidth != width || cachedHeight != height || bgPaint == null) {
            cachedWidth = width;
            cachedHeight = height;
            bgPaint = new GradientPaint(0, 0, bgColorA, cachedWidth,
                cachedHeight, bgColorB);
        }
        g2d.setRenderingHints(hints);
        g2d.setPaint(bgPaint);
        g2d.fillRect(0, 0, cachedWidth, cachedHeight);
        // Clamp past-the-end iconIndex back to frame 0.
        ImageIcon icon = icons[iconIndex >= icons.length ? 0 : iconIndex];
        xloc = getInsets().left;
        yloc = cachedHeight - getInsets().bottom - 6 - icon.getIconHeight();
        icon.paintIcon(this, g2d, xloc, yloc);
        if (fading) {
            // Fade the name out using an alpha composite.
            g2d.setComposite(AlphaComposite.getInstance(
                AlphaComposite.SRC_OVER, fade));
        } else if (nameY >= cachedHeight - getInsets().bottom - 22) {
            // Name reached the bottom: start fading and restart Duke's wave.
            fading = true;
            iconIndex = 0;
        }
        Rectangle rect = nameFont.getStringBounds(names[nameIndex],
            g2d.getFontRenderContext()).getBounds();
        g2d.setFont(nameFont);
        xloc = xloc + icon.getIconWidth() + 5;
        // Red-to-blue gradient across the width of the rendered name.
        Paint paint = new GradientPaint(xloc, nameY, Color.red, xloc
            + rect.width, nameY, Color.blue);
        g2d.setPaint(paint);
        g2d.drawString(names[nameIndex], xloc, nameY);
        g2d.dispose();
    }

    public void start() {
        timer.start();
    }

    public void stop() {
        timer.stop();
    }

    /**
     * Timer tick: advances Duke's frame while waving, scrolls the current
     * name downward, and fades it out before moving to the next name.
     */
    public void actionPerformed(ActionEvent ae) {
        if (fading || iconIndex <= icons.length) {
            iconIndex++;
        }
        if (fading) {
            fade = fade - 0.05f;
            if (fade <= 0.0f) {
                // Fade finished: advance to the next name and reset state.
                nameIndex = (nameIndex + 1) % names.length;
                nameY = -10;
                fading = false;
                fade = 1.0f;
            }
        } else {
            nameY += 2;
        }
        repaint();
    }
}
/**
 * Label that paints a 60%-translucent white backdrop behind its centered,
 * HTML-rendered text before delegating to the normal JLabel painting.
 */
class TranLabel extends JLabel {
    private static final RenderingHints hints =
        new RenderingHints(RenderingHints.KEY_ANTIALIASING,
                           RenderingHints.VALUE_ANTIALIAS_ON);
    private static final AlphaComposite composite =
        AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.6f);
    private static final Color bgColor = Color.white;
    public TranLabel(String display) {
        super();
        setHorizontalAlignment(SwingConstants.CENTER);
        setText("<html><font color=black face=\"Dialog\">" + display
            + "</font></html>");
    }
    public void paintComponent(Graphics g) {
        Graphics2D gfx = (Graphics2D)g.create();
        try {
            gfx.setRenderingHints(hints);
            gfx.setComposite(composite);
            gfx.setColor(bgColor);
            gfx.fillRect(0, 0, getWidth(), getHeight());
            super.paintComponent(gfx);
        } finally {
            // Always release the graphics copy, even if painting throws.
            gfx.dispose();
        }
    }
}
|
<!-- Minimal static page: an (empty) periodic-table listing with one
     element/atomic-number row per entry. -->
<!DOCTYPE html>
<html>
<head>
<title>Periodic Table</title>
<style>
table {
  border-collapse: collapse;
}
th {
  border: 1px solid #dddddd;
  padding: 8px;
  text-align: center;
}
td {
  border: 1px solid #dddddd;
  padding: 8px;
}
</style>
</head>
<body>
<table>
<tr>
<th>Element</th>
<th>Atomic Number</th>
</tr>
<!-- rows of elements go here -->
</table>
</body>
</html>
import { Body, Controller, Delete, Get, Param, Post, Put, Query, UnauthorizedException, UseGuards, ValidationPipe } from '@nestjs/common';
import { AuthGuard } from '@nestjs/passport';
import { GetUser } from 'src/authentication/get-user.decorator';
import { GetUserTasksDto } from './dto/get-user-tasks.dto';
import { User } from 'src/users/user.entity';
import { CreateTaskDto } from './dto/create-task.dto';
import { ReturnTaskDto } from './dto/return-task.dto';
import { UpdateTaskDto } from './dto/update-task.dto';
import { TasksService } from './tasks.service';
@Controller('tasks')
@UseGuards(AuthGuard())
export class TasksController {
constructor(private taskService: TasksService) { }
@Post('/new')
async createTask(@Body(ValidationPipe) createTaskDto: CreateTaskDto, @GetUser() user: User): Promise<ReturnTaskDto> {
const task = await this.taskService.createTask(createTaskDto, user);
return {
task: task,
message: "Atividade criada com sucesso!"
};
}
@Get('/filter')
async getUserTasks(@Query(ValidationPipe) getUserTasksDto: GetUserTasksDto, @GetUser() user: User) {
return await this.taskService.getUserTasks(getUserTasksDto, user);
}
@Get(':id')
async findTaskById(@Param('id') id: number, @GetUser() user: User): Promise<ReturnTaskDto> {
const task = await this.taskService.findTaskById(id);
if (user && user.id === task.user.id) {
return { task: task, message: 'Atividade encontrada!' }
} else {
throw new UnauthorizedException('Você não tem permissão para acessar esta atividade!');
}
}
@Put(':id')
async updateTask(@Body() updateTaskDto: UpdateTaskDto, @Param('id') id: number, @GetUser() user: User): Promise<ReturnTaskDto> {
const task = await this.taskService.findTaskById(id);
if (user && (user.id === task.user.id)) {
return {
task: await this.taskService.updateTask(updateTaskDto, id),
message: 'Atividade atualizada com sucesso!'
};
} else {
throw new UnauthorizedException('Você não tem permissão para atualizar esta atividade!');
}
}
@Delete(':id')
async deleteTask(id: number, @GetUser() user: User): Promise<{ message: string }> {
const task = await this.taskService.findTaskById(id);
if (user && user.id === task.user.id) {
await this.taskService.deleteTask(id);
return { message: 'Atividade deletada com sucesso!' };
} else {
throw new UnauthorizedException('Você não tem permissão para acessar esta atividade!');
}
}
} |
const headlessClients = require('./headlessClients');
// Barrel module: re-export the headless-client helpers as a namespace.
module.exports = {
  headlessClients,
};
|
<reponame>dmage/lproc
package rewriter
import (
"crypto/md5"
"fmt"
"io"
)
// init registers funcMD5 under the rewriter-function name "md5" so it can
// be invoked from rewrite expressions.
func init() {
	register("md5", funcMD5)
}
// funcMD5 implements the "md5" rewriter function: it evaluates its single
// argument and returns the lowercase hex MD5 digest of the result.
func funcMD5(state *State, args Arguments) (string, error) {
	if len(args) != 1 {
		return "", fmt.Errorf("expected exactly one argument")
	}
	val, err := args.Evaluate(0, state)
	if err != nil {
		return "", err
	}
	// Hash writers never fail, so the WriteString error can be ignored.
	digest := md5.New()
	io.WriteString(digest, val)
	return fmt.Sprintf("%x", digest.Sum(nil)), nil
}
|
<gh_stars>1-10
from .base import *
# Production overrides for the base settings: debug off, secure cookies on.
DEBUG = False
# TODO: Add allowed hosts
ALLOWED_HOSTS = []
# Minimal logging config: the project logger emits INFO and above.
LOGGING = {
    'version': 1,
    'loggers': {
        'project_name': {
            'level': "INFO"
        }
    }
}
# For HTTPS
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
|
/**
 * handleFile
 *
 * Reads the file selected in the #filename input, clears the #filemeta
 * container, and renders the file name plus an <img> preview of the file.
 */
function handleFile() {
    var selectedFile = jQuery('#filename')[0].files[0];
    var selectedName = selectedFile.name;
    var meta = jQuery('#filemeta');
    /*Reset the metadata container before rendering fresh content*/
    meta.empty();
    var preview = document.createElement('img');
    preview.src = window.URL.createObjectURL(selectedFile);
    meta.append("<p>" + "File name: " + selectedName + "</p>");
    meta.append(preview);
}
/**
 * buildTableModel
 *
 * Builds a two-column jQuery table (model / classification) from a flat CSV
 * string of alternating "model,answer" values.  The final pair is rendered
 * in bold.
 *
 * @param: csv data (string); the first field is metadata and is discarded
 *
 * @return: DOM table (jQuery element)
 * */
function buildTableModel(data){
    var arrOfData = data.split(',');
    // First CSV field is not a model entry; drop it.
    arrOfData.splice(0, 1);
    var arrLength = arrOfData.length;
    var table = jQuery('<table></table>').addClass('model-answer');
    // Header cells must live inside a <tr> for valid table markup
    // (previously the <th>s were appended bare).
    table.append("<tr><th>Model</th><th>Is this a car?</th></tr>");
    // `i` was an implicit global before; declare it locally.
    for(var i = 0; i < arrLength; i += 2){
        var row;
        if(i === arrLength - 2)
            // Emphasise the final (summary) pair.
            row = jQuery("<tr><td><b>" + arrOfData[i] +"</b></td> <td><b>" + arrOfData[i+1] + "</b></td></tr>");
        else
            row = jQuery("<tr><td>" + arrOfData[i] +"</td> <td>" + arrOfData[i+1] + "</td></tr>");
        table.append(row);
    }
    return table;
}
|
import React from 'react';
import { FlatList, TouchableOpacity } from 'react-native';
import { translate } from 'react-i18next';
import { Container, Content, Text, View } from 'native-base';
import HeaderCustom from '../../components/HeaderCustom';
import styles from './styles';
import ItemDX from './ItemDX';
import OptionsTicket from './optionsTicket';
import material from '../../theme/variables/material';
import CustomButton from '../../components/CustomButton';
import Calendars from '../../components/Calendars';
import SearchAddress from './searchAddress';
@translate('translations')
export default class Booking extends React.PureComponent {
constructor(props) {
super(props);
this.state = {
arrayDeXuat: [
{
toAddress: 'Hà Nội',
fromAddress: 'Thái Lan',
img:
'https://increasify.com.au/wp-content/uploads/2016/08/default-image.png',
price: 570000
},
{
toAddress: 'Hà Nội',
fromAddress: 'Thái Lan',
img:
'https://increasify.com.au/wp-content/uploads/2016/08/default-image.png',
price: 570000
},
{
toAddress: 'Hà Nội',
fromAddress: 'Thái Lan',
img:
'https://increasify.com.au/wp-content/uploads/2016/08/default-image.png',
price: 570000
},
{
toAddress: 'Hà Nội',
fromAddress: 'Thái Lan',
img:
'https://increasify.com.au/wp-content/uploads/2016/08/default-image.png',
price: 570000
},
{
toAddress: 'Hà Nội',
fromAddress: 'Thái Lan',
img:
'https://increasify.com.au/wp-content/uploads/2016/08/default-image.png',
price: 570000
},
{
toAddress: 'Hà Nội',
fromAddress: 'Thái Lan',
img:
'https://increasify.com.au/wp-content/uploads/2016/08/default-image.png',
price: 570000
},
{
toAddress: 'Hà Nội',
fromAddress: 'Thái Lan',
img:
'https://increasify.com.au/wp-content/uploads/2016/08/default-image.png',
price: 570000
},
{
toAddress: 'Hà Nội',
fromAddress: 'Thái Lan',
img:
'https://increasify.com.au/wp-content/uploads/2016/08/default-image.png',
price: 570000
}
],
fromAddress: {
address: 'Hà Nội, Việt Nam',
hint: 'HAN'
},
toAddress: {
address: 'Hà Nội, Việt Nam',
hint: 'HAN'
},
modalVisibleFromAddress: false,
modalVisibleToAddress: false
};
}
renderDeXuat() {
const { t } = this.props;
return (
<View style={styles.deXuat}>
<View style={styles.line} />
<Text style={styles.textDeXuat}>{t('labels.deXuat')}</Text>
<View style={styles.line} />
</View>
);
}
renderItemDX(item) {
return <ItemDX data={item} />;
}
checkAddress(key) {
key === 1
? this.setState({
modalVisibleFromAddress: true
})
: this.setState({
modalVisibleToAddress: true
});
}
chooseAddress(title, obj, key) {
return (
<View style={styles.chooseAddress}>
<Text style={styles.textSmall}>{title}</Text>
<TouchableOpacity
activeOpacity={0.5}
style={styles.contentChooseAddress}
onPress={() => this.checkAddress(key)}
>
<Text style={{ ...styles.textNormal, fontWeight: 'bold' }}>
{obj.address}
</Text>
<Text style={{ ...styles.textNormal, color: material.colorDark2 }}>
{obj.hint}
</Text>
</TouchableOpacity>
</View>
);
}
render() {
const { t } = this.props;
return (
<Container>
<HeaderCustom iconName="filter-list" text="Booking.com" />
<Content contentContainerStyle={styles.container}>
{this.chooseAddress(
t('labels.fromAddress'),
this.state.fromAddress,
1
)}
{this.chooseAddress(t('labels.toAddress'), this.state.toAddress, 2)}
<View style={styles.calendar}>
<Calendars title={t('labels.fromDate')} />
<Calendars title={t('labels.toDate')} />
</View>
<OptionsTicket />
<CustomButton
onPress={() => console.log('ok')}
style={styles.btnSearch}
requestKey="done"
text={t('labels.search')}
upperCase={false}
textStyle={styles.textSearch}
/>
{this.renderDeXuat()}
<FlatList
showsVerticalScrollIndicator={false}
style={styles.listBooking}
showsHorizontalScrollIndicator={false}
data={this.state.arrayDeXuat}
keyExtractor={(item, index) => index + '.'}
renderItem={this.renderItemDX.bind(this)}
/>
</Content>
<SearchAddress
closeModal={() =>
this.setState({
modalVisibleFromAddress: false
})
}
modalVisible={this.state.modalVisibleFromAddress}
/>
<SearchAddress
closeModal={() =>
this.setState({
modalVisibleToAddress: false
})
}
modalVisible={this.state.modalVisibleToAddress}
/>
</Container>
);
}
}
|
# WRITE YOUR SOLUTION HERE:
class ShoppingList:
    """A list of (product, amount) pairs supporting iteration."""

    def __init__(self):
        self.products = []

    def number_of_items(self):
        """Return how many entries are on the list."""
        return len(self.products)

    def add(self, product: str, number: int):
        """Append a product together with the amount to purchase."""
        self.products.append((product, number))

    def __iter__(self):
        # Reset the cursor so every new for-loop starts at the beginning.
        self.n = 0
        return self

    def __next__(self):
        # Inverted guard compared to a try/return ladder: stop first.
        if self.n >= len(self.products):
            raise StopIteration
        entry = self.products[self.n]
        self.n += 1
        return entry
def products_in_shopping_list(shopping_list, amount: int):
    """Return names of items whose amount is at least ``amount``."""
    selected = []
    for name, count in shopping_list:
        if count >= amount:
            selected.append(name)
    return selected
# Demo: build a list and print the items available in quantity >= 8.
if __name__=="__main__":
    my_list = ShoppingList()
    my_list.add("bananas", 10)
    my_list.add("apples", 5)
    my_list.add("alcohol free beer", 24)
    my_list.add("pineapple", 1)
    print("the shopping list contains at least 8 of the following items:")
    for product in products_in_shopping_list(my_list, 8):
        print(product)
|
#!/bin/bash
# temp_sed.sh
# replace 3k0n_w7f in the std sim scripts dir with target PDB
# 1 arg = pdb system prefix : e.g. 2kod_wt
PDB=$1
# Refuse to run without a target prefix: an empty $PDB would substitute the
# placeholder with nothing in every file of the current directory.
if [ -z "$PDB" ]; then
    echo "usage: $0 <pdb_prefix>  (e.g. 2kod_wt)" >&2
    exit 1
fi
# apply globally to all files in current directory
sed -i "s/3k0n_w7f/${PDB}/g" *
|
def withdraw_cash(amount):
    """Break ``amount`` into banknotes using a greedy algorithm.

    Args:
        amount: positive integer amount of cash to withdraw.

    Returns:
        dict mapping note denomination -> count (zero counts included),
        or the string "Invalid input amount" for non-positive or
        non-integer input.
    """
    # bool is a subclass of int, so True/False would otherwise slip through
    # the isinstance check; reject them explicitly.
    if isinstance(amount, bool) or not isinstance(amount, int) or amount <= 0:
        return "Invalid input amount"
    banknotes = [50, 20, 10, 1]
    result = {}
    remaining_amount = amount
    for note in banknotes:
        # divmod yields the note count and the amount still to break down.
        count, remaining_amount = divmod(remaining_amount, note)
        result[note] = count
    return result
// Options bag for IMarkdownParser.parse.  Intentionally empty for now so
// future parsing options can be added without breaking the signature.
interface IMarkdownParserOptions {
    // Define any specific parsing options here
}
class MyMarkdownParser implements IMarkdownParser {
    /**
     * Convert a small subset of Markdown to HTML.
     *
     * Supported syntax: ATX headings (# .. ######), **strong**, *emphasis*
     * and `inline code`; every other non-empty line becomes a paragraph.
     * The previous skeleton left the Promise executor empty, so the Promise
     * never settled; this implements the documented contract.
     *
     * @param markdownText source Markdown text
     * @param options reserved for future parsing options (currently unused)
     * @returns Promise resolving to the generated HTML, or rejecting with an
     *          error message if parsing fails
     */
    parse(markdownText: string, options?: IMarkdownParserOptions): Promise<string> {
        return new Promise<string>((resolve, reject) => {
            try {
                const renderInline = (text: string): string =>
                    text
                        .replace(/\*\*([^*]+)\*\*/g, '<strong>$1</strong>')
                        .replace(/\*([^*]+)\*/g, '<em>$1</em>')
                        .replace(/`([^`]+)`/g, '<code>$1</code>');
                const html = markdownText
                    .split(/\r?\n/)
                    .filter((line) => line.trim().length > 0)
                    .map((line) => {
                        const heading = /^(#{1,6})\s+(.*)$/.exec(line);
                        if (heading) {
                            const level = heading[1].length;
                            return `<h${level}>${renderInline(heading[2])}</h${level}>`;
                        }
                        return `<p>${renderInline(line)}</p>`;
                    })
                    .join('');
                resolve(html);
            } catch (error) {
                reject(`Failed to parse markdown: ${error}`);
            }
        });
    }
}
// Example usage
const markdownParser = new MyMarkdownParser();
// parse() is asynchronous: the rendered HTML arrives in then(); any parser
// failure is surfaced through catch().
markdownParser.parse('# Hello, *world*!', { /* optional parsing options */ })
    .then((html) => {
        console.log(html); // Output: "<h1>Hello, <em>world</em>!</h1>"
    })
    .catch((error) => {
        console.error(error);
    });
<!-- Registration form.  Native constraints (minlength/min/required) give a
     first validation layer; the script below re-checks them on submit. -->
<form>
  <div>
    <label>First Name:</label>
    <input type="text" id="first_name" minlength="3" required />
  </div>
  <div>
    <label>Last Name:</label>
    <input type="text" id="last_name" minlength="3" required />
  </div>
  <div>
    <label>Age:</label>
    <input type="number" id="age" min="0" required />
  </div>
  <input type="submit" value="Submit" />
</form>
<script type="text/javascript">
let form = document.querySelector('form');
form.addEventListener('submit', (e) => {
e.preventDefault();
let firstName = document.getElementById('first_name').value;
if (firstName.length < 3) {
alert('First name must be at least 3 characters long');
return;
}
let lastName = document.getElementById('last_name').value;
if (lastName.length < 3) {
alert('Last name must be at least 3 characters long');
return;
}
let age = document.getElementById('age').value;
if (isNaN(age) || age < 0) {
alert('Age must be a number greater than 0');
return;
}
alert('Form is valid!');
});
</script> |
<reponame>sean-olson/training_resources
// Transpiled SystemJS output of the Angular routing module: maps app paths
// to the media-item components.  NOTE: this is compiler output — edit the
// TypeScript source (app.routing.ts), not this file.
System.register(['@angular/router', './media-item-form.component', './media-item-list.component'], function(exports_1, context_1) {
    "use strict";
    var __moduleName = context_1 && context_1.id;
    var router_1, media_item_form_component_1, media_item_list_component_1;
    var appRoutes, routing;
    return {
        setters:[
            function (router_1_1) {
                router_1 = router_1_1;
            },
            function (media_item_form_component_1_1) {
                media_item_form_component_1 = media_item_form_component_1_1;
            },
            function (media_item_list_component_1_1) {
                media_item_list_component_1 = media_item_list_component_1_1;
            }],
        execute: function() {
            // Route table: /add -> form, /:medium -> list, '' redirects to 'all'.
            appRoutes = [
                { path: 'add', component: media_item_form_component_1.MediaItemFormComponent },
                { path: ':medium', component: media_item_list_component_1.MediaItemListComponent },
                { path: '', pathMatch: 'full', redirectTo: 'all' }
            ];
            exports_1("routing", routing = router_1.RouterModule.forRoot(appRoutes));
        }
    }
});
//# sourceMappingURL=app.routing.js.map
#include <bits/stdc++.h>
using namespace std;
// Rotates an n x n matrix 90 degrees clockwise, in place.
class Solution {
public:
    // BUG FIX: the old code transposed with j over the FULL range, which
    // swapped every off-diagonal pair twice (undoing the transpose), and it
    // reversed each row inside the same loop, corrupting elements that later
    // iterations still needed to swap.  Correct approach: transpose using
    // only the lower triangle (j < i), then reverse every row.
    void rotate(std::vector<std::vector<int>>& matrix) {
        const std::size_t n = matrix.size();
        // Transpose: swap each element below the diagonal with its mirror.
        for (std::size_t i = 0; i < n; ++i) {
            for (std::size_t j = 0; j < i; ++j) {
                std::swap(matrix[i][j], matrix[j][i]);
            }
        }
        // Reverse each row to complete the clockwise rotation.
        for (auto& row : matrix) {
            std::reverse(row.begin(), row.end());
        }
    }
};
// Entry point is intentionally empty: Solution::rotate is exercised by an
// external judge / test harness rather than from main.
int main(){
    return 0;
}
# Join-table migration linking posts to categories.  The composite unique
# index guarantees a post cannot carry the same category twice.
class CreateTags < ActiveRecord::Migration
  def change
    create_table :tags do |t|
      t.references :post, index: true, null: false
      t.references :category, index: true, null: false
      t.timestamps
    end
    # One tag per (post, category) pair.
    add_index :tags, [:post_id, :category_id], unique: true
  end
end
|
<gh_stars>1-10
package edu.washington.cse.instrumentation.analysis.rectree;
/**
 * Visitor over the record-tree node hierarchy (classic visitor pattern):
 * implementations receive one callback per concrete node type.
 */
public interface TreeVisitor {
  /** Called for each {@link CallNode}. */
  void visitCallNode(CallNode callNode);
  /** Called for each {@link ParamNode}. */
  void visitParamNode(ParamNode paramNode);
  /** Called for each {@link PrimingNode}. */
  void visitPrime(PrimingNode primingNode);
  /** Called for each {@link TransitiveNode}. */
  void visitTransitionNode(TransitiveNode transitiveNode);
  /** Called for each {@link CompressedTransitiveNode}. */
  void visitCompressedNode(CompressedTransitiveNode compressedTransitiveNode);
}
|
#!/usr/bin/env sh
# Run the project's unit tests.  All arguments are forwarded verbatim to
# `python -m unittest`; with no arguments, test discovery is used.
# BUG FIX: the old ARGS="$@" + "$ARGS" collapsed multiple arguments into a
# single word; `set --` preserves each argument separately.
if [ "$#" -eq 0 ]; then
    set -- discover
fi
python -m unittest "$@"
|
/* GENERATED FILE */
/* Phosphor "thermometer-simple" icon wrapped as a hybrids web component
   (<ph-thermometer-simple>); renders one SVG variant per `weight`.
   Do not edit by hand — regenerate from the icon source instead. */
import { html, svg, define } from "hybrids";

const PhThermometerSimple = {
  color: "currentColor",
  size: "1em",
  weight: "regular",
  mirrored: false,
  render: ({ color, size, weight, mirrored }) => html`
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width="${size}"
      height="${size}"
      fill="${color}"
      viewBox="0 0 256 256"
      transform=${mirrored ? "scale(-1, 1)" : null}
    >
      ${weight === "bold" &&
      svg`<circle cx="128" cy="188" r="12" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>
  <line x1="128" y1="168" x2="128" y2="92" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>
  <path d="M88,154.77579V56a40,40,0,0,1,80,0v98.77485l.0017-.0014a52,52,0,1,1-80.00385.00054Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>`}
      ${weight === "duotone" &&
      svg`<path d="M160.00787,147.01562,160,147.02539V48a32,32,0,0,0-64,0v99.02539l-.00787-.00977a52,52,0,1,0,64.01574,0ZM128,208a20,20,0,1,1,20-20A19.9999,19.9999,0,0,1,128,208Z" opacity="0.2"/>
  <path d="M96,147.02548V48a32,32,0,0,1,64,0v99.02548l.00787-.01006a52,52,0,1,1-64.01574,0Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
  <circle cx="128" cy="188" r="20" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
  <line x1="128" y1="168" x2="128" y2="88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
      ${weight === "fill" &&
      svg`<path d="M168,143.27441V48a40,40,0,0,0-80,0v95.27441a60,60,0,1,0,80,0ZM128,24a24.02718,24.02718,0,0,1,24,24V80H104V48A24.02718,24.02718,0,0,1,128,24Z"/>`}
      ${weight === "light" &&
      svg`<path d="M96,147.02548V48a32,32,0,0,1,64,0v99.02548l.00787-.01006a52,52,0,1,1-64.01574,0Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>
  <circle cx="128" cy="188" r="20" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>
  <line x1="128" y1="168" x2="128" y2="88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>`}
      ${weight === "thin" &&
      svg`<path d="M96,147.02548V48a32,32,0,0,1,64,0v99.02548l.00787-.01006a52,52,0,1,1-64.01574,0Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>
  <circle cx="128" cy="188" r="20" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>
  <line x1="128" y1="168" x2="128" y2="88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>`}
      ${weight === "regular" &&
      svg`<path d="M96,147.02548V48a32,32,0,0,1,64,0v99.02548l.00787-.01006a52,52,0,1,1-64.01574,0Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
  <circle cx="128" cy="188" r="20" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
  <line x1="128" y1="168" x2="128" y2="88" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
    </svg>
  `,
};

define("ph-thermometer-simple", PhThermometerSimple);

export default PhThermometerSimple;
|
#!/usr/bin/env sh
# Tear down a local ModelCI installation: Conda env, Docker containers,
# cached files and install logs.
# remove Conda environment
conda remove --name modelci --all -y
# stop docker service (skip cleanly when no matching container exists —
# `docker stop` with no arguments would otherwise fail)
CONTAINERS=$(docker ps -a -q --filter="name=modelci.*")
if [ -n "$CONTAINERS" ]; then
    # shellcheck disable=SC2086
    docker stop $CONTAINERS
fi
# remove tmp files
rm -rf ~/tmp/tensorrtserver
rm -f scripts/libnvidia-ml.cache
# remove log
rm -rf /tmp/modelci-install*.log
|
#!/usr/bin/env bash
# Provision a node with Puppet: install the agent plus common modules, then
# optionally apply the manifest given as the first argument.
# install puppet
apt-get -y update && apt-get -y install puppet
# install modules potentially used by manifest
puppet module install puppetlabs-ntp
puppet module install puppetlabs-apache
# apply a manifest eventually
MANIFEST=$1
if [ -f "$MANIFEST" ]
then
    # quote the path so manifests with spaces work; tee keeps a log next to it
    puppet apply "$MANIFEST" | tee "$MANIFEST.log"
fi
|
package internal
import (
"fmt"
"testing"
"github.com/justanotherorganization/justanotherbotkit/internal/test"
)
// TestGet verifies that a user created in the test DB can be fetched back
// by ID with identical ID and name.
func TestGet(t *testing.T) {
	db := initDB(t)
	defer db.Close()
	u := createUser(t, db)
	_u, err := Get(u.GetID(), db)
	test.OK(t, err)
	test.Assert(t, u.GetID() == _u.GetID(), "ids must be equal")
	test.Assert(t, u.GetName() == _u.GetName(), "names must be equal")
}
// TestGetSnap verifies that a fresh user snapshot has version 1, an ID of
// the form "<userID><version>", and carries the user's ID and name.
func TestGetSnap(t *testing.T) {
	db := initDB(t)
	defer db.Close()
	u := createUser(t, db)
	s, err := GetSnap([]byte(u.GetID()), db)
	test.OK(t, err)
	test.Assert(t, s.Version == 1, "version %d should be 1", s.Version)
	test.Assert(t, s.ID == fmt.Sprintf("%s%d", u.GetID(), s.Version), "snapshot ID is not valid")
	test.Assert(t, u.GetID() == s.User.GetID(), "ids must be equal")
	test.Assert(t, u.GetName() == s.User.GetName(), "names must be equal")
}
|
package org.purevalue.arbitrage.trader
import java.time.{Duration, Instant}
import java.util.UUID
import akka.actor.typed.scaladsl.{AbstractBehavior, ActorContext, Behaviors}
import akka.actor.typed.{ActorRef, ActorSystem, Behavior}
import com.typesafe.config.Config
import org.purevalue.arbitrage._
import org.purevalue.arbitrage.traderoom._
import org.purevalue.arbitrage.traderoom.exchange.OrderLimitChooser
import org.slf4j.LoggerFactory
import scala.concurrent.ExecutionContextExecutor
object FooTrader {
  // Behavior factory: wires the trader config and the TradeRoom actor ref.
  def apply(traderConfig: Config, tradeRoom: ActorRef[TradeRoom.Message]):
  Behavior[Command] =
    Behaviors.setup(context => new FooTrader(context, traderConfig, tradeRoom))
  // Protocol: a SearchRun asks the trader to scan the given TradeContext.
  sealed trait Command
  case class SearchRun(tc: TradeContext) extends Command
}
/**
 * A basic trader to evolve the concept.
 *
 * On every SearchRun it scans all crypto trade pairs available on more than
 * one exchange, looks for a buy-low/sell-high opportunity whose estimated
 * gain exceeds a configured USD threshold, and submits the best bundles to
 * the TradeRoom.  Periodically logs life-sign statistics.
 */
class FooTrader(context: ActorContext[FooTrader.Command],
                traderConfig: Config,
                tradeRoom: ActorRef[TradeRoom.Message]) extends AbstractBehavior[FooTrader.Command](context) {
  import FooTrader._
  private val log = LoggerFactory.getLogger(getClass)
  implicit val system: ActorSystem[UserRootGuardian.Reply] = Main.actorSystem
  implicit val executionContext: ExecutionContextExecutor = system.executionContext
  val traderName: String = "FooTrader"
  val maxOpenOrderBundles: Int = traderConfig.getInt("max-open-order-bundles")
  var pendingOrderBundles: Map[UUID, OrderRequestBundle] = Map()
  var activeOrderBundles: Map[UUID, OrderRequestBundle] = Map()
  // counters reported periodically by lifeSign(); *Diff ones reset each report
  var numSearchesTotal: Int = 0
  var numSearchesDiff: Int = 0
  var numSingleSearchesDiff: Int = 0
  var shotsDelivered: Int = 0
  var lastLifeSign: Instant = Instant.now()
  val OrderbookBasedLimitQuantityOverbookingRate: Double = traderConfig.getDouble("order-bundle.orderbook-based-tx-limit-quantity-overbooking")
  val TickerBasedOrderLimitRealityAdjustmentRate: Double = traderConfig.getDouble("order-bundle.ticker-based-tx-limit-beyond-edge-limit")
  val OrderBundleMinGainInUSD: Double = traderConfig.getDouble("order-bundle.min-gain-in-usd")
  val TradeAmountInUSD: Double = traderConfig.getDouble("order-bundle.trade-amount-in-usd")
  // Reasons why findBestShot produced no bundle; collected for statistics.
  sealed trait NoResultReason
  case class NotEnoughExchangesAvailableForTrading() extends NoResultReason
  case class BuyOrSellBookEmpty() extends NoResultReason
  case class BidAskGap() extends NoResultReason
  case class Confused() extends NoResultReason
  case class NoUSDTConversion(asses: Asset) extends NoResultReason
  case class MinGainTooLow() extends NoResultReason
  case class MinGainTooLow2() extends NoResultReason
  // true when neither side of the pair is on the exchange's do-not-touch list
  def canTrade(exchange: String, tradePair: TradePair)(implicit tc: TradeContext): Boolean =
    !tc.doNotTouch(exchange).contains(tradePair.baseAsset) && !tc.doNotTouch(exchange).contains(tradePair.quoteAsset)
  // finds (average) exchange rate based on reference ticker, if tradepair is available there
  // otherwise ticker rate is retrieved from fallBackTickerExchanges
  def findPrice(tradePair: TradePair, fallBackTickerExchanges: Iterable[String])(implicit tc: TradeContext): Option[Double] = {
    // Walks the exchange list recursively, returning the first price found.
    def _findPrice(exchangeOptions: List[String]): Option[Double] = {
      if (exchangeOptions.isEmpty) None
      else tc.tickers(exchangeOptions.head).get(tradePair)
        .map(_.priceEstimate)
        .orElse(_findPrice(exchangeOptions.tail))
    }
    // Reference ticker exchange is consulted first, then the fallbacks.
    val exchangesInOrder = tc.referenceTickerExchange :: fallBackTickerExchanges.filterNot(_ == tc.referenceTickerExchange).toList
    _findPrice(exchangesInOrder)
  }
  // Chooses a realistic order limit from the order book (if present) or the
  // ticker, applying the configured overbooking/adjustment rates.
  def determineLimit(exchange: String, tradePair: TradePair, tradeSide: TradeSide, amountBaseAsset: Double)(implicit tc: TradeContext): Option[Double] = {
    new OrderLimitChooser(tc.orderBooks(exchange).get(tradePair), tc.tickers(exchange)(tradePair))
      .determineRealisticOrderLimit(tradeSide, amountBaseAsset, OrderbookBasedLimitQuantityOverbookingRate, TickerBasedOrderLimitRealityAdjustmentRate)
  }
  // Searches the best buy-cheap/sell-expensive combination for one trade
  // pair across all exchanges offering it; Left = order bundle to place,
  // Right = reason why no profitable bundle exists.
  def findBestShot(tradePair: TradePair)(implicit tc: TradeContext): Either[OrderRequestBundle, NoResultReason] = {
    val availableExchanges: Iterable[String] =
      tc.tradePairs
        .filter(_._2.contains(tradePair))
        .filter(e => canTrade(e._1, tradePair))
        .keys
    if (availableExchanges.size <= 1) return Right(NotEnoughExchangesAvailableForTrading())
    val usdEquivatentCalcCoin: Asset = Asset.UsdEquivalentCoins
      .find(_.canConvertTo(tradePair.baseAsset, tp => findPrice(tp, availableExchanges).isDefined)).get // there must be one exchange having that trade pair
    // Convert the configured USD trade amount into the base asset.
    val tradeAmountBaseAsset: Double =
      CryptoValue(usdEquivatentCalcCoin, TradeAmountInUSD)
        .convertTo(tradePair.baseAsset, tp => findPrice(tp, availableExchanges))
        .amount
    val buyLimits: Map[String, Double] = availableExchanges
      .map(exchange => exchange -> determineLimit(exchange, tradePair, TradeSide.Buy, tradeAmountBaseAsset))
      .filter(_._2.isDefined)
      .map(e => e._1 -> e._2.get)
      .toMap
    val sellLimits: Map[String, Double] = availableExchanges
      .map(exchange => exchange -> determineLimit(exchange, tradePair, TradeSide.Sell, tradeAmountBaseAsset))
      .filter(_._2.isDefined)
      .map(e => e._1 -> e._2.get)
      .toMap
    // Buy where cheapest, sell where most expensive.
    val buyExchange: String = buyLimits.minBy(_._2)._1
    val sellExchange: String = sellLimits.maxBy(_._2)._1
    val buyLimit: Double = buyLimits(buyExchange)
    val sellLimit: Double = sellLimits(sellExchange)
    val minGainInUSD: Double = OrderBundleMinGainInUSD
    // First (pre-fee) profitability screen.
    val calculatedPureWinUSD: Double = CryptoValue(tradePair.baseAsset, tradeAmountBaseAsset * sellLimit - tradeAmountBaseAsset * buyLimit)
      .convertTo(usdEquivatentCalcCoin, tp => findPrice(tp, availableExchanges)).amount
    if (calculatedPureWinUSD < minGainInUSD) return Right(MinGainTooLow())
    val orderBundleId = UUID.randomUUID()
    val ourBuyBaseAssetOrder =
      OrderRequest(
        UUID.randomUUID(),
        Some(orderBundleId),
        buyExchange,
        tradePair,
        TradeSide.Buy,
        tc.feeRates(buyExchange),
        tradeAmountBaseAsset / (1.0 - tc.feeRates(buyExchange)), // usually we have to buy X + fee, because fee gets substracted; an exeption is on binance when paying with BNB
        buyLimit
      )
    val ourSellBaseAssetOrder =
      OrderRequest(
        UUID.randomUUID(),
        Some(orderBundleId),
        sellExchange,
        tradePair,
        TradeSide.Sell,
        tc.feeRates(sellExchange),
        tradeAmountBaseAsset,
        sellLimit
      )
    // Second screen: fee-inclusive bill must still clear the min gain.
    val bill: OrderBill = OrderBill.calc(Seq(ourBuyBaseAssetOrder, ourSellBaseAssetOrder), usdEquivatentCalcCoin, tc.referenceTicker)
    if (bill.sumUSDAtCalcTime < OrderBundleMinGainInUSD) {
      Right(MinGainTooLow2())
    } else {
      Left(OrderRequestBundle(
        orderBundleId,
        traderName,
        Instant.now(),
        List(ourBuyBaseAssetOrder, ourSellBaseAssetOrder),
        bill
      ))
    }
  }
  // Histogram of no-result reasons, reported by lifeSign().
  var noResultReasonStats: Map[NoResultReason, Int] = Map()
  // Scans all multi-exchange crypto pairs and keeps the topN most profitable
  // bundles (sorted descending by estimated USD gain).
  def findBestShots(topN: Int)(implicit tc: TradeContext): Seq[OrderRequestBundle] = {
    var result: List[OrderRequestBundle] = List()
    // Fiat pairs are excluded; only crypto/crypto opportunities are scanned.
    val cryptoTradePairs = tc.tickers.values.flatMap(_.keys).filterNot(_.involvedAssets.exists(_.isFiat))
    for (tradePair: TradePair <- cryptoTradePairs) {
      if (tc.tickers.count(_._2.keySet.contains(tradePair)) > 1) {
        numSingleSearchesDiff += 1
        findBestShot(tradePair) match {
          case Left(shot) if result.size < topN =>
            result = shot :: result
          case Left(shot) if shot.bill.sumUSDAtCalcTime > result.map(_.bill.sumUSDAtCalcTime).min =>
            // Better than the current worst: replace the worst entry.
            result = shot :: result.sortBy(_.bill.sumUSDAtCalcTime).tail
          case Left(_) => // ignoring result
          case Right(noResultReason) =>
            noResultReasonStats += (noResultReason -> (1 + noResultReasonStats.getOrElse(noResultReason, 0)))
          case _ => throw new IllegalStateException()
        }
      }
    }
    result.sortBy(_.bill.sumUSDAtCalcTime).reverse
  }
  // Logs periodic statistics and resets the per-interval counters.
  def lifeSign(): Unit = {
    val duration = Duration.between(lastLifeSign, Instant.now())
    if (duration.compareTo(traderConfig.getDuration("lifesign-interval")) > 0) {
      log.info(s"FooTrader life sign: $shotsDelivered shots delivered. $numSearchesDiff search runs " +
        s"($numSingleSearchesDiff single searches) done in last ${duration.toMinutes} minutes. Total search runs: $numSearchesTotal")
      log.info(s"FooTrader no-result-reasons: $noResultReasonStats")
      lastLifeSign = Instant.now()
      numSingleSearchesDiff = 0
      numSearchesDiff = 0
    }
  }
  // Message handler: each SearchRun triggers one scan; the best (up to 3)
  // bundles are forwarded to the TradeRoom for placement.
  override def onMessage(message: Command): Behavior[Command] = message match {
    case SearchRun(tc) =>
      lifeSign()
      numSearchesDiff += 1
      numSearchesTotal += 1
      findBestShots(3)(tc).foreach { b =>
        shotsDelivered += 1
        tradeRoom ! TradeRoom.PlaceOrderRequestBundle(b)
      }
      Behaviors.same
  }
  log.info("FooTrader started")
}
|
#! /bin/sh
# Create a sample ReShare patron request through the Okapi gateway.
# Defaults below may be overridden by ./.okapirc or ~/.okapirc.
OKAPI="http://localhost:9130"
TENANT="diku"
# FIX: source the rc file(s) BEFORE logging in and before building the
# request, so overridden OKAPI/TENANT values actually take effect (they
# were previously defined but ignored — the curl call hardcoded both).
if [ -f .okapirc ]; then
  . .okapirc
elif [ -f $HOME/.okapirc ]; then
  . $HOME/.okapirc
fi
AUTH_TOKEN=`./okapi-login`
# serviceType:"Loan",
PATRON_REQ_1=`curl --header "X-Okapi-Tenant: ${TENANT}" -H "X-Okapi-Token: ${AUTH_TOKEN}" -H "Content-Type: application/json" -X POST ${OKAPI}/rs/patronrequests -d ' {
  requestingInstitutionSymbol:"RESHARE:LOCALHOSTA",
  title:"Platform for Change",
  author:"Beer, Stafford A",
  neededBy: "2020-01-01",
  pickupLocation: "An undisclosed shadowy corridor somewhere",
  patronNote: "Can I have it in red, please",
  subtitle:"A message from Stafford Beer",
  sponsoringBody:"A sponsoring body",
  publisher: "Wiley",
  placeOfPublication: "London",
  volume: "1",
  issue: "1",
  startPage: "1",
  numberOfPages: "123",
  publicationDate: "1972",
  edition:"2nd",
  isbn: "0471948403",
  informationSource: "Made up by Ian",
  patronReference:"Patron001",
  patronSurname:"patronsurname",
  patronEmail:"patron@surname.tld",
  patronGivenName: "patronGivenName",
  patronType: "Faculty Staff",
  sendToPatron: true,
  state:"Idle",
  isRequester:true,
  notARealProperty:"Test that this is ignored in accordance with Postels law",
  pendingAction:"approve",
  tags:[
    "Testdata", "TestCase001", "MonographTest"
  ],
  customProperties:{
    "patronWalletHash": ["298348743738748728524854289743765"],
  },
  rota:[
    {
      directoryId:"RESHARE:LOCALHOSTB",
      rotaPosition:"0",
    }
  ]
}
'`
echo Result : $PATRON_REQ_1
echo Parse result to extract request ID
PATRON_REQ_1_ID=`echo $PATRON_REQ_1 | jq -r ".id" | tr -d '\r'`
echo Created request 1: $PATRON_REQ_1_ID
|
# Evaluate a language-model checkpoint on the WikiText-103 validation set
# with the shuffle_sentences_first_third_sixth augmentation and the
# last_sixth_eval evaluation function (batch size 1, drop last batch).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-SS-N/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-SS-N/13-512+512+512-SS-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_sentences_first_third_sixth --eval_function last_sixth_eval
#!/bin/bash
# patch version 1, CVE-2019-5736
# Download a static Docker engine build plus the patched runc binary into
# ./images/10-docker-<version>[_<arch>].
set -ex
VERSION=$1
# FIX: previously a non-arm64 invocation left URL/DOCKERARCH empty and the
# download failed; default to amd64 (x86_64 artifacts), keeping the old
# arm64 behavior (aarch64 artifacts + arch-suffixed directory) unchanged.
ARCH=${2:-amd64}
DOCKERARCH="x86_64"
SUFFIX=""
if [ "$ARCH" == "arm64" ]; then
  DOCKERARCH="aarch64"
  SUFFIX="_${ARCH}"
fi
URL="https://download.docker.com/linux/static/stable/${DOCKERARCH}/docker-${VERSION}-ce.tgz"
DEST="./images/10-docker-${VERSION}${SUFFIX}"
mkdir -p $DEST
curl -sL ${URL} | tar xzf - -C $DEST
mv $DEST/docker $DEST/engine
mv $DEST/engine/completion $DEST || true
curl -sL -o $DEST/engine/docker-runc https://github.com/rancher/runc-cve/releases/download/CVE-2019-5736-build2/runc-v${VERSION}-${ARCH}
|
<filename>app/controllers/api/v1/hide_subscribe_to_controller.rb
class Api::V1::SubscribeToController < ApplicationController
  # JSON API calls carry no CSRF token, so skip the check for them.
  skip_before_filter :verify_authenticity_token,
    :if => Proc.new { |c| c.request.format == 'application/json' }
  # Just skip the authentication for now
  before_filter :authenticate_user!
  respond_to :json

  # Toggles the `hidden` flag on the current user's subscription for the
  # given provider. A `hidden` of 0 or nil counts as visible (toggle hides
  # it); any other value is hidden (toggle unhides it).
  #
  # Fixes vs. the previous version:
  #  * a missing subscription no longer raises NoMethodError on nil before
  #    the nil check could run (now renders 404 instead of crashing);
  #  * the duplicated `== 0` and `nil?` branches are collapsed into one.
  def update
    #subscription_type = SubscribedTo.find(params[:id])
    subscriptions = SubscribedTo.where(:user_id => current_user.id)
    subscription_type = subscriptions.find_by_provider_id(params[:provider_id])

    if subscription_type.nil?
      render :status => 404,
             :json => { :success => false,
                        :info => "Subscription not found" }
      return
    end

    if subscription_type.hidden == 0 || subscription_type.hidden.nil?
      subscription_type.update_column(:hidden, 1)
      info = "Subscription Hidden"
    else
      subscription_type.update_column(:hidden, 0)
      info = "Subscription Unhidden"
    end

    render :status => 200,
           :json => { :success => true,
                      :info => info,
                      :data => {"subscription" => subscription_type} }
  end
end
import pandas as pd
import numpy as np
from sklearn import linear_model

# Load the housing dataset from disk.
# NOTE(review): assumes 'location' is already numeric — confirm; a string
# column would make LinearRegression.fit raise.
housing_data = pd.read_csv("data.csv")

# Feature matrix and target vector.
X = housing_data[['house_size', 'bedrooms', 'location']]
y = housing_data['price']

# Fit an ordinary least-squares model on the full dataset.
reg = linear_model.LinearRegression()
reg.fit(X, y)

# In-sample predictions (no train/test split is performed here).
predictions = reg.predict(X)
#!/bin/bash
# Copyright the Hyperledger Fabric contributors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0

# Static-analysis gate: gofmt, goimports, go vet and golint must all pass
# (the script exits non-zero on the first failing stage).
set -euo pipefail

go_files=$(find . -type f -name '*.go'| grep -v "/vendor/") # filter out vendor

## Formatting
echo "running gofmt..."
# -l lists offending files, -s requests simplified constructs.
gofmt_output="$(gofmt -l -s $go_files)"
if [ -n "$gofmt_output" ]; then
    echo "The following files contain gofmt errors:"
    echo "$gofmt_output"
    echo "Please run 'gofmt -l -s -w' for these files."
    exit 1
fi

## Import management
echo "running goimports..."
goimports_output="$(goimports -l $go_files)"
if [ -n "$goimports_output" ]; then
    echo "The following files contain goimport errors:"
    echo "$goimports_output"
    echo "Please run 'goimports -l -w' for these files."
    exit 1
fi

## go vet
echo "running go vet..."
go vet ./...

## golint
echo "running golint..."
# protolator is excluded from linting for now — see the TODO below.
golint -set_exit_status $(go list ./... | grep -v "/vendor/" | grep -v "protolator")
# TODO also lint protolator
## Protobuf decoration
# TODO verify protolator decorates all config protobuf messages
|
// repo: PinoEire/archi
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.preferences;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Spinner;
import com.archimatetool.editor.diagram.sketch.ISketchEditor;
import com.archimatetool.model.ITextAlignment;
/**
* Diagram Appearance Preferences Tab panel
*
* @author <NAME>
*/
public class DiagramAppearancePreferenceTab implements IPreferenceConstants {

    // Combo for the default fill-gradient direction/style of figures.
    private Combo fDefaultGradientCombo;

    private String[] GRADIENT_STYLES = {
            Messages.DiagramAppearancePreferenceTab_16,
            Messages.DiagramAppearancePreferenceTab_17,
            Messages.DiagramAppearancePreferenceTab_18,
            Messages.DiagramAppearancePreferenceTab_19,
            Messages.DiagramAppearancePreferenceTab_20
    };

    // Default width/height (pixels) for newly created ArchiMate figures.
    private Spinner fDefaultArchimateFigureWidthSpinner, fDefaultArchimateFigureHeightSpinner;

    // Word-wrap behaviour for text inside ArchiMate figures.
    private Combo fWordWrapStyleCombo;

    private String[] WORD_WRAP_STYLES = {
            Messages.DiagramAppearancePreferenceTab_4,
            Messages.DiagramAppearancePreferenceTab_5,
            Messages.DiagramAppearancePreferenceTab_6
    };

    // Background image used by new Sketch views.
    private Combo fDefaultSketchBackgroundCombo;

    // Default horizontal alignment and vertical position of figure text.
    private Combo fDefaultTextAlignmentCombo, fDefaultTextPositionCombo;

    private String[] TEXT_ALIGNMENTS = {
            Messages.DiagramAppearancePreferenceTab_8,
            Messages.DiagramAppearancePreferenceTab_9,
            Messages.DiagramAppearancePreferenceTab_10
    };

    // Combo index i maps to TEXT_ALIGNMENT_VALUES[i] (values are 1, 2, 4).
    private int[] TEXT_ALIGNMENT_VALUES = {
            ITextAlignment.TEXT_ALIGNMENT_LEFT,
            ITextAlignment.TEXT_ALIGNMENT_CENTER,
            ITextAlignment.TEXT_ALIGNMENT_RIGHT
    };

    private String[] TEXT_POSITIONS = {
            Messages.DiagramAppearancePreferenceTab_11,
            Messages.DiagramAppearancePreferenceTab_12,
            Messages.DiagramAppearancePreferenceTab_13
    };

    /**
     * Builds the tab's widgets (three groups: Global, Defaults, Sketch)
     * and initialises them from the preference store.
     *
     * @param parent the parent composite supplied by the preference page
     * @return the root composite of this tab
     */
    public Composite createContents(Composite parent) {
        Composite client = new Composite(parent, SWT.NULL);
        client.setLayout(new GridLayout());

        // -------------- Global ----------------------------

        Group globalGroup = new Group(client, SWT.NULL);
        globalGroup.setText(Messages.DiagramAppearancePreferenceTab_0);
        globalGroup.setLayout(new GridLayout(2, false));
        globalGroup.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));

        // Word wrap style
        Label label = new Label(globalGroup, SWT.NULL);
        label.setText(Messages.DiagramAppearancePreferenceTab_7);
        fWordWrapStyleCombo = new Combo(globalGroup, SWT.READ_ONLY);
        fWordWrapStyleCombo.setItems(WORD_WRAP_STYLES);
        fWordWrapStyleCombo.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));

        // -------------- Defaults ----------------------------

        Group defaultsGroup = new Group(client, SWT.NULL);
        defaultsGroup.setText(Messages.DiagramAppearancePreferenceTab_1);
        defaultsGroup.setLayout(new GridLayout(2, false));
        defaultsGroup.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));

        // Sizes (spinners bounded to a sane 30..300 pixel range)
        label = new Label(defaultsGroup, SWT.NULL);
        label.setText(Messages.DiagramAppearancePreferenceTab_2);
        fDefaultArchimateFigureWidthSpinner = new Spinner(defaultsGroup, SWT.BORDER);
        fDefaultArchimateFigureWidthSpinner.setMinimum(30);
        fDefaultArchimateFigureWidthSpinner.setMaximum(300);

        label = new Label(defaultsGroup, SWT.NULL);
        label.setText(Messages.DiagramAppearancePreferenceTab_3);
        fDefaultArchimateFigureHeightSpinner = new Spinner(defaultsGroup, SWT.BORDER);
        fDefaultArchimateFigureHeightSpinner.setMinimum(30);
        fDefaultArchimateFigureHeightSpinner.setMaximum(300);

        // Default Text Alignment
        label = new Label(defaultsGroup, SWT.NULL);
        label.setText(Messages.DiagramAppearancePreferenceTab_14);
        fDefaultTextAlignmentCombo = new Combo(defaultsGroup, SWT.READ_ONLY);
        fDefaultTextAlignmentCombo.setItems(TEXT_ALIGNMENTS);
        fDefaultTextAlignmentCombo.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));

        // Default Text Position
        label = new Label(defaultsGroup, SWT.NULL);
        label.setText(Messages.DiagramAppearancePreferenceTab_15);
        fDefaultTextPositionCombo = new Combo(defaultsGroup, SWT.READ_ONLY);
        fDefaultTextPositionCombo.setItems(TEXT_POSITIONS);
        fDefaultTextPositionCombo.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));

        // Default Gradient
        label = new Label(defaultsGroup, SWT.NULL);
        label.setText(Messages.DiagramFiguresPreferencePage_9);
        fDefaultGradientCombo = new Combo(defaultsGroup, SWT.READ_ONLY);
        fDefaultGradientCombo.setItems(GRADIENT_STYLES);
        fDefaultGradientCombo.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));

        // -------------- Sketch ----------------------------

        Group sketchGroup = new Group(client, SWT.NULL);
        sketchGroup.setLayout(new GridLayout(2, false));
        sketchGroup.setText(Messages.DiagramPreferencePage_19);
        sketchGroup.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));

        // Default Sketch background
        label = new Label(sketchGroup, SWT.NULL);
        label.setText(Messages.DiagramPreferencePage_20);
        fDefaultSketchBackgroundCombo = new Combo(sketchGroup, SWT.READ_ONLY);
        fDefaultSketchBackgroundCombo.setItems(ISketchEditor.BACKGROUNDS);
        fDefaultSketchBackgroundCombo.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));

        setValues();

        return client;
    }

    /** Populates every widget from the current preference-store values. */
    private void setValues() {
        // Stored gradient values start at -1, combo indices start at 0.
        fDefaultGradientCombo.select(getPreferenceStore().getInt(DEFAULT_GRADIENT) + 1); // Starts at -1

        fWordWrapStyleCombo.select(getPreferenceStore().getInt(ARCHIMATE_FIGURE_WORD_WRAP_STYLE));

        fDefaultArchimateFigureWidthSpinner.setSelection(getPreferenceStore().getInt(DEFAULT_ARCHIMATE_FIGURE_WIDTH));
        fDefaultArchimateFigureHeightSpinner.setSelection(getPreferenceStore().getInt(DEFAULT_ARCHIMATE_FIGURE_HEIGHT));

        // The values of these are 1, 2 and 4
        fDefaultTextAlignmentCombo.select(getPreferenceStore().getInt(DEFAULT_ARCHIMATE_FIGURE_TEXT_ALIGNMENT) / 2);

        // The values of these are 0, 1 and 2
        fDefaultTextPositionCombo.select(getPreferenceStore().getInt(DEFAULT_ARCHIMATE_FIGURE_TEXT_POSITION));

        fDefaultSketchBackgroundCombo.select(getPreferenceStore().getInt(SKETCH_DEFAULT_BACKGROUND));
    }

    private IPreferenceStore getPreferenceStore() {
        return Preferences.STORE;
    }

    /**
     * Writes every widget's current state back into the preference store.
     *
     * @return always true (no validation is performed here)
     */
    public boolean performOk() {
        getPreferenceStore().setValue(DEFAULT_GRADIENT, fDefaultGradientCombo.getSelectionIndex() - 1); // Starts at -1
        getPreferenceStore().setValue(ARCHIMATE_FIGURE_WORD_WRAP_STYLE, fWordWrapStyleCombo.getSelectionIndex());
        getPreferenceStore().setValue(DEFAULT_ARCHIMATE_FIGURE_WIDTH, fDefaultArchimateFigureWidthSpinner.getSelection());
        getPreferenceStore().setValue(DEFAULT_ARCHIMATE_FIGURE_HEIGHT, fDefaultArchimateFigureHeightSpinner.getSelection());
        getPreferenceStore().setValue(DEFAULT_ARCHIMATE_FIGURE_TEXT_ALIGNMENT, TEXT_ALIGNMENT_VALUES[fDefaultTextAlignmentCombo.getSelectionIndex()]);
        getPreferenceStore().setValue(DEFAULT_ARCHIMATE_FIGURE_TEXT_POSITION, fDefaultTextPositionCombo.getSelectionIndex());
        getPreferenceStore().setValue(SKETCH_DEFAULT_BACKGROUND, fDefaultSketchBackgroundCombo.getSelectionIndex());
        return true;
    }

    /** Resets every widget to the preference store's default values. */
    protected void performDefaults() {
        fDefaultGradientCombo.select(getPreferenceStore().getDefaultInt(DEFAULT_GRADIENT) + 1); // Starts at -1
        fWordWrapStyleCombo.select(getPreferenceStore().getDefaultInt(ARCHIMATE_FIGURE_WORD_WRAP_STYLE));
        fDefaultArchimateFigureWidthSpinner.setSelection(getPreferenceStore().getDefaultInt(DEFAULT_ARCHIMATE_FIGURE_WIDTH));
        fDefaultArchimateFigureHeightSpinner.setSelection(getPreferenceStore().getDefaultInt(DEFAULT_ARCHIMATE_FIGURE_HEIGHT));
        fDefaultTextAlignmentCombo.select(getPreferenceStore().getDefaultInt(DEFAULT_ARCHIMATE_FIGURE_TEXT_ALIGNMENT) / 2); // Value = 2
        fDefaultTextPositionCombo.select(getPreferenceStore().getDefaultInt(DEFAULT_ARCHIMATE_FIGURE_TEXT_POSITION));
        fDefaultSketchBackgroundCombo.select(getPreferenceStore().getDefaultInt(SKETCH_DEFAULT_BACKGROUND));
    }
}
package com.datasift.client.managedsource.sources;

import com.datasift.client.DataSiftConfig;
import com.datasift.client.managedsource.ManagedDataSourceType;

/**
 * Managed data source for Twitter via Gnip; defaults the ODP ingestion
 * mapping to "gnip_1".
 */
public class TwitterGnip extends BaseSource<TwitterGnip> {
    public TwitterGnip(DataSiftConfig config) {
        super(config, ManagedDataSourceType.TWITTER_GNIP);
        // set the gnip specific ingestion mapping for data sent to ODP
        setParametersField("mapping", "gnip_1");
    }

    /**
     * Overrides the default ingestion mapping.
     *
     * @param mapping the mapping name; must not be null
     * @return this instance, for chaining
     * @throws IllegalArgumentException if mapping is null
     */
    public TwitterGnip setMapping(String mapping) {
        if (mapping == null) {
            throw new IllegalArgumentException("Mapping required");
        }
        return setParametersField("mapping", mapping);
    }
}
|
// Email format check: word-char local part and domain labels optionally
// separated by single '.' or '-', and a TLD of 2+ letters.
// Fix: the original `\w{2,3}` TLD rejected valid modern TLDs such as
// ".info", ".museum" or ".technology"; widened to `{2,}`.
const regex = /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,})+$/;
import random


def roll_dice():
    """Roll two six-sided dice and report the total and its parity."""
    first = random.randint(1, 6)
    second = random.randint(1, 6)
    total = first + second
    parity = 'even' if total % 2 == 0 else 'odd'
    print(f"You rolled {first} and {second}. The total is {total}, which is {parity}.")


roll_dice()
const db = require('../data/dbConfig');
const find = () => {
return db('ingredients');
};
const findByRecipeID = id => {
return db('ingredients')
.where({recipe_id: id});
};
const add = ingredient => {
return db('ingredients')
.insert(ingredient, 'id')
.then((id) => {
return id
});
};
const remove = id => {
return db('ingredients')
.where({id})
.del();
};
const update = (id, ingredient) => {
return db('ingredients')
.where({id})
.update(ingredient)
};
module.exports = {
find,
findByRecipeID,
add,
remove,
update
}; |
# gh_stars: 1-10
from django.db import migrations, models


class Migration(migrations.Migration):
    # Relaxes payment.ip_address to allow blank/NULL values and adds a
    # database index on the column.

    dependencies = [
        ('payment', '0015_auto_20180220_1437'),
    ]

    operations = [
        migrations.AlterField(
            model_name='payment',
            name='ip_address',
            # GenericIPAddressField accepts both IPv4 and IPv6 addresses.
            field=models.GenericIPAddressField(blank=True, db_index=True, null=True),
        ),
    ]
|
// gh_stars: 0
"use strict";
const operation = op => (...f) => (...args) => op(...f.map(el => el(...args)));
const add = operation((a, b) => a + b);
const subtract = operation((a, b) => a - b);
const multiply = operation((a, b) => a * b);
const divide = operation((a, b) => a / b);
const negate = operation(a => -a);
const sin = operation(Math.sin);
const cos = operation(Math.cos);
const cube = operation(a => Math.pow(a, 3));
const cuberoot = operation(Math.cbrt);
const abs = operation(Math.abs);
const iff = operation((a, b, c) => a >= 0 ? b : c);
const avg5 = operation((...param) => param.reduce((sum, el) => sum + el) / param.length);
const med3 = operation((...param) => param.sort((a, b) => a - b)[Math.floor(param.length / 2)]);
const getIndexOfVariable = {
"x" : 0,
"y" : 1,
"z" : 2
};
const variable = a => (...variables) => variables[getIndexOfVariable[a] || 0];
const cnst = a => () => a;
const pi = () => Math.PI;
const e = () => Math.E;
const one = cnst(1);
const two = cnst(2);
const OpExpr = {
"+" : [add, 2],
"-" : [subtract, 2],
"*" : [multiply, 2],
"/" : [divide, 2],
"negate" : [negate, 1],
"sin" : [sin, 1],
"cos" : [cos, 1],
"cube" : [cube, 1],
"cuberoot" : [cuberoot, 1],
"abs" : [abs, 1],
"iff" : [iff, 3],
"avg5" : [avg5, 5],
"med3" : [med3, 3]
};
const CnstVarExpr = {
"x" : variable("x"),
"y" : variable("y"),
"z" : variable("z"),
"pi" : pi,
"e" : e,
"one" : one,
"two" : two
};
function parse(input) {
return input.split(" ").filter(s => s.length > 0).reduce((storage, lexeme) => {
if (OpExpr.hasOwnProperty(lexeme)) {
storage.push(OpExpr[lexeme][0](...storage.splice(storage.length - OpExpr[lexeme][1], OpExpr[lexeme][1])));
} else if (CnstVarExpr.hasOwnProperty(lexeme)) {
storage.push(CnstVarExpr[lexeme]);
} else {
storage.push(cnst(+lexeme));
}
return storage;
}, []).pop();
}
|
#!/bin/sh -
#
# Copyright (c) 1984, 1986, 1990, 1993
#	The Regents of the University of California.  All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
# 3. Neither the name of the University nor the names of its contributors
#    may be used to endorse or promote products derived from this software
#    without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
#	@(#)newvers.sh	8.1 (Berkeley) 4/20/94
# $FreeBSD: head/sys/conf/newvers.sh 297687 2016-04-07 20:30:46Z emaste $

# Generates vers.c (kernel version/ident strings) and bumps the local build
# counter stored in the `version` file.

# Base version identity; BRANCH_OVERRIDE lets release builds relabel the branch.
TYPE="FreeBSD"
REVISION="11.0"
BRANCH="CURRENT"
if [ -n "${BRANCH_OVERRIDE}" ]; then
	BRANCH=${BRANCH_OVERRIDE}
fi
RELEASE="${REVISION}-${BRANCH}"
VERSION="${TYPE} ${RELEASE}"

if [ -z "${SYSDIR}" ]; then
    SYSDIR=$(dirname $0)/..
fi

# Pull __FreeBSD_version (osreldate) out of param.h, or a caller-supplied file.
if [ -n "${PARAMFILE}" ]; then
	RELDATE=$(awk '/__FreeBSD_version.*propagated to newvers/ {print $3}' \
		${PARAMFILE})
else
	RELDATE=$(awk '/__FreeBSD_version.*propagated to newvers/ {print $3}' \
		${SYSDIR}/sys/param.h)
fi

b=share/examples/etc/bsd-style-copyright
# Determine the copyright end year from the top-level COPYRIGHT file, if present.
if [ -r "${SYSDIR}/../COPYRIGHT" ]; then
	year=$(sed -Ee '/^Copyright .* The FreeBSD Project/!d;s/^.*1992-([0-9]*) .*$/\1/g' ${SYSDIR}/../COPYRIGHT)
else
	year=$(date +%Y)
fi
# look for copyright template
for bsd_copyright in ../$b ../../$b ../../../$b /usr/src/$b /usr/$b
do
	if [ -r "$bsd_copyright" ]; then
		COPYRIGHT=`sed \
		    -e "s/\[year\]/1992-$year/" \
		    -e 's/\[your name here\]\.* /The FreeBSD Project./' \
		    -e 's/\[your name\]\.*/The FreeBSD Project./' \
		    -e '/\[id for your version control system, if any\]/d' \
		    $bsd_copyright`
		break
	fi
done

# no copyright found, use a dummy
if [ -z "$COPYRIGHT" ]; then
	COPYRIGHT="/*-
 * Copyright (c) 1992-$year The FreeBSD Project.
 * All rights reserved.
 *
 */"
fi

# add newline
COPYRIGHT="$COPYRIGHT
"

# VARS_ONLY means no files should be generated, this is just being
# included.
if [ -n "$VARS_ONLY" ]; then
	return 0
fi

LC_ALL=C; export LC_ALL
# Initialise the per-directory build counter if it does not exist yet.
if [ ! -r version ]
then
	echo 0 > version
fi

touch version
v=`cat version`
u=${USER:-root}
d=`pwd`
h=${HOSTNAME:-`hostname`}

# Honour SOURCE_DATE_EPOCH for reproducible builds; otherwise use "now".
if [ -n "$SOURCE_DATE_EPOCH" ]; then
	if ! t=`date -r $SOURCE_DATE_EPOCH 2>/dev/null`; then
		echo "Invalid SOURCE_DATE_EPOCH" >&2
		exit 1
	fi
else
	t=`date`
fi
i=`${MAKE:-make} -V KERN_IDENT`
compiler_v=$($(${MAKE:-make} -V CC) -v 2>&1 | grep -w 'version')

# Locate a usable svnversion binary (skipped if already found).
for dir in /usr/bin /usr/local/bin; do
	if [ ! -z "${svnversion}" ] ; then
		break
	fi
	if [ -x "${dir}/svnversion" ] && [ -z ${svnversion} ] ; then
		# Run svnversion from ${dir} on this script; if return code
		# is not zero, the checkout might not be compatible with the
		# svnversion being used.
		${dir}/svnversion $(realpath ${0}) >/dev/null 2>&1
		if [ $? -eq 0 ]; then
			svnversion=${dir}/svnversion
			break
		fi
	fi
done

# Fall back to the base-system svnliteversion if no svnversion worked.
if [ -z "${svnversion}" ] && [ -x /usr/bin/svnliteversion ] ; then
	/usr/bin/svnliteversion $(realpath ${0}) >/dev/null 2>&1
	if [ $? -eq 0 ]; then
		svnversion=/usr/bin/svnliteversion
	else
		svnversion=
	fi
fi

for dir in /usr/bin /usr/local/bin; do
	if [ -x "${dir}/p4" ] && [ -z ${p4_cmd} ] ; then
		p4_cmd=${dir}/p4
	fi
done

# Detect git/hg checkouts of the source tree.
if [ -d "${SYSDIR}/../.git" ] ; then
	for dir in /usr/bin /usr/local/bin; do
		if [ -x "${dir}/git" ] ; then
			git_cmd="${dir}/git --git-dir=${SYSDIR}/../.git"
			break
		fi
	done
fi

if [ -d "${SYSDIR}/../.hg" ] ; then
	for dir in /usr/bin /usr/local/bin; do
		if [ -x "${dir}/hg" ] ; then
			hg_cmd="${dir}/hg -R ${SYSDIR}/.."
			break
		fi
	done
fi

if [ -n "$svnversion" ] ; then
	svn=`cd ${SYSDIR} && $svnversion 2>/dev/null`
	case "$svn" in
	[0-9]*)	svn=" r${svn}" ;;
	*)	unset svn ;;
	esac
fi

# For git checkouts: derive the short hash, try to recover the matching svn
# revision (git-svn metadata, log annotations, or notes), record the branch,
# and flag a dirty working tree.
if [ -n "$git_cmd" ] ; then
	git=`$git_cmd rev-parse --verify --short HEAD 2>/dev/null`
	svn=`$git_cmd svn find-rev $git 2>/dev/null`
	if [ -n "$svn" ] ; then
		svn=" r${svn}"
		git="=${git}"
	else
		svn=`$git_cmd log | fgrep 'git-svn-id:' | head -1 | \
		    sed -n 's/^.*@\([0-9][0-9]*\).*$/\1/p'`
		if [ -z "$svn" ] ; then
			svn=`$git_cmd log --format='format:%N' | \
			    grep '^svn ' | head -1 | \
			    sed -n 's/^.*revision=\([0-9][0-9]*\).*$/\1/p'`
		fi
		if [ -n "$svn" ] ; then
			svn=" r${svn}"
			git="+${git}"
		else
			git=" ${git}"
		fi
	fi
	git_b=`$git_cmd rev-parse --abbrev-ref HEAD`
	if [ -n "$git_b" ] ; then
		git="${git}(${git_b})"
	fi
	if $git_cmd --work-tree=${SYSDIR}/.. diff-index \
	    --name-only HEAD | read dummy; then
		git="${git}-dirty"
	fi
fi

# For Perforce checkouts: record the latest change number and whether any
# files are opened for edit.
if [ -n "$p4_cmd" ] ; then
	p4version=`cd ${SYSDIR} && $p4_cmd changes -m1 "./...#have" 2>&1 | \
		awk '{ print $2 }'`
	case "$p4version" in
	[0-9]*)
		p4version=" ${p4version}"
		p4opened=`cd ${SYSDIR} && $p4_cmd opened ./... 2>&1`
		case "$p4opened" in
		File*) ;;
		//*)	p4version="${p4version}+edit" ;;
		esac
		;;
	*)	unset p4version ;;
	esac
fi

if [ -n "$hg_cmd" ] ; then
	hg=`$hg_cmd id 2>/dev/null`
	svn=`$hg_cmd svn info 2>/dev/null | \
		awk -F': ' '/Revision/ { print $2 }'`
	if [ -n "$svn" ] ; then
		svn=" r${svn}"
	fi
	if [ -n "$hg" ] ; then
		hg=" ${hg}"
	fi
fi

# Emit vers.c with the assembled version/ident strings.
cat << EOF > vers.c
$COPYRIGHT
#define SCCSSTR "@(#)${VERSION} #${v}${svn}${git}${hg}${p4version}: ${t}"
#define VERSTR "${VERSION} #${v}${svn}${git}${hg}${p4version}: ${t}\\n    ${u}@${h}:${d}\\n"
#define RELSTR "${RELEASE}"

char sccs[sizeof(SCCSSTR) > 128 ? sizeof(SCCSSTR) : 128] = SCCSSTR;
char version[sizeof(VERSTR) > 256 ? sizeof(VERSTR) : 256] = VERSTR;
char compiler_version[] = "${compiler_v}";
char ostype[] = "${TYPE}";
char osrelease[sizeof(RELSTR) > 32 ? sizeof(RELSTR) : 32] = RELSTR;
int osreldate = ${RELDATE};
char kern_ident[] = "${i}";
EOF

# Bump the build counter for the next run.
echo $((v + 1)) > version
|
#!/usr/bin/env bash
# Drives repeated client.pl transfer runs against the host pair that
# matches the requested MTU.

# First positional argument is the MTU; abort with a usage hint if missing.
: ${1:?Call with 'packet-driver.sh <SIZE>'!}
: ${mtu:=$1}

# Only the two MTUs we have host mappings for are accepted.
if ( echo $mtu | grep -vE '1500|9000' ); then
    echo Please use 1500 or 9000
    exit 1
fi
#echo "MTU: $mtu"

# Local/remote endpoint addresses, keyed by MTU.
declare -A localHosts
declare -A remoteHosts

localHosts[9000]=192.168.154.4
localHosts[1500]=192.168.199.35

remoteHosts[9000]=192.168.154.5
remoteHosts[1500]=192.168.199.36

blocksize=8192
testfile=testdata-1G.dat
cmd="./client.pl --remote-host ${remoteHosts[$mtu]} --local-host ${localHosts[$mtu]} --file $testfile --buffer-size $blocksize"

# {0..22} is inclusive, i.e. 23 runs — presumably intentional (warm-up +
# samples); confirm before changing.
for i in {0..22}
do
    echo "executing: $cmd"
    $cmd
done
|
// gh_stars: 1-10
// Barrel file: re-export the public pane/bar components from one entry point.
export * from './TilePanes'
export * from './TabsBars'
export * from './StretchBars'
|
#!/bin/bash
# Regression driver: compiles every *.xpl test with the xpl compiler,
# assembles with yasm, links against the RTS library, runs the executable
# and diffs its output against the expected output files.

DIR="testing/tests-xpl-daily-201701121739/"
EXPECTED="testing/tests-xpl-daily-201701121739/expected/"
# DIR="testing/double_basic_tests/"
# EXPECTED="testing/double_basic_tests/expected/"
# DIR="testing/read_tests/"
# EXPECTED="testing/read_tests/expected/"

COUNTER=1
FAILED=0
# Fix: FAILEDTESTS was used without initialisation.
FAILEDTESTS=0
COMPILERFAIL=()
YASMFAIL=()
LDFAIL=()
DIFFFAIL=()

for file in ${DIR}*.xpl
do
    # detecta numero do teste
    FILENAME=$(basename $file)
    NUM=`echo "$FILENAME" | cut -d'-' -f3`
    # comando a ser executado
    NAME=`echo "$file" | cut -d'.' -f1`
    N=`echo "$FILENAME" | cut -d'.' -f1`
    if [[ "$COUNTER" -eq "1" ]]; then
        echo "-----------------------------------------------------"
    fi
    # executar o compilador
    printf "%s : %s " "$COUNTER" "$N"
    { ./xpl/xpl "$file" --target asm; } >& "$NAME.output";
    if [[ "$?" -eq "0" ]]; then
        printf "..... Compiler: OK, "
    else
        printf "..... Compiler: Failed, ";
        COMPILERFAIL+=("$N")
        let FAILED=FAILED+1
    fi
    # produzir o ficheiro binario
    cd $DIR;
    { yasm -felf32 "$N.asm"; } >> /dev/null
    if [[ "$?" -eq "0" ]]; then
        printf "YASM: OK, "
    else
        printf "YASM: Failed, ";
        YASMFAIL+=("$N")
    fi
    # gerar o executavel linkando a biblioteca RTS
    { ld -m elf_i386 -o "$N"exec "$N.o" -lrts; } >> /dev/null
    if [[ "$?" -eq "0" ]]; then
        echo "LD: OK."
    else
        echo "LD: Failed.";
        # Fix: LDFAIL was declared but never populated, so linker
        # failures were missing from the final summary.
        LDFAIL+=("$N")
    fi
    { ./"$N"exec > "$N.out"; } >> /dev/null
    { cd ../..; } >& /dev/null
    echo
    echo "<<<<< Esperado: >>>>>"
    echo "$(cat $EXPECTED$N.out)"
    echo
    echo "««««« Obtido: »»»»»"
    echo "$(cat $NAME.out)"
    echo
    # Diff ignoring whitespace differences and blank lines.
    DIFF=$(diff -w -E -B "$NAME.out" "$EXPECTED$N.out")
    if [ "$DIFF" != "" ];
    then
        let FAILEDTESTS=FAILEDTESTS+1
        echo "#ERRODIFF"
        DIFFFAIL+=("$N")
    fi
    echo "-----------------------------------------------------"
    let COUNTER=COUNTER+1
done
#--------------------------------------------------------------------------------------#
echo
echo
echo $(($COUNTER - 1)) " testes efectuados, falhas abaixo:"
echo
echo "COMPILADOR xpl:"
for i in "${COMPILERFAIL[@]}"
do
    echo "     !falha : " $i
done
echo "YASM:"
for i in "${YASMFAIL[@]}"
do
    echo "     !falha : " $i
done
echo "LD:"
for i in "${LDFAIL[@]}"
do
    echo "     !falha : " $i
done
echo "DIFF:"
for i in "${DIFFFAIL[@]}"
do
    echo "     !falha : " $i
done
echo
echo "Passam " $(($(($COUNTER - 1)) - $FAILEDTESTS)) "/" $(($COUNTER - 1))
echo
|
package com.ytzb.chart;

import com.ytzb.chart.data.Axis;

/**
 * Created by xinxin.wang on 18/5/3.
 *
 * Strategy interface for computing the label text drawn along a chart axis.
 */
public interface IAxisLabelsComputer {
    /**
     * Within the given closed interval, computes the text to be drawn at the
     * axis line positions.
     *
     * @param min  lower bound of the interval (inclusive)
     * @param max  upper bound of the interval (inclusive)
     * @param axis the axis being labelled — presumably receives the computed
     *             labels; confirm against implementations
     */
    void compute(float min, float max, Axis axis);
}
|
#!/bin/sh
# Copyright 2016, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

# For every NetKernel*.xml live-template file in the current directory, print
# a sorted markdown bullet list of "**`name`**: description" entries.
for file in `ls -1 NetKernel*.xml`; do
    echo
    echo "### $file"
    # Keep only <template name="..."> lines, then rewrite each into a
    # markdown bullet: open the name as inline code, drop the attributes
    # after it, and turn the description attribute into the bullet text.
    cat $file | \
        grep "template name" | \
        sed s/"^.*<template name=\""/"- \*\*\`"/ | \
        sed s/" toReformat.*$"// | \
        sed s/"\".*description="/"\`\*\*: "/ | \
        sort
done
|
# repo: Nirovision/mimiron
# -*- coding: utf-8 -*-
import os
import json
from collections import defaultdict
from mimiron.exceptions.vendor import TFVarsMissingConfigFile
from mimiron.exceptions.vendor import InvalidTFVarsConfig
from mimiron.exceptions.vendor import TFVarsDuplicateKeys
__all__ = ['TFVarsConfig', 'TFVarsHelpers']
class TFVarsConfig(object):
    """Loads, validates and persists a set of Terraform tfvars JSON files.

    `self.data` maps each file path to a dict with keys 'group', 'path'
    and 'data' (the parsed JSON contents).
    """

    def __init__(self, repo, paths, load_config=True):
        self.paths = paths
        self.repo = repo

        self.data = {}
        if load_config:
            self.load()
            self.find_duplicates()

    def load(self):
        """Reads every tfvars file in `self.paths` into `self.data`.

        Raises TFVarsMissingConfigFile on unreadable paths and
        InvalidTFVarsConfig on malformed JSON.
        """
        for path in self.paths:
            try:
                # NOTE: The expected file name format should be `<name>[.<group>].json`.
                _, full_filename = os.path.split(path)
                filename, _ = os.path.splitext(full_filename)
                _, group = os.path.splitext(filename)

                # 'r' instead of the deprecated 'rU' ('rU' was removed in
                # Python 3.11; universal newlines are the default anyway).
                with open(path, 'r') as f:
                    self.data[path] = {
                        'group': group.strip('.'),
                        'path': path,
                        'data': json.load(f),
                    }
            except IOError:
                raise TFVarsMissingConfigFile(path)
            except (TypeError, ValueError) as e:
                raise InvalidTFVarsConfig(path, e)

    def find_duplicates(self):
        """Finds duplicates between multiple tfvar config files."""
        duplicates = defaultdict(lambda: defaultdict(bool))
        for tfvars in self.data.values():
            for k in tfvars['data'].keys():
                if duplicates[tfvars['group']].get(k):
                    raise TFVarsDuplicateKeys(k, self.repo['path'])
                duplicates[tfvars['group']][k] = True
        return None

    def save(self):
        """Flushes contents in `self.data` onto disk based on the tfvar path defined as the key."""
        for path, tfvars in self.data.items():
            try:
                with open(path, 'w') as f:
                    # BUG FIX: `f` was previously passed as the second
                    # positional argument of json.dumps(), i.e. as
                    # skipkeys=True, which silently dropped any non-string
                    # keys instead of raising.
                    data = json.dumps(tfvars['data'], ensure_ascii=False, indent=2, sort_keys=True)
                    # Strip trailing whitespace per line and end with a newline.
                    data = data.split('\n')
                    data = [d.rstrip() for d in data]
                    data = '\n'.join(data) + '\n'
                    f.write(data)
            except IOError:
                raise TFVarsMissingConfigFile(path)

    def get_services(self, group):
        """Retrieves all services found based on tfvar files found in `self.data`.

        `self.data` is a dictionary in the form:
        >>> data = {
        ...     'path_1': {
        ...         'path': 'path_1',
        ...         'group': 'xxx',
        ...         'data': {
        ...             'service_1_image': 'xxx',
        ...             'service_1_attr_1': 'yyy',
        ...             'service_1_attr_2': 'zzz',
        ...             'service_2_image': 'xxx',
        ...             'service_3_image': 'xxx',
        ...         },
        ...     },
        ...     'path_2': {
        ...         'path': 'path_2',
        ...         'group': 'xxx',
        ...         'data': {
        ...             'service_4_image': 'xxx',
        ...             'service_4_attr_1': 'yyy',
        ...             'service_4_attr_2': 'zzz',
        ...             'service_5_image': 'xxx',
        ...         },
        ...     },
        ... }

        The result from all of this, should look like:
        >>> result = {
        ...     'service_1': {
        ...         'image': 'xxx',
        ...         'attr_1': 'yyy',
        ...         'attr_2': 'zzz',
        ...     },
        ...     'service_2': {
        ...         'image': 'xxx',
        ...     },
        ...     'service_3': {
        ...         'image': 'xxx',
        ...     },
        ...     'service_4': {
        ...         'image': 'xxx',
        ...         'attr_1': 'yyy',
        ...         'attr_2': 'zzz',
        ...     },
        ...     'service_5': {
        ...         'image': 'xxx',
        ...     },
        ... }
        """
        services = defaultdict(dict)
        service_names = self.get_service_names()

        for service_name in service_names:
            for tfvars in self.data.values():
                # Ungrouped files (empty group) apply to every group.
                if tfvars['group'] and tfvars['group'] != group:
                    continue
                for k, v in tfvars['data'].items():
                    if k.startswith(service_name):
                        services[service_name][k.replace(service_name + '_', '')] = v

        # Clean up any services that don't have any data (due to mismatched groups).
        return {k: v for k, v in services.items() if v}

    def get_service_names(self):
        """Services are identified by the presence of a '<name>_image' key."""
        service_names = []
        for tfvars in self.data.values():
            for k in tfvars['data'].keys():
                if k.endswith('_image'):
                    service_names.append(k.replace('_image', ''))
        return service_names

    def find(self, key, group):
        """Returns the tfvars entry containing `key` (honouring `group`), or None."""
        for path, tfvars in self.data.items():
            if group is not None and group != tfvars['group']:
                continue
            if key in tfvars['data']:
                return tfvars
        return None

    def get(self, key, group):
        """Returns the value stored under `key`, or None when absent."""
        tfvars = self.find(key, group)
        return tfvars['data'].get(key) if tfvars else None

    def set(self, key, value, group):
        """Updates `key` in the single tfvars file that already defines it."""
        tfvars = self.find(key, group)
        if tfvars is None:
            raise NotImplementedError  # updating multiple tfvars not yet supported.
        tfvars['data'][key] = value
class TFVarsHelpers(object):
    """Stateless helpers for working with tfvars service entries."""

    @classmethod
    def normalize_service_name(cls, service):
        # Canonical form: trimmed, with dashes turned into underscores.
        return str(service).strip().replace('-', '_')

    @classmethod
    def get_artifact_key(cls, service):
        # tfvars convention: the deployable artifact lives under '<service>_image'.
        return service + '_image'

    @classmethod
    def find_deployment_repo(cls, service, repos):
        """Given the `service` name and a list of deployment repos, determine the host repo."""
        name = cls.normalize_service_name(service)
        matches = (r for r in repos if name in r['tfvars'].get_service_names())
        return next(matches, None)
|
import assert from "assert"
import { Readable } from "stream"
import { HTMLElement } from "node-html-parser"
import { CannotAccess, InvalidFormat } from "../errors.js"
import { isImageURL, shallowCopy } from "../utils/common.js"
import { parseHTML } from "../utils/html.js"
import { downloadFile, fetchPage } from "../utils/request.js"
import { BaseSource } from "./bases.js"
import { BackupContent, BackupFile, BackupOptions } from "./types.js"
import { getInlines, getTagName } from "../utils/html.js"
export type WeiboOptions = {
} & BackupOptions
export type WeiboUser = {
id: number,
screen_name: string,
description: string,
statuses_count: number | string,
follow_count: number | string,
followers_count: number | string,
avatar_hd: string,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
[key: string]: any
}
export type WeiboDetail = {
id: string,
bid: string,
source: string,
text: string,
pics?: Array<{ large: { url: string } }>,
reposts_count: number | string,
comments_count: number | string,
attitudes_count: number | string,
page_info?: { type: string, media_info: { stream_url_hd: string } },
isLongText?: boolean,
retweeted_status?: WeiboDetail,
user: WeiboUser,
created_at: string,
edited_at?: string,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
[key: string]: any
}
const WeiboMobileURL = "https://m.weibo.cn"
const WeiboURL = "https://www.weibo.com"
const WeiboAPI = "https://m.weibo.cn/statuses/show?id="
const WeiboURLRegex = /^(https?:\/\/)?(.*?\.)?weibo\.(com|cn)\/.*$/i
const WeiboPathRegex = /(?<type>detail|status|\d+)\/(?<post_id>.+?)\/?$/
type WeiboTypes = "post" | "article"
const Base62Codes = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"

// Encodes a non-negative integer in base 62 using Weibo's digit alphabet.
const base62Encode = (value: number): string => {
    if (value === 0) {
        return Base62Codes[0]
    }
    const radix = Base62Codes.length
    const digits: string[] = []
    let rest = value
    while (rest > 0) {
        const digit = rest % radix
        digits.push(Base62Codes[digit])
        rest = (rest - digit) / radix
    }
    return digits.reverse().join("")
}

// Converts a numeric Weibo status id to its short post id: the decimal
// string is split into 7-digit chunks from the right, each chunk is
// base62-encoded, and every chunk except the leading one is zero-padded
// to a width of 4.
const idToPostID = (weiboID: number): string => {
    const decimal = weiboID.toString()
    const chunks: string[] = []
    let end = decimal.length
    while (end > 0) {
        const start = end >= 7 ? end - 7 : 0
        let chunk = base62Encode(parseInt(decimal.slice(start, end), 10))
        if (start > 0) {
            chunk = chunk.padStart(4, "0")
        }
        chunks.push(chunk)
        end = start
    }
    return chunks.reverse().join("")
}
/**
 * Backup source for Sina Weibo posts.
 *
 * Normalized ids have the form `post-<bid>`, where <bid> is the base62
 * encoding of the numeric status id (see idToPostID above).
 */
export class Weibo extends BaseSource<WeiboOptions, WeiboDetail> {
    public readonly key = "weibo"

    /** Returns the normalized id when `url` looks like a Weibo link, else undefined. */
    public testURL(url: string): string | undefined {
        if (!WeiboURLRegex.test(url)) {
            return undefined
        }
        try {
            return this.getID(url)
        } catch {
            return undefined
        }
    }

    /**
     * Extracts the post id from a URL. An explicit `weibo_id` query
     * parameter wins; otherwise the trailing path segment is used.
     * Purely numeric ids are converted to base62 so the same post always
     * yields the same normalized id.
     * @throws InvalidFormat when neither shape matches.
     */
    getID(url: string): string {
        const { pathname, searchParams } = this.getURL(url)
        let postID = searchParams.get("weibo_id")
        if (postID === null) {
            const m = WeiboPathRegex.exec(pathname)
            if (m === null || m.groups === undefined) {
                throw new InvalidFormat(url)
            }
            postID = m.groups.post_id
        }
        // NOTE(review): parseInt accepts a leading-digit bid such as
        // "4Fxl..." and would truncate it to 4 before re-encoding — confirm
        // ids here are either all-digits or letter-prefixed.
        const weiboID = parseInt(postID, 10)
        if (!Number.isNaN(weiboID)) {
            postID = idToPostID(weiboID)
        }
        return `post-${postID}`
    }

    /** Canonical (mobile) URL for a normalized id; only "post" ids are supported. */
    getStandardURL(id: string): string {
        const [type, postID] = id.split("-") as [WeiboTypes, string]
        if (type === "post") {
            return `${WeiboMobileURL}/status/${postID}`
        }
        throw new InvalidFormat(id)
    }

    /** Human-readable (Chinese) type name for a URL or a normalized id. */
    getTypeName(urlOrid: string): string {
        const id = WeiboURLRegex.test(urlOrid) ? this.getID(urlOrid) : urlOrid
        const type = id.split("-")[0] as WeiboTypes
        switch (type) {
            case "post": return "微博"
            case "article": return "头条文章"
            default: throw new InvalidFormat(type)
        }
    }

    /**
     * Fetches a post via the mobile JSON API and assembles the backup
     * payload: metadata, cleaned HTML, attached images/video, and
     * (optionally) a recursive backup of the reposted status.
     * @throws CannotAccess when the API yields no post.
     */
    async backupInner(url: string, options: WeiboOptions): Promise<BackupContent<WeiboDetail>> {
        if (options.htmlFromBrowser !== null) {
            return await this.backupFromBrowser(url, options)
        }
        const { id } = options
        const [type, postID] = id.split("-") as [WeiboTypes, string]
        // TODO: support article
        assert(type === "post")
        const weibo = await this.getWeibo(postID, options)
        if (weibo === undefined) {
            throw new CannotAccess(url)
        }
        const standardURL = `${WeiboURL}/${weibo.user.id}/${weibo.bid}`
        const authorName = weibo.user.screen_name
        const authorURL = `${WeiboURL}/${weibo.user.id}`
        const title = `微博存档 - ${weibo.bid}`
        const createdAt = new Date(weibo.created_at)
        const updatedAt = weibo.edited_at === undefined ? undefined : new Date(weibo.edited_at)
        // Recursively back up the reposted status; failures are best-effort.
        const reposted = []
        if (options.backupReposted && weibo.retweeted_status !== undefined) {
            const repostedID = `post-${weibo.retweeted_status.bid}`
            try {
                const repostedData = await this.backupInner("", shallowCopy(options, { id: repostedID }))
                reposted.push(repostedData)
            } catch {
                // ignore
            }
        }
        // Attached pictures (large size) as lazily-downloaded files.
        const pictures: BackupFile[] = weibo.pics === undefined ? [] : weibo.pics.map(
            pic => ({
                type: "image",
                source: pic.large.url,
                download: async () => await downloadFile(
                    pic.large.url, await options.getCookie(pic.large.url)
                ) as Readable
            })
        )
        // An attached video lives in page_info rather than pics.
        if (weibo.page_info !== undefined && weibo.page_info.type === "video") {
            const videoURL = weibo.page_info.media_info.stream_url_hd
            pictures.push({
                type: "video",
                source: videoURL,
                download: async () => await downloadFile(
                    videoURL, await options.getCookie(videoURL)
                ) as Readable
            })
        }
        const parsedHTML = parseHTML(weibo.text)
        // Replace emoji <img> icons with their alt text so text survives offline.
        for (const node of parsedHTML.querySelectorAll("span.url-icon > img")) {
            const alt = node.getAttribute("alt")
            node.replaceWith(alt === undefined ? "" : alt)
        }
        // Collect inline-able nodes; anchors pointing at images become <img>,
        // other anchors are dropped unless links are being inlined.
        const inlineNodes = getInlines(
            parsedHTML,
            options.inlineImages,
            options.uploadVideos,
            options.inlineLinks || options.inlineImages
        ).map(node => {
            if (getTagName(node) === "a") {
                const href = node.getAttribute("href")
                if (href !== undefined && isImageURL(href, url)) {
                    const img = new HTMLElement("img", {}, "", node.parentNode)
                    img.setAttribute("src", href)
                    node.replaceWith(img)
                    return img
                } else if (!options.inlineLinks) {
                    return undefined
                }
            }
            return node
        }).filter(node => node !== undefined) as HTMLElement[]
        return {
            id,
            title,
            authorName,
            authorURL,
            createdAt,
            updatedAt,
            source: standardURL,
            parsedHTML,
            inlineNodes,
            otherFiles: pictures,
            data: weibo,
            reposted
        }
    }

    /** Fetches one status from the mobile JSON API; undefined on any failure. */
    async getWeibo(postID: string, options: BackupOptions): Promise<WeiboDetail | undefined> {
        try {
            const response = await fetchPage(WeiboAPI + postID, options.getCookie, options.setCookie)
            const data = await response.json() as unknown as {
                ok: number,
                data?: WeiboDetail
            }
            return data.ok === 1 ? data.data : undefined
        } catch {
            return undefined
        }
    }

    /** Placeholder: backing up from a captured browser DOM is not implemented yet. */
    backupFromBrowser(url: string, options: WeiboOptions): Promise<BackupContent<WeiboDetail>> {
        throw new Error("Method not implemented.")
    }
}
|
<gh_stars>1-10
package com.fiixsoftware.challenges.rpgbot.persistence.repositories;
import com.fiixsoftware.challenges.rpgbot.persistence.models.Affection;
import com.fiixsoftware.challenges.rpgbot.persistence.models.GameEntity;
import org.springframework.data.repository.CrudRepository;
import java.util.List;
/**
 * Spring Data repository for {@code Affection} entities.
 */
public interface AffectionRepository extends CrudRepository<Affection, Long>
{
	/** Looks up a single affection by its primary key. */
	Affection findById(long id);

	/**
	 * Returns all affections whose {@code entityWithAffection} field matches
	 * the given entity (Spring Data derived query).
	 */
	List<Affection> findByEntityWithAffection(GameEntity entityWithAffection);
}
|
<reponame>Frky/scat
#ifndef REGISTERS_H_
#define REGISTERS_H_
#include "pin.H"
#include <stdexcept>
/**
* Register families
*
* A set of register corresponding to the same "register memory space"
* (e.g: REG_RAX, REG_EAX, REG_AX, REG_AH, REG_AL) with different
* sizes forms a family.
* Since we mostly care about which space get read & written, this helps
* to streamline the code.
*/
typedef enum {
    /* ERROR REGISTER */
    REGF_NULL,
    /* Return register */
    REGF_AX,
    REGF_FIRST = REGF_AX,   /* first real family, for iteration */
    /* Parameter registers */
    REGF_DI,
    REGF_SI,
    REGF_DX,
    REGF_CX,
    REGF_R8,
    REGF_R9,
    /* Float registers */
    REGF_XMM0, /* Also a return register */
    REGF_XMM1,
    REGF_XMM2,
    REGF_XMM3,
    REGF_XMM4,
    REGF_XMM5,
    REGF_XMM6,
    REGF_XMM7,
    REGF_LAST = REGF_XMM7,  /* last family, for iteration */
    REGF_COUNT              /* number of enumerators, REGF_NULL included */
} REGF;

/* True for the XMM (floating point) families; relies on the XMM
 * enumerators being declared after all integer families. */
#define regf_is_float(regf__) ((regf__) >= REGF_XMM0)
/*
 * Maps a concrete Pin register onto its register family.
 *
 * Every alias of an architectural register (e.g. RAX/EAX/AX/AH/AL)
 * collapses to a single REGF value; registers we do not track yield
 * REGF_NULL.
 *
 * `inline` is required because this function is defined in a header:
 * without it, including registers.h from two translation units would
 * produce duplicate-symbol link errors (ODR violation).
 */
inline REGF regf(REG reg) {
    switch (reg) {
    case REG_RAX:
    case REG_EAX:
    case REG_AX:
    case REG_AH:
    case REG_AL:
        return REGF_AX;
    case REG_RDI:
    case REG_EDI:
    case REG_DI:
    case REG_DIL:
        return REGF_DI;
    case REG_RSI:
    case REG_ESI:
    case REG_SI:
    case REG_SIL:
        return REGF_SI;
    case REG_RDX:
    case REG_EDX:
    case REG_DX:
    case REG_DH:
    case REG_DL:
        return REGF_DX;
    case REG_RCX:
    case REG_ECX:
    case REG_CX:
    case REG_CH:
    case REG_CL:
        return REGF_CX;
    case REG_R8:
    case REG_R8D:
    case REG_R8W:
    case REG_R8B:
        return REGF_R8;
    case REG_R9:
    case REG_R9D:
    case REG_R9W:
    case REG_R9B:
        return REGF_R9;
    case REG_XMM0:
        return REGF_XMM0;
    case REG_XMM1:
        return REGF_XMM1;
    case REG_XMM2:
        return REGF_XMM2;
    case REG_XMM3:
        return REGF_XMM3;
    case REG_XMM4:
        return REGF_XMM4;
    case REG_XMM5:
        return REGF_XMM5;
    case REG_XMM6:
        return REGF_XMM6;
    case REG_XMM7:
        return REGF_XMM7;
    default:
        // TODO handle unknown registers properly (report instead of
        // silently mapping to the error family).
        return REGF_NULL;
    }
}
/*
 * Returns the width of `reg` in bits (8/16/32/64 for GPR aliases,
 * 128 for XMM), or 0 for registers we do not track.
 *
 * `inline`: defined in a header, see regf().
 */
inline UINT32 reg_size(REG reg) {
    switch (reg) {
    case REG_RAX:
    case REG_RDI:
    case REG_RSI:
    case REG_RDX:
    case REG_RCX:
    case REG_R8:
    case REG_R9:
        return 64;
    case REG_EAX:
    case REG_EDI:
    case REG_ESI:
    case REG_EDX:
    case REG_ECX:
    case REG_R8D:
    case REG_R9D:
        return 32;
    case REG_AX:
    case REG_DI:
    case REG_SI:
    case REG_DX:
    case REG_CX:
    case REG_R8W:
    case REG_R9W:
        return 16;
    case REG_AH:
    case REG_CH:
    case REG_DH:
    case REG_DIL:
    case REG_SIL:
    case REG_AL:
    case REG_DL:
    case REG_CL:
    case REG_R8B:
    case REG_R9B:
        return 8;
    case REG_XMM0:
    case REG_XMM1:
    case REG_XMM2:
    case REG_XMM3:
    case REG_XMM4:
    case REG_XMM5:
    case REG_XMM6:
    case REG_XMM7:
        return 128;
    default:
        // Was `return REGF_NULL;` — same numeric value but the wrong enum
        // type for a UINT32 width; return a plain 0 for unknown registers.
        // TODO handle this properly
        return 0;
    }
}
/*
 * Human-readable name of a register family, for logging/debugging.
 *
 * `inline`: defined in a header, see regf(). The parameter is renamed
 * from `regf` to `family` so it no longer shadows the regf() function.
 */
inline string regf_string(REGF family) {
    switch (family) {
    case REGF_AX: return "REGF_AX";
    case REGF_DI: return "REGF_DI";
    case REGF_SI: return "REGF_SI";
    case REGF_DX: return "REGF_DX";
    case REGF_CX: return "REGF_CX";
    case REGF_R8: return "REGF_R8";
    case REGF_R9: return "REGF_R9";
    case REGF_XMM0: return "REGF_XMM0";
    case REGF_XMM1: return "REGF_XMM1";
    case REGF_XMM2: return "REGF_XMM2";
    case REGF_XMM3: return "REGF_XMM3";
    case REGF_XMM4: return "REGF_XMM4";
    case REGF_XMM5: return "REGF_XMM5";
    case REGF_XMM6: return "REGF_XMM6";
    case REGF_XMM7: return "REGF_XMM7";
    default: return "Unknown";
    }
}
/*
 * Returns the 64-bit register holding parameter slot `pid`, or
 * REG_INVALID() for slots that are not register-passed.
 *
 * NOTE(review): slot 0 maps to RAX (the return register) and slots 1-6
 * to RDI/RSI/RDX/RCX/R8/R9, so callers apparently index the first real
 * argument as pid == 1 — confirm at call sites.
 *
 * `inline`: defined in a header, see regf().
 */
inline REG param_reg(unsigned int pid) {
    switch (pid) {
    case 0:
        return REG_RAX;
    case 1:
        return REG_RDI;
    case 2:
        return REG_RSI;
    case 3:
        return REG_RDX;
    case 4:
        return REG_RCX;
    case 5:
        return REG_R8;
    case 6:
        return REG_R9;
    default:
        return REG_INVALID();
    }
}
/*
 * Reads the value of parameter slot `pid` from the captured register
 * context. Slots beyond the register-passed ones (pid > 6) live on the
 * stack and are not handled yet; 0 is returned for them.
 *
 * `inline`: defined in a header, see regf().
 */
inline ADDRINT get_param_value(CONTEXT *ctxt, unsigned int pid) {
    if (pid <= 6)
        return PIN_GetContextReg(ctxt, param_reg(pid));
    /* TODO take stack parameters into account */
    return 0;
}
#endif
|
<reponame>sakai-mirror/evaluation
/**
* Copyright 2005 Sakai Foundation Licensed under the
* Educational Community License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.sakaiproject.evaluation.tool;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.evaluation.logic.EvalCommonLogic;
import org.sakaiproject.evaluation.logic.EvalSettings;
import uk.org.ponder.rsf.components.UIContainer;
import uk.org.ponder.rsf.components.UILink;
import uk.org.ponder.rsf.view.ComponentChecker;
import uk.org.ponder.rsf.viewstate.ViewParameters;
/**
* This bean is called at the top of every producer,
* this is a singleton service
*
* @author <NAME> (<EMAIL>)
*/
/**
 * Singleton bean invoked around every RSF producer render.
 */
public class CommonProducerBean {

	private static Log log = LogFactory.getLog(CommonProducerBean.class);

	/** Lifecycle hook called once when the bean is created. */
	public void init() {
		log.info("INIT");
	}

	/**
	 * Runs before a producer renders. Currently its only job is to inject
	 * the institution-specific stylesheet link when LOCAL_CSS_PATH is
	 * configured (non-null and non-empty).
	 */
	public void beforeProducer(String viewId, UIContainer tofill, ViewParameters viewparams, ComponentChecker checker) {
		//log.info("BEFORE "+viewId);
		String localCSS = (String) settings.get(EvalSettings.LOCAL_CSS_PATH);
		if (localCSS != null && ! "".equals(localCSS)) {
			UILink.make(tofill, "local_css_include", localCSS);
		}
	}

	/** Runs after a producer renders; intentionally a no-op for now. */
	public void afterProducer(String viewId, UIContainer tofill, ViewParameters viewparams, ComponentChecker checker) {
		//log.info("AFTER "+viewId);
		// nothing here right now
	}

	// Injected by Spring (see setters below).
	private EvalSettings settings;
	public void setEvalSettings(EvalSettings settings) {
		this.settings = settings;
	}

	// Injected but currently unused; kept for future use.
	@SuppressWarnings("unused")
	private EvalCommonLogic commonLogic;
	public void setCommonLogic(EvalCommonLogic commonLogic) {
		this.commonLogic = commonLogic;
	}
}
#!/usr/bin/env bash
# Integration tests for `git lfs ls-files`.

. "test/testlib.sh"

# Happy path: committed LFS-tracked files are listed; removed and
# untracked files are not.
begin_test "ls-files"
(
  set -e

  mkdir repo
  cd repo
  git init
  git lfs track "*.dat" | grep "Tracking \*.dat"
  echo "some data" > some.dat
  echo "some text" > some.txt
  echo "missing" > missing.dat
  git add missing.dat
  git commit -m "add missing file"
  [ "6bbd052ab0 * missing.dat" = "$(git lfs ls-files)" ]

  git rm missing.dat
  git add some.dat some.txt
  git commit -m "added some files, removed missing one"

  git lfs ls-files | tee ls.log
  grep some.dat ls.log
  [ `wc -l < ls.log` = 1 ]
)
end_test

# Outside a repository the command must fail with git's standard exit code 128.
begin_test "ls-files: outside git repository"
(
  set +e
  # NOTE(review): `2>&1 > file` redirects stdout to the file but leaves
  # stderr on the original target — the grepped message is expected on
  # stdout; confirm that is intentional.
  git lfs ls-files 2>&1 > ls-files.log
  res=$?

  set -e
  if [ "$res" = "0" ]; then
    echo "Passes because $GIT_LFS_TEST_DIR is unset."
    exit 0
  fi
  [ "$res" = "128" ]
  grep "Not in a git repository" ls-files.log
)
end_test

# Before the first commit there is no ref to resolve (exit code 2);
# after an empty initial commit the listing is simply empty.
begin_test "ls-files: with zero files"
(
  set -e
  mkdir empty
  cd empty
  git init
  git lfs track "*.dat"
  git add .gitattributes

  set +e
  git lfs ls-files 2> ls-files.log
  res=$?
  set -e

  cat ls-files.log
  [ "$res" = "2" ]
  grep "Git can't resolve ref:" ls-files.log

  git commit -m "initial commit"
  [ "$(git lfs ls-files)" = "" ]
)
end_test

# Files with identical content must be listed once per path, not
# deduplicated by OID.
begin_test "ls-files: show duplicate files"
(
  set -e

  mkdir dupRepoShort
  cd dupRepoShort
  git init

  git lfs track "*.tgz" | grep "Tracking \*.tgz"
  echo "test content" > one.tgz
  echo "test content" > two.tgz
  git add one.tgz
  git add two.tgz
  git commit -m "add duplicate files"

  expected="$(echo "a1fff0ffef * one.tgz
a1fff0ffef * two.tgz")"

  [ "$expected" = "$(git lfs ls-files)" ]
)
end_test

# Same as above, but --long prints the full 64-character OID.
begin_test "ls-files: show duplicate files with long OID"
(
  set -e

  mkdir dupRepoLong
  cd dupRepoLong
  git init

  git lfs track "*.tgz" | grep "Tracking \*.tgz"
  echo "test content" > one.tgz
  echo "test content" > two.tgz
  git add one.tgz
  git add two.tgz
  git commit -m "add duplicate files with long OID"

  expected="$(echo "a1fff0ffefb9eace7230c24e50731f0a91c62f9cefdfe77121c2f607125dffae * one.tgz
a1fff0ffefb9eace7230c24e50731f0a91c62f9cefdfe77121c2f607125dffae * two.tgz")"

  [ "$expected" = "$(git lfs ls-files --long)" ]
)
end_test
package main
import (
"os"
app "github.com/wlanboy/kanbantabs/v2/application"
)
// main builds the kanban application, initializes its state, and hands
// the raw process arguments to the command dispatcher.
func main() {
	kanban := app.Kanban{}
	kanban.Initialize()
	kanban.RunCommands(os.Args)
}
|
<filename>mmaction/models/necks/self_feature_regularizer.py
import torch
import torch.nn as nn
import torch.nn.functional as F
from mmcv.cnn import constant_init, kaiming_init
from ..registry import NECKS
from ...core.ops import conv_1x1x1_bn, HSwish
class ChannelReducer(nn.Module):
    """Averages its input over one dimension (the channel dim by default)."""

    def __init__(self, dim=1, keepdim=False):
        super().__init__()
        # Dimension to reduce over and whether to keep it as size 1.
        self.dim = dim
        self.keepdim = keepdim

    def forward(self, x):
        # Tensor.mean is equivalent to torch.mean(x, ...).
        return x.mean(dim=self.dim, keepdim=self.keepdim)
@NECKS.register_module()
class SelfFeatureRegularizer(nn.Module):
    """Self-feature distillation neck.

    Distills intermediate ("student") backbone stages towards the final
    ("teacher") stage, weighting each stage by a learned key/token
    attention. Based on the paper: https://arxiv.org/abs/2103.07350
    """

    def __init__(self, in_channels, spatial_size=7, temporal_size=1, hidden_size=256, loss_weight=1.0):
        super().__init__()

        self.loss_weight = float(loss_weight)
        assert self.loss_weight > 0.0
        self.hidden_size = int(hidden_size)
        assert self.hidden_size > 0
        # 1/sqrt(d) temperature, as in scaled dot-product attention.
        self.scale = self.hidden_size ** (-0.5)

        self.in_channels = in_channels if isinstance(in_channels, (tuple, list)) else [in_channels]
        num_inputs = len(self.in_channels)
        assert num_inputs > 1

        # Per-input sizes; scalars are broadcast to all inputs.
        self.temporal_size = temporal_size if isinstance(temporal_size, (tuple, list)) else [temporal_size]
        assert len(self.temporal_size) == num_inputs
        spatial_size = spatial_size if isinstance(spatial_size, (tuple, list)) else [spatial_size]
        self.spatial_size = [ss if isinstance(ss, (tuple, list)) else (ss, ss) for ss in spatial_size]
        assert len(self.spatial_size) == num_inputs

        # Key encoder per input: global average pool, then two 1x1x1
        # conv+BN stages with an HSwish in between.
        self.keys = nn.ModuleList([
            nn.Sequential(
                nn.AvgPool3d((self.temporal_size[input_id],) + self.spatial_size[input_id], stride=1, padding=0),
                conv_1x1x1_bn(self.in_channels[input_id], self.hidden_size, as_list=False),
                HSwish(),
                conv_1x1x1_bn(self.hidden_size, self.hidden_size, as_list=False),
            )
            for input_id in range(num_inputs)
        ])
        # Learned tokens biasing the student-teacher attention.
        self.student_tokens = nn.Parameter(torch.Tensor(1, num_inputs - 1, self.hidden_size))
        self.student_tokens.data.normal_(std=0.02)
        self.teacher_token = nn.Parameter(torch.Tensor(1, 1, self.hidden_size))
        self.teacher_token.data.normal_(std=0.02)

        # Students: collapse channels. Teacher: collapse channels, then
        # upsample to each student's spatio-temporal resolution.
        self.student_mappers = nn.ModuleList([
            ChannelReducer(dim=1, keepdim=True)
            for _ in range(num_inputs - 1)
        ])
        self.teacher_reducer = ChannelReducer(dim=1, keepdim=True)
        self.teacher_mappers = nn.ModuleList([
            nn.Upsample(size=(self.temporal_size[input_id],) + self.spatial_size[input_id],
                        mode='trilinear', align_corners=False)
            for input_id in range(num_inputs - 1)
        ])

    def init_weights(self):
        """Re-initializes conv and BN weights."""
        for m in self.modules():
            if isinstance(m, nn.Conv3d):
                kaiming_init(m)
            elif isinstance(m, nn.BatchNorm3d):
                constant_init(m, 1.0, 0.0)
            elif isinstance(m, nn.Parameter):
                # NOTE(review): this branch is unreachable -- self.modules()
                # yields nn.Module instances only, never nn.Parameter; the
                # tokens keep their __init__ (std=0.02) initialization.
                m.data.normal_()

    def forward(self, inputs, return_extra_data=False):
        """Pass-through neck.

        Returns `inputs` with the second-to-last entry dropped; in training
        mode also computes the attention weights and student/teacher feature
        pairs consumed by `loss` (returned when return_extra_data=True).
        """
        assert len(inputs) == len(self.keys) + 1
        # Drop the penultimate stage so the remaining entries align 1:1
        # with self.keys. NOTE(review): requires `inputs` to be a list
        # (tuple + list concatenation would raise) -- confirm with callers.
        inputs = inputs[:-2] + [inputs[-1]]

        attention, student_features, teacher_features = None, None, None
        if self.training:
            # One pooled key vector of size hidden_size per input.
            keys = [
                key_module(input_feature).view(-1, 1, self.hidden_size)
                for input_feature, key_module in zip(inputs, self.keys)
            ]
            student_keys = torch.cat(keys[:-1], dim=1)
            teacher_key = keys[-1].view(-1, self.hidden_size, 1)

            # Scaled dot-product attention of student keys vs the teacher
            # key, biased by the learned token products.
            key_prod = torch.matmul(student_keys, teacher_key).squeeze(2)
            token_prod = torch.sum(self.teacher_token * self.student_tokens, dim=-1)
            attention = F.softmax(self.scale * (key_prod + token_prod), dim=-1)

            student_features = [mapper(x_) for x_, mapper in zip(inputs[:-1], self.student_mappers)]
            teacher_feature = self.teacher_reducer(inputs[-1])
            teacher_features = [mapper(teacher_feature) for mapper in self.teacher_mappers]

        # Returns the (filtered) input features unchanged.
        if return_extra_data:
            return inputs, dict(attention=attention,
                                student_features=student_features,
                                teacher_features=teacher_features)
        else:
            return inputs

    def loss(self, attention=None, student_features=None, teacher_features=None, **kwargs):
        """Attention-weighted MSE between student and teacher features.

        Returns an empty dict (no loss) when any input is missing, e.g.
        during evaluation.
        """
        losses = dict()
        if attention is None or student_features is None or teacher_features is None:
            return losses

        all_losses = [
            self._reg_loss(student_feature, teacher_feature).view(-1, 1)
            for student_feature, teacher_feature in zip(student_features, teacher_features)
        ]
        weighted_losses = attention * torch.cat(all_losses, dim=1)
        losses['loss/sfr'] = self.loss_weight * torch.mean(torch.sum(weighted_losses, dim=1))

        return losses

    @staticmethod
    def _reg_loss(x, y):
        # Per-sample mean squared error over all non-batch dimensions.
        sqr_diff = (x - y) ** 2
        return torch.mean(sqr_diff, dim=(1, 2, 3, 4))
|
class GemPackages(FactBase):
    """Tracks installed gem packages as a mapping of name -> version list."""

    def __init__(self):
        self.packages = {}

    def add_package(self, package_name, version):
        # Create the version list on first sight, then append.
        self.packages.setdefault(package_name, []).append(version)

    def remove_package(self, package_name):
        # pop() with a None sentinel: removes when present, reports otherwise.
        if self.packages.pop(package_name, None) is None:
            print(f"{package_name} is not installed.")

    def get_package_info(self, package_name):
        versions = self.packages.get(package_name)
        if versions is None:
            return f"{package_name} is not installed."
        return {package_name: versions}

    def get_installed_packages(self):
        # Shared reference, not a copy -- same contract as before.
        return self.packages
<filename>clock/team.js<gh_stars>0
// Static roster used by the clock rotation; order matters to callers.
function teamArrayStatic() {
    const members = ['Lillian', 'Alex', 'Carole', 'Brian'];
    return members;
}
|
# For vcf: transform 0/0, 0/1, 1/1 to A, B and H; missing to N.
# Usage: script.sh input.vcf > genotypes.tsv
# Output columns: CHROM POS REF ALT then one letter per sample
# (A=hom-ref, H=het, B=hom-alt, N=missing/other). Header lines are
# skipped except #CHROM, which becomes the output header row.
vcf=$1
perl -ne 'if(/\#/){next unless /\#CHR/} chomp; @t=split /\s+/,$_; if(/\#CHR/){print join("\t", @t[0,1,3,4,9..$#t]),"\n"; next} map{if($t[$_]=~/0\/0/){$t[$_]="A"}elsif($t[$_]=~/0\/1/){$t[$_]="H"}elsif($t[$_]=~/1\/1/){$t[$_]="B"}else{$t[$_]="N"} }9..$#t; print join("\t", @t[0,1,3,4,9..$#t]),"\n";' $vcf
|
#!/bin/bash
# Launches the antsocks daemon in server mode (-s); expects antsocksd.py
# in the current working directory.
/usr/bin/python3 antsocksd.py -s
|
// App.js
import React from 'react';
import SearchForm from './components/SearchForm';
import BookList from './components/BookList';
class App extends React.Component {
state = {
books: [],
};
handleSearch = searchText => {
fetch(`https://www.googleapis.com/books/v1/volumes?q=${searchText}`)
.then(response => response.json())
.then(data => {
const books = data.items.map(book => ({
title: book.volumeInfo.title,
author: book.volumeInfo.authors[0],
link: book.volumeInfo.infoLink
}));
this.setState({ books: books });
});
};
render() {
return (
<div>
<SearchForm onSearch={this.handleSearch} />
<BookList books={this.state.books} />
</div>
);
}
}
export default App; |
// open all dbs
// NOTE(review): browser-console scraping snippets for a course page;
// they rely on jQuery ($) being loaded and use implicit globals.
FIND="Discussion Forum";
i=0;
$("a").each(function(){
    str = $(this).text();
    if (str.match(FIND)) {
        console.log(str);
        link = $(this).attr("href");
        console.log(link);
        // Fetch each matching page and pull the first table link's text.
        $.ajax({
            url: link,
        }).done(function(data) {
            val = $("td a", data.responseXML).text(); //can't parse with jquery selector
            console.log(val);
        });
    }
    i = i + 1;
});

// show all links (works)
// Builds one HTML section per category and writes them into the document.
FIND = [ "Discussion Forum", "Learning Journal", "Written Assignment", "Self Quiz", "Graded Quiz", "Final Exam" ];
OUT="";
$.each(FIND, function(key, find) {
    OUT= OUT + get(find);
});
document.write(OUT);

// Collects every anchor whose text matches FIND into an HTML fragment:
// an <h3> heading followed by the matching links.
function get(FIND) {
    RES="<h3>"+FIND+"</h3>";
    $("a").each(function(){
        str = $(this).text();
        if (str.match(FIND)) {
            console.log(str);
            text = $(this).text();
            url = $(this).attr("href");
            RES=RES+"<a href='"+url+"'>"+text+"</a><br><br>";
        }
    });
    return RES;
}
|
"""Preprocessing for specialized losses."""
from omegaconf import DictConfig, ListConfig
import numpy as np
from sklearn.decomposition import PCA
import torch
from torchtyping import TensorType, patch_typeguard
from typeguard import typechecked
from typing import List, Optional, Union
from pose_est_nets.datasets.datamodules import UnlabeledDataModule
from pose_est_nets.datasets.datasets import HeatmapDataset
from pose_est_nets.datasets.utils import clean_any_nans
# Device for the loss-parameter tensors built below; prefer GPU when available.
_TORCH_DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
patch_typeguard()  # use before @typechecked
@typechecked
def compute_multiview_pca_params(
    data_module: UnlabeledDataModule,
    components_to_keep: int = 3,
    empirical_epsilon_percentile: float = 90.0,
) -> None:
    """Compute eigenvalues and eigenvectors of labeled data for multiview pca loss.

    Note: this function updates attributes of `data_module`

    Args:
        data_module: initialized unlabeled data module, which contains all the relevant
            information
        components_to_keep: projections of predicted keypoints onto remaining components
            will be penalized; enforces a low-dimensional prediction from the network
        empirical_epsilon_percentile: percentile (in [0, 100]) of the absolute
            projections onto the discarded components; stored as the loss
            tolerance `epsilon`

    """
    print("Computing PCA on multiview keypoints...")
    # collect data on which to run pca from data module
    # Subset inherits from dataset, it doesn't have access to dataset.keypoints
    if type(data_module.train_dataset) == torch.utils.data.dataset.Subset:
        # copy data module to manipulate it without interfering with original
        if type(data_module.dataset) == HeatmapDataset:
            # use the parent class's __getitem__ to obtain keypoints, not heatmaps
            pca_data = super(type(data_module.dataset), data_module.dataset)
        else:
            pca_data = data_module.dataset
        indxs = torch.tensor(data_module.train_dataset.indices)
        data_arr = torch.index_select(
            data_module.dataset.keypoints.detach().clone(), 0, indxs
        )  # data_arr is shape (train_batches, keypoints, 2)
        # apply augmentation which *downsamples* the frames
        if data_module.dataset.imgaug_transform:
            i = 0
            for idx in indxs:
                batch_dict = pca_data.__getitem__(idx)
                data_arr[i] = batch_dict["keypoints"].reshape(-1, 2)
                i += 1
    else:
        data_arr = (
            data_module.train_dataset.keypoints.detach().clone()
        )  # won't work for random splitting
        # apply augmentation which *downsamples* the frames/keypoints
        if data_module.train_dataset.imgaug_transform:
            for i in range(len(data_arr)):
                data_arr[i] = super(
                    type(data_module.train_dataset), data_module.train_dataset
                ).__getitem__(i)["keypoints"]
    # format data and run pca
    # shape will be (2 * num_views, num_batches * num_keypoints)
    arr_for_pca = format_multiview_data_for_pca(
        data_arr,
        data_module.loss_param_dict["pca_multiview"]["mirrored_column_matches"],
    )
    print("Initial array for pca shape: {}".format(arr_for_pca.shape))
    # drop observations (columns) containing any NaN keypoint
    good_arr_for_pca = clean_any_nans(arr_for_pca, dim=0)
    pca = PCA(n_components=good_arr_for_pca.shape[0], svd_solver="full")
    pca.fit(good_arr_for_pca.T)
    print("Done!")
    print(
        "good_arr_for_pca shape: {}".format(good_arr_for_pca.shape)
    )  # TODO: have prints as tests
    pca_prints(pca, components_to_keep)  # print important params
    data_module.loss_param_dict["pca_multiview"]["kept_eigenvectors"] = torch.tensor(
        pca.components_[:components_to_keep],
        dtype=torch.float32,
        device=_TORCH_DEVICE,  # TODO: be careful for multinode
    )
    data_module.loss_param_dict["pca_multiview"][
        "discarded_eigenvectors"
    ] = torch.tensor(
        pca.components_[components_to_keep:],
        dtype=torch.float32,
        device=_TORCH_DEVICE,  # TODO: be careful for multinode
    )
    # compute the keypoints' projections on the discarded components, to
    # estimate the e.g., 90th percentile and determine epsilon.
    # absolute value is important -- projections can be negative.
    discarded_eigs = data_module.loss_param_dict["pca_multiview"][
        "discarded_eigenvectors"
    ]
    proj_discarded = torch.abs(
        torch.matmul(
            arr_for_pca.T,
            discarded_eigs.clone().detach().cpu().T,
        )
    )
    # setting axis = 0 generalizes to multiple discarded components
    epsilon = np.nanpercentile(
        proj_discarded.numpy(), empirical_epsilon_percentile, axis=0
    )
    print(epsilon)
    data_module.loss_param_dict["pca_multiview"]["epsilon"] = torch.tensor(
        epsilon,
        dtype=torch.float32,
        device=_TORCH_DEVICE,  # TODO: be careful for multinode
    )
@typechecked
def compute_singleview_pca_params(
    data_module: UnlabeledDataModule, empirical_epsilon_percentile: float = 90.0
) -> None:
    """Compute eigenvalues and eigenvectors of labeled data for singleview pca loss.

    Note: this function updates attributes of `data_module`

    Args:
        data_module: initialized unlabeled data module, which contains all the relevant
            information
        empirical_epsilon_percentile: percentile (in [0, 100]) of the absolute
            projections onto the discarded components; stored as the loss
            tolerance `epsilon`

    """
    print("Computing PCA on singleview keypoints...")
    # collect data on which to run pca from data module
    # Subset inherits from dataset, it doesn't have access to dataset.keypoints
    if type(data_module.train_dataset) == torch.utils.data.dataset.Subset:
        # copy data module to manipulate it without interfering with original
        if type(data_module.dataset) == HeatmapDataset:
            # use the parent class's __getitem__ to obtain keypoints, not heatmaps
            pca_data = super(type(data_module.dataset), data_module.dataset)
        else:
            pca_data = data_module.dataset
        indxs = torch.tensor(data_module.train_dataset.indices)
        data_arr = torch.index_select(
            data_module.dataset.keypoints.detach().clone(), 0, indxs
        )  # data_arr is shape (train_batches, keypoints, 2)
        # apply augmentation which *downsamples* the frames/keypoints
        if data_module.dataset.imgaug_transform:
            i = 0
            for idx in indxs:
                batch_dict = pca_data.__getitem__(idx)
                data_arr[i] = batch_dict["keypoints"].reshape(-1, 2)
                i += 1
    else:
        data_arr = (
            data_module.train_dataset.keypoints.detach().clone()
        )  # won't work for random splitting
        # apply augmentation which *downsamples* the frames
        if data_module.train_dataset.imgaug_transform:
            for i in range(len(data_arr)):
                data_arr[i] = super(
                    type(data_module.train_dataset), data_module.train_dataset
                ).__getitem__(i)["keypoints"]
    # format data and run pca
    # shape is (num_batches, num_keypoints * 2)
    arr_for_pca = data_arr.reshape(data_arr.shape[0], -1)
    print("Initial array for pca shape: {}".format(arr_for_pca.shape))
    # drop samples (rows) containing any NaN keypoint
    good_arr_for_pca = clean_any_nans(arr_for_pca, dim=1)
    print(
        "good_arr_for_pca shape: {}".format(good_arr_for_pca.shape)
    )  # TODO: have prints as tests
    # want to make sure we have more rows than columns after doing nan filtering
    assert (
        good_arr_for_pca.shape[0] >= good_arr_for_pca.shape[1]
    ), "filtered out too many nan frames"
    pca = PCA(n_components=good_arr_for_pca.shape[1], svd_solver="full")
    pca.fit(good_arr_for_pca)
    print("Done!")
    # keep just enough components to reach the configured explained variance
    tot_explained_variance = np.cumsum(pca.explained_variance_ratio_)
    components_to_keep = int(
        np.where(
            tot_explained_variance
            >= data_module.loss_param_dict["pca_singleview"]["min_variance_explained"]
        )[0][0]
    )
    components_to_keep += 1  # cumsum is a d - 1 dimensional vector where the 0th element is the sum of the 0th and 1st element of the d dimensional vector it is summing over
    pca_prints(pca, components_to_keep)  # print important params
    data_module.loss_param_dict["pca_singleview"]["kept_eigenvectors"] = torch.tensor(
        pca.components_[:components_to_keep],
        dtype=torch.float32,
        device=_TORCH_DEVICE,  # TODO: be careful for multinode
    )
    data_module.loss_param_dict["pca_singleview"][
        "discarded_eigenvectors"
    ] = torch.tensor(
        pca.components_[components_to_keep:],
        dtype=torch.float32,
        device=_TORCH_DEVICE,  # TODO: be careful for multinode
    )
    # compute the keypoints' projections on the discarded components, to
    # estimate the e.g., 90th percentile and determine epsilon.
    # absolute value is important -- projections can be negative.
    # shape is (num_discarded_components, num_keypoints * 2)
    discarded_eigs = data_module.loss_param_dict["pca_singleview"][
        "discarded_eigenvectors"
    ]
    # array for pca shape is (num_batches, num_keypoints * 2)
    proj_discarded = torch.abs(
        torch.matmul(
            arr_for_pca,
            discarded_eigs.clone().detach().cpu().T,
        )
    )
    # setting axis = 0 generalizes to multiple discarded components
    # shape (num_discarded_components, 1)
    epsilon = np.nanpercentile(
        proj_discarded.numpy(), empirical_epsilon_percentile, axis=0
    )
    print(epsilon)
    data_module.loss_param_dict["pca_singleview"]["epsilon"] = torch.tensor(
        epsilon,
        dtype=torch.float32,
        device=_TORCH_DEVICE,  # TODO: be careful for multinode
    )
@typechecked
def pca_prints(pca: PCA, components_to_keep: int) -> None:
    """Print a short summary of a fitted PCA: the per-component explained
    variance ratios and the total variance captured by the kept components."""
    explained_ratios = np.round(pca.explained_variance_ratio_, 3)
    kept_variance = np.round(pca.explained_variance_ratio_[:components_to_keep].sum(), 3)
    print("Results of running PCA on keypoints:")
    print("components kept: {}".format(components_to_keep))
    print("explained_variance_ratio_: {}".format(explained_ratios))
    print("total_explained_var: {}".format(kept_variance))
@typechecked
def format_multiview_data_for_pca(
    data_arr: TensorType["batch", "num_keypoints", "2"],
    mirrored_column_matches: Union[ListConfig, List],
) -> TensorType["two_time_num_views", "batch_times_num_keypoints"]:
    """Reshape multiview keypoints into a single 2D matrix for PCA.

    Args:
        data_arr: keypoints from training data
        mirrored_column_matches: one element for each camera view; each element is
            itself a list that contains indices into the overall ordering of the
            keypoints

    Returns:
        formatted data to run pca, shape (2 * num_views, batch * num_keypoints)
    """
    n_views = len(mirrored_column_matches)
    n_keypoints = len(mirrored_column_matches[0])  # every view must list the same count
    data_arr_views = []
    # separate views and reformat
    for view in range(n_views):
        assert len(mirrored_column_matches[view]) == n_keypoints
        # select this view's keypoints, then flatten to (2, batch * num_keypoints)
        data_arr_tmp = data_arr[:, np.array(mirrored_column_matches[view]), :]
        data_arr_tmp = data_arr_tmp.permute(2, 0, 1).reshape(2, -1)
        data_arr_views.append(data_arr_tmp)
    # concatenate views along the x/y dimension
    data_arr = torch.cat(data_arr_views, dim=0)
    return data_arr
|
<filename>modules/carto/src/auth.js
// Fallback credentials for CARTO's public demo account; `{user}` in the
// template is substituted with the username when building API URLs.
const defaultCredentials = {
  username: 'public',
  apiKey: 'default_public',
  serverUrlTemplate: 'https://{user}.carto.com'
};

// Module-level singleton holding the currently active credentials.
let credentials = defaultCredentials;
/**
 * Overrides parts of the active credentials; fields not present in `opts`
 * keep their current values.
 */
export function setDefaultCredentials(opts) {
  credentials = Object.assign({}, credentials, opts);
}
// Returns the active credentials object (shared reference, not a copy).
export function getDefaultCredentials() {
  return credentials;
}
|
"""add test_result_status items
Revision ID: 95ecf01d9cb4
Revises: ea71f73f5460
Create Date: 2017-03-29 19:41:26.581925
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '95ecf01d9cb4'
down_revision = 'ea71f73f5460'
branch_labels = None
depends_on = None
def upgrade():
#~ op.bulk_insert('test_result_status',
#~ [
#~ {'status': 'Created'},
#~ {'status': 'Completed'},
#~ {'status': 'Failed'}
#~ ]
#~ )
op.execute("INSERT INTO test_result_status (status) VALUES ('Created')")
op.execute("INSERT INTO test_result_status (status) VALUES ('Completed')")
op.execute("INSERT INTO test_result_status (status) VALUES ('Failed')")
op.execute("UPDATE test_result SET status_id=(SELECT id FROM test_result_status where status='Created') WHERE test_passed is null")
op.execute("UPDATE test_result SET status_id=(SELECT id FROM test_result_status where status='Completed') WHERE test_passed=true")
op.execute("UPDATE test_result SET status_id=(SELECT id FROM test_result_status where status='Failed') WHERE test_passed=false")
def downgrade():
op.execute("delete from test_result_status where status in('Created', 'Completed', 'Failed')") |
#!/bin/bash
#
##################################################
## Use BSD License ##
## Site: http://blog.iternull.com ##
## By: ZMOM1031 ##
## Time: 2015/08/15 ##
##################################################
## Download Links
# Google factory images (stock Android) per device/build:
# https://developers.google.com/android/nexus/images
l_android_nexus_5_kitkat="https://dl.google.com/dl/android/aosp/hammerhead-ktu84p-factory-35ea0277.tgz"
l_android_nexus_5_lollipop="https://dl.google.com/dl/android/aosp/hammerhead-lmy48i-factory-a38c3441.tgz"
#l_android_nexus_5_lollipop="https://dl.google.com/dl/android/aosp/hammerhead-lmy47i-factory-df127988.tgz"
l_android_nexus_6_lollipop="https://dl.google.com/dl/android/aosp/shamu-lmy47i-factory-c8afc588.tgz"
l_android_nexus_7_2012_wifi_kitkat="https://dl.google.com/dl/android/aosp/nakasi-ktu84p-factory-2c6e4d6f.tgz"
l_android_nexus_7_2012_wifi_lollipop="https://dl.google.com/dl/android/aosp/nakasi-lmy47v-factory-b48bd71d.tgz"
l_android_nexus_7_2012_mobile_kitkat="https://dl.google.com/dl/android/aosp/nakasig-ktu84p-factory-ef6aaa76.tgz"
l_android_nexus_7_2012_mobile_lollipop="https://dl.google.com/dl/android/aosp/nakasig-lmy47v-factory-b2d44613.tgz"
l_android_nexus_7_2013_wifi_kitkat="https://dl.google.com/dl/android/aosp/razor-ktu84p-factory-b1b2c0da.tgz"
l_android_nexus_7_2013_wifi_lollipop="https://dl.google.com/dl/android/aosp/razor-lmy47v-factory-a58e6175.tgz"
l_android_nexus_7_2013_mobile_kitkat="https://dl.google.com/dl/android/aosp/razorg-ktu84p-factory-f21762aa.tgz"
l_android_nexus_7_2013_mobile_lollipop="https://dl.google.com/dl/android/aosp/razorg-lmy47v-factory-f230ab31.tgz"
l_android_nexus_9_lte_lollipop="https://dl.google.com/dl/android/aosp/volantisg-lmy48i-factory-77cd6bfa.tgz"
l_android_nexus_9_wifi_lollipop="https://dl.google.com/dl/android/aosp/volantis-lmy48i-factory-01106fec.tgz"
l_android_nexus_10_kitkat="https://dl.google.com/dl/android/aosp/mantaray-ktu84p-factory-74e52998.tgz"
l_android_nexus_10_lollipop="https://dl.google.com/dl/android/aosp/mantaray-lmy48i-factory-6ac8b73b.tgz"
# CyanogenMod fastboot images for the OnePlus One (third-party mirror).
l_android_oneplus_one_cm11="http://mirror.lucky.li/android/oneplus/one/firmware/cm-11.0-XNPH05Q-bacon-signed-fastboot.zip"
l_android_oneplus_one_cm12="http://mirror.lucky.li/android/oneplus/one/firmware/cm-12.0-YNG1TAS2I3-bacon-signed-fastboot.zip"
# Kali NetHunter 2.0 images:
# http://www.offensive-security.com/kali-linux-nethunter-download
l_nethunter_nexus_5_kitkat="http://images.kali.org/kali_linux_nethunter_2.0_hammerhead_kitkat.zip"
l_nethunter_nexus_5_lollipop="http://images.kali.org/kali_linux_nethunter_2.0_hammerhead_lollipop.zip"
l_nethunter_nexus_6_lollipop="http://images.kali.org/kali_linux_nethunter_2.0_shamu_lollipop.zip"
l_nethunter_nexus_7_2012_kitkat="http://images.kali.org/kali_linux_nethunter_2.0_nakasi_kitkat.zip"
l_nethunter_nexus_7_2012_lollipop="http://images.kali.org/kali_linux_nethunter_2.0_nakasi_lollipop.zip"
l_nethunter_nexus_7_2013_kitkat="http://images.kali.org/kali_linux_nethunter_2.0_razor_kitkat.zip"
l_nethunter_nexus_7_2013_lollipop="http://images.kali.org/kali_linux_nethunter_2.0_razor_lollipop.zip"
l_nethunter_nexus_9_lollipop="http://images.kali.org/kali_linux_nethunter_2.0_volantisg_lollipop.zip"
l_nethunter_nexus_10_kitkat="http://images.kali.org/kali_linux_nethunter_2.0_mantaray_kitkat.zip"
l_nethunter_nexus_10_lollipop="http://images.kali.org/kali_linux_nethunter_2.0_mantaray_lollipop.zip"
l_nethunter_oneplus_one_cm11="http://images.kali.org/kali_linux_nethunter_2.0_bacon_cm11.zip"
l_nethunter_oneplus_one_cm12="http://images.kali.org/kali_linux_nethunter_2.0_bacon_cm12.zip"
# TWRP recovery images per device:
# http://teamw.in/Devices
l_recovery_twrp_nexus_5="https://dl.twrp.me/hammerhead/twrp-2.8.7.1-hammerhead.img"
l_recovery_twrp_nexus_6="https://dl.twrp.me/shamu/twrp-2.8.7.1-shamu.img"
l_recovery_twrp_nexus_7_2012_wifi="https://dl.twrp.me/grouper/twrp-2.8.7.0-grouper.img"
l_recovery_twrp_nexus_7_2012_mobile="https://dl.twrp.me/tilapia/twrp-2.8.7.0-tilapia.img"
l_recovery_twrp_nexus_7_2013_wifi="https://dl.twrp.me/flo/twrp-2.8.7.0-flo.img"
l_recovery_twrp_nexus_7_2013_mobile="https://dl.twrp.me/deb/twrp-2.8.7.0-deb.img"
l_recovery_twrp_nexus_9="https://dl.twrp.me/flounder/twrp-2.8.7.0-flounder.img"
l_recovery_twrp_nexus_10="https://dl.twrp.me/manta/twrp-2.8.7.0-manta.img"
l_recovery_twrp_oneplus_one="https://dl.twrp.me/bacon/twrp-2.8.7.0-bacon.img"
## File Name
# Each local file name is simply the last path component of its download URL.
# Use shell parameter expansion (${var##*/}) instead of the original
# echo | awk | grep pipeline: same result, no subprocesses per variable.
n_android_nexus_5_kitkat=${l_android_nexus_5_kitkat##*/}
n_android_nexus_5_lollipop=${l_android_nexus_5_lollipop##*/}
n_android_nexus_6_lollipop=${l_android_nexus_6_lollipop##*/}
n_android_nexus_7_2012_wifi_kitkat=${l_android_nexus_7_2012_wifi_kitkat##*/}
n_android_nexus_7_2012_wifi_lollipop=${l_android_nexus_7_2012_wifi_lollipop##*/}
n_android_nexus_7_2012_mobile_kitkat=${l_android_nexus_7_2012_mobile_kitkat##*/}
n_android_nexus_7_2012_mobile_lollipop=${l_android_nexus_7_2012_mobile_lollipop##*/}
n_android_nexus_7_2013_wifi_kitkat=${l_android_nexus_7_2013_wifi_kitkat##*/}
n_android_nexus_7_2013_wifi_lollipop=${l_android_nexus_7_2013_wifi_lollipop##*/}
n_android_nexus_7_2013_mobile_kitkat=${l_android_nexus_7_2013_mobile_kitkat##*/}
n_android_nexus_7_2013_mobile_lollipop=${l_android_nexus_7_2013_mobile_lollipop##*/}
n_android_nexus_9_lte_lollipop=${l_android_nexus_9_lte_lollipop##*/}
n_android_nexus_9_wifi_lollipop=${l_android_nexus_9_wifi_lollipop##*/}
n_android_nexus_10_kitkat=${l_android_nexus_10_kitkat##*/}
n_android_nexus_10_lollipop=${l_android_nexus_10_lollipop##*/}
n_android_oneplus_one_cm11=${l_android_oneplus_one_cm11##*/}
n_android_oneplus_one_cm12=${l_android_oneplus_one_cm12##*/}
n_nethunter_nexus_5_kitkat=${l_nethunter_nexus_5_kitkat##*/}
n_nethunter_nexus_5_lollipop=${l_nethunter_nexus_5_lollipop##*/}
n_nethunter_nexus_6_lollipop=${l_nethunter_nexus_6_lollipop##*/}
n_nethunter_nexus_7_2012_kitkat=${l_nethunter_nexus_7_2012_kitkat##*/}
n_nethunter_nexus_7_2012_lollipop=${l_nethunter_nexus_7_2012_lollipop##*/}
n_nethunter_nexus_7_2013_kitkat=${l_nethunter_nexus_7_2013_kitkat##*/}
n_nethunter_nexus_7_2013_lollipop=${l_nethunter_nexus_7_2013_lollipop##*/}
n_nethunter_nexus_9_lollipop=${l_nethunter_nexus_9_lollipop##*/}
n_nethunter_nexus_10_kitkat=${l_nethunter_nexus_10_kitkat##*/}
n_nethunter_nexus_10_lollipop=${l_nethunter_nexus_10_lollipop##*/}
n_nethunter_oneplus_one_cm11=${l_nethunter_oneplus_one_cm11##*/}
n_nethunter_oneplus_one_cm12=${l_nethunter_oneplus_one_cm12##*/}
n_recovery_twrp_nexus_5=${l_recovery_twrp_nexus_5##*/}
n_recovery_twrp_nexus_6=${l_recovery_twrp_nexus_6##*/}
n_recovery_twrp_nexus_7_2012_wifi=${l_recovery_twrp_nexus_7_2012_wifi##*/}
n_recovery_twrp_nexus_7_2012_mobile=${l_recovery_twrp_nexus_7_2012_mobile##*/}
n_recovery_twrp_nexus_7_2013_wifi=${l_recovery_twrp_nexus_7_2013_wifi##*/}
n_recovery_twrp_nexus_7_2013_mobile=${l_recovery_twrp_nexus_7_2013_mobile##*/}
n_recovery_twrp_nexus_9=${l_recovery_twrp_nexus_9##*/}
n_recovery_twrp_nexus_10=${l_recovery_twrp_nexus_10##*/}
n_recovery_twrp_oneplus_one=${l_recovery_twrp_oneplus_one##*/}
# SuperSU zip pushed to the device by every f_root_* function.
n_root_files="UPDATE-SuperSU-v2.46.zip"
# CF-Auto-Root image names per device/build. These are currently unused by
# the f_root_* functions (the commented-out fastboot lines reference them);
# an empty value means no CF-Auto-Root image is known for that combination.
n_root_nexus_5_kitkat="cf-auto-root-nexus-5-hammerhead-4.4.4.img"
n_root_nexus_5_lollipop="cf-auto-root-nexus-5-hammerhead-5.0.0.img"
n_root_nexus_6_lollipop="cf-auto-root-nexus-6-shamu-5.0.0.img"
n_root_nexus_7_2012_wifi_kitkat=""
n_root_nexus_7_2012_wifi_lollipop="cf-auto-root-nexus-7-nakasi-5.0.0.img"
n_root_nexus_7_2012_mobile_kitkat="cf-auto-root-nexus-7-nakasig-4.4.4.img"
n_root_nexus_7_2012_mobile_lollipop=""
n_root_nexus_7_2013_wifi_kitkat=""
n_root_nexus_7_2013_wifi_lollipop="cf-auto-root-nexus-7-razor-5.0.0.img"
n_root_nexus_7_2013_mobile_kitkat="cf-auto-root-nexus-7-razorg-4.4.4.img"
n_root_nexus_7_2013_mobile_lollipop=""
n_root_nexus_9_lte_lollipop=""
n_root_nexus_9_wifi_lollipop="cf-auto-root-nexus-9-volantis-5.0.0.img"
n_root_nexus_10_kitkat=""
n_root_nexus_10_lollipop="cf-auto-root-nexus-10-mantaray-5.0.0.img"
n_root_oneplus_one_cm11=""
n_root_oneplus_one_cm12=""
## Menu
f_interface(){
    # Top-level menu: print the banner, read one choice and dispatch.
    clear
    echo -e "\033[40;34m _ _ _ \033[40;31m _ _ _ \033[0m\033[0m"
    echo -e "\033[40;34m| \ | | ___| |_\033[40;31m| | | |_ _ _ __ | |_ ___ _ __ \033[0m\033[0m"
    echo -e "\033[40;34m| \| |/ _ \ __\033[40;31m| |_| | | | | '_ \| __/ _ \ '__|\033[0m\033[0m"
    echo -e "\033[40;34m| |\ | __/ |_\033[40;31m| _ | |_| | | | | || __/ | \033[0m\033[0m"
    echo -e "\033[40;34m|_| \_|\___|\__\033[40;31m|_| |_|\__,_|_| |_|\__\___|_| \033[0m\033[0m"
    echo ""
    echo "Kali NetHunter Install Tools"
    echo ""
    # Fixed "UnLook" -> "Unlock": entry 1 runs `fastboot oem unlock`.
    echo "[1] Unlock Device"
    echo "[2] Root System"
    echo "[3] Flash Android"
    echo "[4] Flash NetHunter"
    echo "[5] More"
    echo ""
    echo "[q] exit"
    echo ""
    read -p "Choice: " menu_choice
    case $menu_choice in
        1) f_unlook_device ;;
        2) f_root ;;
        3) f_flash_android ;;
        4) f_flash_nethunter ;;
        5) f_more ;;
        q) exit 1 ;;
        *) echo "Incorrect choice..." ;;
    esac
}
f_unlook_device(){
    # Unlock the bootloader. The function keeps its original (misspelled)
    # name because f_interface dispatches on it.
    # Use the bundled fastboot binary for consistency with every other
    # function, instead of depending on a system-wide install.
    ./tools/linux/fastboot oem unlock
}
f_root(){
    # Device/ROM selection menu for rooting; dispatches to a per-device helper.
    echo "[1] Nexus 5 KitKat"
    echo "[2] Nexus 5 Lollipop"
    echo "[3] Nexus 6 Lollipop"
    echo "[4] Nexus 7 [2012] (WiFi) KitKat"
    echo "[5] Nexus 7 [2012] (WiFi) Lollipop"
    echo "[6] Nexus 7 [2012] (Mobile) KitKat"
    echo "[7] Nexus 7 [2012] (Mobile) Lollipop"
    echo "[8] Nexus 7 [2013] (WiFi) KitKat"
    echo "[9] Nexus 7 [2013] (WiFi) Lollipop"
    echo "[10] Nexus 7 [2013] (Mobile) KitKat"
    echo "[11] Nexus 7 [2013] (Mobile) Lollipop"
    echo "[12] Nexus 9 (LTE) Lollipop"
    echo "[13] Nexus 9 (WiFi) Lollipop"
    echo "[14] Nexus 10 KitKat"
    echo "[15] Nexus 10 Lollipop"
    echo "[16] OnePlus One CM11"
    echo "[17] OnePlus One CM12"
    echo ""
    echo "[b] Back to menu"
    echo ""
    read -p "Choice: " root_choice
    case $root_choice in
        1) f_root_nexus_5_kitkat ;;
        2) f_root_nexus_5_lollipop ;;
        3) f_root_nexus_6_lollipop ;;
        4) f_root_nexus_7_2012_wifi_kitkat ;;
        5) f_root_nexus_7_2012_wifi_lollipop ;;
        6) f_root_nexus_7_2012_mobile_kitkat ;;
        7) f_root_nexus_7_2012_mobile_lollipop ;;
        8) f_root_nexus_7_2013_wifi_kitkat ;;
        9) f_root_nexus_7_2013_wifi_lollipop ;;
        10) f_root_nexus_7_2013_mobile_kitkat ;;
        11) f_root_nexus_7_2013_mobile_lollipop ;;
        12) f_root_nexus_9_lte_lollipop ;;
        13) f_root_nexus_9_wifi_lollipop ;;
        14) f_root_nexus_10_kitkat ;;
        15) f_root_nexus_10_lollipop ;;
        16) f_root_oneplus_one_cm11 ;;
        17) f_root_oneplus_one_cm12 ;;
        b) f_interface ;;
        *) echo "Incorrect choice..." ;;
    esac
}
f_flash_android(){
    # Device/ROM selection menu for flashing stock Android factory images.
    echo "[1] Nexus 5 KitKat"
    echo "[2] Nexus 5 Lollipop"
    echo "[3] Nexus 6 Lollipop"
    echo "[4] Nexus 7 [2012] (WiFi) KitKat"
    echo "[5] Nexus 7 [2012] (WiFi) Lollipop"
    echo "[6] Nexus 7 [2012] (Mobile) KitKat"
    echo "[7] Nexus 7 [2012] (Mobile) Lollipop"
    echo "[8] Nexus 7 [2013] (WiFi) KitKat"
    echo "[9] Nexus 7 [2013] (WiFi) Lollipop"
    echo "[10] Nexus 7 [2013] (Mobile) KitKat"
    echo "[11] Nexus 7 [2013] (Mobile) Lollipop"
    echo "[12] Nexus 9 (LTE) Lollipop"
    echo "[13] Nexus 9 (WiFi) Lollipop"
    echo "[14] Nexus 10 KitKat"
    echo "[15] Nexus 10 Lollipop"
    echo "[16] OnePlus One CM11"
    echo "[17] OnePlus One CM12"
    echo ""
    echo "[b] Back to menu"
    echo ""
    read -p "Choice: " flash_android_choice
    case $flash_android_choice in
        1) f_flash_android_nexus_5_kitkat ;;
        2) f_flash_android_nexus_5_lollipop ;;
        3) f_flash_android_nexus_6_lollipop ;;
        4) f_flash_android_nexus_7_2012_wifi_kitkat ;;
        5) f_flash_android_nexus_7_2012_wifi_lollipop ;;
        6) f_flash_android_nexus_7_2012_mobile_kitkat ;;
        7) f_flash_android_nexus_7_2012_mobile_lollipop ;;
        8) f_flash_android_nexus_7_2013_wifi_kitkat ;;
        9) f_flash_android_nexus_7_2013_wifi_lollipop ;;
        10) f_flash_android_nexus_7_2013_mobile_kitkat ;;
        11) f_flash_android_nexus_7_2013_mobile_lollipop ;;
        12) f_flash_android_nexus_9_lte_lollipop ;;
        13) f_flash_android_nexus_9_wifi_lollipop ;;
        14) f_flash_android_nexus_10_kitkat ;;
        15) f_flash_android_nexus_10_lollipop ;;
        16) f_flash_android_oneplus_one_cm11 ;;
        17) f_flash_android_oneplus_one_cm12 ;;
        b) f_interface ;;
        *) echo "Incorrect choice..." ;;
    esac
}
f_flash_nethunter(){
    # Device/ROM selection menu for flashing Kali NetHunter images.
    echo "[1] Nexus 5 KitKat"
    echo "[2] Nexus 5 Lollipop"
    echo "[3] Nexus 6 Lollipop"
    echo "[4] Nexus 7 [2012] (WiFi) KitKat"
    echo "[5] Nexus 7 [2012] (WiFi) Lollipop"
    echo "[6] Nexus 7 [2012] (Mobile) KitKat"
    echo "[7] Nexus 7 [2012] (Mobile) Lollipop"
    echo "[8] Nexus 7 [2013] (WiFi) KitKat"
    echo "[9] Nexus 7 [2013] (WiFi) Lollipop"
    echo "[10] Nexus 7 [2013] (Mobile) KitKat"
    echo "[11] Nexus 7 [2013] (Mobile) Lollipop"
    echo "[12] Nexus 9 (LTE) Lollipop"
    echo "[13] Nexus 9 (WiFi) Lollipop"
    echo "[14] Nexus 10 KitKat"
    echo "[15] Nexus 10 Lollipop"
    echo "[16] OnePlus One CM11"
    echo "[17] OnePlus One CM12"
    echo ""
    echo "[b] Back to menu"
    echo ""
    read -p "Choice: " flash_nethunter_choice
    case $flash_nethunter_choice in
        1) f_flash_nethunter_nexus_5_kitkat ;;
        2) f_flash_nethunter_nexus_5_lollipop ;;
        3) f_flash_nethunter_nexus_6_lollipop ;;
        4) f_flash_nethunter_nexus_7_2012_wifi_kitkat ;;
        5) f_flash_nethunter_nexus_7_2012_wifi_lollipop ;;
        6) f_flash_nethunter_nexus_7_2012_mobile_kitkat ;;
        7) f_flash_nethunter_nexus_7_2012_mobile_lollipop ;;
        8) f_flash_nethunter_nexus_7_2013_wifi_kitkat ;;
        9) f_flash_nethunter_nexus_7_2013_wifi_lollipop ;;
        10) f_flash_nethunter_nexus_7_2013_mobile_kitkat ;;
        11) f_flash_nethunter_nexus_7_2013_mobile_lollipop ;;
        # NOTE(review): choices 12 (LTE) and 13 (WiFi) intentionally share one
        # handler — only a single NetHunter Nexus 9 build (volantisg) exists in
        # the download list; confirm this is acceptable for WiFi models.
        12) f_flash_nethunter_nexus_9_lollipop ;;
        13) f_flash_nethunter_nexus_9_lollipop ;;
        14) f_flash_nethunter_nexus_10_kitkat ;;
        15) f_flash_nethunter_nexus_10_lollipop ;;
        16) f_flash_nethunter_oneplus_one_cm11 ;;
        17) f_flash_nethunter_oneplus_one_cm12 ;;
        b) f_interface ;;
        *) echo "Incorrect choice..." ;;
    esac
}
f_more(){
    # Secondary menu with misc adb/fastboot utilities.
    echo "[1] Look Device"
    echo "[2] Reboot System"
    echo "[3] Backup System"
    echo "[4] Restore System"
    echo "[5] Enter Recovery"
    echo "[6] Enter Fastboot"
    echo "[7] Flash Recovery"
    echo "[8] ADB CMD"
    echo ""
    echo "[b] Back to menu"
    echo ""
    read -p "Choice: " more_choice
    case $more_choice in
        1) f_look_device ;;
        2) f_reboot_system ;;
        3) f_backup_system ;;
        4) f_restore_system ;;
        5) f_recovery_mode ;;
        6) f_fastboot_mode ;;
        7) f_flash_recovery ;;
        8) f_adb_cmd ;;
        b) f_interface ;;
        *) echo "Incorrect choice..." ;;
    esac
}
## Root
f_root_nexus_5_kitkat(){
    # Download TWRP on first use. Original exited even after a successful
    # download; now abort only if the download itself fails.
    if ! ls images/$n_recovery_twrp_nexus_5 >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_5 || exit 1
    fi
    # Push SuperSU, then boot (not flash) TWRP; the user installs the zip
    # manually from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_5
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_5_lollipop(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_5 >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_5 || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_5
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_6_lollipop(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_6 >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_6 || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_6
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_7_2012_wifi_kitkat(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_7_2012_wifi >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_7_2012_wifi || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_7_2012_wifi
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_7_2012_wifi_lollipop(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_7_2012_wifi >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_7_2012_wifi || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_7_2012_wifi
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_7_2012_mobile_kitkat(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_7_2012_mobile >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_7_2012_mobile || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_7_2012_mobile
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_7_2012_mobile_lollipop(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_7_2012_mobile >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_7_2012_mobile || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_7_2012_mobile
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_7_2013_wifi_kitkat(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_7_2013_wifi >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_7_2013_wifi || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_7_2013_wifi
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_7_2013_wifi_lollipop(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_7_2013_wifi >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_7_2013_wifi || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_7_2013_wifi
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_7_2013_mobile_kitkat(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_7_2013_mobile >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_7_2013_mobile || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_7_2013_mobile
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_7_2013_mobile_lollipop(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_7_2013_mobile >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_7_2013_mobile || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_7_2013_mobile
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_9_lte_lollipop(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_9 >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_9 || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_9
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_9_wifi_lollipop(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_9 >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_9 || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_9
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_10_kitkat(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_10 >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_10 || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_10
    echo "Recovery Mode Manually install the Root File"
}
f_root_nexus_10_lollipop(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_nexus_10 >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_nexus_10 || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_nexus_10
    echo "Recovery Mode Manually install the Root File"
}
f_root_oneplus_one_cm11(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_oneplus_one >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_oneplus_one || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_oneplus_one
    echo "Recovery Mode Manually install the Root File"
}
f_root_oneplus_one_cm12(){
    # Ensure TWRP is cached; abort only if the download fails (the original
    # exited even after a successful download).
    if ! ls images/$n_recovery_twrp_oneplus_one >/dev/null 2>&1; then
        wget -cP images $l_recovery_twrp_oneplus_one || exit 1
    fi
    # Push SuperSU, then boot TWRP for a manual install from recovery.
    ./tools/linux/adb push ./root/$n_root_files /sdcard/
    sleep 1
    ./tools/linux/adb reboot-bootloader
    sleep 1
    ./tools/linux/fastboot boot ./images/$n_recovery_twrp_oneplus_one
    echo "Recovery Mode Manually install the Root File"
}
## Flash Android
f_flash_android_nexus_5_kitkat(){
    # Flash the stock KitKat factory image onto a Nexus 5 (wipes userdata).
    wget -cP images $l_android_nexus_5_kitkat
    tar xvf ./images/$n_android_nexus_5_kitkat -C ./images/
    # The archive unpacks into "<device>-<build>"; locate the images inside.
    n_images_file=$(echo $n_android_nexus_5_kitkat | cut -d '-' -f 1,2)
    n_bootloader=$(ls ./images/$n_images_file | grep ^bootloader)
    n_radio=$(ls ./images/$n_images_file | grep ^radio)
    n_image=$(ls ./images/$n_images_file | grep ^image)
    # Wipe, then flash bootloader -> radio -> system (-w wipes userdata).
    ./tools/linux/adb reboot-bootloader
    ./tools/linux/fastboot erase boot
    ./tools/linux/fastboot erase cache
    ./tools/linux/fastboot erase recovery
    ./tools/linux/fastboot erase system
    ./tools/linux/fastboot erase userdata
    ./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_5_lollipop(){
    # Flash the stock Lollipop factory image onto a Nexus 5 (wipes userdata).
    wget -cP images $l_android_nexus_5_lollipop
    tar xvf ./images/$n_android_nexus_5_lollipop -C ./images/
    # The archive unpacks into "<device>-<build>"; locate the images inside.
    n_images_file=$(echo $n_android_nexus_5_lollipop | cut -d '-' -f 1,2)
    n_bootloader=$(ls ./images/$n_images_file | grep ^bootloader)
    n_radio=$(ls ./images/$n_images_file | grep ^radio)
    n_image=$(ls ./images/$n_images_file | grep ^image)
    # Wipe, then flash bootloader -> radio -> system (-w wipes userdata).
    ./tools/linux/adb reboot-bootloader
    ./tools/linux/fastboot erase boot
    ./tools/linux/fastboot erase cache
    ./tools/linux/fastboot erase recovery
    ./tools/linux/fastboot erase system
    ./tools/linux/fastboot erase userdata
    ./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_6_lollipop(){
    # Flash the stock Lollipop factory image onto a Nexus 6 (wipes userdata).
    wget -cP images $l_android_nexus_6_lollipop
    tar xvf ./images/$n_android_nexus_6_lollipop -C ./images/
    # The archive unpacks into "<device>-<build>"; locate the images inside.
    n_images_file=$(echo $n_android_nexus_6_lollipop | cut -d '-' -f 1,2)
    n_bootloader=$(ls ./images/$n_images_file | grep ^bootloader)
    n_radio=$(ls ./images/$n_images_file | grep ^radio)
    n_image=$(ls ./images/$n_images_file | grep ^image)
    # Wipe, then flash bootloader -> radio -> system (-w wipes userdata).
    ./tools/linux/adb reboot-bootloader
    ./tools/linux/fastboot erase boot
    ./tools/linux/fastboot erase cache
    ./tools/linux/fastboot erase recovery
    ./tools/linux/fastboot erase system
    ./tools/linux/fastboot erase userdata
    ./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_7_2012_wifi_kitkat(){
    # Flash stock KitKat onto a Nexus 7 2012 WiFi (wipes userdata).
    wget -cP images $l_android_nexus_7_2012_wifi_kitkat
    tar xvf ./images/$n_android_nexus_7_2012_wifi_kitkat -C ./images/
    # The archive unpacks into "<device>-<build>"; locate the images inside.
    n_images_file=$(echo $n_android_nexus_7_2012_wifi_kitkat | cut -d '-' -f 1,2)
    n_bootloader=$(ls ./images/$n_images_file | grep ^bootloader)
    n_radio=$(ls ./images/$n_images_file | grep ^radio)
    n_image=$(ls ./images/$n_images_file | grep ^image)
    # Wipe, then flash bootloader -> radio -> system (-w wipes userdata).
    ./tools/linux/adb reboot-bootloader
    ./tools/linux/fastboot erase boot
    ./tools/linux/fastboot erase cache
    ./tools/linux/fastboot erase recovery
    ./tools/linux/fastboot erase system
    ./tools/linux/fastboot erase userdata
    ./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_7_2012_wifi_lollipop(){
    # Flash stock Lollipop onto a Nexus 7 2012 WiFi (wipes userdata).
    wget -cP images $l_android_nexus_7_2012_wifi_lollipop
    tar xvf ./images/$n_android_nexus_7_2012_wifi_lollipop -C ./images/
    # The archive unpacks into "<device>-<build>"; locate the images inside.
    n_images_file=$(echo $n_android_nexus_7_2012_wifi_lollipop | cut -d '-' -f 1,2)
    n_bootloader=$(ls ./images/$n_images_file | grep ^bootloader)
    n_radio=$(ls ./images/$n_images_file | grep ^radio)
    n_image=$(ls ./images/$n_images_file | grep ^image)
    # Wipe, then flash bootloader -> radio -> system (-w wipes userdata).
    ./tools/linux/adb reboot-bootloader
    ./tools/linux/fastboot erase boot
    ./tools/linux/fastboot erase cache
    ./tools/linux/fastboot erase recovery
    ./tools/linux/fastboot erase system
    ./tools/linux/fastboot erase userdata
    ./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_7_2012_mobile_kitkat(){
    # Flash stock KitKat onto a Nexus 7 2012 Mobile (wipes userdata).
    wget -cP images $l_android_nexus_7_2012_mobile_kitkat
    tar xvf ./images/$n_android_nexus_7_2012_mobile_kitkat -C ./images/
    # The archive unpacks into "<device>-<build>"; locate the images inside.
    n_images_file=$(echo $n_android_nexus_7_2012_mobile_kitkat | cut -d '-' -f 1,2)
    n_bootloader=$(ls ./images/$n_images_file | grep ^bootloader)
    n_radio=$(ls ./images/$n_images_file | grep ^radio)
    n_image=$(ls ./images/$n_images_file | grep ^image)
    # Wipe, then flash bootloader -> radio -> system (-w wipes userdata).
    ./tools/linux/adb reboot-bootloader
    ./tools/linux/fastboot erase boot
    ./tools/linux/fastboot erase cache
    ./tools/linux/fastboot erase recovery
    ./tools/linux/fastboot erase system
    ./tools/linux/fastboot erase userdata
    ./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_7_2012_mobile_lollipop(){
    # Flash stock Lollipop onto a Nexus 7 2012 Mobile (wipes userdata).
    wget -cP images $l_android_nexus_7_2012_mobile_lollipop
    tar xvf ./images/$n_android_nexus_7_2012_mobile_lollipop -C ./images/
    # The archive unpacks into "<device>-<build>"; locate the images inside.
    n_images_file=$(echo $n_android_nexus_7_2012_mobile_lollipop | cut -d '-' -f 1,2)
    n_bootloader=$(ls ./images/$n_images_file | grep ^bootloader)
    n_radio=$(ls ./images/$n_images_file | grep ^radio)
    n_image=$(ls ./images/$n_images_file | grep ^image)
    # Wipe, then flash bootloader -> radio -> system (-w wipes userdata).
    ./tools/linux/adb reboot-bootloader
    ./tools/linux/fastboot erase boot
    ./tools/linux/fastboot erase cache
    ./tools/linux/fastboot erase recovery
    ./tools/linux/fastboot erase system
    ./tools/linux/fastboot erase userdata
    ./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_7_2013_wifi_kitkat(){
    # Flash stock KitKat onto a Nexus 7 2013 WiFi (wipes userdata).
    wget -cP images $l_android_nexus_7_2013_wifi_kitkat
    tar xvf ./images/$n_android_nexus_7_2013_wifi_kitkat -C ./images/
    # The archive unpacks into "<device>-<build>"; locate the images inside.
    n_images_file=$(echo $n_android_nexus_7_2013_wifi_kitkat | cut -d '-' -f 1,2)
    n_bootloader=$(ls ./images/$n_images_file | grep ^bootloader)
    n_radio=$(ls ./images/$n_images_file | grep ^radio)
    n_image=$(ls ./images/$n_images_file | grep ^image)
    # Wipe, then flash bootloader -> radio -> system (-w wipes userdata).
    ./tools/linux/adb reboot-bootloader
    ./tools/linux/fastboot erase boot
    ./tools/linux/fastboot erase cache
    ./tools/linux/fastboot erase recovery
    ./tools/linux/fastboot erase system
    ./tools/linux/fastboot erase userdata
    ./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
    ./tools/linux/fastboot reboot-bootloader
    sleep 5
    ./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_7_2013_wifi_lollipop(){
wget -cP images $l_android_nexus_7_2013_wifi_lollipop
tar xvf ./images/$n_android_nexus_7_2013_wifi_lollipop -C ./images/
n_images_file=$(echo $n_android_nexus_7_2013_wifi_lollipop | cut -d '-' -f 1,2)
n_bootloader=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^bootloader)
n_radio=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^radio)
n_image=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^image)
./tools/linux/adb reboot-bootloader
./tools/linux/fastboot erase boot
./tools/linux/fastboot erase cache
./tools/linux/fastboot erase recovery
./tools/linux/fastboot erase system
./tools/linux/fastboot erase userdata
./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_7_2013_mobile_kitkat(){
wget -cP images $l_android_nexus_7_2013_mobile_kitkat
tar xvf ./images/$n_android_nexus_7_2013_mobile_kitkat -C ./images/
n_images_file=$(echo $n_android_nexus_7_2013_mobile_kitkat | cut -d '-' -f 1,2)
n_bootloader=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^bootloader)
n_radio=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^radio)
n_image=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^image)
./tools/linux/adb reboot-bootloader
./tools/linux/fastboot erase boot
./tools/linux/fastboot erase cache
./tools/linux/fastboot erase recovery
./tools/linux/fastboot erase system
./tools/linux/fastboot erase userdata
./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_7_2013_mobile_lollipop(){
wget -cP images $l_android_nexus_7_2013_mobile_lollipop
tar xvf ./images/$n_android_nexus_7_2013_mobile_lollipop -C ./images/
n_images_file=$(echo $n_android_nexus_7_2013_mobile_lollipop | cut -d '-' -f 1,2)
n_bootloader=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^bootloader)
n_radio=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^radio)
n_image=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^image)
./tools/linux/adb reboot-bootloader
./tools/linux/fastboot erase boot
./tools/linux/fastboot erase cache
./tools/linux/fastboot erase recovery
./tools/linux/fastboot erase system
./tools/linux/fastboot erase userdata
./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_9_lte_lollipop(){
wget -cP images $l_android_nexus_9_lte_lollipop
tar xvf ./images/$n_android_nexus_9_lte_lollipop -C ./images/
n_images_file=$(echo $n_android_nexus_9_lte_lollipop | cut -d '-' -f 1,2)
n_bootloader=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^bootloader)
n_radio=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^radio)
n_image=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^image)
./tools/linux/adb reboot-bootloader
./tools/linux/fastboot erase boot
./tools/linux/fastboot erase cache
./tools/linux/fastboot erase recovery
./tools/linux/fastboot erase system
./tools/linux/fastboot erase userdata
./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_9_wifi_lollipop(){
wget -cP images $l_android_nexus_9_wifi_lollipop
tar xvf ./images/$n_android_nexus_9_wifi_lollipop -C ./images/
n_images_file=$(echo $n_android_nexus_9_wifi_lollipop | cut -d '-' -f 1,2)
n_bootloader=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^bootloader)
n_radio=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^radio)
n_image=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^image)
./tools/linux/adb reboot-bootloader
./tools/linux/fastboot erase boot
./tools/linux/fastboot erase cache
./tools/linux/fastboot erase recovery
./tools/linux/fastboot erase system
./tools/linux/fastboot erase userdata
./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_10_kitkat(){
wget -cP images $l_android_nexus_10_kitkat
tar xvf ./images/$n_android_nexus_10_kitkat -C ./images/
n_images_file=$(echo $n_android_nexus_10_kitkat | cut -d '-' -f 1,2)
n_bootloader=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^bootloader)
n_radio=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^radio)
n_image=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^image)
./tools/linux/adb reboot-bootloader
./tools/linux/fastboot erase boot
./tools/linux/fastboot erase cache
./tools/linux/fastboot erase recovery
./tools/linux/fastboot erase system
./tools/linux/fastboot erase userdata
./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
f_flash_android_nexus_10_lollipop(){
wget -cP images $l_android_nexus_10_lollipop
tar xvf ./images/$n_android_nexus_10_lollipop -C ./images/
n_images_file=$(echo $n_android_nexus_10_lollipop | cut -d '-' -f 1,2)
n_bootloader=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^bootloader)
n_radio=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^radio)
n_image=$(cd ./images/$n_images_file; ls | sed 's/ /\n/g' | grep ^image)
./tools/linux/adb reboot-bootloader
./tools/linux/fastboot erase boot
./tools/linux/fastboot erase cache
./tools/linux/fastboot erase recovery
./tools/linux/fastboot erase system
./tools/linux/fastboot erase userdata
./tools/linux/fastboot flash bootloader ./images/$n_images_file/$n_bootloader
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot flash radio ./images/$n_images_file/$n_radio
./tools/linux/fastboot reboot-bootloader
sleep 5
./tools/linux/fastboot -w update ./images/$n_images_file/$n_image
}
## Stub: stock-Android flashing for the OnePlus One (CM11) is not
## implemented. The real flashing steps are kept below, commented out.
f_flash_android_oneplus_one_cm11(){
echo ""
#wget -cP images $l_android_oneplus_one_cm11
#wget -cP images $l_recovery_twrp_oneplus_one
#./tools/linux/adb push ./images/$n_android_oneplus_one_cm11 /sdcard/
#./tools/linux/adb reboot-bootloader
#./tools/linux/fastboot boot ./images/$n_recovery_twrp_oneplus_one
#echo "Recovery Mode Manually install the Android"
# The base64 payloads decode to a Chinese notice: the script author has no
# OnePlus device to test with, so this feature was never enabled.
echo -e "\
\033[40;34m$(echo "5YaZ6ISa5pys55qE5a625LyZCg==" | base64 -d)\
\033[40;31m$(echo "5rKhT25lUGx1c+iuvuWkh+i/m+ihjOa1i+ivle+8gQo=" | base64 -d)\
\033[40;34m$(echo "5pyq5byA5Y+R5q2k5Yqf6IO944CCCg==" | base64 -d)\
\033[0m\033[0m"
}
## Stub: stock-Android flashing for the OnePlus One (CM12) is not
## implemented. The real flashing steps are kept below, commented out.
f_flash_android_oneplus_one_cm12(){
echo ""
#wget -cP images $l_android_oneplus_one_cm12
#wget -cP images $l_recovery_twrp_oneplus_one
#./tools/linux/adb push ./images/$n_android_oneplus_one_cm12 /sdcard/
#./tools/linux/adb reboot-bootloader
#./tools/linux/fastboot boot ./images/$n_recovery_twrp_oneplus_one
#echo "Recovery Mode Manually install the Android"
# The base64 payloads decode to a Chinese notice: the script author has no
# OnePlus device to test with, so this feature was never enabled.
echo -e "\
\033[40;34m$(echo "5YaZ6ISa5pys55qE5a625LyZCg==" | base64 -d)\
\033[40;31m$(echo "5rKhT25lUGx1c+iuvuWkh+i/m+ihjOa1i+ivle+8gQo=" | base64 -d)\
\033[40;34m$(echo "5pyq5byA5Y+R5q2k5Yqf6IO944CCCg==" | base64 -d)\
\033[0m\033[0m"
}
## Flash NetHunter
## Ensure an image file exists under ./images; otherwise download it and
## abort so the user can re-run the menu entry once the download finished.
## $1: filename under images/   $2: download URL
## NOTE(review): exiting 1 right after a successful wget mirrors the
## original flow (download first run, flash second run) — kept as-is.
f_fetch_or_exit(){
if [ -e images/$1 ]; then
echo ""
else
wget -cP images $2
exit 1
fi
}
## Generic NetHunter installer: push the NetHunter zip to the device, then
## fastboot-boot a TWRP recovery image so the user can flash the zip
## manually from recovery.
## $1: NetHunter zip name   $2: NetHunter zip URL
## $3: TWRP image name      $4: TWRP image URL
f_flash_nethunter_generic(){
f_fetch_or_exit $1 $2
f_fetch_or_exit $3 $4
echo "Uploading NetHunter ..."
./tools/linux/adb push ./images/$1 /sdcard/
./tools/linux/adb reboot-bootloader
./tools/linux/fastboot boot ./images/$3
echo "Recovery Mode Manually install the NetHunter"
}
## Per-device entry points (menu interface unchanged). Note the 2012/2013
## Nexus 7 KitKat/Lollipop zips are shared between wifi and mobile
## variants; only the TWRP image differs — exactly as in the original.
f_flash_nethunter_nexus_5_kitkat(){
f_flash_nethunter_generic $n_nethunter_nexus_5_kitkat $l_nethunter_nexus_5_kitkat $n_recovery_twrp_nexus_5 $l_recovery_twrp_nexus_5
}
f_flash_nethunter_nexus_5_lollipop(){
f_flash_nethunter_generic $n_nethunter_nexus_5_lollipop $l_nethunter_nexus_5_lollipop $n_recovery_twrp_nexus_5 $l_recovery_twrp_nexus_5
}
f_flash_nethunter_nexus_6_lollipop(){
f_flash_nethunter_generic $n_nethunter_nexus_6_lollipop $l_nethunter_nexus_6_lollipop $n_recovery_twrp_nexus_6 $l_recovery_twrp_nexus_6
}
f_flash_nethunter_nexus_7_2012_wifi_kitkat(){
f_flash_nethunter_generic $n_nethunter_nexus_7_2012_kitkat $l_nethunter_nexus_7_2012_kitkat $n_recovery_twrp_nexus_7_2012_wifi $l_recovery_twrp_nexus_7_2012_wifi
}
f_flash_nethunter_nexus_7_2012_wifi_lollipop(){
f_flash_nethunter_generic $n_nethunter_nexus_7_2012_lollipop $l_nethunter_nexus_7_2012_lollipop $n_recovery_twrp_nexus_7_2012_wifi $l_recovery_twrp_nexus_7_2012_wifi
}
f_flash_nethunter_nexus_7_2012_mobile_kitkat(){
f_flash_nethunter_generic $n_nethunter_nexus_7_2012_kitkat $l_nethunter_nexus_7_2012_kitkat $n_recovery_twrp_nexus_7_2012_mobile $l_recovery_twrp_nexus_7_2012_mobile
}
f_flash_nethunter_nexus_7_2012_mobile_lollipop(){
f_flash_nethunter_generic $n_nethunter_nexus_7_2012_lollipop $l_nethunter_nexus_7_2012_lollipop $n_recovery_twrp_nexus_7_2012_mobile $l_recovery_twrp_nexus_7_2012_mobile
}
f_flash_nethunter_nexus_7_2013_wifi_kitkat(){
f_flash_nethunter_generic $n_nethunter_nexus_7_2013_kitkat $l_nethunter_nexus_7_2013_kitkat $n_recovery_twrp_nexus_7_2013_wifi $l_recovery_twrp_nexus_7_2013_wifi
}
f_flash_nethunter_nexus_7_2013_wifi_lollipop(){
f_flash_nethunter_generic $n_nethunter_nexus_7_2013_lollipop $l_nethunter_nexus_7_2013_lollipop $n_recovery_twrp_nexus_7_2013_wifi $l_recovery_twrp_nexus_7_2013_wifi
}
f_flash_nethunter_nexus_7_2013_mobile_kitkat(){
f_flash_nethunter_generic $n_nethunter_nexus_7_2013_kitkat $l_nethunter_nexus_7_2013_kitkat $n_recovery_twrp_nexus_7_2013_mobile $l_recovery_twrp_nexus_7_2013_mobile
}
f_flash_nethunter_nexus_7_2013_mobile_lollipop(){
f_flash_nethunter_generic $n_nethunter_nexus_7_2013_lollipop $l_nethunter_nexus_7_2013_lollipop $n_recovery_twrp_nexus_7_2013_mobile $l_recovery_twrp_nexus_7_2013_mobile
}
f_flash_nethunter_nexus_9_lollipop(){
f_flash_nethunter_generic $n_nethunter_nexus_9_lollipop $l_nethunter_nexus_9_lollipop $n_recovery_twrp_nexus_9 $l_recovery_twrp_nexus_9
}
f_flash_nethunter_nexus_10_kitkat(){
f_flash_nethunter_generic $n_nethunter_nexus_10_kitkat $l_nethunter_nexus_10_kitkat $n_recovery_twrp_nexus_10 $l_recovery_twrp_nexus_10
}
f_flash_nethunter_nexus_10_lollipop(){
f_flash_nethunter_generic $n_nethunter_nexus_10_lollipop $l_nethunter_nexus_10_lollipop $n_recovery_twrp_nexus_10 $l_recovery_twrp_nexus_10
}
f_flash_nethunter_oneplus_one_cm11(){
f_flash_nethunter_generic $n_nethunter_oneplus_one_cm11 $l_nethunter_oneplus_one_cm11 $n_recovery_twrp_oneplus_one $l_recovery_twrp_oneplus_one
}
f_flash_nethunter_oneplus_one_cm12(){
f_flash_nethunter_generic $n_nethunter_oneplus_one_cm12 $l_nethunter_oneplus_one_cm12 $n_recovery_twrp_oneplus_one $l_recovery_twrp_oneplus_one
}
## More
## Lock the device bootloader: reboot into fastboot, then "fastboot oem lock".
## NOTE(review): "look" is presumably a typo for "lock"; the name is kept
## because the menu dispatcher references it.
f_look_device(){
./tools/linux/adb reboot-bootloader 2> /dev/null
./tools/linux/fastboot oem lock
}
## Reboot the device into Android regardless of its current mode: the adb
## attempt covers a booted system, the fastboot attempt covers the
## bootloader; whichever tool has no device fails silently.
f_reboot_system(){
for t_tool in adb fastboot; do
./tools/linux/$t_tool reboot 2> /dev/null
done
}
## Full "adb backup" (apps + expansion files + shared storage + system
## apps) into a timestamped .bak file under ./images.
f_backup_system(){
./tools/linux/adb backup -apk -obb -shared -all -system -f ./images/Android_System_$(date +%F-%H-%M).bak
}
## Restore a previously created "adb backup" file.
## Prompts for the backup file path; entering "q" aborts.
f_restore_system(){
echo 'Please Enter "Backup System" File Path or [q]uit'
echo ""
read -p "File: " l_backup_system_link
case $l_backup_system_link in
q) exit 1 ;;
# Quote the user-supplied path so file names with spaces survive word
# splitting, and close the branch with the conventional ";;".
*) ./tools/linux/adb restore "$l_backup_system_link" ;;
esac
}
## Boot a user-supplied recovery image once (without flashing it).
## Prompts for the recovery.img path; entering "q" aborts.
f_recovery_mode(){
echo 'Please Enter "recovery.img" File Path or [q]uit'
echo ""
read -p "File: " l_recovery_link
case $l_recovery_link in
q) exit 1 ;;
# Quote the user-supplied path so paths with spaces work.
*) ./tools/linux/adb reboot-bootloader 2> /dev/null; ./tools/linux/fastboot boot "$l_recovery_link" ;;
esac
}
## Reboot the device into fastboot (bootloader) mode from either a booted
## system (adb) or an already-running bootloader (fastboot).
f_fastboot_mode(){
./tools/linux/adb reboot-bootloader 2> /dev/null
./tools/linux/fastboot reboot-bootloader 2> /dev/null
}
## Permanently flash a user-supplied recovery image to the recovery
## partition. Prompts for the recovery.img path; entering "q" aborts.
f_flash_recovery(){
echo 'Please Enter "recovery.img" File Path or [q]uit'
echo ""
read -p "File: " l_recovery_link
case $l_recovery_link in
q) exit 1 ;;
# Quote the user-supplied path so paths with spaces work.
*) ./tools/linux/adb reboot-bootloader 2> /dev/null; ./tools/linux/fastboot flash recovery "$l_recovery_link" ;;
esac
}
## Drop the user into an interactive bash with the bundled platform tools
## on PATH, after printing a cheat sheet of common adb/fastboot commands.
f_adb_cmd(){
clear
export PATH=$PATH:$(pwd)/tools/linux
echo -e "\n\
You can now directly use adb or fastboot Command\n\
\n\
List adb Device:\033[40;31madb devices\033[0m\n\
Enter Shell:\033[40;31madb shell\033[0m\n\
Show Device Log:\033[40;31madb logcat\033[0m\n\
Send files to the device SD card:\033[40;31madb push ./myfile.txt /sdcard/\033[0m\n\
Get files from the device SD card:\033[40;31madb pull /sdcard/myfile.txt ./\033[0m\n\
List Fastboot Device:\033[40;31mfastboot devices\033[0m\n\
Show adb Help Message:\033[40;31madb help\033[0m\n\
Show fastboot Help Message:\033[40;31mfastboot help\033[0m\n"
bash
}
# Entry point: make the bundled platform tools executable, start the adb
# server, then show the main menu. NOTE(review): the {a,b,c} brace
# expansion requires bash — confirm the script's shebang is bash.
chmod 755 ./tools/linux/{adb,fastboot,dmtracedump,etc1tool,hprof-conv,sqlite3}
./tools/linux/adb start-server
f_interface
|
"""Delete the "MyNewKey" autostart entry from the Windows HKLM Run key."""
try:
    import winreg as wr  # Python 3 name of the module
except ImportError:
    import _winreg as wr  # Python 2 fallback (the original import)

aReg = wr.ConnectRegistry(None, wr.HKEY_LOCAL_MACHINE)
try:
    targ = r'SOFTWARE\Microsoft\Windows\CurrentVersion\Run'
    # KEY_WRITE access is required to delete a value under the key.
    aKey = wr.OpenKey(aReg, targ, 0, wr.KEY_WRITE)
    try:
        wr.DeleteValue(aKey, "MyNewKey")
    finally:
        # Close handles even if DeleteValue raises (e.g. value absent,
        # access denied) — the original leaked both handles on error.
        wr.CloseKey(aKey)
finally:
    wr.CloseKey(aReg)
|
/*****************************************************************************
* Licensed to Qualys, Inc. (QUALYS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* QUALYS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
****************************************************************************/
#ifndef _IB_DSO_H_
#define _IB_DSO_H_
/**
* @file
* @brief IronBee --- DSO Utility Functions
*
* @author <NAME> <<EMAIL>>
* @author <NAME> <<EMAIL>>
*/
#include <ironbee/build.h>
#include <ironbee/mm.h>
#include <ironbee/types.h>
#ifdef __cplusplus
extern "C" {
#endif
/**
* @defgroup IronBeeUtilDso Dynamic Shared Object (DSO)
* @ingroup IronBeeUtil
*
* Code to load and interact with DSOs. Used for module loading.
*
* @{
*/
/**
* A dso file.
*/
typedef struct ib_dso_t ib_dso_t;
/**
* Generic type for a DSO symbol.
*
* @sa ib_dso_sym_find().
*/
typedef void ib_dso_sym_t;
/**
* Open a dynamic shared object (DSO) from a file.
*
 * @param[out] pdso DSO handle is stored in @a *pdso.
* @param[in] file DSO filename.
* @param[in] mm Memory manager to use.
*
* @returns
* - IB_EINVAL -- Unable to open DSO file.
* - IB_EALLOC -- Allocation error.
* - IB_OK -- Success.
*/
ib_status_t DLL_PUBLIC ib_dso_open(
ib_dso_t **pdso,
const char *file,
ib_mm_t mm
)
NONNULL_ATTRIBUTE(1,2);
/**
* Close a dynamic shared object (DSO).
*
* @param[in] dso DSO to close.
*
* @returns
* - IB_EINVAL -- @a dso is null.
* - IB_EUNKNOWN -- Failed to close DSO.
* - IB_OK -- Success.
*/
ib_status_t DLL_PUBLIC ib_dso_close(
ib_dso_t *dso
)
ALL_NONNULL_ATTRIBUTE;
/**
* Find a given symbol in a dynamic shared object (DSO).
*
 * @param[out] psym DSO symbol handle is stored in @a *psym.
* @param[in] dso DSO to search in.
* @param[in] name DSO symbol name.
*
* @returns
* - IB_EINVAL -- dso or psym is null.
* - IB_ENOENT -- No symbol in @a dso named @a name.
* - IB_OK -- Success.
*/
ib_status_t DLL_PUBLIC ib_dso_sym_find(
ib_dso_sym_t **psym,
ib_dso_t *dso,
const char *name
)
ALL_NONNULL_ATTRIBUTE;
/**
* Given @a addr, look up the symbol name and file name of the dynamic library.
*
* @param[out] fname File name of the dynamic library.
* @param[out] sname The name of the symbol. If the given address does not
* point to a symbol, the closest symbol with an address less than
* @a addr is returned.
 * @param[in] mm Memory manager used to copy the file name and symbol name
 *            assigned to @a fname and @a sname.
* @param[in] addr The address to look up a symbol for.
*
* @returns
* - IB_OK On success.
* - IB_EALLOC On allocation errors.
* - IB_EOTHER On system call failure.
*/
ib_status_t DLL_PUBLIC ib_dso_sym_name_find(
const char **fname,
const char **sname,
ib_mm_t mm,
void *addr
)
NONNULL_ATTRIBUTE(1, 2, 4);
/** @} IronBeeUtilDso */
#ifdef __cplusplus
}
#endif
#endif /* _IB_DSO_H_ */
|
#!/bin/sh
# (C) Copyright 2017 FUJITSU LIMITED
# Container entrypoint: optionally wait for the Monasca log agent to come
# up, then exec logspout.
MONASCA_LOG_AGENT_WAIT_RETRIES=${MONASCA_LOG_AGENT_WAIT_RETRIES:-"24"}
MONASCA_LOG_AGENT_WAIT_DELAY=${MONASCA_LOG_AGENT_WAIT_DELAY:-"5"}
# Quote and default the flag: the original unquoted expansion turned an
# unset MONASCA_WAIT_FOR_LOG_AGENT into `[ = "true" ]`, a syntax error.
if [ "${MONASCA_WAIT_FOR_LOG_AGENT:-false}" = "true" ]; then
    # wait-for.sh reads RETRIES / SLEEP_LENGTH from the environment.
    RETRIES=$MONASCA_LOG_AGENT_WAIT_RETRIES \
    SLEEP_LENGTH=$MONASCA_LOG_AGENT_WAIT_DELAY \
    /wait-for.sh ${MONASCA_LOG_AGENT_URI} && /bin/logspout
else
    /bin/logspout
fi
|
// Horizontal bar-chart visualization module (AMD; loads under Node via
// amdefine). Extends the project's Viz base with a render function.
if (typeof(define) !== 'function') var define = require('amdefine')(module);
define(function(require){
"use strict";
var svg = require("../svg"),
Axis = require('../svg/axis'),
Viz = require('../viz'),
scale = require('../metric');
// option.vmap maps visual channels to data fields (vmap.size -> bar
// length, vmap.y -> bar category); option.domain supplies per-field
// min/max. NOTE(review): `d3` is assumed to be a global — confirm the
// loader provides it.
return Viz.extend(function(option){
var barHeight = this.$height / (this.data.length),
barPadding = option.barPadding || 0.2,
barChart = this.$svg(),
vmap = option.vmap,
domain = option.domain,
bars = barChart.append("g");
// Linear x axis spanning [0, max] of the bar-length field.
var x = Axis({
container: barChart,
dim: "x",
domain: [0, domain[vmap.size].max],
scale: "linear",
align: "bottom",
// width: this.$width / 2,
labelPos: {x: 0, y: -20},
format: d3.format(".3s")
});
// Ordinal y axis: one tick per data row, labelled by the category field.
var y = Axis({
container: barChart,
dim: "y",
// scale: "linear",
// domain: [domain[vmap.y].min, domain[vmap.y].max],
scale: "ordinal",
domain: this.data.map(function(d) { return d[vmap.y]}),
align: "left",
ticks: this.data.length,
tickInterval: "fit",
tickAlign: "end",
labelPos: {x: -20, y: -4},
format: d3.format(".3s")
});
var that = this;
// One rect per row; barPadding is split evenly above and below each bar.
bars.render({
mark: "rect",
y: function(d) { return y(d[vmap.y]) + barHeight * barPadding/2; },
// y: function(d, i) { return (i + 0.05) * barHeight; },
x: 0,
height: barHeight * (1-barPadding),
width: function(d){ return x(d[vmap.size]); },
fill: "steelblue"
})(this.data);
// Shift the bar group inside the chart padding, register the svg, draw.
bars.translate(this.$padding.left, this.$padding.top);
this.svg.push(barChart);
this.viz();
});
});
#! /bin/bash -x
# set -eu
###################################################################
### FUNCTIONS
###################################################################
## Scaffold one host package: an Infra classlib, its integration-test
## project and a web-api host, wired together with dotnet references.
## NOTE(review): relies on the globals $schema_sdk, $infra_sdk,
## $sdk_version and $internal being set by the caller — confirm.
generateHostPackage() {
## $1: Target Dir
## $2: api_prefix
## $3: Cmd | Qry | Sub | Etl | Hub
## $4: img_prefix-cmd | -qry | -sub | -etl | -hub
# Package
mkdir -p $1
cd $1
dotnet new classlib -n $2.$3.Infra
cd $1/$2.$3.Infra
rm -rf Class1.cs
dotnet add package $schema_sdk -v $sdk_version
dotnet add package $infra_sdk -v $sdk_version
dotnet add reference $internal/$2.Domain
# Integration Test
mkdir -p $1
cd $1
generateTestLib $2.$3.IntegrationTests
dotnet add reference $1/$2.$3.Infra
# Host
mkdir -p $1
cd $1
generateWebApi $2.$3 $4
cd $1/$2.$3
dotnet add reference $1/$2.$3.Infra
./build.sh
# docker build -f Dockerfile .
}
## Scaffold a bare dotnet web-api project (no https/auth/openapi), strip
## the template sample files, and generate its Docker/build/run scripts.
## $1: project name   $2: image prefix passed through to generateBuildSh
generateWebApi() {
dotnet new webapi --no-https --auth None --no-openapi -n $1
cd $1
rm -rf Controllers
rm -rf WeatherForecast.cs
generateDockerfile hosts $1
generateLocalDockerfile $1
generateBuildSh $1 $2
generateRunSh $2
}
// source: camplight/hylo-evo (stars: 0)
// Unit tests for the Review component's Redux connector. `dispatch` is an
// identity mock, so mapDispatchToProps hands back the raw action objects,
// which are pinned via jest snapshots.
import { mapDispatchToProps } from './Review.connector'
const dispatch = jest.fn(x => x)
const props = {}
const dispatchProps = mapDispatchToProps(dispatch, props)
// const stateProps = mapStateToProps(state, props)
describe('Review', () => {
it('should call updateUserSettings', () => {
const name = 'My Name'
expect(dispatchProps.updateUserSettings(name)).toMatchSnapshot()
})
it('should call fetchMySkills', () => {
expect(dispatchProps.fetchMySkills()).toMatchSnapshot()
})
})
|
// file: src/juegodecartas21/Interfaz.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package juegodecartas21;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Vector;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.*;
import javax.swing.border.TitledBorder;
/**
*
* @author ~Antares~
*/
public class Interfaz extends JFrame implements ActionListener{
//-------------
private Juego juego21; // game model (deck, player, croupier)
//-------------
// Menu bar and its items.
private JMenuBar menu;
private JMenu archivo, integrantes;
private JMenuItem salir, quienesSomos;
// Read-only displays: cash, current bet, player/croupier scores.
private JTextField efectivo, apuesta, puntajeJugador, puntajeCupier;
private JLabel estado, estadoBaraja, efectivoNom, apuestaNom;
// Game actions: deal, hit, insurance, new game.
private JButton repartir, otraCarta, seguro, otroJuego;
//-------------
// Layout panels, composed bottom-up in iniciarVentanaPrincipal().
private JPanel panelEfectivoApuesta, panelBarraMenu;
private JPanel panelBotones, panelCartasJugador , panelCartasCupier;
private JPanel panelCartasJugadorYCupier, panelCartasYBotones, panelFinal;
//-------------
private Vector<JButton>vectorFichas; // bet chip buttons
private Vector<JLabel> vectorCartasJ; // player's card images
private Vector<JLabel> vectorCartasC; // croupier's card images
//-------------
private Container contenedor;
private boolean hayGanador, seguroJuego;
// Card counters for player (J), croupier (C) and deck draws (B).
private int contadorDeCartasJ, contadorDeCartasC;
int contadorDeCartasB=0;
/** Builds the game model and UI state, shows a splash, then the window. */
Interfaz(){
// Image-only welcome dialog (no title/message text).
JOptionPane.showMessageDialog(null, null, null, WIDTH, new ImageIcon("src/juegodecartas21/EXTRAS/Bienvenido.png"));
juego21= new Juego();
vectorCartasJ = new Vector<JLabel>(11);
vectorCartasC = new Vector<JLabel>(11);
vectorFichas = new Vector<JButton>(6);
contadorDeCartasJ=2; // starts counting from the third card
contadorDeCartasC=1; // starts counting from the third card
hayGanador=false;
seguroJuego=false;
iniciarVentanaPrincipal();
}
/**
 * Builds every sub-panel, composes them into the frame and shows it.
 * Panel-building order matters: composite panels (panelCartasJugadorYCupier,
 * panelCartasYBotones, panelFinal) add the panels created before them.
 */
public void iniciarVentanaPrincipal(){
panelEfectivoApuesta();
//------------
panelBarraMenu();
panelBotones();
//------------
panelCartasJugador();
panelCartasCupier();
panelCartasJugadorYCupier();
//------------
panelCartasYBotones();
panelFinal();
contenedor = getContentPane();
contenedor.setLayout(new BorderLayout());
//Fondo fondo = new Fondo();
contenedor.add(panelFinal);
//contenedor.add(fondo);
setTitle("Juego De cartas (21)");
setLocation(50, 150);
setSize(880, 450);
setResizable(false);
setVisible(true);
}
/**
 * Builds the betting panel: cash and bet read-only fields plus a 2x3 grid
 * of chip buttons (icons m(0).png .. m(5).png) laid out with absolute
 * bounds (null layout).
 */
public void panelEfectivoApuesta(){
panelEfectivoApuesta = new JPanel();
panelEfectivoApuesta.setLayout(null);
panelEfectivoApuesta.setBorder(BorderFactory.createTitledBorder(null,""
+ "Apuestas",TitledBorder.CENTER,0,null, Color.BLUE));
// initialise the bet widgets
efectivoNom = new JLabel("Efectivo");
efectivoNom.setBounds(20, 40, 100, 20);
efectivo = new JTextField();
efectivo.setEnabled(false);
efectivo.setText(""+1500); // starting cash
efectivo.setBounds(20, 70, 100, 20);
apuestaNom = new JLabel("Apuesta");
apuestaNom.setBounds(20, 100, 100, 20);
apuesta = new JTextField();
apuesta.setEnabled(false);
apuesta.setText(""+0);
apuesta.setBounds(20, 130, 100, 20);
// initialise the chip buttons; this frame handles their clicks
JButton aux;
for (int i=0; i<vectorFichas.capacity();i++){
aux= new JButton();
aux.setIcon(new ImageIcon("src/juegodecartas21/Fichas/m("+i+").png"));
aux.addActionListener(this);
vectorFichas.insertElementAt(aux, i);
}
// absolute positions of the chip buttons (two rows of three)
int ancho, alto;
ancho=alto=30;
vectorFichas.get(0).setBounds(20, 200, ancho, alto);
vectorFichas.get(1).setBounds(60, 200, ancho, alto);
vectorFichas.get(2).setBounds(100, 200, ancho, alto);
vectorFichas.get(3).setBounds(20, 240, ancho, alto);
vectorFichas.get(4).setBounds(60, 240, ancho, alto);
vectorFichas.get(5).setBounds(100, 240, ancho, alto);
// add everything to the panel
panelEfectivoApuesta.add(efectivoNom);
panelEfectivoApuesta.add(efectivo);
panelEfectivoApuesta.add(apuestaNom);
panelEfectivoApuesta.add(apuesta);
for(int i=0; i<vectorFichas.capacity();i++)
panelEfectivoApuesta.add(vectorFichas.get(i));
}
/**
 * Builds the menu bar: Archivo > Salir and Integrantes > Quienes Somos,
 * both dispatched through this frame's actionPerformed.
 */
public void panelBarraMenu(){
panelBarraMenu = new JPanel();
panelBarraMenu.setLayout(null);
menu = new JMenuBar();
menu.setBounds(0, 0, 200, 40);
menu.setLayout(new GridLayout(1, 2));
archivo = new JMenu("Archivo");
integrantes = new JMenu("Integrantes");
salir = new JMenuItem("Salir");
quienesSomos = new JMenuItem("Quienes Somos");
salir.addActionListener(this);
quienesSomos.addActionListener(this);
archivo.add(salir);
integrantes.add(quienesSomos);
menu.add(archivo);
menu.add(integrantes);
panelBarraMenu.add(menu);
}
/**
 * Builds the action-button row (deal / hit / insurance / new game) plus
 * the status labels. All but "Otro Juego" start disabled until a bet is
 * placed; clicks are dispatched through this frame's actionPerformed.
 */
public void panelBotones(){
panelBotones= new JPanel();
panelBotones.setLayout(null);
// Buttons laid out left-to-right with a fixed width and gap.
int posY, posX, ancho, alto, esp;
ancho=100;
alto=30;
posY=50;
posX=100;
esp=20;
repartir = new JButton("Repartir");
repartir.setBounds(posX, posY, ancho, alto);
otraCarta = new JButton("Otra Carta");
otraCarta.setBounds(posX+=ancho+esp, posY, ancho, alto);
seguro = new JButton("Seguro");
seguro.setBounds(posX+=ancho+esp, posY, ancho, alto);
otroJuego = new JButton("Otro Juego");
otroJuego.setBounds(posX+=ancho+esp, posY, ancho, alto);
repartir.setEnabled(false);
otraCarta.setEnabled(false);
seguro.setEnabled(false);
estado= new JLabel("****** Haga su Apuesta******");
estado.setBounds(150, 10, 250, 30);
// Shows how many cards remain in the deck.
estadoBaraja= new JLabel("Nº cartas en baraja: "+juego21.getBaraja().getSize());
estadoBaraja.setBounds(320, 10, 250, 30);
repartir.addActionListener(this);
otraCarta.addActionListener(this);
seguro.addActionListener(this);
otroJuego.addActionListener(this);
panelBotones.add(repartir);
panelBotones.add(otraCarta);
panelBotones.add(seguro);
panelBotones.add(otroJuego);
panelBotones.add(estado);
panelBotones.add(estadoBaraja);
}
public void panelCartasJugador(){
    // Panel showing the player's cards, titled with the player's name,
    // plus a read-only score field.
    panelCartasJugador = new JPanel();
    panelCartasJugador.setLayout(null);
    String titulo = "Cartas de " + juego21.getJugador().getNombre();
    panelCartasJugador.setBorder(
            BorderFactory.createTitledBorder(null, titulo, TitledBorder.CENTER, 0, null, Color.BLUE));
    puntajeJugador = new JTextField("0");
    puntajeJugador.setEnabled(false);
    puntajeJugador.setBounds(120, 180, 100, 20);
    inicializarVCartasJ();
    panelCartasJugador.add(puntajeJugador);
}
public void panelCartasCupier(){
    // Panel showing the croupier's cards plus a read-only score field.
    panelCartasCupier = new JPanel();
    panelCartasCupier.setLayout(null);
    String titulo = "Cartas De Cupier";
    panelCartasCupier.setBorder(
            BorderFactory.createTitledBorder(null, titulo, TitledBorder.CENTER, 0, null, Color.BLUE));
    puntajeCupier = new JTextField("0");
    puntajeCupier.setEnabled(false);
    puntajeCupier.setBounds(120, 180, 100, 20);
    inicializarVCartasC();
    panelCartasCupier.add(puntajeCupier);
}
public void panelCartasJugadorYCupier(){
    // Horizontal row: player's cards on the left, croupier's on the right.
    panelCartasJugadorYCupier= new JPanel();
    BoxLayout fila = new BoxLayout(panelCartasJugadorYCupier, BoxLayout.X_AXIS);
    panelCartasJugadorYCupier.setLayout(fila);
    panelCartasJugadorYCupier.add(panelCartasJugador);
    panelCartasJugadorYCupier.add(panelCartasCupier);
}
public void panelCartasYBotones(){
    // Vertical stack: both card panels on top, the button row underneath.
    panelCartasYBotones= new JPanel();
    BoxLayout columna = new BoxLayout(panelCartasYBotones, BoxLayout.Y_AXIS);
    panelCartasYBotones.setLayout(columna);
    panelCartasYBotones.add(panelCartasJugadorYCupier);
    panelCartasYBotones.add(panelBotones);
}
public void panelFinal(){
    // Root panel: menu bar on top, bet panel on the left, card/button area on the right.
    panelFinal= new JPanel();
    panelFinal.setLayout(null);
    panelBarraMenu.setBounds(0, 0, 200, 40);
    panelFinal.add(panelBarraMenu);
    panelEfectivoApuesta.setBounds(10, 40, 200, 350);
    panelFinal.add(panelEfectivoApuesta);
    panelCartasYBotones.setBounds(210, 40, 650, 500);
    panelFinal.add(panelCartasYBotones);
}
public void inicializarVCartasJ(){
    // Builds the (initially hidden) labels that display the player's cards.
    // Each successive label is shifted 20px left and 2px down so the cards
    // appear fanned on top of each other.
    final int cartaAncho = 71;
    final int cartaAlto = 96;
    int x = 220;
    int y = 40;
    for (int idx = 0; idx < vectorCartasJ.capacity(); idx++){
        JLabel carta = new JLabel();
        carta.setVisible(false);
        carta.setBounds(x, y, cartaAncho, cartaAlto);
        vectorCartasJ.insertElementAt(carta, idx);
        x -= 20;
        y += 2;
    }
    // The first slot starts out showing the face-down card.
    vectorCartasJ.get(0).setIcon(new ImageIcon("src/juegodecartas21/Cartas/tapada.png"));
    vectorCartasJ.get(0).setVisible(true);
    for (int idx = 0; idx < vectorCartasJ.size(); idx++){
        panelCartasJugador.add(vectorCartasJ.get(idx));
    }
}
public void inicializarVCartasC(){
    // Builds the (initially hidden) labels for the croupier's cards,
    // fanned exactly like the player's labels (20px left, 2px down per card).
    final int cartaAncho = 71;
    final int cartaAlto = 96;
    int x = 220;
    int y = 40;
    for (int idx = 0; idx < vectorCartasC.capacity(); idx++){
        JLabel carta = new JLabel();
        carta.setVisible(false);
        carta.setBounds(x, y, cartaAncho, cartaAlto);
        vectorCartasC.insertElementAt(carta, idx);
        x -= 20;
        y += 2;
    }
    // The first slot starts out showing the face-down card.
    vectorCartasC.get(0).setIcon(new ImageIcon("src/juegodecartas21/Cartas/tapada.png"));
    vectorCartasC.get(0).setVisible(true);
    for (int idx = 0; idx < vectorCartasC.size(); idx++){
        panelCartasCupier.add(vectorCartasC.get(idx));
    }
}
public void inicilizarJuegoNuevo(){
    // Restores the UI and the deck so a brand-new game can start.
    activarFichas();
    ponerBotonesInicio();
    // Rebuild a complete deck from scratch.
    juego21.getBaraja().borrarBaraja();
    juego21.getBaraja().inicializarBaraja();
    // Reset money: no bet on the table, starting cash of 1500.
    apuesta.setText(String.valueOf(0));
    efectivo.setText(String.valueOf(1500));
    inicializarImagenCartas();
}
public void inicializarImagenCartas(){
    // Clears both hands, hides every card label, and leaves each panel
    // showing only the face-down card; then resets the per-hand state flags.
    juego21.getJugador().inicializarDatos();
    for(int i=0; i<vectorCartasJ.size();i++){
        vectorCartasJ.get(i).setVisible(false);
    }
    vectorCartasJ.get(0).setIcon(new ImageIcon("src/juegodecartas21/Cartas/tapada.png"));
    vectorCartasJ.get(0).setVisible(true);
    juego21.getCupier().inicializarDatos();
    // BUG FIX: the original loop bound was vectorCartasJ.size() while hiding
    // labels of vectorCartasC — iterate the croupier vector's own size.
    for(int i=0; i<vectorCartasC.size();i++){
        vectorCartasC.get(i).setVisible(false);
    }
    vectorCartasC.get(0).setIcon(new ImageIcon("src/juegodecartas21/Cartas/tapada.png"));
    vectorCartasC.get(0).setVisible(true);
    otroJuego.setEnabled(true);
    // Next card indexes: the player is dealt two cards on "Repartir",
    // the croupier shows one.
    contadorDeCartasJ=2;
    contadorDeCartasC=1;
    hayGanador=false;
    seguroJuego=false;
    actualizarPuntajes();
    estado.setText("****** Haga su Apuesta******");
}
public void actualizarPuntajes(){
    // Mirrors the model's current scores into the two read-only text fields.
    puntajeJugador.setText(String.valueOf(juego21.getJugador().getPuntaje()));
    puntajeCupier.setText(String.valueOf(juego21.getCupier().getPuntaje()));
}
public void activarFichas(){
    // Enables every chip button so the player can place bets.
    for (int idx = 0; idx < vectorFichas.capacity(); idx++) {
        vectorFichas.get(idx).setEnabled(true);
    }
}
public void desactivarFichas(){
    // Disables every chip button (no betting while a hand is in play).
    for (int idx = 0; idx < vectorFichas.capacity(); idx++) {
        vectorFichas.get(idx).setEnabled(false);
    }
}
public void desactivarBotones(){
    // Greys out all four action buttons at once.
    JButton[] botones = {repartir, otraCarta, seguro, otroJuego};
    for (JButton boton : botones) {
        boton.setEnabled(false);
    }
}
public void ponerBotonesInicio(){
    // Initial button state: only "Otro Juego" is clickable.
    for (JButton boton : new JButton[]{repartir, otraCarta, seguro}) {
        boton.setEnabled(false);
    }
    otroJuego.setEnabled(true);
}
public void juez(){
    // Referee: reads both scores from the UI, decides whether the hand is over,
    // and pays out / announces the winner.
    int ptsCupier, ptsJugador;
    ptsCupier=Integer.parseInt(puntajeCupier.getText());
    ptsJugador=Integer.parseInt(puntajeJugador.getText());
    if(seguroJuego){
        // Player has "stood": the croupier wins by beating the player without
        // busting, and loses by going over 21.
        if (ptsCupier>ptsJugador && ptsCupier<=21){
            juego21.getCupier().setGanador();
            hayGanador=true;
        }
        if (ptsCupier>21){
            juego21.getJugador().setGanador();
            hayGanador=true;
        }
    }
    else{
        // Player is still drawing: an exact 21 wins, over 21 busts.
        if (ptsJugador==21){
            juego21.getJugador().setGanador();
            hayGanador=true;
        }
        if (ptsJugador>21){
            juego21.getCupier().setGanador();
            hayGanador=true;
        }
    }
    if(juego21.getJugador().getGanador()){
        estado.setVisible(true);
        estado.setText("****** GANA "+juego21.getJugador().getNombre()+" ******");
        JOptionPane.showMessageDialog(null,
                "GANA "+juego21.getJugador().getNombre(),
                "***** GANADOR *****", 1);
        // Winning pays double the bet back into the cash field.
        int efectivoX=Integer.parseInt(efectivo.getText());
        int apuestaX=Integer.parseInt(apuesta.getText())*2;
        int suma=efectivoX+apuestaX;
        efectivo.setText(""+suma);
    }
    if(juego21.getCupier().getGanador()){
        estado.setVisible(true);
        // BUG FIX: the original literals were broken "<NAME>" placeholders;
        // announce the croupier's win with a coherent message.
        estado.setText("****** GANA el Cupier ******");
        JOptionPane.showMessageDialog(null,
                "GANA el Cupier",
                "***** GANADOR *****", 1);
    }
}
public void apostar(int valorApuesta){
    // Moves valorApuesta from the cash field into the bet field,
    // rejecting the bet when there is not enough cash left.
    int cash = Integer.parseInt(efectivo.getText());
    int bet = Integer.parseInt(apuesta.getText());
    if (cash < valorApuesta){
        JOptionPane.showMessageDialog(null,
                "YA NO PUEDE APOSTAR MAS, SE HA QUEDADO SIN DINERO",
                "SIN DINERO", 1);
        return;
    }
    efectivo.setText(String.valueOf(cash - valorApuesta));
    apuesta.setText(String.valueOf(bet + valorApuesta));
}
public void actionPerformed(ActionEvent e) {
    // Central event dispatcher for chip buttons, action buttons and menu items.
    // Chip buttons: each chip places a fixed bet and unlocks "Repartir".
    // (Replaces six copy-pasted if-blocks with one table-driven loop.)
    final int[] valoresFichas = {1, 5, 10, 25, 50, 100};
    for (int i = 0; i < valoresFichas.length; i++) {
        if (e.getSource().equals(vectorFichas.get(i))){
            repartir.setEnabled(true);
            otroJuego.setEnabled(true);
            apostar(valoresFichas[i]);
        }
    }
    // "Repartir": deal the opening hand (two cards to the player, one visible
    // card to the croupier) and lock betting.
    if (e.getSource().equals(repartir)){
        repartir.setEnabled(false);
        desactivarFichas();
        estado.setVisible(false);
        desactivarBotones();
        seguro.setEnabled(true);
        otraCarta.setEnabled(true);
        juego21.repartir();
        vectorCartasJ.get(0).setIcon(juego21.getJugador().getCarta(0).getImagen());
        vectorCartasJ.get(1).setIcon(juego21.getJugador().getCarta(1).getImagen());
        vectorCartasJ.get(1).setVisible(true);
        vectorCartasC.get(0).setIcon(juego21.getCupier().getCarta(0).getImagen());
        actualizarPuntajes();
        estadoBaraja.setText("Nº cartas en baraja: "+juego21.getBaraja().getSize());
        juez();
    }
    // "Otra Carta": draw one more card for the player; after 11 cards the
    // player's label slots are exhausted and the hand is forfeited.
    if (e.getSource().equals(otraCarta)){
        if (contadorDeCartasJ>10){
            JOptionPane.showMessageDialog(null,"SE AGOTARON LAS CARTAS DE SU BARAJA","SIN CARTAS", 1);
            JOptionPane.showMessageDialog(null, "USTED PIERDE", "GAME OVER", 1);
            otraCarta.setEnabled(false);
            seguro.setEnabled(false);
            inicializarImagenCartas();
            activarFichas();
            ponerBotonesInicio();
            contadorDeCartasJ=2;
        }
        else{
            juego21.otraCarta();
            vectorCartasJ.get(contadorDeCartasJ).setIcon(juego21.getJugador().getCarta(contadorDeCartasJ).getImagen());
            vectorCartasJ.get(contadorDeCartasJ).setVisible(true);
            estadoBaraja.setText("Nº cartas en baraja: "+juego21.getBaraja().getSize());
            actualizarPuntajes();
            juez();
        }
        contadorDeCartasJ++;
    }
    // "Seguro": the player stands; the croupier will draw below.
    if(e.getSource().equals(seguro)){
        seguroJuego=true;
        desactivarBotones();
    }
    // "Otro Juego": full reset of deck, money and card images.
    if(e.getSource().equals(otroJuego)){
        inicilizarJuegoNuevo();
        estadoBaraja.setText("Nº cartas en baraja: "+juego21.getBaraja().getSize());
        contadorDeCartasJ=1;
    }
    // Menu items.
    if(e.getSource().equals(quienesSomos))
        JOptionPane.showMessageDialog(null,"MADE BY:\n<NAME> 1056353\nLeidy <NAME> 1056234","QUIENES SOMOS", 1);
    if(e.getSource().equals(salir)){
        setVisible(false);
        JOptionPane.showMessageDialog(null, null, null, WIDTH, new ImageIcon("src/juegodecartas21/extras/bye.png"));
        System.exit(0);
    }
    // Once the player stands, the croupier keeps drawing until juez()
    // declares a winner.
    if (seguroJuego){
        desactivarBotones();
        seguro.setEnabled(true);
        while(!hayGanador){
            juego21.repartirCartasCupier();
            vectorCartasC.get(contadorDeCartasC).setIcon(juego21.getCupier().getCarta(contadorDeCartasC).getImagen());
            vectorCartasC.get(contadorDeCartasC).setVisible(true);
            estadoBaraja.setText("Nº cartas en baraja: "+juego21.getBaraja().getSize());
            actualizarPuntajes();
            juez();
            contadorDeCartasC++;
        }
    }
    // Hand over: reset the table for the next round.
    if (hayGanador){
        inicializarImagenCartas();
        activarFichas();
        ponerBotonesInicio();
        apuesta.setText(""+0);
    }
    // BUG FIX: the original condition tested the bet twice
    // ("apuesta==0 && apuesta==0"); a single check is sufficient —
    // "Repartir" stays disabled while no bet is on the table.
    if (Integer.parseInt(apuesta.getText())==0)
        repartir.setEnabled(false);
}
}
|
import os
import glob
import pickle
from input_representation import InputRepresentation
def process_midi_file(input_file, output_folder):
    """Convert one MIDI file via InputRepresentation and pickle the result.

    The output file is ``<output_folder>/<basename>.processed``. Any failure
    is reported and swallowed so a batch run keeps going.
    """
    try:
        representation = InputRepresentation(input_file)
        output_file = os.path.join(output_folder, os.path.basename(input_file) + '.processed')
        with open(output_file, 'wb') as handle:
            pickle.dump(representation, handle)
        print(f"Processed {input_file} and saved the data in {output_file}")
    except Exception as e:
        print(f"Error processing {input_file}: {e}")
if __name__ == '__main__':
    # Source/destination folders are overridable via environment variables.
    source_dir = os.getenv('LMD_DIR', './lmd_full/0')
    target_dir = os.getenv('REMI_DIR', './lmd_remi')
    os.makedirs(target_dir, exist_ok=True)
    # Recursively pick up every .mid file below the source folder.
    pattern = os.path.join(source_dir, '**', '*.mid')
    for midi_path in glob.glob(pattern, recursive=True):
        process_midi_file(midi_path, target_dir)
#!/bin/bash
# Opens an interactive shell inside the running container named by
# $DOCKER_TAG. env.sh is expected to export DOCKER_TAG.
. ./env.sh
sudo docker exec -it $DOCKER_TAG /bin/bash
///***************************************************************************
// * (C) Copyright 2003-2010 - Stendhal *
// ***************************************************************************
// ***************************************************************************
// * *
// * This program is free software; you can redistribute it and/or modify *
// * it under the terms of the GNU General Public License as published by *
// * the Free Software Foundation; either version 2 of the License, or *
// * (at your option) any later version. *
// * *
// ***************************************************************************/
//package games.stendhal.client.gui.settings;
//
//import java.awt.Frame;
//import java.awt.event.ActionEvent;
//import java.awt.event.ActionListener;
//
//import javax.swing.BorderFactory;
//import javax.swing.JButton;
//import javax.swing.JDialog;
//import javax.swing.JTabbedPane;
//
//import games.stendhal.client.gui.WindowUtils;
//import games.stendhal.client.gui.layout.SBoxLayout;
//
///**
// * Dialog for game settings.
// */
//@SuppressWarnings("serial")
//public class SettingsDialog extends JDialog {
// /**
// * Create a new SettingsDialog.
// *
// * @param parent parent window, or <code>null</code>
// */
// public SettingsDialog(Frame parent) {
// super(parent, "Settings");
// setDefaultCloseOperation(DISPOSE_ON_CLOSE);
// int pad = SBoxLayout.COMMON_PADDING;
// setLayout(new SBoxLayout(SBoxLayout.VERTICAL, pad));
// JTabbedPane tabs = new JTabbedPane();
// add(tabs);
// tabs.add("General", new GeneralSettings().getComponent());
// tabs.add("Visuals", new VisualSettings().getComponent());
// tabs.add("Sound", new SoundSettings().getComponent());
// setResizable(false);
// JButton closeButton = new JButton("Close");
// closeButton.setAlignmentX(RIGHT_ALIGNMENT);
// closeButton.setBorder(BorderFactory.createCompoundBorder(BorderFactory.createEmptyBorder(pad, pad, pad, pad),
// closeButton.getBorder()));
// closeButton.addActionListener(new ActionListener() {
// @Override
// public void actionPerformed(ActionEvent arg0) {
// dispose();
// }
// });
// add(closeButton);
// WindowUtils.closeOnEscape(this);
// WindowUtils.watchFontSize(this);
// WindowUtils.trackLocation(this, "settings", false);
// pack();
// }
//}
|
import { Observable } from "../valoo";
import { InspectData } from "../../adapter/adapter/adapter";
import { createSearchStore } from "./search";
import { createFilterStore } from "./filter";
import { createSelectionStore } from "./selection";
import { Collapser } from "./collapser";
import { EmitFn } from "../../adapter/hook";
import { ProfilerState } from "../components/profiler/data/commits";
/** Unique identifier assigned to every node in the devtools tree. */
export type ID = number;
/** Discriminates what kind of renderer node a DevNode represents. */
export enum DevNodeType {
	FunctionComponent,
	ClassComponent,
	Element,
	ForwardRef,
	Memo,
	Context,
	Consumer,
	Suspense,
}
/** A single node of the inspected component tree. */
export interface DevNode {
	/** Unique id of this node. */
	id: ID;
	/** Kind of node (component, element, ...). */
	type: DevNodeType;
	/** Display name shown in the tree view. */
	name: string;
	/** Render key of the node ("" when none — TODO confirm against adapter). */
	key: string;
	/** Id of the parent node. */
	parent: ID;
	/** Ids of the child nodes, in render order. */
	children: ID[];
	// Display (Elements + Profiler)
	/** Nesting depth used for indentation in the tree view. */
	depth: number;
	// Raw absolute timing data.
	startTime: number;
	endTime: number;
	// Normalized timing data to keep the timings
	// of the whole tree consistent across future
	// commits. These timings are relative to the
	// very first node.
	treeStartTime: number;
	treeEndTime: number;
}
/** UI color scheme; "auto" follows the host/system preference. */
export type Theme = "auto" | "light" | "dark";
/** Full node lookup table, keyed by node id. */
export type Tree = Map<ID, DevNode>;
/**
 * Central observable state container for the devtools UI.
 * Sub-stores (search, filter, selection, profiler) are created by their
 * respective factories; `emit` sends messages back to the page adapter.
 */
export interface Store {
	profiler: ProfilerState;
	/** True while the element-picker is active. */
	isPicking: Observable<boolean>;
	/** Inspection result for the currently selected node, if any. */
	inspectData: Observable<InspectData | null>;
	/** Ids of the root nodes of each mounted tree. */
	roots: Observable<ID[]>;
	/** All known nodes, keyed by id. */
	nodes: Observable<Tree>;
	/** Flattened, display-ordered list of node ids. */
	nodeList: Observable<ID[]>;
	theme: Observable<Theme>;
	/** Maximum depth across the tree (drives indentation sizing). */
	treeDepth: Observable<number>;
	search: ReturnType<typeof createSearchStore>;
	filter: ReturnType<typeof createFilterStore>;
	selection: ReturnType<typeof createSelectionStore>;
	collapser: Collapser<ID>;
	/** Imperative actions the UI can trigger against the inspected page. */
	actions: {
		inspect: (id: ID) => void;
		highlightNode: (id: ID | null) => void;
		clear(): void;
		startPickElement(): void;
		stopPickElement(): void;
	};
	/** Sends an event to the page adapter. */
	emit: EmitFn;
	/** Subscribes to store events; returns an unsubscribe function. */
	subscribe(fn: Listener): () => void;
}
/** Store event callback: receives the event name and its payload. */
export type Listener = (name: string, data: any) => void;
|
import os
import platform
import polyswarm_api
# API constants
DEFAULT_GLOBAL_API = 'https://api.polyswarm.network/v2'
DEFAULT_PERMALINK_BASE = os.getenv('POLYSWARM_PORTAL_URI', 'https://polyswarm.network/scan/results/file')
DEFAULT_COMMUNITY = 'default'
# Maximum time to wait for a scan to finish: 15 minutes, in seconds.
DEFAULT_SCAN_TIMEOUT = 60*15
# Page size when fetching result listings.
RESULT_CHUNK_SIZE = 100
# Seconds between successive polls while waiting on a scan.
POLL_FREQUENCY = 1
# HTTP settings
DEFAULT_HTTP_TIMEOUT = 30
DEFAULT_RETRIES = 0
DEFAULT_BACKOFF = 1
# Gateway errors are considered transient and retried.
DEFAULT_RETRY_CODES = (502, 504)
# e.g. "polyswarm-api/1.0.0 (x86_64-Linux-CPython-3.9.1)"
DEFAULT_USER_AGENT = 'polyswarm-api/{} ({}-{}-{}-{})'.format(
    polyswarm_api.__version__, platform.machine(), platform.system(),
    platform.python_implementation(), platform.python_version(),
)
# concurrent HTTP workers
DEFAULT_WORKER_COUNT = 8
# API maximums
MAX_HUNT_RESULTS = 20000
MAX_ARTIFACT_BATCH_SIZE = 256
# Filesystem constants
FILE_CHUNK_SIZE = 8192
# Cap on simultaneously open file descriptors during batch operations.
MAX_OPEN_FDS = 256
# this results in worst case 32MB memory usage during downloads
DOWNLOAD_CHUNK_SIZE = 1024*1024*4
# NOTE(review): presumably minutes (2 days = 2*60*24) — confirm the unit
# against the stream endpoint's "since" parameter.
MAX_SINCE_TIME_STREAM = 2 * 60 * 24
|
#!/bin/bash
# Looks up the first reported address of a node named "test-gslb1-<role>"
# and exports it for later use (e.g. as a /etc/hosts alias).
# Set the value of NODE_ROLE
NODE_ROLE="example-role"
# Use kubectl to retrieve the IP address of the specified node and store it in HOST_ALIAS_IP2
# NOTE(review): status.addresses[0] is usually the InternalIP, but the order
# is not guaranteed by the API — confirm for the target cluster.
export HOST_ALIAS_IP2=$(kubectl get nodes test-gslb1-${NODE_ROLE} -o custom-columns='IP:status.addresses[0].address' --no-headers)
# Verify the value of HOST_ALIAS_IP2
echo "The IP address of the node test-gslb1-${NODE_ROLE} is $HOST_ALIAS_IP2"
/*
BLIS
An object-based framework for developing high-performance BLAS-like
libraries.
Copyright (C) 2014, The University of Texas at Austin
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of The University of Texas at Austin nor the names
of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "blis.h"
/*
  Reference 4x4 gemm micro-kernel, generated for each numeric type by the
  GENTFUNC macro below (INSERT_GENTFUNC_BASIC0 expands it per type).

  Computes C := beta*C + alpha*(A*B) for a 4x4 block of C, where A is a
  packed 4xk micro-panel (column-stored, leading dimension 4) and B is a
  packed kx4 micro-panel (row-stored, leading dimension 4). C is accessed
  through general row/column strides rs_c/cs_c. All 16 products are
  accumulated in local ab## temporaries (registers, ideally) via the
  type-generic PASTEMAC(ch,...) scalar macros; beta == 0 is special-cased
  so that C is overwritten without reading (important when C is
  uninitialized, e.g. contains NaNs).
*/
#undef GENTFUNC
#define GENTFUNC( ctype, ch, kername ) \
\
void PASTEMAC(ch,kername) \
( \
dim_t k, \
ctype* restrict alpha, \
ctype* restrict a, \
ctype* restrict b, \
ctype* restrict beta, \
ctype* restrict c, inc_t rs_c, inc_t cs_c, \
auxinfo_t* restrict data, \
cntx_t* restrict cntx \
) \
{ \
ctype a0; \
ctype a1; \
ctype a2; \
ctype a3; \
\
ctype b0, b1, b2, b3; \
\
ctype ab00, ab01, ab02, ab03; \
ctype ab10, ab11, ab12, ab13; \
ctype ab20, ab21, ab22, ab23; \
ctype ab30, ab31, ab32, ab33; \
\
ctype* c00, * c01, * c02, * c03; \
ctype* c10, * c11, * c12, * c13; \
ctype* c20, * c21, * c22, * c23; \
ctype* c30, * c31, * c32, * c33; \
\
dim_t i; \
\
\
c00 = (c + 0*rs_c + 0*cs_c); \
c10 = (c + 1*rs_c + 0*cs_c); \
c20 = (c + 2*rs_c + 0*cs_c); \
c30 = (c + 3*rs_c + 0*cs_c); \
\
c01 = (c + 0*rs_c + 1*cs_c); \
c11 = (c + 1*rs_c + 1*cs_c); \
c21 = (c + 2*rs_c + 1*cs_c); \
c31 = (c + 3*rs_c + 1*cs_c); \
\
c02 = (c + 0*rs_c + 2*cs_c); \
c12 = (c + 1*rs_c + 2*cs_c); \
c22 = (c + 2*rs_c + 2*cs_c); \
c32 = (c + 3*rs_c + 2*cs_c); \
\
c03 = (c + 0*rs_c + 3*cs_c); \
c13 = (c + 1*rs_c + 3*cs_c); \
c23 = (c + 2*rs_c + 3*cs_c); \
c33 = (c + 3*rs_c + 3*cs_c); \
\
PASTEMAC(ch,set0s)( ab00 ); \
PASTEMAC(ch,set0s)( ab10 ); \
PASTEMAC(ch,set0s)( ab20 ); \
PASTEMAC(ch,set0s)( ab30 ); \
\
PASTEMAC(ch,set0s)( ab01 ); \
PASTEMAC(ch,set0s)( ab11 ); \
PASTEMAC(ch,set0s)( ab21 ); \
PASTEMAC(ch,set0s)( ab31 ); \
\
PASTEMAC(ch,set0s)( ab02 ); \
PASTEMAC(ch,set0s)( ab12 ); \
PASTEMAC(ch,set0s)( ab22 ); \
PASTEMAC(ch,set0s)( ab32 ); \
\
PASTEMAC(ch,set0s)( ab03 ); \
PASTEMAC(ch,set0s)( ab13 ); \
PASTEMAC(ch,set0s)( ab23 ); \
PASTEMAC(ch,set0s)( ab33 ); \
\
/* Rank-1 update per iteration: read one column of A and one row of B, \
   accumulate the 16 outer-product terms. */ \
for ( i = 0; i < k; ++i ) \
{ \
a0 = *(a + 0); \
a1 = *(a + 1); \
a2 = *(a + 2); \
a3 = *(a + 3); \
\
b0 = *(b + 0); \
b1 = *(b + 1); \
b2 = *(b + 2); \
b3 = *(b + 3); \
\
PASTEMAC(ch,dots)( a0, b0, ab00 ); \
PASTEMAC(ch,dots)( a1, b0, ab10 ); \
PASTEMAC(ch,dots)( a2, b0, ab20 ); \
PASTEMAC(ch,dots)( a3, b0, ab30 ); \
\
PASTEMAC(ch,dots)( a0, b1, ab01 ); \
PASTEMAC(ch,dots)( a1, b1, ab11 ); \
PASTEMAC(ch,dots)( a2, b1, ab21 ); \
PASTEMAC(ch,dots)( a3, b1, ab31 ); \
\
PASTEMAC(ch,dots)( a0, b2, ab02 ); \
PASTEMAC(ch,dots)( a1, b2, ab12 ); \
PASTEMAC(ch,dots)( a2, b2, ab22 ); \
PASTEMAC(ch,dots)( a3, b2, ab32 ); \
\
PASTEMAC(ch,dots)( a0, b3, ab03 ); \
PASTEMAC(ch,dots)( a1, b3, ab13 ); \
PASTEMAC(ch,dots)( a2, b3, ab23 ); \
PASTEMAC(ch,dots)( a3, b3, ab33 ); \
\
a += 4; \
b += 4; \
} \
\
/* beta == 0: overwrite C without reading it (avoids propagating NaNs \
   from uninitialized memory). Otherwise scale C by beta in place. */ \
if ( PASTEMAC(ch,eq0)( *beta ) ) \
{ \
PASTEMAC(ch,set0s)( *c00 ); \
PASTEMAC(ch,set0s)( *c10 ); \
PASTEMAC(ch,set0s)( *c20 ); \
PASTEMAC(ch,set0s)( *c30 ); \
\
PASTEMAC(ch,set0s)( *c01 ); \
PASTEMAC(ch,set0s)( *c11 ); \
PASTEMAC(ch,set0s)( *c21 ); \
PASTEMAC(ch,set0s)( *c31 ); \
\
PASTEMAC(ch,set0s)( *c02 ); \
PASTEMAC(ch,set0s)( *c12 ); \
PASTEMAC(ch,set0s)( *c22 ); \
PASTEMAC(ch,set0s)( *c32 ); \
\
PASTEMAC(ch,set0s)( *c03 ); \
PASTEMAC(ch,set0s)( *c13 ); \
PASTEMAC(ch,set0s)( *c23 ); \
PASTEMAC(ch,set0s)( *c33 ); \
} \
else \
{ \
PASTEMAC(ch,scals)( *beta, *c00 ); \
PASTEMAC(ch,scals)( *beta, *c10 ); \
PASTEMAC(ch,scals)( *beta, *c20 ); \
PASTEMAC(ch,scals)( *beta, *c30 ); \
\
PASTEMAC(ch,scals)( *beta, *c01 ); \
PASTEMAC(ch,scals)( *beta, *c11 ); \
PASTEMAC(ch,scals)( *beta, *c21 ); \
PASTEMAC(ch,scals)( *beta, *c31 ); \
\
PASTEMAC(ch,scals)( *beta, *c02 ); \
PASTEMAC(ch,scals)( *beta, *c12 ); \
PASTEMAC(ch,scals)( *beta, *c22 ); \
PASTEMAC(ch,scals)( *beta, *c32 ); \
\
PASTEMAC(ch,scals)( *beta, *c03 ); \
PASTEMAC(ch,scals)( *beta, *c13 ); \
PASTEMAC(ch,scals)( *beta, *c23 ); \
PASTEMAC(ch,scals)( *beta, *c33 ); \
} \
\
/* Final update: C += alpha * AB (dots performs a multiply-accumulate). */ \
PASTEMAC(ch,dots)( *alpha, ab00, *c00 ); \
PASTEMAC(ch,dots)( *alpha, ab10, *c10 ); \
PASTEMAC(ch,dots)( *alpha, ab20, *c20 ); \
PASTEMAC(ch,dots)( *alpha, ab30, *c30 ); \
\
PASTEMAC(ch,dots)( *alpha, ab01, *c01 ); \
PASTEMAC(ch,dots)( *alpha, ab11, *c11 ); \
PASTEMAC(ch,dots)( *alpha, ab21, *c21 ); \
PASTEMAC(ch,dots)( *alpha, ab31, *c31 ); \
\
PASTEMAC(ch,dots)( *alpha, ab02, *c02 ); \
PASTEMAC(ch,dots)( *alpha, ab12, *c12 ); \
PASTEMAC(ch,dots)( *alpha, ab22, *c22 ); \
PASTEMAC(ch,dots)( *alpha, ab32, *c32 ); \
\
PASTEMAC(ch,dots)( *alpha, ab03, *c03 ); \
PASTEMAC(ch,dots)( *alpha, ab13, *c13 ); \
PASTEMAC(ch,dots)( *alpha, ab23, *c23 ); \
PASTEMAC(ch,dots)( *alpha, ab33, *c33 ); \
}
/* Instantiate the kernel for all basic numeric types (s, d, c, z). */
INSERT_GENTFUNC_BASIC0( gemm_c99_4x4 )
|
using System;
using System.Collections.Generic;
public class AccessPolicy
{
    // Object ids that have been granted access, in insertion order.
    private List<string> accessPolicies = new List<string>();

    [Input("objectId", required: true)]
    public Input<string> ObjectId { get; set; } = null!;

    /// <summary>
    /// Registers <paramref name="newObjectId"/> as an access policy.
    /// </summary>
    /// <returns>false when the id is already present; true when it was added.</returns>
    /// <exception cref="ArgumentException">Thrown for a null or empty id.</exception>
    public bool AddAccessPolicy(string newObjectId)
    {
        // Guard clauses instead of nested if/else.
        if (string.IsNullOrEmpty(newObjectId))
        {
            throw new ArgumentException("Object ID cannot be null or empty.");
        }
        if (accessPolicies.Contains(newObjectId))
        {
            return false;
        }
        accessPolicies.Add(newObjectId);
        return true;
    }
}
import os
import json
from slackclient import SlackClient
from runners.helpers import log
from runners.helpers import db
from runners.helpers import vault
API_TOKEN = os.environ.get('SA_SLACK_API_TOKEN', os.environ.get('SLACK_API_TOKEN'))
def message_template(vars):
    """Build the Slack message payload by invoking a Snowflake javascript UDF.

    `vars` is the caller's locals(): expects 'alert', 'properties',
    'template' and optionally 'user'. Returns the parsed JSON payload or
    raises on any template/DB failure.
    """
    payload = None
    # remove handlers data, it might contain JSON incompatible strucutres
    # BUG FIX: pop with a default so a missing HANDLERS key does not raise.
    vars['alert'].pop('HANDLERS', None)
    # if we have Slack user data, send it to template
    if 'user' in vars:
        params = {
            'alert': vars['alert'],
            'properties': vars['properties'],
            'user': vars['user'],
        }
    else:
        params = {'alert': vars['alert'], 'properties': vars['properties']}
    log.debug(f"Javascript template parameters", params)
    try:
        # retrieve Slack message structure from javascript UDF
        rows = db.connect_and_fetchall(
            "select " + vars['template'] + "(parse_json(%s))",
            params=[json.dumps(params)],
        )
        # NOTE(review): rows[1] skips the first element — presumably
        # connect_and_fetchall returns (connection, rows); confirm.
        row = rows[1]
        if len(row) > 0:
            log.debug(f"Template {vars['template']}", ''.join(row[0]))
            payload = json.loads(''.join(row[0]))
        else:
            log.error(f"Error loading javascript template {vars['template']}")
            raise Exception(f"Error loading javascript template {vars['template']}")
    except Exception as e:
        log.error(f"Error loading javascript template", e)
        raise
    log.debug(f"Template payload", payload)
    return payload
def handle(
    alert,
    recipient_email=None,
    channel=None,
    template=None,
    message=None,
    file_content=None,
    file_type=None,
    file_name=None,
    blocks=None,
    attachments=None,
    api_token=API_TOKEN,
    slack_api_token=None,
):
    """Deliver an alert to Slack, either to a channel or to a user (by email).

    Message content comes from a javascript template UDF when `template` is
    set, otherwise from `message`/the alert title. When `file_content` is
    given the content is uploaded via files.upload. Returns the Slack API
    response dict (without the echoed 'message'), or None on failure.
    """
    slack_token_ct = slack_api_token or api_token
    slack_token = vault.decrypt_if_encrypted(slack_token_ct)
    sc = SlackClient(slack_token)
    # otherwise we will retrieve email from assignee and use it to identify Slack user
    # Slack user id will be assigned as a channel
    title = alert['TITLE']
    if recipient_email is not None:
        result = sc.api_call("users.lookupByEmail", email=recipient_email)
        # log.info(f'Slack user info for {email}', result)
        if result['ok'] is True and 'error' not in result:
            user = result['user']
            userid = user['id']
        else:
            log.error(f'Cannot identify Slack user for email {recipient_email}')
            return None
    # check if channel exists, if yes notification will be delivered to the channel
    if channel is not None:
        log.info(f'Creating new SLACK message for {title} in channel', channel)
    else:
        if recipient_email is not None:
            channel = userid
            log.info(
                f'Creating new SLACK message for {title} for user {recipient_email}'
            )
        else:
            log.error(f'Cannot identify assignee email')
            return None
    text = title
    if template is not None:
        properties = {'channel': channel, 'message': message}
        # create Slack message structure in Snowflake javascript UDF
        payload = message_template(locals())
        if payload is not None:
            if 'blocks' in payload:
                blocks = json.dumps(payload['blocks'])
            if 'attachments' in payload:
                attachments = json.dumps(payload['attachments'])
            if 'text' in payload:
                text = payload['text']
        else:
            log.error(f'Payload is empty for template {template}')
            return None
    else:
        # does not have template, will send just simple message
        if message is not None:
            text = message
    response = None
    if file_content is not None:
        if template is not None:
            # Post the templated message first, then attach the file upload.
            response = sc.api_call(
                "chat.postMessage",
                channel=channel,
                text=text,
                blocks=blocks,
                attachments=attachments,
            )
        file_descriptor = sc.api_call(
            "files.upload",
            content=file_content,
            title=text,
            channels=channel,
            # BUG FIX: the keyword was misspelled "iletype", so the file type
            # was silently dropped by the Slack API.
            filetype=file_type,
            filename=file_name,
        )
        if file_descriptor['ok'] is True:
            file = file_descriptor["file"]
            file_url = file["url_private"]
        else:
            log.error(f"Slack file upload error", file_descriptor['error'])
    else:
        response = sc.api_call(
            "chat.postMessage",
            channel=channel,
            text=text,
            blocks=blocks,
            attachments=attachments,
        )
    if response is not None:
        log.debug(f'Slack response', response)
        if response['ok'] is False:
            log.error(f"Slack handler error", response['error'])
            return None
        if 'message' in response:
            del response['message']
    return response
|
# NOTE(review): evaluation scratch code (reads like a notebook export) —
# `dataset_dict`, `net` and `tokenize` are defined elsewhere and the two
# loops below are exact duplicates (presumably a re-run cell).
# Per-class accuracy counters: sentences tagged ['O'] vs. drug-entity ones.
all_drug = 0
good_drug = 0
all_o = 0
good_o =0
for type in dataset_dict:
    for sentence in dataset_dict[type]:
        (words,tag) = sentence
        res_tag = net.predict_for_token_batch([words])[0]
        if tag==['O']:
            all_o =all_o+1
            if res_tag==tag: good_o=good_o+1
        else:
            all_drug = all_drug+1
            if res_tag==tag: good_drug = good_drug +1
# Duplicate of the loop above over every split.
all_drug = 0
good_drug = 0
all_o = 0
good_o =0
for type in dataset_dict:
    for sentence in dataset_dict[type]:
        (words,tag) = sentence
        res_tag = net.predict_for_token_batch([words])[0]
        if tag==['O']:
            all_o =all_o+1
            if res_tag==tag: good_o=good_o+1
        else:
            all_drug = all_drug+1
            if res_tag==tag: good_drug = good_drug +1
# Same evaluation restricted to the 'test' split only.
all_drug = 0
good_drug = 0
all_o = 0
good_o =0
for sentence in dataset_dict['test']:
    (words,tag) = sentence
    res_tag = net.predict_for_token_batch([words])[0]
    if tag==['O']:
        all_o =all_o+1
        if res_tag==tag: good_o=good_o+1
    else:
        all_drug = all_drug+1
        if res_tag==tag: good_drug = good_drug +1
# Bare expressions: only display a value in a REPL/notebook; note they
# divide by zero when a class is empty.
good_o/all_o
good_drug/all_drug
# Gold-standard chemical entity mentions; each is lower-cased, tokenized and
# run through the model, printing the predicted tag per token.
gold = ["N-pyrrolyl",
"8-(n-Decyl)protopalmatine",
"isoquinoline-1, 3-(2H, 4H)-diones",
"4-(2,5-Dimethylpyrrol-1-yl)piperidine",
"4-aminopiperidine",
"tert-Butyl 4-(2,5-dimethyl-1H-pyrrol-1-yl)piperidine-1-carboxylate",
"tricyclic phthalimide",
"8-(n-Decyl)pseudoprotoberberine chloride",
"gallate",
"pyrrolones",
"8-(p-Methyl)phenylprotopalmatine choride",
"amide",
"DMF",
"carbonyl",
"Difluorophosgene",
"acetone",
"1-benzylpiperidine",
"Perfluoroheptanoyl fluoride",
"8-(p-Methoxy)phenylprotopalmatine choride",
"acetonitrile",
"8-(n-Decyl)protopalmatine chloride",
"(E)-5-Benzyl-2-((2,5-dimethyl-1-(2-(trifluoromethyl)phenyl)-1H-pyrrol-3-yl)methylene)-1H-pyrrolo[3,2-c]pyridine-3,4(2H, 5H)dione",
"hydroxyl",
"4-[(3-hydroxybenzylamino)-methylene]-4H-isoquinoline-1,3-diones",
"5H)dione",
"3,4-dichlorophenyl",
"glycerol monolaurate",
"pyrrolo [3,2-c] pyridine-3, 4-dione",
"dihydropseudoberberine",
"dihydroberberine",
"(E)-Ethyl 5-((1-(1-benzylpiperidin-4-yl)-2,5-dimethyl-1H-pyrrol-3-yl)methylene)-2-methyl-4-oxo-4,5-dihydro-1H-pyrrole-3-carboxylate",
"Sepharose",
"Fcontrol",
"N,N-dimethylformamide",
"4-fluorobenzyl",
"aminomethyl",
"8-(1-Naphthyl)protoberberine choride",
"8-(p-Methyl)phenylpseudoprotoberberine choride",
"indinavir",
"piperidinyl",
"3-furyl",
"pyrrolo[3,2-c]",
"-NHAc",
"amine",
"amino",
"prodelphinidin A2-3′-O-gallate",
"diethyl acetal",
"pyrazine",
"protomol",
"pyrrolone",
"phthalimide",
"methanol",
"1-benzyl-4-(2,5-dimethyl-1H-pyrrol-1-yl)piperidine",
"1-Benzyl-4-(2,5-dimethyl-1H-pyrrol-1-yl)piperidine",
"ester",
"4-(2,5-dimethyl-1H-pyrrol-1-yl)piperidine",
"1-piperidinylmethyl",
"1-naphthyl",
"8-(m-Chloro)phenylpseudoprotoberberine",
"elvitegravir",
"isoquinoline-1,3-diones",
"iso-propanol",
"2-furyl",
"acetic acid",
"4-oxo-4,5-dihydro-1H-pyrrole-3-carboxylate",
"bromate",
"phenyl",
"4-methylumbelliferyl (4-MU) oleate",
"difluorophosgene",
"imino",
"GML",
"Sephacryl S-300",
")-4-oxo-4,5-dihydro-1H-pyrrole-3-carboxylate",
"3-furyl, N-pyrrolyl",
"PFCAs",
"tetrazolium 3-(4,5-dimethylthiazol-2-yl)-2,5-diphenyltetrazolium bromide",
"8-(n-Dodecyl)pseudoprotoberberine chloride",
"8-(1-Naphthyl)protopalmatine choride",
"perfluorohexyl",
"pyrimethamine",
"perfluorooctanoic acid",
"cyclohexane",
"methoxyl",
"Trifluoroacetic Acid",
"piperazinyl",
"pseudoberberine",
"kcals",
"polyphenolic compounds",
"(2R,3R,2′R,3′R)-desgalloyloolongtheanin-3,3′-O-digallate",
"palmatine",
"acetal",
"isoxazole",
"2,5-dimethyl-1-(1-substituted-piperidine)-1H-pyrrole",
"sulfonamide",
"Phenyl Sepharose",
"Perfluorooctanoic Acid",
"Perfluorinated carboxylic acids (PFCAs)",
"RIF",
"8-(n-Dodecyl)pseudoprotoberberine",
"dihydropalmatine",
"polyphenols",
"8-(p-Chloro)phenylpseudoprotoberberine chloride",
"carboxylic acids",
"ethanol",
"1-(1-Benzylpiperidin-4-yl)-2,5-dimethyl-1H-pyrrole-3-carbaldehyde",
"carboxylic acid",
"aa–aj)",
"tert-butyloxycarbonyl",
"surfactants",
"Pyrrolone",
"morpholinyl",
"carbonyl-hydroxy-aromatic nitrogen",
"PFCA",
"propyl-2-",
"4-methylumbelliferone",
"galloyl",
"3-formylpyrrole 4-aminopiperidine (7, 16 a–z, 16 aa–aj)",
"8-n-decylberberines",
"2,5-hexandione",
"benzoyl",
"2,5-Hexandione",
"trifluoroacetic acid",
"4-pyridyl",
"perfluorinated carboxylic acids",
"perfluorinated",
"N,N-dimethylformamido-",
"8-(m-Methoxy)phenylprotopalmatine choride",
"8-(p-Methoxy)phenylprotoberberine choride",
"8-(1-Naphthyl)pseudoprotoberberine choride",
"piperidine",
"berberine",
"8-(p-Methoxy)phenylpseudoprotoberberine choride",
"perfluorinated acid",
"PFOA",
"(E)-Ethyl 5-((2,5-dimethyl-1-(2-(trifluoromethyl)phenyl)-1H-pyrrol-3-yl)methylene)-2-((E)-2-(dimethylamino)vinyl)-4-oxo-4,5-dihydro-1H-pyrrole-3-carboxylate",
"8-(n-Decyl)pseudoprotoberberine",
"8-(p-Chloro)phenylpseudoprotoberberine",
"polyphenol",
"pyrogallol",
"tetramethylsilane",
"tetrazolium 3-(4,5-dimethylthiazol-2-yl)-2,5-diphenyltetrazolium",
"/dichloromethane",
"isoquinoline-1,3-(2H,4H)-dione",
"4-quinazolinone",
"dimethylamine",
"5H-pyrido[4,3-b]indol-4-carboxamide",
"isoniazid",
"dichloromethane",
"phthalimides",
"4-methylpiperazinyl, -CH2-(1-piperidinyl)",
"8-(n-Decyl)protoberberine chloride",
"DOTD",
"3-Formylpyrrole",
"isoquinoline-1, 3-(2H, 4H)-dione",
"8-(m-Methoxy)phenylprotoberberine choride",
"polyphenol oxidase",
"para-toluenesulfonic acid",
"m-methoxyphenyl",
"5-O-gallate",
"4-carboxamide",
"3-thienyl",
"Isoquinolin-1,3-Dione",
"p-methoxyphenyl",
"pseudoprotoberberine",
"oleate",
"-NH",
"4-dione",
"(E)-ethyl 5-((1-substituted-piperidin-4-yl)-2,5-dimethyl-1H-pyrrol-3-yl)methylene)-2-methyl-4-oxo-4,5-dihydro-1H-pyrrole-3-carboxylates",
"isoquinoline- 1, 3-(2H,4H)-dione",
"pyridone",
"rifampin",
"p-methylphenyl",
"efavirenz",
"perfluoroheptyl",
"chloroquine",
"phenol",
"n-decyl",
"3-pyridyl",
"3” hydroxyl",
"ethyl piperidine-1-",
"PFHA",
"raltegravir",
"perfluoroheptanoic acid",
"4-amino-1-benzylpiperidine",
"2,5-dimethyl-1-(1-substituted-piperidine)-3-formylpyrroles",
"2,5-dimethyl-1-(1-substituted-piperidine)-3-formylpyrrole",
"monoglycerides",
"5H-Pyrido[4,3-b]indol-4-carboxamide",
"isoquinolinedione",
"carboxyl",
"Bedaquiline",
"diethyl aceta",
"8-(n-Decyl)protoberberine",
"8-(m-Methoxy)phenylpseudoprotoberberine choride",
"benzyl",
"8-(m-Chloro)phenylpseudoprotoberberine chloride",
"benzopyrone",
"4H)-diones",
"HOAc",
"methylpyrazole",
"2,5-dimethyl-1-aryl/substituted-aryl-3-formylpyrrole",
"protoberberine",
"10,11-dimethoxy",
"prodelphinidin",
"pesudoberberine",
"8-(p-Methyl)phenylprotoberberine choride"]
# Tag each gold mention with the model and print token/tag pairs for
# manual inspection (`tokenize` and `net` are defined elsewhere).
for e in gold:
    tokens = tokenize(e.lower())
    tags = net.predict_for_token_batch([tokens])[0]
    for token, tag in zip(tokens, tags):
        print(token, tag)
#!/bin/sh
# Generates two out-of-tree CMake build directories as siblings of the
# source checkout: <build>_dbg (Debug, with sanitizers and tests) and
# <build>_rel (Release, with tests).
CURRENT_DIR=`pwd`
BUILD_DIR="build"
# Shared options: export compile_commands.json and enable the test target.
BUILD_OPTIONS="-DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DASMJIT_TEST=1"
echo "** Configuring ${BUILD_DIR}_dbg [Debug Build] **"
mkdir -p ../${BUILD_DIR}_dbg
cd ../${BUILD_DIR}_dbg
eval cmake .. -DCMAKE_BUILD_TYPE=Debug ${BUILD_OPTIONS} -DASMJIT_SANITIZE=1
cd ${CURRENT_DIR}
echo "** Configuring ${BUILD_DIR}_rel [Release Build] **"
mkdir -p ../${BUILD_DIR}_rel
cd ../${BUILD_DIR}_rel
eval cmake .. -DCMAKE_BUILD_TYPE=Release ${BUILD_OPTIONS}
cd ${CURRENT_DIR}
|
<filename>sdk/metric/sdkapi/instrumentkind.go
// Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//go:generate stringer -type=InstrumentKind
package sdkapi // import "go.opentelemetry.io/otel/sdk/metric/sdkapi"
// InstrumentKind describes the kind of instrument.
type InstrumentKind int8
const (
	// HistogramInstrumentKind indicates a Histogram instrument.
	HistogramInstrumentKind InstrumentKind = iota
	// GaugeObserverInstrumentKind indicates a GaugeObserver instrument.
	GaugeObserverInstrumentKind
	// CounterInstrumentKind indicates a Counter instrument.
	CounterInstrumentKind
	// UpDownCounterInstrumentKind indicates an UpDownCounter instrument.
	UpDownCounterInstrumentKind
	// CounterObserverInstrumentKind indicates a CounterObserver instrument.
	CounterObserverInstrumentKind
	// UpDownCounterObserverInstrumentKind indicates an UpDownCounterObserver
	// instrument.
	UpDownCounterObserverInstrumentKind
)
// Synchronous reports whether this kind of instrument is synchronous
// (Counter, UpDownCounter, or Histogram).
func (k InstrumentKind) Synchronous() bool {
	return k == CounterInstrumentKind ||
		k == UpDownCounterInstrumentKind ||
		k == HistogramInstrumentKind
}
// Asynchronous reports whether this kind of instrument is asynchronous,
// i.e. any kind that is not synchronous.
func (k InstrumentKind) Asynchronous() bool {
	if k.Synchronous() {
		return false
	}
	return true
}
// Adding reports whether this kind of instrument adds its inputs
// (as opposed to Grouping kinds, which record them individually).
func (k InstrumentKind) Adding() bool {
	switch k {
	case CounterInstrumentKind,
		UpDownCounterInstrumentKind,
		CounterObserverInstrumentKind,
		UpDownCounterObserverInstrumentKind:
		return true
	default:
		return false
	}
}
// Grouping reports whether this kind of instrument groups its inputs,
// i.e. any kind that is not Adding.
func (k InstrumentKind) Grouping() bool {
	if k.Adding() {
		return false
	}
	return true
}
// Monotonic reports whether this kind of instrument exposes a
// non-decreasing sum (Counter and CounterObserver).
func (k InstrumentKind) Monotonic() bool {
	return k == CounterInstrumentKind || k == CounterObserverInstrumentKind
}
// PrecomputedSum reports whether this kind of instrument receives
// precomputed sums: it must both add its inputs and be asynchronous.
func (k InstrumentKind) PrecomputedSum() bool {
	if !k.Adding() {
		return false
	}
	return k.Asynchronous()
}
|
<filename>app/src/main/java/com/example/opensorcerer/models/Conversation.java
package com.example.opensorcerer.models;
import android.os.Parcelable;
import android.util.Log;
import com.parse.ParseClassName;
import com.parse.ParseException;
import com.parse.ParseObject;
import com.parse.ParseQuery;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
/**
* Class for handling Conversation objects for user chats
*/
@ParseClassName("Conversation")
public class Conversation extends ParseObject implements Parcelable {

    // Database keys
    private static final String KEY_PARTICIPANTS = "participants";
    private static final String KEY_MESSAGES = "messages";

    /**
     * Looks up an existing conversation between the current user and the
     * given user.
     *
     * @param opposite the other participant
     * @return the first matching conversation, or null if none exists
     */
    private static Conversation findActiveConversation(User opposite) {
        User me = User.getCurrentUser();

        // Query for conversations whose participant list contains both users.
        ParseQuery<Conversation> bothParticipants = ParseQuery.getQuery(Conversation.class)
                .whereContainsAll("participants", Arrays.asList(me.getHandler(), opposite.getHandler()));

        // A ParseException (e.g. no results) simply means no active conversation.
        try {
            return bothParticipants.getFirst();
        } catch (ParseException e) {
            return null;
        }
    }

    /**
     * Returns the active conversation with the given user, creating a fresh
     * (not yet saved) one when no conversation exists yet.
     */
    public static Conversation getConversationWithUser(User opposite) {
        Conversation existing = findActiveConversation(opposite);
        if (existing != null) {
            return existing;
        }
        Conversation created = new Conversation();
        created.setParticipants(Arrays.asList(User.getCurrentUser(), opposite));
        return created;
    }

    /**
     * Returns the stored list of message object ids (may be null when the
     * conversation has no messages yet).
     */
    public List<String> getMessages() {
        return getList(KEY_MESSAGES);
    }

    /**
     * Stores the message id list and persists the conversation.
     */
    public void setMessages(List<String> messages) {
        put(KEY_MESSAGES, messages);
        update();
    }

    /**
     * Returns the conversation's participants as User objects.
     */
    public List<User> getParticipants() {
        return User.toUserArray(Objects.requireNonNull(getList(KEY_PARTICIPANTS)));
    }

    /**
     * Stores the participant list (does not save the conversation).
     */
    public void setParticipants(List<User> participants) {
        put(KEY_PARTICIPANTS, User.toParseUserArray(participants));
    }

    /**
     * Returns the participant that is not the current user.
     */
    public User getOpposite() {
        List<User> users = getParticipants();
        String currentId = User.getCurrentUser().getObjectId();
        if (users.get(0).getObjectId().equals(currentId)) {
            return users.get(1);
        }
        return users.get(0);
    }

    /**
     * Appends a single message's id to the conversation and persists it.
     */
    public void addMessage(Message message) {
        List<String> stored = getMessages();
        List<String> updated = (stored == null) ? new ArrayList<>() : stored;
        updated.add(message.getObjectId());
        setMessages(updated);
    }

    /**
     * Saves the conversation to the database asynchronously, logging on
     * success and printing the stack trace on failure.
     */
    private void update() {
        saveInBackground(e -> {
            if (e != null) {
                e.printStackTrace();
                return;
            }
            Log.d("Conversation", "Conversation updated");
        });
    }
}
|
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/master/script-library/docs/ruby.md
#
# Syntax: ./ruby-debian.sh [Ruby version] [non-root user] [Add to rc files flag] [Install tools flag]

RUBY_VERSION=${1:-"latest"}
USERNAME=${2:-"automatic"}
UPDATE_RC=${3:-"true"}
# BUG FIX: the install-tools flag is the FOURTH positional argument (see the
# Syntax line above); this previously read ${6} and therefore always fell
# back to "true".
INSTALL_RUBY_TOOLS=${4:-"true"}

set -e

# This script writes to /etc and installs system packages, so it must run as root.
if [ "$(id -u)" -ne 0 ]; then
    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
    exit 1
fi
# Determine the appropriate non-root user.
# "auto"/"automatic" picks the first existing user among a list of common
# dev-container user names plus whichever account has UID 1000; "none" or a
# nonexistent name falls back to root.
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
    USERNAME=""
    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
    # Quote the array expansion so each candidate is iterated as one word.
    for CURRENT_USER in "${POSSIBLE_USERS[@]}"; do
        if id -u "${CURRENT_USER}" > /dev/null 2>&1; then
            USERNAME="${CURRENT_USER}"
            break
        fi
    done
    if [ "${USERNAME}" = "" ]; then
        USERNAME=root
    fi
elif [ "${USERNAME}" = "none" ] || ! id -u "${USERNAME}" > /dev/null 2>&1; then
    USERNAME=root
fi
# Determine appropriate settings for rvm
# Gems installed alongside Ruby: rake plus the debug gems used by VS Code's
# Ruby tooling.
DEFAULT_GEMS="rake ruby-debug-ide debase"
if [ "${RUBY_VERSION}" = "none" ]; then
    # Install RVM only, no Ruby and therefore no gems.
    RVM_INSTALL_ARGS=""
else
    if [ "${RUBY_VERSION}" = "latest" ] || [ "${RUBY_VERSION}" = "current" ] || [ "${RUBY_VERSION}" = "lts" ]; then
        # Bare --ruby makes the rvm installer pick its current default Ruby.
        RVM_INSTALL_ARGS="--ruby"
        RUBY_VERSION=""
    else
        RVM_INSTALL_ARGS="--ruby=${RUBY_VERSION}"
    fi
    if [ "${INSTALL_RUBY_TOOLS}" = "true" ]; then
        # Not inverted: when tools are wanted AND a Ruby is being installed,
        # the gems are handed to the rvm installer via --with-default-gems
        # below, so the separate "gem install" step later can be skipped.
        SKIP_GEM_INSTALL="true"
    else
        # Tools not wanted: clear the gem list so the installer adds nothing.
        DEFAULT_GEMS=""
    fi
fi
# Append a snippet ($1) to the system-wide bash rc file and, when zsh is
# present, to the zsh rc file as well. A no-op unless rc updates were
# requested via the third script argument.
function updaterc() {
    if [ "${UPDATE_RC}" != "true" ]; then
        return 0
    fi
    echo "Updating /etc/bash.bashrc..."
    echo -e "$1" >> /etc/bash.bashrc
    if [ -d "/etc/zsh" ]; then
        echo "Updating /etc/zsh/zshrc..."
        echo -e "$1" >> /etc/zsh/zshrc
    fi
}
export DEBIAN_FRONTEND=noninteractive

# Install curl, software-properties-common, build-essential, gnupg2 if missing
if ! dpkg -s curl ca-certificates software-properties-common build-essential gnupg2 > /dev/null 2>&1; then
    # Only refresh the package index when the apt list cache is empty.
    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
        apt-get update
    fi
    apt-get -y install --no-install-recommends curl ca-certificates software-properties-common build-essential gnupg2
fi

# Just install Ruby if RVM already installed
if [ -d "/usr/local/rvm" ]; then
    echo "Ruby Version Manager already exists."
    if [ "${RUBY_VERSION}" != "none" ]; then
        echo "Installing specified Ruby version."
        su ${USERNAME} -c "source /usr/local/rvm/scripts/rvm && rvm install ruby ${RUBY_VERSION}"
    fi
    # RVM's installer did not run, so the default gems must be installed
    # explicitly below.
    SKIP_GEM_INSTALL="false"
else
    # Use a temporary location for gpg keys to avoid polluting image
    export GNUPGHOME="/tmp/rvm-gnupg"
    mkdir -p ${GNUPGHOME}
    echo "disable-ipv6" >> ${GNUPGHOME}/dirmngr.conf
    # BUG FIX: the sks-keyservers.net pool was shut down (June 2021), which
    # made this key fetch fail; use Ubuntu's keyserver for the RVM signing
    # keys instead.
    gpg --keyserver hkp://keyserver.ubuntu.com --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB 2>&1
    # Install RVM
    curl -sSL https://get.rvm.io | bash -s stable --ignore-dotfiles ${RVM_INSTALL_ARGS} --with-default-gems="${DEFAULT_GEMS}" 2>&1
    usermod -aG rvm ${USERNAME}
    su ${USERNAME} -c "source /usr/local/rvm/scripts/rvm && rvm fix-permissions system"
    rm -rf ${GNUPGHOME}
fi

if [ "${INSTALL_RUBY_TOOLS}" = "true" ] && [ "${SKIP_GEM_INSTALL}" != "true" ]; then
    # Non-root user may not have "gem" in path when script is run and no ruby version
    # is installed by rvm, so handle this by using root's default gem in this case
    ROOT_GEM="$(which gem)"
    # BUG FIX: "$(which gem || ${ROOT_GEM})" tried to EXECUTE the fallback
    # path as a command; echo it instead so the path is substituted as a value.
    su ${USERNAME} -c "source /usr/local/rvm/scripts/rvm && \"$(which gem || echo ${ROOT_GEM})\" install ${DEFAULT_GEMS}"
fi
# Install rbenv/ruby-build for good measure
git clone --depth=1 \
-c core.eol=lf \
-c core.autocrlf=false \
-c fsck.zeroPaddedFilemode=ignore \
-c fetch.fsck.zeroPaddedFilemode=ignore \
-c receive.fsck.zeroPaddedFilemode=ignore \
https://github.com/rbenv/rbenv.git /usr/local/share/rbenv
ln -s /usr/local/share/rbenv/bin/rbenv /usr/local/bin
updaterc 'eval "$(rbenv init -)"'
git clone --depth=1 \
-c core.eol=lf \
-c core.autocrlf=false \
-c fsck.zeroPaddedFilemode=ignore \
-c fetch.fsck.zeroPaddedFilemode=ignore \
-c receive.fsck.zeroPaddedFilemode=ignore \
https://github.com/rbenv/ruby-build.git /usr/local/share/ruby-build
mkdir -p /root/.rbenv/plugins
ln -s /usr/local/share/ruby-build /root/.rbenv/plugins/ruby-build
if [ "${USERNAME}" != "root" ]; then
mkdir -p /home/${USERNAME}/.rbenv/plugins
chown -R ${USERNAME} /home/${USERNAME}/.rbenv
ln -s /usr/local/share/ruby-build /home/${USERNAME}/.rbenv/plugins/ruby-build
fi
# Clean up
source /usr/local/rvm/scripts/rvm
rvm cleanup all
gem cleanup
echo "Done!"
|
// Add a link to the HTML page
<a href="#" id="myLink">Click me</a>
// Create JavaScript for the modal
// Show a simple modal overlay when the trigger link is clicked.
const myLink = document.getElementById('myLink');
myLink.addEventListener('click', (event) => {
  // BUG FIX: the link's href is "#", so without preventDefault() each
  // click would also jump/scroll the page back to the top.
  event.preventDefault();
  const modal = document.createElement('div');
  modal.className = 'modal';
  modal.innerHTML = '<p>You clicked the link!</p>';
  document.body.appendChild(modal);
});
// Add styles to your CSS page
/* Full-screen, semi-transparent overlay used as the modal backdrop. */
.modal {
  position: fixed; /* cover the viewport regardless of scrolling */
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  background: rgba(0, 0, 0, 0.5); /* 50%-opacity black scrim */
}
import numpy as np
import pytest
from numpy.testing import assert_equal, assert_raises
from .. import SIMULATIONS, gaussian_3samp, ksamp_sim, rot_ksamp
class TestKSampleSimShape:
    """Shape checks for the k-sample rotation simulation wrappers."""

    @pytest.mark.parametrize("n", [100, 1000])
    @pytest.mark.parametrize("p", [1, 5])
    @pytest.mark.parametrize(
        "indep_sim",
        SIMULATIONS.keys(),
    )
    @pytest.mark.parametrize("k, degree", [(2, 90), (3, [90, 90])])
    def test_shapes(self, indep_sim, k, n, p, degree):
        """Each returned sample must have the expected number of columns."""
        np.random.seed(123456789)
        sims = rot_ksamp(indep_sim, n, p, k=k, degree=degree)
        sims1 = rot_ksamp(indep_sim, n, p, k=k, degree=degree, pow_type="dim")
        sims2 = ksamp_sim("rot_ksamp", n, sim=indep_sim, p=p, k=k, degree=degree)
        # Simulations whose generator returns x and y of equal dimension p
        # stack to 2 * p columns; all others append one response column.
        if indep_sim in [
            "joint_normal",
            "logarithmic",
            "sin_four_pi",
            "sin_sixteen_pi",
            "two_parabolas",
            "square",
            "diamond",
            "circle",
            "ellipse",
            "multiplicative_noise",
            "multimodal_independence",
        ]:
            expected_cols = p * 2
        else:
            expected_cols = p + 1
        # Plain loops instead of list comprehensions used only for their
        # side effects.
        for sim_group in (sims, sims1, sims2):
            for sim in sim_group:
                assert_equal(sim.shape[1], expected_cols)
class TestGaussianSimShape:
    """Shape checks for the three-sample gaussian simulation."""

    @pytest.mark.parametrize("n", [100, 1000])
    @pytest.mark.parametrize(
        "case",
        [1, 2, 3, 4, 5],
    )
    def test_shapes(self, n, case):
        """Every returned sample must contain exactly n rows."""
        np.random.seed(123456789)
        sims = gaussian_3samp(n, case=case)
        # Plain loop instead of a list comprehension used only for its
        # side effect.
        for sim in sims:
            assert_equal(sim.shape[0], n)
class TestKSampleSimErrorWarn:
    """Tests errors and warnings."""

    def test_wrong_powtype(self):
        # An unrecognized pow_type must raise ValueError.
        assert_raises(ValueError, rot_ksamp, sim="linear", n=100, p=1, pow_type="abcd")

    def test_wrong_k(self):
        # BUG FIX: these calls previously also passed pow_type="abcd", so the
        # ValueError could have been raised by the bad pow_type rather than
        # by the k/degree mismatch this test is meant to cover.
        # k=3 requires degree to be a list of two angles, not a scalar.
        assert_raises(ValueError, rot_ksamp, sim="linear", k=3, n=100, p=1)
        # Conversely, a two-angle degree list is invalid for the default k=2.
        assert_raises(
            ValueError,
            rot_ksamp,
            sim="linear",
            degree=[90, 90],
            n=100,
            p=1,
        )
|
/**
 * Utility that counts the number of consecutive spaces at the END of a
 * string (not the total number of spaces it contains).
 */
public class CountSpaces {
    /**
     * Counts trailing spaces.
     *
     * @param str the string to inspect (must be non-null)
     * @return the number of consecutive ' ' characters at the end of str
     */
    public static int countSpaces(String str) {
        int count = 0;
        // Walk backwards from the last character; stop at the first
        // non-space character.
        for (int i = str.length() - 1; i >= 0; i--) {
            if (str.charAt(i) == ' ') {
                count++;
            } else {
                break;
            }
        }
        return count;
    }

    public static void main(String[] args) {
        String str = "Hello world, How are you?";
        // BUG FIX: the original comment claimed this prints 2, but the
        // sample string has no trailing spaces, so countSpaces returns 0.
        System.out.println("Number of spaces to end of string: " + countSpaces(str)); // prints "Number of spaces to end of string: 0"
    }
}
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Stack;
public class FileCompression {
public static void compressDirectory(File directory, String zipFileName) throws IOException {
try (ZipArchiveOutputStream zipOut = new ZipArchiveOutputStream(new FileOutputStream(zipFileName))) {
Stack<File> fileStack = new Stack<>();
Stack<String> parentStack = new Stack<>();
fileStack.push(directory);
parentStack.push(null);
while (!fileStack.isEmpty()) {
File file = fileStack.pop();
String parent = parentStack.pop();
if (file.isDirectory()) {
File[] children = file.listFiles();
if (children != null) {
for (File child : children) {
parentStack.push(parent == null ? file.getName() : parent + File.separator + file.getName());
fileStack.push(child);
}
}
} else {
String entryName = parent == null ? file.getName() : parent + File.separator + file.getName();
ZipArchiveEntry zipEntry = new ZipArchiveEntry(file, entryName);
zipOut.putArchiveEntry(zipEntry);
try (FileInputStream fileInput = new FileInputStream(file)) {
byte[] buffer = new byte[1024];
int length;
while ((length = fileInput.read(buffer)) > 0) {
zipOut.write(buffer, 0, length);
}
}
zipOut.closeArchiveEntry();
}
}
}
}
public static void main(String[] args) {
File directory = new File("path_to_directory");
String zipFileName = "compressed.zip";
try {
compressDirectory(directory, zipFileName);
System.out.println("Directory compressed successfully.");
} catch (IOException e) {
System.err.println("Error compressing directory: " + e.getMessage());
}
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.