text stringlengths 1 1.05M |
|---|
#include <SFML/Graphics.hpp>
// Camera2D: thin wrapper that owns an sf::View for one render target and
// keeps the view's size in sync with the target's pixel size.
class Camera2D {
private:
    sf::RenderTargetPtr m_pRenderTarget; // target whose view is managed; NOTE(review): RenderTargetPtr is a project alias, not a standard SFML type — verify ownership semantics
    sf::View m_view;                     // cached working copy of the target's view
public:
    // Copies the target's current view, sets the viewport, resizes the view
    // to the target's pixel size, and applies the result to the target.
    // NOTE(review): sf::View::setViewport expects a NORMALIZED (0..1)
    // rectangle, but frustrumWidth/Height look like world/pixel dimensions;
    // also resize() below overwrites the view size, so the frustum arguments
    // only affect the viewport. Confirm intent against the call sites.
    Camera2D(sf::RenderTargetPtr renderTarget, float frustrumWidth, float frustrumHeight)
        : m_pRenderTarget(renderTarget) {
        m_view = m_pRenderTarget->getView();
        m_view.setViewport(sf::FloatRect(0, 0, frustrumWidth, frustrumHeight));
        resize(m_pRenderTarget->getSize());
        updateTarget();
    }
    // Pushes the cached view to the render target.
    void updateTarget() {
        m_pRenderTarget->setView(m_view);
    }
    // Updates the cached view's size (in pixels) WITHOUT applying it to the
    // target; callers must invoke updateTarget() afterwards.
    void resize(sf::Vector2u size) {
        m_view.setSize(static_cast<float>(size.x), static_cast<float>(size.y));
    }
};
/// Swaps two integers and prints the result.
///
/// Behaviorally identical to the original add/subtract swap trick:
/// starting from a = 7, b = 3, the printed output is `a: 3, b: 7`.
fn main() {
    let mut a = 7;
    let mut b = 3;
    // Standard-library swap instead of the arithmetic three-step.
    std::mem::swap(&mut a, &mut b);
    println!("a: {}, b: {}", a, b);
}
<filename>src/components/views/TopView.js
import React from "react";
import Top from "../view_components/Top.js";
const TopView = props => {
return (
<div className="top">
<Top />
</div>
);
};
export default TopView; |
from scipy import stats
import pandas as pd
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.utils.validation import check_is_fitted
class NormConstVarianceImputer(BaseEstimator, TransformerMixin):
    """Impute missing values of one DataFrame column by sampling from a
    normal distribution centered on the column mean with unit variance.

    Parameters
    ----------
    column : str, optional
        Name of the column whose NaNs are imputed. Must be set before fit.
    """

    def __init__(self, column=None):
        self.column = column

    def fit(self, X, y=None):
        """Learn the column mean; the variance is fixed at 1.

        Raises
        ------
        ValueError
            If `column` is unset or not present in X.
        """
        if self.column is None:
            raise ValueError("Column to impute missing values must be specified.")
        if self.column not in X.columns:
            raise ValueError(f"Column '{self.column}' not found in the input DataFrame.")
        self.mean_ = X[self.column].mean()
        self.std_ = 1  # Constant unit variance
        return self

    def transform(self, X):
        """Return a copy of X with NaNs in `column` replaced by random draws.

        Bug fix vs. the original: `Series.fillna` does not accept a plain
        ndarray of per-position values, and chained `fillna(..., inplace=True)`
        on a column selection may operate on a temporary (no-op plus
        SettingWithCopyWarning). Assign through a boolean mask instead so
        each missing slot receives its own draw.
        """
        check_is_fitted(self, 'mean_')
        X_transformed = X.copy()
        mask = X_transformed[self.column].isnull()
        n_missing = int(mask.sum())
        if n_missing:
            X_transformed.loc[mask, self.column] = stats.norm.rvs(
                loc=self.mean_, scale=self.std_, size=n_missing
            )
        return X_transformed
# Sample dataset
# 5-row frame with one NaN per column, to demonstrate the imputer.
data = {
    'A': [1, 2, np.nan, 4, 5],
    'B': [np.nan, 2, 3, 4, 5]
}
df = pd.DataFrame(data)
# Impute missing values in column 'A' using NormConstVarianceImputer
# (column 'B' is left untouched; replacement values are random draws from
# N(mean of 'A', 1), so the filled value differs between runs).
imputer = NormConstVarianceImputer(column='A')
df_imputed = imputer.fit_transform(df)
print(df_imputed)
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB
class SentimentClassifier:
    """Bag-of-words sentiment classifier: unigram+bigram counts fed to a
    multinomial naive Bayes model."""

    def __init__(self):
        # Bigrams let the model capture simple negations ("not good").
        self.vectorizer = CountVectorizer(ngram_range=(1, 2))
        self.classifier = MultinomialNB()

    def fit(self, X, y):
        """Fit the vectorizer and classifier on raw documents X with labels y.

        Improvements vs. the original: uses fit_transform so the corpus is
        tokenized once instead of twice, and returns self per the
        scikit-learn estimator convention (the original returned None,
        breaking chained calls like `clf.fit(X, y).predict(Z)`).
        """
        counts = self.vectorizer.fit_transform(X)
        self.classifier.fit(counts, y)
        return self

    def predict(self, X):
        """Predict a label for each raw document in X."""
        return self.classifier.predict(self.vectorizer.transform(X))
'use strict';

// Default bot language and command prefix come from the shared data config.
const { LANGUAGE, PREFIX } = require('@data/config');

// Field descriptors for the bot's persisted collections, in mongoose-style
// { type, default, required, enum } shape.
// NOTE(review): presumably consumed by a schema factory elsewhere — confirm.
module.exports = {
  // Per-guild settings, keyed by the guild's snowflake ID (_id).
  guilds: {
    _id: { required: true, type: String },
    // NOTE(review): default is the boolean false but the declared type is
    // String (presumably a role ID when enabled) — confirm this mismatch.
    autorole: { default: false, type: String },
    disableChannels: { type: Array },
    lang: { default: LANGUAGE, type: String },
    // Per-event log channel IDs; null leaves that log stream disabled.
    logs: {
      GuildMemberAdd: { default: null, type: String },
      GuildMemberRemove: { default: null, type: String },
      GuildUpdates: { default: null, type: String },
      MessageUpdate: { default: null, type: String },
      UserUpdate: { default: null, type: String },
      VoiceChannelLogs: { default: null, type: String },
    },
    prefix: { default: PREFIX, type: String },
    starboard: { default: null, type: String },
  },
  // Audit records of members joining/leaving a guild.
  joinLeaveGuild: {
    date_at: { required: true, type: Date },
    guild_id: { required: true, type: String },
    type: { enum: ['JOIN', 'LEAVE'], required: true, type: String },
  },
};
|
import numpy as np
def custom_activation(x):
    """Piecewise activation: identity for x >= 0, -log(1 - x) for x < 0.

    Note that for x < 0 we have 1 - x > 1, so the negative branch yields a
    negative value (e.g. x = -0.5 -> -log(1.5) ~= -0.405).
    """
    return x if x >= 0 else -np.log(1 - x)
# Example usage
# For x = -0.5 the negative branch gives -log(1 - (-0.5)) = -log(1.5).
result = custom_activation(-0.5)
print(result)  # Output: -0.4054651081081644 (the original comment's 0.6931471805599453 was incorrect)
#!/bin/bash
# Slurm batch script: runs one PyMC3 supernova light-curve analysis task
# (argument 86 selects the target; it also tags the output/error filenames)
# inside the pymc3_env conda environment on a single 24-core node.
#SBATCH -A p30137 # Allocation
#SBATCH -p short # Queue
#SBATCH -t 1:00:00 # Walltime/duration of the job
#SBATCH -N 1 # Number of Nodes
#SBATCH --mem=64G # Memory per node in GB needed for a job. Also see --mem-per-cpu
#SBATCH --ntasks-per-node=24 # Number of Cores (Processors)
#SBATCH --mail-user=xander.hall@northwestern.edu # Designate email address for job communications
#SBATCH --mail-type=END # Events options are job BEGIN, END, NONE, FAIL, REQUEUE
#SBATCH --output="jobout86"
#SBATCH --error="joberr86"
#SBATCH --job-name="Calculate Possible FU Oris" # Name of job
echo deploying job ...
# add a project directory to your PATH (if needed)
export PATH=$PATH:/projects/p30137/xhall/
# load modules you need to use
# NOTE(review): `module purge` usually takes no argument; confirm "all" is
# accepted (and not silently ignored) by this site's module system.
module purge all
module load python/anaconda3.6
source activate pymc3_env
# "86" is forwarded to analyzeall.py as the dataset/object index.
python /home/xjh0560/GitHub/PyMC3_Supernova/MultipleLCAnalysis/analyzeall.py 86
echo Done
package main
import (
	"fmt"
	"log"
	"strings"
	"time"

	"github.com/tarm/serial"
)
// Serial is the package-wide handle to the open serial port; nil until
// SerialOpen has succeeded.
var Serial *serial.Port

const (
	// Pub/sub topic on which connection state changes are broadcast.
	SERIAL_CONNECTED_TOPIC = "serial_connected"
)

var (
	// Last-known link state as "y"/"n", published on SERIAL_CONNECTED_TOPIC.
	serial_connected = "n"
)
// RawBytes2StrBytes renders data as upper-case hex octets, each followed by a
// single space (e.g. []byte{0x0A, 0xFF} -> "0A FF "). Returns "" for empty
// or nil input.
//
// Fix: the original concatenated onto a string inside the loop, which is
// O(n^2) in allocations for large reads; strings.Builder makes it linear.
func RawBytes2StrBytes(data []byte) string {
	if len(data) == 0 {
		return ""
	}
	var sb strings.Builder
	sb.Grow(len(data) * 3) // "XX " per input byte
	for _, b := range data {
		fmt.Fprintf(&sb, "%02X ", b)
	}
	return sb.String()
}
// SerialOpen (re)opens the configured serial port, storing the handle in the
// package-level Serial variable, and returns OpenPort's error (nil on
// success). Note the handle is assigned even when opening fails, matching
// the original behavior.
func SerialOpen() error {
	cfg := &serial.Config{Name: Config.SerialPort, Baud: Config.SerialBuad}
	port, err := serial.OpenPort(cfg)
	Serial = port
	return err
}
// SerialReadThread owns the serial connection lifecycle: it blocks until the
// port can be opened, publishes connection-state changes ("y"/"n") on
// SERIAL_CONNECTED_TOPIC, then loops forever reading frames and republishing
// them hex-encoded on the "rx" topic. On a read error it closes the port and
// retries opening it every 2 s. Intended to run as a goroutine; never returns.
func SerialReadThread() {
	buf := make([]byte, 10240)
	var n int
	var err error
	// Initial connect: retry every 2 s until the port opens.
	for {
		err = SerialOpen()
		if err == nil {
			break
		}
		log.Printf("Cannot Open %s\n", Config.SerialPort)
		time.Sleep(time.Second * 2)
	}
	serial_connected = "y"
	emit2All(SERIAL_CONNECTED_TOPIC, serial_connected)
	for {
		// Optional pacing between reads (Config.SendInterval in milliseconds).
		if Config.SendInterval > 0 {
			time.Sleep(time.Duration(Config.SendInterval) * time.Millisecond)
		}
		n, err = Serial.Read(buf)
		if err != nil {
			// Read failed: broadcast the disconnect, then close and reopen
			// the port until the link comes back.
			serial_connected = "n"
			emit2All(SERIAL_CONNECTED_TOPIC, serial_connected)
			for {
				log.Printf("[Retry] Opening SerialPort")
				if Serial != nil {
					Serial.Close()
				}
				err = SerialOpen()
				if err == nil {
					serial_connected = "y"
					emit2All(SERIAL_CONNECTED_TOPIC, serial_connected)
					break
				}
				time.Sleep(time.Second * 2)
			}
		}
		if n > 0 {
			log.Printf("Serial Recv: %d", n)
			str := RawBytes2StrBytes(buf[:n])
			// NOTE(review): str is used as the Printf format string. Safe
			// today because hex output never contains '%', but
			// Printf("%s", str) would be the defensive form — confirm.
			Flogger.Printf(str)
			emit2All("rx", str)
		}
	}
}
|
const express = require('express');
const app = express();

// Parse JSON request bodies into req.body.
app.use(express.json());

// GET /data — return a fixed sample payload.
app.get('/data', (req, res) => {
  res.status(200).json({ data: { a: 1, b: 2 } });
});

// POST /data — echo the parsed JSON body back with 201 Created.
app.post('/data', (req, res) => {
  const data = req.body;
  res.status(201).json(data);
});

// Fix: log from the listen callback so the message appears only once the
// server has actually bound to the port (the original logged unconditionally,
// before and regardless of whether the bind succeeded).
app.listen(3000, () => {
  console.log('Listening on port 3000...');
});
#!/usr/bin/env bash
set -e

# Deployment bootstrap for the profiles REST API: installs system packages,
# clones the project, builds a virtualenv, runs Django management commands,
# and wires up supervisor + nginx. Run as root on a fresh host; `git clone`
# into a non-empty directory fails, so the clone step is first-run only.

# TODO: Set to URL of git repo.
PROJECT_GIT_URL='https://github.com/Will-Mower/profiles-rest-api.git'
PROJECT_BASE_PATH='/usr/local/apps/profiles-rest-api'

echo "Installing dependencies..."
apt-get update
# NOTE(review): newer distros ship python3-pip rather than python-pip —
# confirm the target release before reusing this package list.
apt-get install -y python3-dev python3-venv sqlite python-pip supervisor nginx git

# Create project directory and fetch sources. Paths are quoted throughout so
# the script survives a base path containing spaces.
mkdir -p "$PROJECT_BASE_PATH"
git clone "$PROJECT_GIT_URL" "$PROJECT_BASE_PATH"

# Create virtual environment
mkdir -p "$PROJECT_BASE_PATH/env"
python3 -m venv "$PROJECT_BASE_PATH/env"

# Install python packages
"$PROJECT_BASE_PATH/env/bin/pip" install -r "$PROJECT_BASE_PATH/requirements.txt"
"$PROJECT_BASE_PATH/env/bin/pip" install uwsgi==2.0.18

# Run migrations and collectstatic
cd "$PROJECT_BASE_PATH"
"$PROJECT_BASE_PATH/env/bin/python" manage.py migrate
"$PROJECT_BASE_PATH/env/bin/python" manage.py collectstatic --noinput

# Configure supervisor
cp "$PROJECT_BASE_PATH/deploy/supervisor_profiles_api.conf" /etc/supervisor/conf.d/profiles_api.conf
supervisorctl reread
supervisorctl update
supervisorctl restart profiles_api

# Configure nginx
cp "$PROJECT_BASE_PATH/deploy/nginx_profiles_api.conf" /etc/nginx/sites-available/profiles_api.conf
# -f / -sf: under `set -e` the bare rm/ln aborted the script on re-runs once
# the default site was already removed or the symlink already existed.
rm -f /etc/nginx/sites-enabled/default
ln -sf /etc/nginx/sites-available/profiles_api.conf /etc/nginx/sites-enabled/profiles_api.conf
systemctl restart nginx.service

echo "DONE! :)"
/**
 * Inverse-quadratic interpolation step (Newton form) for root finding.
 *
 * Given three points (a, fa), (b, fb), (c, fc) on y = f(x) with distinct
 * function values, interpolate the inverse function x(y) through them and
 * evaluate it at y = 0, yielding an estimate r of the root of f.
 *
 * Fix: the original referenced undeclared variables (q11, q21, fd), so it
 * threw a ReferenceError on every call. Those terms belong to the 4-point
 * cubic variant of this scheme (cf. TOMS 748 / Alefeld-Potra-Shi); with only
 * three data points the correct Newton-form estimate is r = a + q31 + q32.
 *
 * @param {number} a  first abscissa
 * @param {number} b  second abscissa
 * @param {number} c  third abscissa
 * @param {number} fa f(a)
 * @param {number} fb f(b)
 * @param {number} fc f(c)
 * @returns {number} interpolated estimate of the root of f
 */
function calculateR(a, b, c, fa, fb, fc) {
  // First-order inverse divided-difference terms.
  const q31 = ((a - b) * fa) / (fb - fa);
  const q21 = ((b - c) * fb) / (fc - fb);
  const d31 = ((a - b) * fb) / (fb - fa);
  // Second-order correction.
  const q32 = ((d31 - q21) * fa) / (fc - fa);
  return a + q31 + q32;
}

// Example usage: inverse quadratic through (5,10), (3,8), (2,6) at y = 0.
const a = 5, b = 3, c = 2, fa = 10, fb = 8, fc = 6;
console.log(calculateR(a, b, c, fa, fb, fc)); // Output: 5
<filename>google/cloud/securitycenter/settings/v1beta1/google-cloud-securitycenter-settings-v1beta1-ruby/test/google/cloud/security_center/settings/v1beta1/security_center_settings_service_test.rb
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/cloud/securitycenter/settings/v1beta1/securitycenter_settings_service_pb"
require "google/cloud/securitycenter/settings/v1beta1/securitycenter_settings_service_services_pb"
require "google/cloud/security_center/settings/v1beta1/security_center_settings_service"
class ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::ClientTest < Minitest::Test
# Hand-rolled stand-in for Gapic::ServiceStub: counts RPC invocations,
# forwards each call's arguments to an optional verification block, and
# replays a canned response/operation pair to the caller.
class ClientStub
  attr_accessor :call_rpc_count, :requests

  # response/operation are returned on every call; the optional block
  # receives the raw call_rpc arguments for per-test assertions.
  def initialize response, operation, &block
    @response = response
    @operation = operation
    @block = block
    @call_rpc_count = 0
    @requests = []
  end

  # Mimics Gapic::ServiceStub#call_rpc: records the call, runs the
  # verification block, yields the canned pair, and returns the response.
  def call_rpc *args, **kwargs
    @call_rpc_count += 1
    @requests << @block&.call(*args, **kwargs)
    yield @response, @operation if block_given?
    @response
  end
end
# Exercises Client#get_service_account across the five supported calling
# conventions (hash, keyword args, request object — each with and without
# explicit call options) and verifies the stubbed RPC fired once per call.
def test_get_service_account
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::SecurityCenter::Settings::V1beta1::ServiceAccount.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  # Fix: the stub below asserts the wire value is "hello world"; the
  # anonymization placeholder "<NAME>" made every call fail.
  name = "hello world"

  get_service_account_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :get_service_account, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::GetServiceAccountRequest, request
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, get_service_account_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.get_service_account({ name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.get_service_account name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.get_service_account ::Google::Cloud::SecurityCenter::Settings::V1beta1::GetServiceAccountRequest.new(name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.get_service_account({ name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.get_service_account(::Google::Cloud::SecurityCenter::Settings::V1beta1::GetServiceAccountRequest.new(name: name), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, get_service_account_client_stub.call_rpc_count
  end
end
# Exercises Client#get_settings across the five supported calling conventions
# and verifies the stubbed RPC fired once per call.
def test_get_settings
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::SecurityCenter::Settings::V1beta1::Settings.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  # Fix: the stub below asserts the wire value is "hello world"; the
  # anonymization placeholder "<NAME>" made every call fail.
  name = "hello world"

  get_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :get_settings, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::GetSettingsRequest, request
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, get_settings_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.get_settings({ name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.get_settings name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.get_settings ::Google::Cloud::SecurityCenter::Settings::V1beta1::GetSettingsRequest.new(name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.get_settings({ name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.get_settings(::Google::Cloud::SecurityCenter::Settings::V1beta1::GetSettingsRequest.new(name: name), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, get_settings_client_stub.call_rpc_count
  end
end
# Exercises Client#update_settings across the five supported calling
# conventions and verifies the stubbed RPC fired once per call.
def test_update_settings
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::SecurityCenter::Settings::V1beta1::Settings.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  settings = {}
  update_mask = {}
  update_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :update_settings, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::UpdateSettingsRequest, request
    # The empty hashes must arrive coerced to their protobuf message types.
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecurityCenter::Settings::V1beta1::Settings), request["settings"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, update_settings_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.update_settings({ settings: settings, update_mask: update_mask }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.update_settings settings: settings, update_mask: update_mask do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.update_settings ::Google::Cloud::SecurityCenter::Settings::V1beta1::UpdateSettingsRequest.new(settings: settings, update_mask: update_mask) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.update_settings({ settings: settings, update_mask: update_mask }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.update_settings(::Google::Cloud::SecurityCenter::Settings::V1beta1::UpdateSettingsRequest.new(settings: settings, update_mask: update_mask), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify method calls
    assert_equal 5, update_settings_client_stub.call_rpc_count
  end
end
# Exercises Client#reset_settings (returns Empty) across the five supported
# calling conventions and verifies the stubbed RPC fired once per call.
def test_reset_settings
  # Create GRPC objects.
  grpc_response = ::Google::Protobuf::Empty.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  name = "hello world"
  etag = "hello world"
  reset_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :reset_settings, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::ResetSettingsRequest, request
    assert_equal "hello world", request["name"]
    assert_equal "hello world", request["etag"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, reset_settings_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.reset_settings({ name: name, etag: etag }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.reset_settings name: name, etag: etag do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.reset_settings ::Google::Cloud::SecurityCenter::Settings::V1beta1::ResetSettingsRequest.new(name: name, etag: etag) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.reset_settings({ name: name, etag: etag }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.reset_settings(::Google::Cloud::SecurityCenter::Settings::V1beta1::ResetSettingsRequest.new(name: name, etag: etag), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify method calls
    assert_equal 5, reset_settings_client_stub.call_rpc_count
  end
end
# Exercises Client#batch_get_settings across the five supported calling
# conventions and verifies the stubbed RPC fired once per call.
def test_batch_get_settings
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::SecurityCenter::Settings::V1beta1::BatchGetSettingsResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  parent = "hello world"
  names = ["hello world"]
  batch_get_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :batch_get_settings, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::BatchGetSettingsRequest, request
    assert_equal "hello world", request["parent"]
    # Repeated string field arrives as a plain array of names.
    assert_equal ["hello world"], request["names"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, batch_get_settings_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.batch_get_settings({ parent: parent, names: names }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.batch_get_settings parent: parent, names: names do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.batch_get_settings ::Google::Cloud::SecurityCenter::Settings::V1beta1::BatchGetSettingsRequest.new(parent: parent, names: names) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.batch_get_settings({ parent: parent, names: names }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.batch_get_settings(::Google::Cloud::SecurityCenter::Settings::V1beta1::BatchGetSettingsRequest.new(parent: parent, names: names), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify method calls
    assert_equal 5, batch_get_settings_client_stub.call_rpc_count
  end
end
# Exercises Client#calculate_effective_settings across the five supported
# calling conventions and verifies the stubbed RPC fired once per call.
def test_calculate_effective_settings
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::SecurityCenter::Settings::V1beta1::Settings.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  name = "hello world"
  calculate_effective_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :calculate_effective_settings, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::CalculateEffectiveSettingsRequest, request
    assert_equal "hello world", request["name"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, calculate_effective_settings_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.calculate_effective_settings({ name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.calculate_effective_settings name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.calculate_effective_settings ::Google::Cloud::SecurityCenter::Settings::V1beta1::CalculateEffectiveSettingsRequest.new(name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.calculate_effective_settings({ name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.calculate_effective_settings(::Google::Cloud::SecurityCenter::Settings::V1beta1::CalculateEffectiveSettingsRequest.new(name: name), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify method calls
    assert_equal 5, calculate_effective_settings_client_stub.call_rpc_count
  end
end
# Exercises Client#batch_calculate_effective_settings across the five
# supported calling conventions and verifies the stubbed RPC fired once per
# call.
def test_batch_calculate_effective_settings
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::SecurityCenter::Settings::V1beta1::BatchCalculateEffectiveSettingsResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  parent = "hello world"
  requests = [{}]
  batch_calculate_effective_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :batch_calculate_effective_settings, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::BatchCalculateEffectiveSettingsRequest, request
    assert_equal "hello world", request["parent"]
    # The empty hash in `requests` must be coerced to a sub-request message.
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::CalculateEffectiveSettingsRequest, request["requests"].first
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, batch_calculate_effective_settings_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.batch_calculate_effective_settings({ parent: parent, requests: requests }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.batch_calculate_effective_settings parent: parent, requests: requests do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.batch_calculate_effective_settings ::Google::Cloud::SecurityCenter::Settings::V1beta1::BatchCalculateEffectiveSettingsRequest.new(parent: parent, requests: requests) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.batch_calculate_effective_settings({ parent: parent, requests: requests }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.batch_calculate_effective_settings(::Google::Cloud::SecurityCenter::Settings::V1beta1::BatchCalculateEffectiveSettingsRequest.new(parent: parent, requests: requests), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify method calls
    assert_equal 5, batch_calculate_effective_settings_client_stub.call_rpc_count
  end
end
# Exercises Client#get_component_settings across the five supported calling
# conventions and verifies the stubbed RPC fired once per call.
def test_get_component_settings
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::SecurityCenter::Settings::V1beta1::ComponentSettings.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  # Fix: the stub below asserts the wire value is "hello world"; the
  # anonymization placeholder "<NAME>" made every call fail.
  name = "hello world"

  get_component_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :get_component_settings, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::GetComponentSettingsRequest, request
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, get_component_settings_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.get_component_settings({ name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.get_component_settings name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.get_component_settings ::Google::Cloud::SecurityCenter::Settings::V1beta1::GetComponentSettingsRequest.new(name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.get_component_settings({ name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.get_component_settings(::Google::Cloud::SecurityCenter::Settings::V1beta1::GetComponentSettingsRequest.new(name: name), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, get_component_settings_client_stub.call_rpc_count
  end
end
# Exercises Client#update_component_settings across the five supported
# calling conventions and verifies the stubbed RPC fired once per call.
def test_update_component_settings
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::SecurityCenter::Settings::V1beta1::ComponentSettings.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a unary method.
  component_settings = {}
  update_mask = {}
  update_component_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :update_component_settings, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::UpdateComponentSettingsRequest, request
    # The empty hashes must arrive coerced to their protobuf message types.
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecurityCenter::Settings::V1beta1::ComponentSettings), request["component_settings"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"]
    refute_nil options
  end
  Gapic::ServiceStub.stub :new, update_component_settings_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use hash object
    client.update_component_settings({ component_settings: component_settings, update_mask: update_mask }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use named arguments
    client.update_component_settings component_settings: component_settings, update_mask: update_mask do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object
    client.update_component_settings ::Google::Cloud::SecurityCenter::Settings::V1beta1::UpdateComponentSettingsRequest.new(component_settings: component_settings, update_mask: update_mask) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use hash object with options
    client.update_component_settings({ component_settings: component_settings, update_mask: update_mask }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Use protobuf object with options
    client.update_component_settings(::Google::Cloud::SecurityCenter::Settings::V1beta1::UpdateComponentSettingsRequest.new(component_settings: component_settings, update_mask: update_mask), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end
    # Verify method calls
    assert_equal 5, update_component_settings_client_stub.call_rpc_count
  end
end
# Exercises Client#reset_component_settings (returns Empty) across the five
# supported calling conventions and verifies the stubbed RPC fired once per
# call.
def test_reset_component_settings
  # Create GRPC objects.
  grpc_response = ::Google::Protobuf::Empty.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  # Fix: the stub below asserts the wire value is "hello world"; the
  # anonymization placeholder "<NAME>" made every call fail.
  name = "hello world"
  etag = "hello world"

  reset_component_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :reset_component_settings, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::ResetComponentSettingsRequest, request
    assert_equal "hello world", request["name"]
    assert_equal "hello world", request["etag"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, reset_component_settings_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.reset_component_settings({ name: name, etag: etag }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.reset_component_settings name: name, etag: etag do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.reset_component_settings ::Google::Cloud::SecurityCenter::Settings::V1beta1::ResetComponentSettingsRequest.new(name: name, etag: etag) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.reset_component_settings({ name: name, etag: etag }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.reset_component_settings(::Google::Cloud::SecurityCenter::Settings::V1beta1::ResetComponentSettingsRequest.new(name: name, etag: etag), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls
    assert_equal 5, reset_component_settings_client_stub.call_rpc_count
  end
end
def test_calculate_effective_component_settings
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::SecurityCenter::Settings::V1beta1::ComponentSettings.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  # Must be "hello world": the stub below asserts request["name"] against
  # that literal (its block parameter `name` shadows this local, so the
  # assertion cannot reference it).
  name = "hello world"

  calculate_effective_component_settings_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :calculate_effective_component_settings, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::CalculateEffectiveComponentSettingsRequest, request
    assert_equal "hello world", request["name"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, calculate_effective_component_settings_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.calculate_effective_component_settings({ name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.calculate_effective_component_settings name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.calculate_effective_component_settings ::Google::Cloud::SecurityCenter::Settings::V1beta1::CalculateEffectiveComponentSettingsRequest.new(name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.calculate_effective_component_settings({ name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.calculate_effective_component_settings(::Google::Cloud::SecurityCenter::Settings::V1beta1::CalculateEffectiveComponentSettingsRequest.new(name: name), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls: one invocation per calling convention above.
    assert_equal 5, calculate_effective_component_settings_client_stub.call_rpc_count
  end
end
def test_list_detectors
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::SecurityCenter::Settings::V1beta1::ListDetectorsResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  parent = "hello world"
  filter = "hello world"
  page_size = 42
  page_token = "hello world"

  # Stub that checks the rpc name and request fields for every call.
  # NOTE: the block parameter `name` here is the rpc name symbol, not the
  # request field; request fields are asserted against the same literals
  # assigned above.
  list_detectors_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :list_detectors, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::ListDetectorsRequest, request
    assert_equal "hello world", request["parent"]
    assert_equal "hello world", request["filter"]
    assert_equal 42, request["page_size"]
    assert_equal "hello world", request["page_token"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, list_detectors_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.list_detectors({ parent: parent, filter: filter, page_size: page_size, page_token: page_token }) do |response, operation|
      # list_detectors is a paged rpc: the raw proto response is wrapped.
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.list_detectors parent: parent, filter: filter, page_size: page_size, page_token: page_token do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.list_detectors ::Google::Cloud::SecurityCenter::Settings::V1beta1::ListDetectorsRequest.new(parent: parent, filter: filter, page_size: page_size, page_token: page_token) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.list_detectors({ parent: parent, filter: filter, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.list_detectors(::Google::Cloud::SecurityCenter::Settings::V1beta1::ListDetectorsRequest.new(parent: parent, filter: filter, page_size: page_size, page_token: page_token), grpc_options) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Verify method calls: one invocation per calling convention above.
    assert_equal 5, list_detectors_client_stub.call_rpc_count
  end
end
def test_list_components
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::SecurityCenter::Settings::V1beta1::ListComponentsResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  parent = "hello world"
  page_size = 42
  # Must be "hello world": the stub below asserts request["page_token"]
  # against that literal.
  page_token = "hello world"

  list_components_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :list_components, name
    assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::ListComponentsRequest, request
    assert_equal "hello world", request["parent"]
    assert_equal 42, request["page_size"]
    assert_equal "hello world", request["page_token"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, list_components_client_stub do
    # Create client
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.list_components({ parent: parent, page_size: page_size, page_token: page_token }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.list_components parent: parent, page_size: page_size, page_token: page_token do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.list_components ::Google::Cloud::SecurityCenter::Settings::V1beta1::ListComponentsRequest.new(parent: parent, page_size: page_size, page_token: page_token) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.list_components({ parent: parent, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.list_components(::Google::Cloud::SecurityCenter::Settings::V1beta1::ListComponentsRequest.new(parent: parent, page_size: page_size, page_token: page_token), grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls: one invocation per calling convention above.
    assert_equal 5, list_components_client_stub.call_rpc_count
  end
end
def test_configure
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure

  client = block_config = config = nil
  # Stub ServiceStub.new with nil so no real channel/stub is constructed.
  Gapic::ServiceStub.stub :new, nil do
    client = ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client.new do |config|
      config.credentials = grpc_channel
    end
  end

  # configure must yield the same Configuration object it returns.
  config = client.configure do |c|
    block_config = c
  end

  assert_same block_config, config
  assert_kind_of ::Google::Cloud::SecurityCenter::Settings::V1beta1::SecurityCenterSettingsService::Client::Configuration, config
end
end
|
#!/bin/bash
# Run every e2e test_*.sh script that lives next to this file, honouring
# E2E_SKIP (a space-separated list of test basenames to skip).

CURR_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
[ -d "$CURR_DIR" ] || { echo "FATAL: no current dir (maybe running in zsh?)"; exit 1; }

: "${E2E_SKIP:=""}"

# shellcheck source=./common.sh
source "$CURR_DIR/common.sh"

#########################################################################################

[ -n "$EXE" ] || abort "no EXE provided"

info "Preparing filesystem and environment..."
mkdir -p /root/.kube

# Quote all expansions: an unquoted $CURR_DIR/$i breaks on paths with spaces.
for i in "$CURR_DIR"/test_*.sh ; do
  base=$(basename "$i" .sh)
  if [[ $E2E_SKIP =~ (^| )$base($| ) ]]; then
    highlight "***** Skipping $base *****"
  else
    highlight "***** Running $base *****"
    "$i" || abort "test $base failed"
  fi
done

exit 0
import scrapy


class MySpider(scrapy.Spider):
    """Spider that emits title/description records scraped from two
    example.com pages."""

    name = 'myspider'
    start_urls = [
        'https://example.com/page1',
        'https://example.com/page2',
    ]

    def parse(self, response):
        # One record per <item> element found on the page; .get() yields
        # None when the sub-selector matches nothing.
        for entry in response.css('item'):
            yield {
                'title': entry.css('item-title::text').get(),
                'description': entry.css('item-description::text').get(),
            }
#!/bin/sh
# Fetch, build and install IbPy and txTrader from GitHub tarballs.
# Versions default to master; override with IBPY_VERSION / TXTRADER_VERSION.

# Abort on the first failed command so a bad download or failed build does
# not cascade into later steps operating on missing directories.
set -e

[ -n "$IBPY_VERSION" ] || IBPY_VERSION=master
curl --location "https://github.com/rstms/IbPy/tarball/$IBPY_VERSION" | tar zxfv -
mv rstms-IbPy-* IbPy
cd IbPy
python setup.py sdist
# Back to $HOME before unpacking the next project.
cd

[ -n "$TXTRADER_VERSION" ] || TXTRADER_VERSION=master
curl --location -o- "https://github.com/rstms/txTrader/tarball/$TXTRADER_VERSION" | tar zxfv -
mv rstms-txTrader-* txTrader
cd txTrader

[ -n "$MODE" ] || MODE=tws
sudo make config
make build
make venv
sudo make -e MODE="$MODE" install
import collections
import stats
from apps.analytics.bots import is_bot
class User(object):
    """Aggregated activity for one visitor (anonymous session or account)."""

    # user id -> signup timestamp, shared across all User instances.
    # (The keys are user ids, not ips: aggregate_facts_by_tokens writes
    # all_signup_ts[row['user']].)
    all_signup_ts = {}

    def __init__(self):
        self.first_ts = 0  # timestamp of the first counted fact for this user
        # day offset since first_ts -> fact key -> count
        self.day_bins = collections.defaultdict(lambda: collections.defaultdict(lambda: 0))
        # day offset since signup -> fact key -> count
        self.post_signup_bins = collections.defaultdict(lambda: collections.defaultdict(lambda: 0))
        self.facts = collections.defaultdict(lambda: 0)  # fact key -> total count
        self.ids = set()  # logged-in user ids observed for this visitor
        self.experiments = {}  # experiment name -> branch

    @property
    def id(self):
        """An arbitrary logged-in user id, or None if never logged in."""
        # Avoids building a throwaway list just to take one element.
        return next(iter(self.ids), None)

    @property
    def signup_ts(self):
        """Signup timestamp for this user, or 0 when unknown."""
        return self.all_signup_ts.get(self.id, 0)
class FactAggregation(object):
    """Result bundle produced by aggregate_facts_by_tokens."""

    def __init__(self, cutoff_user_id, browser_sessions, logged_in_users, uniques):
        # Smallest user id that signed up inside the aggregation window.
        self.cutoff_user_id = cutoff_user_id
        # utma/ip token -> User, for anonymous browser sessions.
        self.browser_sessions = browser_sessions
        # user id -> User, for authenticated users.
        self.logged_in_users = logged_in_users
        # token kind ('ip', 'session_key', 'utma', 'user') -> set of values.
        self.uniques = uniques
def aggregate_facts_by_tokens(fact_iter, experiment=None):
    """Replay a stream of fact rows and bucket events per visitor.

    fact_iter: zero-argument callable returning an iterable of fact dicts.
    experiment: optional experiment object; when given, facts from users not
        (yet) placed in that experiment are skipped.

    Returns a FactAggregation with per-token uniques, anonymous browser
    sessions (keyed by utma, falling back to ip) and logged-in users.
    """
    cutoff_user_id = float('inf')
    browser_sessions = collections.defaultdict(User)
    logged_in_users = collections.defaultdict(User)
    uniques = collections.defaultdict(set)
    for row in fact_iter():
        if 'ip' in row:
            # Drop known-bot traffic entirely.
            if is_bot(row['ip']):
                continue
        # Track distinct values for each identifying token kind.
        for key in ('ip', 'session_key', 'utma','user'):
            if key in row:
                uniques[key].add(row[key])
        # Collect the User objects this row should be attributed to
        # (one anonymous browser session and/or one logged-in user).
        users = []
        if row.get('ip') is not None:
            if row.get('utma') is not None:
                if row['utma'] not in browser_sessions:
                    # First time we've seen this google session
                    # Let's tie any previous requests from this IP to this utma session
                    browser_sessions[row['utma']] = browser_sessions[row['ip']]
                    del browser_sessions[row['ip']]
                users.append(browser_sessions[row['utma']])
            else:
                # We don't have any google session, use the ip instead
                users.append(browser_sessions[row['ip']])
        if 'user' in row:
            users.append(logged_in_users[row['user']])
        experiments = row.get('experiments', {})
        # Metric rows are keyed by their metric name, other rows by type.
        key = row.get('type') if row.get('type') != 'metric' else row.get('metric')
        if key == 'signup':
            User.all_signup_ts[row['user']] = row['ts']
        if key == 'logged_out_view':
            # We don't care about logged out / logged in
            key = 'view'
        if key == 'logged_out_infinite_scroll':
            key = 'infinite_scroll'
        for user in users:
            if experiment and not experiments.get(experiment.name) and not user.experiments.get(experiment.name):
                # Don't count facts prior to being placed in the experiment
                # Experiments don't (yet) carry over from logged out to logged in, need to fix this asap
                continue
            if not user.first_ts:
                user.first_ts = row.get('ts')
            user.experiments.update(experiments)
            if row.get('user'):
                user.ids.add(row['user'])
            if key == 'signup':
                # Remember the smallest user id that signed up in this window.
                cutoff_user_id = min(cutoff_user_id, row['user'])
            # Bucket counts by whole days since first event / since signup.
            dayify = lambda ts, first_ts: int((ts - first_ts) // 86400)
            user.day_bins[dayify(row['ts'], user.first_ts)][key] += 1
            if user.signup_ts:
                user.post_signup_bins[dayify(row['ts'], user.signup_ts)][key] += 1
            user.facts[key] += 1
    return FactAggregation(cutoff_user_id, browser_sessions, logged_in_users, uniques)
def gather_results(aggr, tests, users, new_users, basis=None):
    """See ab_results for a usage example.

    aggr: FactAggregation (only cutoff_user_id is read here).
    tests: list of (test_name, predicate) pairs applied to each user.
    users: mapping of token -> User to evaluate.
    new_users: when true, only count users created inside the window
        (user.id >= aggr.cutoff_user_id) plus anonymous users.
    basis: optional list of (branch_name, predicate) pairs; when given,
        counts are computed per branch and non-control branches get a
        z-test comparison against the 'control' branch.

    Returns a list (one entry per test) of lists of result dicts.
    """
    basis_counts = {}

    def populate_counts(counts, base_fun=None):
        # Tally, per test, how many qualifying users pass the predicate.
        for ip, user in users.items():
            # Skip users that did stuff like logging out then logging back
            # in as a different user.
            if len(user.ids) > 1:
                continue
            if new_users and user.id is not None and user.id < aggr.cutoff_user_id:
                continue
            if base_fun and not base_fun(user):
                continue
            for test_name, test_fun in tests:
                if test_fun(user):
                    counts[test_name] += 1

    if basis:
        for base_name, base_fun in basis:
            basis_counts[base_name] = counts = collections.defaultdict(lambda: 0)
            populate_counts(counts, base_fun=base_fun)
    else:
        basis_counts = collections.defaultdict(lambda: 0)
        populate_counts(basis_counts)

    results = []

    def get_result(test_name, basis_name=None):
        # Build one result row; basis_name is passed explicitly instead of
        # being read from the enclosing loop's variable.
        result = {
            'fact': test_name,
        }
        if basis_name is None:
            result['count'] = basis_counts[test_name]
            return result
        result.update({
            'branch': basis_name,
            'control': basis_name == 'control',
            'count': basis_counts[basis_name][test_name],
        })
        if basis_name != "control":
            vexp = basis_counts[basis_name][test_name]
            exp_total = basis_counts[basis_name]['total']
            vcontrol = basis_counts['control'][test_name]
            control_total = basis_counts['control']['total']
            # Two-proportion z-test of this branch against control.
            z = stats.z_test(vexp, exp_total, vcontrol, control_total)
            rate = lambda amount, total: float(amount) / total if total else 0
            exp_rate = rate(vexp, exp_total)
            control_rate = rate(vcontrol, control_total)
            perc_change = (exp_rate - control_rate) / float(control_rate) if control_rate else 0
            result.update({
                'confidence': stats.z_to_ci(z) * 100,
                'change': perc_change * 100,
                'z': z,
            })
        return result

    for test_name, test_fun in tests:
        test_results = []
        if basis:
            for basis_name, _base_fun in basis:
                test_results.append(get_result(test_name, basis_name=basis_name))
        else:
            test_results.append(get_result(test_name))
        results.append(test_results)
    return results
|
<reponame>The-Fireplace-Minecraft-Mods/AdventureMMO
package me.mrdaniel.adventuremmo.io.items;
import me.mrdaniel.adventuremmo.catalogtypes.skills.SkillType;
import me.mrdaniel.adventuremmo.catalogtypes.skills.SkillTypes;
import javax.annotation.Nonnull;
import java.util.Optional;
public class BlockData {
private final SkillType skill;
private final int exp;
public BlockData(@Nonnull final SkillType skill, final int exp) {
this.skill = skill;
this.exp = exp;
}
@Nonnull
public SkillType getSkill() {
return this.skill;
}
public int getExp() {
return this.exp;
}
@Nonnull
public String serialize() {
return this.skill.getId() + "," + this.exp;
}
@Nonnull
public static Optional<BlockData> deserialize(@Nonnull final String str) {
String[] s = str.split(",");
try {
return Optional.of(new BlockData(SkillTypes.of(s[0]).get(), Integer.parseInt(s[1])));
} catch (final Exception exc) {
return Optional.empty();
}
}
} |
<filename>client/src/components/Column/index.js
import React from 'react';
function Column(props) {
return (
<div
className={
props.width ? 'col-' + props.width + ' ' + props.className : 'col ' + props.className
}
>
{props.children}
</div>
);
}
export default Column;
|
class DataFrameMismatchException(Exception):
    """Raised when a dataframe read back does not match the one written."""
    pass
class RubiconException(Exception):
    """Generic rubicon validation failure (e.g. wrong dataframe flavour)."""
    pass
def validate_dataframe(read_dataframe, written_dataframe):
    """Check that a dataframe read back matches the one that was written.

    Raises DataFrameMismatchException when the ids differ, and
    RubiconException when the read dataframe is not flagged as dask.
    """
    ids_match = read_dataframe.id == written_dataframe.id
    if not ids_match:
        raise DataFrameMismatchException("Dataframes do not match")
    flavour = getattr(read_dataframe, 'df_type', None)
    if flavour != 'dask':
        raise RubiconException("df_type not set to dask")
/**
 * Angular directive that will call a function when the
 * control receives a blur event AND the value was changed.
 *
 * Version: 1.0
 */
(function () {
    'use strict';

    angular
        .module('purplefox.blurchanged', [])
        .directive('blurChanged', blurChanged);

    blurChanged.$inject = [];

    function blurChanged() {
        // Usage:
        //   <input blur-changed="fn">
        // The "blur-changed" expression must evaluate to a function; it is
        // invoked with the input's new value after a blur that follows a
        // change.
        var directive = {
            link: link,
            require: '?ngModel',
            restrict: 'A',
            scope: {
                blurChanged: '&'
            }
        };
        return directive;

        function link(scope, element, attrs, ngModel) {
            // Without ngModel there is no value to track — do nothing.
            if (!ngModel) { return; }

            var oldValue = null;

            element.bind('focus', function () {
                scope.$apply(function () {
                    // Save old value on focus so blur can detect a change.
                    oldValue = element.val();
                });
            });

            element.bind('blur', function () {
                scope.$apply(function () {
                    var newValue = element.val();
                    // Strict comparison: element.val() yields strings here,
                    // so != gains nothing and invites coercion surprises.
                    if (newValue !== oldValue) {
                        // Call the function only when value changed.
                        scope.blurChanged()(newValue);
                    }
                });
            });
        }
    }
})();
import type { AST_NODE_TYPES } from '../../ast-node-types';
import type { BaseNode } from '../../base/BaseNode';
import type { Identifier } from '../../expression/Identifier/spec';
import type { TSExternalModuleReference } from '../../special/TSExternalModuleReference/spec';
import type { EntityName } from '../../unions/EntityName';
/**
 * AST node for `import id = <reference>` declarations
 * (TypeScript import-equals syntax).
 */
export interface TSImportEqualsDeclaration extends BaseNode {
  type: AST_NODE_TYPES.TSImportEqualsDeclaration;
  /** The local binding introduced by the declaration. */
  id: Identifier;
  /** What is being aliased: an entity name or a `require('...')` reference. */
  moduleReference: EntityName | TSExternalModuleReference;
  /** Whether this is a type-only import (`import type id = ...`). */
  importKind: 'type' | 'value';
  /** True when written as `export import id = ...`. */
  isExport: boolean;
}
|
<reponame>jvirtanen/stirling<filename>core/src/main/java/stirling/fix/messages/fix42/chix/europe/ExecutionReport.java
/*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stirling.fix.messages.fix42.chix.europe;
import stirling.fix.messages.MessageHeader;
import stirling.fix.messages.MessageVisitor;
import stirling.fix.messages.Required;
import stirling.fix.tags.fix42.Account;
import stirling.fix.tags.fix42.AvgPx;
import stirling.fix.tags.fix42.ClOrdID;
import stirling.fix.tags.fix42.ClientID;
import stirling.fix.tags.fix42.ContraBroker;
import stirling.fix.tags.fix42.CumQty;
import stirling.fix.tags.fix42.Currency;
import stirling.fix.tags.fix42.ExecID;
import stirling.fix.tags.fix42.ExecRefID;
import stirling.fix.tags.fix42.ExecTransType;
import stirling.fix.tags.fix42.ExecType;
import stirling.fix.tags.fix42.LastPx;
import stirling.fix.tags.fix42.LastShares;
import stirling.fix.tags.fix42.LeavesQty;
import stirling.fix.tags.fix42.MaxFloor;
import stirling.fix.tags.fix42.MinQty;
import stirling.fix.tags.fix42.NoContraBrokers;
import stirling.fix.tags.fix42.OrdStatus;
import stirling.fix.tags.fix42.OrdType;
import stirling.fix.tags.fix42.OrderCapacity;
import stirling.fix.tags.fix42.OrderID;
import stirling.fix.tags.fix42.OrderQty;
import stirling.fix.tags.fix42.OrigClOrdID;
import stirling.fix.tags.fix42.Price;
import stirling.fix.tags.fix42.SecurityID;
import stirling.fix.tags.fix42.Symbol;
import stirling.fix.tags.fix42.Text;
import stirling.fix.tags.fix42.TimeInForce;
import stirling.fix.tags.fix42.TradeDate;
import stirling.fix.tags.fix42.TransactTime;
import stirling.fix.tags.fix42.chix.europe.IDSource;
import stirling.fix.tags.fix42.chix.europe.TradeLiquidityIndicator;
import stirling.fix.tags.fix42.LastCapacity;
import stirling.fix.tags.fix42.Side;
/**
 * FIX 4.2 ExecutionReport tailored for Chi-X Europe: registers the tag set
 * (and required/optional flags) this venue uses.
 */
public class ExecutionReport extends stirling.fix.messages.AbstractMessage implements stirling.fix.messages.ExecutionReport {
    public ExecutionReport(MessageHeader header) {
        super(header);
        // Field registration: tags default to required unless Required.NO
        // (or a conditional Required) is given.
        field(Account.Tag(), Required.NO);
        field(AvgPx.Tag());
        field(ClOrdID.Tag(), Required.NO);
        field(CumQty.Tag());
        field(Currency.Tag(), Required.NO);
        field(ExecID.Tag());
        field(ExecRefID.Tag(), Required.NO);
        field(ExecTransType.Tag());
        field(IDSource.Tag(), Required.NO);
        field(LastCapacity.Tag(), Required.NO);
        field(LastPx.Tag());
        field(LastShares.Tag());
        field(OrderID.Tag());
        field(OrderQty.Tag());
        field(OrdStatus.Tag());
        field(OrdType.Tag(), Required.NO);
        field(OrigClOrdID.Tag(), Required.NO);
        field(Price.Tag(), Required.NO);
        field(OrderCapacity.Tag(), Required.NO);
        // SecurityID is required exactly when an IDSource value is present.
        field(SecurityID.Tag(), new Required() {
            @Override public boolean isRequired() {
                return hasValue(IDSource.Tag());
            }
        });
        field(Side.Tag());
        field(Symbol.Tag());
        field(Text.Tag(), Required.NO);
        field(TimeInForce.Tag(), Required.NO);
        field(TransactTime.Tag());
        field(TradeDate.Tag(), Required.NO);
        field(ClientID.Tag(), Required.NO);
        field(MinQty.Tag(), Required.NO);
        field(MaxFloor.Tag(), Required.NO);
        field(ExecType.Tag());
        field(LeavesQty.Tag());
        field(NoContraBrokers.Tag(), Required.NO);
        field(ContraBroker.Tag(), Required.NO);
        field(TradeLiquidityIndicator.Tag(), Required.NO);
    }

    /** Standard visitor dispatch. */
    @Override public void apply(MessageVisitor visitor) {
        visitor.visit(this);
    }
}
|
<gh_stars>1-10
import os
import cv2
from tqdm import tqdm
import pickle
import numpy as np
from math import sqrt
from . import config
from . import keras_rmac
def get_frame(frame_index, video):
    """
    Given a frame position number and the videocapture variable, returns the frame as an image object (numpy array)
    """
    # 1 == cv2.CAP_PROP_POS_FRAMES: seek to the requested frame first.
    video.set(1,frame_index)
    # NOTE(review): the success flag is discarded, so the returned image may
    # be None when the read fails (e.g. index past the end) — callers must
    # tolerate that.
    _, img = video.read()
    return img
# 3x3 Fourier-like texture templates (the centre coefficient is inserted
# below) correlated against each HSV channel by color_texture_moments.
fouriers = [
    [1,1,1,1,1,1,1,1],
    [-1,1,-1,1,1,-1,1,-1],
    [-sqrt(2)/2, 0, sqrt(2)/2, -1, 1, -sqrt(2)/2,0,sqrt(2)/2],
    [-sqrt(2)/2, -1, -sqrt(2)/2, 0, 0, sqrt(2)/2, 1, sqrt(2)/2],
    [0, -1, 0, 1, 1, 0, -1, 0],
    [1,0,-1, 0, 0, -1, 0, 1],
    [sqrt(2)/2, 0 , -sqrt(2)/2, -1, 1, sqrt(2)/2, 0, -sqrt(2)/2],
    [-sqrt(2)/2, 1, -sqrt(2)/2, 0, 0, sqrt(2)/2, -1, sqrt(2)/2]
]
# Insert the (zero) centre coefficient and reshape each 9-element row into a
# 3x3 float32 kernel usable by cv2.matchTemplate.
for i,f in enumerate(fouriers):
    f.insert(4,0)
    fouriers[i] = np.array(f)
    fouriers[i] = fouriers[i].reshape((3,3)).astype('float32')
# Per-template normalisation constants: the correlation response against a
# maximal (255) input, used to scale responses into a comparable range.
max_vals = []
for f in fouriers:
    m = np.array([255])
    m = cv2.matchTemplate(m.astype('float32'),f, cv2.TM_CCORR).clip(0,255)
    max_vals.append(cv2.matchTemplate(m.astype('float32'),f, cv2.TM_CCORR)[0][0])
def color_texture_moments(img):
    """Return 48 colour-texture moments for a BGR image: for each of the 3
    HSV channels and each of the 8 Fourier templates, the mean and std of
    the normalised correlation response."""
    img = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
    result = []
    for channel in range(0,3):
        for template, max_val in zip(fouriers, max_vals):
            r = cv2.matchTemplate(img[:,:,channel].astype('float32'),template, cv2.TM_CCORR)
            # Scale by the template's maximal response so values are comparable.
            r = r / max_val
            result.append(r.mean())
            result.append(r.std())
    return result
def cnn_feature_vectors(img):
    """Return the RMAC deep feature vector for img, computed by the bundled
    keras_rmac model."""
    feature_vector = keras_rmac.rmac.to_feature_vector(img)
    return feature_vector
def get_img_color_hist(image, binsize):
    """
    Given an image as input, output its color histogram as a numpy array.
    binsize determines the number of bins per channel, so the result has
    (number of channels) * binsize entries.
    """
    chans = cv2.split(image)
    main = np.zeros((0,1))
    # loop over the image channels
    for chan in chans:
        # create a histogram for the current channel and
        # concatenate the resulting histograms for each
        # channel
        hist = cv2.calcHist([chan], [0], None, [binsize], [0, 256])
        main = np.append(main,hist)
    #normalize so sum of all values equals 1 (per channel sums to 1/channels)
    main = main / (image.shape[0] * image.shape[1])
    return main.astype('float32')
def color_hist(img):
    """Return the 100-bin-per-channel colour histogram feature for img."""
    return get_img_color_hist(img, 100)
def construct_feature_vectors(video_fn, result_dir_name, vector_function):
    """
    Function that converts a video file to a list of feature vectors,
    which it then writes to a pickle file.

    video_fn: path to the input video file.
    result_dir_name: sub-directory (next to the video) for the pickle output.
    vector_function: a callable taking an image, or one of the shorthands
        "CH" (color histogram), "CTM" (color texture moments) or
        "CNN" (RMAC deep features).
    """
    base_video_fn = os.path.basename(video_fn)
    video = cv2.VideoCapture(video_fn)
    series_dir = os.path.dirname(video_fn)
    vectors_fn = os.path.join(series_dir, result_dir_name, base_video_fn + ".p")

    # Resolve string shorthands; a callable passed directly is used as-is.
    # Previously an unknown string silently fell through and only failed
    # later with an obscure TypeError — fail fast with a clear error instead.
    dispatch = {
        "CH": color_hist,
        "CTM": color_texture_moments,
        "CNN": cnn_feature_vectors,
    }
    vector_function = dispatch.get(vector_function, vector_function)
    if not callable(vector_function):
        raise ValueError("unknown vector_function: %r" % (vector_function,))

    # make sure folder of experimentname exists or create otherwise
    os.makedirs(os.path.dirname(vectors_fn), exist_ok=True)

    # check if vectors already exist, else create them and save to pickle
    if not os.path.isfile(vectors_fn):
        # construct the feature vectors from frames sampled across the video
        feature_vectors = []
        total = int(video.get(cv2.CAP_PROP_FRAME_COUNT) / config.FRAMEJUMP) - 1
        # apply the vector function for every xth frame determined by framejump
        for i in tqdm(range(total)):
            img = get_frame(i * config.FRAMEJUMP, video)
            feature_vector = vector_function(img)
            feature_vectors.append(feature_vector)
        # save to pickle file (protocol 2 keeps the file py2-readable)
        with open(vectors_fn, 'wb') as handle:
            pickle.dump(feature_vectors, handle, protocol=2)
#!/bin/sh
# Utility to update the EEPROM image (pieeprom.bin) and signature
# (pieeprom.sig) with a new EEPROM config.
#
# pieeprom.original.bin - The source EEPROM from rpi-eeprom repo
# boot.conf - The bootloader config file to apply.

set -e

script_dir="$(cd "$(dirname "$0")" && pwd)"

# Quote every ${script_dir} expansion so the tool works when the script
# lives in a path containing spaces.
"${script_dir}/rpi-eeprom-config" --config "${script_dir}/boot.conf" --out "${script_dir}/pieeprom.bin" "${script_dir}/pieeprom.original.bin"
sha256sum "${script_dir}/pieeprom.bin" | awk '{print $1}' > "${script_dir}/pieeprom.sig"
echo "ts: $(date -u +%s)" >> "${script_dir}/pieeprom.sig"
import SwiftUI

struct ContentView: View {
    /// Current text colour; toggled between black and red on each tap.
    @State private var color = Color.black

    var body: some View {
        Text("Tap to change color")
            .foregroundColor(color)
            .onTapGesture {
                if self.color == Color.black {
                    self.color = Color.red
                } else {
                    self.color = Color.black
                }
            }
    }
}
var express = require('express');
// Create the server application (replaces http.createServer).
var app = express();
// In Express, serving static resources is a single API call.
// Mount the directories below; once mounted, everything inside them is
// reachable directly, e.g. /public/xx maps to ./public/xx.
app.use('/public/', express.static('./public/'));
app.use('/static/', express.static('./static/'));
app.use('/node_modules/', express.static('./node_modules/'));
// Handle GET / with this callback.
app.get('/', function(req, res){
    res.send('hello express');
});
app.listen(3000, function(){
    console.log('app is running');
});
/*
* Tencent is pleased to support the open source community by making Blueking Container Service available.
* Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under,
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package clusterops
import (
"context"
"fmt"
"math/rand"
"strconv"
"strings"
"time"
"github.com/Tencent/bk-bcs/bcs-services/bcs-cluster-manager/internal/options"
"github.com/Tencent/bk-bcs/bcs-services/bcs-cluster-manager/internal/store"
"github.com/Tencent/bk-bcs/bcs-common/common/modules"
k8scorecliset "k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
)
// K8SOperator operator of k8s.
// It resolves cluster credentials from the store and builds kubernetes
// clients according to each credential's connect mode.
type K8SOperator struct {
	opt   *options.ClusterManagerOptions
	model store.ClusterManagerModel
}

// NewK8SOperator create operator of k8s backed by the given options and
// cluster-manager data store.
func NewK8SOperator(opt *options.ClusterManagerOptions, model store.ClusterManagerModel) *K8SOperator {
	return &K8SOperator{
		opt:   opt,
		model: model,
	}
}
// GetClusterClient get cluster client.
// It builds a kubernetes clientset for clusterID from the stored credential,
// connecting either through the clustermanager tunnel or directly to a
// (randomly chosen) apiserver address.
func (ko *K8SOperator) GetClusterClient(clusterID string) (k8scorecliset.Interface, error) {
	cred, found, err := ko.model.GetClusterCredential(context.TODO(), clusterID)
	if err != nil {
		return nil, err
	}
	if !found {
		return nil, fmt.Errorf("cluster credential not found of %s", clusterID)
	}
	cfg := &rest.Config{}
	switch cred.ConnectMode {
	case modules.BCSConnectModeTunnel:
		if len(ko.opt.ClientCert) != 0 && len(ko.opt.ClientCa) != 0 && len(ko.opt.ClientKey) != 0 {
			// mTLS towards the clustermanager tunnel endpoint.
			cfg.Host = "https://" + ko.opt.Address + ":" + strconv.Itoa(int(ko.opt.HTTPPort)) +
				"/clustermanager/clusters/" + clusterID
			cfg.TLSClientConfig = rest.TLSClientConfig{
				Insecure: false,
				CertFile: ko.opt.ClientCert,
				CAFile:   ko.opt.ClientCa,
				KeyFile:  ko.opt.ClientKey,
			}
		} else {
			// No client certs configured: plain HTTP through the tunnel.
			cfg.Host = "http://" + ko.opt.Address + ":" + strconv.Itoa(int(ko.opt.HTTPPort)) +
				"/clustermanager/clusters/" + clusterID
			cfg.TLSClientConfig = rest.TLSClientConfig{
				Insecure: true,
			}
		}
	case modules.BCSConnectModeDirect:
		// strings.Split never returns a zero-length slice (splitting ""
		// yields [""]), so filter out empty entries before the length check
		// or an empty ServerAddress would slip through with an empty Host.
		var addressList []string
		for _, addr := range strings.Split(cred.ServerAddress, ",") {
			if addr = strings.TrimSpace(addr); addr != "" {
				addressList = append(addressList, addr)
			}
		}
		if len(addressList) == 0 {
			return nil, fmt.Errorf("error credential server addresses %s of cluster %s", cred.ServerAddress, clusterID)
		}
		// get a random server
		rand.Seed(time.Now().Unix())
		cfg.Host = addressList[rand.Intn(len(addressList))]
		cfg.TLSClientConfig = rest.TLSClientConfig{
			Insecure: false,
			CAData:   []byte(cred.CaCertData),
		}
		cfg.BearerToken = cred.UserToken
	default:
		return nil, fmt.Errorf("invalid credential mode %s of cluster %s", cred.ConnectMode, clusterID)
	}
	return k8scorecliset.NewForConfig(cfg)
}
|
import nltk

# Download both resources tag_sentence depends on: 'punkt' backs
# nltk.word_tokenize and the tagger model backs nltk.pos_tag (the original
# only fetched the tagger, so word_tokenize failed on a fresh install).
# quiet=True suppresses the repeated download chatter on every import.
nltk.download('punkt', quiet=True)
nltk.download('averaged_perceptron_tagger', quiet=True)


def tag_sentence(sentence):
    """Tokenize a sentence and return a list of (token, POS-tag) pairs."""
    words = nltk.word_tokenize(sentence)
    tags = nltk.pos_tag(words)
    return tags
<reponame>sajanthomas01/app
import React, { useState, useEffect, useMemo, useContext } from "react";
import useScript from "../../Hooks/useScript";
import { makeStyles } from "@material-ui/core/styles";
import { leaveCall } from "../../Modules/eventSessionOperations";
import NoVideoImage from "../../Assets/illustrations/undraw_video_call_kxyp.svg";
import { Typography } from "@material-ui/core";
import { useSelector, shallowEqual } from "react-redux";
import {
getUser,
getUserGroup,
getSessionId,
getUserId,
getEventSessionDetails,
getFeatureDetails,
} from "../../Redux/eventSession";
import ReactPlayer from "react-player";
import CircularProgress from "@material-ui/core/CircularProgress";
// import JitsiContext from "../../Contexts/JitsiContext";
import { trackPage } from "../../Modules/analytics";
import {
getJistiServer,
// getJitsiOptions,
getJistiDomain,
isMeetJitsi
} from "../../Modules/jitsi";
import {
// setOffline,
setUserCurrentLocation
} from "../../Modules/userOperations";
// import { useHistory } from "react-router-dom";
import { FEATURES } from "../../Modules/features";
import { VERTICAL_NAV_OPTIONS } from "../../Contexts/VerticalNavBarContext";
// import { usePrevious } from "react-use";
// import TechnicalCheckContext from "../../Contexts/TechnicalCheckContext";
import AudioVideoCheckDialog from "../../Components/EventSession/AudioVideoCheckDialog";
import JitsiPlayerComponent from "../../Components/EventSession/JitsiPlayerComponent";
import JitsiContext from "../../Contexts/JitsiContext";
// JSS styles for the conference-room video views.
const useStyles = makeStyles((theme) => ({
  // Fills the parent with the embedded video element.
  videoContainer: {
    width: "100%",
    height: "100%"
  },
  // Absolutely positioned wrapper covering the whole viewport area.
  root: {
    position: "absolute",
    top: 0,
    bottom: 0,
    right: 0,
    left: 0
  },
  // Placeholder illustration shown when no video is available.
  noVideoImage: {
    maxWidth: "100%",
    maxHeight: "100%",
    position: "absolute",
    bottom: 0,
    margin: "auto",
    width: "100%",
    height: "60%"
  },
  // 16:9 letterboxed container for the ReactPlayer embed.
  reactPlayerContainer: {
    width: "100%",
    height: "100%",
    position: "relative",
    paddingTop: "56.25%" /* Player ratio: 100 / (1280 / 720) */,
    backgroundColor: "black"
    // display: "flex",
    // alignItems: "center",
  },
  // Centers the player inside its letterboxed container.
  reactPlayer: {
    position: "absolute",
    margin: 0,
    top: "50%",
    left: "50%",
    transform: "translate(-50%, -50%)"
  }
}));
/**
 * Main Stage conference room view.
 *
 * Renders the session's main-stage content depending on
 * `eventSessionDetails.conferenceVideoType`:
 *  - "JITSI":    embedded Jitsi call plus an audio/video check dialog,
 *  - "YOUTUBE":  a YouTube livestream iframe,
 *  - "FACEBOOK": a Facebook video rendered through ReactPlayer,
 *  - otherwise:  a "livestream not configured" placeholder.
 */
export default () => {
  const classes = useStyles();
  // ReactPlayer (Facebook) shows a spinner until the player reports ready.
  const [loadingPlayer, setLoadingPlayer] = useState(true);
  const userId = useSelector(getUserId);
  const user = useSelector(getUser);
  const userGroup = useSelector(getUserGroup, shallowEqual);
  const sessionId = useSelector(getSessionId);
  const eventSessionDetails = useSelector(getEventSessionDetails, shallowEqual);
  // Load the Jitsi external API script from the session's configured server.
  const [loaded, error] = useScript(
    getJistiServer(eventSessionDetails) + "external_api.js"
  );
  useEffect(() => {
    trackPage("ConferenceRoom/" + sessionId);
  }, [sessionId]);
  const { jitsiApi } = useContext(JitsiContext);
  // If the stage switches away from Jitsi, hang up and dispose the running call.
  useEffect(() => {
    if (eventSessionDetails.conferenceVideoType !== "JITSI") {
      if (jitsiApi) {
        jitsiApi.executeCommand("hangup");
        jitsiApi.dispose();
      }
    }
  }, [eventSessionDetails.conferenceVideoType, jitsiApi]);
  // When the user leaves the Jitsi call, release the call slot and send them
  // back to the lobby.
  const handleCallEnded = React.useCallback(async () => {
    await leaveCall(sessionId, userGroup, userId);
    // await setOffline(sessionId, userGroup);
    // history.push(routes.EVENT_SESSION(sessionId));
    setUserCurrentLocation(sessionId, VERTICAL_NAV_OPTIONS.lobby);
  }, [sessionId, userGroup, userId]);
  const removeJitsiLogoFeature = useSelector(
    getFeatureDetails(FEATURES.REMOVE_JITSI_LOGO),
    shallowEqual
  );
  const customNavBarFeature = useSelector(
    getFeatureDetails(FEATURES.CUSTOM_NAV_BAR),
    shallowEqual
  );
  // Stage title can be overridden by the custom nav bar feature config.
  const mainStageTitle = useMemo(() => {
    if (
      customNavBarFeature &&
      customNavBarFeature[VERTICAL_NAV_OPTIONS.mainStage]
    ) {
      return customNavBarFeature[VERTICAL_NAV_OPTIONS.mainStage].label;
    }
    return "Main Stage";
  }, [customNavBarFeature]);
  // Room name is namespaced per environment via REACT_APP_JITSI_ROOM_PREFIX.
  const prefix = process.env.REACT_APP_JITSI_ROOM_PREFIX;
  const prefixStr = prefix !== undefined ? `-${prefix}` : "";
  const roomName = "veertly" + prefixStr + "-" + sessionId;
  const domain = getJistiDomain(eventSessionDetails);
  const subject = mainStageTitle;
  // Only meet.jit.si shows the logo, and the feature flag can remove it.
  const showJitsiLogo =
    isMeetJitsi(domain) &&
    (!removeJitsiLogoFeature || !removeJitsiLogoFeature.enabled);
  if (error) {
    console.log(error);
    return <p>Error :(</p>;
  }
  if (!loaded) return <div id="conference-container">Loading...</div>;
  if (loaded) {
    const getYoutubeFrame = () => {
      let videoId = eventSessionDetails.conferenceRoomYoutubeVideoId;
      return (
        <div className={classes.root}>
          <iframe
            className={classes.videoContainer}
            src={`https://www.youtube.com/embed/${videoId}?autoplay=1&fs=0&modestbranding=0`}
            frameBorder="0"
            allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture"
            allowFullScreen
            title="livestream"
          ></iframe>
        </div>
      );
    };
    const getFacebookFrame = () => {
      let facebookVideoId = eventSessionDetails.conferenceRoomFacebookVideoId;
      let facebookUrl = eventSessionDetails.conferenceRoomFacebookLink;
      // Prefer a full URL if configured; otherwise build one from the video id.
      let url = facebookUrl
        ? facebookUrl
        : `https://www.facebook.com/facebook/videos/${facebookVideoId}`;
      return (
        <div className={classes.root}>
          <div className={classes.reactPlayerContainer}>
            <ReactPlayer
              url={url}
              width="100%"
              height="100%"
              // height="none"
              className={classes.reactPlayer}
              // playing
              controls={true}
              onReady={() => setLoadingPlayer(false)}
            />
            {loadingPlayer && (
              <div className={classes.reactPlayer}>
                <CircularProgress color="secondary" />
              </div>
            )}
          </div>
        </div>
      );
    };
    switch (eventSessionDetails.conferenceVideoType) {
      case "YOUTUBE":
        return getYoutubeFrame();
      case "FACEBOOK":
        return getFacebookFrame();
      case "JITSI":
        return (
          <>
            <div className={classes.root}>
              <JitsiPlayerComponent
                avatarUrl={user.avatarUrl}
                displayName={user.firstName + " " + user.lastName}
                sessionId={sessionId}
                // containerId= "#conference-container"
                domain={domain}
                showJitsiLogo={showJitsiLogo}
                subject={subject}
                roomName={roomName}
                callEndedCb={handleCallEnded}
              />
            </div>
            <AudioVideoCheckDialog
              title="Main Stage conference call"
              subtitle="You are about to enter the main stage conference call. Please ensure that mic and camera are working properly."
              sessionId={sessionId}
              showClose
              onCloseClicked={() => {
                setUserCurrentLocation(sessionId, VERTICAL_NAV_OPTIONS.lobby);
              }}
            />
          </>
        );
      default:
        return (
          <div className={classes.videoContainer}>
            <Typography align="center" gutterBottom style={{ marginTop: 64 }}>
              Livestream not correctly configured...
            </Typography>
            <Typography variant="caption" display="block" align="center">
              Please contact the event organizer or Veertly team
            </Typography>
            <img
              alt="No Video available"
              src={NoVideoImage}
              className={classes.noVideoImage}
            />
          </div>
        );
    }
  }
};
|
<reponame>eddie4941/servicetalk
/*
* Copyright © 2021 Apple Inc. and the ServiceTalk project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.servicetalk.buffer.api;
import io.netty.util.AsciiString;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import java.nio.charset.StandardCharsets;
import static io.servicetalk.buffer.api.CharSequences.newAsciiString;
/*
* This benchmark compares CharSequences#parseLong(CharSequence) and Long.parseLong(String):
*
* Benchmark (value) Mode Cnt Score Error Units
* javaParseLongString -9223372036854775808 thrpt 5 24524373.658 ± 490417.495 ops/s
* stParseLongString -9223372036854775808 thrpt 5 19168467.144 ± 594280.502 ops/s
*
* javaParseLongString 9223372036854775807 thrpt 5 15573374.506 ± 1049847.936 ops/s
* stParseLongString 9223372036854775807 thrpt 5 16697590.692 ± 231851.871 ops/s
*
* javaParseLongAsciiBuffer -9223372036854775808 thrpt 5 10066121.790 ± 117298.619 ops/s
* stParseLongAsciiBuffer -9223372036854775808 thrpt 5 18155698.684 ± 436706.324 ops/s
*
* javaParseLongAsciiBuffer 9223372036854775807 thrpt 5 10730908.955 ± 116656.679 ops/s
* stParseLongAsciiBuffer 9223372036854775807 thrpt 5 19615079.368 ± 459852.132 ops/s
*
* javaParseLongAsciiString -9223372036854775808 thrpt 5 17546166.613 ± 444219.547 ops/s
* stParseLongAsciiString -9223372036854775808 thrpt 5 19169592.065 ± 499213.146 ops/s
*
* javaParseLongAsciiString 9223372036854775807 thrpt 5 22611841.803 ± 503643.380 ops/s
* stParseLongAsciiString 9223372036854775807 thrpt 5 21140372.163 ± 2921605.423 ops/s
*
*
*
* javaParseLongString -8192 thrpt 5 69974528.501 ± 6167380.442 ops/s
* stParseLongString -8192 thrpt 5 73735070.747 ± 2968101.803 ops/s
*
* javaParseLongString 8192 thrpt 5 70138556.799 ± 918507.526 ops/s
* stParseLongString 8192 thrpt 5 66549636.755 ± 1023126.881 ops/s
*
* javaParseLongAsciiBuffer -8192 thrpt 5 15418127.631 ± 271577.020 ops/s
* stParseLongAsciiBuffer -8192 thrpt 5 58372951.121 ± 920176.976 ops/s
*
* javaParseLongAsciiBuffer 8192 thrpt 5 15203126.170 ± 289559.904 ops/s
* stParseLongAsciiBuffer 8192 thrpt 5 56709314.826 ± 813985.642 ops/s
*
* javaParseLongAsciiString -8192 thrpt 5 70984579.239 ± 1614728.648 ops/s
* stParseLongAsciiString -8192 thrpt 5 68602480.185 ± 1052183.360 ops/s
*
* javaParseLongAsciiString 8192 thrpt 5 75324292.593 ± 592329.213 ops/s
* stParseLongAsciiString 8192 thrpt 5 80748587.669 ± 1076241.647 ops/s
*/
@Fork(value = 1)
@State(Scope.Benchmark)
@Warmup(iterations = 5, time = 3)
@Measurement(iterations = 5, time = 3)
@BenchmarkMode(Mode.Throughput)
/**
 * JMH benchmark comparing {@code CharSequences.parseLong(CharSequence)}
 * against {@code Long.parseLong(String)} for String, AsciiBuffer and Netty
 * AsciiString inputs. Results are recorded in the file header comment.
 */
public class CharSequencesParseLongBenchmark {

    // Inputs cover Long.MIN_VALUE/MAX_VALUE extremes plus short +/- values.
    @Param({"-9223372036854775808", "9223372036854775807", "-8192", "8192"})
    private String value;

    // ServiceTalk ascii CharSequence built from the same value.
    private CharSequence asciiBuffer;
    // Netty AsciiString built from the same value.
    private CharSequence asciiString;

    @Setup(Level.Trial)
    public void setup() {
        asciiBuffer = newAsciiString(value);
        asciiString = new AsciiString(value.getBytes(StandardCharsets.US_ASCII));
    }

    @Benchmark
    public long javaParseLongString() {
        return Long.parseLong(value);
    }

    @Benchmark
    public long stParseLongString() {
        return CharSequences.parseLong(value);
    }

    // toString() conversion cost is intentionally included in the java variants
    // below: it is what a caller holding a CharSequence would have to pay.
    @Benchmark
    public long javaParseLongAsciiBuffer() {
        return Long.parseLong(asciiBuffer.toString());
    }

    @Benchmark
    public long stParseLongAsciiBuffer() {
        return CharSequences.parseLong(asciiBuffer);
    }

    @Benchmark
    public long javaParseLongAsciiString() {
        return Long.parseLong(asciiString.toString());
    }

    @Benchmark
    public long stParseLongAsciiString() {
        return CharSequences.parseLong(asciiString);
    }
}
|
def convert_camel_to_snake(str):
    """Convert a CamelCase (or mixedCase) string to snake_case.

    Each uppercase letter is lowercased and prefixed with an underscore
    (except a leading uppercase letter, which is only lowercased).

    Fixes: the original raised IndexError on an empty string, and built the
    result with repeated string concatenation (O(n^2)); this version joins a
    list of pieces instead.

    Note: the parameter keeps its original name ``str`` for backward
    compatibility with keyword callers, although it shadows the builtin
    inside this function.
    """
    if not str:
        return ""
    pieces = [str[0].lower()]
    for char in str[1:]:
        if char.isupper():
            pieces.append("_" + char.lower())
        else:
            pieces.append(char)
    return "".join(pieces)

print(convert_camel_to_snake("GivenString"))
#!/bin/bash -e
#
# SPDX-FileCopyrightText: 2021 SAP SE or an SAP affiliate company and Gardener contributors
#
# SPDX-License-Identifier: Apache-2.0

# Generates a self-signed CA plus a server certificate for the
# gardenlogin-controller-manager using cfssl/cfssljson, writing the PEM
# files into the blueprint's secret directory.

path_tls_output="../.landscaper/blueprint/config/secret/tls"
path_tls_config="../tls"
ca_name="gardenlogin-ca"
cert_name="gardenlogin-controller-manager-tls"

# Run relative to this script's directory so the relative paths above resolve.
cd "$(dirname "$0")"

# Create the CA certificate and key from the CSR definition.
cfssl gencert \
  -initca "$path_tls_config/$ca_name-csr.json" | cfssljson -bare "$path_tls_output/$ca_name" -

# Issue the server certificate signed by the CA generated above.
cfssl gencert \
  -profile=server \
  -ca="$path_tls_output/$ca_name.pem" \
  -ca-key="$path_tls_output/$ca_name-key.pem" \
  -config="$path_tls_config/ca-config.json" \
  "$path_tls_config/$cert_name-config.json" | cfssljson -bare "$path_tls_output/$cert_name"

# cleanup csr files (paths quoted — the originals were unquoted and would
# break on whitespace/glob characters in the output path)
rm "$path_tls_output/$ca_name.csr"
rm "$path_tls_output/$cert_name.csr"
package ro.ase.acs.interfacesNoSQL;
/**
 * Contract for releasing a MongoDB client connection.
 *
 * NOTE(review): {@code MongoClient} is not imported anywhere in this file —
 * presumably {@code com.mongodb.MongoClient} (or
 * {@code com.mongodb.client.MongoClient}); confirm and add the import, as
 * this compilation unit cannot compile without it.
 */
public interface CloseConnection {
    // Closes the given client, releasing its underlying resources.
    public void close(MongoClient mongoClient);
}
|
<filename>app/controllers/labels_controller.rb
# CRUD controller for record labels.
#
# NOTE(review): +label_params+ is not defined in this file (it was already
# referenced by #update); presumably it is provided by a parent controller
# or concern — confirm.
class LabelsController < ApplicationController
  # GET /labels — all labels, alphabetical by title.
  def index
    @labels = Label.order(:title)
  end

  # GET /labels/:id — eager-loads artists to avoid N+1 queries in the view.
  def show
    @label = Label.includes(:artists).find(params[:id])
  end

  def new
    @label = Label.new
  end

  # POST /labels
  def create
    # Fixed: use the same strong-parameter filter as #update instead of the
    # raw params hash — mass assignment from raw params raises
    # ActiveModel::ForbiddenAttributesError on Rails 4+ and is unsafe.
    @label = Label.new label_params
    if @label.save
      redirect_to label_path(@label)
    else
      render :new
    end
  end

  # GET /labels/:id/edit — deliberately renders the :new template, which
  # holds the shared form.
  def edit
    @label = Label.find params[:id]
    render :new
  end

  # PATCH/PUT /labels/:id
  def update
    @label = Label.find params[:id]
    if @label.update(label_params)
      redirect_to label_path(@label)
    else
      render :new
    end
  end

  # DELETE /labels/:id — always redirects to the index with a status notice.
  def destroy
    message = if Label.destroy(params[:id])
      "Label destroyed!"
    else
      "Couldn't destroy label!"
    end
    redirect_to labels_path, notice: message
  end
end
|
#!/usr/bin/env bash

# Resolve the directory containing this script so it can be run from anywhere.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Build the Pascal VOC training list (2007+2012 trainval) and validation list
# (2007 test, unshuffled). $DIR is quoted — the originals were unquoted and
# would break on paths containing spaces.
python "$DIR/prepare_dataset.py" --dataset pascal --year 2007,2012 --set trainval --target "$DIR/../data/train.lst"
python "$DIR/prepare_dataset.py" --dataset pascal --year 2007 --set test --target "$DIR/../data/val.lst" --shuffle False

# make lit voc rec for debug
#python $DIR/prepare_dataset.py --dataset pascal --year 2007 --set test --target $DIR/../data_voc_lit/train.lst
#python $DIR/prepare_dataset.py --dataset pascal --year 2007 --set test --target $DIR/../data_voc_lit/val.lst --shuffle False
|
// Registers the single-page sign-up controller with the app's lazy
// controller provider. The body is currently empty and the injected
// dependencies are unused — NOTE(review): presumably a placeholder awaiting
// implementation; confirm before removing the unused injections.
myApp.controllerProvider.register('SignUpSingleCtrl', function($scope, userService, settings, $timeout) {
});
#!/bin/sh
#Example of if-down.sh script to be used with mpd5
#mpd5 call script with options:
#interface proto local-ip remote-ip authname peer-address
#example:
#command "/urs/local/etc/mpd5/if-down.sh ng0 inet 10.3.23.1/32 10.3.23.10 '-' '10.0.23.2'"

logger "$0 called with parameters: $@"

# Remote networks expected to be routed over the tunnel, per address family.
# The variable name suffix ("inet"/"inet6") matches the protocol in $2.
remote_inet="1.1.1.0/24"
remote_inet6="2001:db8:1::1 -prefixlen 64"

# Build and run a route-deletion command for the address family in $2 by
# expanding the matching remote_* variable through eval.
# NOTE(review): ${remote_$2} is not a valid parameter expansion — the outer
# shell will report "bad substitution" before eval even runs; this likely
# needs the dollar escaped (\${remote_$2}) like the occurrence inside cmd=.
# NOTE(review): the log message always prints ${remote_inet}, even for the
# inet6 case — presumably it should use the expanded per-family value.
# NOTE(review): "return" is only valid inside a function or a sourced
# script; when executed as a standalone hook this should presumably be
# "exit" — confirm how mpd5 invokes it.
eval "
if ! route get -net -\$2 ${remote_$2}; then
logger "Route ${remote_inet} not in table"
return 0
else
cmd=\"route del \${remote_$2} \$4\"
fi
"

# Execute the composed deletion command and log the outcome.
if $cmd; then
logger "if-down: ${cmd} succesfull"
return 0
else
logger "if-down: ${cmd} failed"
return 1
fi
package com.telpoo.frame.model;
import java.util.ArrayList;
import android.content.Context;
import com.telpoo.frame.database.BaseDBSupport;
/**
 * Callback contract implemented by components that launch background tasks:
 * provides the task with a Context and database helper, and receives
 * success/failure/progress notifications identified by a task-type code.
 */
public interface TaskListener {
    /** @return the Android context the task should operate in. */
    Context getContext();

    /** @return the database helper available to the task. */
    BaseDBSupport getDBSupport();

    /**
     * Invoked when the task finishes successfully.
     *
     * @param taskType identifier of the task that completed
     * @param list     result rows produced by the task (element type varies per task)
     * @param result   textual result or status message
     */
    void onSuccess(int taskType, ArrayList<?> list, String result);

    /**
     * Invoked when the task fails.
     *
     * @param taskType identifier of the task that failed
     * @param result   textual error message
     */
    void onFail(int taskType, String result);

    /**
     * Reports intermediate progress.
     *
     * @param taskType identifier of the running task
     * @param progress progress value (units defined by the task)
     */
    void onProgress(int taskType, int progress);
}
|
import { ThemingProps } from "@chakra-ui/system";
import { Dict } from "@chakra-ui/utils";
import { ThemeExtension } from "../extend-theme";
/**
 * Theme extension that applies a default `variant` to the given component
 * scales (or, when `components` is omitted, to every component).
 */
export declare function withDefaultVariant({ variant, components, }: {
    variant: ThemingProps["variant"];
    components?: string[] | Dict;
}): ThemeExtension;
//# sourceMappingURL=with-default-variant.d.ts.map |
<reponame>skyward-er/skyward-boardcore
/* Copyright (c) 2018 Skyward Experimental Rocketry
* Author: <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#ifndef TRANSCEIVER_H
#define TRANSCEIVER_H

// Fixed: these headers are required for uint8_t, size_t and ssize_t, which
// were previously used without any include.
#include <cstddef>
#include <cstdint>
#include <sys/types.h>  // ssize_t (POSIX)

/**
 * Abstract interface for a blocking packet transceiver (e.g. a radio link).
 * Implementations provide synchronous send and receive of raw byte packets.
 */
class Transceiver
{
public:
    Transceiver() {}
    virtual ~Transceiver() {}

    /**
     * Send a packet.
     * The function must block until the packet is sent (successfully or not).
     *
     * @param pkt     Pointer to the packet (needs to be at least pkt_len bytes).
     * @param pkt_len Length of the packet to be sent.
     * @return        True if the message was sent correctly.
     */
    virtual bool send(uint8_t* pkt, size_t pkt_len) = 0;

    /**
     * Wait until a new packet is received.
     *
     * @param pkt     Buffer to store the received packet into.
     * @param pkt_len Maximum length of the received data.
     * @return        Size of the data received or -1 on failure.
     */
    virtual ssize_t receive(uint8_t* pkt, size_t pkt_len) = 0;
};

#endif
|
#!/bin/bash -u
set -o pipefail
# This script is executed by `git@git.ruby-lang.org:ruby.git/hooks/post-receive`.
# Its outputs are logged to `/tmp/post-receive.log`.

# script parameters
ruby_git="/var/git/ruby.git"
ruby_commit_hook="$(cd "$(dirname $0)"; cd ..; pwd)"

# Timestamped logging helper; $$ tags each line with this hook's PID so
# concurrent pushes can be told apart in the shared log.
function log() {
  echo -e "[$$: $(date "+%Y-%m-%d %H:%M:%S %Z")] $1"
}

log "### start ###"
log "args: $*"

# Mirror the canonical repository to GitHub.
log "==> github sync"
git remote update; git push github

# Post a notification about the pushed commits to Slack.
log "==> notify slack"
"${ruby_commit_hook}/bin/notify-slack.rb" $*

# Send commit emails to the ruby-cvs mailing list.
log "==> commit-email.rb"
"${ruby_commit_hook}/bin/commit-email.rb" \
  "$ruby_git" ruby-cvs@ruby-lang.org $* \
  --viewer-uri "https://github.com/ruby/ruby/commit/" \
  --error-to cvs-admin@ruby-lang.org

# Ask Redmine (bugs.ruby-lang.org) to pick up the new changesets; backgrounded
# so the push is not delayed by the HTTP round-trip.
log "==> redmine fetch changesets"
curl -s "https://bugs.ruby-lang.org/sys/fetch_changesets?key=`cat ~git/config/redmine.key`" &

# Make extra commits from here.
# The above procedure will be executed for the these commits in another post-receive hook.
log "==> auto-style"
SVN_ACCOUNT_NAME=git "${ruby_commit_hook}/bin/auto-style.rb" "$ruby_git" $*

log "==> update-version.h.rb"
SVN_ACCOUNT_NAME=git "${ruby_commit_hook}/bin/update-version.h.rb" git "$ruby_git" $*

log "### end ###\n"
|
import Iterable from './iterable';
import ArrayIterator from './iterator-array';
import extend from '../utils/extend';
import isFunction from '../utils/is-function';
import iterableSymbol from './iterable-symbol';
import iteratorSymbol from '../utils/iterator-symbol';
/**
 * Iterable backed by an array-like source (Array, String, or any object
 * with indexed elements and a length).
 * @param {Array|String|Array-like} value An array-like object.
 */
export default function ArrayIterable(value) {
  Iterable.call(this, value);
}

extend(ArrayIterable, Iterable, {
  /**
   * Returns the underlying array-like value, or an empty array when none
   * was stored.
   * @returns {Array}
   */
  toArray: function () {
    var source = this[iterableSymbol];
    return source || [];
  },
  toString: function () {
    return '[Array Iterable]';
  },
  // Prefer the source's own native iterator when it has one; otherwise fall
  // back to an index-based ArrayIterator.
  '@@iterator': function () {
    var source = this[iterableSymbol];
    if (isFunction(source[iteratorSymbol])) {
      return source[iteratorSymbol]();
    }
    return new ArrayIterator(source);
  }
});
|
# echo "Downloading precinct results ..." &&
# echo "state;county_id;precinct_id;office_id;office_name;district;\
# cand_order;cand_name;suffix;incumbent;party;precincts_reporting;\
# precincts_voting;votes;votes_pct;votes_office" | \
# cat - <(wget -O - -o /dev/null https://electionresults.sos.state.mn.us/Results/MediaResult/115?mediafileid=13) > local.csv

# Convert the semicolon-delimited precinct CSV to JSON, split it into
# newline-delimited JSON records, and keep only Hennepin County (county 27)
# sheriff-race rows (office 0404).
csv2json -r ";" local.csv | \
ndjson-split | \
ndjson-filter 'd.office_id == "0404" && d.county_id == "27"' > sheriff-hennepin.tmp.ndjson
#!/usr/bin/env bash
#source ../../venv/bin/activate

# Runs the generator over the two restaurant JSON samples, producing the
# RESTAURANT_CHAIN view/explore with the listed SQL table settings.
python ../../main.py --json_files data1.json data2.json --output_view RESTAURANT_CHAIN --output_explore RESTAURANT_CHAIN --column_name DATA --sql_table_name RESTAURANT_DETAILS --table_alias CHAINS_TABLE --handle_null_values_in_sql true --primary_key apiVersion
<filename>gatsby-config.js
// Gatsby site configuration: site-wide metadata plus the react-helmet
// plugin for managing document <head> tags.
module.exports = {
  siteMetadata: {
    title: 'Core Data Fabric',
  },
  plugins: ['gatsby-plugin-react-helmet'],
}
|
<filename>js/scripts.js
// Keeps track of update cycles for updating the clock intermittantly
var blinkingText = false;
var progressPacer = 0;
// Start time with real datetime
var timeProgressRate = "regular";
// For holding time calculations
var dateHolder = new Date();
var regularTimeDateHolder = new Date();
var secondsCounterRegularTimeProgress = 0;
// Variables to hold normalized text for sending to flipClock
var writeSecs, writeMinutes, writeHours, writeDays, writeMonths, writeYears;
// Tick interval (ms) driving baseTimer.
var baseTimerTickInterval = 100;
var baseIntervalTimer = setInterval(baseTimer, baseTimerTickInterval);
// Set when the user changes timeProgressRate; baseTimer reconfigures on it.
var rateChanged = false;
// Variables to hold time changing speed
// NOTE: "addMonts" (sic) is the months increment; the typo is kept because
// it is referenced throughout this file.
var addSeconds, addMinutes, addHours, addMonts, addYears;
var positiveYear = true;
var clockOnly = false;
// Element ids whose font size is adjusted together.
var divIDs = ["clock","date","message","blinking-message"];
var marginDivIDs = ["top_div_spacer"];
// Rates that play a flip sound on every tick (see baseTimer).
var progressSpeeds = ["ff2","ff3","ff4","ff5","rw2","rw3","rw4","rw5"];
// Keys the keydown handler deliberately ignores.
var ignoreKeys = ["Alt","F5","F11","F12","Shift","Control"];
// Keys that switch the display to a preset message (see keydown handler).
var messageKeys = ["y","u","i","o","f","p","b","g","r","t","1","2","3","4","5","6","7","8","9","0"];
// Pool of audio elements rotated so rapid flips can overlap playback.
var audioCounter = 0;
var audio = new Audio('sound/flip_sound.wav');
var audio1 = new Audio('sound/flip_sound.wav');
var audio2 = new Audio('sound/flip_sound.wav');
var audio3 = new Audio('sound/flip_sound.wav');
var audio4 = new Audio('sound/flip_sound.wav');
var audio5 = new Audio('sound/flip_sound.wav');
var audio6 = new Audio('sound/flip_sound.wav');
var audio7 = new Audio('sound/flip_sound.wav');
var audio8 = new Audio('sound/flip_sound.wav');
var audio9 = new Audio('sound/flip_sound.wav');
var audioVars = [audio, audio1, audio2, audio3, audio4, audio5, audio6, audio7, audio8, audio9];
var muted = true;
// Interval id for the matrix animation (see showMatrix/hideMatrix).
var matrixInterval;
var marginLeftCalc = 0;
// Array to hold normalized text month name
var months = [
  'JAN',
  'FEB',
  'MAR',
  'APR',
  'MAY',
  'JUN',
  'JUL',
  'AUG',
  'SEP',
  'OCT',
  'NOV',
  'DEC'
];
// Global keyboard controller: arrow keys change the simulated time rate,
// letter/digit keys switch messages or toggle display features.
// Fixed: all loose equality (==) replaced with strict equality (===) and the
// mute toggle simplified to a boolean negation; behavior is unchanged.
window.addEventListener("keydown", function (e) {
  // Any key press dismisses a blinking message, if one is showing.
  if (blinkingText) {
    blinkingText = false;
    document.getElementById("blinking-message").style.display = "none";
  }
  if (e.key === "ArrowRight") {
    // Step up through the fast-forward speeds (ff -> ff2 -> ... -> ff5).
    switch (timeProgressRate) {
      case 'ff':
        timeProgressRate = "ff2";
        hideClock();
        break;
      case 'ff2':
        timeProgressRate = "ff3";
        break;
      case 'ff3':
        timeProgressRate = "ff4";
        break;
      case 'ff4':
        timeProgressRate = "ff5";
        break;
      default:
        timeProgressRate = "ff";
        break;
    }
    rateChanged = true;
  } else if (e.key === "ArrowLeft") {
    // Step up through the rewind speeds (rw -> rw2 -> ... -> rw7).
    switch (timeProgressRate) {
      case 'rw':
        timeProgressRate = "rw2";
        hideClock();
        break;
      case 'rw2':
        timeProgressRate = "rw3";
        break;
      case 'rw3':
        timeProgressRate = "rw4";
        break;
      case 'rw4':
        timeProgressRate = "rw5";
        break;
      case 'rw5':
        timeProgressRate = "rw6";
        break;
      case 'rw6':
        timeProgressRate = "rw7";
        break;
      default:
        timeProgressRate = "rw";
        break;
    }
    rateChanged = true;
  } else if (e.key === "ArrowDown") {
    timeProgressRate = "paused";
    rateChanged = true;
  } else if (e.key === "ArrowUp") {
    timeProgressRate = "regular";
  } else if (e.key.toLowerCase() === "m") {
    // Increase font size for all clocks
    increaseFontSize();
  } else if (e.key.toLowerCase() === "j") {
    increaseTopMargin();
  } else if (e.key.toLowerCase() === "k") {
    decreaseTopMargin();
  } else if (e.key.toLowerCase() === "n") {
    // Decrease font size for all clocks
    decreaseFontSize();
  } else if (e.key.toLowerCase() === "a") {
    // Clock-only fast-forward starting just before 07:00:00.
    hideDate();
    //clockOnly = true;
    timeProgressRate = "ff";
    rateChanged = true;
    dateHolder.setSeconds(43);
    dateHolder.setMinutes(59);
    dateHolder.setHours(6);
  } else if (messageKeys.includes(e.key.toLowerCase())) {
    // Display future message
    timeProgressRate = "paused";
    switch (e.key.toLowerCase()) {
      case 'p':
        showMessage("The Big Bang");
        break;
      case 'f':
        showMessage("The Future");
        break;
      case 'b':
        showMessage("The Beginning of Life");
        break;
      case 'r':
        showMessage("Reptile Era");
        break;
      case 't':
        showMessage("Today");
        break;
      case 'g':
        document.getElementById("message").style.display = "none";
        blinkingText = true;
        showMessage("88:88:88");
        break;
      case '1':
        showMessage("<NAME>");
        break;
      case '2':
        showMessage("<NAME>");
        break;
      case '3':
        showMessage("<NAME>");
        break;
      case '4':
        showMessage("<NAME>");
        break;
      case '5':
        showMessage("<NAME>");
        break;
      case '6':
        showMessage("<NAME>");
        break;
      case '7':
        showMessage("<NAME>");
        break;
      case '8':
        showMessage("888 888");
        break;
      case '9':
        showMessage("999 999");
        break;
      case '0':
        showMessage("000 000");
        break;
      case 'y':
        showMessage("Scene 1");
        break;
      case 'u':
        showMessage("Scene 2");
        break;
      case 'i':
        showMessage("Scene 3");
        break;
      case 'o':
        showMessage("Scene 4");
        break;
      default:
        showMessage("Error displaying message: " + e.key);
        break;
    }
  } else if (e.key.toLowerCase() === "s") {
    // Toggle mute and play one confirmation flip when unmuting.
    muted = !muted;
    playSound(audio);
  } else if (e.key === "[") {
    showMatrix();
  } else if (e.key === "]") {
    hideMatrix();
  } else if (e.key.toLowerCase() === "c") {
    // Display counters
    timeProgressRate = "ff";
    clockOnly = false;
    showCounters();
    showDate();
    rateChanged = true;
  } else if (!ignoreKeys.includes(e.key)) {
    //alert("Unknown key pressed: " + e.key);
  }
})
function baseTimer() {
if(timeProgressRate == "regular" && new Date().getSeconds() !== dateHolder.getSeconds()) {
playSound(audio);
} else if (progressSpeeds.includes(timeProgressRate)) {
playSound(audioVars[audioCounter]);
audioCounter++;
if(audioCounter == 10) {audioCounter = 0;}
}
if(timeProgressRate == "regular") {
// Regular time follows the computer clock
// Get latest accurate time
dateHolder = new Date();
} else if(rateChanged) {
if (timeProgressRate == "ff") {
regularTimeDateHolder = new Date();
secondsCounterRegularTimeProgress = regularTimeDateHolder.getSeconds();
addSeconds = 1;
addMinutes = null;
addHours = null;
addMonts = null;
addYears = null;
} else if (timeProgressRate == "ff2") {
addSeconds = 2;
addMinutes = 4;
addHours = 4;
addMonts = 1;
addYears = 19;
} else if (timeProgressRate == "ff3") {
addYears = 29;
} else if (timeProgressRate == "ff4") {
addSeconds = 3;
addMinutes = 6;
addHours = 6;
addYears = 41;
} else if (timeProgressRate == "ff5") {
addYears = 61;
} else if (timeProgressRate == "rw") {
regularTimeDateHolder = new Date();
secondsCounterRegularTimeProgress = regularTimeDateHolder.getSeconds();
addSeconds = -1;
addMinutes = null;
addHours = null;
addMonts = null;
addYears = null;
} else if (timeProgressRate == "rw2") {
addSeconds = -2;
addMinutes = -4;
addHours = -4;
addMonts = -1;
addYears = -19;
} else if (timeProgressRate == "rw3") {
addYears = -29;
} else if (timeProgressRate == "rw4") {
addSeconds = -3;
addMinutes = -6;
addHours = -6;
addYears = -41;
} else if (timeProgressRate == "rw5") {
addYears = -90;
} else if (timeProgressRate == "rw6") {
addYears = -150;
} else if (timeProgressRate == "rw7") {
addYears = -2004;
} else if (timeProgressRate == "paused") {
addSeconds = 0;
addMinutes = 0;
addHours = 0;
addMonts = 0;
addYears = 0;
} else {
alert("Error in timeProgressRate, unknown rate requested: " + timeProgressRate);
}
rateChanged = false;
}
if(timeProgressRate != "paused" && timeProgressRate != "regular") {
// Convert seconds to double digits
if(timeProgressRate == "ff" || timeProgressRate == "rw") {
regularTimeDateHolder = new Date();
if( secondsCounterRegularTimeProgress != regularTimeDateHolder.getSeconds()) {
secondsCounterRegularTimeProgress = regularTimeDateHolder.getSeconds();
dateHolder.setSeconds(dateHolder.getSeconds() + addSeconds)
playSound(audio);
}
} else if(progressPacer % 2 == 0) {
dateHolder.setSeconds(dateHolder.getSeconds() + addSeconds);
}
//Add minutes
if(addMinutes != null) {
if(progressPacer % 2 == 0) {
dateHolder.setMinutes(dateHolder.getMinutes() + addMinutes);
}
}
//Add house
if(addHours != null) {
if(progressPacer % 2 == 0) {
dateHolder.setHours(dateHolder.getHours() + addHours);
}
}
//Add months
if(addMonts != null) {
if(progressPacer % 2 == 0) {
dateHolder.setMonth(dateHolder.getMonth() + addMonts);
}
}
// Add years
if(addYears != null) {
dateHolder.setFullYear(dateHolder.getFullYear() + addYears);
}
}
if(timeProgressRate != "paused") {
// Convert date format to printable format
writeSecs = dateHolder.getSeconds();
if(writeSecs < 10) {
writeSecs = "0" + writeSecs.toString();
}
writeMinutes = dateHolder.getMinutes();
if(writeMinutes < 10) {
writeMinutes = "0" + writeMinutes.toString();
}
writeHours = dateHolder.getHours();
if(writeHours < 10) {
writeHours = "0" + writeHours.toString();
}
writeDays = dateHolder.getDate();
if(writeDays < 10) {
writeDays = "0" + writeDays.toString();
}
if(dateHolder.getFullYear() < 0) {
writeMonths = "BC";
} else {
writeMonths = dateHolder.getMonth() + 1;
}
var tempYearString = "";
writeYears = dateHolder.getFullYear();
if (writeYears.toString().length < 4) {
for (var i = 0; i < 4 - writeYears.toString().length; i++) {
tempYearString = tempYearString + "0";
}
writeYears = tempYearString + writeYears;
}
if(dateHolder.getFullYear() < 0) {
document.getElementById("date").innerHTML = writeMonths + "  " + writeYears;
if(dateHolder.getFullYear() < -269000 && timeProgressRate.startsWith("rw")) {
timeProgressRate = "paused";
rateChanged = true;
}
} else {
document.getElementById("date").innerText = writeDays + "/" + writeMonths + "/" + writeYears;
}
document.getElementById("clock").innerText = writeHours + ":" + writeMinutes + ":" + writeSecs;
}
// To keep track of rounds in case variable does not require updating every round
progressPacer = progressPacer < 10 ? progressPacer + 1 : 0;
}
// Hides both the date and clock lines (used while showing a message).
function hideCounters() {
  document.getElementById("date").style.display = "none";
  document.getElementById("clock").style.display = "none";
}

// Hides only the clock line.
function hideClock() {
  document.getElementById("clock").style.display = "none";
}

// Clock-only view: hide the date and any message, show the clock.
function hideDate() {
  document.getElementById("date").style.display = "none";
  document.getElementById("clock").style.display = "block";
  document.getElementById("message").style.display = "none";
}

function hideBlinkingMessage() {
  document.getElementById("blinking-message").style.display = "none";
}

function hideTopDivSpacer() {
  document.getElementById("top_div_spacer").style.display = "none";
}

// Shows the date and clock and hides the static message.
function showDate() {
  document.getElementById("date").style.display = "block";
  document.getElementById("clock").style.display = "block";
  document.getElementById("message").style.display = "none";
}

// Same effect as showDate; kept separate for call-site readability.
function showCounters() {
  document.getElementById("message").style.display = "none";
  document.getElementById("date").style.display = "block";
  document.getElementById("clock").style.display = "block";
}
// Grows the font of every clock-related element by 2px.
// Fixed: dropped the unused `elementClassName` parameter (no caller passed
// one) and collapsed the four duplicated loops into two shared helpers.
function increaseFontSize() {
  adjustFontSizes(2);
}

// Shrinks the font of every clock-related element by 2px.
function decreaseFontSize() {
  adjustFontSizes(-2);
}

// Pushes the clock block down by growing the spacer's top padding.
function increaseTopMargin() {
  adjustTopMargins(2);
}

// Pulls the clock block up by shrinking the spacer's top padding.
function decreaseTopMargin() {
  adjustTopMargins(-2);
}

// Applies a font-size delta (px) to every element listed in divIDs.
function adjustFontSizes(delta) {
  for (var i = 0; i < divIDs.length; i++) {
    changeFontSize(document.getElementById(divIDs[i]), delta);
  }
}

// Applies a top-padding delta (px) to every element listed in marginDivIDs.
function adjustTopMargins(delta) {
  for (var i = 0; i < marginDivIDs.length; i++) {
    changeMarginTop(document.getElementById(marginDivIDs[i]), delta);
  }
}

// Adjusts an element's top spacing by changedValue pixels.
// Note: despite the name, this reads and writes padding-top — the spacer
// divs are positioned via padding, not margin.
function changeMarginTop(element, changedValue) {
  var style = window.getComputedStyle(element, null).getPropertyValue('padding-top');
  var paddingSize = parseFloat(style);
  element.style.paddingTop = (paddingSize + changedValue) + 'px';
}

// Adjusts an element's computed font size by changedValue pixels.
function changeFontSize(element, changedValue) {
  var style = window.getComputedStyle(element, null).getPropertyValue('font-size');
  var fontSize = parseFloat(style);
  element.style.fontSize = (fontSize + changedValue) + 'px';
}
/**
 * Plays the given audio element, honouring the global mute toggle.
 * @param {HTMLAudioElement} audioElement element to play.
 */
function playSound(audioElement) {
  if (muted) {
    return;
  }
  audioElement.play();
}
function showMessage(messageText) {
if(blinkingText) {
document.getElementById("blinking-message").innerText = messageText;
document.getElementById("blinking-message").style.display = "block";
} else {
document.getElementById("message").innerText = messageText;
document.getElementById("message").style.display = "block";
}
document.getElementById("date").style.display = "none";
document.getElementById("clock").style.display = "none";
}
// Stops the matrix animation, removes its canvas (#q), and restores the
// regular date/clock display.
function hideMatrix() {
  clearInterval(matrixInterval);
  var canvas = document.getElementById("q");
  canvas.remove();
  showDate();
  showCounters();
}
// Switches the page into "digital rain" mode: hides the clock UI, creates a
// full-screen canvas inside #matrixCol, and animates falling characters.
// Fixes: the canvas padding values were written as "0 px" (with a space),
// which is invalid CSS and silently ignored; the per-character `text` and
// `x` variables leaked as implicit globals; and the 2d context was
// re-acquired on every character instead of reusing `ctx`.
function showMatrix() {
  hideDate();
  hideCounters();
  hideBlinkingMessage();
  hideTopDivSpacer();
  document.getElementById("matrixCol").style.paddingLeft = "0px";
  document.getElementById("matrixCol").style.paddingRight = "0px";
  var screen = window.screen;
  var canvas = document.createElement("canvas");
  canvas.id = "q";
  var width = canvas.width = screen.width;
  var height = canvas.height = screen.height;
  canvas.style.paddingLeft = "0px";
  canvas.style.paddingRight = "0px";
  var matrixHolder = document.getElementById("matrixCol");
  matrixHolder.appendChild(canvas);
  // One falling "drop" y-position per 10px-wide column.
  var yPositions = Array(screen.height / 2).join(0).split('');
  var ctx = canvas.getContext('2d');
  var draw = function () {
    // Translucent black overlay fades previous frames, leaving trails.
    ctx.fillStyle = 'rgba(0,0,0,.05)';
    ctx.fillRect(0, 0, width, height);
    ctx.fillStyle = '#0F0';
    ctx.font = '10pt Georgia';
    yPositions.map(function (y, index) {
      var text = String.fromCharCode(1e2 + Math.random() * 33);
      var x = (index * 10) + 10;
      ctx.fillText(text, x, y);
      // Randomly reset a drop to the top once it has fallen far enough.
      if (y > 100 + Math.random() * 1e4) {
        yPositions[index] = 0;
      } else {
        yPositions[index] = y + 10;
      }
    });
  };
  RunMatrix();
  function RunMatrix() {
    // Clear any previous animation before starting a new one.
    if (typeof matrixInterval != "undefined") clearInterval(matrixInterval);
    matrixInterval = setInterval(draw, 33);
  }
}
|
import http from 'http';
export default {
  /**
   * Performs an HTTP GET and resolves with the response body. Responses
   * whose content-type starts with application/json are parsed; anything
   * else resolves as the raw text. Rejects on request errors or on a JSON
   * parse failure.
   * @param {string} uri address to fetch.
   * @returns {Promise<Object|string>}
   */
  get(uri) {
    return new Promise((resolve, reject) => {
      http
        .get(uri, (res) => {
          const contentType = res.headers['content-type'];
          let body = '';
          res.setEncoding('utf8');
          res.on('data', (chunk) => {
            body += chunk;
          });
          res.on('end', () => {
            try {
              const isJson = /^application\/json/.test(contentType);
              resolve(isJson ? JSON.parse(body) : body);
            } catch (error) {
              reject(error);
            }
          });
        })
        .on('error', reject);
    });
  },
};
|
package edu.washington.cse.instrumentation.analysis.resource;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import soot.Scene;
import soot.SootMethod;
/**
 * Resource resolver configured by a single string of the form
 * {@code "<keyParamIndex>;<sootMethodSignature>"}: the named method is the
 * sole resource-access method, and the given parameter index identifies the
 * argument that carries the resource key.
 */
public class SimpleStringResourceResolver extends PointsToMethodResourceResolver {
    // Full Soot signature of the single resource-access method.
    private final String accessMethod;
    // Zero-based index of the parameter holding the resource key.
    private final int keyParam;

    public SimpleStringResourceResolver(final String methodSignature) {
        // Split on the first ';' only — the method signature itself is kept
        // intact in tokens[1].
        final String[] tokens = methodSignature.split(";", 2);
        assert tokens.length == 2 : Arrays.toString(tokens) + " " + methodSignature;
        this.keyParam = Integer.parseInt(tokens[0]);
        this.accessMethod = tokens[1];
    }

    /** A method is a resource method iff its signature matches exactly. */
    @Override
    public boolean isResourceMethod(final SootMethod m) {
        return m.getSignature().equals(accessMethod);
    }

    /** Returns the configured key-argument position for the access method. */
    @Override
    protected int getArgumentPosition(final SootMethod m) {
        assert m.getSignature().equals(accessMethod);
        return keyParam;
    }

    /** Resolves the configured signature against the loaded Scene. */
    @Override
    public Collection<SootMethod> getResourceAccessMethods() {
        return Collections.singleton(Scene.v().getMethod(accessMethod));
    }
}
|
<reponame>premss79/zignaly-webapp<gh_stars>10-100
import React from "react";
import { Box, Typography } from "@material-ui/core";
import "./CaptchaTerms.scss";
/**
 * reCAPTCHA attribution notice.
 *
 * Google requires this text (with links to its Privacy Policy and Terms)
 * to be shown whenever the default reCAPTCHA badge is hidden.
 * Purely presentational — no props, no state.
 */
const CaptchaTerms = () => {
  return (
    <Box className="captchaTerms">
      <Typography color="textSecondary" variant="subtitle1">
        Protected by reCAPTCHA (
        <a href="https://policies.google.com/privacy" rel="noreferrer" target="_blank">
          Privacy
        </a>
        |
        <a href="https://policies.google.com/terms" rel="noreferrer" target="_blank">
          Terms
        </a>
        )
      </Typography>
    </Box>
  );
};

export default CaptchaTerms;
|
<filename>test/lib/middleware_test.rb<gh_stars>10-100
require 'test_helper'

module Sidekiq
  module Lock
    # Exercises the server middleware that acquires/records a Redis lock
    # around job execution. LockWorker / DynamicLockWorker / RegularWorker
    # and the lock_container_variable / reset_lock_variable! helpers are
    # presumably defined in test_helper — confirm there.
    describe Middleware do
      before do
        # Point Sidekiq at the test Redis and start from a clean database
        # so lock state cannot leak between examples.
        Sidekiq.redis = REDIS
        Sidekiq.redis { |c| c.flushdb }
        reset_lock_variable!
      end

      let(:handler) { Sidekiq::Lock::Middleware.new }

      it 'sets lock variable with provided static lock options' do
        handler.call(LockWorker.new, { 'class' => LockWorker, 'args' => [] }, 'default') do
          true
        end
        assert_kind_of RedisLock, lock_container_variable
      end

      it 'sets lock variable with provided dynamic options' do
        # Dynamic workers derive the lock name/timeout from the job args.
        handler.call(DynamicLockWorker.new, { 'class' => DynamicLockWorker, 'args' => [1234, 1000] }, 'default') do
          true
        end
        assert_equal "lock:1234", lock_container_variable.name
        assert_equal 2000, lock_container_variable.timeout
      end

      it 'sets nothing for workers without lock options' do
        handler.call(RegularWorker.new, { 'class' => RegularWorker, 'args' => [] }, 'default') do
          true
        end
        assert_nil lock_container_variable
      end
    end
  end
end
|
#!/bin/sh
# Container entrypoint: substitute the runtime Graphite datasource URL into
# the provisioned Grafana datasource config, then hand off to the stock
# Grafana start script.
if [ -n "$GRAPHITE_URL" ]; then
    # '|' is used as the sed delimiter because URLs contain '/'.
    sed -i "s|GRAPHITE_URL|$GRAPHITE_URL|" /etc/grafana/provisioning/datasources/graphite.yaml
fi
# exec replaces this shell so Grafana runs as the container's main process
# and receives stop signals (SIGTERM) directly instead of being orphaned
# behind an intermediate sh.
exec /run.sh
|
<reponame>coronsaye/ConstrutNgAdmin
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { NewUserGroupComponent } from './new-user-group.component';
// Smoke test for NewUserGroupComponent: verifies the component can be
// compiled and instantiated by Angular's TestBed.
describe('NewUserGroupComponent', () => {
  let component: NewUserGroupComponent;
  let fixture: ComponentFixture<NewUserGroupComponent>;

  // async() because compileComponents() may fetch external templates/styles.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ NewUserGroupComponent ]
    })
    .compileComponents();
  }));

  // Fresh fixture per spec so state cannot leak between tests.
  beforeEach(() => {
    fixture = TestBed.createComponent(NewUserGroupComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
|
<reponame>BrightLoong/mylab
package io.github.brightloong.java.all.learn.reference;
/**
* Apple class
*
* @author BrightLoong
* @date 2018/5/25
*/
public class Apple {
private String name;
public Apple(String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
/**
* 覆盖finalize,在回收的时候会执行。
* @throws Throwable
*/
@Override
protected void finalize() throws Throwable {
super.finalize();
System.out.println("Apple: " + name + " finalize。");
}
@Override
public String toString() {
return "Apple{" +
"name='" + name + '\'' +
'}' + ", hashCode:" + this.hashCode();
}
}
|
import os
import numbers
import base64
from django.contrib.auth.models import User
from django.db import transaction
import io
import pandas as pd
import numpy as np
from openfacstrack.apps.track.models import (
PanelMetadata,
Parameter,
ProcessedSample,
Result,
DataProcessing,
Patient,
PatientMetadataDict,
PatientMetadata,
Panel,
NumericValue,
TextValue,
DateValue,
UploadedFile,
ValidationEntry,
GatingStrategy,
)
class ClinicalSampleFile:
    """
    Validates and uploads a file with results from clinical samples.

    The file is a CSV with one row per FCS acquisition: a handful of fixed
    metadata columns plus one column per gating parameter. All rows are
    expected to belong to a single panel.
    """

    def __init__(
        self,
        file_name=None,
        file_contents=None,
        uploaded_file: UploadedFile = None,
        user: User = None,
        gating_strategy: GatingStrategy = None,
    ):
        """load contents of file into a data frame and set other attribs.

        Parameters
        ----------
        file_name : string
            name of file
        file_contents : InMemoryUploadedFile
            Django object with binary contents of uploaded file
        uploaded_file : UploadedFile
            custom object to store details of uploaded file; when given it
            supersedes file_name/file_contents
        user : User
            Django object representing user making upload
        gating_strategy : GatingStrategy
            Custom object representing the GatingStrategy for this upload

        Returns
        -------
        None
        """
        if uploaded_file:
            self.upload_file = uploaded_file
            file_name = uploaded_file.name
            file_contents = uploaded_file.content
        self.content = file_contents
        self.file_name = file_name
        self.gating_strategy = gating_strategy
        # "Date" is parsed eagerly so date pseudo-parameters arrive as Timestamps.
        self.df = pd.read_csv(self.content, parse_dates=["Date"])
        # List of columns always expected
        # ToDo: Find out if any of these columns are 'required' - if so
        # cannot continue without them.
        # Use variables to store static_column names in case they change
        # in future
        self.sc_panel = "Panel"
        self.sc_clinical_sample = "Clinical_sample"
        self.sc_filename = "filename"
        self.sc_operator1 = "Operator name"
        self.sc_comments = "Comments"
        self.sc_batch = "batch"
        self.sc_date = "Date"
        self.required_columns = [
            self.sc_filename,
            self.sc_panel,
            self.sc_clinical_sample,
        ]
        self.static_columns = [
            self.sc_batch,
            self.sc_operator1,
            self.sc_comments,
            self.sc_date,
        ]
        # Store the unique panels in the data
        # ToDo: I think there should be only one unique panel - check.
        self.panels = self.df["Panel"].unique().tolist()
        self.panel_name = self.panels[0].upper()
        # Compute names of parameters present. These are all the other
        # columns in the file that are not in the static_columns list
        # and are not unregistered_derived_parameters
        parameter_columns = set(self.df.columns) - set(self.static_columns)
        parameter_columns -= set(self.required_columns)
        self.parameter_columns = list(parameter_columns)
        # Store unregistered parameters. Derived ones will be dynamically
        # added to the Parameter table before upload
        self.unregistered_derived_parameters = []
        self.unregistered_parameters = []
        for parameter_column in self.parameter_columns:
            try:
                parameter_object = Parameter.objects.get(
                    gating_hierarchy=parameter_column
                )
            except Parameter.DoesNotExist:
                # Derived columns are recognised purely by suffix.
                if parameter_column.endswith("Count_back") or parameter_column.endswith(
                    "freq"
                ):
                    self.unregistered_derived_parameters.append(parameter_column)
                else:
                    self.unregistered_parameters.append(parameter_column)
        # Keep only parameters already registered in the Parameter table;
        # derived ones re-enter the list during upload() after registration.
        self.parameter_columns = [
            column
            for column in self.parameter_columns
            if column not in self.unregistered_parameters
            and column not in self.unregistered_derived_parameters
        ]
        # Names for pseudo parameters (parameters computed from data).
        # Each entry is (source CSV column, panel-prefixed parameter name).
        self.pseudo_parameters_numeric = []
        if self.sc_batch in self.df.columns:
            self.pseudo_parameters_numeric.append(
                (self.sc_batch, f"{self.panel_name}_batch")
            )
        if self.sc_operator1 in self.df.columns:
            self.pseudo_parameters_numeric.append(
                (self.sc_operator1, f"{self.panel_name}_operator_1")
            )
        self.pseudo_parameters_date = []
        if self.sc_date in self.df.columns:
            self.pseudo_parameters_date.append(
                (self.sc_date, f"{self.panel_name}_date_processed")
            )
        self.pseudo_parameters_text = []
        if self.sc_comments in self.df.columns:
            self.pseudo_parameters_text.append(
                (self.sc_comments, f"{self.panel_name}_comments")
            )
        # Number of rows to process
        self.nrows = len(self.df)
        # Default uploaded file
        if not uploaded_file:
            self.upload_file = UploadedFile(
                name=self.file_name,
                user=user,
                description="Panel results",
                row_number=self.nrows,
                content=self.content,
                notes="",
                content_type="PANEL_RESULTS",
            )
            self.upload_file.save()

    def validate(self):
        """Validate file for completeness of reference data

        Each problem found is persisted as a ValidationEntry row and also
        returned; FATAL entries additionally mark the uploaded file as
        having invalid syntax.

        Parameters
        ----------
        None

        Returns
        -------
        validation_error : list
            list of validation errors. Each entry in the list is a
            ValidationEntry object - basically a dict
            whose keys are types of errors and values are descriptions.
            Empty list is returned if there are no errors
        """
        validation_errors = []
        # Check we have the required columns needed for upload to proceed.
        required_columns_missing = []
        for required_column in self.required_columns:
            if required_column not in self.df.columns:
                required_columns_missing.append(required_column)
        if len(required_columns_missing) > 0:
            error = ValidationEntry(
                subject_file=self.upload_file,
                key="required_columns_missing",
                value=required_columns_missing,
                entry_type="FATAL",
                validation_type="SYNTAX",
            )
            error.save()
            validation_errors.append(error)
            self.upload_file.valid_syntax = False
            self.upload_file.save()
        # Check we have the expected number of columns.
        static_columns_missing = []
        for static_column in self.static_columns:
            if static_column not in self.df.columns:
                static_columns_missing.append(static_column)
        if len(static_columns_missing) > 0:
            error = ValidationEntry(
                subject_file=self.upload_file,
                key="static_columns_missing",
                value=static_columns_missing,
                entry_type="ERROR",
                validation_type="SYNTAX",
            )
            error.save()
            validation_errors.append(error)
            self.upload_file.valid_syntax = False
            self.upload_file.save()
        # Check that all the info is for the same panel
        # It is dangerous to proceed otherwise as we will
        # mainly because of the parameters we dynamically
        # compose from the panel name.
        if "Panel" in self.df.columns:
            panels_in_data = self.df["Panel"].unique().tolist()
            n_unique_panels_in_data = len(panels_in_data)
            if n_unique_panels_in_data != 1:
                error = ValidationEntry(
                    subject_file=self.upload_file,
                    key="unique_panel_error",
                    value=f"Expected 1 unique value for panels in each record"
                    + f". Got {n_unique_panels_in_data}: {panels_in_data}",
                    entry_type="FATAL",
                    validation_type="SYNTAX",
                )
                error.save()
                validation_errors.append(error)
                self.upload_file.valid_syntax = False
                self.upload_file.save()
            # Check if the panel(s) are present in the Panel table
            panels_in_data_pk = []
            unknown_panels = []
            for panel in panels_in_data:
                try:
                    panels_in_data_pk.append(Panel.objects.get(name=panel.upper()).id)
                except Panel.DoesNotExist as e:
                    unknown_panels.append(panel)
            if len(unknown_panels) > 0:
                error = ValidationEntry(
                    subject_file=self.upload_file,
                    key="unknown_panel_error",
                    value=f"The following panels are not in Panel table: {unknown_panels}",
                    entry_type="WARN",
                    validation_type="SYNTAX",
                )
                error.save()
                validation_errors.append(error)
        else:
            # ToDo: Can we continue without unique panels?
            panels_in_data = []
            panels_in_data_pk = []
        if len(self.unregistered_parameters) > 0:
            error = ValidationEntry(
                subject_file=self.upload_file,
                key="unregistered_parameters",
                value=self.unregistered_parameters,
                entry_type="WARN",
                validation_type="SYNTAX",
            )
            error.save()
            validation_errors.append(error)
        if len(self.unregistered_derived_parameters) > 0:
            error = ValidationEntry(
                subject_file=self.upload_file,
                key="unregistered_derived_parameters - will be added during upload",
                value=self.unregistered_derived_parameters,
                entry_type="INFO",
                validation_type="SYNTAX",
            )
            error.save()
            validation_errors.append(error)
        # ToDo (not yet implemented here):
        # Check all fields needed for processed_sample table present
        # Check all clinical samples present in processed_sample table
        # Enter values into processed_sample, processed_sample,
        # numeric_value and text_parameter
        return validation_errors

    def upload(self, dry_run=False):
        """Upload file to respective tables

        Upload data in clinical sample results for panel into the database.
        We assume that all the results here are based on one panel (ToDo:
        need to confirm whether to throw error during validation if more
        than one panel). The upload is carried out in an atomic transaction
        and if there are any errors nothing is written to the database. If
        the dry_run parameter is True nothing is written to the database.
        This is useful to get details of any records that have issues that
        would otherwise be missed when writing to the database.

        Workflow:
        1 - Details of the file being uploaded are written to the
            UploadedFile table - the ID of this file is saved so that
            it can be stored with each record in the Result table
        2 - covid patient IDs loaded into Patient table
            create if they do not exist
        3 - For each row create unique record in Result table if it
            does not already exist. Uniqueness is by
            (panel, fcs_file_name, gating_strategy) then store:
            (a) patient_id in Patient table
            (b) sample_id (and any other sample metadata in
                ProcessedSample table
            (c) FCS file metadata into DataProcessing table
            (d) Parameters and values for each sample into
                NumericValue, DateValue and TextValue tables

        Parameters
        ----------
        dry_run : boolean
            Indicates it's going to attempt to do the upload without committing the changes.

        Returns
        -------
        upload_report : dict
            Details of how upload proceeded. Keys are:
            rows_processed : int - No. of rows from csv file
            rows_with_issues : int - No. of rows that had issues
            validation : list - ValidationEntry objects describing each
                         issue, with the row in the sheet where the issue
                         occurred. Empty list is returned if there are no
                         issues
        """
        # Assume all checks done - will stop and terminate upload if
        # any errors encountered
        upload_issues = []
        rows_with_issues = set()
        with transaction.atomic():
            # Ensure all sample numbers are in processed_sample table
            # and respective records for patients exist.
            # Sample IDs look like "pxxxnxx"; the patient ID is the part
            # before the first "n".
            sample_ids = self.df[self.sc_clinical_sample].unique().tolist()
            patient_ids = [str(s_id).split("n")[0] for s_id in sample_ids]
            processed_sample_pks = {}
            for patient_id, sample_id in zip(patient_ids, sample_ids):
                patient = Patient.objects.get_or_create(patient_id=patient_id)[0]
                processed_sample = ProcessedSample.objects.get_or_create(
                    clinical_sample_id=sample_id, patient=patient
                )[0]
                processed_sample_pks[sample_id] = processed_sample.pk
            # Get the panel(s) pks
            panels_pk = {}
            for panel in self.panels:
                panels_pk[panel] = Panel.objects.get(name=panel.upper()).id
            # Store first panel primary key for use later
            panel_pk = panels_pk[self.panels[0]]
            # Append any unregistered derived parameters to parameter table
            for parameter_to_add in self.unregistered_derived_parameters:
                parameter, created = Parameter.objects.get_or_create(
                    gating_hierarchy=parameter_to_add, panel_id=panel_pk
                )
                parameter.internal_name = parameter_to_add
                parameter.public_name = parameter_to_add
                parameter.is_reference_parameter = False
                if parameter_to_add.endswith("freq"):
                    parameter.unit = "Derived frequency"
                else:
                    parameter.unit = "Derived count"
                parameter.data_type = "PanelNumeric"
                parameter.description = parameter.unit
                parameter.save()
                self.parameter_columns.append(parameter_to_add)
            # Get parameter_ids for NumericParameters
            parameters_pk = {}
            for parameter in self.parameter_columns:
                parameters_pk[parameter] = Parameter.objects.get(
                    gating_hierarchy=parameter
                ).id
            # Ditto for pseudo parameters (date, text, numeric)
            pseudo_parameters_pk = {}
            for column, parameter in self.pseudo_parameters_numeric:
                pseudo_parameters_pk[parameter] = Parameter.objects.get(
                    gating_hierarchy=parameter
                ).id
            for column, parameter in self.pseudo_parameters_date:
                pseudo_parameters_pk[parameter] = Parameter.objects.get(
                    gating_hierarchy=parameter
                ).id
            for column, parameter in self.pseudo_parameters_text:
                pseudo_parameters_pk[parameter] = Parameter.objects.get(
                    gating_hierarchy=parameter
                ).id
            # Store details in relevant tables
            for index, row in self.df.iterrows():
                # Only proceed if sample_id is valid
                sample_id = str(row[self.sc_clinical_sample])
                if not sample_id.upper().startswith("P") or len(sample_id) < 4:
                    validation_entry = ValidationEntry(
                        subject_file=self.upload_file,
                        key=f"row:{index} field:Clinical_sample",
                        value=f"Value ({sample_id}) not a valid "
                        + "clinical sample id. Expected pxxxnxx. "
                        + "All entries for this row not loaded.",
                        entry_type="WARN",
                        validation_type="MODEL",
                    )
                    upload_issues.append(validation_entry)
                    rows_with_issues.add(index)
                    continue
                # Data processing details. The FCS file name must mention
                # the sample ID, otherwise the row is rejected.
                fcs_file_name = row[self.sc_filename]
                if type(fcs_file_name) == str and fcs_file_name.find(sample_id) >= 0:
                    data_processing, created = DataProcessing.objects.get_or_create(
                        fcs_file_name=fcs_file_name, panel_id=panels_pk[row["Panel"]]
                    )
                else:
                    validation_entry = ValidationEntry(
                        subject_file=self.upload_file,
                        key=f"row:{index} field:{self.sc_filename}",
                        value=f"Value {fcs_file_name} does not contain the"
                        + f" sample ID ({sample_id}) - row not loaded",
                        entry_type="WARN",
                        validation_type="MODEL",
                    )
                    upload_issues.append(validation_entry)
                    rows_with_issues.add(index)
                    continue
                # Create an entry in the results table
                result = Result.objects.get_or_create(
                    processed_sample_id=processed_sample_pks[sample_id],
                    gating_strategy=self.gating_strategy,
                    panel_id=panel_pk,
                    data_processing=data_processing,
                )[0]
                result.uploaded_file = self.upload_file
                result.save()
                # Store data for parameters
                for parameter, parameter_pk in parameters_pk.items():
                    if isinstance(row[parameter], numbers.Number) and not np.isnan(
                        row[parameter]
                    ):
                        numeric_value, created = NumericValue.objects.get_or_create(
                            result_id=result.id, parameter_id=parameters_pk[parameter]
                        )
                        numeric_value.value = row[parameter]
                        numeric_value.save()
                    else:
                        validation_entry = ValidationEntry(
                            subject_file=self.upload_file,
                            key=f"row:{index} parameter:{parameter}",
                            value=f"Value ({row[parameter]}) not a "
                            + "number - not uploaded to NumericValue"
                            + " table",
                            entry_type="WARN",
                            validation_type="MODEL",
                        )
                        upload_issues.append(validation_entry)
                        rows_with_issues.add(index)
                # Store numeric pseudo parameters
                for column, parameter in self.pseudo_parameters_numeric:
                    value = row[column]
                    if isinstance(value, numbers.Number) and not np.isnan(value):
                        numeric_value, created = NumericValue.objects.get_or_create(
                            result_id=result.id,
                            parameter_id=pseudo_parameters_pk[parameter],
                        )
                        numeric_value.value = value
                        numeric_value.save()
                    else:
                        validation_entry = ValidationEntry(
                            subject_file=self.upload_file,
                            key=f"row:{index} parameter:{parameter}",
                            value=f"Value ({value}) not a "
                            + "number - not uploaded to NumericValue"
                            + " table",
                            entry_type="WARN",
                            validation_type="MODEL",
                        )
                        upload_issues.append(validation_entry)
                        rows_with_issues.add(index)
                # Store date pseudo parameters
                for column, parameter in self.pseudo_parameters_date:
                    value = row[column]
                    if isinstance(value, pd.Timestamp) and not pd.isnull(value):
                        date_value, created = DateValue.objects.get_or_create(
                            result_id=result.id,
                            parameter_id=pseudo_parameters_pk[parameter],
                        )
                        date_value.value = value
                        date_value.save()
                    else:
                        validation_entry = ValidationEntry(
                            subject_file=self.upload_file,
                            key=f"row:{index} parameter:{parameter}",
                            value=f"Value ({value}) not a "
                            + "Date - not uploaded to DateValue"
                            + " table",
                            entry_type="WARN",
                            validation_type="MODEL",
                        )
                        upload_issues.append(validation_entry)
                        rows_with_issues.add(index)
                # Store text pseudo parameters (empty/NaN comments are
                # silently skipped - not treated as issues)
                for column, parameter in self.pseudo_parameters_text:
                    value = str(row[column]).strip()
                    if len(value) > 0 and value != "nan":
                        text_value, created = TextValue.objects.get_or_create(
                            result_id=result.id,
                            parameter_id=pseudo_parameters_pk[parameter],
                        )
                        text_value.value = value
                        text_value.save()
            upload_report = {
                "rows_processed": self.nrows,
                "rows_with_issues": len(rows_with_issues),
                "validation": upload_issues,
            }
            # set_rollback must be called while still inside the atomic
            # block; it discards every write made above on exit.
            if dry_run:
                transaction.set_rollback(True)
            if upload_issues:
                for issue in upload_issues:
                    issue.save()
            else:
                self.upload_file.valid_model = True
                self.upload_file.save()
            return upload_report
class PatientFile:
    """Uploads a file with anonymised patient details.

    The CSV must contain a "patient" column (IDs of the form pxxx); every
    other column is treated as a metadata key and registered dynamically
    in PatientMetadataDict.
    """

    def __init__(
        self,
        file_name=None,
        file_contents=None,
        uploaded_file: UploadedFile = None,
        user: User = None,
    ):
        # When an UploadedFile is supplied it supersedes the raw
        # file_name/file_contents arguments.
        if uploaded_file:
            self.upload_file = uploaded_file
            file_name = uploaded_file.name
            file_contents = uploaded_file.content
        self.content = file_contents
        self.file_name = file_name
        self.df = pd.read_csv(self.content)
        self.nrows = len(self.df)
        # Default uploaded file
        if not uploaded_file:
            self.upload_file = UploadedFile(
                name=self.file_name,
                user=user,
                description="Patient data",
                row_number=self.nrows,
                content=self.content,
                notes="",
                content_type="PATIENT_DATA",
            )
            self.upload_file.save()
        self.patient_ids = self.df["patient"].unique().tolist()

    def validate(self):
        # No syntax validation implemented yet for patient files.
        return []

    def upload(self, dry_run=False):
        """Upload data to relevant tables.

        Runs in a single atomic transaction; when dry_run is True the
        transaction is rolled back so nothing is committed. Returns a dict
        with rows_processed, rows_with_issues and upload_issues (a list of
        ValidationEntry objects).
        """
        upload_issues = []
        rows_with_issues = []
        with transaction.atomic():
            # Create metadata dict entries if necessary; every non-"patient"
            # column becomes a (lower-cased) metadata key.
            columns = self.df.columns.tolist()
            columns.remove("patient")
            metadata_dicts = {}
            for column in columns:
                column_lc = column.lower()
                metadata_dict, created = PatientMetadataDict.objects.get_or_create(
                    name=column_lc
                )
                if created:
                    metadata_dict.description = f"{column}"
                    metadata_dict.notes = "Dynamically added"
                    metadata_dict.save()
                metadata_dicts[column] = metadata_dict
            # Enter details for all patients
            for index, row in self.df.iterrows():
                patient_id = str(row["patient"])
                # Create patients if necessary; IDs must start with "p".
                if not patient_id.upper().startswith("P"):
                    validation_entry = ValidationEntry(
                        subject_file=self.upload_file,
                        key=f"row:{index} field:patient",
                        value=f"Value ({patient_id}) not valid. "
                        + "Expected pxxx. Entries for this id not loaded.",
                        entry_type="WARN",
                        validation_type="MODEL",
                    )
                    upload_issues.append(validation_entry)
                    rows_with_issues.append(index)
                    continue
                patient = Patient.objects.get_or_create(patient_id=patient_id)[0]
                # Store metadata associated with patient
                for column, metadata_dict in metadata_dicts.items():
                    value = row[column]
                    patient_metadata = PatientMetadata.objects.get_or_create(
                        patient=patient, metadata_key=metadata_dict
                    )[0]
                    patient_metadata.metadata_value = value
                    patient_metadata.save()
            if upload_issues:
                for issue in upload_issues:
                    issue.save()
            else:
                self.upload_file.valid_model = True
            if dry_run:
                # Must happen inside the atomic block; discards all writes.
                transaction.set_rollback(True)
            else:
                # Put this here as I think uploaded file is also saved to disk. Can this be rolled back?
                self.upload_file.save()
            upload_report = {
                "rows_processed": self.nrows,
                "rows_with_issues": len(rows_with_issues),
                "upload_issues": upload_issues,
            }
            return upload_report
|
# Evaluate the fine-tuned language model on the WikiText-103 validation set,
# applying the low-PMI-first sentence-shuffling augmentation and scoring only
# the penultimate sixth of each example (batch size 1, incomplete last batch
# dropped).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-SS-N-VB/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-SS-N-VB/7-1024+0+512-LMPI-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_within_sentences_low_pmi_first_two_thirds_sixth --eval_function penultimate_sixth_eval
/// <summary>
/// Lightweight DTO pairing an order identifier with a free-text note.
/// </summary>
public class OrderNoteDetail
{
    /// <summary>Identifier of the order this note belongs to.</summary>
    public string OrderId { get; set; }

    /// <summary>Free-text note content.</summary>
    public string Note { get; set; }
}
/// <summary>
/// Immutable wrapper around an <see cref="OrderNoteDetail"/> that formats
/// the acknowledgement and posting messages for that note.
/// </summary>
public class AcknowledgeAndPostResponse
{
    /// <summary>The order note this response reports on.</summary>
    public OrderNoteDetail OrderNote { get; }

    public AcknowledgeAndPostResponse(OrderNoteDetail orderNote) =>
        OrderNote = orderNote;

    /// <summary>Human-readable acknowledgement line for the wrapped note.</summary>
    public string Acknowledge() =>
        $"Order {OrderNote.OrderId} acknowledged: {OrderNote.Note}";

    /// <summary>Human-readable confirmation that the note was posted.</summary>
    public string Post() =>
        $"Posted order note {OrderNote.Note} for order {OrderNote.OrderId}";
}
<gh_stars>1-10
require 'open-uri'
require 'hpricot'
# Scrapes the AGA club-listing HTML page into plain Ruby hashes.
# Each club row yields a hash with :is_aga?, :name, :city, optional :url,
# :contacts (array of {:name, :email, :phone}), :info, optional :address
# and the :state inherited from the most recent state header row.
class ClubScraper
  # True when the row's first cell contains the AGA logo image.
  # Unexpected images or non-blank cells are logged and treated as false.
  def self.is_aga?(element)
    img = element.at('img')
    if (img)
      if (img['src'] =~ /agalogo/)
        true
      else
        logger.warn("is_aga?: Unrecognized image src attribute #{img['src']}")
        false
      end
    else
      unless (element.inner_html.strip == ' ')
        logger.warn("is_aga?: Unrecognized inner html #{element.inner_html}")
      end
      false
    end
  end

  # Extracts :name, :city and optional :url from the second cell.
  # Small state machine over the cell's children: the name comes first
  # (either bare text or an <a>), then a <br>, then the city text.
  def self.get_club_name_city_url(element)
    need_name = true
    retval = {}
    state = :find_name
    element.children.each do |child|
      case(state)
      when :find_name
        if (child.text?)
          unless (child.to_s.strip.blank?)
            retval[:name] = child.to_s.strip
            state = :find_br
          end
        elsif (child.elem?)
          if (child.name == 'a')
            retval[:url] = child['href']
            retval[:name] = child.inner_text.strip
            state = :find_br
          elsif (child.name == 'br')
            state = :find_city
          end
        end
      when :find_br
        if (child.elem? && child.name == 'br')
          state = :find_city
        end
      when :find_city
        retval[:city] = child.to_s.strip unless child.to_s.strip.blank?
      end
    end
    if !retval[:name]
      logger.warn("club is missing name: #{element.inspect}")
    elsif !retval[:city]
      logger.warn("club is missing city: #{element.inspect}")
    end
    retval
  end

  # Ok, so it's not thread safe because state is stored at the class level,
  # but that should be ok. How many threads will be scraping at once?
  cattr_accessor :first_contact
  cattr_accessor :current_contact
  cattr_accessor :contacts

  # Parses the contacts cell into an array of contact hashes.
  # Text fragments are classified as email (contains '@'), phone number
  # (digits/punctuation with optional type word) or a person's name;
  # a new name starts a new contact entry.
  def self.get_club_contacts(element)
    self.current_contact = {}
    self.contacts = [current_contact]
    self.first_contact = true
    element.children.each do |child|
      if (child.text?)
        text = child.inner_text.strip
        next if text.blank?
        if (text =~ /@/)
          # email address (occasionally a raw mailto anchor leaks through
          # as text, hence the anchor-shaped fallback pattern)
          if (text =~ %r{^<a href="mailto:([-0-9a-zA-Z_@.]+)"([a-zA-Z ]+)})
            self.current_contact[:email] = $1
            self.current_contact[:name] = $2
          else
            self.current_contact[:email] = text
          end
          validate_email(text)
        elsif (text =~ /^(([a-z]+):?\s+)?([-()+ 0-9]+)(\s+([a-z]+))?$/i)
          # phone number, with the optional type word before or after it
          phone_number = {}
          phone_number[:number] = $3
          phone_number[:type] = $2.downcase if $2
          phone_number[:type] = $5.downcase if $5
          current_contact[:phone] ||= []
          current_contact[:phone] << phone_number
        else
          # name
          new_contact
          validate_name(text)
          self.current_contact[:name] = text
        end
      else
        next if child.bogusetag?
        case child.name
        when 'a'
          # hyperlinked name - starts a new contact
          self.new_contact
          if child[:href] =~ /:/
            email = child[:href].split(/:/)[1]
          elsif child[:href] =~ /^mailto(.*)$/
            email = $1
          else
            email = child[:href]
          end
          email.strip!
          validate_email(email)
          name = child.inner_text.strip
          validate_name(name)
          self.current_contact[:email] = email
          self.current_contact[:name] = name
        when 'br'
          next
        end
      end
    end
    # Remove empty hash if present
    self.contacts = [] if self.contacts[0].empty?
    contacts
  end

  # Returns {:info => <plain text>, :address => <street address if found>}.
  # The address heuristic looks for "number + words + street-type suffix"
  # and discards matches that are really times ("7 pm").
  def self.get_club_info(element)
    info = element.to_plain_text.gsub(/\s*\n+\s*/, "\n")
    address = nil
    element.children.each do |child|
      if (child.text?)
        tmp = child.inner_text.strip
        if !address && tmp =~ /([0-9]+[ \t]+[a-z0-9\. \t]+([ \t]+(lane|ln|street|st|avenue|ave|blvd|bl|boulevard|road|rd|place|pl|square|sq|court|ct|drive|dr|highway|hwy|parkway|pkwy))?\.?)/i
          address = $1
          # Handle case of matching time string
          address = nil if address =~ /^[0-9]+ (am|pm)$/i
        end
      end
    end
    retval = {:info => info}
    retval[:address] = address if address
    retval
  end

  # Finds the club listing table: the first <table> after the
  # <a name="listing"> anchor.
  def self.get_club_table(element, state=:find_anchor)
    element.search('*').each do |child|
      if child.elem?
        case state
        when :find_anchor
          if child.name == 'a' && child[:name] == 'listing'
            state = :find_table
          end
        when :find_table
          if child.name == 'table'
            return child
          end
        end
      end
    end
  end

  # Builds one club hash from a table row (cells: logo, name/city,
  # contacts, info).
  def self.get_club_from_row(row)
    returning({}) do |club|
      cells = row.search('td')
      club[:is_aga?] = is_aga?(cells[0])
      add_hash(club, get_club_name_city_url(cells[1]))
      club[:contacts] = get_club_contacts(cells[2])
      add_hash(club, get_club_info(cells[3]))
    end
  end

  # Extracts the state name from a state header row; blank or "Overseas"
  # collapse to the sentinel "XX".
  def self.get_state_from_row(row)
    returning({}) do |retval|
      cell = row.at('td')
      state = cell.inner_text.strip
      if (state.blank? || state == "Overseas")
        state = "XX"
      end
      retval[:state] = state
    end
  end

  # State header rows are marked by a silver background in the source page.
  def self.is_state_row?(row)
    row[:bgcolor] == 'silver'
  end

  # Walks the listing table, tracking the current state header, and yields
  # each club hash (with :state merged in) to the caller's block.
  def self.process_table(table)
    first_row = true
    state = {:state => "XX"}
    table.search('/tr').each do |row|
      # Skip the header row
      if first_row
        first_row = false
        next
      end
      if is_state_row?(row)
        state = get_state_from_row(row)
      else
        club = get_club_from_row(row)
        yield add_hash(club, state)
      end
    end
  end

  # Convenience entry point: fetch, locate the table and yield every club.
  def self.get_clubs_from_url(url, &block)
    table = get_table_from_url(url)
    process_table(table, &block)
  end

  def self.get_table_from_url(url)
    page = get_url(url)
    get_club_table(page)
  end

  private

  # Fetches the URL and parses it with Hpricot.
  def self.get_url(url)
    open(url) do |file|
      Hpricot(file)
    end
  end

  # Merges +new+ into +original+ in place and returns +original+.
  def self.add_hash(original, new)
    new.each_pair do |key, value|
      original[key] = value
    end
    original
  end

  def self.validate_name(name)
    logger.warn("Unexpected name format: #{name}") unless name =~ /^[a-z\s]+$/i
  end

  def self.validate_email(email)
    logger.warn("Bad email address format: #{email}") unless email =~ /[a-z0-9_.]+@[a-z0-9_.]+/i
  end

  # Starts a fresh contact hash unless we are still filling the first one.
  def self.new_contact
    unless first_contact
      self.current_contact = {}
      self.contacts << self.current_contact
    end
    self.first_contact = false
  end

  def self.logger
    RAILS_DEFAULT_LOGGER
  end
end
|
#!/bin/bash
# Regression driver for the openMSP430 peripheral-template testbenches:
# runs both template tests under the chosen simulator and parses the logs.
source ../../../../settings64_vivado.sh

# Disable waveform dumping (faster simulation, no .vcd output)
OMSP_NODUMP=1
export OMSP_NODUMP

# Choose simulator:
# - iverilog : Icarus Verilog (default)
# - msim : ModelSim
# - xsim : Xilinx Simulator
OMSP_SIMULATOR=xsim
export OMSP_SIMULATOR

# Start from a clean slate. mkdir -p keeps the script from emitting a
# "File exists" error when ./log survived a previous run (only its *.log
# contents are removed above).
rm -rf ./cov_work
rm -rf ./log/*.log
mkdir -p ./log

# Peripheral templates test patterns
../bin/msp430sim.sh template_periph_8b | tee ./log/template_periph_8b.log
../bin/msp430sim.sh template_periph_16b | tee ./log/template_periph_16b.log

# Report regression results
../bin/parse.results.sh
|
<filename>scraps/rconf.rb
# Debug scrap: parse a resolv.conf-style file (path given as the first CLI
# argument) and pretty-print the resulting Resolv::DNS::Config internals.
require 'resolv'
require 'pp'

conf = Resolv::DNS::Config.new(ARGV.first)
# Config is lazy by default; force it to read and parse the file now so
# the pp below shows the populated state.
conf.lazy_initialize
pp conf
|
<gh_stars>0
import { Tooltip } from 'antd';
import React from 'react';
import humanFileSize from 'utils/human-file-size';
import Styled from './styles';
import { Attachment, AttachmentFile, Status } from './types';
import { FilePdfOutlined, DeleteOutlined } from '@ant-design/icons';
/**
 * Maps a MIME type to the Font Awesome icon class used in attachment
 * lists: a video icon for any type containing "video", a generic file
 * icon for everything else.
 */
export const getFileIcon = (fileType: string): string =>
  fileType.includes('video') ? 'fa-file-video' : 'fa-file-alt';
const getFileName = (name: string) =>
name.length >= 50 ? `${name.substr(0, 20)} ... ${name.substr(-10)}` : name;
interface Props {
  // Attachment to render; may be an already-uploaded Attachment (has url)
  // or a local file still being uploaded.
  file: AttachmentFile;
  // Invoked with the file when the delete icon is clicked.
  onRemove: (file: AttachmentFile) => void;
  // Upload status; forwarded to the styled name/size for status styling.
  status?: Status;
}

/**
 * Single row in the attachment list: PDF icon, (possibly truncated) file
 * name — linked for download when the attachment has a url — human-readable
 * size, and a delete icon. The full name is shown in a tooltip.
 */
const NormalItem = ({ file, onRemove, status, ...props }: Props) => {
  // stopPropagation keeps the click from triggering the row/container
  // click handler before removing the file.
  const handleOnRemove = (file: AttachmentFile) => e => {
    e.stopPropagation();
    onRemove(file);
  };
  return (
    <Tooltip {...props} title={file.name} placement="leftBottom">
      <Styled.FileContainer>
        <FilePdfOutlined style={{ marginRight: '0.5rem', fontSize: 24 }} />
        <Styled.FileName status={status}>
          {(file as Attachment).url ? (
            <a
              href={(file as Attachment).url}
              download={file.name}
              target="_blank"
              rel="noreferrer"
            >
              {getFileName(file.name)}
            </a>
          ) : (
            getFileName(file.name)
          )}
        </Styled.FileName>
        <Styled.FileSize status={status}>
          {' - '}
          {file.size && humanFileSize(file.size, true)}
        </Styled.FileSize>
        <DeleteOutlined
          style={{ marginLeft: '0.5rem', fontSize: 16 }}
          className="file__remove"
          onClick={handleOnRemove(file)}
        />
      </Styled.FileContainer>
    </Tooltip>
  );
};

export default NormalItem;
|
#!/usr/bin/env bash
echo ${1##.*}
echo ${1##*.}
echo ${1%%.*}
echo ${1%%*.}
if [ "${1##*.}" == "tar" ]
then
echo "This is a tar file"
else
echo "This is not a tar file"
fi
|
import React, { Component } from "react";
import styled from "styled-components";
// Loading Modules
export default class ChatInput extends Component {
render() {
return (
<Container>
{this.props.choices.length > 0 ? (
<BtnsContainer>
{this.props.choices.map((choice, index) => (
<StyledButton
key={index}
onClick={() => {
console.log(choice);
this.props.onMsgUpdate(choice);
this.props.onSendClick(choice, this.props.context);
}}
>
{" "}
{choice}{" "}
</StyledButton>
))}
</BtnsContainer>
) : (
<InputContainer>
<InputField
value={this.props.msg}
onKeyPress={e => {
if (e.charCode === 13) {
this.props.onSendClick(this.props.msg, this.props.context);
}
}}
onChange={e => {
this.props.onMsgUpdate(e.target.value);
}}
autofocus
/>
<SubmitButton
onClick={() => {
this.props.onSendClick(this.props.msg, this.props.context);
}}
>
{" "}
Send{" "}
</SubmitButton>
</InputContainer>
)}
</Container>
);
}
}
// Outer wrapper for the whole input area (fixed 4em-high strip).
const Container = styled.div`
  box-sizing: border-box;
  width: 97%;
  height: 4em;
  padding-right: 10px;
  padding-left: 10px;
  margin-top: 20px;
  margin-bottom: 5px;
`;

// Horizontal row holding the text field and the Send button.
const InputContainer = styled.div`
  box-sizing: border-box;
  width: 100%;
  display: flex;
  padding-right: 10px;
  padding-left: 10px;
  flex-direction: row;
  position: relative;
`;

// The free-text field; direction rtl — presumably the UI is for a
// right-to-left language.
const InputField = styled.input`
  border: none;
  box-shadow: 0px 0px 10px rgba(0, 0, 0, 0.6);
  width: 80%;
  height: 2.5rem;
  padding: 10px;
  font-size: 1.5em;
  border-radius: 10px;
  direction: rtl;
`;

// The blue "Send" button next to the text field.
const SubmitButton = styled.button`
  border: none;
  border-radius: 10px;
  background-color: #3d70b2;
  width: 20%;
  margin-left: 10px;
  font-size: 1.5rem;
  color: white;
`;

// Row of quick-reply buttons, evenly spaced.
const BtnsContainer = styled.div`
  box-sizing: border-box;
  width: 97%;
  height: 100%;
  display: flex;
  padding-right: 10px;
  padding-left: 10px;
  flex-direction: row;
  position: relative;
  justify-content: space-evenly;
`;

// A single orange quick-reply button.
const StyledButton = styled.button`
  border: none;
  border-radius: 10px;
  background-color: #ff8042;
  width: 20%;
  height: 2.5em;
  margin-left: 10px;
  font-size: 1.5rem;
  color: white;
`;
|
"use strict";
// TypeScript-compiler (tsc) helper: drives a generator produced from an
// `async` function so `yield` behaves like `await`. Emitted boilerplate —
// do not edit by hand.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// tsc helper: normalizes CommonJS / ES-module default-export interop.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const axios_1 = __importDefault(require("axios"));
const cf_response_1 = require("./cf-response");
/**
 * Thin axios-based client for the Cloudflare v4 REST API.
 * Supports three mutually exclusive auth schemes: bearer token,
 * authKey + authEmail, or a user service key.
 */
class API {
    constructor(authOptions) {
        this.baseUrl = 'https://api.cloudflare.com/client/v4/';
        this.token = authOptions === null || authOptions === void 0 ? void 0 : authOptions.token;
        this.authKey = authOptions === null || authOptions === void 0 ? void 0 : authOptions.authKey;
        this.authEmail = authOptions === null || authOptions === void 0 ? void 0 : authOptions.authEmail;
        this.userServiceKey = authOptions === null || authOptions === void 0 ? void 0 : authOptions.userServiceKey;
        this.axios = axios_1.default;
    }
    // The axios instance is swappable (e.g. for mocking in tests).
    set axios(axios) {
        this._axios = axios;
    }
    get axios() {
        return this._axios;
    }
    /**
     * Merge the per-request config over the default base URL, JSON content
     * type and authentication headers.
     * @throws Error when no authentication parameters were provided.
     */
    getAxiosConfig(config) {
        const configDefault = {
            baseURL: this.baseUrl,
            headers: {
                'Content-Type': 'application/json',
            },
        };
        if (this.token !== undefined) {
            configDefault.headers['Authorization'] = `Bearer ${this.token}`;
        }
        else if (this.authKey !== undefined && this.authEmail !== undefined) {
            configDefault.headers['X-Auth-Key'] = this.authKey;
            configDefault.headers['X-Auth-Email'] = this.authEmail;
        }
        else if (this.userServiceKey !== undefined) {
            configDefault.headers['X-Auth-User-Service-Key'] = this.userServiceKey;
        }
        else {
            throw new Error('Please provide one of the authentication parameters.');
        }
        return Object.assign({}, configDefault, config);
    }
    /**
     * Perform the HTTP request and wrap the body in a CloudflareResponse.
     * HTTP-level errors (4xx/5xx) are returned as CloudflareResponse too.
     */
    request(config) {
        return __awaiter(this, void 0, void 0, function* () {
            try {
                const response = yield this.axios.request(config);
                return new cf_response_1.CloudflareResponse(response.data);
            }
            catch (e) {
                // BUG FIX: only HTTP errors carry `e.response`; network or
                // timeout failures previously crashed here with a TypeError
                // ("Cannot read property 'data' of undefined") that masked
                // the original error. Rethrow those instead.
                if (e.response !== undefined) {
                    return new cf_response_1.CloudflareResponse(e.response.data);
                }
                throw e;
            }
        });
    }
    get(endpoint, data) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request(this.getAxiosConfig({
                method: 'get',
                url: endpoint,
                data: data,
            }));
        });
    }
    patch(endpoint, data) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request(this.getAxiosConfig({
                method: 'patch',
                url: endpoint,
                data: data,
            }));
        });
    }
    post(endpoint, data) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request(this.getAxiosConfig({
                method: 'post',
                url: endpoint,
                data: data,
            }));
        });
    }
    put(endpoint, data) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request(this.getAxiosConfig({
                method: 'put',
                url: endpoint,
                data: data,
            }));
        });
    }
    delete(endpoint) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request(this.getAxiosConfig({
                method: 'delete',
                url: endpoint,
            }));
        });
    }
}
exports.API = API;
|
// Renders the tag cloud once the DOM is ready.
// FIX: `$().ready(handler)` is deprecated since jQuery 3.0 — attach the
// handler to the document instead (behavior is identical).
$(document).ready(function () {
  // you can use own color converting function if you want
  var my_color = d3.scaleOrdinal(d3.schemeCategory10);
  // Each word in the cloud links to its tag page.
  var href_func = function (d) { return "/Tag/List?name=" + d.text; };
  // Size the SVG to the containing card so the cloud fits the layout.
  // makeWordCloud(data, href_func, target selector, width, height, svg class, font-family, rotate, color fn)
  var width = document.getElementById('tag-card').offsetWidth;
  makeWordCloud(data, href_func, ".card-body", width, 300, "tag-cloud-img", "Impact", false, my_color);
  // [ svg class, font-family, rotate texts or not, color function ] are optional.
  // the simplest way => window.makeWordCloud(data, "body", 500)
});
|
<reponame>yamanGithub/image_capture
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
// Holder for webcam-mirroring configuration. Generated by tsickle from the
// TypeScript source — do not edit by hand.
var WebcamMirrorProperties = /** @class */ (function () {
    function WebcamMirrorProperties() {
    }
    return WebcamMirrorProperties;
}());
export { WebcamMirrorProperties };
// tsickle emits property declarations inside a dead `if (false)` branch
// purely for the Closure type checker; this block never executes.
if (false) {
    /** @type {?} */
    WebcamMirrorProperties.prototype.x;
}
//# sourceMappingURL=data:application/json;base64,<KEY> |
<reponame>xcorail/OTB<filename>Modules/Learning/DimensionalityReductionLearning/test/otbPCAModelTest.cxx
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "otbPCAModel.h"
#include "otbReadDataFile.h"
// Convenience aliases for the PCA dimensionality-reduction model under test.
typedef otb::PCAModel<double> PCAModelType;
typedef PCAModelType::InputListSampleType InputListSampleType;
typedef PCAModelType::TargetListSampleType TargetListSampleType;
/**
 * Test driver: checks that a PCA model file on disk is recognized as
 * readable by PCAModelType::CanReadFile().
 * argv[1] is the model file path.
 */
int otbPCAModelCanRead(int argc, char * argv [])
{
  if (argc < 2)
  {
    std::cerr << "Usage: " << argv[0] << " <model>" << std::endl;
    return EXIT_FAILURE;
  }
  const std::string filename(argv[1]);
  PCAModelType::Pointer model = PCAModelType::New();
  if (!model->CanReadFile(filename))
  {
    std::cerr << "Failed to read model file : " << filename << std::endl;
    return EXIT_FAILURE;
  }
  return EXIT_SUCCESS;
}
/**
 * Test driver: trains a 14-dimensional PCA model on the letter.scale
 * sample file (argv[1]) and saves it to argv[2].
 * NOTE: function name keeps the historical "Mode" (sic) spelling because
 * the test registration elsewhere refers to it by this exact name.
 */
int otbPCAModeTrain(int argc, char * argv [])
{
  if (argc < 3)
  {
    std::cerr << "Usage: " << argv[0] << " letter.scale model.out" << std::endl;
    return EXIT_FAILURE;
  }
  // Extract data from letter.scale
  InputListSampleType::Pointer samples = InputListSampleType::New();
  TargetListSampleType::Pointer target = TargetListSampleType::New();
  if (!otb::ReadDataFile(argv[1], samples, target))
  {
    // FIX: report the failure on stderr (was std::cout), consistent with
    // the other error paths in this file.
    std::cerr << "Failed to read samples file " << argv[1] << std::endl;
    return EXIT_FAILURE;
  }
  PCAModelType::Pointer model = PCAModelType::New();
  model->SetDimension(14);
  model->SetWriteEigenvectors(true);
  model->SetInputListSample(samples);
  model->Train();
  model->Save(std::string(argv[2]));
  return EXIT_SUCCESS;
}
|
/* global io */
/* global $ */
// Browser client for the webcam monitoring UI: receives JPEG frames and
// config/clients/alarm updates over socket.io, and pushes quality/alert
// setting changes back to the server.
$(document).ready(function()
{
    //disable tooltips for touch-enabled screens
    if(!('ontouchstart' in document.documentElement))
        $('[data-toggle="tooltip"]').tooltip({container: 'body'});
    //properties & ui objects mappings
    var ui = {
        quality480p : $("#quality-480p"),
        quality720p : $("#quality-720p"),
        quality1080p : $("#quality-1080p"),
        alertMode : $("#alert-mode"),
        imgContainer : $("#img-container"),
        img : $("#image-view"),
        imgPreloader : $("#image-preloader"),
        imgTimestamp : $("#timestamp"),
        clientsList : $("#clients"),
        clientsCount : $("#clients-count"),
    },
    appConfig = {},
    appClients = [],
    socket = io.connect();
    //notify server of connection
    socket.emit('connected');
    //get new image & update view
    socket.on('refresh view', function(imageData) {
        var arrayBuffer = new Uint8Array(imageData.data);
        var blob = new Blob([arrayBuffer], {type: "image/jpeg"});
        var urlCreator = window.URL || window.webkitURL;
        // NOTE(review): a new object URL is created per frame and never
        // revoked — long sessions leak memory. Consider calling
        // urlCreator.revokeObjectURL() on the previous URL once the new
        // frame has loaded.
        var imageUrl = urlCreator.createObjectURL(blob);
        //add image data to hidden preloader image, to avoid flicker
        //after it is preloaded, it is sent to the visible img
        ui.imgPreloader.attr('src', imageUrl);
        ui.imgTimestamp.html(imageData.timestamp);
    });
    // FIX: the .load(handler) shorthand was deprecated in jQuery 1.8 and
    // removed in jQuery 3 — bind the event via .on('load', ...) instead.
    ui.imgPreloader.on('load', function() {
        ui.img.attr('src',ui.imgPreloader.attr('src'));
    });
    //set local settings with values received from server
    socket.on('update config', function(newConfig) {
        appConfig = newConfig;
        //reset ui button inset (selected) effect for quality control buttons
        ui.quality480p.parent().removeClass("active");
        ui.quality720p.parent().removeClass("active");
        ui.quality1080p.parent().removeClass("active");
        //update ui based on values from new config
        switch(appConfig.monitoring.quality) {
            case "640x480":
                ui.quality480p.prop("checked", true);
                ui.quality480p.parent().addClass("active");
                break;
            case "1280x720":
                ui.quality720p.prop("checked", true);
                ui.quality720p.parent().addClass("active");
                break;
            case "1920x1080":
                ui.quality1080p.prop("checked", true);
                ui.quality1080p.parent().addClass("active");
                break;
        }
        ui.alertMode.prop("checked", appConfig.monitoring.alert);
    });
    //update app clients list with new items received from server
    socket.on('update clients', function(serverClients) {
        //update app clients count badge
        appClients = serverClients;
        ui.clientsCount.html(serverClients.length);
        //update app clients list
        ui.clientsList.empty();
        appClients.forEach(function(item) {
            ui.clientsList.append('<li><a href="#">' + item + '</a></li>');
        });
    });
    //update app clients list with received Alarm state
    socket.on('alarm', function(state) {
        // FIX: strict comparison instead of loose `state==true`.
        if(state === true)
            ui.alertMode.parent(".btn").addClass("alarm");
        else
            ui.alertMode.parent(".btn").removeClass("alarm");
    });
    //update quality client config object with values from associated ui objects
    function ConfigUpdateQuality(newQuality, newFps) {
        appConfig.monitoring.quality = newQuality;
        appConfig.monitoring.fps = newFps;
        //send to server new config settings
        socket.emit('update config quality', appConfig);
    }
    //update alert client config object with values from associated ui object
    function ConfigUpdateAlert() {
        appConfig.monitoring.alert = ui.alertMode.prop('checked');
        socket.emit('update config alert', appConfig);
    }
    //bind ui objects to function associated with config settings update
    ui.alertMode.click(function(){ ConfigUpdateAlert(); });
    ui.quality480p.change(function(){ ConfigUpdateQuality("640x480",25); });
    ui.quality720p.change(function(){ ConfigUpdateQuality("1280x720",15); });
    ui.quality1080p.change(function(){ ConfigUpdateQuality("1920x1080",5); });
});
|
package com.samus.freya.helper;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.SparseArray;
import java.util.ArrayList;
import java.util.List;
import com.samus.freya.model.*;
/**
* Created by samus on 10.11.2016.
* TODO maybe make singleton
*/
public class DBHelper extends SQLiteOpenHelper {
    // General database information
    private static final String DATABASE_NAME = "MyDBName.db";
    private static final int DATABASE_VERSION = 1;
    // Table names
    private static final String TABLE_CONTACT = "contacts";
    private static final String TABLE_MONTH = "month";
    private static final String TABLE_DAY = "day";
    private static final String TABLE_SERVICE = "services";
    private static final String TABLE_MONTH_CONTACT = "month_contact";
    private static final String TABLE_MONTH_SERVICE = "month_service";
    private static final String TABLE_DAY_SERVICE = "day_service";
    // CONTACTS table - column names
    private static final String CONTACTS_COLUMN_ID = "id";
    private static final String CONTACTS_COLUMN_NAME = "name";
    // "wh" is a REAL column (see CREATE_TABLE_CONTACTS); presumably work hours — TODO confirm
    private static final String CONTACTS_COLUMN_WH = "wh";
    private static final String CONTACTS_COLUMN_ENABLED = "enabled";
    // MONTH table - column names
    private static final String MONTH_COLUMN_ID = "id";
    private static final String MONTH_COLUMN_YEAR = "year";
    private static final String MONTH_COLUMN_MONTH = "month";
    // NOTE(review): FULL is an SQL keyword — confirm SQLite accepts "full"
    // unquoted as a column name in CREATE_TABLE_MONTH.
    private static final String MONTH_COLUMN_FULL = "full";
    // DAY table - column names
    private static final String DAY_COLUMN_ID = "id";
    private static final String DAY_COLUMN_MONTH = "month_id";
    private static final String DAY_COLUMN_DATE = "date";
    // SERVICES table - column names
    private static final String SERVICES_COLUMN_ID = "id";
    // NOTE(review): DESC is an SQL keyword — confirm SQLite accepts "desc"
    // unquoted as a column name in CREATE_TABLE_SERVICES.
    private static final String SERVICES_COLUMN_DESC = "desc";
    private static final String SERVICES_COLUMN_VAL = "val";
    private static final String SERVICES_COLUMN_DEF = "def";
    private static final String SERVICES_COLUMN_SPE = "spe";
    private static final String SERVICES_COLUMN_ENA = "ena";
    // MONTH_CONTACT - column names
    private static final String MC_COLUMN_MONTH = "month_id";
    private static final String MC_COLUMN_CONTACT = "contact_id";
    private static final String MC_COLUMN_REQ = "req";
    // MONTH_SERVICE - column names
    private static final String MS_COLUMN_MONTH = "month_id";
    private static final String MS_COLUMN_SERVICE = "service_id";
    // DAY_SERVICE - column names
    private static final String DS_COLUMN_DAY = "day_id";
    private static final String DS_COLUMN_SERVICE = "service_id";
    private static final String DS_COLUMN_CONTACT = "contact_id";
    // Table create statements
    // contacts(id PK, name TEXT, wh REAL, enabled INTEGER)
    private static final String CREATE_TABLE_CONTACTS = "create table "
            + TABLE_CONTACT + "( " + CONTACTS_COLUMN_ID
            + " integer primary key autoincrement, " + CONTACTS_COLUMN_NAME
            + " text not null, " + CONTACTS_COLUMN_WH
            + " real not null, " + CONTACTS_COLUMN_ENABLED
            + " integer not null);";
    // month(id PK, year INTEGER, month INTEGER, full INTEGER)
    private static final String CREATE_TABLE_MONTH = "create table "
            + TABLE_MONTH + " ( " + MONTH_COLUMN_ID
            + " integer primary key autoincrement, " + MONTH_COLUMN_YEAR
            + " integer not null, " + MONTH_COLUMN_MONTH
            + " integer not null, " + MONTH_COLUMN_FULL
            + " integer not null);";
    // day(id PK, month_id FK -> month ON DELETE CASCADE, date INTEGER)
    private static final String CREATE_TABLE_DAY = "create table "
            + TABLE_DAY + " ( " + DAY_COLUMN_ID
            + " integer primary key autoincrement, " + DAY_COLUMN_MONTH
            + " integer, " + DAY_COLUMN_DATE
            + " integer not null, foreign key (" + DAY_COLUMN_MONTH
            + ") references " + TABLE_MONTH + "("
            + MONTH_COLUMN_ID + ") on delete cascade);";
    // services(id PK, desc TEXT, def INTEGER, val INTEGER, ena INTEGER, spe FLOAT)
    private static final String CREATE_TABLE_SERVICES = "create table "
            + TABLE_SERVICE + " ( " + SERVICES_COLUMN_ID
            + " integer primary key autoincrement, " + SERVICES_COLUMN_DESC
            + " text not null, " + SERVICES_COLUMN_DEF
            + " integer not null, " + SERVICES_COLUMN_VAL
            + " integer not null, " + SERVICES_COLUMN_ENA
            + " integer not null, " + SERVICES_COLUMN_SPE
            + " float not null);";
    // month_contact: M:N link month<->contact plus required hours ("req")
    private static final String CREATE_TABLE_MC = "create table "
            + TABLE_MONTH_CONTACT + " ( " + MC_COLUMN_REQ
            + " real not null, " + MC_COLUMN_MONTH
            + " integer, " + MC_COLUMN_CONTACT
            + " integer, foreign key (" + MC_COLUMN_MONTH
            + ") references " + TABLE_MONTH + "("
            + MONTH_COLUMN_ID + ") on delete cascade, foreign key (" + MC_COLUMN_CONTACT
            + ") references " + TABLE_CONTACT + "("
            + CONTACTS_COLUMN_ID + ") on delete cascade, primary key ("
            + MC_COLUMN_MONTH + "," + MC_COLUMN_CONTACT + "));";
    // month_service: M:N link month<->service
    private static final String CREATE_TABLE_MS = "create table "
            + TABLE_MONTH_SERVICE + " ( " + MS_COLUMN_MONTH
            + " integer, " + MS_COLUMN_SERVICE
            + " integer, foreign key (" + MS_COLUMN_MONTH
            + ") references " + TABLE_MONTH + "("
            + MONTH_COLUMN_ID + ") on delete cascade, foreign key (" + MS_COLUMN_SERVICE
            + ") references " + TABLE_SERVICE + "("
            + SERVICES_COLUMN_ID + ") on delete cascade, primary key ("
            + MS_COLUMN_MONTH + "," + MS_COLUMN_SERVICE + "));";
    // day_service: ternary link day<->contact<->service (who does what on a day)
    private static final String CREATE_TABLE_DS = "create table "
            + TABLE_DAY_SERVICE + " ( " + DS_COLUMN_DAY
            + " integer not null, " + DS_COLUMN_CONTACT
            + " integer not null, " + DS_COLUMN_SERVICE
            + " integer not null, foreign key (" + DS_COLUMN_DAY
            + ") references " + TABLE_DAY + "("
            + DAY_COLUMN_ID + ") on delete cascade, foreign key (" + DS_COLUMN_CONTACT
            + ") references " + TABLE_CONTACT + "("
            + CONTACTS_COLUMN_ID + ") on delete cascade, foreign key (" + DS_COLUMN_SERVICE
            + ") references " + TABLE_SERVICE + "("
            + SERVICES_COLUMN_ID + ") on delete cascade, primary key ("
            + DS_COLUMN_DAY + "," + DS_COLUMN_CONTACT
            + "," + DS_COLUMN_SERVICE + "));";
    // Constructor — delegates name/version handling to SQLiteOpenHelper.
    public DBHelper(Context context) { super(context, DATABASE_NAME, null, DATABASE_VERSION); }
@Override
public void onCreate(SQLiteDatabase db) {
// creating required tables
db.execSQL(CREATE_TABLE_CONTACTS);
db.execSQL(CREATE_TABLE_MONTH);
db.execSQL(CREATE_TABLE_DAY);
db.execSQL(CREATE_TABLE_SERVICES);
db.execSQL(CREATE_TABLE_MC);
db.execSQL(CREATE_TABLE_MS);
db.execSQL(CREATE_TABLE_DS);
}
    @Override
    public void onConfigure(SQLiteDatabase db){
        // Enforce the ON DELETE CASCADE foreign keys declared in the schema
        // (SQLite has them disabled by default).
        db.setForeignKeyConstraintsEnabled(true);
    }
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
// on upgrade drop older tables
db.execSQL("DROP TABLE IF EXISTS " + TABLE_CONTACT);
db.execSQL("DROP TABLE IF EXISTS " + TABLE_MONTH);
db.execSQL("DROP TABLE IF EXISTS " + TABLE_DAY);
db.execSQL("DROP TABLE IF EXISTS " + TABLE_SERVICE);
db.execSQL("DROP TABLE IF EXISTS " + TABLE_MONTH_CONTACT);
db.execSQL("DROP TABLE IF EXISTS " + TABLE_MONTH_SERVICE);
db.execSQL("DROP TABLE IF ECISTS " + TABLE_DAY_SERVICE);
// create new tables
onCreate(db);
}
public void resetDB() {
// delete every entry from all tables
SQLiteDatabase db = this.getWritableDatabase();
db.delete(TABLE_MONTH, null, null);
db.delete(TABLE_CONTACT, null, null);
db.delete(TABLE_SERVICE, null, null);
}
// ---------------------- contacts table methods ------------------------//
/**
* Inserting a contact into db
* @param contact Contact object
* @return id if succesfull, -1 if exception
*/
public int insertContact(Contact contact) {
SQLiteDatabase db = this.getWritableDatabase();
int contact_id = -1;
try {
db.beginTransaction();
ContentValues contentValues = new ContentValues();
contentValues.put(CONTACTS_COLUMN_NAME, contact.getName());
contentValues.put(CONTACTS_COLUMN_WH, contact.getWh());
contentValues.put(CONTACTS_COLUMN_ENABLED, contact.getEnabled());
// insert row
contact_id = (int) db.insert(TABLE_CONTACT, null, contentValues);
db.setTransactionSuccessful();
}
catch (Exception ex) {
db.close();
}
finally {
db.endTransaction();
}
db.close();
return contact_id;
}
/**
* Get a single Contact from db
* @param contact_id ID required for finding the contact in db
* @return Contact object
*/
public Contact getContact(int contact_id) {
SQLiteDatabase db = this.getReadableDatabase();
String selectQuery = "SELECT * FROM " + TABLE_CONTACT +
" WHERE " + CONTACTS_COLUMN_ID + " = " + contact_id;
Cursor c = db.rawQuery(selectQuery, null);
if (c != null)
c.moveToFirst();
Contact con = new Contact();
con.setId(c.getInt(c.getColumnIndex(CONTACTS_COLUMN_ID)));
con.setName(c.getString(c.getColumnIndex(CONTACTS_COLUMN_NAME)));
con.setWh(c.getFloat(c.getColumnIndex(CONTACTS_COLUMN_WH)));
con.setEnabled(c.getInt(c.getColumnIndex(CONTACTS_COLUMN_ENABLED)));
c.close();
db.close();
return con;
}
/**
* Change an existing contact
* @param con Contact object
* @return 1 if successfull
*/
public int updateContact(Contact con) {
SQLiteDatabase db = this.getWritableDatabase();
ContentValues values = new ContentValues();
values.put(CONTACTS_COLUMN_NAME, con.getName());
values.put(CONTACTS_COLUMN_WH, con.getWh());
values.put(CONTACTS_COLUMN_ENABLED, con.getEnabled());
// updating row
return db.update(TABLE_CONTACT, values, CONTACTS_COLUMN_ID + " = ?",
new String[] { String.valueOf(con.getId()) });
}
/**
* Delete a contact from the db
* @param contact_id contact id
*/
public void deleteContact(int contact_id) {
SQLiteDatabase db = this.getWritableDatabase();
db.delete(TABLE_CONTACT, CONTACTS_COLUMN_ID + " = ?",
new String[] { String.valueOf(contact_id) });
}
/**
* Get all valid contacts from db
* @return returns all Contacts from DB
*/
public SparseArray<Contact> getAllContacts() {
SparseArray<Contact> contacts = new SparseArray<>();
String selectQuery = "SELECT * FROM " + TABLE_CONTACT
+ " WHERE " + CONTACTS_COLUMN_ENABLED + " = 1";
SQLiteDatabase db = this.getReadableDatabase();
Cursor c = db.rawQuery(selectQuery, null);
// looping through all rows and adding to list
if (c.moveToFirst()) {
do {
Contact co = new Contact();
co.setId(c.getInt(c.getColumnIndex(CONTACTS_COLUMN_ID)));
co.setName(c.getString(c.getColumnIndex(CONTACTS_COLUMN_NAME)));
co.setWh(c.getFloat(c.getColumnIndex(CONTACTS_COLUMN_WH)));
co.setEnabled(c.getInt(c.getColumnIndex(CONTACTS_COLUMN_ENABLED)));
// adding to contacts list
contacts.put(co.getId(), co);
} while (c.moveToNext());
}
c.close();
db.close();
return contacts;
}
// ---------------------- month table methods ------------------------//
/**
* Inserting a month into db
* @param month Month object
* @return id if succesfull, -1 if exception
*/
public int insertMonth(Month month) {
SQLiteDatabase db = this.getWritableDatabase();
int month_id = -1;
try {
db.beginTransaction();
ContentValues contentValues = new ContentValues();
contentValues.put(MONTH_COLUMN_YEAR, month.getYear());
contentValues.put(MONTH_COLUMN_MONTH, month.getMonth());
contentValues.put(MONTH_COLUMN_FULL, month.getFull());
// insert row
month_id = (int) db.insert(TABLE_MONTH, null, contentValues);
db.setTransactionSuccessful();
}
catch (Exception ex) {
db.close();
}
finally {
db.endTransaction();
}
db.close();
return month_id;
}
/**
* Get a single month from db
* @param month_id ID required for finding the month in db
* @return returns Month object
*/
public Month getMonth(int month_id) {
SQLiteDatabase db = this.getReadableDatabase();
String selectQuery = "SELECT * FROM " + TABLE_MONTH +
" WHERE " + MONTH_COLUMN_ID + " = " + month_id;
Cursor c = db.rawQuery(selectQuery, null);
if (c != null)
c.moveToFirst();
Month mon = new Month();
mon.setId(c.getInt(c.getColumnIndex(MONTH_COLUMN_ID)));
mon.setYear(c.getInt(c.getColumnIndex(MONTH_COLUMN_YEAR)));
mon.setMonth(c.getInt(c.getColumnIndex(MONTH_COLUMN_MONTH)));
mon.setFull(c.getInt(c.getColumnIndex(MONTH_COLUMN_FULL)));
c.close();
db.close();
return mon;
}
/**
* Change an existing month
* @param mon Month object to update
* @return 1 if successfull
*/
public int updateMonth(Month mon) {
SQLiteDatabase db = this.getWritableDatabase();
ContentValues values = new ContentValues();
values.put(MONTH_COLUMN_YEAR, mon.getYear());
values.put(MONTH_COLUMN_MONTH, mon.getMonth());
values.put(MONTH_COLUMN_FULL, mon.getFull());
// updating row
return db.update(TABLE_MONTH, values, MONTH_COLUMN_ID + " = ?",
new String[] { String.valueOf(mon.getId()) });
}
/**
* Delete a month from the db
* @param month_id moth id to delete from
*/
public void deleteMonth(int month_id) {
SQLiteDatabase db = this.getWritableDatabase();
db.delete(TABLE_MONTH, MONTH_COLUMN_ID + " = ?",
new String[] { String.valueOf(month_id) });
}
/**
* Get all months from db
* @return returns all Months from DB
*/
public List<Month> getAllMonths() {
List<Month> months = new ArrayList<>();
String selectQuery = "SELECT * FROM " + TABLE_MONTH;
SQLiteDatabase db = this.getReadableDatabase();
Cursor c = db.rawQuery(selectQuery, null);
// looping through all rows and adding to list
if (c.moveToFirst()) {
do {
Month mo = new Month();
mo.setId(c.getInt(c.getColumnIndex(MONTH_COLUMN_ID)));
mo.setYear(c.getInt(c.getColumnIndex(MONTH_COLUMN_YEAR)));
mo.setMonth(c.getInt(c.getColumnIndex(MONTH_COLUMN_MONTH)));
mo.setFull(c.getInt(c.getColumnIndex(MONTH_COLUMN_FULL)));
// adding to contacts list
months.add(mo);
} while (c.moveToNext());
}
c.close();
db.close();
return months;
}
// ---------------------- day table methods ------------------------//
/**
* Inserting a day into db
* @param day Day object
* @return id if succesfull, -1 if exception
*/
public int insertDay(Day day) {
SQLiteDatabase db = this.getWritableDatabase();
int day_id = -1;
try {
db.beginTransaction();
ContentValues contentValues = new ContentValues();
contentValues.put(DAY_COLUMN_MONTH, day.getMonth());
contentValues.put(DAY_COLUMN_DATE, day.getDate());
// insert row
day_id = (int) db.insert(TABLE_DAY, null, contentValues);
db.setTransactionSuccessful();
}
catch (Exception ex) {
db.close();
}
finally {
db.endTransaction();
}
db.close();
return day_id;
}
/**
* Get a single day from db
* @param day_id ID required for finding the month in db
* @return returns the Day object
*/
public Day getDay(int day_id) {
SQLiteDatabase db = this.getReadableDatabase();
String selectQuery = "SELECT * FROM " + TABLE_DAY +
" WHERE " + DAY_COLUMN_ID + " = " + day_id;
Cursor c = db.rawQuery(selectQuery, null);
if (c != null)
c.moveToFirst();
Day day = new Day();
day.setId(c.getInt(c.getColumnIndex(DAY_COLUMN_ID)));
day.setMonth(c.getInt(c.getColumnIndex(DAY_COLUMN_MONTH)));
day.setDate(c.getInt(c.getColumnIndex(DAY_COLUMN_DATE)));
c.close();
db.close();
return day;
}
/**
* Change an existing day, useless
* @param day Day object to update
* @return 1 if successfull
*/
public int updateDay(Day day) {
SQLiteDatabase db = this.getWritableDatabase();
ContentValues values = new ContentValues();
values.put(DAY_COLUMN_MONTH, day.getMonth());
values.put(DAY_COLUMN_DATE, day.getDate());
// updating row
return db.update(TABLE_DAY, values, DAY_COLUMN_ID + " = ?",
new String[] { String.valueOf(day.getId()) });
}
/**
* Delete a day from the db, useless cause of cascade
* @param day_id Day id to delete from db
*/
public void deleteDay(int day_id) {
SQLiteDatabase db = this.getWritableDatabase();
db.delete(TABLE_DAY, DAY_COLUMN_ID + " = ?",
new String[] { String.valueOf(day_id) });
}
/**
* Get all days for a given month from db
* @return returns all Days of the specific month
*/
public SparseArray<Day> getAllDaysForMonth(int month_id) {
SparseArray<Day> days = new SparseArray<>();
String selectQuery = "SELECT * FROM " + TABLE_DAY
+ " WHERE " + DAY_COLUMN_MONTH + " = " + month_id
+ " ORDER BY " + DAY_COLUMN_DATE;
SQLiteDatabase db = this.getReadableDatabase();
Cursor c = db.rawQuery(selectQuery, null);
// looping through all rows and adding to list
if (c.moveToFirst()) {
do {
Day day = new Day();
day.setId(c.getInt(c.getColumnIndex(DAY_COLUMN_ID)));
day.setMonth(c.getInt(c.getColumnIndex(DAY_COLUMN_MONTH)));
day.setDate(c.getInt(c.getColumnIndex(DAY_COLUMN_DATE)));
// adding to contacts list
days.put(day.getDate()-1, day);
} while (c.moveToNext());
}
c.close();
db.close();
return days;
}
// ---------------------- services table methods ------------------------//
/**
* Inserting a service into db
* @param service Service object to add to db
* @return id if succesfull, -1 if exception
*/
public int insertService(Service service) {
SQLiteDatabase db = this.getWritableDatabase();
int service_id = -1;
try {
db.beginTransaction();
ContentValues contentValues = new ContentValues();
contentValues.put(SERVICES_COLUMN_DESC, service.getDesc());
contentValues.put(SERVICES_COLUMN_VAL, service.getVal());
contentValues.put(SERVICES_COLUMN_DEF, service.getDef());
contentValues.put(SERVICES_COLUMN_SPE, service.getSpe() ? 1 : 0);
contentValues.put(SERVICES_COLUMN_ENA, service.getEna());
// insert row
service_id = (int) db.insert(TABLE_SERVICE, null, contentValues);
db.setTransactionSuccessful();
}
catch (Exception ex) {
db.close();
}
finally {
db.endTransaction();
}
db.close();
return service_id;
}
/**
* Get a single service from db
* @param service_id ID required for finding the service in db
* @return returns the Service object
*/
public Service getService(int service_id) {
SQLiteDatabase db = this.getReadableDatabase();
String selectQuery = "SELECT * FROM " + TABLE_SERVICE +
" WHERE " + SERVICES_COLUMN_ID + " = " + service_id;
Cursor c = db.rawQuery(selectQuery, null);
if (c != null)
c.moveToFirst();
Service ser = new Service();
ser.setId(c.getInt(c.getColumnIndex(SERVICES_COLUMN_ID)));
ser.setDesc(c.getString(c.getColumnIndex(SERVICES_COLUMN_DESC)));
ser.setVal(c.getFloat(c.getColumnIndex(SERVICES_COLUMN_VAL)));
ser.setDef(c.getInt(c.getColumnIndex(SERVICES_COLUMN_DEF)));
ser.setSpe(c.getInt(c.getColumnIndex(SERVICES_COLUMN_SPE)));
ser.setEna(c.getInt(c.getColumnIndex(SERVICES_COLUMN_ENA)));
c.close();
db.close();
return ser;
}
/**
* Change an existing service
* @param ser The Service to update to
* @return 1 if successfull
*/
public int updateService(Service ser) {
SQLiteDatabase db = this.getWritableDatabase();
ContentValues values = new ContentValues();
values.put(SERVICES_COLUMN_DESC, ser.getDesc());
values.put(SERVICES_COLUMN_VAL, ser.getVal());
values.put(SERVICES_COLUMN_DEF, ser.getDef());
values.put(SERVICES_COLUMN_SPE, ser.getSpe() ? 1 : 0);
values.put(SERVICES_COLUMN_ENA, ser.getEna());
// updating row
return db.update(TABLE_SERVICE, values, SERVICES_COLUMN_ID + " = ?",
new String[] { String.valueOf(ser.getId()) });
}
/**
* Delete a service from the db
* @param service_id the id for the service to delete
*/
public void deleteService(int service_id) {
SQLiteDatabase db = this.getWritableDatabase();
db.delete(TABLE_SERVICE, SERVICES_COLUMN_ID + " = ?",
new String[] { String.valueOf(service_id) });
}
/**
* Get all services from db
* @return returns all Services from DB
*/
public SparseArray<Service> getAllServices() {
SparseArray<Service> services = new SparseArray<>();
String selectQuery = "SELECT * FROM " + TABLE_SERVICE
+ " WHERE " + SERVICES_COLUMN_ENA + " = 1";
SQLiteDatabase db = this.getReadableDatabase();
Cursor c = db.rawQuery(selectQuery, null);
// looping through all rows and adding to list
if (c.moveToFirst()) {
do {
Service ser = new Service();
ser.setId(c.getInt(c.getColumnIndex(SERVICES_COLUMN_ID)));
ser.setDesc(c.getString(c.getColumnIndex(SERVICES_COLUMN_DESC)));
ser.setVal(c.getFloat(c.getColumnIndex(SERVICES_COLUMN_VAL)));
ser.setDef(c.getInt(c.getColumnIndex(SERVICES_COLUMN_DEF)));
ser.setSpe(c.getInt(c.getColumnIndex(SERVICES_COLUMN_SPE)));
// adding to contacts list
services.put(ser.getId(), ser);
} while (c.moveToNext());
}
c.close();
db.close();
return services;
}
// ---------------------- month_contact table methods ------------------------//
/**
* Inserting a month_service into db
* @param month the month id for the relation
* @param contact the contact id for the relation
* @param req the required weekly work hours
*/
public void insertMC(int month, int contact, float req) {
SQLiteDatabase db = this.getWritableDatabase();
try {
db.beginTransaction();
ContentValues contentValues = new ContentValues();
contentValues.put(MC_COLUMN_MONTH, month);
contentValues.put(MC_COLUMN_CONTACT, contact);
contentValues.put(MC_COLUMN_REQ, req);
// insert row
db.insert(TABLE_MONTH_CONTACT, null, contentValues);
db.setTransactionSuccessful();
}
catch (Exception ex) {
db.close();
}
finally {
db.endTransaction();
}
db.close();
}
    /**
     * Get all contacts for a certain month
     * @param month_id ID required for finding all contacts
     * @param req out-parameter: filled with required hours per contact id (may be null)
     * @return returns the contacts for this month, keyed by contact id
     */
    public SparseArray<Contact> getAllContactsForMonth(int month_id, SparseArray<Float> req) {
        SQLiteDatabase db = this.getReadableDatabase();
        SparseArray<Contact> contacts = new SparseArray<>();
        // contacts JOIN month_contact JOIN month, filtered to one month.
        // Selected columns: contact id/wh/name plus the per-month "req".
        String selectQuery = "SELECT " + TABLE_CONTACT + "."
                + CONTACTS_COLUMN_ID + ", " + TABLE_CONTACT + "."
                + CONTACTS_COLUMN_WH + ", " + TABLE_CONTACT + "."
                + CONTACTS_COLUMN_NAME + ", " + TABLE_MONTH_CONTACT + "."
                + MC_COLUMN_REQ + " FROM " + TABLE_CONTACT
                + " JOIN " + TABLE_MONTH_CONTACT + " ON " + TABLE_CONTACT
                + "." + CONTACTS_COLUMN_ID + " = " + TABLE_MONTH_CONTACT
                + "." + MC_COLUMN_CONTACT + " JOIN " + TABLE_MONTH
                + " ON " + TABLE_MONTH_CONTACT + "." + MC_COLUMN_MONTH
                + " = " + TABLE_MONTH + "." + MONTH_COLUMN_ID
                + " WHERE " + TABLE_MONTH + "." + MONTH_COLUMN_ID
                + " = " + month_id;
        Cursor c = db.rawQuery(selectQuery, null);
        // looping through all rows and adding to list
        if (c.moveToFirst()) {
            do {
                Contact con = new Contact();
                con.setId(c.getInt(c.getColumnIndex(CONTACTS_COLUMN_ID)));
                con.setName(c.getString(c.getColumnIndex(CONTACTS_COLUMN_NAME)));
                con.setWh(c.getFloat(c.getColumnIndex(CONTACTS_COLUMN_WH)));
                // NOTE(review): "enabled" is not selected here, so the
                // returned Contacts keep its default value — confirm callers
                // do not rely on it.
                if (req != null) req.put(con.getId(), c.getFloat(c.getColumnIndex(MC_COLUMN_REQ)));
                // adding to contacts list
                contacts.put(con.getId(), con);
            } while (c.moveToNext());
        }
        c.close();
        db.close();
        return contacts;
    }
// ---------------------- month_service table methods ------------------------//
/**
* Inserting a month_service into db
* @param month the month id for the relation
* @param service the service id for the relation
*/
public void insertMS(int month, int service) {
SQLiteDatabase db = this.getWritableDatabase();
try {
db.beginTransaction();
ContentValues contentValues = new ContentValues();
contentValues.put(MS_COLUMN_MONTH, month);
contentValues.put(MS_COLUMN_SERVICE, service);
// insert row
db.insert(TABLE_MONTH_SERVICE, null, contentValues);
db.setTransactionSuccessful();
}
catch (Exception ex) {
db.close();
}
finally {
db.endTransaction();
}
db.close();
}
/**
* Get all services for a certain month
* @param month_id ID required for finding all services
* @return returns all the services for this month
*/
public SparseArray<Service> getAllServicesForMonth(int month_id) {
SQLiteDatabase db = this.getReadableDatabase();
SparseArray<Service> services = new SparseArray<>();
String selectQuery = "SELECT " + TABLE_SERVICE + "."
+ SERVICES_COLUMN_ID + ", " + TABLE_SERVICE + "."
+ SERVICES_COLUMN_DESC + ", " + TABLE_SERVICE + "."
+ SERVICES_COLUMN_VAL + ", " + TABLE_SERVICE + "."
+ SERVICES_COLUMN_DEF + ", " + TABLE_SERVICE + "."
+ SERVICES_COLUMN_SPE + " FROM " + TABLE_SERVICE
+ " JOIN " + TABLE_MONTH_SERVICE + " ON " + TABLE_SERVICE
+ "." + SERVICES_COLUMN_ID + " = " + TABLE_MONTH_SERVICE
+ "." + MS_COLUMN_SERVICE + " JOIN " + TABLE_MONTH
+ " ON " + TABLE_MONTH_SERVICE + "." + MC_COLUMN_MONTH
+ " = " + TABLE_MONTH + "." + MONTH_COLUMN_ID
+ " WHERE " + TABLE_MONTH + "." + MONTH_COLUMN_ID
+ " = " + month_id;
Cursor c = db.rawQuery(selectQuery, null);
// looping through all rows and adding to list
if (c.moveToFirst()) {
do {
Service ser = new Service();
ser.setId(c.getInt(c.getColumnIndex(SERVICES_COLUMN_ID)));
ser.setDesc(c.getString(c.getColumnIndex(SERVICES_COLUMN_DESC)));
ser.setVal(c.getFloat(c.getColumnIndex(SERVICES_COLUMN_VAL)));
ser.setDef(c.getInt(c.getColumnIndex(SERVICES_COLUMN_DEF)));
ser.setSpe(c.getInt(c.getColumnIndex(SERVICES_COLUMN_SPE)));
// adding to contacts list
services.put(ser.getId(), ser);
} while (c.moveToNext());
}
c.close();
db.close();
return services;
}
// ---------------------- day_service table methods ------------------------//
/**
* Inserting a day_service into db
* @param day the day id for the relation
* @param service the service id for the relation
* @param contact the contact id for the relation
*/
public void insertDS(int day, int service, int contact) {
SQLiteDatabase db = this.getWritableDatabase();
try {
db.beginTransaction();
ContentValues contentValues = new ContentValues();
contentValues.put(DS_COLUMN_DAY, day);
contentValues.put(DS_COLUMN_CONTACT, contact);
contentValues.put(DS_COLUMN_SERVICE, service);
// insert row
db.insert(TABLE_DAY_SERVICE, null, contentValues);
db.setTransactionSuccessful();
}
catch (Exception ex) {
db.close();
}
finally {
db.endTransaction();
}
db.close();
}
/**
* Delete a day_service from the db
* @param day_id the day id for the relation
* @param user_id the user id for the relation
*/
public void deleteDS(int day_id, int user_id) {
SQLiteDatabase db = this.getWritableDatabase();
db.delete(TABLE_DAY_SERVICE, DS_COLUMN_DAY + " = ? and " + DS_COLUMN_CONTACT + " = ?",
new String[] { String.valueOf(day_id), String.valueOf(user_id) });
}
/**
* Get all services for a certain day
* @param day_id ID required for finding all services
* @return return all the contacts and services for the specific day
*/
    public List<ContactService> getAllContactsForDay(int day_id) {
        SQLiteDatabase db = this.getReadableDatabase();
        List<ContactService> contactServices = new ArrayList<>();
        // Join day -> day_service -> contact/service; the contact and service
        // ids are aliased "a" and "b" to disambiguate the two id columns.
        String selectQuery = "SELECT " + TABLE_CONTACT + "."
                + CONTACTS_COLUMN_ID + " as a, " + TABLE_SERVICE + "."
                + SERVICES_COLUMN_ID + " as b FROM " + TABLE_DAY
                + " JOIN " + TABLE_DAY_SERVICE + " ON " + TABLE_DAY
                + "." + DAY_COLUMN_ID + " = " + TABLE_DAY_SERVICE
                + "." + DS_COLUMN_DAY + " JOIN " + TABLE_CONTACT
                + " ON " + TABLE_DAY_SERVICE + "." + DS_COLUMN_CONTACT
                + " = " + TABLE_CONTACT + "." + CONTACTS_COLUMN_ID
                + " JOIN " + TABLE_SERVICE + " ON " + TABLE_DAY_SERVICE
                + "." + DS_COLUMN_SERVICE + " = " + TABLE_SERVICE
                + "." + SERVICES_COLUMN_ID + " WHERE " + TABLE_DAY
                + "." + DAY_COLUMN_ID + " = " + day_id;
        Cursor c = db.rawQuery(selectQuery, null);
        // looping through all rows and adding to list
        if (c.moveToFirst()) {
            do {
                ContactService contactService = new ContactService();
                contactService.contact_id = c.getInt(c.getColumnIndex("a"));
                contactService.service_id = c.getInt(c.getColumnIndex("b"));
                // adding to list
                contactServices.add(contactService);
            } while (c.moveToNext());
        }
        c.close();
        db.close();
        return contactServices;
    }
} |
<reponame>schinmayee/nimbus
/*
* Copyright 2013 Stanford University.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* - Neither the name of the copyright holders nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* Author: <NAME> <<EMAIL>>
*/
#include <string>
#include <boost/functional/hash.hpp>
#include "applications/physbam/water//app_utils.h"
#include "applications/physbam/water//physbam_include.h"
#include "applications/physbam/water//physbam_tools.h"
#include "src/data/app_data/app_var.h"
#include "src/shared/dbg.h"
#include "src/shared/geometric_region.h"
#include "src/worker/data.h"
#include "applications/physbam/water//app_data_compressed_scalar_array.h"
namespace application {
template<class T> AppDataCompressedScalarArray<T>::
AppDataCompressedScalarArray() {
    // Default-constructed instance owns no storage until configured.
    data_ = NULL;
    index_data_ = NULL;
}
// Prototype constructor: sets up the global region, ghost width and name;
// optionally registers the prototype via MakePrototype().
template<class T> AppDataCompressedScalarArray<T>::
AppDataCompressedScalarArray(const nimbus::GeometricRegion &global_reg,
                             const int ghost_width,
                             bool make_proto,
                             const std::string& name)
    : global_region_(global_reg),
      ghost_width_(ghost_width) {
    set_name(name);
    data_ = new DataType();
    index_data_ = NULL;
    // -1 appears to mean "length not yet known" — TODO confirm against callers.
    data_length_ = -1;
    if (make_proto)
        MakePrototype();
}
// Instance constructor for a concrete object region: the local region is the
// object region shrunk by ghost_width on every side, and shift_ records the
// offset of the local region within the global region.
template<class T> AppDataCompressedScalarArray<T>::
AppDataCompressedScalarArray(const nimbus::GeometricRegion &global_reg,
                             const nimbus::GeometricRegion &ob_reg,
                             const int ghost_width)
    : AppVar(ob_reg),
      global_region_(global_reg),
      local_region_(ob_reg.NewEnlarged(-ghost_width)),
      ghost_width_(ghost_width) {
    data_ = new DataType();
    index_data_ = NULL;
    // -1 appears to mean "length not yet known" — TODO confirm against callers.
    data_length_ = -1;
    shift_.x = local_region_.x() - global_reg.x();
    shift_.y = local_region_.y() - global_reg.y();
    shift_.z = local_region_.z() - global_reg.z();
}
template<class T> AppDataCompressedScalarArray<T>::
~AppDataCompressedScalarArray() {
    // Releases data_ and index_data_; Destroy() is idempotent.
    Destroy();
}
// Free owned storage. Safe to call more than once: pointers are reset to
// NULL, and delete on NULL is a no-op, so the guards are unnecessary.
template<class T> void AppDataCompressedScalarArray<T>::
Destroy() {
    delete data_;
    data_ = NULL;
    delete index_data_;
    index_data_ = NULL;
}
// Prototype-style factory: clone this variable's configuration (global
// region, ghost width, name) for a new object region.
template<class T> nimbus::AppVar *AppDataCompressedScalarArray<T>::
CreateNew(const nimbus::GeometricRegion &ob_reg) const {
    nimbus::AppVar* temp = new AppDataCompressedScalarArray<T>(global_region_,
                                                               ob_reg,
                                                               ghost_width_);
    temp->set_name(name());
    return temp;
}
// Pull the data covered by read_reg (clipped to this object's region) from
// the nimbus Data objects in read_set into the app-data buffer data_.
template<class T> void AppDataCompressedScalarArray<T>::
ReadAppData(const nimbus::DataArray &read_set,
            const nimbus::GeometricRegion &read_reg) {
    nimbus::GeometricRegion ob_reg = object_region();
    nimbus::GeometricRegion final_read_reg =
        nimbus::GeometricRegion::GetIntersection(read_reg, ob_reg);
    assert(final_read_reg.dx() > 0 && final_read_reg.dy() > 0 && final_read_reg.dz() > 0);
    // Loop through each element in read set, and fetch it to the app_data object.
    assert(index_data_ != NULL);
    Translator::template
        ReadCompressedScalarArray<T>(final_read_reg, shift_, read_set, data_,
                                     data_length_, *index_data_);
}
// Push data for write_reg from the app-data buffer back into the nimbus Data
// objects in write_set. Degenerate (empty) write regions are ignored.
template<class T> void AppDataCompressedScalarArray<T>::
WriteAppData(const nimbus::DataArray &write_set,
             const nimbus::GeometricRegion &write_reg) const {
    if (write_reg.dx() <= 0 || write_reg.dy() <= 0 || write_reg.dz() <= 0)
        return;
    nimbus::GeometricRegion ob_reg = object_region();
    nimbus::GeometricRegion final_write_reg =
        nimbus::GeometricRegion::GetIntersection(write_reg, ob_reg);
    assert(final_write_reg.dx() > 0 && final_write_reg.dy() > 0 && final_write_reg.dz() > 0);
    // Loop through each element in write_set, look up the region using index, and
    // then write.
    assert(index_data_ != NULL);
    // NOTE(review): this passes write_reg, not final_write_reg, to the
    // translator, while ReadAppData passes the clipped region — confirm
    // whether that asymmetry is intentional.
    Translator::template
        WriteCompressedScalarArray<T>(write_reg, shift_, write_set, *data_,
                                      data_length_, *index_data_);
}
// Replace the cached index meta-data with a copy of *d, but only when the
// hash codes differ; identical hashes mean the cached copy is still valid.
template<class T> void AppDataCompressedScalarArray<T>::
set_index_data(IndexType* d) {
    if (d->hash_code == 0) {
        dbg(APP_LOG, "Recalculate hash code for meta data.\n");
        d->hash_code = CalculateHashCode(*d);
    }
    assert(index_data_ == NULL || index_data_->hash_code != 0);
    if (index_data_ == NULL || index_data_->hash_code != d->hash_code) {
        // Capture the outgoing hash before replacing the data: the original
        // zeroed index_data_->hash_code before logging it, so the log always
        // printed "hash=0" instead of the replaced hash. Also use %ld, since
        // CalculateHashCode() returns long.
        long old_hash = (index_data_ == NULL) ? 0 : index_data_->hash_code;
        delete index_data_;  // delete on NULL is a no-op
        index_data_ = new IndexType(*d);
        index_data_->hash_code = d->hash_code;
        dbg(APP_LOG, "Replace meta data, hash=%ld replaced by hash=%ld.\n",
            old_hash, d->hash_code);
    }
}
// Hash the raw contents of the index array. boost::hash_range folds the
// element range [base_pointer, base_pointer + m) into the seed in place.
template<class T> long AppDataCompressedScalarArray<T>::
CalculateHashCode(IndexType& index) {
    size_t seed = 99;
    boost::hash_range(seed,
                      index.array.base_pointer,
                      index.array.base_pointer + index.array.m);
    return (long) seed;
}
template class AppDataCompressedScalarArray<float>;
} // namespace application
|
#!/usr/bin/env bash
# Installs Bolid UProg into a dedicated 32-bit Wine prefix.
# Exit immediately if a pipeline, which may consist of a single simple command,
# a list, or a compound command, returns a non-zero status.
set -e
readonly MONIKER=uprog
# Installer version as published in the orion-uprog-dist GitHub repository.
readonly VERSION=4.1.5.10498
readonly STUFF=InstallUProg_$VERSION.exe
# Wine prefix directory; doubles as the install root.
readonly TARGET_DIR=$HOME/programs/bolid/$MONIKER
readonly START_SCRIPT=$TARGET_DIR/start-$MONIKER.sh
readonly UPROG_DIR=$TARGET_DIR/drive_c/uprog
readonly EXE=UProg.exe
create_start_script() {
    # Generate a launcher that runs UProg inside the dedicated Wine prefix.
    # Quote the redirection target: the original's unquoted "> $START_SCRIPT"
    # broke when $HOME (and thus the path) contained whitespace. printf emits
    # the same two lines the original echoed.
    printf '%s\n' "cd $UPROG_DIR" \
        "WINEPREFIX=$TARGET_DIR WINEARCH=win32 wine $EXE" > "$START_SCRIPT"
    chmod +x "$START_SCRIPT"
}
# Refuse to clobber an existing installation.
if [ -d "$TARGET_DIR" ]; then
    echo "Directory exists: $TARGET_DIR" >&2
    exit 1
fi
mkdir --parents "$TARGET_DIR"
# Initialize a fresh 32-bit Wine prefix in the target directory.
WINEPREFIX=$TARGET_DIR WINEARCH=win32 wineboot
readonly TEMP_DIR=$(mktemp --directory -t delete-me-XXXXXXXXXX)
(
    cd "$TEMP_DIR"
    echo -n Downloading...
    wget --quiet "https://github.com/alexanderfefelov/orion-uprog-dist/raw/main/$STUFF"
    echo done
    echo -n Extracting...
    innoextract --extract "$STUFF" > /dev/null
    echo done
    echo -n Installing...
    mkdir --parents "$UPROG_DIR"
    # innoextract unpacks the payload under the literal 'code$GetExePath' dir.
    mv --force 'code$GetExePath'/* "$UPROG_DIR"
    create_start_script
    echo done
)
rm --recursive --force "$TEMP_DIR"
|
<filename>chapter06/Exercise_6_24.java<gh_stars>0
package com.company;
import java.util.Scanner;
public class Exercise_6_24 {
    /**
     * Prints the current GMT date and time computed directly from
     * System.currentTimeMillis() (milliseconds since midnight, Jan 1, 1970
     * GMT) without using any calendar classes.
     */
    public static void main(String[] args) {
        long totalMilliseconds = System.currentTimeMillis();
        long totalSeconds = totalMilliseconds / 1000;
        long currentSecond = totalSeconds % 60;
        long totalMinutes = totalSeconds / 60;
        long currentMinute = totalMinutes % 60;
        long totalHours = totalMinutes / 60;
        long currentHour = totalHours % 24;
        long totalDays = totalHours / 24;
        // compute the date from midnight, Jan 1, 1970
        // get the year; leftDays counts whole days elapsed in the current year
        int year = 1970;
        long leftDays = totalDays;
        // BUG FIX: the original condition "leftDays > 365" mishandled the last
        // day of a year (e.g. leftDays == 365 in a non-leap year fell through
        // and the month loop produced "December 32"). Consume a year whenever
        // a full year's worth of days remains.
        while (leftDays >= daysOfYear(year)) {
            leftDays -= daysOfYear(year);
            year++;
        }
        // get the month (same ">=" fix as the year loop above)
        int month = 1;
        while (leftDays >= getNumberOfDaysInMonth(year, month)) {
            leftDays -= getNumberOfDaysInMonth(year, month);
            month++;
        }
        long day = leftDays + 1;  // leftDays is 0-based within the month
        System.out.printf("Current date is %d-%d-%d\n", year, month, day);
        System.out.printf("Current time is %d:%d:%d GMT\n", currentHour, currentMinute, currentSecond);
    }
    /** @return true for Gregorian leap years (divisible by 400, or by 4 but not 100). */
    public static boolean isLeapYear(int year) {
        return year % 400 == 0 || (year % 4 == 0 && year % 100 != 0);
    }
    /** @return 366 for leap years, 365 otherwise. */
    public static int daysOfYear(int year) {
        return isLeapYear(year) ? 366 : 365;
    }
    /** @return the number of days in the given month (1-12) of the given year. */
    public static int getNumberOfDaysInMonth(int year, int month) {
        if (month == 1 || month == 3 || month == 5 || month == 7 || month == 8 || month == 10 || month == 12)
            return 31;
        if (month == 4 || month == 6 || month == 9 || month == 11)
            return 30;
        return isLeapYear(year) ? 29 : 28;
    }
}
|
<reponame>kotarondo/persha-vm
/*
Copyright (c) 2015-2017, <NAME>.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Build the host-side object corresponding to a VM class name.
// arg1/arg2 carry the constructor arguments (e.g. primitive value, error
// name + message, regexp source + flags, opaque payload).
function create_exported_object(Class, arg1, arg2) {
    // Host prototype objects are shared singletons; hand them back directly.
    var prototypes = {
        ObjectPrototype: Object.prototype,
        ArrayPrototype: Array.prototype,
        StringPrototype: String.prototype,
        BooleanPrototype: Boolean.prototype,
        NumberPrototype: Number.prototype,
        DatePrototype: Date.prototype,
        RegExpPrototype: RegExp.prototype,
        ErrorPrototype: Error.prototype,
        EvalErrorPrototype: EvalError.prototype,
        RangeErrorPrototype: RangeError.prototype,
        ReferenceErrorPrototype: ReferenceError.prototype,
        SyntaxErrorPrototype: SyntaxError.prototype,
        TypeErrorPrototype: TypeError.prototype,
        URIErrorPrototype: URIError.prototype,
    };
    if (Object.prototype.hasOwnProperty.call(prototypes, Class)) {
        return prototypes[Class];
    }
    if (Class === 'Object') return new Object();
    if (Class === 'Array') return new Array();
    if (Class === 'Number') return new Number(arg1);
    if (Class === 'String') return new String(arg1);
    if (Class === 'Boolean') return new Boolean(arg1);
    if (Class === 'Date') return new Date(arg1);
    if (Class === 'RegExp') return new RegExp(arg1, arg2);
    if (Class === 'Buffer') return Buffer.from(arg1);
    if (Class === 'Error') {
        // Well-known error names map onto the matching host constructor.
        var ctors = {
            TypeError: TypeError,
            ReferenceError: ReferenceError,
            RangeError: RangeError,
            SyntaxError: SyntaxError,
            URIError: URIError,
            EvalError: EvalError,
            Error: Error,
        };
        if (Object.prototype.hasOwnProperty.call(ctors, arg1)) {
            return new ctors[arg1](arg2);
        }
        // Custom error name: mimic the spec's non-enumerable "name" property.
        var err = new Error(arg2);
        Object.defineProperty(err, "name", {
            value: arg1,
            writable: true,
            enumerable: false,
            configurable: true,
        });
        return err;
    }
    if (Class === 'OpaqueObject') {
        // Attach the opaque payload as a non-enumerable, read-only property.
        var obj = new Object();
        Object.defineProperty(obj, "opaque", {
            value: arg1,
            writable: false,
            enumerable: false,
            configurable: true,
        });
        return obj;
    }
    assert(false, Class);
}
// Populate the .exported cache of every built-in prototype in the realm so
// exportValue() can return host prototypes without re-creating them.
function initializeDefaultExport() {
    var names = ['Object', 'Array', 'String', 'Boolean', 'Number', 'Date',
        'RegExp', 'Error', 'EvalError', 'RangeError', 'ReferenceError',
        'SyntaxError', 'TypeError', 'URIError'];
    names.forEach(function(name) {
        realm[name + '_prototype'].exported = create_exported_object(name + 'Prototype');
    });
}
// Export each argument in order. Array.from walks array-likes by index and
// length exactly like the original manual loop did.
function exportArguments(argumentsList) {
    return Array.from(argumentsList, function(value) {
        return exportValue(value);
    });
}
// Convert a VM value A into a host-side value. Primitives pass through;
// objects get a host wrapper which is cached on A.exported so repeated
// exports of the same VM object preserve identity.
function exportValue(A) {
    if (isPrimitiveValue(A)) {
        return A;
    }
    // Reuse the cached wrapper when one exists.
    if (A.exported) {
        return A.exported;
    }
    switch (A.Class) {
        case 'Number':
        case 'String':
        case 'Boolean':
        case 'Date':
            // Wrapper objects are rebuilt from the VM primitive value and cached.
            A.exported = create_exported_object(A.Class, A.PrimitiveValue);
            return A.exported;
        case 'Buffer':
            // Note: intentionally NOT cached — each export copies the buffer.
            return create_exported_object('Buffer', A.wrappedBuffer); // copy the buffer
        case 'RegExp':
            return exportRegExp(A);
        case 'Error':
            return exportError(A);
        case 'Function':
            // Functions are not exported.
            return null;
        case 'Array':
            // Arrays and plain objects are exposed through a read-only Proxy
            // backed by the VM object's property table.
            var obj = create_exported_object('Array');
            A.exported = new Proxy(obj, new ExportHandler(A));
            return A.exported;
        default:
            if (A.ClassID === CLASSID_OpaqueObject) {
                A.exported = exportOpaqueObject(A);
                return A.exported;
            }
            var obj = create_exported_object('Object');
            A.exported = new Proxy(obj, new ExportHandler(A));
            return A.exported;
    }
}
// Export an opaque VM object: unwrapped ones hand back the raw payload,
// wrapped ones are exposed through a Proxy carrying the payload as "opaque".
function exportOpaqueObject(A) {
    if (!A.wrapped) {
        return A.opaque;
    }
    var obj = create_exported_object('OpaqueObject', A.opaque);
    return new Proxy(obj, new ExportHandler(A));
}
// Rebuild a host RegExp from the VM regexp's own properties, carrying over
// source, the g/i/m flags and lastIndex.
function exportRegExp(A) {
    var flags = "";
    if (safe_get_primitive_value(A, 'global')) flags += "g";
    if (safe_get_primitive_value(A, 'ignoreCase')) flags += "i";
    if (safe_get_primitive_value(A, 'multiline')) flags += "m";
    var source = safe_get_primitive_value(A, 'source');
    var obj = create_exported_object('RegExp', source, flags);
    obj.lastIndex = safe_get_primitive_value(A, 'lastIndex');
    return obj;
}
// Rebuild a host Error from a VM error object: name, message, and a stack
// string synthesized from the VM's recorded stack trace.
// Cleanup: the original re-declared "var A" for the stack lines, clobbering
// the parameter, and re-declared name/msg with repeated "var"; behavior is
// unchanged here, only the shadowing is gone.
function exportError(A) {
    var name = safe_get_primitive_value(A, 'name');
    var msg = safe_get_primitive_value(A, 'message');
    var stackTrace = A.stackTrace;
    name = (name === undefined) ? "Error" : String(name);
    msg = (msg === undefined) ? "" : String(msg);
    var err = create_exported_object('Error', name, msg);
    // First stack line mirrors the native "Name: message" header.
    var lines = [];
    if (name === "") lines[0] = msg;
    else if (msg === "") lines[0] = name;
    else lines[0] = name + ": " + msg;
    for (var i = 0; i < stackTrace.length; i++) {
        var code = stackTrace[i].code;
        var pos = stackTrace[i].pos;
        var info = {};
        Parser.locateDebugInfo(code, pos, info);
        var finfo = info.filename + ":" + info.lineNumber + ":" + info.columnNumber;
        lines[i + 1] = finfo;
        if (info.functionName) {
            lines[i + 1] = info.functionName + " (" + finfo + ")";
        }
    }
    err.stack = lines.join("\n at ");
    return err;
}
// Proxy handler that exposes a VM object A as a read-only host object.
// All mutation traps (set/defineProperty/delete/preventExtensions/
// setPrototypeOf) refuse, and property reads are served from A.properties.
function ExportHandler(A) {
    this.A = A;
}
ExportHandler.prototype = {
    getPrototypeOf: function(target) {
        // The VM object's prototype chain is exported lazily.
        return exportValue(this.A.Prototype);
    },
    setPrototypeOf: function(target, V) {
        return false;
    },
    preventExtensions: function(target) {
        return false;
    },
    getOwnPropertyDescriptor: function(target, P) {
        var Desc = this.A.properties[P];
        var targetDesc = Object.getOwnPropertyDescriptor(target, P);
        if (Desc === undefined) return targetDesc;
        // When the proxy target already pins the property as non-configurable,
        // the reported descriptor must stay compatible with it to satisfy the
        // Proxy invariants.
        if (targetDesc !== undefined && targetDesc.configurable === false) {
            if (targetDesc.writable === false) return targetDesc;
            var obj = {
                writable: true,
                enumerable: targetDesc.enumerable,
                configurable: false,
            };
        } else {
            var obj = {
                writable: false,
                enumerable: Desc.Enumerable,
                configurable: true,
            };
        }
        if (IsDataDescriptor(Desc) === true) {
            assert(Desc.Value !== absent, Desc);
            obj.value = exportValue(Desc.Value);
        } else {
            // Accessor properties are exported as value=undefined; getters are
            // not invoked on export.
            assert(IsAccessorDescriptor(Desc), Desc);
            obj.value = undefined;
        }
        return obj;
    },
    defineProperty: function(target) {
        return false;
    },
    has: function(target, P) {
        // Own property first, then walk the exported prototype chain.
        var Desc = this.getOwnPropertyDescriptor(target, P);
        if (Desc === undefined) {
            var proto = this.getPrototypeOf(target);
            if (proto === null) return false;
            return Reflect.has(proto, P);
        }
        return true;
    },
    get: function(target, P) {
        var Desc = this.getOwnPropertyDescriptor(target, P);
        if (Desc === undefined) {
            var proto = this.getPrototypeOf(target);
            if (proto === null) return undefined;
            return Reflect.get(proto, P);
        }
        return Desc.value;
    },
    set: function(target, P, V) {
        return false;
    },
    deleteProperty: function(target) {
        return false;
    },
    ownKeys: function(target) {
        return Object.getOwnPropertyNames(this.A.properties);
    },
    apply: function(target) {
        // Functions are never exported through this handler (see exportValue).
        assert(false);
    },
    construct: function(target) {
        assert(false);
    },
};
// Walk the VM prototype chain (via the Prototype link) and return the first
// own property record named P, or undefined when the chain is exhausted.
function safe_get_property(O, P) {
    for (var current = O; current !== null; current = current.Prototype) {
        var prop = current.properties[P];
        if (prop !== undefined) {
            return prop;
        }
    }
    return undefined;
}
// Fetch property P along the VM prototype chain and return its value only
// when it is present and primitive; otherwise undefined.
function safe_get_primitive_value(O, P) {
    var prop = safe_get_property(O, P);
    if (prop === undefined || prop.Value === absent) {
        return undefined;
    }
    return isPrimitiveValue(prop.Value) ? prop.Value : undefined;
}
|
/*Crie um programa que lê 6 valores inteiros e, em seguida, mostre na tela os valores lidos.*/
#include <stdio.h>
#include <stdlib.h>
int main (void){
    /* Reads six integers from the user, then echoes each one back with its
     * 1-based position. Prompt and output strings are unchanged. */
    int valores[6];
    for (int idx = 0; idx < 6; idx++){
        printf("Digite o %d valor:\t", idx + 1);
        scanf("%d", &valores[idx]);
    }
    for (int idx = 0; idx < 6; idx++){
        printf("O valor na posicao %d e %d\n", idx + 1, valores[idx]);
    }
    system("pause");
    return 0;
}
|
def word_frequency(words):
    """Count how many times each word appears.

    Args:
        words: An iterable of hashable items (typically strings).

    Returns:
        dict: Mapping of each distinct word to its number of occurrences.
    """
    frequency_dict = {}
    for word in words:
        # dict.get with a default replaces the explicit membership test.
        frequency_dict[word] = frequency_dict.get(word, 0) + 1
    return frequency_dict
package cz.metacentrum.perun.spRegistration.persistence.connectors;
import cz.metacentrum.perun.spRegistration.persistence.exceptions.ConnectorException;
import cz.metacentrum.perun.spRegistration.persistence.models.Facility;
import cz.metacentrum.perun.spRegistration.persistence.models.PerunAttribute;
import cz.metacentrum.perun.spRegistration.persistence.models.PerunAttributeDefinition;
import cz.metacentrum.perun.spRegistration.persistence.models.User;
import org.json.JSONArray;
import org.json.JSONObject;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Connects to Perun and obtains information.
*
* @author <NAME> <<EMAIL>>
*/
public interface PerunConnector {
	/**
	 * Create facility in Perun.
	 * @param facilityJson JSON of facility to be created.
	 * @return Created facility.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	Facility createFacilityInPerun(JSONObject facilityJson) throws ConnectorException;
	/**
	 * Update existing facility in Perun.
	 * @param facilityJson JSON of facility to be updated.
	 * @return Updated facility.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	Facility updateFacilityInPerun(JSONObject facilityJson) throws ConnectorException;
	/**
	 * Delete facility from Perun.
	 * @param facilityId ID of facility to be deleted.
	 * @return True if everything went OK.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	boolean deleteFacilityFromPerun(Long facilityId) throws ConnectorException;
	/**
	 * Get facility from Perun with specified ID.
	 * @param facilityId ID of facility.
	 * @return Retrieved facility.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	Facility getFacilityById(Long facilityId) throws ConnectorException;
	/**
	 * Get facilities having the specified proxy-identifier attribute value.
	 * @param proxyIdentifierAttr Name of the proxy-identifier attribute.
	 * @param proxyIdentifier Value of the proxy identifier to match.
	 * @return List of found facilities.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	List<Facility> getFacilitiesByProxyIdentifier(String proxyIdentifierAttr, String proxyIdentifier) throws ConnectorException;
	/**
	 * Get facilities where user is admin (manager).
	 * @param userId ID of user.
	 * @return List of found facilities.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	List<Facility> getFacilitiesWhereUserIsAdmin(Long userId) throws ConnectorException;
	/**
	 * Get attribute of facility.
	 * @param facilityId ID of facility.
	 * @param attrName Name of the attribute.
	 * @return Retrieved attribute.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	PerunAttribute getFacilityAttribute(Long facilityId, String attrName) throws ConnectorException;
	/**
	 * Get specified attributes for facility.
	 * @param facilityId ID of facility.
	 * @param attrNames Names of attributes to be retrieved.
	 * @return Map (key = attribute name, value = attribute) of facility attributes.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	Map<String, PerunAttribute> getFacilityAttributes(Long facilityId, List<String> attrNames) throws ConnectorException;
	/**
	 * Get IDs of facilities where user is admin (manager).
	 * @param userId ID of user.
	 * @return Set of facility IDs.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	Set<Long> getFacilityIdsWhereUserIsAdmin(Long userId) throws ConnectorException;
	/**
	 * Set attribute for facility in Perun.
	 * @param facilityId ID of facility.
	 * @param attrJson JSON representation of attribute.
	 * @return True if everything went OK.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	boolean setFacilityAttribute(Long facilityId, JSONObject attrJson) throws ConnectorException;
	/**
	 * Set attributes for facility in Perun.
	 * @param facilityId ID of facility.
	 * @param attrsJsons List of JSON representations of attributes.
	 * @return True if everything went OK.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	boolean setFacilityAttributes(Long facilityId, JSONArray attrsJsons) throws ConnectorException;
	/**
	 * Get user from Perun.
	 * @param extLogin sub from OIDC.
	 * @param extSourceName Perun extSource that has been used for login (entityId of Proxy).
	 * @param userEmailAttr Name of the attribute holding the user email.
	 * @return Retrieved user object.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	User getUserWithEmail(String extLogin, String extSourceName, String userEmailAttr) throws ConnectorException;
	/**
	 * Add user as an admin (manager) of facility.
	 * @param facilityId ID of facility.
	 * @param userId ID of user.
	 * @return True if everything went OK.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	boolean addFacilityAdmin(Long facilityId, Long userId) throws ConnectorException;
	/**
	 * Fetch attribute definition by name.
	 * @param name Name of the attribute.
	 * @return Attribute definition.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	PerunAttributeDefinition getAttributeDefinition(String name) throws ConnectorException;
	/**
	 * Fetch facilities having the specified attribute value.
	 * @param attrName Name of the attribute.
	 * @param attrValue Value of the attribute.
	 * @return List of facilities, empty list if no such facility is found.
	 * @throws ConnectorException When communication with Perun fails.
	 */
	List<Facility> getFacilitiesByAttribute(String attrName, String attrValue) throws ConnectorException;
}
<filename>fsdms-angular-app/src/app/component/user/sector-compare/sector-compare.component.ts<gh_stars>1-10
import { Component, OnInit, OnDestroy } from '@angular/core';
import { debounceTime, distinctUntilChanged, switchMap } from 'rxjs/operators';
import { FormBuilder, Validators } from '@angular/forms';
import { DatePipe } from '@angular/common';
import { LogService } from '../../../service/log.service';
import { SectorService } from '../../../service/sector.service';
import { Sector } from '../../../model/sector.model';
import { Consolidation } from '../../../model/consolidation.model';
import { finalize } from 'rxjs/operators';
import { Subject } from 'rxjs';
@Component({
  selector: 'app-sector-compare',
  templateUrl: './sector-compare.component.html',
  styleUrls: ['./sector-compare.component.less']
})
export class SectorCompareComponent implements OnInit, OnDestroy {
  // compareName: string; // An indicator to decide whether to use service or not. Don't need it at the moment.
  // Consolidated {compareName, compareData} payload handed to the chart.
  consolData: Consolidation;
  // Stream of raw search strings; debounced in debounceSubscription().
  debounceText = new Subject<string>();
  // Latest sector suggestions returned by the debounced search.
  debounceResults: Array<Sector>;
  compareForm = this.formBuilder.group({
    sectorName: ['', Validators.required],
    fromPeriod: [''],
    toPeriod: [''],
    debounceRes: [new Sector()]
  });
  // Convenience accessors for the individual form controls.
  get sectorName() { return this.compareForm.get('sectorName'); }
  get fromPeriod() { return this.compareForm.get('fromPeriod'); }
  get toPeriod() { return this.compareForm.get('toPeriod'); }
  get debounceRes() { return this.compareForm.get('debounceRes'); }
  constructor(private formBuilder: FormBuilder, private datePipe: DatePipe,
    private SectorSrv: SectorService, private logSrv: LogService) { }
  ngOnInit() {
    this.logSrv.log('SectorCompareComponent-ngOnInit');
    // subscribe
    this.debounceSubscription();
  }
  ngOnDestroy() { // unsubscribe
    // NOTE(review): unsubscribe() on a Subject also tears it down for any
    // other consumer; complete() is the conventional call here — confirm
    // before changing.
    this.debounceText.unsubscribe();
  }
  debounceSearch(sctsearch: string) { // Debouncing search requests
    this.debounceText.next(sctsearch);
  }
  // Wire the search stream: wait 500 ms of silence, skip repeats, and let
  // switchMap cancel the previous in-flight lookup.
  debounceSubscription() {
    this.debounceText.pipe(
      debounceTime(500),
      distinctUntilChanged(),
      switchMap(
        sctsearch => this.SectorSrv.getSectorsLikeDebounce(sctsearch))
    ).subscribe(
      response => {
        if (response.status === 200) {
          this.logSrv.log('debounceSubscription = ', response.data.result);
          this.debounceResults = response.data.result;
        }
      }
    );
  }
  // Copy the chosen suggestion into the sectorName control and clear the list.
  debounceSelectChange() {
    this.logSrv.log('debounceSelectChange happened');
    if (this.debounceRes.value) {
      this.compareForm.patchValue({
        sectorName: this.debounceRes.value.sectorName
      });
    }
    this.debounceResults = null;
  }
  onSubmit() {
    // submit for sector stockprice
    // With both period bounds filled in, query the date-restricted endpoint;
    // otherwise fetch the full price history.
    if ((this.fromPeriod.value && this.fromPeriod.value !== '') && (this.toPeriod.value && this.toPeriod.value !== '')) {
      const fromDateTime = this.datePipe.transform(this.fromPeriod.value, 'yyyy-MM-dd HH:mm:ss');
      const toDateTime = this.datePipe.transform(this.toPeriod.value, 'yyyy-MM-dd HH:mm:ss');
      this.getPricesWithSectorNameAndDateTime(this.sectorName.value, fromDateTime, toDateTime);
    } else {
      this.getPricesWithSectorNameOnly(this.sectorName.value);
    }
  }
  getPricesWithSectorNameOnly(sectorName: string) {
    this.SectorSrv.getSectorPricesBySectorName(sectorName).pipe(
      finalize(() => {
        this.compareForm.reset();
      })
    ).subscribe( // must call subscribe() or nothing happens. Just call post does not initiate the expected request
      response => {
        if (response.status === 200) {
          this.logSrv.log('getPricesWithSectorNameOnly = ', response.data.result);
          const result: Array<Sector> = response.data.result;
          // Spread to a new object reference so change detection fires.
          this.consolData = {...this.consolidate(sectorName, result)};
        }
      }
    );
  }
  getPricesWithSectorNameAndDateTime(sectorName: string, dtStart: string, dtEnd: string) {
    this.SectorSrv.getSectorPricesBySectorNameAndStockDateTime(
      sectorName, { start: dtStart, end: dtEnd }
    ).pipe(
      finalize(() => {
        this.compareForm.reset();
      })
    ).subscribe( // must call subscribe() or nothing happens. Just call post does not initiate the expected request
      response => {
        if (response.status === 200) {
          this.logSrv.log('getPricesWithSectorNameAndDateTime = ', response.data.result);
          const result: Array<Sector> = response.data.result;
          this.consolData = {...this.consolidate(sectorName, result)};
        }
      }
    );
  }
  // Flatten the sector rows into [epochMillis, price] pairs for the chart.
  consolidate(consolName: string, respRes: Array<any>) {
    const result: Array<Sector> = respRes;
    const compareDataTemp = [];
    result.forEach((element, index, array) => {
      compareDataTemp.push([new Date(element.sectorDateTime).getTime(), element.sectorPrice]);
    });
    // consolidate & pass to the generate-chart
    const genDataTemp: Consolidation = {
      compareName: consolName,
      compareData: compareDataTemp
    };
    return genDataTemp;
  }
}
|
#!/bin/sh
# Pin a WiFi interface name via a systemd .link file keyed on its MAC address.
# Usage: script <current-interface> <desired-interface-name>
set -e

linkfile="/etc/systemd/network/10-persistent-net.link"

[ -z "$1" ] && echo "No WiFi input interface specified" && exit 1
[ -z "$2" ] && echo "No WiFi output interface specified" && exit 1

# Quote all expansions; the original's unquoted $1/$ifname/$linkfile broke on
# unusual values. $(...) replaces the legacy backticks.
ifname=$(ip --json a show "$1" | jq -r '.'[0].ifname)
address=$(ip --json a show "$1" | jq -r '.'[0].address)

[ -z "$ifname" ] && exit 1
[ -z "$address" ] && exit 1

echo "Setting the predictable interface from $1 to $2 with MAC=$address"

# Write the .link file in one shot; content is identical to the original's
# sequence of echo appends.
cat > "$linkfile" <<EOF
[Match]
MACAddress=$address

[Link]
Name=$2
EOF

echo "You can restart the machine"
|
define([
'../libs/buffers',
'../core/file_system',
'../core/api_error',
'../core/node_fs_stats',
'../libs/path',
'../generic/inode',
'../generic/preload_file',
'../core/util'
], function (buffers,file_system, api_error, node_fs_stats, path, Inode, preload_file, util) {
'use strict';
const { BaseFileSystem, SynchronousFileSystem } = file_system;
const { ApiError, ErrorCode } = api_error;
const { FileType } = node_fs_stats;
const { emptyBuffer } = util;
const { PreloadFile} = preload_file;
const { Buffer } = buffers;
/**
 * @hidden
 */
const ROOT_NODE_ID = "/";
/**
 * @hidden
 */
let emptyDirNode = null;
/**
 * Lazily builds (and thereafter reuses) the serialized form of an empty
 * directory listing.
 * @hidden
 */
function getEmptyDirNode() {
    if (emptyDirNode === null) {
        emptyDirNode = Buffer.from("{}");
    }
    return emptyDirNode;
}
/**
 * Generates a random, RFC 4122 v4-shaped ID string.
 * @hidden
 */
function GenerateRandomID() {
    // From http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript
    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {
        const rand = Math.floor(Math.random() * 16);
        // 'y' positions are constrained to 8, 9, a, or b per the UUID v4 layout.
        const digit = c === 'x' ? rand : ((rand & 0x3) | 0x8);
        return digit.toString(16);
    });
}
/**
 * Helper function. When 'e' is defined, forwards it to the callback and
 * returns false. Otherwise returns true and leaves the callback untouched.
 * @hidden
 */
function noError(e, cb) {
    if (!e) {
        return true;
    }
    cb(e);
    return false;
}
/**
 * Helper function. When 'e' is defined, aborts the transaction, forwards the
 * error to the callback once the abort completes, and returns false.
 * Otherwise returns true.
 * @hidden
 */
function noErrorTx(e, tx, cb) {
    if (!e) {
        return true;
    }
    tx.abort(() => {
        cb(e);
    });
    return false;
}
/**
 * A node in the LRU cache's doubly-linked list.
 */
class LRUNode {
    constructor(key, value) {
        this.key = key;
        this.value = value;
        this.prev = null;
        this.next = null;
    }
}
// Adapted from https://chrisrng.svbtle.com/lru-cache-in-javascript
/**
 * Least-recently-used cache backed by a map plus a doubly-linked list
 * (head = most recent, tail = least recent).
 */
class LRUCache {
    /**
     * @param limit Maximum number of entries before eviction kicks in.
     */
    constructor(limit) {
        this.limit = limit;
        this.size = 0;
        this.map = {};
        this.head = null;
        this.tail = null;
    }
    /**
     * Change or add a new value in the cache
     * We overwrite the entry if it already exists
     */
    set(key, value) {
        const node = new LRUNode(key, value);
        if (this.map[key]) {
            // Key already present: unlink the stale node; the fresh node
            // (with the new value) becomes the head below.
            this.remove(key);
        }
        else if (this.size >= this.limit) {
            // Evict the least-recently-used entry (the tail).
            delete this.map[this.tail.key];
            this.size--;
            this.tail = this.tail.prev;
            if (this.tail !== null) {
                this.tail.next = null;
            }
            else {
                // BUGFIX: with limit === 1 the evicted node was the only one,
                // so tail.prev is null; the old code then crashed on
                // `this.tail.next`. The list is now empty: clear head too.
                this.head = null;
            }
        }
        this.setHead(node);
    }
    /* Retrieve a single entry from the cache */
    get(key) {
        if (!this.map[key]) {
            return null;
        }
        // Move the entry to the head to mark it as most recently used.
        const value = this.map[key].value;
        const node = new LRUNode(key, value);
        this.remove(key);
        this.setHead(node);
        return value;
    }
    /* Remove a single entry from the cache */
    remove(key) {
        const node = this.map[key];
        if (!node) {
            return;
        }
        // Unlink from the doubly-linked list, patching head/tail as needed.
        if (node.prev !== null) {
            node.prev.next = node.next;
        }
        else {
            this.head = node.next;
        }
        if (node.next !== null) {
            node.next.prev = node.prev;
        }
        else {
            this.tail = node.prev;
        }
        delete this.map[key];
        this.size--;
    }
    /* Resets the entire cache. */
    removeAll() {
        this.size = 0;
        this.map = {};
        this.head = null;
        this.tail = null;
    }
    /**
     * Links the given node in as the new head (most recently used) and
     * registers it in the map.
     */
    setHead(node) {
        node.next = this.head;
        node.prev = null;
        if (this.head !== null) {
            this.head.prev = node;
        }
        this.head = node;
        if (this.tail === null) {
            this.tail = node;
        }
        this.size++;
        this.map[node.key] = node;
    }
}
/**
 * A simple RW transaction for simple synchronous key-value stores.
 * Records pre-modification values so that `abort()` can roll the store back.
 */
class SimpleSyncRWTransaction {
    constructor(store) {
        this.store = store;
        /**
         * Stores data in the keys we modify prior to modifying them.
         * Allows us to roll back commits.
         */
        this.originalData = {};
        /**
         * List of keys modified in this transaction, if any.
         */
        this.modifiedKeys = [];
    }
    /** Reads a key, stashing its current value for potential rollback. */
    get(key) {
        const current = this.store.get(key);
        this.stashOldValue(key, current);
        return current;
    }
    /** Writes a key, recording its prior value first. */
    put(key, data, overwrite) {
        this.markModified(key);
        return this.store.put(key, data, overwrite);
    }
    /** Deletes a key, recording its prior value first. */
    del(key) {
        this.markModified(key);
        this.store.del(key);
    }
    /** Nothing to do: writes hit the store immediately. */
    commit() { }
    /** Restores every modified key to its pre-transaction state. */
    abort() {
        for (const key of this.modifiedKeys) {
            const previous = this.originalData[key];
            if (previous) {
                // Key existed before the transaction: put the old value back.
                this.store.put(key, previous, true);
            }
            else {
                // Key did not exist before the transaction: remove it.
                this.store.del(key);
            }
        }
    }
    /**
     * Stashes given key value pair into `originalData` if it doesn't already
     * exist. Allows us to stash values the program is requesting anyway to
     * prevent needless `get` requests if the program modifies the data later
     * on during the transaction.
     */
    stashOldValue(key, value) {
        // Keep only the earliest value in the transaction.
        if (this.originalData.hasOwnProperty(key)) {
            return;
        }
        this.originalData[key] = value;
    }
    /**
     * Marks the given key as modified, and stashes its value if it has not been
     * stashed already.
     */
    markModified(key) {
        if (this.modifiedKeys.indexOf(key) !== -1) {
            return;
        }
        this.modifiedKeys.push(key);
        if (!this.originalData.hasOwnProperty(key)) {
            this.originalData[key] = this.store.get(key);
        }
    }
}
/**
 * File handle for SyncKeyValueFileSystem. Buffers contents in memory
 * (via PreloadFile) and writes them back synchronously on sync/close.
 */
class SyncKeyValueFile extends PreloadFile {
    constructor(_fs, _path, _flag, _stat, contents) {
        super(_fs, _path, _flag, _stat, contents);
    }
    /** Flushes buffer and stats to the backing store when dirty. */
    syncSync() {
        if (!this.isDirty()) {
            return;
        }
        this._fs._syncSync(this.getPath(), this.getBuffer(), this.getStats());
        this.resetDirty();
    }
    /** Closing is just a final sync. */
    closeSync() {
        this.syncSync();
    }
}
/**
 * A "Synchronous key-value file system". Stores data to/retrieves data from an
 * underlying key-value store.
 *
 * We use a unique ID for each node in the file system. The root node has a
 * fixed ID.
 * @todo Introduce Node ID caching.
 * @todo Check modes.
 */
class SyncKeyValueFileSystem extends SynchronousFileSystem {
    static isAvailable() { return true; }
    constructor(options) {
        super();
        this.store = options.store;
        // INVARIANT: Ensure that the root exists.
        this.makeRootDirectory();
    }
    getName() { return this.store.name(); }
    isReadOnly() { return false; }
    supportsSymlinks() { return false; }
    supportsProps() { return false; }
    supportsSynch() { return true; }
    /**
     * Delete all contents stored in the file system.
     */
    empty() {
        this.store.clear();
        // INVARIANT: Root always exists.
        this.makeRootDirectory();
    }
    /**
     * Moves the entry at oldPath to newPath. Overwrites an existing *file* at
     * the destination; refuses to overwrite a directory (EPERM), and refuses
     * to move a directory into itself (EBUSY).
     */
    renameSync(oldPath, newPath) {
        const tx = this.store.beginTransaction('readwrite');
        const oldParent = path.dirname(oldPath), oldName = path.basename(oldPath);
        const newParent = path.dirname(newPath), newName = path.basename(newPath);
        // Remove oldPath from parent's directory listing.
        const oldDirNode = this.findINode(tx, oldParent);
        const oldDirList = this.getDirListing(tx, oldParent, oldDirNode);
        if (!oldDirList[oldName]) {
            throw ApiError.ENOENT(oldPath);
        }
        const nodeId = oldDirList[oldName];
        delete oldDirList[oldName];
        // Invariant: Can't move a folder inside itself.
        // This funny little hack ensures that the check passes only if oldPath
        // is a subpath of newParent. We append '/' to avoid matching folders that
        // are a substring of the bottom-most folder in the path.
        if ((newParent + '/').indexOf(oldPath + '/') === 0) {
            throw new ApiError(ErrorCode.EBUSY, oldParent);
        }
        // Add newPath to parent's directory listing.
        let newDirNode, newDirList;
        if (newParent === oldParent) {
            // Prevent us from re-grabbing the same directory listing, which still
            // contains oldName.
            newDirNode = oldDirNode;
            newDirList = oldDirList;
        }
        else {
            newDirNode = this.findINode(tx, newParent);
            newDirList = this.getDirListing(tx, newParent, newDirNode);
        }
        if (newDirList[newName]) {
            // If it's a file, delete it.
            const newNameNode = this.getINode(tx, newPath, newDirList[newName]);
            if (newNameNode.isFile()) {
                try {
                    tx.del(newNameNode.id);
                    tx.del(newDirList[newName]);
                }
                catch (e) {
                    tx.abort();
                    throw e;
                }
            }
            else {
                // If it's a directory, throw a permissions error.
                throw ApiError.EPERM(newPath);
            }
        }
        newDirList[newName] = nodeId;
        // Commit the two changed directory listings.
        try {
            tx.put(oldDirNode.id, Buffer.from(JSON.stringify(oldDirList)), true);
            tx.put(newDirNode.id, Buffer.from(JSON.stringify(newDirList)), true);
        }
        catch (e) {
            tx.abort();
            throw e;
        }
        tx.commit();
    }
    /**
     * Returns the Stats for the item at p. `isLstat` is unused: this file
     * system does not support symlinks, so stat and lstat coincide.
     */
    statSync(p, isLstat) {
        // Get the inode to the item, convert it into a Stats object.
        return this.findINode(this.store.beginTransaction('readonly'), p).toStats();
    }
    /** Creates an empty file at p and returns an open handle to it. */
    createFileSync(p, flag, mode) {
        const tx = this.store.beginTransaction('readwrite'), data = emptyBuffer(), newFile = this.commitNewFile(tx, p, FileType.FILE, mode, data);
        // Open the file.
        return new SyncKeyValueFile(this, p, flag, newFile.toStats(), data);
    }
    /** Opens the existing file at p and returns a handle on its contents. */
    openFileSync(p, flag) {
        const tx = this.store.beginTransaction('readonly'), node = this.findINode(tx, p), data = tx.get(node.id);
        if (data === undefined) {
            throw ApiError.ENOENT(p);
        }
        return new SyncKeyValueFile(this, p, flag, node.toStats(), data);
    }
    /** Removes the file at p. */
    unlinkSync(p) {
        this.removeEntry(p, false);
    }
    /** Removes the directory at p; fails with ENOTEMPTY if it has entries. */
    rmdirSync(p) {
        // Check first if directory is empty.
        if (this.readdirSync(p).length > 0) {
            throw ApiError.ENOTEMPTY(p);
        }
        else {
            this.removeEntry(p, true);
        }
    }
    /** Creates a new, empty directory at p. */
    mkdirSync(p, mode) {
        const tx = this.store.beginTransaction('readwrite'), data = Buffer.from('{}');
        this.commitNewFile(tx, p, FileType.DIRECTORY, mode, data);
    }
    /** Lists the names of the entries in the directory at p. */
    readdirSync(p) {
        const tx = this.store.beginTransaction('readonly');
        return Object.keys(this.getDirListing(tx, p, this.findINode(tx, p)));
    }
    /** Writes a file's data and (if changed) metadata back to the store. */
    _syncSync(p, data, stats) {
        // @todo Ensure mtime updates properly, and use that to determine if a data
        // update is required.
        const tx = this.store.beginTransaction('readwrite'),
        // We use the _findInode helper because we actually need the INode id.
        fileInodeId = this._findINode(tx, path.dirname(p), path.basename(p)), fileInode = this.getINode(tx, p, fileInodeId), inodeChanged = fileInode.update(stats);
        try {
            // Sync data.
            tx.put(fileInode.id, data, true);
            // Sync metadata.
            if (inodeChanged) {
                tx.put(fileInodeId, fileInode.toBuffer(), true);
            }
        }
        catch (e) {
            tx.abort();
            throw e;
        }
        tx.commit();
    }
    /**
     * Checks if the root directory exists. Creates it if it doesn't.
     */
    makeRootDirectory() {
        const tx = this.store.beginTransaction('readwrite');
        if (tx.get(ROOT_NODE_ID) === undefined) {
            // Create new inode.
            const currTime = (new Date()).getTime(),
            // Mode 0666
            dirInode = new Inode(GenerateRandomID(), 4096, 511 | FileType.DIRECTORY, currTime, currTime, currTime);
            // If the root doesn't exist, the first random ID shouldn't exist,
            // either.
            tx.put(dirInode.id, getEmptyDirNode(), false);
            tx.put(ROOT_NODE_ID, dirInode.toBuffer(), false);
            tx.commit();
        }
    }
    /**
     * Helper function for findINode.
     * @param parent The parent directory of the file we are attempting to find.
     * @param filename The filename of the inode we are attempting to find, minus
     *   the parent.
     * @return string The ID of the file's inode in the file system.
     */
    _findINode(tx, parent, filename) {
        const readDirectory = (inode) => {
            // Get the root's directory listing.
            const dirList = this.getDirListing(tx, parent, inode);
            // Get the file's ID.
            if (dirList[filename]) {
                return dirList[filename];
            }
            else {
                throw ApiError.ENOENT(path.resolve(parent, filename));
            }
        };
        if (parent === '/') {
            if (filename === '') {
                // BASE CASE #1: Return the root's ID.
                return ROOT_NODE_ID;
            }
            else {
                // BASE CASE #2: Find the item in the root node.
                return readDirectory(this.getINode(tx, parent, ROOT_NODE_ID));
            }
        }
        else {
            // RECURSIVE CASE: resolve the parent directory's inode first.
            return readDirectory(this.getINode(tx, parent + path.sep + filename, this._findINode(tx, path.dirname(parent), path.basename(parent))));
        }
    }
    /**
     * Finds the Inode of the given path.
     * @param p The path to look up.
     * @return The Inode of the path p.
     * @todo memoize/cache
     */
    findINode(tx, p) {
        return this.getINode(tx, p, this._findINode(tx, path.dirname(p), path.basename(p)));
    }
    /**
     * Given the ID of a node, retrieves the corresponding Inode.
     * @param tx The transaction to use.
     * @param p The corresponding path to the file (used for error messages).
     * @param id The ID to look up.
     */
    getINode(tx, p, id) {
        const inode = tx.get(id);
        if (inode === undefined) {
            throw ApiError.ENOENT(p);
        }
        return Inode.fromBuffer(inode);
    }
    /**
     * Given the Inode of a directory, retrieves the corresponding directory
     * listing.
     */
    getDirListing(tx, p, inode) {
        if (!inode.isDirectory()) {
            throw ApiError.ENOTDIR(p);
        }
        const data = tx.get(inode.id);
        if (data === undefined) {
            throw ApiError.ENOENT(p);
        }
        return JSON.parse(data.toString());
    }
    /**
     * Creates a new node under a random ID. Retries 5 times before giving up in
     * the exceedingly unlikely chance that we try to reuse a random GUID.
     * @return The GUID that the data was stored under.
     */
    addNewNode(tx, data) {
        // BUGFIX: `retries` was previously declared `const` and never
        // incremented, so a persistent put failure looped forever instead of
        // surfacing an EIO after 5 attempts.
        let retries = 0;
        let currId;
        while (retries < 5) {
            try {
                currId = GenerateRandomID();
                tx.put(currId, data, false);
                return currId;
            }
            catch (e) {
                // ID collision (or transient store failure): reroll.
                retries++;
            }
        }
        throw new ApiError(ErrorCode.EIO, 'Unable to commit data to key-value store.');
    }
    /**
     * Commits a new file (well, a FILE or a DIRECTORY) to the file system with
     * the given mode.
     * Note: This will commit the transaction.
     * @param p The path to the new file.
     * @param type The type of the new file.
     * @param mode The mode to create the new file with.
     * @param data The data to store at the file's data node.
     * @return The Inode for the new file.
     */
    commitNewFile(tx, p, type, mode, data) {
        const parentDir = path.dirname(p), fname = path.basename(p), parentNode = this.findINode(tx, parentDir), dirListing = this.getDirListing(tx, parentDir, parentNode), currTime = (new Date()).getTime();
        // Invariant: The root always exists.
        // If we don't check this prior to taking steps below, we will create a
        // file with name '' in root should p == '/'.
        if (p === '/') {
            throw ApiError.EEXIST(p);
        }
        // Check if file already exists.
        if (dirListing[fname]) {
            throw ApiError.EEXIST(p);
        }
        let fileNode;
        try {
            // Commit data.
            const dataId = this.addNewNode(tx, data);
            fileNode = new Inode(dataId, data.length, mode | type, currTime, currTime, currTime);
            // Commit file node.
            const fileNodeId = this.addNewNode(tx, fileNode.toBuffer());
            // Update and commit parent directory listing.
            dirListing[fname] = fileNodeId;
            tx.put(parentNode.id, Buffer.from(JSON.stringify(dirListing)), true);
        }
        catch (e) {
            tx.abort();
            throw e;
        }
        tx.commit();
        return fileNode;
    }
    /**
     * Remove all traces of the given path from the file system.
     * @param p The path to remove from the file system.
     * @param isDir Does the path belong to a directory, or a file?
     * @todo Update mtime.
     */
    removeEntry(p, isDir) {
        const tx = this.store.beginTransaction('readwrite'), parent = path.dirname(p), parentNode = this.findINode(tx, parent), parentListing = this.getDirListing(tx, parent, parentNode), fileName = path.basename(p);
        if (!parentListing[fileName]) {
            throw ApiError.ENOENT(p);
        }
        // Remove from directory listing of parent.
        const fileNodeId = parentListing[fileName];
        delete parentListing[fileName];
        // Get file inode.
        const fileNode = this.getINode(tx, p, fileNodeId);
        if (!isDir && fileNode.isDirectory()) {
            throw ApiError.EISDIR(p);
        }
        else if (isDir && !fileNode.isDirectory()) {
            throw ApiError.ENOTDIR(p);
        }
        try {
            // Delete data.
            tx.del(fileNode.id);
            // Delete node.
            tx.del(fileNodeId);
            // Update directory listing.
            tx.put(parentNode.id, Buffer.from(JSON.stringify(parentListing)), true);
        }
        catch (e) {
            tx.abort();
            throw e;
        }
        // Success.
        tx.commit();
    }
}
/**
 * File handle for AsyncKeyValueFileSystem. Buffers contents in memory
 * (via PreloadFile) and writes them back asynchronously on sync/close.
 */
class AsyncKeyValueFile extends PreloadFile {
    constructor(_fs, _path, _flag, _stat, contents) {
        super(_fs, _path, _flag, _stat, contents);
    }
    /** Flushes buffer and stats to the backing store when dirty. */
    sync(cb) {
        if (!this.isDirty()) {
            // Nothing changed since the last sync; report success immediately.
            cb();
            return;
        }
        this._fs._sync(this.getPath(), this.getBuffer(), this.getStats(), (e) => {
            // Only clear the dirty flag when the write actually succeeded.
            if (!e) {
                this.resetDirty();
            }
            cb(e);
        });
    }
    /** Closing is just a final sync. */
    close(cb) {
        this.sync(cb);
    }
}
/**
 * An "Asynchronous key-value file system". Stores data to/retrieves data from
 * an underlying asynchronous key-value store.
 */
class AsyncKeyValueFileSystem extends BaseFileSystem {
    /**
     * @param cacheSize Maximum number of path -> inode-ID mappings to cache;
     *   0 or less disables the cache.
     */
    constructor(cacheSize) {
        super();
        this._cache = null;
        if (cacheSize > 0) {
            this._cache = new LRUCache(cacheSize);
        }
    }
    static isAvailable() { return true; }
    /**
     * Initializes the file system. Typically called by subclasses' async
     * constructors.
     */
    init(store, cb) {
        this.store = store;
        // INVARIANT: Ensure that the root exists.
        this.makeRootDirectory(cb);
    }
    getName() { return this.store.name(); }
    isReadOnly() { return false; }
    supportsSymlinks() { return false; }
    supportsProps() { return false; }
    supportsSynch() { return false; }
    /**
     * Delete all contents stored in the file system.
     */
    empty(cb) {
        if (this._cache) {
            this._cache.removeAll();
        }
        this.store.clear((e) => {
            if (noError(e, cb)) {
                // INVARIANT: Root always exists.
                this.makeRootDirectory(cb);
            }
        });
    }
    /**
     * Moves the entry at oldPath to newPath. Overwrites an existing *file* at
     * the destination; refuses to overwrite a directory (EPERM), and refuses
     * to move a directory into itself (EBUSY).
     */
    rename(oldPath, newPath, cb) {
        // TODO: Make rename compatible with the cache.
        if (this._cache) {
            // Clear and disable cache during renaming process.
            const c = this._cache;
            this._cache = null;
            c.removeAll();
            const oldCb = cb;
            cb = (e) => {
                // Restore empty cache.
                this._cache = c;
                oldCb(e);
            };
        }
        const tx = this.store.beginTransaction('readwrite');
        const oldParent = path.dirname(oldPath), oldName = path.basename(oldPath);
        const newParent = path.dirname(newPath), newName = path.basename(newPath);
        const inodes = {};
        const lists = {};
        let errorOccurred = false;
        // Invariant: Can't move a folder inside itself.
        // This funny little hack ensures that the check passes only if oldPath
        // is a subpath of newParent. We append '/' to avoid matching folders that
        // are a substring of the bottom-most folder in the path.
        if ((newParent + '/').indexOf(oldPath + '/') === 0) {
            return cb(new ApiError(ErrorCode.EBUSY, oldParent));
        }
        /**
         * Responsible for Phase 2 of the rename operation: Modifying and
         * committing the directory listings. Called once we have successfully
         * retrieved both the old and new parent's inodes and listings.
         */
        const theOleSwitcharoo = () => {
            // Sanity check: Ensure both paths are present, and no error has occurred.
            if (errorOccurred || !lists.hasOwnProperty(oldParent) || !lists.hasOwnProperty(newParent)) {
                return;
            }
            const oldParentList = lists[oldParent], oldParentINode = inodes[oldParent], newParentList = lists[newParent], newParentINode = inodes[newParent];
            // Delete file from old parent.
            if (!oldParentList[oldName]) {
                // BUGFIX: abort the open transaction before reporting ENOENT,
                // matching every other error path in this method (previously
                // the transaction was left dangling).
                tx.abort(() => {
                    cb(ApiError.ENOENT(oldPath));
                });
            }
            else {
                const fileId = oldParentList[oldName];
                delete oldParentList[oldName];
                // Finishes off the renaming process by adding the file to the new
                // parent.
                const completeRename = () => {
                    newParentList[newName] = fileId;
                    // Commit old parent's list.
                    tx.put(oldParentINode.id, Buffer.from(JSON.stringify(oldParentList)), true, (e) => {
                        if (noErrorTx(e, tx, cb)) {
                            if (oldParent === newParent) {
                                // DONE!
                                tx.commit(cb);
                            }
                            else {
                                // Commit new parent's list.
                                tx.put(newParentINode.id, Buffer.from(JSON.stringify(newParentList)), true, (e) => {
                                    if (noErrorTx(e, tx, cb)) {
                                        tx.commit(cb);
                                    }
                                });
                            }
                        }
                    });
                };
                if (newParentList[newName]) {
                    // 'newPath' already exists. Check if it's a file or a directory, and
                    // act accordingly.
                    this.getINode(tx, newPath, newParentList[newName], (e, inode) => {
                        if (noErrorTx(e, tx, cb)) {
                            if (inode.isFile()) {
                                // Delete the file and continue.
                                tx.del(inode.id, (e) => {
                                    if (noErrorTx(e, tx, cb)) {
                                        tx.del(newParentList[newName], (e) => {
                                            if (noErrorTx(e, tx, cb)) {
                                                completeRename();
                                            }
                                        });
                                    }
                                });
                            }
                            else {
                                // Can't overwrite a directory using rename.
                                tx.abort((e) => {
                                    cb(ApiError.EPERM(newPath));
                                });
                            }
                        }
                    });
                }
                else {
                    completeRename();
                }
            }
        };
        /**
         * Grabs a path's inode and directory listing, and shoves it into the
         * inodes and lists hashes.
         */
        const processInodeAndListings = (p) => {
            this.findINodeAndDirListing(tx, p, (e, node, dirList) => {
                if (e) {
                    if (!errorOccurred) {
                        errorOccurred = true;
                        tx.abort(() => {
                            cb(e);
                        });
                    }
                    // If error has occurred already, just stop here.
                }
                else {
                    inodes[p] = node;
                    lists[p] = dirList;
                    theOleSwitcharoo();
                }
            });
        };
        processInodeAndListings(oldParent);
        if (oldParent !== newParent) {
            processInodeAndListings(newParent);
        }
    }
    /**
     * Passes the Stats for the item at p to cb. `isLstat` is unused: this file
     * system does not support symlinks, so stat and lstat coincide.
     */
    stat(p, isLstat, cb) {
        const tx = this.store.beginTransaction('readonly');
        this.findINode(tx, p, (e, inode) => {
            if (noError(e, cb)) {
                cb(null, inode.toStats());
            }
        });
    }
    /** Creates an empty file at p and passes an open handle to cb. */
    createFile(p, flag, mode, cb) {
        const tx = this.store.beginTransaction('readwrite'), data = emptyBuffer();
        this.commitNewFile(tx, p, FileType.FILE, mode, data, (e, newFile) => {
            if (noError(e, cb)) {
                cb(null, new AsyncKeyValueFile(this, p, flag, newFile.toStats(), data));
            }
        });
    }
    /** Opens the existing file at p and passes a handle on its contents to cb. */
    openFile(p, flag, cb) {
        const tx = this.store.beginTransaction('readonly');
        // Step 1: Grab the file's inode.
        this.findINode(tx, p, (e, inode) => {
            if (noError(e, cb)) {
                // Step 2: Grab the file's data.
                tx.get(inode.id, (e, data) => {
                    if (noError(e, cb)) {
                        if (data === undefined) {
                            cb(ApiError.ENOENT(p));
                        }
                        else {
                            cb(null, new AsyncKeyValueFile(this, p, flag, inode.toStats(), data));
                        }
                    }
                });
            }
        });
    }
    /** Removes the file at p. */
    unlink(p, cb) {
        this.removeEntry(p, false, cb);
    }
    /** Removes the directory at p; fails with ENOTEMPTY if it has entries. */
    rmdir(p, cb) {
        // Check first if directory is empty.
        this.readdir(p, (err, files) => {
            if (err) {
                cb(err);
            }
            else if (files.length > 0) {
                cb(ApiError.ENOTEMPTY(p));
            }
            else {
                this.removeEntry(p, true, cb);
            }
        });
    }
    /** Creates a new, empty directory at p. */
    mkdir(p, mode, cb) {
        const tx = this.store.beginTransaction('readwrite'), data = Buffer.from('{}');
        this.commitNewFile(tx, p, FileType.DIRECTORY, mode, data, cb);
    }
    /** Passes the names of the entries in the directory at p to cb. */
    readdir(p, cb) {
        const tx = this.store.beginTransaction('readonly');
        this.findINode(tx, p, (e, inode) => {
            if (noError(e, cb)) {
                this.getDirListing(tx, p, inode, (e, dirListing) => {
                    if (noError(e, cb)) {
                        cb(null, Object.keys(dirListing));
                    }
                });
            }
        });
    }
    /** Writes a file's data and (if changed) metadata back to the store. */
    _sync(p, data, stats, cb) {
        // @todo Ensure mtime updates properly, and use that to determine if a data
        // update is required.
        const tx = this.store.beginTransaction('readwrite');
        // Step 1: Get the file node's ID.
        this._findINode(tx, path.dirname(p), path.basename(p), (e, fileInodeId) => {
            if (noErrorTx(e, tx, cb)) {
                // Step 2: Get the file inode.
                this.getINode(tx, p, fileInodeId, (e, fileInode) => {
                    if (noErrorTx(e, tx, cb)) {
                        const inodeChanged = fileInode.update(stats);
                        // Step 3: Sync the data.
                        tx.put(fileInode.id, data, true, (e) => {
                            if (noErrorTx(e, tx, cb)) {
                                // Step 4: Sync the metadata (if it changed)!
                                if (inodeChanged) {
                                    tx.put(fileInodeId, fileInode.toBuffer(), true, (e) => {
                                        if (noErrorTx(e, tx, cb)) {
                                            tx.commit(cb);
                                        }
                                    });
                                }
                                else {
                                    // No need to sync metadata; return.
                                    tx.commit(cb);
                                }
                            }
                        });
                    }
                });
            }
        });
    }
    /**
     * Checks if the root directory exists. Creates it if it doesn't.
     */
    makeRootDirectory(cb) {
        const tx = this.store.beginTransaction('readwrite');
        tx.get(ROOT_NODE_ID, (e, data) => {
            if (e || data === undefined) {
                // Create new inode.
                const currTime = (new Date()).getTime(),
                // Mode 0666
                dirInode = new Inode(GenerateRandomID(), 4096, 511 | FileType.DIRECTORY, currTime, currTime, currTime);
                // If the root doesn't exist, the first random ID shouldn't exist,
                // either.
                tx.put(dirInode.id, getEmptyDirNode(), false, (e) => {
                    if (noErrorTx(e, tx, cb)) {
                        tx.put(ROOT_NODE_ID, dirInode.toBuffer(), false, (e) => {
                            if (e) {
                                tx.abort(() => { cb(e); });
                            }
                            else {
                                tx.commit(cb);
                            }
                        });
                    }
                });
            }
            else {
                // We're good.
                tx.commit(cb);
            }
        });
    }
    /**
     * Helper function for findINode.
     * @param parent The parent directory of the file we are attempting to find.
     * @param filename The filename of the inode we are attempting to find, minus
     *   the parent.
     * @param cb Passed an error or the ID of the file's inode in the file system.
     */
    _findINode(tx, parent, filename, cb) {
        if (this._cache) {
            const id = this._cache.get(path.join(parent, filename));
            if (id) {
                return cb(null, id);
            }
        }
        const handleDirectoryListings = (e, inode, dirList) => {
            if (e) {
                cb(e);
            }
            else if (dirList[filename]) {
                const id = dirList[filename];
                if (this._cache) {
                    this._cache.set(path.join(parent, filename), id);
                }
                cb(null, id);
            }
            else {
                cb(ApiError.ENOENT(path.resolve(parent, filename)));
            }
        };
        if (parent === '/') {
            if (filename === '') {
                // BASE CASE #1: Return the root's ID.
                if (this._cache) {
                    this._cache.set(path.join(parent, filename), ROOT_NODE_ID);
                }
                cb(null, ROOT_NODE_ID);
            }
            else {
                // BASE CASE #2: Find the item in the root node.
                this.getINode(tx, parent, ROOT_NODE_ID, (e, inode) => {
                    if (noError(e, cb)) {
                        this.getDirListing(tx, parent, inode, (e, dirList) => {
                            // handle_directory_listings will handle e for us.
                            handleDirectoryListings(e, inode, dirList);
                        });
                    }
                });
            }
        }
        else {
            // Get the parent directory's INode, and find the file in its directory
            // listing.
            this.findINodeAndDirListing(tx, parent, handleDirectoryListings);
        }
    }
    /**
     * Finds the Inode of the given path.
     * @param p The path to look up.
     * @param cb Passed an error or the Inode of the path p.
     * @todo memoize/cache
     */
    findINode(tx, p, cb) {
        this._findINode(tx, path.dirname(p), path.basename(p), (e, id) => {
            if (noError(e, cb)) {
                this.getINode(tx, p, id, cb);
            }
        });
    }
    /**
     * Given the ID of a node, retrieves the corresponding Inode.
     * @param tx The transaction to use.
     * @param p The corresponding path to the file (used for error messages).
     * @param id The ID to look up.
     * @param cb Passed an error or the inode under the given id.
     */
    getINode(tx, p, id, cb) {
        tx.get(id, (e, data) => {
            if (noError(e, cb)) {
                if (data === undefined) {
                    cb(ApiError.ENOENT(p));
                }
                else {
                    cb(null, Inode.fromBuffer(data));
                }
            }
        });
    }
    /**
     * Given the Inode of a directory, retrieves the corresponding directory
     * listing.
     */
    getDirListing(tx, p, inode, cb) {
        if (!inode.isDirectory()) {
            cb(ApiError.ENOTDIR(p));
        }
        else {
            tx.get(inode.id, (e, data) => {
                if (noError(e, cb)) {
                    try {
                        cb(null, JSON.parse(data.toString()));
                    }
                    catch (e) {
                        // Occurs when data is undefined, or corresponds to something other
                        // than a directory listing. The latter should never occur unless
                        // the file system is corrupted.
                        cb(ApiError.ENOENT(p));
                    }
                }
            });
        }
    }
    /**
     * Given a path to a directory, retrieves the corresponding INode and
     * directory listing.
     */
    findINodeAndDirListing(tx, p, cb) {
        this.findINode(tx, p, (e, inode) => {
            if (noError(e, cb)) {
                this.getDirListing(tx, p, inode, (e, listing) => {
                    if (noError(e, cb)) {
                        cb(null, inode, listing);
                    }
                });
            }
        });
    }
    /**
     * Adds a new node under a random ID. Retries 5 times before giving up in
     * the exceedingly unlikely chance that we try to reuse a random GUID.
     * @param cb Passed an error or the GUID that the data was stored under.
     */
    addNewNode(tx, data, cb) {
        let retries = 0, currId;
        const reroll = () => {
            if (++retries === 5) {
                // Max retries hit. Return with an error.
                cb(new ApiError(ErrorCode.EIO, 'Unable to commit data to key-value store.'));
            }
            else {
                // Try again.
                currId = GenerateRandomID();
                tx.put(currId, data, false, (e, committed) => {
                    if (e || !committed) {
                        reroll();
                    }
                    else {
                        // Successfully stored under 'currId'.
                        cb(null, currId);
                    }
                });
            }
        };
        reroll();
    }
    /**
     * Commits a new file (well, a FILE or a DIRECTORY) to the file system with
     * the given mode.
     * Note: This will commit the transaction.
     * @param p The path to the new file.
     * @param type The type of the new file.
     * @param mode The mode to create the new file with.
     * @param data The data to store at the file's data node.
     * @param cb Passed an error or the Inode for the new file.
     */
    commitNewFile(tx, p, type, mode, data, cb) {
        const parentDir = path.dirname(p), fname = path.basename(p), currTime = (new Date()).getTime();
        // Invariant: The root always exists.
        // If we don't check this prior to taking steps below, we will create a
        // file with name '' in root should p == '/'.
        if (p === '/') {
            return cb(ApiError.EEXIST(p));
        }
        // Let's build a pyramid of code!
        // Step 1: Get the parent directory's inode and directory listing
        this.findINodeAndDirListing(tx, parentDir, (e, parentNode, dirListing) => {
            if (noErrorTx(e, tx, cb)) {
                if (dirListing[fname]) {
                    // File already exists.
                    tx.abort(() => {
                        cb(ApiError.EEXIST(p));
                    });
                }
                else {
                    // Step 2: Commit data to store.
                    this.addNewNode(tx, data, (e, dataId) => {
                        if (noErrorTx(e, tx, cb)) {
                            // Step 3: Commit the file's inode to the store.
                            const fileInode = new Inode(dataId, data.length, mode | type, currTime, currTime, currTime);
                            this.addNewNode(tx, fileInode.toBuffer(), (e, fileInodeId) => {
                                if (noErrorTx(e, tx, cb)) {
                                    // Step 4: Update parent directory's listing.
                                    dirListing[fname] = fileInodeId;
                                    tx.put(parentNode.id, Buffer.from(JSON.stringify(dirListing)), true, (e) => {
                                        if (noErrorTx(e, tx, cb)) {
                                            // Step 5: Commit and return the new inode.
                                            tx.commit((e) => {
                                                if (noErrorTx(e, tx, cb)) {
                                                    cb(null, fileInode);
                                                }
                                            });
                                        }
                                    });
                                }
                            });
                        }
                    });
                }
            }
        });
    }
    /**
     * Remove all traces of the given path from the file system.
     * @param p The path to remove from the file system.
     * @param isDir Does the path belong to a directory, or a file?
     * @todo Update mtime.
     */
    removeEntry(p, isDir, cb) {
        // Eagerly delete from cache (harmless even if removal fails)
        if (this._cache) {
            this._cache.remove(p);
        }
        const tx = this.store.beginTransaction('readwrite'), parent = path.dirname(p), fileName = path.basename(p);
        // Step 1: Get parent directory's node and directory listing.
        this.findINodeAndDirListing(tx, parent, (e, parentNode, parentListing) => {
            if (noErrorTx(e, tx, cb)) {
                if (!parentListing[fileName]) {
                    tx.abort(() => {
                        cb(ApiError.ENOENT(p));
                    });
                }
                else {
                    // Remove from directory listing of parent.
                    const fileNodeId = parentListing[fileName];
                    delete parentListing[fileName];
                    // Step 2: Get file inode.
                    this.getINode(tx, p, fileNodeId, (e, fileNode) => {
                        if (noErrorTx(e, tx, cb)) {
                            if (!isDir && fileNode.isDirectory()) {
                                tx.abort(() => {
                                    cb(ApiError.EISDIR(p));
                                });
                            }
                            else if (isDir && !fileNode.isDirectory()) {
                                tx.abort(() => {
                                    cb(ApiError.ENOTDIR(p));
                                });
                            }
                            else {
                                // Step 3: Delete data.
                                tx.del(fileNode.id, (e) => {
                                    if (noErrorTx(e, tx, cb)) {
                                        // Step 4: Delete node.
                                        tx.del(fileNodeId, (e) => {
                                            if (noErrorTx(e, tx, cb)) {
                                                // Step 5: Update directory listing.
                                                tx.put(parentNode.id, Buffer.from(JSON.stringify(parentListing)), true, (e) => {
                                                    if (noErrorTx(e, tx, cb)) {
                                                        tx.commit(cb);
                                                    }
                                                });
                                            }
                                        });
                                    }
                                });
                            }
                        }
                    });
                }
            }
        });
    }
}
return {
SimpleSyncRWTransaction: SimpleSyncRWTransaction,
SyncKeyValueFile: SyncKeyValueFile,
SyncKeyValueFileSystem: SyncKeyValueFileSystem,
AsyncKeyValueFile: AsyncKeyValueFile,
AsyncKeyValueFileSystem: AsyncKeyValueFileSystem
};
}); |
import { ASTKindToNode, Kind } from 'graphql';
import { GraphQLESLintRule, ValueOf } from '../types';
import { requireReachableTypesFromContext } from '../utils';
import { GraphQLESTreeNode } from '../estree-parser';
// Message ID used in meta.messages and in context.report().
const UNREACHABLE_TYPE = 'UNREACHABLE_TYPE';
// Rule ID; also used to key the reachable-types lookup and the docs URL.
const RULE_ID = 'no-unreachable-types';
// Every definition/extension kind that introduces a named schema member this
// rule checks for reachability. Joined into one ESLint selector in create().
const KINDS = [
  Kind.DIRECTIVE_DEFINITION,
  Kind.OBJECT_TYPE_DEFINITION,
  Kind.OBJECT_TYPE_EXTENSION,
  Kind.INTERFACE_TYPE_DEFINITION,
  Kind.INTERFACE_TYPE_EXTENSION,
  Kind.SCALAR_TYPE_DEFINITION,
  Kind.SCALAR_TYPE_EXTENSION,
  Kind.INPUT_OBJECT_TYPE_DEFINITION,
  Kind.INPUT_OBJECT_TYPE_EXTENSION,
  Kind.UNION_TYPE_DEFINITION,
  Kind.UNION_TYPE_EXTENSION,
  Kind.ENUM_TYPE_DEFINITION,
  Kind.ENUM_TYPE_EXTENSION,
] as const;
// Union of the kinds above, and the subset of the AST node map they select.
type AllowedKind = typeof KINDS[number];
type AllowedKindToNode = Pick<ASTKindToNode, AllowedKind>;
/**
 * ESLint rule: reports (and offers a removal suggestion for) every schema
 * type that is not reachable from a root-level field. Requires a schema.
 */
const rule: GraphQLESLintRule = {
  meta: {
    messages: {
      [UNREACHABLE_TYPE]: 'Type "{{ typeName }}" is unreachable',
    },
    docs: {
      description: `Requires all types to be reachable at some level by root level fields.`,
      category: 'Schema',
      url: `https://github.com/dotansimha/graphql-eslint/blob/master/docs/rules/${RULE_ID}.md`,
      requiresSchema: true,
      examples: [
        {
          title: 'Incorrect',
          code: /* GraphQL */ `
            type User {
              id: ID!
              name: String
            }
            type Query {
              me: String
            }
          `,
        },
        {
          title: 'Correct',
          code: /* GraphQL */ `
            type User {
              id: ID!
              name: String
            }
            type Query {
              me: User
            }
          `,
        },
      ],
      recommended: true,
    },
    type: 'suggestion',
    schema: [],
    hasSuggestions: true,
  },
  create(context) {
    // Names of all types reachable from the schema's roots (computed once).
    const reachableTypes = requireReachableTypesFromContext(RULE_ID, context);
    // Single comma-separated selector that visits every kind in KINDS.
    const selector = KINDS.join(',');
    return {
      [selector](node: GraphQLESTreeNode<ValueOf<AllowedKindToNode>>) {
        const typeName = node.name.value;
        if (!reachableTypes.has(typeName)) {
          // Report on the name token; suggest deleting the whole definition.
          context.report({
            node: node.name,
            messageId: UNREACHABLE_TYPE,
            data: { typeName },
            suggest: [
              {
                desc: `Remove ${typeName}`,
                fix: fixer => fixer.remove(node as any),
              },
            ],
          });
        }
      },
    };
  },
};
export default rule;
|
use chrono::{DateTime, NaiveDateTime, TimeZone, Utc};
fn process_datetime(buf: &[u8]) -> Option<DateTime<Utc>> {
if buf.is_empty() {
return None; // Empty buffer, unsupported format
}
let datetime_str = match std::str::from_utf8(buf) {
Ok(s) => s,
Err(_) => return None, // Invalid UTF-8 encoding, unsupported format
};
let datetime = if let Some(index) = datetime_str.find('Z') {
// UTC time
let naive_datetime = match NaiveDateTime::parse_from_str(&datetime_str[..index], "%Y-%m-%dT%H:%M:%S") {
Ok(dt) => dt,
Err(_) => return None, // Invalid datetime format, unsupported format
};
Some(naive_datetime.assume_utc())
} else {
// Local datetime with no timezone information
let default_offset = None; // Replace with actual default offset if available
let naive_datetime = match NaiveDateTime::parse_from_str(datetime_str, "%Y-%m-%dT%H:%M:%S") {
Ok(dt) => dt,
Err(_) => return None, // Invalid datetime format, unsupported format
};
Some(naive_datetime.assume_offset(default_offset.unwrap_or_else(UtcOffset::UTC)).to_offset(UtcOffset::UTC))
};
datetime.map(DateTime::from_utc)
} |
#!/bin/bash
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script is intended to start the kubelet and then loop until
# it detects a failure. It then exits, and supervisord restarts it
# which in turn restarts the kubelet.
# Restart the kubelet, give it a minute to come up, then poll its local
# healthz endpoint; when it stops answering, exit non-zero so supervisord
# restarts this script (and with it the kubelet).
/etc/init.d/kubelet stop
/etc/init.d/kubelet start

echo "waiting a minute for startup"
sleep 60

# Per-probe timeout for curl, in seconds.
max_seconds=10

while :; do
  curl -m ${max_seconds} -f -s http://127.0.0.1:10255/healthz > /dev/null
  if [ $? -ne 0 ]; then
    echo "kubelet failed!"
    exit 2
  fi
  sleep 10
done
REM Build VirtaMarketAnalyzer with Maven and run the Wizard crawler, writing
REM day-of-month-stamped log files (so logs rotate monthly).
call cd /D %userprofile%\IdeaProjects\VirtaMarketAnalyzerInGit\
call mvn clean
call mvn package
REM NOTE(review): %date% output is locale-dependent; taking the first two
REM characters as the day of month assumes a day-first date format -- confirm
REM on the target machine.
set currentDate=%date%
set dayOfMonth=%currentDate:~0,2%
REM Run with a 12G heap, UTF-8, IPv4 preferred and legacy merge sort; stdout
REM and stderr are redirected to separate day-stamped log files.
call "C:\Program Files\Java\jdk-9\bin\java" -Djava.util.Arrays.useLegacyMergeSort=true -Djava.net.preferIPv4Stack=true -Djava.io.tmpdir=d:\tmp -Xmx12G -Dfile.encoding=utf-8 -cp "%userprofile%\IdeaProjects\VirtaMarketAnalyzerInGit\target\VirtaMarketAnalyzer-jar-with-dependencies.jar" ru.VirtaMarketAnalyzer.main.Wizard > "%userprofile%\IdeaProjects\VirtaMarketAnalyzerInGit\logs\log_%dayOfMonth%.txt" 2>"%userprofile%\IdeaProjects\VirtaMarketAnalyzerInGit\logs\log_err_%dayOfMonth%.txt"
import numpy as np
import sys
def get_x_y(file_path, directory_name):
    """Load a comma-separated numeric dataset and split it into features/labels.

    The original body was an empty placeholder that returned the undefined
    names ``x`` and ``y`` (a guaranteed NameError). This implements the
    conventional layout: each row is one sample, the last column is the label.

    Args:
        file_path: Path to the CSV file to load.
        directory_name: Dataset directory name; kept for interface
            compatibility with callers that may select per-dataset
            preprocessing (unused here).

    Returns:
        Tuple ``(x, y)`` where ``x`` is an ndarray of shape
        ``(n_samples, n_features)`` and ``y`` of shape ``(n_samples,)``.
    """
    # ndmin=2 keeps a single-row file as a 2-D array so slicing stays uniform.
    data = np.loadtxt(file_path, delimiter=',', ndmin=2)
    x = data[:, :-1]
    y = data[:, -1]
    return x, y
def process_data(name, filename_train):
    """Assemble the training split for dataset `name`.

    Loads `<name>/<filename_train>` via get_x_y and, when a fourth
    command-line argument is present, merges that validation file's samples
    into the training arrays (they are appended, not kept separate).

    Returns a dict with keys 'x_train', 'y_train' and 'n_train'.
    """
    data = {}
    x, y = get_x_y(name + '/' + filename_train, name)
    data['x_train'] = x
    data['y_train'] = y
    # NOTE(review): reading sys.argv inside the function couples it to the CLI
    # invocation; sys.argv[4] is assumed to name a validation file inside the
    # same directory -- confirm against the callers.
    if len(sys.argv) > 4:
        filename_val = sys.argv[4]
        x, y = get_x_y(name + '/' + filename_val, name)
        # Stack features row-wise and extend labels to match.
        data['x_train'] = np.vstack((data['x_train'], x))
        data['y_train'] = np.append(data['y_train'], y)
    data['n_train'] = data['x_train'].shape[0]
    # Sanity check: exactly one label per training sample.
    assert len(data['y_train']) == data['n_train']
    return data

# Example usage. NOTE(review): this runs at import time and expects
# data_directory/train_data.csv to exist on disk.
name = "data_directory"
filename_train = "train_data.csv"
processed_data = process_data(name, filename_train)
print(processed_data)
<filename>code/iaas/cluster-logic/src/main/java/io/cattle/iaas/cluster/service/impl/ClusterManagerImpl.java
package io.cattle.iaas.cluster.service.impl;
import io.cattle.iaas.cluster.service.ClusterManager;
import io.cattle.platform.agent.instance.dao.AgentInstanceDao;
import io.cattle.platform.agent.instance.factory.AgentInstanceFactory;
import io.cattle.platform.archaius.util.ArchaiusUtil;
import io.cattle.platform.configitem.request.ConfigUpdateRequest;
import io.cattle.platform.configitem.request.util.ConfigUpdateRequestUtils;
import io.cattle.platform.configitem.version.ConfigItemStatusManager;
import io.cattle.platform.core.constants.ClusterConstants;
import io.cattle.platform.core.constants.CommonStatesConstants;
import io.cattle.platform.core.constants.InstanceConstants;
import io.cattle.platform.core.constants.NetworkConstants;
import io.cattle.platform.core.constants.InstanceConstants.SystemContainer;
import io.cattle.platform.core.dao.AccountDao;
import io.cattle.platform.core.dao.ClusterHostMapDao;
import io.cattle.platform.core.dao.IpAddressDao;
import io.cattle.platform.core.dao.NetworkDao;
import io.cattle.platform.core.model.Agent;
import io.cattle.platform.core.model.Host;
import io.cattle.platform.core.model.Instance;
import io.cattle.platform.core.model.IpAddress;
import io.cattle.platform.core.model.Network;
import io.cattle.platform.core.model.Nic;
import io.cattle.platform.core.model.tables.records.ClusterHostMapRecord;
import io.cattle.platform.deferred.util.DeferredUtils;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.json.JsonMapper;
import io.cattle.platform.lock.LockManager;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.object.process.ObjectProcessManager;
import io.cattle.platform.object.process.StandardProcess;
import io.cattle.platform.object.resource.ResourceMonitor;
import io.cattle.platform.object.resource.ResourcePredicate;
import io.cattle.platform.object.util.DataAccessor;
import io.cattle.platform.object.util.DataUtils;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Lists;
import com.netflix.config.DynamicStringListProperty;
import com.netflix.config.DynamicStringProperty;
/**
 * Default {@link ClusterManager} implementation. Manages the per-cluster
 * "cluster server" agent instance (creation, start, configuration push) and
 * selects which active host manages a given cluster.
 */
public class ClusterManagerImpl implements ClusterManager {

    // Archaius-backed dynamic settings: agent container name, image, and the
    // list of config items pushed to the cluster agent.
    static final DynamicStringProperty CLUSTER_INSTANCE_NAME = ArchaiusUtil.getString("cluster.instance.name");
    static final DynamicStringProperty CLUSTER_IMAGE_NAME = ArchaiusUtil.getString("cluster.image.name");
    static final DynamicStringListProperty CONFIG_ITEMS = ArchaiusUtil.getList("cluster.config.items");

    @Inject
    ResourceMonitor resourceMonitor;

    @Inject
    JsonMapper jsonMapper;

    @Inject
    ConfigItemStatusManager statusManager;

    @Inject
    AgentInstanceFactory agentInstanceFactory;

    @Inject
    AgentInstanceDao agentInstanceDao;

    @Inject
    NetworkDao ntwkDao;

    @Inject
    AccountDao accountDao;

    @Inject
    IpAddressDao ipAddressDao;

    @Inject
    ObjectProcessManager processManager;

    @Inject
    ObjectManager objectManager;

    @Inject
    ClusterHostMapDao clusterHostMapDao;

    @Inject
    LockManager lockManager;

    /**
     * Returns the instance backing the cluster's server agent, or null when
     * no such agent exists yet.
     */
    @Override
    public Instance getClusterServerInstance(Host cluster) {
        Agent clusterServerAgent = getClusterServerAgent(cluster);
        Instance clusterServerInstance = null;
        if (clusterServerAgent != null) {
            clusterServerInstance = agentInstanceDao.getInstanceByAgent(clusterServerAgent);
        }
        return clusterServerInstance;
    }

    /** Resolves the cluster's server agent by its deterministic URI. */
    @Override
    public Agent getClusterServerAgent(Host cluster) {
        String uri = getUri(cluster);
        Agent clusterServerAgent = agentInstanceDao.getAgentByUri(uri);
        return clusterServerAgent;
    }

    /**
     * Returns the first primary IP found on any of the instance's nics, or
     * null if none is assigned.
     */
    @Override
    public IpAddress getClusterServerInstanceIp(Instance clusterServerInstance) {
        IpAddress ip = null;
        for (Nic nic : objectManager.children(clusterServerInstance, Nic.class)) {
            ip = ipAddressDao.getPrimaryIpAddress(nic);
            if (ip != null) {
                break;
            }
        }
        return ip;
    }

    /**
     * Creates the cluster server agent instance on the managing host if it
     * does not exist yet; otherwise starts the existing one when stopped.
     */
    private Instance createClusterServerInstance(Host cluster) {
        Instance clusterServerInstance = getClusterServerInstance(cluster);
        if (clusterServerInstance == null) {
            Host managingHost = getManagingHost(cluster);
            Integer clusterServerPort = DataAccessor.fields(cluster).withKey(ClusterConstants.CLUSTER_SERVER_PORT).as(Integer.class);

            Map<String, Object> params = new HashMap<>();
            params.put(InstanceConstants.FIELD_NETWORK_IDS, Lists.newArrayList(getNetworkIds(managingHost)));
            params.put(InstanceConstants.FIELD_REQUESTED_HOST_ID, managingHost.getId());
            // Publish the cluster server port 1:1 on the host (tcp).
            params.put(InstanceConstants.FIELD_PORTS, Lists.newArrayList(clusterServerPort + ":" + clusterServerPort + "/tcp"));

            clusterServerInstance = agentInstanceFactory
                    .newBuilder()
                    .withAccountId(managingHost.getAccountId())
                    .withZoneId(managingHost.getZoneId())
                    .withPrivileged(true)
                    .withUri(getUri(cluster, managingHost))
                    .withName(CLUSTER_INSTANCE_NAME.get())
                    .withImageUuid(getImageUuid(cluster, managingHost))
                    .withParameters(params)
                    .withSystemContainerType(SystemContainer.ClusterAgent)
                    .build();
        } else {
            start(clusterServerInstance);
        }
        return clusterServerInstance;
    }

    /** Image uuid = kind-dependent prefix + configured cluster image name. */
    private String getImageUuid(Host cluster, Host managingHost) {
        return getImagePrefix(cluster, managingHost) + CLUSTER_IMAGE_NAME.get();
    }

    /**
     * Builds the agent's deterministic URI; the cluster and managing host ids
     * make it unique per (cluster, host) pair.
     */
    private String getUri(Host cluster, Host managingHost) {
        return String.format("%s?clusterId=%d&managingHostId=%d",
                getConnectionPrefix(cluster, managingHost) + "///", cluster.getId(), managingHost.getId());
    }

    private String getUri(Host cluster) {
        return getUri(cluster, getManagingHost(cluster));
    }

    // NOTE(review): the cluster parameter is unused in the two prefix helpers
    // below; the prefix depends only on the managing host's kind.
    private String getConnectionPrefix(Host cluster, Host managingHost) {
        return objectManager.isKind(managingHost, "sim") ? "sim:" : "delegate:";
    }

    private String getImagePrefix(Host cluster, Host managingHost) {
        return objectManager.isKind(managingHost, "sim") ? "sim:" : "docker:";
    }

    /**
     * Picks a host-only network id for the agent: the account's own network
     * when one exists, otherwise falls back to the system account's network.
     * Throws when neither exists.
     */
    private Long getNetworkIds(Host managingHost) {
        List<? extends Network> accountNetworks = ntwkDao.getNetworksForAccount(managingHost.getAccountId(),
                NetworkConstants.KIND_HOSTONLY);

        if (accountNetworks.isEmpty()) {
            // pass system network if account doesn't own any
            List<? extends Network> systemNetworks = ntwkDao.getNetworksForAccount(accountDao.getSystemAccount()
                    .getId(),
                    NetworkConstants.KIND_HOSTONLY);
            if (systemNetworks.isEmpty()) {
                throw new RuntimeException(
                        "Unable to find a network to start cluster server");
            }
            return systemNetworks.get(0).getId();
        }

        return accountNetworks.get(0).getId();
    }

    /**
     * Schedules a start process for the instance if it is currently stopped;
     * deferred so it runs after the surrounding transaction commits.
     */
    private void start(final Instance agentInstance) {
        if (InstanceConstants.STATE_STOPPED.equals(agentInstance.getState())) {
            DeferredUtils.nest(new Callable<Object>() {
                @Override
                public Object call() throws Exception {
                    processManager.scheduleProcessInstance(InstanceConstants.PROCESS_START, agentInstance, null);
                    return null;
                }
            });
        }
    }

    /** Loads the host recorded as managing this cluster; throws if unset. */
    @Override
    public Host getManagingHost(Host cluster) {
        Long managingHostId = DataAccessor.fields(cluster).withKey(ClusterConstants.MANAGING_HOST).as(Long.class);
        if (managingHostId == null) {
            throw new RuntimeException("Missing managingHostId for cluster:" + cluster.getId());
        }
        return objectManager.loadResource(Host.class, managingHostId);
    }

    /**
     * Ensures the cluster server is running and pushes the configured config
     * items to its agent. No-op unless the cluster is ACTIVE or ACTIVATING.
     */
    @Override
    public void updateClusterServerConfig(ProcessState state, Host cluster) {
        if (!CommonStatesConstants.ACTIVE.equals(cluster.getState()) &&
                !CommonStatesConstants.ACTIVATING.equals(cluster.getState())) {
            return;
        }
        // short term optimization to avoid updating cluster object unnecessarily
        // since we're just currently only supporting file:// discoverySpec
        if (StringUtils.isEmpty(DataAccessor.fieldString(cluster, ClusterConstants.DISCOVERY_SPEC))) {
            DataUtils.getWritableFields(cluster).put(ClusterConstants.DISCOVERY_SPEC, "file:///etc/cluster/cluster-hosts.conf");
            objectManager.persist(cluster);
        }
        Instance clusterServerInstance = createClusterServerInstance(cluster);
        // Block until the agent instance reaches RUNNING.
        clusterServerInstance = resourceMonitor.waitFor(clusterServerInstance, new ResourcePredicate<Instance>() {
            @Override
            public boolean evaluate(Instance obj) {
                return InstanceConstants.STATE_RUNNING.equals(obj.getState());
            }
        });
        Agent clusterAgent = getClusterServerAgent(cluster);
        if (clusterAgent == null) {
            return;
        }
        // Create (or reuse) the config update request, send it, then wait for
        // the agent to acknowledge it.
        ConfigUpdateRequest request = ConfigUpdateRequestUtils.getRequest(jsonMapper, state,
                getContext(clusterAgent));
        request = before(request, clusterAgent);
        ConfigUpdateRequestUtils.setRequest(request, state, getContext(clusterAgent));
        after(request);
    }

    /**
     * Picks a managing host for the cluster and pushes config; deactivates
     * the cluster when no suitable (ACTIVE) host exists.
     */
    public void activateCluster(ProcessState state, Host cluster) {
        Long hostId = findSuitableHost(cluster);
        DataUtils.getWritableFields(cluster).put(ClusterConstants.MANAGING_HOST, hostId);
        objectManager.persist(cluster);
        if (hostId == null) {
            processManager.scheduleStandardProcess(StandardProcess.DEACTIVATE, cluster, null);
        } else {
            updateClusterServerConfig(state, cluster);
        }
    }

    /** Returns the first ACTIVE host mapped to the cluster, or null. */
    private Long findSuitableHost(Host cluster) {
        List<ClusterHostMapRecord> mappings = clusterHostMapDao.findClusterHostMapsForCluster(cluster);
        if (mappings.size() == 0) {
            return null;
        }
        for (ClusterHostMapRecord mapping: mappings) {
            Host host = objectManager.loadResource(Host.class, mapping.getHostId());
            if (host != null && CommonStatesConstants.ACTIVE.equals(host.getState())) {
                return mapping.getHostId();
            }
        }
        return null;
    }

    /**
     * Lazily builds a config update request covering all configured items
     * (apply + increment, check-in-sync only) and submits it.
     */
    private ConfigUpdateRequest before(ConfigUpdateRequest request, Agent agent) {
        if (request == null) {
            request = ConfigUpdateRequest.forResource(Agent.class, agent.getId());
            for (String item : CONFIG_ITEMS.get()) {
                request.addItem(item)
                        .withApply(true)
                        .withIncrement(true)
                        .setCheckInSyncOnly(true);
            }
        }
        statusManager.updateConfig(request);
        return request;
    }

    /** Waits for a previously-submitted config update request to complete. */
    private void after(ConfigUpdateRequest request) {
        if (request == null) {
            return;
        }
        statusManager.waitFor(request);
    }

    /** Per-agent context key used to stash the request on the process state. */
    private String getContext(Agent agent) {
        return String.format("AgentUpdateConfig:%s", agent.getId());
    }
}
|
#! /bin/bash
# Stop every WeChat service module by invoking the spring-boot maven plugin
# inside each module directory. The original chained bare `cd` commands: if
# any directory was missing, every following `mvn` ran in the wrong place.
# Directory paths are kept relative to the previous module, matching the
# original layout.

stop_module() {
    # $1: module directory, relative to the current working directory.
    cd "$1" || { echo "cannot enter $1; aborting" >&2; exit 1; }
    mvn spring-boot:stop
}

stop_module kemao_3         # access/entry service
stop_module ../subscribe    # follow service
stop_module ../unsubscribe  # unfollow service
stop_module ../library
stop_module ../self_menu
<reponame>Tyra-hans/github-search
import { Component, OnInit } from '@angular/core';
import { UserService } from '../user.service';
@Component({
selector: 'app-githubprofile',
templateUrl: './githubprofile.component.html',
styleUrls: ['./githubprofile.component.css']
})
export class GithubprofileComponent implements OnInit {
  /** Profile data of the most recently searched GitHub user. */
  user: any;
  /** Repository list of the most recently searched GitHub user. */
  repos: any;
  /**
   * Current search input. Fixed: the original `username: '';` was a type
   * annotation of the literal type '' with NO initializer, so the field
   * stayed undefined at runtime.
   */
  username = '';

  constructor(private userservice: UserService) {
  }

  ngOnInit() {
  }

  /** Entry point bound to the search form; delegates to getUserDetails. */
  searchUser(user) {
    console.log(user);
    this.getUserDetails(user);
  }

  /** Fetch profile and repositories for `user`; both requests run in parallel. */
  getUserDetails(user) {
    this.userservice.getGithubUser(user).then((response) => {
      this.user = response;
    });
    this.userservice.getGithubRepos(user).then((response) => {
      console.log(response);
      this.repos = response;
    });
  }
}
|
import {
Column,
CreateDateColumn,
Entity,
ManyToOne,
PrimaryGeneratedColumn,
UpdateDateColumn,
} from 'typeorm';
import { Exclude } from 'class-transformer';
import { User } from './../user/user.entity';
/** Moderation state of a review: accepted (visible) or awaiting approval. */
export enum ReviewStatusEnum {
  Accepted = 1,
  Pending,
}

/**
 * A rating/review one user (`author`) leaves about another (`reviewed`).
 * Reviews start in Pending status until accepted; timestamps are managed by
 * TypeORM and excluded from serialized output.
 */
@Entity()
export class Review {
  @PrimaryGeneratedColumn()
  id: number;

  // Short headline of the review (required, max 400 chars).
  @Column({ name: 'title', length: 400 })
  title: string;

  // Full review text (optional, max 3000 chars).
  @Column({ name: 'review', length: 3000, nullable: true })
  review: string;

  // Numeric rating. NOTE(review): the valid range is not enforced here --
  // presumably validated upstream; confirm against the service layer.
  @Column({ name: 'rating' })
  rating: number;

  // The user who wrote this review.
  @ManyToOne(() => User, (user) => user.authored)
  author: User;

  // The user this review is about.
  @ManyToOne(() => User, (user) => user.reviews)
  reviewed: User;

  // Moderation status; new reviews default to Pending.
  @Column('enum', {
    enum: ReviewStatusEnum,
    default: ReviewStatusEnum.Pending,
  })
  status: ReviewStatusEnum;

  // Managed timestamps, hidden from class-transformer output.
  @CreateDateColumn({ type: 'timestamp' })
  @Exclude()
  createdAt: Date;

  @UpdateDateColumn({ type: 'timestamp' })
  @Exclude()
  updatedAt: Date;
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2309-1
#
# Security announcement date: 2014-08-11 00:00:00 UTC
# Script generation date: 2017-01-01 21:03:54 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fix on version:
#    - libavformat53:4:0.8.15-0ubuntu0.12.04.1
#    - libavcodec53:4:0.8.15-0ubuntu0.12.04.1
#
# Last versions recommended by security team:
#    - libavformat53:4:0.8.17-0ubuntu0.12.04.2
#    - libavcodec53:4:0.8.17-0ubuntu0.12.04.2
#
# CVE List:
#
# More details:
#    - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Upgrade each affected package to the fixed version. The generated script
# listed the libavcodec53 upgrade twice; the duplicate no-op line is removed.
sudo apt-get install --only-upgrade libavformat53=4:0.8.17-0ubuntu0.12.04.2 -y
sudo apt-get install --only-upgrade libavcodec53=4:0.8.17-0ubuntu0.12.04.2 -y
<filename>packages/core/src/entity.ts
import type { IDisposable } from '@simple-ecs/runtime-extension';
import type { Class } from 'type-fest';
import { ArrayExtension, ReflectExtension } from '@simple-ecs/runtime-extension';
import { Component } from './component';
export class Entity implements IDisposable {
    private readonly components: Map<Function & { prototype: Component }, Component> = new Map();
    private readonly componentList: Component[] = [];

    /**
     * Dispose every attached component and clear both lookup structures.
     */
    public dispose(): void {
        for (const instance of this.components.values()) {
            instance.dispose();
        }
        this.components.clear();
        ArrayExtension.clear(this.componentList);
    }

    /**
     * Attach a component of the given type, constructing and initializing it
     * on first request; subsequent calls return the already-linked instance.
     * @param component Component constructor
     */
    public addComponent<T extends Component>(component: Class<T>): T {
        const existing = this.components.get(component);
        if (existing !== undefined) {
            return existing as T;
        }
        const created = new component(this);
        this.components.set(component, created);
        this.componentList.push(created);
        created.initialize();
        return created;
    }

    /**
     * Look up a linked component by type.
     * @param component Component constructor
     * @param useInherit When true, also match components that inherit the type
     */
    public getComponent<T extends Component>(component: Function & { prototype: T }, useInherit: boolean = false): T | undefined {
        if (!useInherit) {
            return this.components.has(component) ? (this.components.get(component) as T) : undefined;
        }
        return this.componentList.find(e => ReflectExtension.isInherit(e, component)) as T | undefined;
    }

    /**
     * Collect every linked component satisfying the predicate.
     * @param predicate Called once per component in attachment order
     */
    public queryComponent(predicate: (value: Component, index: number, obj: Component[]) => boolean): Component[] {
        return this.componentList.filter(predicate);
    }

    /**
     * Report whether a component of the given type is linked to this entity.
     * @param component Component constructor
     * @param useInherit When true, also match components that inherit the type
     */
    public hasComponent<V extends Component>(component: Function & { prototype: V }, useInherit: boolean = false): boolean {
        if (!useInherit) {
            return this.components.has(component);
        }
        return this.componentList.some(e => ReflectExtension.isInherit(e, component));
    }

    /**
     * Detach and dispose the component of the given type, if present.
     * @param component Component constructor
     */
    public deleteComponent(component: Class<Component>): void {
        const target = this.components.get(component);
        if (target == null) {
            return;
        }
        target.dispose();
        this.components.delete(component);
        ArrayExtension.remove(this.componentList, target);
    }
}
|
def dot_product(list1, list2):
    """Return the dot product (sum of pairwise products) of two sequences.

    Args:
        list1: First numeric sequence.
        list2: Second numeric sequence; must be the same length as list1.

    Returns:
        The scalar dot product; 0 for empty inputs.

    Raises:
        ValueError: If the sequences differ in length. (The original silently
            truncated when list2 was longer and raised IndexError when it was
            shorter.)
    """
    if len(list1) != len(list2):
        raise ValueError("dot_product requires sequences of equal length")
    return sum(a * b for a, b in zip(list1, list2))
# Example: dot product of [1, 2] and [3, 4] is 1*3 + 2*4 = 11
list1 = [1,2]
list2 = [3,4]
result = dot_product(list1, list2)
print(result)
# Expected output. NOTE(review): the bare `11` below is a no-op expression
# statement left in as illustration of the printed value.
11
package org.hzero.sso.core.config;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.List;
import java.util.Timer;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
import org.apache.commons.lang3.StringUtils;
import org.jasig.cas.client.util.CommonUtils;
import org.opensaml.common.SAMLException;
import org.opensaml.saml2.metadata.AssertionConsumerService;
import org.opensaml.saml2.metadata.SPSSODescriptor;
import org.opensaml.saml2.metadata.provider.HTTPMetadataProvider;
import org.opensaml.saml2.metadata.provider.MetadataProviderException;
import org.opensaml.util.URLBuilder;
import org.opensaml.ws.message.encoder.MessageEncodingException;
import org.opensaml.ws.transport.http.HTTPInTransport;
import org.opensaml.ws.transport.http.HTTPOutTransport;
import org.opensaml.ws.transport.http.HttpServletRequestAdapter;
import org.opensaml.xml.parse.ParserPool;
import org.opensaml.xml.util.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.security.cas.ServiceProperties;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.saml.SAMLConstants;
import org.springframework.security.saml.SAMLDiscovery;
import org.springframework.security.saml.context.SAMLContextProvider;
import org.springframework.security.saml.context.SAMLMessageContext;
import org.springframework.security.saml.key.KeyManager;
import org.springframework.security.saml.log.SAMLLogger;
import org.springframework.security.saml.metadata.ExtendedMetadata;
import org.springframework.security.saml.metadata.ExtendedMetadataDelegate;
import org.springframework.security.saml.metadata.MetadataManager;
import org.springframework.security.saml.util.SAMLUtil;
import org.springframework.security.saml.websso.WebSSOProfile;
import org.springframework.security.saml.websso.WebSSOProfileOptions;
import org.springframework.security.web.*;
import org.springframework.security.web.util.RedirectUrlBuilder;
import org.springframework.security.web.util.UrlUtils;
import org.springframework.web.filter.GenericFilterBean;
import org.hzero.core.exception.IllegalOperationException;
import org.hzero.core.util.DomainUtils;
import org.hzero.sso.core.constant.SsoConstant;
import org.hzero.sso.core.domain.entity.Domain;
import org.hzero.sso.core.domain.repository.DomainRepository;
import org.hzero.sso.core.type.SsoAuthenticationLocator;
public class SsoAuthenticationEntryPoint extends GenericFilterBean
implements InitializingBean, AuthenticationEntryPoint {
    private static final Logger logger = LoggerFactory.getLogger(SsoAuthenticationEntryPoint.class);

    /** URI fragment identifying the OAuth authorize endpoint this entry point guards. */
    private static final String AUTHORIZE_URI = "/oauth/oauth/authorize";

    private final ServiceProperties serviceProperties = new ServiceProperties();
    private final PortResolver portResolver = new PortResolverImpl();
    private final DomainRepository domainRepository;
    /** Login page used when no SSO domain matches the request. */
    private final String loginFormUrl;
    /** Request parameter letting a caller explicitly opt out of SSO. */
    private final String disableSsoParameter;
    /** When true, http login-page redirects are rewritten to https. */
    private final boolean forceHttps;

    // saml properties -- collaborators for the Spring Security SAML WebSSO flow.
    protected WebSSOProfileOptions defaultOptions;
    protected WebSSOProfile webSSOprofile;
    protected WebSSOProfile webSSOprofileECP;
    protected WebSSOProfile webSSOprofileHoK;
    protected KeyManager keyManager;
    protected SAMLLogger samlLogger;
    protected MetadataManager metadata;
    protected SAMLContextProvider contextProvider;
    protected SAMLDiscovery samlDiscovery;
    protected Timer backgroundTaskTimer = new Timer(true);
    protected HttpClient httpClient = new HttpClient(new MultiThreadedHttpConnectionManager());;
    protected ParserPool parserPool;
    protected ExtendedMetadata extendedMetadata;
    protected SsoAuthenticationLocator ssoAuthenticationLocator;

    /**
     * Url this filter should get activated on.
     */
    protected String filterProcessesUrl = FILTER_URL;

    /**
     * Default name of path suffix which will invoke this filter.
     */
    public static final String FILTER_URL = "/saml/login";

    /**
     * Name of parameter of HttpRequest telling entry point that the login should use specified idp.
     */
    public static final String IDP_PARAMETER = "idp";

    /**
     * Parameter is used to indicate response from IDP discovery service. When present IDP discovery is not invoked
     * again.
     */
    public static final String DISCOVERY_RESPONSE_PARAMETER = "disco";
    /**
     * Wires the domain repository and copies login/SSO settings from the
     * externalized {@code SsoProperties} configuration.
     */
    public SsoAuthenticationEntryPoint(DomainRepository domainRepository, SsoProperties ssoProperties) {
        this.domainRepository = domainRepository;
        this.forceHttps = ssoProperties.getLogin().isEnableHttps();
        this.loginFormUrl = ssoProperties.getLogin().getPage();
        this.disableSsoParameter = ssoProperties.getSso().getDisableSsoParameter();
    }

    /**
     * Dispatches an unauthenticated request by SSO type: no matching domain
     * (or type NULL) falls back to the standard login page; SAML runs the
     * SAML WebSSO flow (no redirect here); IDM redirects to the client host;
     * anything else (CAS/AUTH) redirects to the provider's SSO login page.
     */
    @Override
    public void commence(final HttpServletRequest request, final HttpServletResponse response,
            final AuthenticationException authException) throws IOException, ServletException {
        String redirectUrl;
        Domain ssoDomain = getSsoDomain(request, response);
        if (ssoDomain == null || ssoDomain.getSsoTypeCode() == null || SsoConstant.NULL.equals(ssoDomain.getSsoTypeCode())) {
            redirectUrl = buildRedirectUrlToLoginPage(request, response, authException);
        // NOTE(review): the getSsoTypeCode() != null checks in the branches
        // below are redundant -- the first branch already handled null.
        } else if (ssoDomain.getSsoTypeCode() != null && SsoConstant.SAML.equals(ssoDomain.getSsoTypeCode())) {
            checkSsoRegister(ssoDomain.getSsoTypeCode());
            samlCommence(request, response, authException,ssoDomain);
            return;
        } else if (ssoDomain.getSsoTypeCode() != null && SsoConstant.IDM.equals(ssoDomain.getSsoTypeCode())) {
            checkSsoRegister(ssoDomain.getSsoTypeCode());
            redirectUrl = ssoDomain.getClientHostUrl();
        } else {
            checkSsoRegister(ssoDomain.getSsoTypeCode());
            redirectUrl = buildRedirectUrlToSsoPage(ssoDomain, request, response);
        }
        response.sendRedirect(redirectUrl);
    }

    /**
     * Fails fast when the SSO type's starter module is not on the classpath,
     * pointing the operator at the missing dependency.
     */
    private void checkSsoRegister(String ssoId) {
        if (!ssoAuthenticationLocator.ssoRegister(ssoId)) {
            logger.error("Sso [{}] not enabled, you need to add the dependency of [hzero-starter-sso-{}] to enabled it.", ssoId, ssoId.toLowerCase());
            throw new IllegalOperationException("Sso [" + ssoId + "] is not enabled.");
        }
    }
    /**
     * Resolve the SSO {@link Domain} matching the request's {@code redirect_uri}
     * host, or null when SSO does not apply (opt-out parameter set, not an
     * authorize request, malformed redirect, or no configured domain matches).
     */
    protected Domain getSsoDomain(final HttpServletRequest request, final HttpServletResponse response) {
        // Explicit opt-out: any value other than "0"/"false" skips SSO.
        String disable = request.getParameter(disableSsoParameter);
        if (StringUtils.isNotBlank(disable) && !("0".equals(disable) || "false".equals(disable))) {
            return null;
        }
        boolean authorize = request.getRequestURI().contains(AUTHORIZE_URI);
        String redirectUrl = request.getParameter("redirect_uri");
        java.net.URL url;
        String redirectUri = "";
        try {
            // Reduce redirect_uri to host[:port] for domain matching.
            url = new java.net.URL(redirectUrl);
            if(url.getPort()>0){
                redirectUri = url.getHost() + ":" + url.getPort();
            }else{
                redirectUri = url.getHost();
            }
        } catch (MalformedURLException e) {
            // Missing or malformed redirect_uri: cannot match an SSO domain.
            return null;
        }
        if (StringUtils.isBlank(redirectUri) || !authorize) {
            return null;
        }
        // Look up all configured domains and pick the first SSO-typed one
        // whose URL contains the redirect host.
        List<Domain> domains = domainRepository.selectAllDomain();
        if (domains == null || domains.size() == 0) {
            return null;
        }
        String finalRedirectUri = redirectUri;
        return domains.stream().filter(d -> d.getSsoTypeCode() != null && d.getDomainUrl().contains(finalRedirectUri))
                .findFirst().orElse(null);
    }

    /**
     * Build the redirect target for the domain's SSO login page: CAS domains
     * get a service-callback URL appended; AUTH domains use the configured
     * login URL directly; other types yield null.
     */
    protected String buildRedirectUrlToSsoPage(Domain ssoDomain, HttpServletRequest request,
            HttpServletResponse response) {
        if(ssoDomain.getSsoTypeCode().contains(SsoConstant.CAS)){
            final String urlEncodedService = createServiceUrl(ssoDomain, request, response);
            return createRedirectUrl(ssoDomain, urlEncodedService);
        }else if(ssoDomain.getSsoTypeCode().contains(SsoConstant.AUTH)){
            return ssoDomain.getSsoLoginUrl();
        }else{
            return null;
        }
    }

    /**
     * Build the service URL CAS redirects back to after login, carrying the
     * tenant, CAS server URL/version, login-name field and https flag as
     * query parameters.
     */
    protected String createServiceUrl(final Domain domain, final HttpServletRequest request,
            final HttpServletResponse response) {
        StringBuilder service = new StringBuilder();
        boolean ssoHttps = domain.getDomainUrl().startsWith(DomainUtils.HTTPS);
        service.append(domain.getClientHostUrl()).append("?")
                .append(SsoConstant.UrlParamKey.TENANT_ID).append("=").append(domain.getTenantId())
                .append("&").append(SsoConstant.UrlParamKey.SERVER_URL).append("=").append(domain.getSsoServerUrl())
                .append("&").append(SsoConstant.UrlParamKey.CAS_VERSION).append("=").append(domain.getSsoTypeCode())
                .append("&").append(SsoConstant.UrlParamKey.LOGIN_NAME_FIELD).append("=").append(domain.getLoginNameField())
                .append("&").append(SsoConstant.UrlParamKey.SSO_HTTPS).append("=").append(ssoHttps);
        return CommonUtils.constructServiceUrl(null, response, service.toString(), null,
                this.serviceProperties.getServiceParameter(), this.serviceProperties.getArtifactParameter(), true);
    }

    /**
     * Build the redirect URL to the CAS login endpoint with the encoded
     * service callback attached.
     */
    protected String createRedirectUrl(final Domain domain, final String serviceUrl) {
        return CommonUtils.constructRedirectUrl(domain.getSsoLoginUrl(), this.serviceProperties.getServiceParameter(),
                serviceUrl, this.serviceProperties.isSendRenew(), false);
    }
    /**
     * Build an absolute URL to the standard (non-SSO) login page. A relative
     * login form path is resolved against the request's scheme/host/port and
     * the gateway-provided root path; http is upgraded to https when
     * {@code forceHttps} is set.
     */
    protected String buildRedirectUrlToLoginPage(HttpServletRequest request, HttpServletResponse response,
            AuthenticationException authException) {
        String loginForm = determineUrlToUseForThisRequest(request, response, authException);

        if (UrlUtils.isAbsoluteUrl(loginForm)) {
            return loginForm;
        }

        int serverPort = portResolver.getServerPort(request);
        String scheme = request.getScheme();

        RedirectUrlBuilder urlBuilder = new RedirectUrlBuilder();

        // H-Root-Path is a gateway header carrying the external context root;
        // normalize it to either "" or a "/"-prefixed path.
        String rootPath = request.getHeader("H-Root-Path");
        if (org.springframework.util.StringUtils.isEmpty(rootPath) || "/".equals(rootPath)) {
            rootPath = "";
        } else if (!rootPath.startsWith("/")) {
            rootPath = "/" + rootPath;
        }

        urlBuilder.setScheme(scheme);
        urlBuilder.setServerName(request.getServerName());
        urlBuilder.setPort(serverPort);
        urlBuilder.setContextPath(rootPath + request.getContextPath());
        urlBuilder.setPathInfo(loginForm);

        if (forceHttps && "http".equals(scheme)) {
            urlBuilder.setScheme("https");
        }

        return urlBuilder.getUrl();
    }

    /** Hook for subclasses to vary the login page per request; defaults to the configured page. */
    protected String determineUrlToUseForThisRequest(HttpServletRequest request, HttpServletResponse response,
            AuthenticationException exception) {
        return getLoginFormUrl();
    }

    public String getLoginFormUrl() {
        return loginFormUrl;
    }

    /** True when the request path matches this filter's processing URL. */
    protected boolean processFilter(HttpServletRequest request) {
        return SAMLUtil.processFilter(filterProcessesUrl, request);
    }
    /**
     * Start the SAML WebSSO flow for the given domain: ensure the metadata
     * manager holds exactly the domain's IDP metadata provider (evicting any
     * other HTTP providers), build the SAML message context and send the
     * authentication request. SAML-layer failures surface as ServletException.
     */
    public void samlCommence(HttpServletRequest request, HttpServletResponse response, AuthenticationException e,Domain ssoDomain)
            throws IOException, ServletException {
        try {
            boolean addFlag = true;
            // Keep the provider that already points at this domain's metadata
            // URL (when a default IDP is set); remove every other HTTP provider.
            for(ExtendedMetadataDelegate dto : metadata.getAvailableProviders()){
                if(dto.getDelegate() instanceof HTTPMetadataProvider ) {
                    HTTPMetadataProvider provider = (HTTPMetadataProvider) dto.getDelegate();
                    if(provider.getMetadataURI().equals(ssoDomain.getSamlMetaUrl()) && !StringUtils.isBlank(metadata.getDefaultIDP())) {
                        addFlag = false;
                    }else {
                        metadata.removeMetadataProvider(dto);
                    }
                }
            }
            if (addFlag) {
                // NOTE(review): newExtendedMetadataProvider is defined outside
                // this excerpt -- presumably builds an HTTP provider from the
                // domain's SAML metadata URL.
                ExtendedMetadataDelegate newMetadataDelegate = newExtendedMetadataProvider(ssoDomain);
                metadata.addMetadataProvider(newMetadataDelegate);
                metadata.afterPropertiesSet();
            }
            SAMLMessageContext context = contextProvider.getLocalAndPeerEntity(request, response);
            // ECP and IDP-discovery dispatch is intentionally disabled; this
            // entry point always runs the plain WebSSO profile.
            // if (isECP(context)) {
            // initializeECP(context, e);
            // } else if (isDiscovery(context)) {
            // initializeDiscovery(context);
            // } else {
            // initializeSSO(context, e);
            // }
            initializeSSO(context, e);
        } catch (SAMLException e1) {
            logger.debug("Error initializing entry point", e1);
            throw new ServletException(e1);
        } catch (MetadataProviderException e1) {
            logger.debug("Error initializing entry point", e1);
            throw new ServletException(e1);
        } catch (MessageEncodingException e1) {
            logger.debug("Error initializing entry point", e1);
            throw new ServletException(e1);
        }
    }
    /**
     * Initializes ECP profile.
     * <p>
     * Subclasses can alter the initialization behaviour.
     *
     * @param context saml context, also containing wrapped request and response objects
     * @param e       exception causing the entry point to be invoked (if any)
     * @throws MetadataProviderException in case metadata can't be queried
     * @throws SAMLException             in case message sending fails
     * @throws MessageEncodingException  in case SAML message encoding fails
     */
    protected void initializeECP(SAMLMessageContext context, AuthenticationException e)
            throws MetadataProviderException, SAMLException, MessageEncodingException {
        WebSSOProfileOptions options = getProfileOptions(context, e);
        logger.debug("Processing SSO using ECP profile");
        webSSOprofileECP.sendAuthenticationRequest(context, options);
        samlLogger.log(SAMLConstants.AUTH_N_REQUEST, SAMLConstants.SUCCESS, context);
    }

    /**
     * WebSSO profile or WebSSO Holder-of-Key profile. Selection is made based on the settings of the Service Provider.
     * In case Enhanced Client/Proxy is enabled and the request claims to support this profile it is used. Otherwise it is verified what is the binding
     * and profile specified for the assertionConsumerIndex in the WebSSOProfileOptions. In case it is HoK the WebSSO Holder-of-Key profile is used,
     * otherwise the ordinary WebSSO.
     * <p>
     * Subclasses can alter the initialization behaviour.
     *
     * @param context saml context, also containing wrapped request and response objects
     * @param e       exception causing the entry point to be invoked (if any)
     * @throws MetadataProviderException in case metadata can't be queried
     * @throws SAMLException             in case message sending fails
     * @throws MessageEncodingException  in case SAML message encoding fails
     */
    protected void initializeSSO(SAMLMessageContext context, AuthenticationException e)
            throws MetadataProviderException, SAMLException, MessageEncodingException {
        // Generate options for the current SSO request
        WebSSOProfileOptions options = getProfileOptions(context, e);

        // Determine the assertionConsumerService to be used
        AssertionConsumerService consumerService = SAMLUtil.getConsumerService(
                (SPSSODescriptor) context.getLocalEntityRoleMetadata(), options.getAssertionConsumerIndex());

        // HoK WebSSO -- only when the consumer service binding demands it and
        // a HoK profile is configured; otherwise fall through to plain WebSSO.
        if (SAMLConstants.SAML2_HOK_WEBSSO_PROFILE_URI.equals(consumerService.getBinding())) {
            if (webSSOprofileHoK == null) {
                logger.warn(
                        "WebSSO HoK profile was specified to be used, but profile is not configured in the EntryPoint, HoK will be skipped");
            } else {
                logger.debug("Processing SSO using WebSSO HolderOfKey profile");
                webSSOprofileHoK.sendAuthenticationRequest(context, options);
                samlLogger.log(SAMLConstants.AUTH_N_REQUEST, SAMLConstants.SUCCESS, context);
                return;
            }
        }

        // Ordinary WebSSO
        logger.debug("Processing SSO using WebSSO profile");
        webSSOprofile.sendAuthenticationRequest(context, options);
        samlLogger.log(SAMLConstants.AUTH_N_REQUEST, SAMLConstants.SUCCESS, context);
    }
/**
 * Method initializes IDP Discovery Profile as defined in http://docs.oasis-open.org/security/saml/Post2.0/sstc-saml-idp-discovery.pdf
 * It is presumed that metadata of the local Service Provider contains discovery return address.
 *
 * @param context saml context also containing request and response objects
 * @throws ServletException error
 * @throws IOException io error
 * @throws MetadataProviderException in case metadata of the local entity can't be populated
 */
protected void initializeDiscovery(SAMLMessageContext context)
throws ServletException, IOException, MetadataProviderException {
String discoveryURL = context.getLocalExtendedMetadata().getIdpDiscoveryURL();
if (discoveryURL != null) {
// Extended metadata provides an external discovery service; append our
// entityID and the name of the request parameter the chosen IDP should be
// returned under (IDP_PARAMETER) to its query string.
URLBuilder urlBuilder = new URLBuilder(discoveryURL);
List<Pair<String, String>> queryParams = urlBuilder.getQueryParams();
queryParams.add(new Pair<String, String>(SAMLDiscovery.ENTITY_ID_PARAM, context.getLocalEntityId()));
queryParams.add(new Pair<String, String>(SAMLDiscovery.RETURN_ID_PARAM, IDP_PARAMETER));
discoveryURL = urlBuilder.buildURL();
logger.debug("Using discovery URL from extended metadata");
} else {
// No external discovery service configured: fall back to the locally
// deployed SAMLDiscovery filter, resolved relative to the servlet context path.
String discoveryUrl = SAMLDiscovery.FILTER_URL;
if (samlDiscovery != null) {
discoveryUrl = samlDiscovery.getFilterProcessesUrl();
}
String contextPath = (String) context.getInboundMessageTransport()
.getAttribute(SAMLConstants.LOCAL_CONTEXT_PATH);
discoveryURL = contextPath + discoveryUrl + "?" + SAMLDiscovery.RETURN_ID_PARAM + "=" + IDP_PARAMETER + "&"
+ SAMLDiscovery.ENTITY_ID_PARAM + "=" + context.getLocalEntityId();
logger.debug("Using local discovery URL");
}
logger.debug("Redirecting to discovery URL =" + discoveryURL);
// Hand the user agent off to the discovery service via an HTTP redirect.
HTTPOutTransport response = (HTTPOutTransport) context.getOutboundMessageTransport();
response.sendRedirect(discoveryURL);
}
/**
 * Supplies the {@link WebSSOProfileOptions} used to construct the SAML request.
 * When default options were configured a defensive clone of them is returned so
 * per-request mutation cannot affect the shared defaults; otherwise a fresh,
 * empty options object is created. Subclasses may override to customize
 * per-request behaviour (e.g. based on the triggering exception).
 *
 * @param context containing local entity
 * @param exception exception causing invocation of this entry point (can be null)
 * @return populated webSSOprofile
 * @throws MetadataProviderException in case metadata loading fails
 */
protected WebSSOProfileOptions getProfileOptions(SAMLMessageContext context, AuthenticationException exception)
        throws MetadataProviderException {
    return (defaultOptions != null) ? defaultOptions.clone() : new WebSSOProfileOptions();
}
/**
 * Sets object which determines default values to be used as basis for construction
 * during getProfileOptions call. The supplied object is cloned so later external
 * mutation cannot change the stored defaults; passing null clears them.
 *
 * @param defaultOptions default object to use for options construction
 */
public void setDefaultProfileOptions(WebSSOProfileOptions defaultOptions) {
    this.defaultOptions = (defaultOptions == null) ? null : defaultOptions.clone();
}
/**
 * Installs a built-in set of default profile options (scoping disabled).
 * <p>
 * Fix: the original method constructed and configured a
 * {@code WebSSOProfileOptions} instance but never assigned it to
 * {@code this.defaultOptions} (dead store) — it instead re-cloned whatever
 * defaults already existed, so the no-arg overload had no effect.
 */
public void setDefaultProfileOptions() {
    WebSSOProfileOptions webSSOProfileOptions = new WebSSOProfileOptions();
    webSSOProfileOptions.setIncludeScoping(false);
    // Store the freshly configured options as the new defaults.
    this.defaultOptions = webSSOProfileOptions;
}
/**
 * Determines whether IDP Discovery should be initialized. Discovery runs only
 * when no user-selected IDP is present in the context, discovery is enabled in
 * the local extended metadata, and the current request is not itself a response
 * from the discovery service (which would otherwise loop on failure).
 *
 * @param context context
 * @return true if IDP Discovery should get initialized
 */
protected boolean isDiscovery(SAMLMessageContext context) {
    if (context.isPeerUserSelected()) {
        return false;
    }
    if (!context.getLocalExtendedMetadata().isIdpDiscoveryEnabled()) {
        return false;
    }
    return !isDiscoResponse(context);
}
/**
 * Determines whether the ECP profile should get initialized: the request must
 * advertise ECP support and ECP must be enabled for the local service provider.
 * When both hold but no ECP profile bean is configured, a warning is logged and
 * ECP is skipped.
 *
 * @param context context
 * @return true if ECP profile should get initialized
 */
protected boolean isECP(SAMLMessageContext context) {
    HttpServletRequest request = ((HttpServletRequestAdapter) context.getInboundMessageTransport())
            .getWrappedRequest();
    if (!context.getLocalExtendedMetadata().isEcpEnabled() || !SAMLUtil.isECPRequest(request)) {
        return false;
    }
    if (webSSOprofileECP == null) {
        logger.warn(
                "ECP profile was specified to be used, but profile is not configured in the EntryPoint, ECP will be skipped");
        return false;
    }
    return true;
}
/**
 * True value indicates that request is a response from the discovery profile.
 * We use the value to prevent repeated invocation of the discovery service upon
 * failure. The flag is carried in the DISCOVERY_RESPONSE_PARAMETER request
 * parameter and must equal "true" (case-insensitive, surrounding whitespace ignored).
 *
 * @param context context with request and response included
 * @return true if this HttpRequest is a response from IDP discovery profile.
 */
private boolean isDiscoResponse(SAMLMessageContext context) {
    HTTPInTransport transport = (HTTPInTransport) context.getInboundMessageTransport();
    String discoFlag = transport.getParameterValue(DISCOVERY_RESPONSE_PARAMETER);
    if (discoFlag == null) {
        return false;
    }
    return "true".equals(discoFlag.toLowerCase().trim());
}
/**
 * Locator used to resolve SSO authentication configuration for this entry point.
 *
 * @param ssoAuthenticationLocator locator implementation (required)
 */
@Autowired
public void setSsoAuthenticationLocator(SsoAuthenticationLocator ssoAuthenticationLocator) {
this.ssoAuthenticationLocator = ssoAuthenticationLocator;
}
/**
 * Profile for consumption of processed messages, cannot be null, must be set.
 *
 * @param webSSOprofile profile
 */
@Autowired(required = false)
@Qualifier("webSSOprofile")
public void setWebSSOprofile(WebSSOProfile webSSOprofile) {
this.webSSOprofile = webSSOprofile;
}
/**
 * Optional profile used for Enhanced Client/Proxy (ECP) requests; when absent,
 * ECP requests fall back per {@code isECP} (a warning is logged and ECP is skipped).
 *
 * @param webSSOprofileECP ECP profile bean
 */
@Autowired(required = false)
@Qualifier("ecpprofile")
public void setWebSSOprofileECP(WebSSOProfile webSSOprofileECP) {
this.webSSOprofileECP = webSSOprofileECP;
}
/**
 * Optional profile used for the WebSSO Holder-of-Key binding; when absent, HoK
 * requests are served with the ordinary WebSSO profile (see {@code initializeSSO}).
 *
 * @param webSSOprofileHoK Holder-of-Key profile bean
 */
@Autowired(required = false)
@Qualifier("hokWebSSOProfile")
public void setWebSSOprofileHoK(WebSSOProfile webSSOprofileHoK) {
this.webSSOprofileHoK = webSSOprofileHoK;
}
/**
 * Logger for SAML events, cannot be null, must be set.
 *
 * @param samlLogger logger
 */
@Autowired(required = false)
public void setSamlLogger(SAMLLogger samlLogger) {
this.samlLogger = samlLogger;
}
/**
 * Dependency for loading of discovery URL
 * @param samlDiscovery saml discovery endpoint
 */
@Autowired(required = false)
public void setSamlDiscovery(SAMLDiscovery samlDiscovery) {
this.samlDiscovery = samlDiscovery;
}
/**
 * Sets entity responsible for populating local entity context data.
 *
 * @param contextProvider provider implementation
 */
@Autowired(required = false)
public void setContextProvider(SAMLContextProvider contextProvider) {
//Assert.notNull(contextProvider, "Context provider can't be null");
this.contextProvider = contextProvider;
}
/**
 * HTTP client used when fetching remote metadata (see {@code newExtendedMetadataProvider}).
 *
 * @param httpClient shared SAML HTTP client
 */
@Autowired(required = false)
@Qualifier("samlHttpClient")
public void setHttpClient(HttpClient httpClient) {
this.httpClient = httpClient;
}
/**
 * XML parser pool handed to metadata providers created by this class.
 *
 * @param parserPool parser pool
 */
@Autowired(required = false)
public void setParserPool(ParserPool parserPool) {
this.parserPool = parserPool;
}
/**
 * Extended metadata template wrapped around downloaded IDP metadata.
 *
 * @param extendedMetadata extended metadata settings
 */
@Autowired(required = false)
public void setExtendedMetadata(ExtendedMetadata extendedMetadata) {
this.extendedMetadata = extendedMetadata;
}
/**
 * Metadata manager, cannot be null, must be set.
 *
 * @param metadata manager
 * @throws MetadataProviderException declared for API compatibility; this setter only stores the reference and never throws
 */
@Autowired(required = false)
public void setMetadata(MetadataManager metadata) throws MetadataProviderException {
this.metadata = metadata;
}
/**
 * Key manager providing credentials (signing/encryption keys) for SAML processing.
 *
 * @param keyManager key manager
 */
@Autowired(required = false)
public void setKeyManager(KeyManager keyManager) {
this.keyManager = keyManager;
}
/**
 * Builds an {@link ExtendedMetadataDelegate} that downloads IDP metadata over HTTP
 * from the URL configured on the given domain, refreshing via the shared
 * background task timer.
 * <p>
 * NOTE(review): both the metadata trust check and the signature requirement are
 * explicitly disabled below, so downloaded metadata is accepted without
 * verification — confirm this is intentional for every environment this runs in.
 *
 * @param ssoDomain domain whose SAML metadata URL should be fetched
 * @return delegate wrapping the HTTP metadata provider
 * @throws MetadataProviderException in case the provider can't be constructed
 */
public ExtendedMetadataDelegate newExtendedMetadataProvider(Domain ssoDomain) throws MetadataProviderException {
String idpSSOCircleMetadataURL = ssoDomain.getSamlMetaUrl();
HTTPMetadataProvider httpMetadataProvider = new HTTPMetadataProvider(this.backgroundTaskTimer, this.httpClient,
idpSSOCircleMetadataURL);
httpMetadataProvider.setParserPool(this.parserPool);
ExtendedMetadataDelegate extendedMetadataDelegate = new ExtendedMetadataDelegate(httpMetadataProvider,
this.extendedMetadata);
// Trust and signature verification intentionally (?) switched off — see NOTE above.
extendedMetadataDelegate.setMetadataTrustCheck(false);
extendedMetadataDelegate.setMetadataRequireSignature(false);
// Drop cancelled tasks from the shared timer queue.
this.backgroundTaskTimer.purge();
return extendedMetadataDelegate;
}
/**
 * @return filter URL this entry point processes requests under
 */
public String getFilterProcessesUrl() {
return filterProcessesUrl;
}
/**
 * Custom filter URL which overrides the default. Filter url determines URL where filter starts processing.
 *
 * @param filterProcessesUrl filter URL
 */
public void setFilterProcessesUrl(String filterProcessesUrl) {
this.filterProcessesUrl = filterProcessesUrl;
}
/**
 * Servlet filter entry: requests matching the configured filter URL trigger the
 * SAML commencement logic; everything else is passed down the chain untouched.
 */
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
        throws IOException, ServletException {
    FilterInvocation invocation = new FilterInvocation(request, response, chain);
    if (processFilter(invocation.getRequest())) {
        commence(invocation.getRequest(), invocation.getResponse(), null);
    } else {
        chain.doFilter(request, response);
    }
}
}
|
package br.com.pucrs.collections;
public class EmptyTreeException extends RuntimeException {
public EmptyTreeException() {
super("EmptyTreeException");
}
public EmptyTreeException(String message) {
super(message);
}
}
|
// Repository: RainbowDragon/USACO_Silver_Java
/**
* USACO 2020 - 12 - Problem 2 - Rectangular Pasture
*
*/
import java.io.*;
import java.lang.*;
import java.util.*;
// Counts the number of distinct subsets of cows that can be enclosed by some
// axis-aligned rectangle (USACO 2020 Dec Silver, "Rectangular Pasture").
// Approach: sort points by x; for each right-boundary point i, sweep left over
// earlier points j and count rectangles whose x-range is [x_j, x_i], using
// upper[]/lower[] as running counts of how many later points lie above/below
// each j's y. Assumes coordinates fit in int; O(n^2) pairs, counts in long.
public class RectangularPasture {
public static void main (String [] args) throws IOException {
// Input:
BufferedReader f = new BufferedReader(new InputStreamReader(System.in));
// Output:
PrintWriter out = new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.out)));
int n = Integer.parseInt(f.readLine());
Point[] points = new Point[n];
for (int i = 0; i < n; i++)
{
StringTokenizer st = new StringTokenizer(f.readLine());
int x = Integer.parseInt(st.nextToken());
int y = Integer.parseInt(st.nextToken());
points[i] = new Point(x, y);
}
// Sort by x so index order equals left-to-right order.
Arrays.sort(points);
// upper[j]/lower[j]: number of points seen to the right of j with y above/below points[j].y.
long[] upper = new long[n];
long[] lower = new long[n];
// Start at 1 to count the empty subset.
long result = 1;
for (int i = 0; i < n; i++)
{
// Each single point {i} is a valid subset.
result++;
// top/bottom: points between j and i (exclusive) lying above/below points[i].y.
long top = 0;
long bottom = 0;
for (int j = i-1; j >= 0; j--)
{
if (points[j].y < points[i].y) {
// Rectangles spanning x in [x_j, x_i]: choose upper edge among `top` candidates
// (+1 for tight), lower edge among lower[j] candidates (+1 for tight).
result += (top+1)*(lower[j]+1);
upper[j]++;
bottom++;
}
else if (points[j].y > points[i].y) {
result += (bottom+1)*(upper[j]+1);
lower[j]++;
top++;
}
else {
// Equal y (presumably cannot happen with distinct coordinates — TODO confirm
// against the problem's input guarantees).
result += (top+1)*(bottom+1);
}
}
}
out.println(result);
out.close();
}
// Point ordered by x only; y is deliberately ignored by the comparator.
static class Point implements Comparable<Point> {
public int x;
public int y;
public Point (int x, int y) {
this.x = x;
this.y = y;
}
public int compareTo(Point p) {
if (this.x < p.x) {
return -1;
}
else if (this.x > p.x) {
return 1;
}
else {
return 0;
}
}
}
}
import math


class Circle:
    """A circle defined by its radius."""

    def __init__(self, r):
        """Store the radius.

        Args:
            r: circle radius (any non-negative number).
        """
        self.radius = r

    def area(self):
        """Return the area pi * r**2.

        Uses ``math.pi`` instead of the hard-coded approximation 3.14,
        giving full double precision.
        """
        return math.pi * self.radius ** 2


# Example
circle = Circle(2)
circle.area()
# Output: 12.566370614359172  (i.e. 4 * pi)
#!/bin/bash
# Summarize DLRM benchmark logs: print the "Average num..." line from each
# per-configuration log file, grouped by GPU count.
#
# Log files are named <gpus>_gpu_<size>_<batch> where batch scales with the
# GPU count (gpus*4 and gpus*8), e.g. 1_gpu_small_4 ... 8_gpu_medium_64.
echo "DLRM performance measured in number of iters/sec"
cd ..
for gpus in 1 2 4 8; do
    echo "${gpus} GPU numbers"
    for size in small medium; do
        for mult in 4 8; do
            # grep reads the file directly (no useless 'cat | grep');
            # output order matches the original hand-written list:
            # small_<g*4>, small_<g*8>, medium_<g*4>, medium_<g*8>.
            grep "Average num" "${gpus}_gpu_${size}_$((gpus * mult))"
        done
    done
done
|
/*
* Copyright 2012 Samsung Electronics Co., Ltd
* Licensed under the Flora License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.tizenopensource.org/license
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include "xconverter.h"
static char *html_to_entry(AppData *ad, int type_index, const char *str);
static char *efl_to_entry(AppData *ad, int type_index, const char *str);
static char *text_to_entry(AppData *ad, int type_index, const char *str);
static char *image_path_to_entry(AppData *ad, int type_index, const char *str);
static char *make_close_tag(Eina_List* nodes);
static char *do_not_convert(AppData *ad, int type_index, const char *str);
static char *html_to_efl(AppData *ad, int type_index, const char *str);
static char *efl_to_html(AppData *ad, int type_index, const char *str);
static char *text_to_html(AppData *ad, int type_index, const char *str);
static char *text_to_efl(AppData *ad, int type_index, const char *str);
static char *to_text(AppData *ad, int type_index, const char *str);
static char *image_path_to_html(AppData *ad, int type_index, const char *str);
static char *image_path_to_efl(AppData *ad, int type_index, const char *str);
//static char *image_path_to_text(AppData *ad, int type_index, const char *str);
//static char *efl_to_efl(AppData *ad, int type_index, const char *str);
//static char *html_to_html(AppData *ad, int type_index, const char *str);
static char *image_path_to_image_path(AppData *ad, int type_index, const char *str);
/* Map an X atom back to its ATOM_INDEX_* slot by scanning every registered
 * atom of every type. Returns the type index, or -1 when the atom is unknown. */
int atom_type_index_get(AppData *ad, Ecore_X_Atom atom)
{
   int type;

   for (type = 0; type < ATOM_INDEX_MAX; type++)
     {
        int slot;

        for (slot = 0; slot < ad->targetAtoms[type].atom_cnt; slot++)
          {
             if (ad->targetAtoms[type].atom[slot] == atom)
               return type;
          }
     }
   return -1;
}
/* Populate ad->targetAtoms: for each ATOM_INDEX_* slot (in order: TARGETS,
 * plain text, HTML, EFL markup, URI list — inferred from the tables below)
 * intern the X atoms for its MIME/type names and wire up the conversion
 * function tables. Pairs with depose_target_atoms() for cleanup. */
void init_target_atoms(AppData *ad)
{
/* Number of alternative atom names registered per type (parallel to
 * targetAtomNames rows). */
int atom_cnt[ATOM_INDEX_MAX] = {
1, 5, 2, 1, 2
};
char *targetAtomNames[][5] = {
{ "TARGETS" },
{ "UTF8_STRING", "STRING", "TEXT", "text/plain;charset=utf-8", "text/plain" },
{ "text/html;charset=utf-8", "text/html" },
{ "application/x-elementary-markup" },
{ "text/uri", "text/uri-list" }
};
/* Converter applied when pasting this type into an entry widget. */
text_converter_func converts_for_entry[ATOM_INDEX_MAX] = {
NULL, text_to_entry, html_to_entry, efl_to_entry, image_path_to_entry
};
/* converts[from][to]: conversion matrix between type indices; NULL means the
 * conversion is unsupported (also used by targets_converter to advertise
 * which targets an item can be served as). */
text_converter_func converts[ATOM_INDEX_MAX][ATOM_INDEX_MAX] = {
{NULL, NULL, NULL, NULL, NULL},
{NULL, do_not_convert, text_to_html, text_to_efl, NULL},
{NULL, to_text, do_not_convert, html_to_efl, NULL},
{NULL, to_text, efl_to_html, do_not_convert, NULL},
{NULL, NULL, image_path_to_html, image_path_to_efl, image_path_to_image_path}
};
int i, j;
for (i = 0; i < ATOM_INDEX_MAX; i++)
{
ad->targetAtoms[i].atom_cnt = atom_cnt[i];
/* NOTE(review): MALLOC results are not NULL-checked before use below. */
ad->targetAtoms[i].name = MALLOC(sizeof(char *) * atom_cnt[i]);
ad->targetAtoms[i].atom = MALLOC(sizeof(Ecore_X_Atom) * atom_cnt[i]);
for (j = 0; j < atom_cnt[i]; j++)
{
DMSG("atomName: %s\n", targetAtomNames[i][j]);
ad->targetAtoms[i].name[j] = strdup(targetAtomNames[i][j]);
/* Interning the atom with the X server. */
ad->targetAtoms[i].atom[j] = ecore_x_atom_get(targetAtomNames[i][j]);
}
ad->targetAtoms[i].convert_for_entry = converts_for_entry[i];
for (j = 0; j < ATOM_INDEX_MAX; j++)
ad->targetAtoms[i].convert_to_target[j] = converts[i][j];
//ecore_x_selection_converter_atom_add(ad->targetAtoms[i].atom, target_converters[i]);
//ecore_x_selection_converter_atom_add(ad->targetAtoms[i].atom, generic_converter);
}
}
/* Release every name string and the name/atom arrays allocated by
 * init_target_atoms(). Safe to call on partially initialized tables thanks to
 * the NULL checks. */
void depose_target_atoms(AppData *ad)
{
   int type;

   for (type = 0; type < ATOM_INDEX_MAX; type++)
     {
        int slot;

        for (slot = 0; slot < ad->targetAtoms[type].atom_cnt; slot++)
          {
             if (ad->targetAtoms[type].name[slot])
               FREE(ad->targetAtoms[type].name[slot]);
          }
        if (ad->targetAtoms[type].name)
          FREE(ad->targetAtoms[type].name);
        if (ad->targetAtoms[type].atom)
          FREE(ad->targetAtoms[type].atom);
     }
}
/* Answer a TARGETS selection request: build the array of atoms this clipboard
 * item can be converted to, based on the item's type row in the conversion
 * matrix. With no item, plain text is assumed. Returns EINA_TRUE on success;
 * *data_ret is a MALLOC'd Ecore_X_Atom array owned by the caller, *size_ret is
 * the number of atoms, *ttype/*tsize describe the X reply format (ATOM/32). */
static Eina_Bool targets_converter(AppData *ad, Ecore_X_Atom reqAtom, CNP_ITEM *item, void **data_ret, int *size_ret, Ecore_X_Atom *ttype, int *tsize)
{
CALLED();
int count;
int i, j;
int item_type_index = ATOM_INDEX_TEXT;
if (item)
item_type_index = item->type_index;
/* First pass: count atoms of every type we can convert this item to. */
for (i = 0, count = 0; i < ATOM_INDEX_MAX; i++)
{
if (ad->targetAtoms[item_type_index].convert_to_target[i])
count += ad->targetAtoms[i].atom_cnt;
}
*data_ret = MALLOC(sizeof(Ecore_X_Atom) * count);
DMSG("item_type: %d, target Atom cnt: %d\n", item_type_index, count);
if (!*data_ret)
return EINA_FALSE;
/* Second pass: fill the array in the same order the count was taken. */
for (i = 0, count = 0; i < ATOM_INDEX_MAX; i++)
{
if (ad->targetAtoms[item_type_index].convert_to_target[i])
{
for(j = 0; j < ad->targetAtoms[i].atom_cnt; j++)
{
((Ecore_X_Atom *)*data_ret)[count++] = ad->targetAtoms[i].atom[j];
DMSG("send target atom: %s\n", ad->targetAtoms[i].name[j]);
}
}
}
if (size_ret) *size_ret = count;
if (ttype) *ttype = ECORE_X_ATOM_ATOM;
if (tsize) *tsize = 32;
return EINA_TRUE;
}
/* Dispatch a selection conversion request: TARGETS requests are answered with
 * the supported-atom list; anything else is routed through the conversion
 * matrix from the item's type to the requested type. On success *data_ret is a
 * freshly allocated NUL-terminated string owned by the caller, *size_ret its
 * strlen, *ttype the item's primary atom and *tsize 8 (byte format). */
Eina_Bool generic_converter(AppData *ad, Ecore_X_Atom reqAtom, CNP_ITEM *item, void **data_ret, int *size_ret, Ecore_X_Atom *ttype, int *tsize)
{
   CALLED();
   if (ad->targetAtoms[ATOM_INDEX_TARGET].atom[0] == reqAtom)
     return targets_converter(ad, reqAtom, item, data_ret, size_ret, ttype, tsize);
   int req_index = atom_type_index_get(ad, reqAtom);
   /* Fix: atom_type_index_get() returns -1 for an unknown target atom; the
    * original code used that value directly as an array index
    * (convert_to_target[-1]), reading out of bounds. Refuse instead. */
   if (req_index < 0)
     return EINA_FALSE;
   int item_type_index = ATOM_INDEX_TEXT;
   void *item_data = "";
   if (item)
     {
        item_type_index = item->type_index;
        item_data = item->data;
     }
   if (ad->targetAtoms[item_type_index].convert_to_target[req_index])
     {
        *data_ret = ad->targetAtoms[item_type_index].convert_to_target[req_index](ad, item_type_index, item_data);
        if (!*data_ret)
          return EINA_FALSE;
        if (size_ret) *size_ret = strlen(*data_ret);
        if (ttype) *ttype = ad->targetAtoms[item_type_index].atom[0];
        if (tsize) *tsize = 8;
        return EINA_TRUE;
     }
   return EINA_FALSE;
}
/* For convert EFL to HTML */
/* Position of a tag node within the parsed stream. */
#define TAGPOS_START 0x00000001
#define TAGPOS_END 0x00000002
#define TAGPOS_ALONE 0x00000003
/* TEXTBLOCK tag using stack but close tag word has no mean maybe bug...
 * TEXTBLOCK <b>bold<font>font</b>bold</font>
 * HTML <b>bold<font>font bold</b>font</font> */
/* Simple src->dst tag-name mapping used by the converters. */
typedef struct _TagTable {
     char *src;
     char *dst;
}TagTable;
/* EFL textblock tag -> HTML tag.
 * Fix: the original mapped "underline" -> "del" and "strikethrough" -> "ins",
 * which is backwards — in HTML <del>/<s> render struck-through and <ins>/<u>
 * render underlined. Use <u>/<s>, matching the reverse table below. */
TagTable _EFLtoHTMLConvertTable[] = {
     {"font", "font"},
     {"underline", "u"},
     {"strikethrough", "s"},
     {"br", "br"},
     {"br/", "br"},
     {"ps", "br"},
     {"b", "b"},
     {"item", "img"}
};
/* HTML tag -> EFL textblock tag.
 * Fix (same swap as above): <del> is strikethrough and <ins> is underline. */
TagTable _HTMLtoEFLConvertTable[] = {
     {"font", ""},
     {"del", "strikethrough"},
     {"u", "underline"},
     {"ins", "underline"},
     {"s", "strikethrough"},
     {"br", "br"},
     {"b", "b"},
     {"strong", "b"},
     {"img", "item"}
};
/* One node per tag or text run in the parsed markup stream. */
typedef struct _TagNode TagNode, *PTagNode;
struct _TagNode {
     char *tag; //EINA_STRINGSHARE if NULL just str
     char *tag_str;
     char *str;
     const char *pos_in_ori_str;  /* points into the original input string */
     PTagNode matchTag;           /* matching open/close node, when paired */
     void *tagData;               /* PFontTagData or PItemTagData, by tag */
     unsigned char tagPosType;    /* TAGPOS_* */
};
/* Parsed attributes of a font tag. */
typedef struct _FontTagData FontTagData, *PFontTagData;
struct _FontTagData {
     char *name;
     char *color;
     char *size;
     char *bg_color;
};
/* Parsed attributes of an item/img tag. */
typedef struct _ItemTagData ItemTagData, *PItemTagData;
struct _ItemTagData {
     char *href;
     char *width;
     char *height;
};
/* Free *ptr (if set) and NULL it.
 * Fix: the original definition ended in "} while(0);\" — the trailing
 * semicolon expanded to a double ';' at every use (breaking if/else bodies),
 * and the trailing backslash continued the macro onto the following line. */
#define SAFEFREE(ptr) \
   do \
     { \
        if (ptr) \
          FREE(ptr); \
        ptr = NULL; \
     } while(0)
/* Replace dst with value when value is non-NULL.
 * WARNING: 'value' is evaluated twice — never pass an expression with side
 * effects (callers assign the result to a temporary first). */
#define freeAndAssign(dst, value) \
   do \
     { \
        if (value) \
          { \
             SAFEFREE(dst); \
             dst = value; \
          } \
     } while(0)
static PTagNode _new_tag_node(char *tag, char *tag_str, char* str, const char *pos_in_ori_str);
static PTagNode _get_start_node(const char *str);
static PTagNode _get_next_node(PTagNode prev);
static void _delete_node(PTagNode node);
static void _link_match_tags(Eina_List *nodes);
static char *_get_tag_value(const char *tag_str, const char *tag_name);
static char *_convert_to_html(Eina_List* nodes);
static void _set_EFL_tag_data(Eina_List* nodes);
static char *_convert_to_edje(Eina_List* nodes);
static void _set_HTML_tag_data(Eina_List* nodes);
static void cleanup_tag_list(Eina_List *nodeList);
static PFontTagData _set_EFL_font_data(PFontTagData data, const char *tag_str);
static PItemTagData _set_EFL_item_data(PItemTagData data, const char *tag_str);
static PFontTagData _set_HTML_font_data(PFontTagData data, const char *tag_str);
static PItemTagData _set_HTML_img_data(PItemTagData data, const char *tag_str);
#ifdef DEBUG
static void _dumpNode(Eina_List* nodes);
#endif
/* Allocate a TagNode taking ownership of tag, tag_str and str (all may be
 * NULL); tag and tag_str are lowercased in place so later strcmp-based lookups
 * are case-insensitive. pos_in_ori_str is only referenced, not owned. */
static PTagNode
_new_tag_node(char *tag, char *tag_str, char* str, const char *pos_in_ori_str)
{
PTagNode newNode = CALLOC(1, sizeof(TagNode));
if (tag)
eina_str_tolower(&tag);
newNode->tag = tag;
if (tag_str)
eina_str_tolower(&tag_str);
newNode->tag_str = tag_str;
newNode->str = str;
newNode->pos_in_ori_str = pos_in_ori_str;
return newNode;
}
/* Build the first node of the stream: the plain-text run preceding the first
 * '<' (or the whole string when it contains no tag). When str starts with '<'
 * the node carries a NULL text and only anchors pos_in_ori_str for
 * _get_next_node(). Returns NULL for NULL/empty input. */
static PTagNode
_get_start_node(const char *str)
{
char *startStr = NULL;
if (!str || str[0] == '\0')
return NULL;
if (str[0] != '<')
{
char *tagStart = strchr(str, '<');
if (!tagStart)
startStr = strdup(str);
else
{
/* Copy only the text up to (excluding) the first tag. */
int strLength = tagStart - str;
startStr = MALLOC(sizeof(char) * (strLength + 1));
strncpy(startStr, str, strLength);
startStr[strLength] = '\0';
}
}
return _new_tag_node(NULL, NULL, startStr, str);
}
/* Parse the next tag (plus its trailing text run) after prev. Returns NULL at
 * end of input. A '<' not followed by a matching '>' before the next '<' is
 * treated as plain text via _get_start_node(). */
static PTagNode
_get_next_node(PTagNode prev)
{
PTagNode retTag = NULL;
char *tagStart;
char *tagEnd;
char *tagNameEnd = NULL;
char *nextTagStart;
if (prev->tag == NULL)
tagStart = strchr(prev->pos_in_ori_str, '<');
else
tagStart = strchr(prev->pos_in_ori_str + 1, '<');
if (!tagStart)
return retTag;
tagEnd = strchr(tagStart, '>');
nextTagStart = strchr(tagStart + 1, '<');
/* Malformed tag (unterminated, or another '<' first): degrade to text. */
if (!tagEnd || (nextTagStart && (nextTagStart < tagEnd)))
return _get_start_node(tagStart + 1);
/* The tag name ends at the first separator ('=', '_', whitespace) or '>'. */
int spCnt = 5;
char *spArray[spCnt];
spArray[0] = strchr(tagStart, '=');
spArray[1] = strchr(tagStart, '_');
spArray[2] = strchr(tagStart, ' ');
spArray[3] = strchr(tagStart, '\t');
spArray[4] = strchr(tagStart, '\n');
tagNameEnd = tagEnd;
int i;
for (i = 0; i < spCnt; i++)
{
if (spArray[i] && spArray[i] < tagNameEnd)
tagNameEnd = spArray[i];
}
int tagLength = tagNameEnd - tagStart - 1;
char *tagName = NULL;
/* "</item>" is mapped to an empty tag name — item tags are standalone in
 * EFL markup, so their close form is discarded. NOTE(review): strncmp with
 * tagLength only compares a prefix when tagLength < 5 — confirm intended. */
if (!strncmp(&tagStart[1], "/item", tagLength))
tagName = strdup("");
else
tagName = strndup(&tagStart[1], tagLength);
int tagStrLength = 0;
char *tagStr = NULL;
if (tagName)
{
/* Keep the raw "<...>" text for later attribute parsing. */
tagStrLength = tagEnd - tagStart + 1;
tagStr = strndup(tagStart, tagStrLength);
}
/* Trailing text run: everything between '>' and the next '<' (or the end). */
unsigned int strLength = nextTagStart ? (unsigned int)(nextTagStart - tagEnd - 1) : strlen(&tagEnd[1]);
char *str = strndup(&tagEnd[1], strLength);
retTag = _new_tag_node(tagName, tagStr, str, tagStart);
return retTag;
}
/* Free a TagNode and everything it owns, including the per-tag attribute
 * payload (FontTagData for "font", ItemTagData for "item"). NULL-safe. */
static void
_delete_node(PTagNode node)
{
if (node)
{
SAFEFREE(node->tag_str);
SAFEFREE(node->str);
if (node->tagData)
{
if (node->tag)
{
if (!strcmp("font", node->tag))
{
PFontTagData data = node->tagData;
SAFEFREE(data->name);
SAFEFREE(data->color);
SAFEFREE(data->size);
SAFEFREE(data->bg_color);
}
if (!strcmp("item", node->tag))
{
PItemTagData data = node->tagData;
SAFEFREE(data->href);
SAFEFREE(data->width);
SAFEFREE(data->height);
}
}
SAFEFREE(node->tagData);
}
SAFEFREE(node->tag);
SAFEFREE(node);
}
}
/* Pair each opening tag node with its closing node ("x" with "/x") using a
 * stack, marking tagPosType and cross-linking matchTag. Standalone tags
 * (br/, item, img) are marked TAGPOS_ALONE. Unclosed opens get synthetic
 * "/x" close nodes appended at the end of the list. */
static void
_link_match_tags(Eina_List *nodes)
{
Eina_List *stack = NULL;
PTagNode trail, popData;
Eina_List *l, *r;
EINA_LIST_FOREACH(nodes, l, trail)
{
if (!trail->tag || trail->tag[0] == '\0')
continue;
if (!strcmp("br/", trail->tag))
{
trail->tagPosType = TAGPOS_ALONE;
continue;
}
else if (!strcmp("item", trail->tag) || !strcmp("img", trail->tag))
{
trail->tagPosType = TAGPOS_ALONE;
continue;
}
if (trail->tag[0] != '/') // PUSH
{
stack = eina_list_append(stack, trail);
/* eina_array_push(stack, trail);
DMSG("stack: %d, tag %s\n", eina_array_count_get(stack), trail->tag);*/
DMSG("stack: %d, tag %s\n", eina_list_count(stack), trail->tag);
}
else // POP
{
if (!eina_list_count(stack))
{
/* Close tag with no matching open: ignored. */
DMSG("tag not matched %s\n", trail->tag);
continue;
}
/* Search the stack top-down so mis-nested markup (see the TEXTBLOCK
 * comment above the tables) still pairs with the nearest open tag. */
EINA_LIST_REVERSE_FOREACH(stack, r, popData)
{
if (popData->tag && !strcmp(popData->tag, &trail->tag[1]))
{
popData->tagPosType = TAGPOS_START;
trail->tagPosType = TAGPOS_END;
popData->matchTag = trail;
trail->matchTag = popData;
stack = eina_list_remove_list(stack, r);
break;
}
}
/* popData = eina_array_pop(stack);
popData->tagPosType = TAGPOS_START;
trail->tagPosType = TAGPOS_END;
popData->matchTag = trail;
trail->matchTag = popData;
DMSG("pop stack: %d, tag %s\n", eina_array_count_get(stack), trail->tag);
*/
}
}
/* if (eina_array_count_get(stack))
DMSG("stack state: %d, tag %s\n", eina_array_count_get(stack), trail->tag);*/
/* Make Dummy close tag */
/* while ((popData = eina_array_pop(stack))) */
EINA_LIST_REVERSE_FOREACH(stack, r, popData)
{
PTagNode newData;
int tagLength = strlen(popData->tag);
char *tagName = MALLOC(sizeof(char) * (tagLength + 2));
tagName[0] = '/';
tagName[1] = '\0';
strcat(tagName, popData->tag);
newData = _new_tag_node(tagName, NULL, NULL, NULL);
popData->tagPosType = TAGPOS_START;
newData->tagPosType = TAGPOS_END;
popData->matchTag = newData;
newData->matchTag = popData;
/* NOTE(review): the updated head pointer is local; the caller only sees
 * the appended dummies because appending to a non-empty list keeps the
 * same head — confirm nodes is never empty here. */
nodes = eina_list_append(nodes, newData);
/* DMSG("stack: %d, tag %s\n", eina_array_count_get(stack), popData->tag);*/
}
/* DMSG("stack_top: %d\n", eina_array_count_get(stack));
eina_array_free(stack);*/
eina_list_free(stack);
}
/* Extract the value of attribute tag_name from a raw tag string: finds
 * "name=", skips quotes/spaces up to the first alnum or '#', and copies up to
 * the next delimiter. Returns a malloc'd string the caller frees, or NULL when
 * the attribute is absent. */
static char *
_get_tag_value(const char *tag_str, const char *tag_name)
{
if (!tag_name || !tag_str)
return NULL;
char *tag;
if ((tag = strstr(tag_str, tag_name)))
{
/* Reject prefixed matches like "font_size" when looking for "font". */
if (tag[strlen(tag_name)] == '_')
return NULL;
char *value = strchr(tag, '=');
if (value)
{
do
{
/* NOTE(review): this skip loop has no end-of-string check; a value
 * with no alnum/'#' character would walk past the buffer. */
value++;
} while (!isalnum(*value) && *value != '#');
/* The value ends at the first delimiter below (or end of string). */
int spCnt = 6;
char *spArray[spCnt];
spArray[0] = strchr(value, ' ');
spArray[1] = strchr(value, '>');
spArray[2] = strchr(value, '\"');
spArray[3] = strchr(value, '\'');
spArray[4] = strchr(value, '\t');
spArray[5] = strchr(value, '\n');
char *valueEnd = strchr(value, '\0');
int i;
int start = 0;
/* File paths may legitimately contain spaces: for item/img source
 * attributes the space delimiter is skipped (spArray[0]). */
if ((!strncmp(tag_str, "<item", 5) && !strcmp(tag_name, "href")) // EFL img tag
|| (!strncmp(tag_str, "<img", 4) && !strcmp(tag_name, "src"))) // HTML img tag
start = 1;
for (i = start; i < spCnt; i++)
{
if (spArray[i] && spArray[i] < valueEnd)
valueEnd = spArray[i];
}
int valueLength = valueEnd - value;
return strndup(value, valueLength);
}
}
return NULL;
}
/* Parse the attributes of an EFL <font ...> tag string into a FontTagData,
 * allocating the record when data is NULL. Fields are only replaced when the
 * corresponding attribute is present (freeAndAssign ignores NULL). The parsed
 * value is staged in a temporary because freeAndAssign evaluates its second
 * argument twice. */
static PFontTagData
_set_EFL_font_data(PFontTagData data, const char *tag_str)
{
   PFontTagData font = data;
   char *parsed;

   if (!font)
     font = CALLOC(1, sizeof(FontTagData));
   parsed = _get_tag_value(tag_str, "font_size");
   freeAndAssign(font->size, parsed);
   parsed = _get_tag_value(tag_str, "color");
   freeAndAssign(font->color, parsed);
   parsed = _get_tag_value(tag_str, "bgcolor");
   freeAndAssign(font->bg_color, parsed);
   parsed = _get_tag_value(tag_str, "font");
   freeAndAssign(font->name, parsed);
   return font;
}
/* Parse href/absize attributes of an EFL <item ...> tag into an ItemTagData,
 * allocating the record when data is NULL. "file://" hrefs get runs of
 * duplicate slashes after the scheme collapsed to one; "absize=WxH" is split
 * into width/height strings. */
static PItemTagData
_set_EFL_item_data(PItemTagData data, const char *tag_str)
{
char *value;
if (!data)
data = CALLOC(1, sizeof(ItemTagData));
value = _get_tag_value(tag_str, "href");
if (value)
{
char *path = strstr(value, "file://");
if (path)
{
/* Rebuild as "file://" + path with extra leading '/' pairs removed;
 * modify is at most as long as value, so the buffer fits. */
char *modify = MALLOC(sizeof(char) * (strlen(value) + 1));
strncpy(modify, "file://", 8);
path += 7;
while (path[1] && path[0] && path[1] == '/' && path[0] == '/')
{
path++;
}
strcat(modify, path);
data->href = modify;
DMSG("image href ---%s---\n", data->href);
FREE(value);
}
else
freeAndAssign(data->href, value);
}
value = _get_tag_value(tag_str, "absize");
if (value)
{
char *xpos = strchr(value, 'x');
if (xpos)
{
/* Split "WIDTHxHEIGHT" at the 'x'. */
int absizeLen = strlen(value);
freeAndAssign(data->width, strndup(value, xpos - value));
freeAndAssign(data->height, strndup(xpos + 1, absizeLen - (xpos - value) - 1));
DMSG("image width: -%s-, height: -%s-\n", data->width, data->height);
}
FREE(value);
}
return data;
}
/* Walk the node list and attach parsed attribute data to every EFL
 * font/color/item tag node. */
static void
_set_EFL_tag_data(Eina_List* nodes)
{
   Eina_List *iter;
   PTagNode node;

   EINA_LIST_FOREACH(nodes, iter, node)
     {
        if (!node->tag)
          continue;
        if (strcmp(node->tag, "font") == 0 || strcmp(node->tag, "color") == 0)
          node->tagData = _set_EFL_font_data(node->tagData, node->tag_str);
        else if (strcmp(node->tag, "item") == 0)
          node->tagData = _set_EFL_item_data(node->tagData, node->tag_str);
     }
}
/* Parse the attributes of an HTML <font ...> tag string into a FontTagData,
 * allocating the record when data is NULL. Mirrors _set_EFL_font_data but uses
 * the HTML attribute names (size/color/bgcolor/face). A temporary stages each
 * value because freeAndAssign evaluates its second argument twice. */
static PFontTagData
_set_HTML_font_data(PFontTagData data, const char *tag_str)
{
   PFontTagData font = data;
   char *parsed;

   if (!font)
     font = CALLOC(1, sizeof(FontTagData));
   parsed = _get_tag_value(tag_str, "size");
   freeAndAssign(font->size, parsed);
   parsed = _get_tag_value(tag_str, "color");
   freeAndAssign(font->color, parsed);
   parsed = _get_tag_value(tag_str, "bgcolor");
   freeAndAssign(font->bg_color, parsed);
   parsed = _get_tag_value(tag_str, "face");
   freeAndAssign(font->name, parsed);
   return font;
}
/* Parse src/width/height attributes of an HTML <img ...> tag into an
 * ItemTagData, allocating the record when data is NULL. "file://" sources get
 * duplicate slashes after the scheme collapsed, like _set_EFL_item_data. */
static PItemTagData
_set_HTML_img_data(PItemTagData data, const char *tag_str)
{
char *value;
if (!data)
data = CALLOC(1, sizeof(ItemTagData));
value = _get_tag_value(tag_str, "src");
if (value)
{
char *path = strstr(value, "file://");
if (path)
{
/* Rebuild as "file://" + path with extra leading '/' pairs removed. */
char *modify = MALLOC(sizeof(char) * (strlen(value) + 1));
strncpy(modify, "file://", 8);
path += 7;
while (path[1] && path[0] && path[1] == '/' && path[0] == '/')
{
path++;
}
strcat(modify, path);
data->href = modify;
DMSG("image src ---%s---\n", data->href);
FREE(value);
}
else
freeAndAssign(data->href, value);
}
value = _get_tag_value(tag_str, "width");
freeAndAssign(data->width, value);
value = _get_tag_value(tag_str, "height");
freeAndAssign(data->height, value);
return data;
}
/* Walk the node list and attach parsed attribute data to every HTML
 * font/color/img tag node. */
static void
_set_HTML_tag_data(Eina_List* nodes)
{
   Eina_List *iter;
   PTagNode node;

   EINA_LIST_FOREACH(nodes, iter, node)
     {
        if (!node->tag)
          continue;
        if (strcmp(node->tag, "font") == 0 || strcmp(node->tag, "color") == 0)
          node->tagData = _set_HTML_font_data(node->tagData, node->tag_str);
        else if (strcmp(node->tag, "img") == 0)
          node->tagData = _set_HTML_img_data(node->tagData, node->tag_str);
     }
}
#ifdef DEBUG
/* Debug-only dump of the parsed node list: tag, raw tag string, text run,
 * position type, match link and any parsed attribute payload. */
static void
_dumpNode(Eina_List* nodes)
{
PTagNode trail;
Eina_List *l;
EINA_LIST_FOREACH(nodes, l, trail)
{
DMSG("tag: %s, tag_str: %s, str: %s, tagPosType: %d\n",
trail->tag, trail->tag_str, trail->str, trail->tagPosType);
/* Fix: pointers were printed with "%x" after a cast to unsigned int,
 * which truncates on 64-bit builds and is undefined for %x; use %p. */
DMSG("matchTag: %p ", (void *)trail->matchTag);
if (trail->matchTag)
DMSG("matchTag->tag_str: %s", trail->matchTag->tag_str);
if (trail->tagData)
{
if (!strcmp(trail->tag, "font"))
{
PFontTagData data = trail->tagData;
DMSG(" tagData->name: %s, tagData->color: %s, tagData->size: %s, tagData->bg_color: %s",
data->name, data->color, data->size, data->bg_color);
}
else if (!strcmp(trail->tag, "item") || !strcmp(trail->tag, "img"))
{
PItemTagData data = trail->tagData;
DMSG(" tagData->href: %s, tagData->width: %s, tagData->height: %s",
data->href, data->width, data->height);
}
else
DMSG("\nERROR!!!! not need tagData");
}
DMSG("\n");
}
}
#endif
/* Serialize a parsed EFL-markup node list as HTML using
 * _EFLtoHTMLConvertTable, emitting font color/size and img src/width/height
 * attributes from the nodes' parsed tagData. Returns a malloc'd string the
 * caller frees. */
static char *
_convert_to_html(Eina_List* nodes)
{
PTagNode trail;
Eina_List *l;
Eina_Strbuf *html = eina_strbuf_new();
int tableCnt = sizeof(_EFLtoHTMLConvertTable) / sizeof(TagTable);
EINA_LIST_FOREACH(nodes, l, trail)
{
if (trail->tag)
{
/* Close tags look up the table via their matching open tag's name. */
char *tagName = trail->tagPosType == TAGPOS_END ?
trail->matchTag->tag : trail->tag;
int j;
for(j = 0; j < tableCnt; j++)
{
if (!strcmp(_EFLtoHTMLConvertTable[j].src, tagName))
{
switch(trail->tagPosType)
{
case TAGPOS_END:
eina_strbuf_append(html, "</");
break;
default:
eina_strbuf_append(html, "<");
break;
}
eina_strbuf_append(html, _EFLtoHTMLConvertTable[j].dst);
if (trail->tagPosType != TAGPOS_END)
{
if (!strcmp(_EFLtoHTMLConvertTable[j].src, "font"))
{
PFontTagData data = trail->tagData;
if (data->name)
{
/* font face intentionally not emitted */
}
if (data->color)
{
/* EFL colors may carry an alpha byte (#RRGGBBAA);
 * strip it to #RRGGBB for HTML. */
char *color = strdup(data->color);
if (color && color[0] == '#' && strlen(color) == 9)
{
color[7] = '\0';
eina_strbuf_append_printf(html, " color=\"%s\"", color);
}
else
eina_strbuf_append_printf(html, " color=\"%s\"", data->color);
/* Fix: the original freed 'color' only on the
 * alpha-stripping branch, leaking it otherwise. */
if (color)
FREE(color);
}
if (data->size)
eina_strbuf_append_printf(html, " size=\"%s\"", data->size);
if (data->bg_color)
{
/* background color intentionally not emitted */
}
}
else if (!strcmp(_EFLtoHTMLConvertTable[j].src, "item"))
{
PItemTagData data = trail->tagData;
if (data->href)
eina_strbuf_append_printf(html, " src=\"%s\"", data->href);
if (data->width)
eina_strbuf_append_printf(html, " width=\"%s\"", data->width);
if (data->height)
eina_strbuf_append_printf(html, " height=\"%s\"", data->height);
}
}
switch(trail->tagPosType)
{
/* closed tag does not need in HTML
case TAGPOS_ALONE:
eina_strbuf_append(html, " />");
break;*/
default:
eina_strbuf_append(html, ">");
break;
}
break;
}
}
}
if (trail->str)
eina_strbuf_append(html, trail->str);
}
/* NOTE(review): this replacement's arguments look identical here — the
 * original source likely replaced a non-breaking-space entity and was
 * mangled by extraction; confirm against the upstream file. */
eina_strbuf_replace_all(html, " ", " ");
char *ret = eina_strbuf_string_steal(html);
eina_strbuf_free(html);
return ret;
}
#define IMAGE_DEFAULT_WIDTH "240"
#define IMAGE_DEFAULT_HEIGHT "180"
static char *
_convert_to_edje(Eina_List* nodes)
{
   /* Serialize the parsed tag-node list into EFL (edje) markup, mapping
    * HTML tag names to EFL ones via _HTMLtoEFLConvertTable. Table entries
    * whose dst is "" (e.g. "font", see the END-side handling below) have
    * no single EFL equivalent and are expanded attribute-by-attribute.
    * Returns a heap-allocated string the caller must free. */
   PTagNode trail;
   Eina_List *l;
   Eina_Strbuf *edje = eina_strbuf_new();
   int tableCnt = sizeof(_HTMLtoEFLConvertTable) / sizeof(TagTable);
   EINA_LIST_FOREACH(nodes, l, trail)
     {
        if (trail->tag)
          {
             /* A closing tag emits the name of its matching opener. */
             char *tagName = trail->tagPosType == TAGPOS_END ?
                trail->matchTag->tag : trail->tag;
             int j;
             for(j = 0; j < tableCnt; j++)
               {
                  if (!strcmp(_HTMLtoEFLConvertTable[j].src, tagName))
                    {
                       /* Only emit "<dst" / "</dst" when the table maps to a
                        * real EFL tag name; "" entries are expanded below. */
                       if (_HTMLtoEFLConvertTable[j].dst[0] != '\0')
                         {
                            switch(trail->tagPosType)
                              {
                               case TAGPOS_END:
                                  eina_strbuf_append(edje, "</");
                                  break;
                               default:
                                  eina_strbuf_append(edje, "<");
                                  break;
                              }
                            eina_strbuf_append(edje, _HTMLtoEFLConvertTable[j].dst);
                         }
                       if (trail->tagPosType != TAGPOS_END)
                         {
                            if (!strcmp(_HTMLtoEFLConvertTable[j].src, "font"))
                              {
                                 /* <font ...> expands into separate, self-closed
                                  * EFL tags (<color=...>, <font_size=...>); the
                                  * break below leaves the table loop so the
                                  * generic ">" append is skipped on purpose.
                                  * NOTE(review): tagData is dereferenced without a
                                  * NULL check — confirm the HTML attribute parser
                                  * always allocates it for <font>. */
                                 PFontTagData data = trail->tagData;
                                 if (data->name)
                                   {
                                      /* face name: no EFL equivalent emitted */
                                   }
                                 if (data->color)
                                   {
                                      /* EFL expects #RRGGBBAA: append opaque alpha
                                       * when the HTML color is a plain #RRGGBB */
                                      if (data->color[0] == '#' && strlen(data->color) == 7)
                                         eina_strbuf_append_printf(edje, "<color=%sff>", data->color);
                                      else
                                         eina_strbuf_append_printf(edje, "<color=%s>", data->color);
                                   }
                                 if (data->size)
                                    eina_strbuf_append_printf(edje, "<font_size=%s>", data->size);
                                 if (data->bg_color)
                                   {
                                      /* background color: not translated */
                                   }
                                 break;
                              }
                            else if (!strcmp(_HTMLtoEFLConvertTable[j].src, "img"))
                              {
                                 /* <img> becomes a complete EFL item: the tag name
                                  * was emitted from the table dst above; size falls
                                  * back to IMAGE_DEFAULT_* when missing; the break
                                  * skips the generic ">" since "></item>" is
                                  * appended here (only when href is present). */
                                 PItemTagData data = trail->tagData;
                                 char *width = IMAGE_DEFAULT_WIDTH, *height = IMAGE_DEFAULT_HEIGHT;
                                 if (data->width)
                                    width = data->width;
                                 if (data->height)
                                    height = data->height;
                                 eina_strbuf_append_printf(edje, " absize=%sx%s", width, height);
                                 if (data->href)
                                    eina_strbuf_append_printf(edje, " href=%s></item>", data->href);
                                 break;
                              }
                         }
                       else
                         {
                            if (_HTMLtoEFLConvertTable[j].dst[0] == '\0')
                              {
                                 /* Closing side of a ""-mapped tag: for </font>,
                                  * emit closers matching exactly the attribute tags
                                  * the opener produced (looked up via matchTag). */
                                 if (!strcmp(_HTMLtoEFLConvertTable[j].src, "font"))
                                   {
                                      if (trail->matchTag->tagData)
                                        {
                                           PFontTagData data = trail->matchTag->tagData;
                                           if (data->name)
                                             {
                                             }
                                           if (data->color)
                                              eina_strbuf_append_printf(edje, "</color>");
                                           if (data->size)
                                              eina_strbuf_append_printf(edje, "</font>");
                                           if (data->bg_color)
                                             {
                                             }
                                           break;
                                        }
                                   }
                              }
                         }
                       /* Generic terminator for tags that were not fully
                        * emitted (and broken out) by the branches above. */
                       switch(trail->tagPosType)
                         {
                          /* not support in efl
                          case TAGPOS_ALONE:
                             eina_strbuf_append(edje, " />");
                             break;
                          */
                          default:
                             eina_strbuf_append(edje, ">");
                             break;
                         }
                       break;
                    }
               }/* for(j = 0; j < tableCnt; j++) end */
          }
        if (trail->str)
          eina_strbuf_append(edje, trail->str);
     }
   /* NOTE(review): both arguments look identical here — an "&nbsp;"
    * entity was probably lost in transit; confirm against upstream. */
   eina_strbuf_replace_all(edje, " ", " ");
   char *ret = eina_strbuf_string_steal(edje);
   eina_strbuf_free(edje);
   return ret;
}
char *string_for_entry_get(AppData *ad, int type_index, const char *str)
{
   /* Dispatch to the per-target converter that prepares clipboard data
    * for display in the entry widget. Returns NULL when the target atom
    * has no converter registered. */
   DMSG("type_index: %d ", type_index);
   DMSG("str: %s\n", str);
   if (!ad->targetAtoms[type_index].convert_for_entry)
      return NULL;
   return ad->targetAtoms[type_index].convert_for_entry(ad, type_index, str);
}
static char *make_close_tag(Eina_List* nodes)
{
   /* Re-emit the node list as markup: known tags use their original
    * tag_str when available, otherwise a minimal "<tag>" is rebuilt;
    * plain-text segments are appended verbatim. Caller frees result. */
   CALLED();
   Eina_Strbuf *buf = eina_strbuf_new();
   Eina_List *l;
   PTagNode node;

   EINA_LIST_FOREACH(nodes, l, node)
     {
        if (node->tag)
          {
             if (node->tag_str)
                eina_strbuf_append(buf, node->tag_str);
             else
                eina_strbuf_append_printf(buf, "<%s>", node->tag);
          }
        if (node->str)
           eina_strbuf_append(buf, node->str);
     }
   char *ret = eina_strbuf_string_steal(buf);
   eina_strbuf_free(buf);
   return ret;
}
static char *do_not_convert(AppData *ad, int type_index, const char *str)
{
   /* Pass-through converter: plain text is simply duplicated; markup
    * targets are parsed and re-emitted so unmatched tags are normalized
    * (via make_close_tag). Caller frees the result. */
   DMSG("str: %s\n", str);
   if (type_index == ATOM_INDEX_TEXT)
      return strdup(str);

   Eina_List *nodes = NULL;
   for (PTagNode node = _get_start_node(str); node; node = _get_next_node(node))
      nodes = eina_list_append(nodes, node);
   _link_match_tags(nodes);
#ifdef DEBUG
   _dumpNode(nodes);
#endif
   char *ret = make_close_tag(nodes);
   cleanup_tag_list(nodes);
   DMSG("convert str: %s\n", ret);
   return ret;
}
/*
static char *efl_to_efl(AppData *ad, int type_index, const char *str)
{
CALLED();
return NULL;
}
static char *html_to_html(AppData *ad, int type_index, const char *str)
{
CALLED();
return NULL;
}
*/
#define IMAGE_DEFAULT_WIDTH "240"
#define IMAGE_DEFAULT_HEIGHT "180"
static char *make_image_path_tag(int type_index, const char *str)
{
char *img_tag_str = "file://%s";
char *efl_img_tag = "<item absize="IMAGE_DEFAULT_WIDTH"x"IMAGE_DEFAULT_HEIGHT" href=file://%s>";
char *html_img_tag = "<img src=\"file://%s\">";
switch (type_index)
{
case ATOM_INDEX_HTML:
img_tag_str = html_img_tag;
break;
case ATOM_INDEX_EFL:
img_tag_str = efl_img_tag;
break;
case ATOM_INDEX_TEXT:
case ATOM_INDEX_IMAGE:
break;
default:
DMSG("ERROR: wrong type_index: %d\n", type_index);
return NULL;
}
size_t len = snprintf(NULL, 0, img_tag_str, str) + 1;
char *ret = MALLOC(sizeof(char) * len);
if (ret)
snprintf(ret, len, img_tag_str, str);
return ret;
}
/*
static char *image_path_to_text(AppData *ad, int type_index, const char *str)
{
DMSG("str: %s\n", str);
return make_image_path_tag(ATOM_INDEX_TEXT, str);
}
*/
/* Converter: wrap an on-disk image path in an HTML <img> tag. */
static char *image_path_to_html(AppData *ad, int type_index, const char *str)
{
   DMSG("str: %s\n", str);
   return make_image_path_tag(ATOM_INDEX_HTML, str);
}
/* Converter: wrap an on-disk image path in an EFL <item> tag. */
static char *image_path_to_efl(AppData *ad, int type_index, const char *str)
{
   DMSG("str: %s\n", str);
   return make_image_path_tag(ATOM_INDEX_EFL, str);
}
/* Converter: image path to a bare file:// URI (no tag wrapping). */
static char *image_path_to_image_path(AppData *ad, int type_index, const char *str)
{
   DMSG("str: %s\n", str);
   /* Fix: removed a stray ";;" (empty statement) after the return. */
   return make_image_path_tag(ATOM_INDEX_IMAGE, str);
}
static char *markup_to_entry(AppData *ad, int type_index, const char *str)
{
   /* Re-render arbitrary HTML/EFL markup into the restricted markup the
    * entry widget displays: a fixed font-size/color prefix, the plain
    * text between tags, <br> line breaks, and fixed-size 66x62 <item>
    * thumbnails for images. All other tags are stripped.
    * Returns a heap-allocated string (or NULL when str is NULL). */
   CALLED();
   if (!str)
      return NULL;
   Eina_Strbuf *strbuf = eina_strbuf_new();
   if (!strbuf)
      return strdup(str);
   eina_strbuf_prepend(strbuf, "<font_size=18><color=#000000FF>");

   const char *trail = str;
   char *image_tag_str = NULL;
   char *html_img_tag = "img";
   char *efl_img_tag = "item";
   /* Pick which tag name marks an inline image in the source markup;
    * any other type_index leaves image handling disabled. */
   if (type_index == ATOM_INDEX_HTML) /* HTML */
      image_tag_str = html_img_tag;
   else if (type_index == ATOM_INDEX_EFL) /* EFL */
      image_tag_str = efl_img_tag;

   while (trail && *trail)
     {
        const char *pretrail = trail;
        unsigned long length;
        char *temp;
        char *endtag;
        /* Find the next tag; everything before it is copied verbatim. */
        trail = strchr(trail, '<');
        if (!trail)
          {
             eina_strbuf_append(strbuf, pretrail);
             break;
          }
        endtag = strchr(trail, '>');
        if (!endtag)
           break;   /* unterminated tag: drop the remainder */
        length = trail - pretrail;
        temp = strndup(pretrail, length);
        if (!temp)
          {
             trail++;
             continue;
          }
        eina_strbuf_append(strbuf, temp);
        FREE(temp);
        trail++;   /* trail now points at the tag name */
        if (trail[0] == '/')
          {
             /* closing tag: skipped entirely */
             trail = endtag + 1;
             continue;
          }
        if (strncmp(trail, "br", 2) == 0)
          {
             /* line breaks are preserved */
             eina_strbuf_append(strbuf, "<br>");
             trail = endtag + 1;
             continue;
          }
        if (image_tag_str && strncmp(trail, image_tag_str, strlen(image_tag_str)) == 0)
          {
             /* Image tag: extract the file:// path from inside this tag
              * and emit a fixed-size thumbnail item. */
             char *src = strstr(trail, "file://");
             char *src_endtag = strchr(trail, '>');
             /* NOTE(review): this continue does not advance past the tag;
              * the next pass re-scans from inside it, so the tag body is
              * emitted as literal text — confirm that is intended. */
             if (!src || !src_endtag || src_endtag < src)
                continue;
             length = src_endtag - src;
             src = strndup(src, length);
             if (!src)
               {
                  trail = endtag + 1;
                  continue;
               }
             /* Truncate the copied path at the first quote or '>'. */
             temp = src;
             while(*temp)
               {
                  if (*temp == '\"' || *temp == '>')
                     *temp = '\0';
                  else
                     temp++;
               }
             eina_strbuf_append_printf(strbuf, "<item absize=66x62 href=%s></item>", src);
             DTRACE("src str: %s \n", src);
             FREE(src);
          }
        trail = endtag + 1;
     }
   /* NOTE(review): both replace arguments look identical — probably an
    * "&nbsp;" entity lost in transit; confirm against upstream. */
   if (type_index == ATOM_INDEX_HTML)
      eina_strbuf_replace_all(strbuf, " ", " ");
   char *entry_str = eina_strbuf_string_steal(strbuf);
   eina_strbuf_free(strbuf);
   return entry_str;
}
/* Converter: HTML markup -> entry-displayable markup. */
static char *html_to_entry(AppData *ad, int type_index, const char *str)
{
   DMSG("str: %s\n", str);
   return markup_to_entry(ad, type_index, str);
}
/* Converter: EFL markup -> entry-displayable markup. */
static char *efl_to_entry(AppData *ad, int type_index, const char *str)
{
   DMSG("str: %s\n", str);
   return markup_to_entry(ad, type_index, str);
}
/* Converter stub: image paths have no entry representation here;
 * always returns NULL (the caller falls back accordingly). */
static char *image_path_to_entry(AppData *ad, int type_index, const char *str)
{
   CALLED();
   return NULL;
}
static char *text_to_entry(AppData *ad, int type_index, const char *str)
{
   /* Converter: escape plain text into markup, then reduce it to the
    * restricted entry markup. Caller frees the result. */
   DMSG("str: %s\n", str);
   char *markup = (char*)_elm_util_text_to_mkup(str);
   char *for_entry = markup_to_entry(ad, type_index, markup);
   FREE(markup);
   return for_entry;
}
static Eina_List *make_tag_list(int type_index, const char *str)
{
   /* Parse str into a list of tag nodes, link matching open/close
    * pairs, and attach per-tag attribute data appropriate for the
    * given target type. Caller releases with cleanup_tag_list(). */
   Eina_List *nodeList = NULL;
   PTagNode nodeData;

   nodeData = _get_start_node(str);
   while (nodeData)
     {
        nodeList = eina_list_append(nodeList, nodeData);
        nodeData = _get_next_node(nodeData);
     }
   _link_match_tags(nodeList);
   switch(type_index)
     {
      case ATOM_INDEX_EFL:
         _set_EFL_tag_data(nodeList);
         break;
      case ATOM_INDEX_HTML:
         _set_HTML_tag_data(nodeList);
         break;
      default:
         /* Fix: the format string had a %d with no matching argument
          * (undefined behavior) — pass type_index. */
         DMSG("wrong index: %d\n", type_index);
     }
#ifdef DEBUG
   _dumpNode(nodeList);
#endif
   return nodeList;
}
static void cleanup_tag_list(Eina_List *nodeList)
{
   /* Free every tag node produced by make_tag_list(), then release
    * the list container itself. */
   Eina_List *l;
   PTagNode node;

   EINA_LIST_FOREACH(nodeList, l, node)
      _delete_node(node);
   eina_list_free(nodeList);
}
static char *html_to_efl(AppData *ad, int type_index, const char *str)
{
   /* Converter: HTML markup -> EFL markup, via the parsed tag list. */
   CALLED();
   Eina_List *nodes = make_tag_list(type_index, str);
   char *ret = _convert_to_edje(nodes);
   DMSG("efl: %s\n", ret);
   cleanup_tag_list(nodes);
   return ret;
}
static char *efl_to_html(AppData *ad, int type_index, const char *str)
{
   /* Converter: EFL markup -> HTML markup, via the parsed tag list. */
   CALLED();
   Eina_List *nodes = make_tag_list(type_index, str);
   char *ret = _convert_to_html(nodes);
   DMSG("html: %s\n", ret);
   cleanup_tag_list(nodes);
   return ret;
}
static char *text_to_html(AppData *ad, int type_index, const char *str)
{
   /* Converter: plain text -> EFL markup -> HTML. The intermediate
    * markup string is owned (and freed) here. */
   DMSG("str: %s\n", str);
   char *markup = (char*)_elm_util_text_to_mkup(str);
   char *html = efl_to_html(ad, ATOM_INDEX_EFL, markup);
   FREE(markup);
   return html;
}
/* Converter: plain text -> EFL markup, delegating to elementary's
 * text-to-markup helper. Caller frees the result. */
static char *text_to_efl(AppData *ad, int type_index, const char *str)
{
   DMSG("str: %s\n", str);
   char *ret = NULL;
   ret = (char*)_elm_util_text_to_mkup(str);
   return ret;
}
static char *to_text(AppData *ad, int type_index, const char *str)
{
   /* Converter: markup (HTML or EFL) -> plain text. For HTML input the
    * string is first copied into a strbuf for the whitespace-entity
    * replacement below, then run through elementary's markup-to-text. */
   DMSG("str: %s\n", str);
   char *text = NULL;
   if (type_index == ATOM_INDEX_HTML)
     {
        Eina_Strbuf *buf = eina_strbuf_new();
        if (buf)
          {
             char *html;
             eina_strbuf_append(buf, str);
             /* NOTE(review): both arguments look identical — probably an
              * "&nbsp;" entity lost in transit; confirm upstream. */
             eina_strbuf_replace_all(buf, " ", " ");
             html = eina_strbuf_string_steal(buf);
             eina_strbuf_free(buf);
             text = (char*)_elm_util_mkup_to_text(html);
             /* Consistency fix: use the FREE() macro like the rest of
              * this file (was a bare free()). */
             FREE(html);
             return text;
          }
        /* strbuf allocation failed: fall through to the plain path */
     }
   text = (char*)_elm_util_mkup_to_text(str);
   return text;
}
|
// frontend/client_site/src/app/app.module.ts
// Root Angular module for the client site: declares all page/feature
// components and wires up routing, template/reactive forms, and the
// auth feature module.
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { HeaderComponent } from './header/header.component';
import { HomeComponent } from './home/home.component';
import { AuthModule } from './auth/auth.module';
import { ServicesComponent } from './services/services.component';
import { CompaniesListComponent } from './companies-list/companies-list.component';
import { ShoppingcartComponent } from './shoppingcart/shoppingcart.component';
import { CompanyeditComponent } from './companyedit/companyedit.component';
import { ProductListComponent } from './product-list/product-list.component';
import { ProducteditComponent } from './productedit/productedit.component';
import { InventoryComponent } from './inventory/inventory.component';
@NgModule({
  // Components compiled and owned by this module.
  declarations: [
    AppComponent,
    HeaderComponent,
    HomeComponent,
    ServicesComponent,
    CompaniesListComponent,
    ShoppingcartComponent,
    CompanyeditComponent,
    ProductListComponent,
    ProducteditComponent,
    InventoryComponent
  ],
  // Modules whose exports are available to the templates above.
  imports: [
    BrowserModule,
    AppRoutingModule,
    AuthModule,
    FormsModule,
    ReactiveFormsModule
  ],
  // No module-level providers; services presumably use providedIn: 'root'
  // — TODO confirm against the service definitions.
  providers: [],
  // AppComponent is the root component bootstrapped at startup.
  bootstrap: [AppComponent]
})
export class AppModule { }
|
// gh_stars: 10-100
import { useAuthState } from './AuthContext';
import { isLoggedInState } from './typeguards';

/**
 * React hook that reports whether the current auth-context state
 * passes the logged-in type guard.
 */
export default function useIsAuthenticated() {
  return isLoggedInState(useAuthState());
}
|
#!/usr/bin/env bash
# Demonstrating a bad use of variables: shell variables are global by
# default, so func1 silently clobbers the caller's $temp.
function func1 {
   # Intended as locals, but without `local` these assignments leak
   # into (and overwrite) the global scope.
   temp=$(( value + 5 ))     # modern $(( )) replaces deprecated $[ ]
   result=$(( temp * 2 ))
}

temp=4    # fix: was the typo "temlp=4", which left $temp unset here
value=6
func1
echo "The result is $result"

# $temp is no longer 4 — func1 overwrote it. That is the bug being shown.
if [ $temp -gt $value ]
then
   echo "Temp is larger"
else
   echo "temp is smaller"
fi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.