text stringlengths 1 1.05M |
|---|
import React from 'react';
import LoginForm from '../atoms/LoginForm';
const Home = () => {
return (
<div className="home">
<LoginForm />
<div className="footer">
Created by © {new Date().getFullYear()} <NAME>{' '}
</div>
</div>
);
};
export default Home;
|
<filename>lib/jsonapi/rails/railtie.rb
require 'rails/railtie'
require 'jsonapi/rails/filter_media_type'
require 'jsonapi/rails/log_subscriber'
require 'jsonapi/rails/renderer'
module JSONAPI
  module Rails
    # @private
    #
    # Railtie wiring jsonapi-rails into a Rails application: registers the
    # JSON:API MIME type, a parameter parser for incoming JSON:API request
    # bodies, the +jsonapi+/+jsonapi_errors+ renderers, and a middleware
    # that filters the media type.
    class Railtie < ::Rails::Railtie
      # The official JSON:API media type (https://jsonapi.org/).
      MEDIA_TYPE = 'application/vnd.api+json'.freeze

      # Parses a raw JSON:API request body and exposes the parsed document
      # to controllers under params[:_jsonapi].
      PARSER = lambda do |body|
        data = JSON.parse(body)
        hash = { _jsonapi: data }
        hash.with_indifferent_access
      end

      # Renderers registered with ActionController: one for success
      # documents, one for error documents.
      RENDERERS = {
        jsonapi: SuccessRenderer.new,
        jsonapi_errors: ErrorsRenderer.new
      }.freeze

      initializer 'jsonapi-rails.init' do |app|
        register_mime_type
        register_parameter_parser
        register_renderers
        # Mix the controller DSL into every controller once
        # ActionController has loaded.
        ActiveSupport.on_load(:action_controller) do
          require 'jsonapi/rails/controller'
          include ::JSONAPI::Rails::Controller
        end
        app.middleware.use FilterMediaType
      end

      private

      # Registers the JSON:API media type under the :jsonapi symbol.
      def register_mime_type
        Mime::Type.register(MEDIA_TYPE, :jsonapi)
      end

      # Hooks PARSER into the request-parsing pipeline; the registration
      # API changed in Rails 5.
      def register_parameter_parser
        if ::Rails::VERSION::MAJOR >= 5
          ActionDispatch::Request.parameter_parsers[:jsonapi] = PARSER
        else
          ActionDispatch::ParamsParser::DEFAULT_PARSERS[Mime[:jsonapi]] = PARSER
        end
      end

      # rubocop:disable Metrics/MethodLength
      def register_renderers
        ActiveSupport.on_load(:action_controller) do
          RENDERERS.each do |name, renderer|
            ::ActionController::Renderers.add(name) do |resources, options|
              # Renderer proc is evaluated in the controller context.
              headers['Content-Type'] = Mime[:jsonapi].to_s
              ActiveSupport::Notifications.instrument(
                'render.jsonapi-rails',
                resources: resources,
                options: options
              ) do
                # Depending on whether or not a valid cache object is present
                # in the options, the #render call below will return two
                # slightly different kinds of hash.
                #
                # Both hashes have broadly the following structure, where r is
                # some representation of a JSON::API resource:
                #
                # {
                #   data: [ r1, r2, r3 ],
                #   meta: { count: 12345 },
                #   jsonapi: { version: "1.0" }
                # }
                #
                # For non-cached calls to this method, the `data` field in the
                # return value will contain an array of Ruby hashes.
                #
                # For cached calls, the `data` field will contain an array of
                # JSON strings corresponding to the same data. This happens
                # because jsonapi-renderer caches both the JSON serialization
                # step as well as the assembly of the relevant attributes into
                # a JSON::API-compliant structure. Those JSON strings are
                # created via calls to `to_json`. They are then wrapped in
                # CachedResourcesProcessor::JSONString. This defines a
                # `to_json` method which simply returns self, ie - it attempts
                # to ensure that any further `to_json` calls result in no
                # changes.
                #
                # That isn't what happens in a Rails context, however. Below,
                # the last step is to convert the entire output hash of the
                # renderer into a JSON string to send to the client. If we
                # call `to_json` on the cached output, the already-made JSON
                # strings in the `data` field will be converted again,
                # resulting in malformed data reaching the client. This happens
                # because the ActiveSupport `to_json` takes precedent, meaning
                # the "no-op" `to_json` definition on JSONString never gets
                # executed.
                #
                # We can get around this by using JSON.generate instead, which
                # will use the `to_json` defined on JSONString rather than the
                # ActiveSupport one.
                #
                # However, we can't use JSON.generate on the non-cached output.
                # Doing so means that its `data` field contents are converted
                # with a non-ActiveSupport `to_json`. This means cached and
                # non-cached responses have subtle differences in how their
                # resources are serialized. For example:
                #
                # x = Time.new(2021,1,1)
                #
                # x.to_json
                # => "\"2021-01-01T00:00:00.000+00:00\""
                #
                # JSON.generate x
                # => "\"2021-01-01 00:00:00 +0000\""
                #
                # The different outputs mean we need to take different
                # approaches when converting the entire payload into JSON,
                # hence the check below.
                jsonapi_hash = renderer.render(resources, options, self)
                if jsonapi_hash[:data]&.first&.class == JSONAPI::Renderer::CachedResourcesProcessor::JSONString
                  JSON.generate jsonapi_hash
                else
                  jsonapi_hash.to_json
                end
              end
            end
          end
        end
      end
      # rubocop:enable Metrics/MethodLength
    end
  end
end
|
<filename>src/components/layout.js
import React from 'react'
import Header from '../components/header'
import Footer from '../components/footer'
import { Global, css } from '@emotion/core'
import { ThemeProvider } from 'emotion-theming'
import styled from '@emotion/styled'
import { theme } from '../globals'
const Layout = props => {
return (
<React.Fragment>
<ThemeProvider theme={theme}>
<Global
styles={css`
* {
margin: 0;
box-sizing: border-box;
}
html,
body {
margin: 0;
background-color: ${theme.colors.blue};
color: ${theme.colors.whites[9]};
}
* + * {
margin-top: 1rem;
}
`}
/>
<Header />
<Main>{props.children}</Main>
<Footer />
</ThemeProvider>
</React.Fragment>
)
}
export default Layout
const Main = styled.main`
color: #000;
max-width: 90vw;
width: 650px;
margin: 4rem auto 4rem;
`
|
package ua.kata;
import java.util.stream.IntStream;
import io.reactivex.Observable;
import io.reactivex.Single;
public class BowlingGame {
    /** A game always consists of ten frames. */
    private static final int FRAME_NUMBER = 10;

    /** Pins knocked down per roll; 21 is the most rolls one game can hold. */
    private int[] rolls = new int[21];
    /** Index of the next free slot in {@link #rolls}. */
    private int current;

    /** Records the number of pins knocked down by one roll. */
    public void roll(int pin) {
        rolls[current++] = pin;
    }

    /** Totals the game by summing the points of all ten frames. */
    public Single<Integer> score() {
        return frameIndexes()
                .map(this::computeFramePoints)
                .reduce(0, Integer::sum);
    }

    /** Emits the index of the first roll of each of the ten frames. */
    private Observable<Integer> frameIndexes() {
        Integer[] indexes = IntStream.iterate(0, this::generateNextFrameIndex)
                .limit(FRAME_NUMBER)
                .boxed()
                .toArray(Integer[]::new);
        return Observable.fromArray(indexes);
    }

    /** Points earned by the frame starting at the given roll index. */
    private Integer computeFramePoints(Integer frameIndex) {
        if (isStrike(frameIndex)) {
            return 10 + strikeBonus(frameIndex);
        }
        if (isSpare(frameIndex)) {
            return 10 + spareBonus(frameIndex);
        }
        return rolls[frameIndex] + rolls[frameIndex + 1];
    }

    /** A strike frame consumes one roll; every other frame consumes two. */
    private int generateNextFrameIndex(int frameIndex) {
        if (isStrike(frameIndex)) {
            return frameIndex + 1;
        }
        return frameIndex + 2;
    }

    /** Bonus for a strike: the next two rolls. */
    private int strikeBonus(int frameIndex) {
        return rolls[frameIndex + 1] + rolls[frameIndex + 2];
    }

    /** Bonus for a spare: the first roll of the following frame. */
    private int spareBonus(int frameIndex) {
        return rolls[frameIndex + 2];
    }

    private boolean isStrike(int frameIndex) {
        return rolls[frameIndex] == 10;
    }

    private boolean isSpare(int frameIndex) {
        return rolls[frameIndex] + rolls[frameIndex + 1] == 10;
    }
}
|
#!/bin/bash
# Release pipeline: run the tests, bump the minor version, publish to npm.
# -x traces each command, -e aborts on the first failure.
set -xe
yarn test
npm version minor
npm publish
|
package com.ulfy.master.ui.view;
import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import com.ulfy.android.mvvm.IViewModel;
import com.ulfy.android.time.OnTimeRecordListener;
import com.ulfy.android.time.TimeRecorder;
import com.ulfy.android.ui_injection.Layout;
import com.ulfy.android.ui_injection.ViewById;
import com.ulfy.android.ui_injection.ViewClick;
import com.ulfy.master.R;
import com.ulfy.master.application.vm.Time1VM;
import com.ulfy.master.infrastructure.AppConfig;
import com.ulfy.master.ui.base.BaseView;
@Layout(id = R.layout.view_time1)
public class Time1View extends BaseView {
    @ViewById(id = R.id.startRecordBT) private Button startRecordBT;
    @ViewById(id = R.id.stopRecordBT) private Button stopRecordBT;
    @ViewById(id = R.id.resetRecordBT) private Button resetRecordBT;
    @ViewById(id = R.id.recordTV) private TextView recordTV;
    private Time1VM vm;

    public Time1View(Context context) {
        super(context);
        init(context, null);
    }

    public Time1View(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs);
    }

    private void init(Context context, AttributeSet attrs) {
        // Register the time-tracking callback; while tracking is active it fires once
        // per second. Real-time processing can be done inside this callback.
        TimeRecorder.setOnTimeRecordListener(AppConfig.TIMER_KEY_RECORD_PLAY_GAME, new OnTimeRecordListener() {
            @Override public void onTimeRecording(String key) {
                /*
                TimeRecorder.isSecondTimeArrived(TIMER_KEY_RECORD_PLAY_GAME, needSecond); // has the tracked time reached the given number of seconds
                TimeRecorder.isMinuteTimeArrived(TIMER_KEY_RECORD_PLAY_GAME, needSecond); // has the tracked time reached the given number of minutes
                TimeRecorder.getRecordSecond(TIMER_KEY_RECORD_PLAY_GAME) // number of seconds tracked so far
                */
                // Refresh the currently tracked duration shown on the page.
                recordTV.setText(String.format("计时时间:%d", TimeRecorder.getRecordSecond(AppConfig.TIMER_KEY_RECORD_PLAY_GAME)));
            }
        });
    }

    @Override public void bind(IViewModel model) {
        vm = (Time1VM) model;
        // Show the duration that has already been tracked when the page is bound.
        recordTV.setText(String.format("计时时间:%d", TimeRecorder.getRecordSecond(AppConfig.TIMER_KEY_RECORD_PLAY_GAME)));
    }

    /**
     * click: startRecordBT, stopRecordBT, resetRecordBT
     * Time tracking: start, stop and reset the recording.
     */
    @ViewClick(ids = {R.id.startRecordBT, R.id.stopRecordBT, R.id.resetRecordBT})
    private void clickRecordTime(View v) {
        switch (v.getId()) {
            case R.id.startRecordBT:
                TimeRecorder.startRecord(AppConfig.TIMER_KEY_RECORD_PLAY_GAME);
                break;
            case R.id.stopRecordBT:
                TimeRecorder.stopRecord(AppConfig.TIMER_KEY_RECORD_PLAY_GAME);
                break;
            case R.id.resetRecordBT:
                // Resetting drops the tracked duration back to 0, so the page must be
                // refreshed immediately to keep the display correct.
                TimeRecorder.resetTimeRecorder(AppConfig.TIMER_KEY_RECORD_PLAY_GAME);
                recordTV.setText(String.format("计时时间:%d", TimeRecorder.getRecordSecond(AppConfig.TIMER_KEY_RECORD_PLAY_GAME)));
                break;
        }
    }

    @Override protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        /*
        Time tracking is a long-running process; leaving the callback registered by
        this page in place would leak the view. Either of the following prevents the leak:
        1. remove the callback listener
        2. stop the tracking entirely
        Which one to use depends on the actual business requirements.
        */
        // TimeRecorder.setOnTimeRecordListener(AppConfig.TIMER_KEY_RECORD_PLAY_GAME, null); // remove the tracking callback
        TimeRecorder.stopRecord(AppConfig.TIMER_KEY_RECORD_PLAY_GAME); // stop time tracking
    }
}
# Resolve paths relative to this spec file so the suite can be run from
# any working directory. NOTE: `direc` is also used further down, in the
# ".gem_root" examples.
direc = File.dirname(__FILE__)

require 'pry'
require "#{direc}/../lib/pry-doc"
require "#{direc}/helper"
require "#{direc}/gem_with_cext/gems/sample"
require 'set'
require 'fileutils'
require 'readline'

# Announce the versions under test in the spec output.
puts "Testing pry-doc version #{PryDoc::VERSION}..."
puts "Ruby version: #{RUBY_VERSION}"
# Specs for pry-doc: C symbol extraction via Pry::CInternals::CodeFetcher
# (against fixture C source) and Pry::MethodInfo lookups across core,
# stdlib, C-extension and pure-Ruby methods.
RSpec.describe PryDoc do
  describe Pry::CInternals::CodeFetcher do
    # Strip ANSI colour codes so assertions can match plain text.
    def decolor(str)
      Pry::Helpers::Text.strip_color(str)
    end

    before do
      # Reset the cached symbol map and point the fetcher at fixture C source.
      described_class.symbol_map = nil
      described_class.ruby_source_folder = File.join(File.dirname(__FILE__), "fixtures/c_source")
    end

    context "no tags file exists" do
      it "attempts to install and setup ruby" do
        described_class.ruby_source_folder = File.join(File.dirname(__FILE__), "fishface")
        expect(described_class.ruby_source_installer).to receive(:install)
        # will try to read from the 'created' tags file, this will error, so rescue
        # (since we're stubbing out `install` no tags file
        # ever gets created)
        described_class.symbol_map rescue nil
      end
    end

    describe ".symbol_map" do
      it "generates the map with the correct symbols" do
        expect(described_class.symbol_map).to have_key("foo")
        expect(described_class.symbol_map).to have_key("baby")
        expect(described_class.symbol_map).to have_key("wassup")
        expect(described_class.symbol_map).to have_key("bar")
        expect(described_class.symbol_map).to have_key("baz")
        expect(described_class.symbol_map).to have_key("cute_enum_e")
        expect(described_class.symbol_map).to have_key("baby_enum")
        expect(described_class.symbol_map).to have_key("cutie_pie")
      end
    end

    describe "#fetch_all_definitions" do
      it "returns both code and file name" do
        file_ = described_class.symbol_map["foo"].first.file
        _, file = described_class.new.fetch_all_definitions("foo")
        expect(file).to eq file_
      end

      it "returns the code for all symbols" do
        code, = described_class.new.fetch_all_definitions("foo")
        expect(decolor code).to include <<EOF
int
foo(void) {
}
EOF
        expect(decolor code).to include <<EOF
char
foo(int*) {
return 'a';
}
EOF
      end
    end

    describe "#fetch_first_definition" do
      it "returns both code and file name" do
        code, file = described_class.new.fetch_first_definition("wassup")
        expect(decolor code).to include "typedef int wassup;"
        expect(file).to eq File.join(__dir__, "fixtures/c_source/hello.c")
      end

      context "with line numbers" do
        context "normal style (actual line numbers)" do
          it "displays actual line numbers" do
            code, = described_class.new(line_number_style: :'line-numbers').fetch_first_definition("bar")
            expect(decolor code).to include <<EOF
11: enum bar {
12: alpha,
13: beta,
14: gamma
15: };
EOF
          end

          context "base one style (line numbers start with 1)" do
            it "displays actual line numbers" do
              code, = described_class.new(line_number_style: :'base-one').fetch_first_definition("bar")
              expect(decolor code).to include <<EOF
1: enum bar {
2: alpha,
3: beta,
4: gamma
5: };
EOF
            end
          end
        end
      end

      it "returns the code for a function" do
        code, = described_class.new.fetch_first_definition("foo")
        expect(decolor code).to include(<<EOF
int
foo(void) {
}
EOF
        ).or include <<EOF
char
foo(int*) {
return 'a';
}
EOF
      end

      it "returns the code for an enum" do
        code, = described_class.new.fetch_first_definition("bar")
        expect(decolor code).to include <<EOF
enum bar {
alpha,
beta,
gamma
};
EOF
      end

      it "returns the code for a macro" do
        code, = described_class.new.fetch_first_definition("baby")
        expect(decolor code).to include('#define baby do {')
        expect(decolor code).to include('printf("baby");')
        expect(decolor code).to include('while(0)')
      end

      it "returns the code for a typedef" do
        code, = described_class.new.fetch_first_definition("wassup")
        expect(decolor code).to include('typedef int wassup;')
      end

      it "returns the code for a struct" do
        code, = described_class.new.fetch_first_definition("baz")
        expect(decolor code).to include <<EOF
struct baz {
int x;
int y;
};
EOF
      end

      it "returns the code for a typedef'd struct" do
        code, = described_class.new.fetch_first_definition("cutie_pie")
        expect(decolor code).to include <<EOF
typedef struct {
int lovely;
char horse;
} cutie_pie;
EOF
      end

      it "returns the code for a typedef'd enum" do
        code, = described_class.new.fetch_first_definition("baby_enum")
        expect(decolor code).to include <<EOF
typedef enum cute_enum_e {
lillybing,
tote,
lilt
} baby_enum;
EOF
      end

      # The fetcher must cope with the various ways a C function
      # definition can be laid out across lines.
      context "function definitions" do
        context "return type is on same line" do
          subject do
            decolor described_class.new
              .fetch_first_definition("tinkywinky")
              .first
          end

          it do is_expected.to include <<EOF
void tinkywinky(void) {
}
EOF
          end
        end

        context "curly brackets on subsequent line" do
          subject do
            decolor described_class.new
              .fetch_first_definition("lala")
              .first
          end

          it do is_expected.to include <<EOF
void lala(void)
{
}
EOF
          end
        end

        context "return type on prior line and curly brackets on subsequent" do
          subject do
            decolor described_class.new
              .fetch_first_definition("po")
              .first
          end

          it do is_expected.to include <<EOF
int*
po(void)
{
}
EOF
          end
        end
      end
    end
  end

  describe "core C methods" do
    it 'should look up core (C) methods' do
      obj = Pry::MethodInfo.info_for(method(:puts))
      expect(obj.source).not_to be_nil
    end

    it 'should look up core (C) instance methods' do
      obj = Module.module_eval do
        Pry::MethodInfo.info_for(instance_method(:include))
      end
      expect(obj.source).not_to be_nil
    end

    it 'should look up core (C) class method (by Method object)' do
      obj = Module.module_eval do
        Pry::MethodInfo.info_for(Dir.method(:mkdir))
      end
      expect(obj.source).not_to be_nil
    end

    it 'should look up core (C) class method (by UnboundMethod object)' do
      obj = Module.module_eval do
        Pry::MethodInfo.info_for(class << Dir; instance_method(:mkdir); end)
      end
      expect(obj.source).not_to be_nil
    end
  end

  describe "eval methods" do
    it 'should return nil for eval methods' do
      TOPLEVEL_BINDING.eval("def hello; end")
      obj = Pry::MethodInfo.info_for(method(:hello))
      expect(obj).to be_nil
    end
  end

  describe "pure ruby methods" do
    it 'should look up ruby methods' do
      obj = Pry::MethodInfo.info_for(C.new.method(:message))
      expect(obj).not_to be_nil
    end

    it 'should look up ruby instance methods' do
      obj = Pry::MethodInfo.info_for(C.instance_method(:message))
      expect(obj).not_to be_nil
    end
  end

  describe "Ruby stdlib methods" do
    it "should look up ruby stdlib method" do
      obj = Pry::MethodInfo.info_for(Set.instance_method(:union))
      expect(obj).not_to be_nil
    end
  end

  describe "C ext methods" do
    it "should lookup C ext methods" do
      obj = Pry::MethodInfo.info_for(Sample.instance_method(:gleezor))
      expect(obj).not_to be_nil
    end

    it "should lookup aliased C ext methods" do
      obj = Pry::MethodInfo.info_for(Sample.instance_method(:remove))
      expect(obj).not_to be_nil
    end

    it "should lookup C ext instance methods even when its owners don't have any ruby methods" do
      obj = Pry::MethodInfo.info_for(Sample::A::B.instance_method(:gleezor))
      expect(obj).not_to be_nil
    end

    it "should lookup C ext class methods even when its owners don't have any ruby methods" do
      obj = Pry::MethodInfo.info_for(Sample::A::B.method(:gleezor))
      expect(obj).not_to be_nil
    end
  end

  describe "C stdlib methods" do
    it "finds them" do
      obj = Pry::MethodInfo.info_for(Readline.method(:readline))
      expect(obj).not_to be_nil
    end

    it "finds well hidden docs like BigDecimal docs" do
      require 'bigdecimal'
      obj = Pry::MethodInfo.info_for(BigDecimal.instance_method(:finite?))
      expect(obj).not_to be_nil
    end
  end

  describe ".aliases" do
    it "should return empty array if method does not have any alias" do
      aliases = Pry::MethodInfo.aliases(Sample.instance_method(:some_meth))
      expect(aliases).to be_empty
    end

    it "should return aliases of a (C) method" do
      orig = Sample.instance_method(:gleezor)
      copy = Sample.instance_method(:remove)
      aliases = Pry::MethodInfo.aliases(orig)
      expect(aliases).to eq([copy])
      aliases = Pry::MethodInfo.aliases(copy)
      expect(aliases).to eq([orig])
    end

    it "should return aliases of a ruby method" do
      C.class_eval { alias msg message }
      orig = C.instance_method(:message)
      copy = C.instance_method(:msg)
      aliases = Pry::MethodInfo.aliases(orig)
      expect(aliases).to eq([copy])
      aliases = Pry::MethodInfo.aliases(copy)
      expect(aliases).to eq([orig])
    end

    it "should return aliases of protected method" do
      orig = Sample.instance_method(:gleezor_1)
      copy = Sample.instance_method(:remove_1)
      aliases = Pry::MethodInfo.aliases(orig)
      expect(aliases).to eq([copy])
    end

    it "should return aliases of private method" do
      orig = Sample.instance_method(:gleezor_2)
      copy = Sample.instance_method(:remove_2)
      aliases = Pry::MethodInfo.aliases(orig)
      expect(aliases).to eq([copy])
    end

    it 'does not error when given a singleton method' do
      c = Class.new do
        def self.my_method; end
      end
      expect { Pry::MethodInfo.aliases(c.method(:my_method)) }.not_to raise_error
    end
  end

  describe ".gem_root" do
    it "should return the path to the gem" do
      path = Pry::WrappedModule.new(Sample).source_location[0]
      expect(Pry::MethodInfo.gem_root(path)).
        to eq(File.expand_path("gem_with_cext/gems", direc))
    end

    it "should not be fooled by a parent 'lib' or 'ext' dir" do
      path = "/foo/.rbenv/versions/1.9.3-p429/lib/ruby/gems/"\
             "1.9.1/gems/activesupport-4.0.2/lib/active_support/"\
             "core_ext/kernel/reporting.rb"
      expect(Pry::MethodInfo.gem_root(path))
        .to eq('/foo/.rbenv/versions/1.9.3-p429/lib/ruby/' \
               'gems/1.9.1/gems/activesupport-4.0.2')
    end
  end

  describe "1.9 and higher specific docs" do
    it "finds Kernel#require_relative" do
      obj = Pry::MethodInfo.info_for(Kernel.instance_method(:require_relative))
      expect(obj).not_to be_nil
    end
  end

  # For the time being, Pry doesn't define `mri_20?` helper method.
  if RUBY_VERSION =~ /2.0/ && RbConfig::CONFIG['ruby_install_name'] == 'ruby'
    describe "2.0 specific docs" do
      it "finds Module#refine" do
        obj = Pry::MethodInfo.info_for(Module.instance_method(:refine))
        expect(obj).not_to be_nil
      end
    end
  end
end
|
/**
 * Entry activity: wires a RecyclerView with a linear layout manager and a
 * {@code ContactAdapter} backed by the contact list.
 */
public class MainActivity extends AppCompatActivity {
    //UI Elements
    private RecyclerView mRecyclerView;
    private RecyclerView.Adapter mAdapter;
    private RecyclerView.LayoutManager mLayoutManager;
    //Dataset
    private List<Contact> contactsList;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        //Initialize UI Elements
        mRecyclerView = (RecyclerView) findViewById(R.id.recycler_view);
        //Use a linear layout manager
        mLayoutManager = new LinearLayoutManager(this);
        mRecyclerView.setLayoutManager(mLayoutManager);
        //Get contact list from API
        contactsList = getContactsList();
        //Set adapter
        mAdapter = new ContactAdapter(contactsList);
        mRecyclerView.setAdapter(mAdapter);
    }

    /**
     * Returns the contacts to display.
     *
     * Bug fix: the original method body had no return statement at all, which
     * does not compile. Until the remote API call is implemented, an empty
     * mutable list is returned so the adapter can render safely.
     */
    public List<Contact> getContactsList() {
        // TODO(review): replace with the real remote API call.
        return new java.util.ArrayList<>();
    }
}
public class ContactAdapter extends RecyclerView.Adapter<ContactAdapter.ViewHolder> {
private List<Contact> contactsList;
public ContactAdapter(List<Contact> contactsList) {
this.contactsList = contactsList;
}
@Override
public ContactAdapter.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
//Inflate the layout
View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.contact_item, parent, false);
//Pass the view to ViewHolder
return new ViewHolder(view);
}
@Override
public void onBindViewHolder(ContactAdapter.ViewHolder holder, int position) {
//Get the contact item at the specified position
Contact contact = contactsList.get(position);
//Set values
holder.nameText.setText(contact.getName());
holder.phoneText.setText(contact.getPhone());
}
@Override
public int getItemCount() {
return contactsList.size();
}
//ViewHolder class
public class ViewHolder extends RecyclerView.ViewHolder {
private TextView nameText;
private TextView phoneText;
public ViewHolder(View itemView) {
super(itemView);
nameText = (TextView) itemView.findViewById(R.id.contact_name);
phoneText = (TextView) itemView.findViewById(R.id.contact_phone);
}
}
} |
//let { SyncWaterfallHook } = require('tapable');
/**
 * Minimal re-implementation of tapable's SyncWaterfallHook: each tapped
 * function receives the previous tap's return value, and `call` yields
 * the final value of the chain.
 */
class SyncWaterfallHook {
  /**
   * @param {string[]} [argNames] - informational argument names, mirroring
   *   tapable's constructor signature (previously accepted but ignored).
   */
  constructor(argNames = []) {
    this.argNames = argNames;
    this.hooks = [];
  }

  /**
   * Registers a listener. The name is accepted for API compatibility with
   * tapable but is not used.
   */
  tap(name, fn) {
    this.hooks.push(fn);
  }

  /**
   * Triggers the waterfall: the first tap gets the call arguments, each
   * subsequent tap gets the previous tap's return value.
   *
   * Bug fix: the final result was computed but never returned, so `call`
   * always yielded `undefined`.
   *
   * @returns {*} the return value of the last tapped function.
   */
  call(...args) {
    let result;
    for (let i = 0; i < this.hooks.length; i += 1) {
      const hook = this.hooks[i];
      result = i === 0 ? hook(...args) : hook(result);
    }
    return result;
  }
}
// Each tap's return value is passed on to the next tapped function.
let queue = new SyncWaterfallHook(['name']);
// Register the listener functions.
queue.tap('1', function (name) {
console.log(name, 1);
return '1';
});
queue.tap('2', function (data) {
console.log(data, 2);
return "2"
});
queue.tap('3', function (data) {
console.log(data, 3);
});
queue.call('zfpx');// `call` triggers the event, invoking the taps in order
# Create the tables
CREATE TABLE products (
    productId INT NOT NULL AUTO_INCREMENT,
    name VARCHAR(50) NOT NULL,
    description VARCHAR(200) NOT NULL,
    price DECIMAL(10,2) NOT NULL,
    PRIMARY KEY(productId)
);

CREATE TABLE customers (
    customerId INT NOT NULL AUTO_INCREMENT,
    name VARCHAR(50) NOT NULL,
    address VARCHAR(250) NOT NULL,
    PRIMARY KEY(customerId)
);

# Create the relationships
CREATE TABLE orders (
    orderId INT NOT NULL AUTO_INCREMENT,
    customerId INT NOT NULL,
    orderDate DATE NOT NULL,
    PRIMARY KEY(orderId),
    FOREIGN KEY(customerId) REFERENCES customers(customerId)
);

# Junction table for the many-to-many order/product relationship.
CREATE TABLE order_products (
    orderId INT NOT NULL,
    productId INT NOT NULL,
    quantity INT NOT NULL,
    # Fix: the junction table had no primary key, which allowed duplicate
    # (order, product) rows; the composite key enforces one row per pair.
    PRIMARY KEY(orderId, productId),
    FOREIGN KEY(orderId) REFERENCES orders(orderId),
    FOREIGN KEY(productId) REFERENCES products(productId)
);
<filename>src/components/create-events/form-container/form-page-four/search-friends/userlist/user-card/userCard.styles.js
import { makeStyles } from '@material-ui/core/styles';
/**
 * Styles for the user card rendered in the create-event friend-search list.
 */
export const styles = makeStyles((theme) => ({
  // Card row: text block on the left, action icons pushed to the right.
  root: {
    display: 'flex',
    alignItems: 'center',
    justifyContent: 'space-between',
    width: '100%',
    margin: '5px',
  },
  // Fixed-width slot for the trailing action icons.
  iconContainer: {
    display: 'flex',
    alignItems: 'center',
    justifyContent: 'flex-end',
    width: '60px',
    marginRight: '2%',
  },
  // Stacked name/email column.
  textContainer: {
    display: 'flex',
    flexDirection: 'column',
    marginLeft: '5%',
    width: '20%',
    '& div': {
      display: 'flex',
      textAlign: 'left',
      fontSize: '1.4rem',
      color: '#1A0F2C',
      fontWeight: '500',
      lineStyle: 'normal',
      // Shrink the name on small screens and keep it on one line.
      '& #name': {
        [theme.breakpoints.down('sm')]: {
          fontSize: '.9rem',
          whiteSpace: 'nowrap',
        },
      },
    },
    // De-emphasise the email; hide it entirely on small screens.
    '& #email': {
      color: '#000000',
      opacity: '0.3',
      [theme.breakpoints.down('sm')]: {
        display: 'none',
      },
    },
  },
}));
|
<reponame>AlexDochioiu/Logger
/*
* Copyright 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jeefo.android.jeefologger;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import java.util.Locale;
/**
* Created by <NAME> on 5/26/2018
*/
class StringUtils {
    // Logged when placeholders exist in the message but no args were supplied.
    private static final String LOG_MISSING_ALL_ARGS = "no_args_passed"; // no args when there are placeholders
    // Logged when args are present but the message is null or empty.
    private static final String LOG_WITH_ARGS_AND_NO_MESSAGE = "no_message_provided"; // args present but message is null or empty string

    /**
     * Computes the formatted message by replacing the placeholders with the values.
     *
     * @param ex      the exception whose message will be logged
     * @param message the message containing placeholders
     * @param args    the {@link Object} arguments to replace the placeholders
     * @return the formatted string message
     * <p>
     * NOTE: If formatting fails (more placeholders than arguments, or a type
     * mismatch), the message is kept untouched and the arguments are listed at
     * the end of the log message, before the exception message. Extra
     * arguments beyond the placeholders are simply ignored by String.format.
     */
    static String getFormattedMessage(@Nullable Exception ex, @Nullable String message, Object... args) {
        final StringBuilder builder = new StringBuilder();
        final boolean hasMessage = message != null && !message.equals("");

        if (hasMessage) {
            // Try the normal placeholder substitution first.
            try {
                builder.append(String.format(Locale.UK, message, args));
            } catch (Exception formatFailure) {
                // Formatting failed: keep the raw message, then list the args.
                builder.append(message);
                appendArgsAtEndOfString(builder, args);
            }
        } else if (args.length != 0) {
            // No message but args exist: flag it and list the args.
            builder.append(LOG_WITH_ARGS_AND_NO_MESSAGE);
            appendArgsAtEndOfString(builder, args);
        }

        // Always append the exception's message last, when one is given.
        if (ex != null) {
            builder.append(" :: ").append(ex.getMessage());
        }
        return builder.toString();
    }

    /**
     * Appends all the args (via toString()) at the end of a {@link StringBuilder}.
     * <p>
     * NOTE: Only called when String.format() could not be applied to the
     * message (args do not match placeholders, or args without a message).
     *
     * @param stringBuilder the {@link StringBuilder} receiving the args
     * @param args          the {@link Object} values appended using toString()
     */
    private static void appendArgsAtEndOfString(@NonNull StringBuilder stringBuilder, Object... args) {
        stringBuilder.append(" - args: ");
        if (args.length == 0) {
            stringBuilder.append(LOG_MISSING_ALL_ARGS);
            return;
        }
        for (Object arg : args) {
            stringBuilder.append(arg.toString()).append(";");
        }
    }
}
|
/* eslint-disable no-unused-expressions */
/* eslint-disable no-undef */
const { expect } = require('chai')
const app = require('../server')
/**
 * Integration tests for the user endpoints: logs in once in `before`,
 * then exercises GET /user with the authenticated agent.
 *
 * Bug fix: the `.end()` callbacks were `async` functions, so a failed
 * `expect` became an unhandled promise rejection and `done` was never
 * called — the test timed out instead of reporting the assertion error.
 * Assertions are now wrapped in try/catch and failures passed to `done`.
 */
describe('User Controller', function () {
  const request = require('supertest').agent(app)
  this.timeout(20000)
  let user

  before(function (done) {
    const url = '/auth/login'
    user = { username: 'test', password: '<PASSWORD>' }
    request
      .post(url)
      .send(user)
      .set('Accept', 'application/json')
      .end(function (error, response) {
        try {
          expect(error).to.be.null
          expect(response.header.location).to.equals('/')
          expect(response.body).to.be.empty
          done()
        } catch (err) {
          done(err)
        }
      })
  })

  it('should get user', function (done) {
    const url = '/user'
    request
      .get(url)
      .end(function (error, response) {
        try {
          expect(error).to.be.null
          expect(response.body).to.have.property('data')
          expect(response.body.data).to.have.property('_id')
          expect(response.body.data.username).to.be.equal(user.username)
          done()
        } catch (err) {
          done(err)
        }
      })
  })
})
|
def parseLicenseFile(file_path):
    """Parse a simple ``Key: value`` license file.

    Recognised keys are ``Name``, ``Type`` and ``Valid Until``; any other
    lines are ignored.

    Args:
        file_path: Path to the license file.

    Returns:
        dict: Mapping of the recognised fields to their stripped string
        values, or ``{"Error": ...}`` if the file cannot be read.
    """
    try:
        with open(file_path, 'r') as file:
            lines = file.readlines()
        license_info = {}
        for line in lines:
            # Bug fix: split on the FIRST colon only, so values that
            # themselves contain colons (times, URLs) are kept intact.
            if line.startswith("Name:"):
                license_info["Name"] = line.split(":", 1)[1].strip()
            elif line.startswith("Type:"):
                license_info["Type"] = line.split(":", 1)[1].strip()
            elif line.startswith("Valid Until:"):
                license_info["Valid Until"] = line.split(":", 1)[1].strip()
        return license_info
    except FileNotFoundError:
        return {"Error": "File not found"}
    except Exception as e:
        return {"Error": str(e)}
package io.github.biezhi.wechat.api.model;
import io.github.biezhi.wechat.WeChatBot;
import io.github.biezhi.wechat.api.enums.ApiURL;
import lombok.Data;
/**
 * Model describing a download request (which endpoint to fetch from and
 * how to name/place the resulting file).
 *
 * @author biezhi
 * @date 2018/1/21
 */
@Data
public class DownLoad {
    // Target API endpoint the file is fetched from.
    private ApiURL apiURL;
    // File extension appended to the message id when building the file name.
    private String suffix;
    // Message id the download belongs to; used as the base file name.
    private String msgId;
    // Positional parameters substituted into the API URL.
    private Object[] params;
    // When true the file is stored under a per-day sub-directory.
    private boolean saveByDay;

    public DownLoad(ApiURL apiURL, String... params) {
        this.apiURL = apiURL;
        this.params = params;
    }

    // Fluent setter: associates the download with a message id.
    public DownLoad msgId(String msgId) {
        this.msgId = msgId;
        return this;
    }

    // Fluent setter: sets the file extension.
    public DownLoad suffix(String suffix) {
        this.suffix = suffix;
        return this;
    }

    // Fluent setter: enables per-day directory layout.
    public DownLoad saveByDay() {
        this.saveByDay = true;
        return this;
    }

    // File name = message id + extension.
    public String getFileName() {
        return this.msgId + this.suffix;
    }

    // Destination directory: the bot's assets dir plus the endpoint's sub-dir.
    public String getDir(WeChatBot bot) {
        return bot.config().assetsDir() + "/" + apiURL.getDir();
    }
}
|
#!/bin/bash
# Abort the script as soon as any command fails.
trap 'exit' ERR
# The first argument, $1, is a list of environment-variable flags --
# mostly those for resources such as
# DATABASE_URL, REDIS_URL etc
# example: -e DATABASE_URL=postgres://postgres@postgres:5432 -e REDIS_URL=redis://redis
# ----
# The second argument is the Docker image for the language of the project,
# which is obtained from the GitHub details for the project.
DOCKER_ENVS=${1}
DOCKER_IMAGE=${2}
# NOTE: $DOCKER_ENVS is deliberately left unquoted so that each
# "-e KEY=VALUE" pair word-splits into separate docker arguments.
docker run --rm -v ${CI_DIR}/.ssh:/.ssh \
--network ci_default $DOCKER_ENVS $DOCKER_IMAGE
|
import React, { Component } from 'react'
import DropdownComponent from '../../components/dropdown.component';
import { bindActionCreators } from 'redux'
import { connect } from 'react-redux'
import {
selectLanguage
} from '../../modules/event'
class NavBar extends Component {
constructor(props) {
super(props);
}
render() {
const { languages, selectLanguage, selectedLanguages } = this.props;
return (
<nav className="nav">
<div className="container-fluid clearfix">
<div className="navbar-header">
<a> Book My Show </a>
</div>
<ul className="pull-right nav-bar">
<li className="nav-bar__item">
<DropdownComponent items={languages} selectedItem={selectedLanguages} handleChange={(selectedLanguages) => selectLanguage(selectedLanguages)}></DropdownComponent>
</li>
</ul>
</div>
</nav>
)
}
}
// Expose the available languages and the current selection from the
// `event` slice of the store.
const mapStateToProps = (state) => ({
  languages: state.event.languages,
  selectedLanguages: state.event.selectedLanguages,
})

// Bind the selectLanguage action creator to dispatch.
const mapDispatchToProps = (dispatch) =>
  bindActionCreators({ selectLanguage }, dispatch)

export default connect(mapStateToProps, mapDispatchToProps)(NavBar)
|
/// <summary>
/// Data holder for a storage bin: an id, a display code, and the unit ids
/// describing its height and width.
/// </summary>
public class Bin
{
    // Auto-properties replace the hand-written private backing fields;
    // the public surface (properties + legacy accessors) is unchanged.
    public int BinID { get; set; }

    public string Code { get; set; }

    public int HeightUnitID { get; set; }

    public int WidthUnitID { get; set; }

    // Legacy accessor methods kept for callers that predate the property API;
    // they simply delegate to the corresponding property.
    public void SetHeightUnitID(int heightUnitID)
    {
        HeightUnitID = heightUnitID;
    }

    public int GetHeightUnitID()
    {
        return HeightUnitID;
    }

    public void SetWidthUnitID(int widthUnitID)
    {
        WidthUnitID = widthUnitID;
    }

    public int GetWidthUnitID()
    {
        return WidthUnitID;
    }
}
<filename>uvicore/console/asyncclick/exceptions.py
from ._compat import filename_to_ui
from ._compat import get_text_stderr
from .utils import echo
def _join_param_hints(param_hint):
if isinstance(param_hint, (tuple, list)):
return " / ".join(repr(x) for x in param_hint)
return param_hint
class ClickException(Exception):
    """An exception that Click can handle and show to the user."""

    #: The exit code for this exception
    exit_code = 1

    def __init__(self, message):
        super().__init__(message)
        self.message = message

    def format_message(self):
        return self.message

    def __str__(self):
        return self.message

    def show(self, file=None):
        # Default to stderr when no explicit stream is given.
        out = file if file is not None else get_text_stderr()
        echo("Error: {}".format(self.format_message()), file=out)
class UsageError(ClickException):
    """An internal exception that signals a usage error. This typically
    aborts any further handling.
    :param message: the error message to display.
    :param ctx: optionally the context that caused this error. Click will
    fill in the context automatically in some situations.
    """
    #: Usage errors exit with status 2 (distinct from generic errors' 1).
    exit_code = 2
    def __init__(self, message, ctx=None):
        ClickException.__init__(self, message)
        self.ctx = ctx
        # Cache the command for show(); None when no context was supplied.
        self.cmd = self.ctx.command if self.ctx else None
    def show(self, file=None):
        if file is None:
            file = get_text_stderr()
        color = None
        hint = ""
        # Only suggest the help option when the command actually defines one.
        if self.cmd is not None and self.cmd.get_help_option(self.ctx) is not None:
            hint = "Try '{} {}' for help.\n".format(
                self.ctx.command_path, self.ctx.help_option_names[0]
            )
        if self.ctx is not None:
            color = self.ctx.color
            # With a context, print the usage line and hint before the error.
            echo("{}\n{}".format(self.ctx.get_usage(), hint), file=file, color=color)
        echo("Error: {}".format(self.format_message()), file=file, color=color)
class BadParameter(UsageError):
    """An exception that formats out a standardized error message for a
    bad parameter. This is useful when thrown from a callback or type as
    Click will attach contextual information to it (for instance, which
    parameter it is).
    .. versionadded:: 2.0
    :param param: the parameter object that caused this error. This can
    be left out, and Click will attach this info itself
    if possible.
    :param param_hint: a string that shows up as parameter name. This
    can be used as alternative to `param` in cases
    where custom validation should happen. If it is
    a string it's used as such, if it's a list then
    each item is quoted and separated.
    """
    def __init__(self, message, ctx=None, param=None, param_hint=None):
        UsageError.__init__(self, message, ctx)
        self.param = param
        self.param_hint = param_hint
    def format_message(self):
        # Prefer an explicit hint; otherwise derive one from the parameter.
        if self.param_hint is not None:
            param_hint = self.param_hint
        elif self.param is not None:
            param_hint = self.param.get_error_hint(self.ctx)
        else:
            # No parameter information at all: fall back to a generic message.
            return "Invalid value: {}".format(self.message)
        param_hint = _join_param_hints(param_hint)
        return "Invalid value for {}: {}".format(param_hint, self.message)
class MissingParameter(BadParameter):
    """Raised if click required an option or argument but it was not
    provided when invoking the script.
    .. versionadded:: 4.0
    :param param_type: a string that indicates the type of the parameter.
    The default is to inherit the parameter type from
    the given `param`. Valid values are ``'parameter'``,
    ``'option'`` or ``'argument'``.
    """
    def __init__(
        self, message=None, ctx=None, param=None, param_hint=None, param_type=None
    ):
        BadParameter.__init__(self, message, ctx, param, param_hint)
        self.param_type = param_type
    def format_message(self):
        # Resolve the best available parameter hint (may end up None).
        if self.param_hint is not None:
            param_hint = self.param_hint
        elif self.param is not None:
            param_hint = self.param.get_error_hint(self.ctx)
        else:
            param_hint = None
        param_hint = _join_param_hints(param_hint)
        # The parameter "type" label falls back to the param's own type name.
        param_type = self.param_type
        if param_type is None and self.param is not None:
            param_type = self.param.param_type_name
        msg = self.message
        if self.param is not None:
            # Let the parameter's type contribute extra detail
            # (e.g. the list of valid choices).
            msg_extra = self.param.type.get_missing_message(self.param)
            if msg_extra:
                if msg:
                    msg += ". {}".format(msg_extra)
                else:
                    msg = msg_extra
        # Assemble e.g. "Missing option '--name'. <extra detail>"
        return "Missing {}{}{}{}".format(
            param_type,
            " {}".format(param_hint) if param_hint else "",
            ". " if msg else ".",
            msg or "",
        )
    def __str__(self):
        # message may legitimately be None here; synthesize a fallback string.
        if self.message is None:
            param_name = self.param.name if self.param else None
            return "missing parameter: {}".format(param_name)
        else:
            return self.message
class NoSuchOption(UsageError):
    """Raised if click attempted to handle an option that does not
    exist.
    .. versionadded:: 4.0
    """
    def __init__(self, option_name, message=None, possibilities=None, ctx=None):
        if message is None:
            message = "no such option: {}".format(option_name)
        UsageError.__init__(self, message, ctx)
        self.option_name = option_name
        self.possibilities = possibilities
    def format_message(self):
        bits = [self.message]
        if self.possibilities:
            # Offer suggestions: a single candidate reads as a question,
            # multiple candidates are listed in sorted order.
            if len(self.possibilities) == 1:
                bits.append("Did you mean {}?".format(self.possibilities[0]))
            else:
                possibilities = sorted(self.possibilities)
                bits.append("(Possible options: {})".format(", ".join(possibilities)))
        return " ".join(bits)
class BadOptionUsage(UsageError):
    """Raised if an option is generally supplied but the use of the option
    was incorrect. This is for instance raised if the number of arguments
    for an option is not correct.
    .. versionadded:: 4.0
    :param option_name: the name of the option being used incorrectly.
    """

    def __init__(self, option_name, message, ctx=None):
        super().__init__(message, ctx)
        # Remember which option was misused for error reporting.
        self.option_name = option_name
class BadArgumentUsage(UsageError):
    """Raised if an argument is generally supplied but the use of the argument
    was incorrect. This is for instance raised if the number of values
    for an argument is not correct.
    .. versionadded:: 6.0
    """

    def __init__(self, message, ctx=None):
        super().__init__(message, ctx)
class FileError(ClickException):
    """Raised if a file cannot be opened."""

    def __init__(self, filename, hint=None):
        # Keep the UI-friendly filename for the formatted message.
        self.ui_filename = filename_to_ui(filename)
        self.filename = filename
        super().__init__(hint if hint is not None else "unknown error")

    def format_message(self):
        return "Could not open file {}: {}".format(self.ui_filename, self.message)
class Abort(RuntimeError):
    """An internal signalling exception that signals Click to abort."""
    # No payload: raising it is the signal itself.
class Exit(RuntimeError):
    """An exception that indicates that the application should exit with some
    status code.
    :param code: the status code to exit with.
    """
    # NOTE(review): __slots__ has no memory benefit here because RuntimeError
    # instances still carry a __dict__; kept as-is for compatibility.
    __slots__ = ("exit_code",)
    def __init__(self, code=0):
        self.exit_code = code
|
<filename>client/loadtests/load_test.go
package loadtests
import (
"fmt"
ranvier "github.com/eddieowens/ranvier/client"
"github.com/stretchr/testify/suite"
"testing"
)
// TargetHost is the address of the ranvier server exercised by this load test.
const TargetHost = "192.168.3.11"

// LoadTest drives the ranvier client against a live server via testify/suite.
type LoadTest struct {
	suite.Suite
}

// SetupTest is intentionally empty: no per-test fixtures are needed.
func (l *LoadTest) SetupTest() {
}
// TestWebsocketConnections opens numConns concurrent connections against
// TargetHost and pumps update events forever, logging each one.
// NOTE(review): errChan is unbuffered and the test never returns on its own —
// this looks intentional for a soak test (killed externally); the spawned
// goroutines also never exit. Confirm before reusing this pattern elsewhere.
func (l *LoadTest) TestWebsocketConnections() {
	// -- Given
	//
	numConns := 500
	c, err := ranvier.NewClient(&ranvier.ClientOptions{
		Hostname: TargetHost,
	})
	if !l.NoError(err) {
		l.FailNow(err.Error())
	}
	errChan := make(chan bool)
	// -- When
	//
	for i := 0; i < numConns; i++ {
		go func(i int) {
			fmt.Println(fmt.Sprintf("%d starting", i))
			conn, err := c.Connect(&ranvier.ConnOptions{
				Names: []string{"staging-users"},
			})
			if err != nil {
				// Report the failure to the main loop; blocks until consumed.
				fmt.Println("err!", err.Error())
				errChan <- true
			}
			// Pump events for this connection indefinitely.
			for {
				select {
				case event := <-conn.OnUpdate:
					fmt.Println(fmt.Sprintf("%d event! %v", i, event))
				case <-errChan:
					fmt.Println(fmt.Sprintf("closing %d!", i))
				}
			}
		}(i)
	}
	// -- Then
	//
	// Consume connection errors forever; the test is terminated externally.
	for {
		<-errChan
		fmt.Println("killing everything!")
	}
}
// TestLoadTest is the go-test entry point that runs the suite.
func TestLoadTest(t *testing.T) {
	suite.Run(t, new(LoadTest))
}
|
<filename>main.js
// p5.js instance-mode sketch: paints randomly coloured 100px squares onto a
// near-black 1000x600 canvas, one square per animation frame.
let sketch = (p) => {
    // Runs once at startup: create the canvas and fill the background.
    p.setup = () => {
        p.createCanvas(1000, 600);
        p.background(0, 10, 0);
    };
    // Runs every frame: pick a random colour (all channels derived from the
    // canvas width, as in the original) and draw one random square.
    p.draw = () => {
        p.fill(p.random(p.width), p.random(p.width), p.random(p.width));
        p.square(p.random(p.width), p.random(p.height), 100);
    };
};
// Mount the sketch in instance mode inside the #container element.
new p5(sketch, 'container');
|
import {Widget, ScrollView, Button, Slider, TextView, Picker, CheckBox, Switch, TextInput, ui} from 'tabris';
// Static choice lists backing the country and travel-class pickers.
const COUNTRIES = ['Germany', 'Canada', 'USA', 'Bulgaria'];
const CLASSES = ['Business', 'Economy', 'Economy Plus'];
// Declarative form layout; widgets are located by id below and positioned
// via the class-selector rules passed to scrollView.apply().
ui.contentView.append(
  <scrollView id='scrollView' class='stretch' direction='vertical'>
    <textView class='col1 label' alignment='left' text='Name:'/>
    <textInput class='col2 labeled' id='name' message='Full Name'/>
    <textView class='col1 label' text='Flyer Number:'/>
    <textInput class='col2 labeled' keyboard='number' message='Flyer Number'/>
    <textView class='col1 label' text='Passphrase:'/>
    <textInput class='col2 labeled' type='password' message='Passphrase'/>
    <textView class='col1 label' text='Country:'/>
    <picker class='col2 labeled' id='country' itemCount={COUNTRIES.length} itemText={index => COUNTRIES[index]}/>
    <textView class='col1 label' text='Class:'/>
    <picker class='col2 labeled' id='class' itemCount={CLASSES.length} itemText={index => CLASSES[index]}/>
    <textView class='col1 label' text='Seat:'/>
    <radioButton class='col2 labeled' text='Window'/>
    <radioButton class='col2 stacked' text='Aisle'/>
    <radioButton class='col2 stacked' text="Don't care" checked={true} />
    <composite class='group'>
      <textView class='col1 grouped' text='Luggage:'/>
      <slider class='grouped' id='luggageSlider'/>
      <textView class='grouped' id='luggageWeight' text='0 Kg'/>
    </composite>
    <checkBox class='col2 stacked' id='veggie' text='Vegetarian'/>
    <composite class='group'>
      <textView class='col1 grouped' text='Redeem miles:'/>
      <switch class='col2 grouped' id='miles'/>
    </composite>
    <button class='colspan' id='confirm' text='Place Reservation' background='#8b0000' textColor='white'/>
    <textView class='colspan' id='message'/>
  </scrollView>
);
// Widget lookups by id for later wiring and reading of the form state.
let
  scrollView = ui.find(ScrollView).first('#scrollView'),
  confirmButton = ui.find(Button).first('#confirm'),
  luggageSlider = ui.find(Slider).first('#luggageSlider'),
  luggageWeight = ui.find(TextView).first('#luggageWeight'),
  veggie = ui.find(CheckBox).first('#veggie'),
  miles = ui.find(Switch).first('#miles'),
  message = ui.find(TextView).first('#message'),
  nameInput = ui.find(TextInput).first('#name'),
  countryPicker = ui.find(Picker).first('#country'),
  classPicker = ui.find(Picker).first('#class');
// Layout rules keyed by the CSS-like class/id selectors used in the markup.
scrollView.apply({
  '.stretch': {left: 0, right: 0, top: 0, bottom: 0},
  '.col1': {left: 10, width: 120},
  '.col2': {left: 140, right: 10},
  '.label': {top: 'prev() 18'},
  '.labeled': {baseline: 'prev()'},
  '.stacked': {top: 'prev() 10'},
  '.grouped': {centerY: 0},
  '.group': {left: 0, top: 'prev() 10', right: 0},
  '.colspan': {left: 10, right: 10, top: 'prev() 18'},
  '#luggageSlider': {left: 140, right: 70},
  '#luggageWeight': {right: 10, width: 50}
});
// Wire interactions: confirm builds the summary text; the slider mirrors
// its current value into the adjacent weight label.
confirmButton.on({select: () => updateMessage()});
luggageSlider.on({
  selectionChanged: ({value}) => luggageWeight.text = `${value} Kg`
});
/**
 * Collects the current form state into a human-readable summary and writes
 * it to the #message text view.
 */
function updateMessage() {
  message.text = [
    'Flight booked for: ' + nameInput.text,
    'Destination: ' + COUNTRIES[countryPicker.selectionIndex],
    'Seating: ' + createSeating(),
    'Luggage: ' + luggageSlider.selection + ' Kg',
    // BUG FIX: '+' binds tighter than '?:', so the original evaluated
    // ('Meal: ' + veggie.checked) ? 'Vegetarian' : 'Standard', a non-empty
    // (truthy) string, and therefore ALWAYS produced 'Vegetarian'.
    // Parenthesizing the conditional restores the intended meal text.
    'Meal: ' + (veggie.checked ? 'Vegetarian' : 'Standard'),
    'Redeem miles: ' + (miles.checked ? 'Yes' : 'No')
  ].join('\n') + '\n';
}
// Builds the seating summary: the checked seat radio button's label
// (defaulting to 'Anywhere' when none is checked — presumably the radios are
// mutually exclusive, so at most one matches) plus the selected travel class.
function createSeating() {
  let seating = 'Anywhere';
  scrollView.find('RadioButton').forEach((button: tabris.RadioButton) => {
    if (button.checked) {
      seating = button.text;
    }
  });
  seating += ', ' + CLASSES[classPicker.selectionIndex];
  return seating;
}
|
<gh_stars>0
#include <bits/stdc++.h>
#define endl '\n'
using namespace std;
// Reads integers n from stdin until EOF; for each, prints the sum of the
// decimal digits of n!, computed with a little-endian vector-of-digits bignum.
int main() {
    ios::sync_with_stdio(false);
    cin.tie(0);
    int n;
    while(cin>>n){
        // ans holds the digits of the running factorial, least significant first.
        vector<int> ans;
        ans.push_back(1);
        for(int q=2; q<=n; q++){
            // Multiply every digit by q; digits may temporarily exceed 9.
            for(int w=ans.size()-1; w>=0; w--){
                ans[w]*=q;
            }
            // Normalize left-to-right: push carries upward, growing the number
            // when the top digit overflows. The loop bound re-reads ans.size(),
            // so newly appended digits are also normalized.
            // NOTE(review): `w<ans.size()` compares signed int to size_t —
            // harmless here but it draws a compiler warning.
            for(int w=0; w<ans.size(); w++){
                if(ans[w]>=10){
                    if(w==ans.size()-1){
                        ans.push_back(0);
                    }
                    ans[w+1]+=ans[w]/10;
                    ans[w]%=10;
                }
            }
        }
        // Sum the digits of n! and print the result.
        int res=0;
        for(int c:ans){
            res+=c;
        }
        cout<<res<<endl;
    }
}
|
import numpy as np
class BHspcInfo():
    """Plain metadata container for a BH .spc ASCII export.

    Attributes (set by readBHspc):
        macrotime       macro time clock [s]
        microtime       micro time unit [s]
        fifotype        FIFO type
        photons         total number of photons
        invalidphotons  total number of invalid photons
        fifooverruns    number of FIFO overruns
    """
    pass


def _read_header_field(rawdata, label, terminator, start=0):
    """Return (value, stop) for a header field.

    Finds `label` in `rawdata` (searching from `start`), then the first
    ": " after it; `value` is the text between that ": " and the next
    `terminator`, and `stop` is the index of the terminator.
    """
    pos = rawdata.find(label, start)
    pos = rawdata.find(": ", pos) + 2
    stop = rawdata.find(terminator, pos)
    return rawdata[pos:stop], stop


def readBHspc(fname):
    """
    Read an ascii BH FCS file and return the FCS photon arrival times.

    To convert a BH .spc file to ascii, use the Spcm software:
        main > convert > FIFO files, format "ASCII with info header".

    Parameters
    ----------
    fname : str
        *.asc file name.

    Returns
    -------
    data : numpy.ndarray, shape (Nphotons, 4), int64
        Column 0: photon macrotimes [a.u.], column 1: microtimes [a.u.],
        column 2: routing channel, column 3: invalid flag.
    info : BHspcInfo
        Metadata object (see BHspcInfo for the attribute list).
    """
    # Slurp the whole ASCII export into memory; files are header + one
    # whitespace-separated record per photon.
    print("Opening spc ASCII file.")
    with open(fname, mode='r') as file:
        rawdata = file.read()
    print("File opened.")

    info = BHspcInfo()

    # --- header metadata -------------------------------------------------
    # Each field follows the same "<label> ... : <value><terminator>" shape;
    # the shared helper replaces six copy-pasted find/slice blocks.
    value, _ = _read_header_field(rawdata, "Macro Time Clock [ns]", " ")
    info.macrotime = 1e-9 * float(value)

    value, _ = _read_header_field(rawdata, "Micro Time Unit [ps]", "\n")
    info.microtime = 1e-12 * float(value)

    value, _ = _read_header_field(rawdata, "FIFO type ", " ")
    info.fifotype = float(value)

    value, photons_stop = _read_header_field(
        rawdata, "Total number of extracted photons", "\n")
    Nphotons = int(value)
    info.photons = Nphotons

    # The invalid-photon count is located *after* the photons line, matching
    # the original search order (the bare word "invalid" is too ambiguous to
    # search from the top of the file).
    value, _ = _read_header_field(rawdata, "invalid", "\n", start=photons_stop)
    info.invalidphotons = int(value)

    value, _ = _read_header_field(rawdata, "number of fifo overruns", "\n")
    info.fifooverruns = int(value)

    # --- photon records --------------------------------------------------
    # Records start two lines after the "End of info header" marker; each
    # line reads "<macro> <micro>  <channel> <flag>" (two spaces before the
    # channel column).
    data = np.zeros((Nphotons, 4), 'int64')
    start = rawdata.find("End of info header\n", 0)
    start = rawdata.find("\n", start) + 1
    start = rawdata.find("\n", start) + 1
    print(str(Nphotons) + " photons found.")
    for i in range(Nphotons):
        stop = rawdata.find(" ", start)
        data[i, 0] = int(rawdata[start:stop])  # macrotime [a.u.]
        start = stop + 1
        stop = rawdata.find(" ", start)
        data[i, 1] = int(rawdata[start:stop])  # microtime [a.u.]
        start = stop + 2  # skip the double space before the channel
        stop = rawdata.find(" ", start)
        data[i, 2] = int(rawdata[start:stop])  # routing channel
        start = stop + 1
        stop = rawdata.find("\n", start)
        data[i, 3] = int(rawdata[start:stop])  # invalid flag
        # Advance past the newline to the next record. (The original left
        # `start` ON the newline and relied on int() stripping the leading
        # "\n" from the next macrotime slice; stepping past it is explicit
        # and yields identical values.)
        start = stop + 1
    return data, info
|
<filename>ThirdParty/json-base/src/test/java/com/github/wnameless/json/base/JsonBaseTest.java
/*
*
* Copyright 2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
*/
package com.github.wnameless.json.base;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map.Entry;
import org.junit.jupiter.api.Test;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.testing.EqualsTester;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.reflect.TypeToken;
/**
 * Exercises the wrapper API ({@code GsonJsonValue}/{@code JacksonJsonValue}
 * and their object/array views) against a shared fixture bean {@code jo}
 * containing a string, a numeric array, a boolean, and a null.
 */
public class JsonBaseTest {
  // Fixture scalar values serialized into jo below.
  String str = "text";
  int i = 123;
  long l = 1234567890;
  double d = 45.67;
  boolean bool = true;
  Object obj = null;
  // Fixture bean built via double-brace initialization:
  // {str:"text", num:[i, l, d], bool:true, obj:null}.
  JsonObject jo = new JsonObject() {
    {
      setStr(str);
      setNum(new ArrayList<Number>() {
        private static final long serialVersionUID = 1L;
        {
          add(i);
          add(l);
          add(d);
        }
      });
      setBool(bool);
      setObj(obj);
    }
  };
  JsonValueBase<?> jsonValue;
  // Verifies type predicates, accessors and equals/hashCode for the Gson wrapper.
  @Test
  public void testGsonValue() {
    Gson gson = new GsonBuilder().serializeNulls().create();
    JsonElement jsonElement =
        gson.toJsonTree(jo, new TypeToken<JsonObject>() {}.getType());
    jsonValue = new GsonJsonValue(jsonElement);
    assertTrue(jsonValue.isObject());
    assertTrue(jsonValue.asObject().get("str").isString());
    assertEquals(str, jsonValue.asObject().get("str").asString());
    assertTrue(jsonValue.asObject().get("num").isArray());
    assertTrue(jsonValue.asObject().get("num").asArray().get(0).isNumber());
    assertEquals(i, jsonValue.asObject().get("num").asArray().get(0).asInt());
    assertTrue(jsonValue.asObject().get("num").asArray().get(1).isNumber());
    assertEquals(l, jsonValue.asObject().get("num").asArray().get(1).asLong());
    assertTrue(jsonValue.asObject().get("num").asArray().get(2).isNumber());
    assertEquals(d, jsonValue.asObject().get("num").asArray().get(2).asDouble(),
        0.0);
    assertTrue(jsonValue.asObject().get("bool").isBoolean());
    assertTrue(jsonValue.asObject().get("bool").asBoolean());
    assertTrue(jsonValue.asObject().get("obj").isNull());
    new EqualsTester().addEqualityGroup(jsonValue).testEquals();
    new EqualsTester().addEqualityGroup(jsonValue.asObject()).testEquals();
    new EqualsTester()
        .addEqualityGroup(jsonValue.asObject().get("num").asArray())
        .testEquals();
  }
  // Mirror of testGsonValue for the Jackson-backed wrapper.
  @Test
  public void testJacksonValue() {
    JsonNode jsonNode = new ObjectMapper().valueToTree(jo);
    jsonValue = new JacksonJsonValue(jsonNode);
    assertTrue(jsonValue.isObject());
    assertTrue(jsonValue.asObject().get("str").isString());
    assertEquals(str, jsonValue.asObject().get("str").asString());
    assertTrue(jsonValue.asObject().get("num").isArray());
    assertTrue(jsonValue.asObject().get("num").asArray().get(0).isNumber());
    assertEquals(i, jsonValue.asObject().get("num").asArray().get(0).asInt());
    assertTrue(jsonValue.asObject().get("num").asArray().get(1).isNumber());
    assertEquals(l, jsonValue.asObject().get("num").asArray().get(1).asLong());
    assertTrue(jsonValue.asObject().get("num").asArray().get(2).isNumber());
    assertEquals(d, jsonValue.asObject().get("num").asArray().get(2).asDouble(),
        0.0);
    assertTrue(jsonValue.asObject().get("bool").isBoolean());
    assertTrue(jsonValue.asObject().get("bool").asBoolean());
    assertTrue(jsonValue.asObject().get("obj").isNull());
    new EqualsTester().addEqualityGroup(jsonValue).testEquals();
    new EqualsTester().addEqualityGroup(jsonValue.asObject()).testEquals();
    new EqualsTester()
        .addEqualityGroup(jsonValue.asObject().get("num").asArray())
        .testEquals();
  }
  // Retained for reference: the minimal-json backend test is disabled.
  // @Test
  // public void testMinialJsonValue() throws JsonProcessingException {
  // JsonValue jv = Json.parse(new ObjectMapper().writeValueAsString(jo));
  // jsonValue = new MinimalJsonValue(jv);
  //
  // assertTrue(jsonValue.isObject());
  // assertTrue(jsonValue.asObject().get("str").isString());
  // assertEquals(str, jsonValue.asObject().get("str").asString());
  // assertTrue(jsonValue.asObject().get("num").isArray());
  // assertTrue(jsonValue.asObject().get("num").asArray().get(0).isNumber());
  // assertEquals(i, jsonValue.asObject().get("num").asArray().get(0).asInt());
  // assertTrue(jsonValue.asObject().get("num").asArray().get(1).isNumber());
  // assertEquals(l, jsonValue.asObject().get("num").asArray().get(1).asLong());
  // assertTrue(jsonValue.asObject().get("num").asArray().get(2).isNumber());
  // assertEquals(d,
  // jsonValue.asObject().get("num").asArray().get(2).asDouble(),
  // 0.0);
  // assertTrue(jsonValue.asObject().get("bool").isBoolean());
  // assertTrue(jsonValue.asObject().get("bool").asBoolean());
  // assertTrue(jsonValue.asObject().get("obj").isNull());
  //
  // new EqualsTester().addEqualityGroup(jsonValue).testEquals();
  // new EqualsTester().addEqualityGroup(jsonValue.asObject()).testEquals();
  // new EqualsTester()
  // .addEqualityGroup(jsonValue.asObject().get("num").asArray())
  // .testEquals();
  // }
  // Array view must iterate elements in order and report emptiness (Gson).
  @Test
  public void testGsonArrayIterable() {
    Gson gson = new GsonBuilder().serializeNulls().create();
    JsonElement jsonElement =
        gson.toJsonTree(jo, new TypeToken<JsonObject>() {}.getType());
    GsonJsonValue gsonJson = new GsonJsonValue(jsonElement);
    JsonArrayBase<GsonJsonValue> array =
        gsonJson.asObject().get("num").asArray();
    Iterator<GsonJsonValue> iter = array.iterator();
    assertEquals(array.get(0), iter.next());
    assertEquals(array.get(1), iter.next());
    assertEquals(array.get(2), iter.next());
    assertFalse(iter.hasNext());
    assertFalse(array.isEmpty());
    jsonElement = gson.toJsonTree(new ArrayList<>(),
        new TypeToken<ArrayList<?>>() {}.getType());
    gsonJson = new GsonJsonValue(jsonElement);
    array = gsonJson.asArray();
    assertTrue(array.isEmpty());
  }
  // Object view must iterate entries in insertion order and report emptiness (Gson).
  @Test
  public void testGsonObjectIterable() {
    Gson gson = new GsonBuilder().serializeNulls().create();
    JsonElement jsonElement =
        gson.toJsonTree(jo, new TypeToken<JsonObject>() {}.getType());
    GsonJsonValue jsonValue = new GsonJsonValue(jsonElement);
    GsonJsonObject gsonObject = jsonValue.asObject();
    Iterator<Entry<String, GsonJsonValue>> iter = gsonObject.iterator();
    Entry<String, GsonJsonValue> element = iter.next();
    assertEquals("str", element.getKey());
    assertEquals(gsonObject.get("str"), element.getValue());
    element = iter.next();
    assertEquals("num", element.getKey());
    assertEquals(gsonObject.get("num"), element.getValue());
    element = iter.next();
    assertEquals("bool", element.getKey());
    assertEquals(gsonObject.get("bool"), element.getValue());
    element = iter.next();
    assertEquals("obj", element.getKey());
    assertEquals(gsonObject.get("obj"), element.getValue());
    assertFalse(iter.hasNext());
    assertFalse(gsonObject.isEmpty());
    jsonElement =
        gson.toJsonTree(new Object(), new TypeToken<Object>() {}.getType());
    jsonValue = new GsonJsonValue(jsonElement);
    gsonObject = jsonValue.asObject();
    assertTrue(gsonObject.isEmpty());
  }
  // Mirror of testGsonArrayIterable for the Jackson backend.
  @Test
  public void testJacksonArrayIterable() {
    JsonNode jsonNode = new ObjectMapper().valueToTree(jo);
    JacksonJsonValue jacksonJson = new JacksonJsonValue(jsonNode);
    JsonArrayBase<JacksonJsonValue> array =
        jacksonJson.asObject().get("num").asArray();
    Iterator<JacksonJsonValue> iter = array.iterator();
    assertEquals(array.get(0), iter.next());
    assertEquals(array.get(1), iter.next());
    assertEquals(array.get(2), iter.next());
    assertFalse(iter.hasNext());
    assertFalse(array.isEmpty());
    jsonNode = new ObjectMapper().valueToTree(new ArrayList<>());
    jacksonJson = new JacksonJsonValue(jsonNode);
    array = jacksonJson.asArray();
    assertTrue(array.isEmpty());
  }
  // Mirror of testGsonObjectIterable for the Jackson backend.
  @Test
  public void testJacksonObjectIterable() {
    JsonNode jsonNode = new ObjectMapper().valueToTree(jo);
    JacksonJsonValue jacksonJson = new JacksonJsonValue(jsonNode);
    JacksonJsonObject jacksonObject = jacksonJson.asObject();
    Iterator<Entry<String, JacksonJsonValue>> iter = jacksonObject.iterator();
    Entry<String, JacksonJsonValue> element = iter.next();
    assertEquals("str", element.getKey());
    assertEquals(jacksonObject.get("str"), element.getValue());
    element = iter.next();
    assertEquals("num", element.getKey());
    assertEquals(jacksonObject.get("num"), element.getValue());
    element = iter.next();
    assertEquals("bool", element.getKey());
    assertEquals(jacksonObject.get("bool"), element.getValue());
    element = iter.next();
    assertEquals("obj", element.getKey());
    assertEquals(jacksonObject.get("obj"), element.getValue());
    assertFalse(iter.hasNext());
    assertFalse(jacksonObject.isEmpty());
    jsonNode = new ObjectMapper().valueToTree(new HashMap<>());
    jacksonJson = new JacksonJsonValue(jsonNode);
    jacksonObject = jacksonJson.asObject();
    assertTrue(jacksonObject.isEmpty());
  }
  // Retained for reference: minimal-json iterable tests are disabled.
  // @Test
  // public void testMinimalArrayIterable() throws JsonProcessingException {
  // JsonValue jv = Json.parse(new ObjectMapper().writeValueAsString(jo));
  // MinimalJsonValue minimalJson = new MinimalJsonValue(jv);
  //
  // JsonArrayBase<MinimalJsonValue> array =
  // minimalJson.asObject().get("num").asArray();
  // Iterator<MinimalJsonValue> iter = array.iterator();
  //
  // assertEquals(array.get(0), iter.next());
  // assertEquals(array.get(1), iter.next());
  // assertEquals(array.get(2), iter.next());
  // assertFalse(iter.hasNext());
  // }
  //
  // @Test
  // public void testMinimalObjectIterable() throws JsonProcessingException {
  // JsonValue jv = Json.parse(new ObjectMapper().writeValueAsString(jo));
  // MinimalJsonValue minimalJson = new MinimalJsonValue(jv);
  // MinimalJsonObject minimalObject = minimalJson.asObject();
  //
  // Iterator<Entry<String, MinimalJsonValue>> iter = minimalObject.iterator();
  //
  // Entry<String, MinimalJsonValue> element = iter.next();
  // assertEquals("str", element.getKey());
  // assertEquals(minimalObject.get("str"), element.getValue());
  //
  // element = iter.next();
  // assertEquals("num", element.getKey());
  // assertEquals(minimalObject.get("num"), element.getValue());
  //
  // element = iter.next();
  // assertEquals("bool", element.getKey());
  // assertEquals(minimalObject.get("bool"), element.getValue());
  //
  // element = iter.next();
  // assertEquals("obj", element.getKey());
  // assertEquals(minimalObject.get("obj"), element.getValue());
  //
  // assertFalse(iter.hasNext());
  // }
}
|
'use strict';
import React from 'react';
// Landing page component: renders a static welcome banner.
class Home extends React.Component {
    constructor( props ) {
        super( props );
        // No state is used yet; kept for future expansion.
        this.state = {
        }
    }
    render() {
        return (
            <div className='home-container d-flex flex-row justify-content-center flex-wrap mt-10 mb-10'>
                <h2>Welcome To Upliftnow...</h2>
            </div>
        );
    }
}
// No default props are currently required.
Home.defaultProps = {
};
export default Home;
<filename>src/org/restsql/service/ServiceTestRunner.java
/* Copyright (c) restSQL Project Contributors. Licensed under MIT. */
package org.restsql.service;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.StringTokenizer;
import junit.framework.AssertionFailedError;
import junit.framework.Test;
import junit.framework.TestListener;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import org.restsql.core.Config;
import org.restsql.core.Factory;
import org.restsql.service.ServiceTestCase.InterfaceStyle;
import org.restsql.service.testcase.ServiceTestCaseDefinition;
public class ServiceTestRunner {
	/** Default schema used when {@code org.restsql.testDatabase} is not set. */
	public final static String DEFAULT_TEST_DATABASE = "sakila";
	/** Sentinel meaning "exclude no category". */
	public final static String EXCLUDE_NONE = "none";
	/** Suffix that marks a scope argument as a test-list file. */
	public final static String FILE_TEST_LIST_ENDS_WITH = "_tests.txt";
	/** System property naming the test database to run against. */
	public final static String KEY_TEST_DATABASE = "org.restsql.testDatabase";
	/** Scope value meaning "run every test category". */
	public final static String SCOPE_ALL = "all";
	/** Directory containing the XML test-case definitions. */
	public final static String TEST_CASE_DIR = "obj/bin/resources/xml/service/testcase";
	public final static String TEST_RESULTS_BASE_DIR = "obj/test";
	public final static String TEST_RESULTS_DIR = TEST_RESULTS_BASE_DIR + "/service";
	public static final String FAILURES_AND_ERRORS_LOG = TEST_RESULTS_DIR + "/FailuresAndErrors.log";
	// Fall back to the bundled MySQL properties when none is configured.
	static {
		if (System.getProperty(Config.KEY_RESTSQL_PROPERTIES) == null) {
			System.setProperty(Config.KEY_RESTSQL_PROPERTIES,
					"/resources/properties/restsql-mysql.properties");
		}
		System.out.println("Using " + System.getProperty(Config.KEY_RESTSQL_PROPERTIES));
	}
	/**
	 * Entry point: builds a JUnit suite from the requested test-case files and
	 * runs it against the configured database.
	 * Exit codes: 0 all tests passed, 1 test failures/errors, 2 suite could
	 * not be built, 4 bad usage.
	 */
	public static void main(final String[] args) throws SQLException, IOException {
		if (args.length < 3) {
			System.out
					.println("Usage: ServiceTestRunner style scope exclude\n\tstyle=[java|http]\n\tscope=[all|path/to/test/list|category-name]\n\texclude=[none|category-name]");
			System.exit(4);
		}
		final InterfaceStyle interfaceStyle = InterfaceStyle.fromString(args[0]);
		final List<File> files = getDefinitionFiles(args[1], parseListFromString(args[2], ","));
		cleanResultsDir();
		final TestSuite suite = new TestSuite();
		final TestResult result = new TestResult();
		final ServiceTestListener listener = new ServiceTestListener();
		result.addListener(listener);
		// Database can be overridden via the org.restsql.testDatabase property.
		String testDatabase = System.getProperty(KEY_TEST_DATABASE, DEFAULT_TEST_DATABASE);
		if (!testDatabase.equals(DEFAULT_TEST_DATABASE)) {
			System.out.println("Using non-default test database " + testDatabase);
		}
		final Connection connection = Factory.getConnection(testDatabase);
		if (buildSuite(connection, suite, interfaceStyle, files)) {
			suite.run(result);
			connection.close();
			System.out.println("Tests run: " + result.runCount() + ", Failures: " + result.failureCount()
					+ ", Errors: " + result.errorCount() + ", Time elapsed: "
					+ (float) listener.getTotalElapsedTime() / 1000 + " sec");
			listener.printNonPassingTests();
			if (result.wasSuccessful()) {
				System.exit(0);
			} else {
				System.exit(1);
			}
		} else {
			System.exit(2);
		}
	}
	/**
	 * Populates the suite with one ServiceTestCase per XML definition file.
	 * Returns false when any definition fails to load (the run is aborted).
	 */
	private static boolean buildSuite(final Connection connection, final TestSuite suite,
			final InterfaceStyle interfaceStyle, final List<File> files) {
		boolean success = true;
		for (final File file : files) {
			if (file != null && file.getPath().endsWith(".xml")) {
				final String testCaseFileName = file.getName();
				final String categoryName = file.getParentFile().getName();
				// HTTP-only cases (404s, form params) cannot run in Java style.
				if (interfaceStyle == InterfaceStyle.Java
						&& (testCaseFileName.contains("ResourceNotFound") || testCaseFileName.contains("FormParam"))) {
					// exclude
					System.out.println("Skipping " + categoryName + "/" + testCaseFileName);
				} else {
					try {
						final ServiceTestCaseDefinition definition = XmlHelper.unmarshallDefinition(file);
						final ServiceTestCase testCase = new ServiceTestCase(interfaceStyle, categoryName,
								testCaseFileName, connection, definition);
						suite.addTest(testCase);
					} catch (final Exception exception) {
						// Keep loading remaining files but fail the overall build.
						System.out.println("Error loading " + file);
						exception.printStackTrace();
						success = false;
					}
				}
			}
		}
		return success;
	}
private static void cleanResultsDir() {
// Create base dir
File dir = new File(TEST_RESULTS_BASE_DIR);
if (!dir.exists()) {
dir.mkdir();
}
// Create or clean results dir
dir = new File(TEST_RESULTS_DIR);
if (dir.exists()) {
final File[] subDirs = dir.listFiles();
if (subDirs != null) {
for (final File subDir : subDirs) {
final File[] files = subDir.listFiles();
if (files != null) {
for (final File file : files) {
file.delete();
}
}
subDir.delete();
}
}
} else {
dir.mkdir();
}
}
/**
 * Collects the test-case definition files selected by {@code scope}.
 * <p>
 * {@code scope} is either {@code SCOPE_ALL}, a single category
 * (sub-directory) name, or — when it ends with
 * {@code FILE_TEST_LIST_ENDS_WITH} — the path of a list file naming
 * individual cases ("#" lines are comments, "category/*" includes a whole
 * category). Exits with status 3 when a named list file is missing.
 *
 * @param scope        selection expression as described above
 * @param excludedDirs category names to skip (ignored when equal to EXCLUDE_NONE)
 * @return the matching definition files (possibly empty)
 * @throws IOException if reading a list file fails
 */
private static List<File> getDefinitionFiles(final String scope, final List<String> excludedDirs)
        throws FileNotFoundException, IOException {
    final List<File> files = new ArrayList<File>(50);
    if (!scope.endsWith(FILE_TEST_LIST_ENDS_WITH)) {
        File dir = new File(TEST_CASE_DIR);
        final String[] subDirs = dir.list();
        // dir.list() returns null when TEST_CASE_DIR is missing or not a
        // directory; the original dereferenced it unconditionally (NPE).
        if (subDirs == null) {
            return files;
        }
        for (final String subDir : subDirs) {
            // Skip stray .txt and .xsd files sitting next to the category
            // directories (the original tested "*.xsd", which never matches).
            if (!subDir.endsWith(".txt") && !subDir.endsWith(".xsd")) {
                boolean includeSubDir = true;
                if (!scope.equals(SCOPE_ALL) && !subDir.equals(scope)) {
                    includeSubDir = false;
                }
                for (final String excludedDir : excludedDirs) {
                    if (!excludedDir.equals(EXCLUDE_NONE) && excludedDir.equals(subDir)) {
                        System.out.println("Excluding category " + subDir);
                        includeSubDir = false;
                        break;
                    }
                }
                if (includeSubDir) {
                    dir = new File(TEST_CASE_DIR + "/" + subDir);
                    final File[] subDirFiles = dir.listFiles();
                    if (subDirFiles != null) {
                        files.addAll(Arrays.asList(subDirFiles));
                    }
                }
            }
        }
    } else { // scope = file list
        final File listFile = new File(scope);
        if (listFile.exists()) {
            final BufferedReader reader = new BufferedReader(new FileReader(listFile));
            try {
                String fileName;
                final ArrayList<String> listFileNames = new ArrayList<String>();
                while ((fileName = reader.readLine()) != null) {
                    if (!fileName.startsWith("#")) {
                        listFileNames.add(fileName);
                    }
                }
                for (int i = 0; i < listFileNames.size(); i++) {
                    fileName = listFileNames.get(i);
                    if (fileName.endsWith("/*")) {
                        // Whole-category entry: include every file in it.
                        final File dir = new File(TEST_CASE_DIR + "/"
                                + fileName.substring(0, fileName.length() - 2));
                        final File[] categoryFiles = dir.listFiles();
                        // Guard against a misspelled category name (the
                        // original NPEd on dir.listFiles() == null).
                        if (categoryFiles != null) {
                            files.addAll(Arrays.asList(categoryFiles));
                        }
                    } else {
                        final File file = new File(TEST_CASE_DIR + "/" + fileName);
                        if (!file.exists()) {
                            System.out.println("Cannot find " + fileName + " - ignoring");
                        } else {
                            files.add(file);
                        }
                    }
                }
            } finally {
                // Previously leaked when readLine() threw.
                reader.close();
            }
        } else {
            System.out.println("Cannot find test list " + scope);
            System.exit(3);
        }
    }
    return files;
}
/**
 * Splits {@code string} into its {@code delimiter}-separated tokens.
 * StringTokenizer semantics: the delimiter is a set of characters and empty
 * tokens are skipped.
 */
private static List<String> parseListFromString(final String string, final String delimiter) {
    final List<String> tokens = new ArrayList<String>(5);
    for (final StringTokenizer tokenizer = new StringTokenizer(string, delimiter);
            tokenizer.hasMoreTokens();) {
        tokens.add(tokenizer.nextToken());
    }
    return tokens;
}
/**
 * JUnit {@link TestListener} that tracks per-test and total elapsed time,
 * renames log files for failing/erroring tests, and records non-passing
 * tests so they can be summarised at the end of the run.
 */
static class ServiceTestListener implements TestListener {
    // Elapsed time of the most recently finished test, in milliseconds.
    private long elapsedTime;
    // Error/failure raised by the current test, reset in startTest().
    private Throwable error, failure;
    // Tests that errored or failed, in the order they were observed.
    private final List<ServiceTestCase> nonPassingTests = new ArrayList<ServiceTestCase>();
    private long startTime;
    private long totalElapsedTime;

    /** Records an unexpected exception and renames the test's log file. */
    @Override
    public void addError(final Test test, final Throwable e) {
        error = e;
        ServiceTestCaseHelper.renameLog(test, "Error--");
        nonPassingTests.add((ServiceTestCase) test);
    }

    /** Records an assertion failure and renames the test's log file. */
    @Override
    public void addFailure(final Test test, final AssertionFailedError f) {
        failure = f;
        ServiceTestCaseHelper.renameLog(test, "Failure--");
        nonPassingTests.add((ServiceTestCase) test);
    }

    /** Accumulates elapsed time and prints any error/failure of this test. */
    @Override
    public void endTest(final Test test) {
        elapsedTime = System.currentTimeMillis() - startTime;
        totalElapsedTime += elapsedTime;
        // System.out.println(" ... " + (float) elapsedTime / 1000 + " sec");
        if (error != null) {
            System.out.println("---Error- " + error.toString());
            error.printStackTrace();
        } else if (failure != null) {
            System.out.println("---Failure- " + failure.toString());
        }
    }

    public long getElapsedTime() {
        return elapsedTime;
    }

    public List<ServiceTestCase> getNonPassingTests() {
        return nonPassingTests;
    }

    public long getTotalElapsedTime() {
        return totalElapsedTime;
    }

    /**
     * Prints all non-passing tests to stdout and writes the same list to
     * FAILURES_AND_ERRORS_LOG. Does nothing when every test passed.
     */
    public void printNonPassingTests() {
        if (nonPassingTests.size() > 0) {
            final File file = new File(FAILURES_AND_ERRORS_LOG);
            FileOutputStream outputStream;
            try {
                outputStream = new FileOutputStream(file);
                System.out.println("\nFailures and Errors:");
                for (final ServiceTestCase testCase : nonPassingTests) {
                    final String name = testCase.getTestCaseCategory() + "/" + testCase.getTestCaseName()
                            + "\n";
                    System.out.print(" " + name);
                    outputStream.write(name.getBytes());
                }
                outputStream.close();
            } catch (final IOException exception) {
                exception.printStackTrace();
            }
        }
    }

    /** Resets per-test state and starts the timer for the next test. */
    @Override
    public void startTest(final Test test) {
        error = null;
        failure = null;
        startTime = System.currentTimeMillis();
        final ServiceTestCase testCase = (ServiceTestCase) test;
        System.out
                .println("Running " + testCase.getTestCaseCategory() + "/" + testCase.getTestCaseName());
    }
}
}
|
<filename>src/app/app.states.js
import {monstersService} from './monsters/monsters.service';
/** Root application state; immediately redirects to the monster list. */
export const appState = {
  name: 'app',
  redirectTo: 'monsterList',
  component: 'layout',
  transclude: true
};

/** Monster list view, mounted under the root layout at /monsters. */
export const monsterListState = {
  name: 'monsterList',
  parent: 'app',
  url: '/monsters',
  component: 'monsterList',
  transclude: true
};

/**
 * Detail view for one monster, nested under the list at /monsters/:monsterId.
 * NOTE(review): `monstersService` here is injected by string name
 * (minification-safe DI) and shadows the top-of-file import of the same
 * symbol, which is therefore unused — confirm the import can be dropped.
 */
export const monsterDetailState = {
  name: 'monsterDetail',
  parent: 'monsterList',
  url: '/:monsterId',
  component: 'monsterDetail',
  resolve: {
    // Resolve the monster before the component activates.
    monster: ['$transition$', 'monstersService', function ($transition$, monstersService) {
      return monstersService.getById($transition$.params().monsterId);
    }]
  }
};
|
import trio
import http
import h11 # HTTP/1.1
import socket
import ssl
import mimetypes
import os
import functools
try:
import importlib.resources as pkg_resources
except ImportError:
import importlib_resources as pkg_resources # Try backported to PY<37 `importlib_resources`.
from . import static
async def serve_static_html(client_stream, path):
    """Send the static package resource named by ``path`` over ``client_stream``.

    ``path`` arrives as the raw request target (e.g. ``/index.html``); the
    leading ``/`` must be stripped before the package-resource lookup.  A
    missing (or path-like) resource yields a 404.  The stream is always
    closed afterwards.

    Fixes vs the original: h11.Response has no ``content`` kwarg and is not
    bytes, so ``send_all(response)`` could never work; ``guess_type()`` may
    return ``None``; the leading ``/`` made every lookup fail.
    """
    resource = path.lstrip("/")
    try:
        content = pkg_resources.read_text(static, resource)
    except (FileNotFoundError, ValueError):
        # ValueError: read_text rejects names containing path separators.
        body = b"Not Found"
        status_line = b"HTTP/1.1 404 Not Found\r\n"
        content_type = b"text/plain"
    else:
        body = content.encode("utf-8")
        status_line = b"HTTP/1.1 200 OK\r\n"
        guessed = mimetypes.guess_type(resource)[0] or "application/octet-stream"
        content_type = guessed.encode("utf-8")
    response = (
        status_line
        + b"Content-Type: " + content_type + b"\r\n"
        + b"Content-Length: " + str(len(body)).encode("utf-8") + b"\r\n"
        + b"\r\n"
        + body
    )
    try:
        await client_stream.send_all(response)
    finally:
        await client_stream.aclose()
async def handle_client(client_stream):
    """Read one HTTP request head from the stream and dispatch it.

    Only GET is supported; anything else gets a 405.  Any body bytes beyond
    the header terminator are ignored.  Fixes vs the original: the 405 branch
    tried to send an h11.Response object (not bytes) over ``send_all``, and an
    empty request crashed the request-line unpacking.
    """
    request = b""
    async for data in client_stream:
        request += data
        if b"\r\n\r\n" in request:
            break
    if not request:
        # Peer closed the connection without sending anything.
        await client_stream.aclose()
        return
    request_line, _headers = request.split(b"\r\n", 1)
    method, path, _ = request_line.split(b" ", 2)
    if method == b"GET":
        await serve_static_html(client_stream, path.decode("utf-8"))
    else:
        body = b"Method Not Allowed"
        response = (
            b"HTTP/1.1 405 Method Not Allowed\r\n"
            + b"Content-Length: " + str(len(body)).encode("utf-8") + b"\r\n"
            + b"\r\n"
            + body
        )
        try:
            await client_stream.send_all(response)
        finally:
            await client_stream.aclose()
async def main():
    """Accept TCP connections on 127.0.0.1:8000 and spawn a handler per client.

    The blocking ``accept`` runs in a worker thread so the trio event loop
    stays responsive while waiting for connections.
    """
    server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_sock.bind(("127.0.0.1", 8000))
    server_sock.listen()
    try:
        async with trio.open_nursery() as nursery:
            while True:
                client_sock, _ = await trio.to_thread.run_sync(server_sock.accept)
                client_stream = trio.SocketStream(client_sock)
                nursery.start_soon(handle_client, client_stream)
    finally:
        # The accept loop only exits via cancellation or error; don't leak
        # the listening socket when that happens.
        server_sock.close()


trio.run(main)
<reponame>Upplication/cordova-java
package com.upplication.cordova.util;
import com.upplication.cordova.CordovaConfig;
import java.io.IOException;
/**
 * A unit of work executed against a {@code CordovaConfig} inside a config
 * transaction.
 */
public interface ConfigTransactionJob {
    /**
     * Applies this job's changes to the given config.
     *
     * @param config the Cordova config to operate on
     * @throws IOException if reading or writing the underlying config fails
     */
    void exec(CordovaConfig config) throws IOException;
}
|
/** Props for the CalendarDays month grid. */
export interface CalendarDaysProps {
    // Month/year to display (the day-of-month component is not used).
    current:Date,
    // Selected date; its day is highlighted when month/year match `current`.
    selected:Date,
    // Invoked with the 1-based day number when an enabled day is clicked.
    onDayClick(day:number): void,
    // Weekday indices (0 = Sunday … 6 = Saturday) to render as disabled.
    disableDays?:Array<number>
}
export const CalendarDays = (props: CalendarDaysProps) => {
const getDateSplit = (date : Date) : {day:number, month: number, year: number} => {return {day:date.getDate(),month:date.getMonth(), year:date.getFullYear()}};
let currentDate = getDateSplit(props.current);
let selectedDate = getDateSplit(props.selected);
let dayofWeek = 0;
const getFisrtEmpySpaces = () : number => {
let firstDayMonth = new Date(currentDate.year , currentDate.month , 1);
let emptyItems = firstDayMonth.getDay();
return emptyItems;
}
const getLastEmpySpacesAndLastDate = () : {lastDay : number, lastEmptyItems:number } => {
let lastDayMonth = new Date(currentDate.year , (currentDate.month + 1) , 1 );
lastDayMonth.setDate(lastDayMonth.getDate()-1);
let lastEmptyDays = 6 - lastDayMonth.getDay();
let lastDay = lastDayMonth.getDate();
return {lastDay : lastDay, lastEmptyItems: lastEmptyDays };
}
const firstEmptyItems = getFisrtEmpySpaces();
const endMonthData = getLastEmpySpacesAndLastDate();
dayofWeek = firstEmptyItems-1;
const populateEmptyListItems = (numberOfItems : number) => Array.apply(0, Array(numberOfItems)).map((x:any, i:number) => <li key={i}></li>);
const PrintDayItem = (x:any, index:number) => {
let day=index+1;
dayofWeek++;
let className = '';
let disabled =props.disableDays?.includes(dayofWeek);
if(currentDate.month === selectedDate.month && currentDate.year === selectedDate.year && selectedDate.day === day) className = 'active';
if(dayofWeek == 6) dayofWeek=-1;
className += disabled ? ' day-disabled' : ' day';
return disabled ? <li key={index} className={className}>{index+1}</li> :
<li key={index} className={className} onClick={()=>props.onDayClick(index+1)}>{index+1}</li>
}
return <ul className='calendar-days'>
{populateEmptyListItems(firstEmptyItems)}
{Array.apply(0, Array(endMonthData.lastDay)).map(PrintDayItem)}
{populateEmptyListItems(endMonthData.lastEmptyItems)}
</ul>
}
|
#for combo in $(curl -s https://raw.githubusercontent.com/CyanogenMod/hudson/master/cm-build-targets | sed -e 's/#.*$//' | grep cm-12.1 | awk '{printf "cm_%s-%s\n", $1, $2}')
#do
# add_lunch_combo $combo
#done
|
#!/usr/bin/env sh
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015, 2016 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
# QA pipeline; the '&&' chain aborts on the first failing step:
#  - isort -c --df : check import ordering, showing diffs without rewriting
#  - check-manifest: verify the package manifest covers the source tree
#  - pytest        : run the test suite
isort -c --df flask_taxonomies && \
check-manifest --ignore ".travis-*" && \
pytest
|
#!/bin/bash
#
# rpreload - Resource pack management made easy.
# Copyright (c) 2015, Matej Kormuth <http://www.github.com/dobrakmato>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# ------ Script settings. ------
# Desired name of rpreload.jar.
DESIRED_NAME=rpreload.jar
# Desired version of rpreload that will be downloaded.
DESIRED_VERSION=1.0
# ------ Do not edit below this line! ------

ARTIFACT="rpreload-$DESIRED_VERSION-jar-with-dependencies.jar"

# Keep a backup of any existing jar so a failed download can be rolled back.
if [ -f "$DESIRED_NAME" ];
then
    echo " Backing up old rpreload jar ($DESIRED_NAME)..."
    mv "$DESIRED_NAME" "$DESIRED_NAME.bak"
fi
echo " Downloading new rpreload jar from maven repo..."
if ! wget -4 -q "http://repo.matejkormuth.eu/eu/matejkormuth/rpreload/$DESIRED_VERSION/$ARTIFACT"; then
    # The original carried on after a failed download, renaming a file that
    # does not exist and leaving no usable jar; restore the backup instead.
    echo " Download failed!"
    if [ -f "$DESIRED_NAME.bak" ]; then
        mv "$DESIRED_NAME.bak" "$DESIRED_NAME"
    fi
    exit 1
fi
echo " Renaming downloaded artifact to desired name..."
mv "$ARTIFACT" "$DESIRED_NAME"
chmod +x "$DESIRED_NAME"
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Form with Table</title>
<style>
table, th, td {
  border: 1px solid black;
}
</style>
</head>
<body>
<form action="">
<!-- label "for" must reference an element id; the text input previously
     had only a name, so the label was not associated with it. -->
<label for="text">Text Input:</label><br>
<input type="text" id="text" name="text"><br><br>
<input type="radio" name="gender" value="male"> Male<br>
<input type="radio" name="gender" value="female"> Female<br><br>
<label for="cars">Select a car:</label><br>
<select id="cars" name="cars">
<option value="volvo">Volvo</option>
<option value="saab">Saab</option>
<option value="fiat">Fiat</option>
</select><br><br>
<input type="submit" value="Submit">
</form>
<br><br><br>
<table>
<tr>
<th style="background-color:red;">Red</th>
<th style="background-color:green;">Green</th>
<th style="background-color:blue;">Blue</th>
</tr>
</table>
</body>
</html>
"use strict";
// Custom THREE.ShaderMaterial for line rendering; shader sources are read
// from <script> tags with ids 'vertex_shader' / 'fragment_shader'.
var LineMaterial = function (parameters) {
    THREE.ShaderMaterial.call(this);
    parameters = parameters || {};
    // Uniforms forwarded to the shaders; width and opacity default to 1.
    // NOTE(review): the `type: "f"` hints are the legacy uniform format —
    // confirm the three.js version in use still accepts them.
    // NOTE(review): `parameters.opacity || 1` turns an explicit opacity of 0
    // into 1 — confirm fully transparent lines are not a supported case.
    var uniforms = {
        "viewportSize": { value: parameters.viewportSize },
        "color": { value: parameters.color || new THREE.Color() },
        "width": { type: "f", value: (parameters.width || 1) },
        "opacity": { type: "f", value: parameters.opacity || 1 },
        "start": { value: parameters.start },
        "end": { value: parameters.end }
    };
    this.setValues({
        uniforms: uniforms,
        vertexShader: document.getElementById('vertex_shader').textContent,
        fragmentShader: document.getElementById('fragment_shader').textContent
    });
    // Render both faces so the line quad is visible from either side.
    this.side = THREE.DoubleSide;
};
// Classic prototype inheritance from ShaderMaterial.
LineMaterial.prototype = Object.create(THREE.ShaderMaterial.prototype);
LineMaterial.prototype.constructor = LineMaterial;
|
#!/bin/sh
# Container entrypoint for monasca-api: waits for MySQL and Kafka, renders
# config from templates, then execs gunicorn. All knobs are env vars with
# the defaults below.
GUNICORN_WORKERS=${GUNICORN_WORKERS:-"9"}
GUNICORN_WORKER_CLASS=${GUNICORN_WORKER_CLASS:-"gevent"}
GUNICORN_WORKER_CONNECTIONS=${GUNICORN_WORKER_CONNECTIONS:-"2000"}
GUNICORN_BACKLOG=${GUNICORN_BACKLOG:-"1000"}
MYSQL_WAIT_RETRIES=${MYSQL_WAIT_RETRIES:-"24"}
MYSQL_WAIT_DELAY=${MYSQL_WAIT_DELAY:-"5"}
KAFKA_WAIT_RETRIES=${KAFKA_WAIT_RETRIES:-"24"}
KAFKA_WAIT_DELAY=${KAFKA_WAIT_DELAY:-"5"}
# Poll MySQL until it answers (set MYSQL_WAIT_RETRIES=0 to skip the wait).
if [ "$MYSQL_WAIT_RETRIES" != "0" ]; then
  echo "Waiting for MySQL to become available..."
  success="false"
  for i in $(seq "$MYSQL_WAIT_RETRIES"); do
    if mysqladmin status \
        --host="$MYSQL_HOST" \
        --user="$MYSQL_USER" \
        --password="$MYSQL_PASSWORD" \
        --connect_timeout=10; then
      echo "MySQL is available, continuing..."
      success="true"
      break
    else
      echo "Connection attempt $i of $MYSQL_WAIT_RETRIES failed"
      sleep "$MYSQL_WAIT_DELAY"
    fi
  done
  if [ "$success" != "true" ]; then
    echo "Unable to reach MySQL database! Exiting..."
    sleep 1
    exit 1
  fi
fi
# Optionally wait for required Kafka topics (helper script does the check).
if [ -n "$KAFKA_WAIT_FOR_TOPICS" ]; then
  echo "Waiting for Kafka topics to become available..."
  success="false"
  for i in $(seq "$KAFKA_WAIT_RETRIES"); do
    if python /kafka_wait_for_topics.py; then
      success="true"
      break
    else
      echo "Kafka not yet ready (attempt $i of $KAFKA_WAIT_RETRIES)"
      sleep "$KAFKA_WAIT_DELAY"
    fi
  done
  if [ "$success" != "true" ]; then
    echo "Kafka failed to become ready, exiting..."
    sleep 1
    exit 1
  fi
fi
# Render the Jinja2 config templates, or copy them verbatim when templating
# is disabled.
if [ "$CONFIG_TEMPLATE" = "true" ]; then
  python template.py \
    /etc/monasca/api-config.conf.j2 \
    /etc/monasca/api-config.conf
  python template.py \
    /etc/monasca/api-config.ini.j2 \
    /etc/monasca/api-config.ini
  python template.py \
    /etc/monasca/api-logging.conf.j2 \
    /etc/monasca/api-logging.conf
else
  cp /etc/monasca/api-config.conf.j2 /etc/monasca/api-config.conf
  cp /etc/monasca/api-config.ini.j2 /etc/monasca/api-config.ini
  cp /etc/monasca/api-logging.conf.j2 /etc/monasca/api-logging.conf
fi
# $access_arg is deliberately left unquoted below so an empty value expands
# to nothing rather than an empty argument.
if [ "$ADD_ACCESS_LOG" = "true" ]; then
  access_arg="--access-logfile -"
else
  access_arg=
fi
# Needed to allow utf8 use in the Monasca API
export PYTHONIOENCODING=utf-8
gunicorn --capture-output \
  -n monasca-api \
  --worker-class="$GUNICORN_WORKER_CLASS" \
  --worker-connections="$GUNICORN_WORKER_CONNECTIONS" \
  --backlog="$GUNICORN_BACKLOG" \
  $access_arg \
  --access-logformat "$ACCESS_LOG_FIELDS" \
  --paste /etc/monasca/api-config.ini \
  -w "$GUNICORN_WORKERS"
|
#!/bin/sh
# Build script for tRNAscan-SE. Fail fast: without `set -e` the original
# ran `make install` even after a failed configure/make.
set -e

# Point the bundled scripts at the environment's perl instead of a
# hard-coded interpreter path (keeps .bak copies for debugging).
sed -i.bak '1 s|^.*$|#!/usr/bin/env perl|g' tRNAscan-SE.src
sed -i.bak '1 s|^.*$|#!/usr/bin/env perl|g' src/instman.pl

# Quote $PREFIX so install prefixes containing spaces work.
./configure --prefix="$PREFIX"
make
make install
|
#! /bin/sh
# Prints the project version without a trailing newline.
# Force a stable locale so git output is not locale-dependent.
LC_ALL=C
export LC_ALL
# No "version" file (git checkout): print the exact tag for HEAD when one
# exists, otherwise the abbreviated commit hash; a leading "v" is stripped.
[ -f version ] || ( git describe --exact-match --tags $(git log -n1 --pretty='%h') 2>/dev/null || git log -n1 --pretty='%h' ) | sed 's/^v//g' | xargs printf '%s'
# Release tarball: the pre-generated "version" file wins.
[ -f version ] && ( cat version 2>/dev/null ) | xargs printf '%s'
|
import type { Ref } from 'vue';
import { ref, watchEffect } from 'vue';
import type { TabSizeMap, TabOffsetMap, Tab, TabOffset } from '../interface';
const DEFAULT_SIZE = { width: 0, height: 0, left: 0, top: 0 };
/**
 * Derives a reactive map of tab offsets — including each tab's computed
 * `right` edge — from the measured tab sizes.  Recomputed whenever `tabs`
 * or `tabSizes` change.
 */
export default function useOffsets(
  tabs: Ref<Tab[]>,
  tabSizes: Ref<TabSizeMap>,
  // holderScrollWidth: Ref<number>,
): Ref<TabOffsetMap> {
  const offsetMap = ref<TabOffsetMap>(new Map());

  watchEffect(() => {
    const nextMap: TabOffsetMap = new Map();
    const tabList = tabs.value;
    const sizes = tabSizes.value;

    // Right edge of the first tab, used to derive every tab's `right`.
    const firstSize = sizes.get(tabList[0]?.key) || DEFAULT_SIZE;
    const rightEdge = firstSize.left + firstSize.width;

    tabList.forEach(({ key }, index) => {
      // Reuse the previous tab's size when this one has not been measured.
      const size = sizes.get(key) || sizes.get(tabList[index - 1]?.key) || DEFAULT_SIZE;
      const entity = (nextMap.get(key) || { ...size }) as TabOffset;
      entity.right = rightEdge - entity.left - entity.width;
      nextMap.set(key, entity);
    });

    offsetMap.value = new Map(nextMap);
  });

  return offsetMap;
}
|
<reponame>clevijoki/vapid
#pragma once
//#define WIN32_LEAN_AND_MEAN
#include <winsock2.h>
#include <Ws2tcpip.h>
#include <Windows.h>
#include <string>
inline std::string Win32ErrorToString(int ErrorCode)
{
static CHAR Message[1024];
// If this program was multithreaded, we'd want to use
// FORMAT_MESSAGE_ALLOCATE_BUFFER instead of a static buffer here.
// (And of course, free the buffer when we were done with it)
FormatMessageA(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS |
FORMAT_MESSAGE_MAX_WIDTH_MASK,
NULL, ErrorCode, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
Message, 1024, NULL);
return Message;
}
// Message text for the calling thread's last Win32 error (GetLastError).
inline std::string Win32LastErrorToString()
{
    return Win32ErrorToString(GetLastError());
}

// Message text for the calling thread's last Winsock error (WSAGetLastError).
inline std::string WSALastErrorToString()
{
    return Win32ErrorToString(WSAGetLastError());
}

// Some SDK headers define this (ntstatus.h); provide it when absent.
#ifndef STATUS_SUCCESS
#define STATUS_SUCCESS 0
#endif
// Copyright 2010 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.enterprise.secmgr.generators;
import com.google.common.collect.PeekingIterator;
/**
* A generator is an Iterable<Object> with some special properties. First, the
* {@link Generator#iterator} method can be called multiple times and is
* guaranteed to provide a new iterator each time. Second, the returned
* iterator is a {@link PeekingIterator}, which is slightly more useful than the
* standard {@link Iterator} type.
* <p>
* We say that a generator "generates" some elements, which is shorthand for
* saying that an iterator for the generator returns those elements one at a
* time. The elements returned by the iterator are called a "sequence" or
* "generated sequence", and the generator is said to "generate a sequence".
*/
public interface Generator extends Iterable<Object> {
  /**
   * Get an iterator for this generator. Each call to this method returns a new
   * iterator that starts iterating from the first object in the generator.
   *
   * @return A new iterator.
   */
  public Generator.Iterator iterator();

  /**
   * A generator's iterator type. Provides the {@link PeekingIterator#peek}
   * method.
   */
  public abstract static class Iterator implements PeekingIterator<Object> {
    /**
     * A default implementation of {@link PeekingIterator#remove} that signals
     * {@link UnsupportedOperationException}. At present, no generator provides
     * a working remove method.
     */
    public void remove() {
      // Generators are read-only views; mutation is never supported.
      throw new UnsupportedOperationException();
    }
  }
}
|
<reponame>LiuFang07/bk-cmdb
package operation
import (
"context"
"net/http"
"configcenter/src/common/metadata"
)
// SearchInstCount queries the core service for instance counts.
func (s *operation) SearchInstCount(ctx context.Context, h http.Header, data interface{}) (resp *metadata.CoreUint64Response, err error) {
	resp = new(metadata.CoreUint64Response)
	subPath := "/find/operation/inst/count"

	err = s.client.Post().
		WithContext(ctx).
		Body(data).
		SubResourcef(subPath).
		WithHeaders(h).
		Do().
		Into(resp)
	return
}

// SearchChartData fetches the data backing one operation chart, described
// by the given chart configuration.
func (s *operation) SearchChartData(ctx context.Context, h http.Header, data metadata.ChartConfig) (resp *metadata.Response, err error) {
	resp = new(metadata.Response)
	subPath := "/find/operation/chart/data"

	err = s.client.Post().
		WithContext(ctx).
		Body(data).
		SubResourcef(subPath).
		WithHeaders(h).
		Do().
		Into(resp)
	return
}

// CreateOperationChart creates a new operation chart and returns its id.
func (s *operation) CreateOperationChart(ctx context.Context, h http.Header, data interface{}) (resp *metadata.CoreUint64Response, err error) {
	resp = new(metadata.CoreUint64Response)
	subPath := "/create/operation/chart"

	err = s.client.Post().
		WithContext(ctx).
		Body(data).
		SubResourcef(subPath).
		WithHeaders(h).
		Do().
		Into(resp)
	return
}
// DeleteOperationChart removes the operation chart identified by id.
func (s *operation) DeleteOperationChart(ctx context.Context, h http.Header, id string) (resp *metadata.Response, err error) {
	resp = new(metadata.Response)
	// Leading slash added for consistency: every other subPath in this
	// client begins with "/".
	subPath := "/delete/operation/chart/%v"

	err = s.client.Delete().
		WithContext(ctx).
		Body(nil).
		SubResourcef(subPath, id).
		WithHeaders(h).
		Do().
		Into(resp)
	return
}
// SearchOperationCharts lists operation charts matching the given criteria.
func (s *operation) SearchOperationCharts(ctx context.Context, h http.Header, data interface{}) (resp *metadata.SearchChartResponse, err error) {
	resp = new(metadata.SearchChartResponse)
	subPath := "/findmany/operation/chart"

	err = s.client.Post().
		WithContext(ctx).
		Body(data).
		SubResourcef(subPath).
		WithHeaders(h).
		Do().
		Into(resp)
	return
}
// UpdateOperationChart updates an existing operation chart.
func (s *operation) UpdateOperationChart(ctx context.Context, h http.Header, data interface{}) (resp *metadata.Response, err error) {
	resp = new(metadata.Response)
	// Leading slash added for consistency: every other subPath in this
	// client begins with "/".
	subPath := "/update/operation/chart"

	err = s.client.Post().
		WithContext(ctx).
		Body(data).
		SubResourcef(subPath).
		WithHeaders(h).
		Do().
		Into(resp)
	return
}
// SearchTimerChartData fetches timer-driven chart data.
func (s *operation) SearchTimerChartData(ctx context.Context, h http.Header, data interface{}) (resp *metadata.Response, err error) {
	resp = new(metadata.Response)
	subPath := "/find/operation/timer/chart/data"

	err = s.client.Post().
		WithContext(ctx).
		Body(data).
		SubResourcef(subPath).
		WithHeaders(h).
		Do().
		Into(resp)
	return
}

// UpdateChartPosition persists a chart's position on the dashboard.
func (s *operation) UpdateChartPosition(ctx context.Context, h http.Header, data interface{}) (resp *metadata.Response, err error) {
	resp = new(metadata.Response)
	subPath := "/update/operation/chart/position"

	err = s.client.Post().
		WithContext(ctx).
		Body(data).
		SubResourcef(subPath).
		WithHeaders(h).
		Do().
		Into(resp)
	return
}

// SearchChartCommon performs the common chart search used by the dashboard.
func (s *operation) SearchChartCommon(ctx context.Context, h http.Header, data interface{}) (resp *metadata.SearchChartCommon, err error) {
	resp = new(metadata.SearchChartCommon)
	subPath := "/find/operation/chart/common"

	err = s.client.Post().
		WithContext(ctx).
		Body(data).
		SubResourcef(subPath).
		WithHeaders(h).
		Do().
		Into(resp)
	return
}
// TimerFreshData triggers the server-side chart refresh timer.
// NOTE(review): the data parameter is accepted but the request is sent with
// Body(nil) — confirm whether the payload is intentionally ignored.
func (s *operation) TimerFreshData(ctx context.Context, h http.Header, data interface{}) (resp *metadata.BoolResponse, err error) {
	resp = new(metadata.BoolResponse)
	subPath := "/start/operation/chart/timer"

	err = s.client.Post().
		WithContext(ctx).
		Body(nil).
		SubResourcef(subPath).
		WithHeaders(h).
		Do().
		Into(resp)
	return
}
|
<filename>app/models/fe/tradingpremises/TradingPremises.scala
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.fe.tradingpremises
import models.des.tradingpremises.{AgentDetails, OwnBusinessPremisesDetails, TradingPremises => DesTradingPremises}
import play.api.libs.json.Json
/**
 * Frontend model of a single trading premises record, covering both
 * agent-run and own-business premises (the agent-specific fields are only
 * populated for agent premises).
 */
case class TradingPremises(
                            registeringAgentPremises: Option[RegisteringAgentPremises] = None,
                            yourTradingPremises: YourTradingPremises,
                            businessStructure: Option[BusinessStructure] = None,
                            agentName: Option[AgentName] = None,
                            agentCompanyDetails: Option[AgentCompanyDetails] = None,
                            agentPartnership: Option[AgentPartnership] = None,
                            whatDoesYourBusinessDoAtThisAddress: WhatDoesYourBusinessDo,
                            msbServices: Option[MsbServices] = None,
                            lineId: Option[Int] = None,
                            status: Option[String] = None,
                            endDate: Option[ActivityEndDate] = None,
                            removalReason: Option[String] = None,
                            removalReasonOther: Option[String] = None
                          )
object TradingPremises {

  implicit val format = Json.format[TradingPremises]

  /**
   * Converts a DES AgentDetails record into the frontend model.
   * NOTE(review): several bare `agentDetails` arguments below rely on
   * implicit conversions to the field types (e.g. YourTradingPremises,
   * AgentName, AgentCompanyDetails) defined elsewhere — confirm those
   * conversions before refactoring.
   */
  implicit def convAgentPremises(agentDetails: AgentDetails): TradingPremises = {
    val tmp =
      TradingPremises(
        Some(RegisteringAgentPremises(true)),
        agentDetails,
        agentDetails.agentLegalEntity,
        None,
        None,
        None,
        agentDetails.agentPremises,
        agentDetails.agentPremises.msb,
        agentDetails.lineId,
        agentDetails.status,
        removalReason = agentDetails.removalReason,
        removalReasonOther = agentDetails.removalReasonOther
      )
    // Fill in the structure-specific agent field, keyed off the legal entity.
    tmp.businessStructure.map {
      case BusinessStructure.SoleProprietor => tmp.copy(agentName = agentDetails)
      case BusinessStructure.IncorporatedBody => tmp.copy(agentCompanyDetails = agentDetails)
      case BusinessStructure.LimitedLiabilityPartnership => tmp.copy(agentCompanyDetails = agentDetails)
      case BusinessStructure.Partnership => tmp.copy(agentPartnership = agentDetails.agentLegalEntityName)
      case BusinessStructure.UnincorporatedBody => tmp
    }.getOrElse(tmp)
  }

  /** Converts a DES own-business-premises record into the frontend model. */
  def convOwnPremises(ownPremises: OwnBusinessPremisesDetails): TradingPremises = {
    TradingPremises(Some(RegisteringAgentPremises(false)),
      ownPremises,
      None,
      None,
      None,
      None,
      ownPremises,
      ownPremises.msb,
      ownPremises.lineId,
      ownPremises.status
    )
  }

  /**
   * Converts the DES trading-premises container into a frontend sequence,
   * merging agent premises and own premises; yields None when both are empty.
   */
  implicit def conv(tradingPremises: DesTradingPremises): Option[Seq[TradingPremises]] = {
    val `empty` = Seq.empty[TradingPremises]
    val agentPremises: Seq[TradingPremises] = tradingPremises.agentBusinessPremises match {
      case Some(agentBusinessPremises) => agentBusinessPremises.agentDetails match {
        case Some(data) => data.map(x => convAgentPremises(x))
        case _ => Seq.empty
      }
      case None => Seq.empty
    }
    val ownPremises: Seq[TradingPremises] = tradingPremises.ownBusinessPremises match {
      case Some(ownBusinessPremises) => ownBusinessPremises.ownBusinessPremisesDetails match {
        case Some(data) => data.map(x => convOwnPremises(x))
        case _ => Seq.empty
      }
      case None => Seq.empty
    }
    val premises = Seq(agentPremises, ownPremises).flatten
    // The back-ticked `empty` pattern matches by equality with Seq.empty.
    premises match {
      case `empty` => None
      case _ => Some(premises)
    }
  }
}
|
#!/bin/sh
# CocoaPods-generated "Embed Frameworks" build phase script.
# NOTE(review): `function`, arrays, `set -o pipefail` and `trap ... ERR` are
# bash features despite the sh shebang — confirm this runs under bash.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: path (or basename) of the .framework, resolved against BUILT_PRODUCTS_DIR
# first; the copy is then arch-stripped and re-signed as needed.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the framework's executable inside the copied bundle, following a
  # symlink if the binary is one.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1: path of the .framework.dSYM bundle; stripped copies land in
# DWARF_DSYM_FOLDER_PATH via DERIVED_FILES_DIR.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 means stripped.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
# $1: path to sign. No-op unless code signing is both required and allowed
# by the build settings. With parallel signing enabled the codesign call is
# backgrounded; the caller waits at the end of the script.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# $1: binary to strip in place. Removes slices not present in $ARCHS and
# reports via STRIP_BINARY_RETVAL (1 = stripped/usable, 0 = no matching arch).
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the pod frameworks for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/phrase_swift/phrase_swift.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/phrase_swift/phrase_swift.framework"
fi
# With parallel signing, codesign jobs were backgrounded; wait for them.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
#!/usr/bin/env bash
# CI test dispatcher: the script's own basename selects which suite runs.
set -e
cd "$(dirname "$0")/.."
source ci/_

# Adds a buildkite annotation when running under Buildkite; no-op locally.
annotate() {
  ${BUILDKITE:-false} && {
    buildkite-agent annotate "$@"
  }
}

# Run the appropriate test based on entrypoint
testName=$(basename "$0" .sh)

# Skip if only the docs have been modified
ci/affects-files.sh \
  \!^docs/ \
|| {
  annotate --style info \
    "Skipped $testName as only docs/ files were modified"
  exit 0
}

source ci/rust-version.sh stable

export RUST_BACKTRACE=1
export RUSTFLAGS="-D warnings"
source scripts/ulimit-n.sh

# Clear cached json keypair files
rm -rf "$HOME/.config/solana"

# Clear the C dependency files, if dependency moves these files are not regenerated
test -d target/debug/bpf && find target/debug/bpf -name '*.d' -delete
test -d target/release/bpf && find target/release/bpf -name '*.d' -delete

# Clear the BPF sysroot files, they are not automatically rebuilt
rm -rf target/xargo # Issue #3105

# Limit compiler jobs to reduce memory usage
# on machines with 2gb/thread of memory
NPROC=$(nproc)
NPROC=$((NPROC>14 ? 14 : NPROC))

echo "Executing $testName"
case $testName in
test-stable)
_ cargo +"$rust_stable" test --jobs "$NPROC" --all --exclude solana-local-cluster ${V:+--verbose} -- --nocapture
_ cargo +"$rust_stable" test --manifest-path bench-tps/Cargo.toml --features=move ${V:+--verbose} test_bench_tps_local_cluster_move -- --nocapture
;;
test-stable-perf)
ci/affects-files.sh \
.rs$ \
Cargo.lock$ \
Cargo.toml$ \
^ci/rust-version.sh \
^ci/test-stable-perf.sh \
^ci/test-stable.sh \
^ci/test-local-cluster.sh \
^core/build.rs \
^fetch-perf-libs.sh \
^programs/ \
^sdk/ \
|| {
annotate --style info \
"Skipped $testName as no relevant files were modified"
exit 0
}
# BPF program tests
_ make -C programs/bpf/c tests
_ cargo +"$rust_stable" test \
--manifest-path programs/bpf/Cargo.toml \
--no-default-features --features=bpf_c,bpf_rust -- --nocapture
if [[ $(uname) = Linux ]]; then
# Enable persistence mode to keep the CUDA kernel driver loaded, avoiding a
# lengthy and unexpected delay the first time CUDA is involved when the driver
# is not yet loaded.
sudo --non-interactive ./net/scripts/enable-nvidia-persistence-mode.sh
rm -rf target/perf-libs
./fetch-perf-libs.sh
# Force CUDA for solana-core unit tests
export TEST_PERF_LIBS_CUDA=1
# Force CUDA in ci/localnet-sanity.sh
export SOLANA_CUDA=1
fi
_ cargo +"$rust_stable" build --bins ${V:+--verbose}
_ cargo +"$rust_stable" test --package solana-perf --package solana-ledger --package solana-core --lib ${V:+--verbose} -- --nocapture
;;
test-move)
ci/affects-files.sh \
Cargo.lock$ \
Cargo.toml$ \
^ci/rust-version.sh \
^ci/test-stable.sh \
^ci/test-move.sh \
^programs/move_loader \
^programs/librapay \
^logger/ \
^runtime/ \
^sdk/ \
|| {
annotate --style info \
"Skipped $testName as no relevant files were modified"
exit 0
}
_ cargo +"$rust_stable" test --manifest-path programs/move_loader/Cargo.toml ${V:+--verbose} -- --nocapture
_ cargo +"$rust_stable" test --manifest-path programs/librapay/Cargo.toml ${V:+--verbose} -- --nocapture
exit 0
;;
test-local-cluster)
_ cargo +"$rust_stable" build --release --bins ${V:+--verbose}
_ cargo +"$rust_stable" test --release --package solana-local-cluster ${V:+--verbose} -- --nocapture --test-threads=1
exit 0
;;
*)
echo "Error: Unknown test: $testName"
;;
esac
(
export CARGO_TOOLCHAIN=+"$rust_stable"
echo --- ci/localnet-sanity.sh
ci/localnet-sanity.sh -x
echo --- ci/run-sanity.sh
ci/run-sanity.sh -x
)
|
<reponame>ocsfwarch/gc2019
import React, { Component } from 'react';
import './OcsaMenu.css';
export class OcsaMenu extends Component {
render() {
return (
<header className="ocsa_header">
<section>
<input type="button" value="Home" onClick={this.props.handleMenuClick}></input>
<input type="button" value="Itinerary" onClick={this.props.handleMenuClick}></input>
<input type="button" value="Meals" onClick={this.props.handleMenuClick}></input>
</section>
</header>
);
}
}
export default OcsaMenu
|
FILENAME="ROXID_One.zip"
|
#!/bin/bash
# Print usage information and exit with a non-zero status.
function usage {
  echo "Usage: $0 [OPTION]";
  echo "Options:";
  echo " -s Enable silent mode";
  echo " -v Enable verbose mode";
  echo " -h Show help menu";
  exit 1;
}

# Parse command line options. -h keeps its original behavior (show help);
# -s/-v now set flags as promised by the usage text, and any unknown
# option also shows the help menu.
SILENT=0
VERBOSE=0
while getopts ":svh" opt; do
  case $opt in
    s) SILENT=1 ;;   # silent mode requested
    v) VERBOSE=1 ;;  # verbose mode requested
    h) usage ;;
    *) usage ;;      # unknown option
  esac
done
shift $((OPTIND - 1))
/**
* Copyright 2016 IBM Corp. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const assert = require('chai').assert;
const getEnvCredentials = require('./utilities').getEnvCredentials;
const Credentials = require('../lib/credentials');
const log = require('winston');
const isArray = require('underscore').isArray;
describe('credentials', () => {
  // Shared assertion: at least one credential was successfully loaded.
  const assertValid = (credentials) =>
    assert.isAtLeast(credentials.all().length, 1, 'Valid credentials present in environment variable \'$TWITTER_CREDENTIALS\'');

  it('credentials available', (done) => {
    assertValid(new Credentials(getEnvCredentials()));
    done();
  });

  it('credentials as list', (done) => {
    const env_credentials = getEnvCredentials();
    assertValid(new Credentials(isArray(env_credentials) ? env_credentials : [env_credentials]));
    done();
  });

  it('credentials as object', (done) => {
    const env_credentials = getEnvCredentials();
    assertValid(new Credentials(isArray(env_credentials) ? env_credentials[0] : env_credentials));
    done();
  });
});
|
#!/bin/sh
# Enable BuildKit for faster, cache-efficient image builds.
export DOCKER_BUILDKIT=1
# Image version tag; defaults to "dev" when no argument is given.
export VERSION=${1:-dev}
docker-compose build
|
#!/usr/bin/env bash
# Load the shared wait_log helper from the repository's scripts directory
# (strips everything from the first "samples/" path component onward).
source ${PWD%/*samples/*}/scripts/wait.sh
# Block until the application log reports startup has completed.
wait_log target/native/test-output.txt "Started SecuringWebApplication"
|
#!/bin/bash
#
# Prepare data for training and evaluating parsers. Run as:
# ./prep_depparse_data.sh TREEBANK TAG_TYPE
# where TREEBANK is the UD treebank name (e.g., UD_English-EWT) and TAG_TYPE is one of gold or predicted.
# This script assumes UDBASE and DEPPARSE_DATA_DIR are correctly set in config.sh.
source scripts/config.sh

treebank=$1; shift
tag_type=$1; shift

original_short=`bash scripts/treebank_to_shorthand.sh ud $treebank`
# Fix: derive the language code from $original_short — the script previously
# read the never-assigned variable $short, leaving $lang empty.
lang=`echo $original_short | sed -e 's#_.*##g'`

# Prefer the cross-validation (_XV) variant of the treebank when present.
if [ -d "$UDBASE/${treebank}_XV" ]; then
    src_treebank="${treebank}_XV"
    src_short="${original_short}_xv"
else
    src_treebank=$treebank
    src_short=$original_short
fi

# path of input data to dependency parser training process
train_in_file=$DEPPARSE_DATA_DIR/${original_short}.train.in.conllu
dev_in_file=$DEPPARSE_DATA_DIR/${original_short}.dev.in.conllu
dev_gold_file=$DEPPARSE_DATA_DIR/${original_short}.dev.gold.conllu

# handle languages requiring special batch size
batch_size=5000
if [ "$treebank" == 'UD_Galician-TreeGal' ]; then
    batch_size=3000
fi
echo "Using batch size $batch_size"

if [ "$tag_type" == 'gold' ]; then
    # Use the gold part-of-speech tags shipped with the treebank directly.
    train_conllu=$UDBASE/$src_treebank/${src_short}-ud-train.conllu
    dev_conllu=$UDBASE/$src_treebank/${src_short}-ud-dev.conllu # gold dev
    dev_gold_conllu=$UDBASE/$src_treebank/${src_short}-ud-dev.conllu
    cp "$train_conllu" "$train_in_file"
    cp "$dev_conllu" "$dev_in_file"
    cp "$dev_gold_conllu" "$dev_gold_file"
elif [ "$tag_type" == 'predicted' ]; then
    # build predicted tags
    # this assumes the part-of-speech tagging model has been built
    gold_train_file=$UDBASE/$src_treebank/${src_short}-ud-train.conllu
    gold_dev_file=$UDBASE/$src_treebank/${src_short}-ud-dev.conllu
    # run part-of-speech tagging on the train file
    echo '---'
    echo 'running part of speech model to generate predicted tags for train data'
    train_cmd='python -m classla.models.tagger --wordvec_dir '${WORDVEC_DIR}' --eval_file '${gold_train_file}' --gold_file '${gold_train_file}' --output_file '${train_in_file}' --lang '${original_short}' --shorthand '${original_short}' --batch_size '${batch_size}' --mode predict'
    echo ''
    echo $train_cmd
    echo ''
    eval $train_cmd
    # run part-of-speech tagging on the dev file
    echo '---'
    echo 'running part of speech model to generate predicted tags for dev data'
    dev_cmd='python -m classla.models.tagger --wordvec_dir '${WORDVEC_DIR}' --eval_file '${gold_dev_file}' --gold_file '${gold_dev_file}' --output_file '${dev_in_file}' --lang '${original_short}' --shorthand '${original_short}' --batch_size '${batch_size}' --mode predict'
    echo ''
    echo $dev_cmd
    eval $dev_cmd
    cp "$dev_in_file" "$dev_gold_file"
fi
|
<reponame>buidler-labs/hedera-mirror-node
package com.hedera.mirror.grpc.repository;
/*-
*
* Hedera Mirror Node
*
* Copyright (C) 2019 - 2022 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import static org.assertj.core.api.Assertions.assertThat;
import javax.annotation.Resource;
import org.junit.jupiter.api.Test;
import org.springframework.transaction.annotation.Transactional;
import com.hedera.mirror.common.domain.DomainBuilder;
import com.hedera.mirror.common.domain.addressbook.AddressBook;
import com.hedera.mirror.common.domain.entity.EntityId;
import com.hedera.mirror.common.domain.entity.EntityType;
import com.hedera.mirror.grpc.GrpcIntegrationTest;
class AddressBookRepositoryTest extends GrpcIntegrationTest {

    @Resource
    private AddressBookRepository addressBookRepository;

    @Resource
    private DomainBuilder domainBuilder;

    /**
     * findLatestTimestamp must consider only address books for the requested
     * file id and return the most recent start consensus timestamp.
     */
    @Test
    void findLatestTimestamp() {
        EntityId fileId = EntityId.of(101L, EntityType.FILE);
        // Nothing persisted yet for this file id
        assertThat(addressBookRepository.findLatestTimestamp(fileId.getId())).isEmpty();
        // An address book belonging to a different file id must not match
        domainBuilder.addressBook().customize(a -> a.fileId(EntityId.of(999L, EntityType.FILE))).persist();
        assertThat(addressBookRepository.findLatestTimestamp(fileId.getId())).isEmpty();
        AddressBook addressBook2 = domainBuilder.addressBook().customize(a -> a.fileId(fileId)).persist();
        assertThat(addressBookRepository.findLatestTimestamp(fileId.getId())).get()
                .isEqualTo(addressBook2.getStartConsensusTimestamp());
        // A newer address book supersedes the previous one
        AddressBook addressBook3 = domainBuilder.addressBook().customize(a -> a.fileId(fileId)).persist();
        assertThat(addressBookRepository.findLatestTimestamp(fileId.getId())).get()
                .isEqualTo(addressBook3.getStartConsensusTimestamp());
    }

    /**
     * Verifies that address book entries are not eagerly loaded when an
     * address book is fetched by id, before or after entries exist.
     */
    @Test
    @Transactional
    void cascade() {
        AddressBook addressBook = domainBuilder.addressBook().persist();
        assertThat(addressBookRepository.findById(addressBook.getStartConsensusTimestamp()))
                .get()
                .extracting(AddressBook::getEntries)
                .isNull();
        domainBuilder.addressBookEntry(1)
                .customize(a -> a.consensusTimestamp(addressBook.getStartConsensusTimestamp()))
                .persist();
        assertThat(addressBookRepository.findById(addressBook.getStartConsensusTimestamp()))
                .get()
                .extracting(AddressBook::getEntries)
                .as("Ensure entries aren't eagerly loaded")
                .isNull();
    }
}
|
/*
* Copyright 2006-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.egovframe.brte.sample.example.test;
import static org.junit.Assert.assertEquals;
import java.util.Date;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.test.JobLauncherTestUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
 * Test that runs a batch job using parallelStep.
 *
 * @author Batch Execution Development Team
 * @since 2012. 07.31
 * @version 1.0
 * @see <pre>
 *
 *  Revision History (Modification Information)
 *
 *   Date         Author                             Description
 *  -------       --------                       ---------------------------
 *  2012.07.31    Batch Execution Development Team   Initial creation
 * </pre>
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations={"/org/egovframe/batch/simple-job-launcher-context.xml"
        , "/org/egovframe/batch/jobs/parallelStep.xml"
        , "/org/egovframe/batch/job-runner-context.xml"})
public class EgovParallelStepFunctionalTests {

    // JobLauncherTestUtils used to launch the batch job under test
    @Autowired
    private JobLauncherTestUtils jobLauncherTestUtils;

    /**
     * Launches the parallel-step batch job and verifies it completes successfully.
     */
    @Test
    public void testLaunchJob() throws Exception {
        JobExecution jobExecution = jobLauncherTestUtils.launchJob(this.getUniqueJobParameters());
        assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());
        // Check the output files in the /target/test-outputs/parallelStep folder
    }

    /**
     * Builds a unique (timestamped) set of job parameters so each launch
     * creates a fresh job instance.
     * @return jobParameters
     */
    protected JobParameters getUniqueJobParameters() {
        return new JobParametersBuilder().addString("inputFile", "/org/egovframe/data/input/delimited.csv")
                .addString("outputFile1", "file:./target/test-outputs/parallelStep/delimitedOutput1.csv")
                .addString("outputFile2", "file:./target/test-outputs/parallelStep/delimitedOutput2.csv")
                .addString("outputFile3", "file:./target/test-outputs/parallelStep/delimitedOutput3.csv")
                .addString("outputFile4", "file:./target/test-outputs/parallelStep/delimitedOutput4.csv")
                .addParameter("timestamp", new JobParameter(new Date().getTime()))
                .toJobParameters();
    }
}
|
# -- coding: utf-8 --
from __future__ import division
from __future__ import print_function
from model.hyparameter import parameter
from model.encoder import Encoderlstm
from model.decoder import Dcoderlstm
from model.utils import construct_feed_dict
import pandas as pd
import tensorflow as tf
import numpy as np
import model.decoder as decoder
import matplotlib.pyplot as plt
import model.normalization as normalization
import model.process as data_load
import os
import argparse
tf.reset_default_graph()  # clear any graph left over from a previous run
os.environ["KMP_DUPLICATE_LIB_OK"]="TRUE"  # tolerate duplicate OpenMP runtimes (macOS/MKL workaround)
logs_path="board"  # TensorBoard summary directory
class Model(object):
    # Seq2seq (LSTM encoder/decoder) model for pollutant-concentration
    # forecasting, built on the TF1 placeholder/session API.
    def __init__(self,para):
        # para: hyper-parameter namespace (see model.hyparameter.parameter)
        self.para=para
        # column index of each pollutant in the source data table
        self.pollutant_id={'AQI':3, 'PM2.5':4,'PM10':5, 'SO2':6, 'NO2':7, 'O3':8, 'CO':9}
        # define placeholders
        self.placeholders = {
            # None : batch _size * time _size
            'features': tf.placeholder(tf.float32, shape=[None, self.para.input_length, self.para.features]),
            'labels': tf.placeholder(tf.float32, shape=[None, self.para.output_length]),
            'dropout': tf.placeholder_with_default(0., shape=())
        }
        self.model()

    def model(self):
        '''
        :param batch_size: 64
        :param encoder_layer:
        :param decoder_layer:
        :param encoder_nodes:
        :param prediction_size:
        :param is_training: True
        :return:
        '''
        '''
        feedforward and BN layer
        output shape:[batch, time_size,field_size,new_features]
        '''
        normal=normalization.Normalization(inputs=self.placeholders['features'],out_size=self.para.features,is_training=self.para.is_training)
        normal.normal()
        # create model
        # this step use to encoding the input series data
        '''
        rlstm, return --- for example ,output shape is :(32, 3, 128)
        axis=0: bath size
        axis=1: input data time size
        axis=2: output feature size
        '''
        encoder_init =Encoderlstm(self.para.batch_size,
                                  self.para.hidden_layer,
                                  self.para.hidden_size,
                                  placeholders=self.placeholders)
        ## [batch, time ,h]
        (h_states, c_states) = encoder_init.encoding(self.placeholders['features'])
        print('h_states shape is : ', h_states.shape)
        # Alternative encoders kept for reference:
        # encoder_init=encoder_lstm.lstm(self.para.batch_size,
        #                                self.para.hidden_layer,
        #                                self.para.hidden_size,
        #                                self.para.is_training)
        # encoder_init=encodet_gru.gru(self.x_input,batch_size,encoder_layer,encoder_nodes,is_training)
        # encoder_init=encoder_rnn.rnn(self.x_input,batch_size,encoder_layer,encoder_nodes,is_training)
        # (h_states,c_states)=encoder_init.encoding()
        # this step to presict the polutant concentration
        '''
        decoder, return --- for example ,output shape is :(32, 162, 1)
        axis=0: bath size
        axis=1: numbers of the nodes
        axis=2: label size
        '''
        decoder_init = Dcoderlstm(self.para.batch_size,
                                  self.para.output_length,
                                  self.para.hidden_layer,
                                  self.para.hidden_size,
                                  placeholders=self.placeholders)
        self.pres = decoder_init.decoding(h_states)
        print('pres shape is : ', self.pres.shape)
        # RMSE-style loss between predictions and labels
        # (the 1e-10 term is carried over from the original code)
        self.cross_entropy = tf.reduce_mean(
            tf.sqrt(tf.reduce_mean(tf.square(self.pres + 1e-10 - self.placeholders['labels']), axis=0)))
        print(self.cross_entropy)
        print('cross shape is : ',self.cross_entropy.shape)
        tf.summary.scalar('cross_entropy',self.cross_entropy)
        # backprocess and update the parameters
        self.train_op = tf.train.AdamOptimizer(self.para.learning_rate).minimize(self.cross_entropy)
        print('#...............................in the training step.....................................#')

    def accuracy(self,label,predict):
        '''
        Compute evaluation metrics (MAE, RMSE, correlation, R^2) and print them.
        :param label: represents the observed value
        :param predict: represents the predicted value
        :return: (mae, rmse, correlation, R2)
        '''
        error = label - predict
        average_error = np.mean(np.fabs(error.astype(float)))
        print("mae is : %.6f" % (average_error))
        rmse_error = np.sqrt(np.mean(np.square(label - predict)))
        print("rmse is : %.6f" % (rmse_error))
        cor = np.mean(np.multiply((label - np.mean(label)),
                                  (predict - np.mean(predict)))) / (np.std(predict) * np.std(label))
        print('correlation coefficient is: %.6f' % (cor))
        # mask = label != 0
        # mape =np.mean(np.fabs((label[mask] - predict[mask]) / label[mask]))*100.0
        # mape=np.mean(np.fabs((label - predict) / label)) * 100.0
        # print('mape is: %.6f %' % (mape))
        sse = np.sum((label - predict) ** 2)
        sst = np.sum((label - np.mean(label)) ** 2)
        R2 = 1 - sse / sst  # r2_score(y_actual, y_predicted, multioutput='raw_values')
        print('r^2 is: %.6f' % (R2))
        return average_error,rmse_error,cor,R2

    def describe(self,label,predict,prediction_size):
        '''
        Plot observed vs. predicted series.
        :param label: observed values (blue)
        :param predict: predicted values (red)
        :param prediction_size: unused here; kept for interface compatibility
        :return:
        '''
        plt.figure()
        # Label is observed value,Blue
        plt.plot(label, 'b*:', label=u'actual value')
        # Predict is predicted value,Red
        plt.plot(predict, 'r*:', label=u'predicted value')
        # use the legend
        # plt.legend()
        plt.xlabel("time(hours)", fontsize=17)
        plt.ylabel("pm$_{2.5}$ (ug/m$^3$)", fontsize=17)
        plt.title("the prediction of pm$_{2.5}", fontsize=17)
        plt.show()

    def initialize_session(self):
        # Create a TF session and a saver over all trainable variables.
        self.sess=tf.Session()
        self.saver=tf.train.Saver(var_list=tf.trainable_variables())

    def re_current(self, a, max, min):
        # Undo min-max normalisation: map each value of `a` back to the
        # original scale. (Parameters shadow the builtins max/min on purpose.)
        return [num*(max-min)+min for num in a]

    def test(self):
        '''
        Run the model over the test split, de-normalise the outputs, save
        label/prediction text files, print metrics and plot the result.
        :return: rmse of the predictions
        '''
        label_list = list()
        predict_list = list()
        #with tf.Session() as sess:
        model_file = tf.train.latest_checkpoint(self.para.save_path)
        if not self.para.is_training:
            print('the model weights has been loaded:')
            self.saver.restore(self.sess, model_file)
        self.iterate_test = data_load.DataIterator(site_id=self.para.target_site_id,
                                                   pollutant_id=self.pollutant_id[self.para.pollutant_id],
                                                   is_training=self.para.is_training,
                                                   time_size=self.para.input_length,
                                                   prediction_size=self.para.output_length,
                                                   data_divide=self.para.data_divide,
                                                   normalize=self.para.normalize)
        next_ = self.iterate_test.next_batch(batch_size=self.para.batch_size, epochs=1,is_training=False)
        # min/max of the target pollutant column, used to undo normalisation
        max,min=self.iterate_test.max_list[self.pollutant_id[self.para.pollutant_id]],self.iterate_test.min_list[self.pollutant_id[self.para.pollutant_id]]
        for i in range(int((self.iterate_test.test_data.shape[0] -(self.para.input_length+ self.para.output_length))//self.para.output_length)
                       // self.para.batch_size):
            x, label =self.sess.run(next_)
            feed_dict = construct_feed_dict(x, label, self.placeholders)
            feed_dict.update({self.placeholders['dropout']: 0.0})
            pre = self.sess.run((self.pres), feed_dict=feed_dict)
            label_list.append(label)
            predict_list.append(pre)
        label_list=np.reshape(np.array(label_list,dtype=np.float32),[-1, self.para.output_length])
        predict_list=np.reshape(np.array(predict_list,dtype=np.float32),[-1, self.para.output_length])
        if self.para.normalize:
            label_list = np.array([self.re_current(row,max,min) for row in label_list])
            predict_list = np.array([self.re_current(row,max,min) for row in predict_list])
        else:
            label_list = np.array([row for row in label_list])
            predict_list = np.array([row for row in predict_list])
        np.savetxt('results/results_label.txt',label_list,'%.3f')
        np.savetxt('results/results_predict.txt', predict_list, '%.3f')
        label_list=np.reshape(label_list,[-1])
        predict_list=np.reshape(predict_list,[-1])
        average_error, rmse_error, cor, R2= self.accuracy(label_list, predict_list)  # compute evaluation metrics
        self.describe(label_list, predict_list, self.para.output_length)  # visualize the predictions
        return rmse_error
def main(argv=None):
    '''
    Entry point: build the model in evaluation mode and run it on the
    test split.
    :param argv: unused; parameters come from argparse via `parameter`
    :return:
    '''
    print('#......................................beginning........................................#')
    para = parameter(argparse.ArgumentParser())
    para = para.get_para()
    # evaluation mode: single-sample batches, no training
    para.batch_size=1
    para.is_training = False
    pre_model = Model(para)
    pre_model.initialize_session()
    pre_model.test()
    print('#...................................finished............................................#')

if __name__ == '__main__':
    main()
#!/bin/bash
set -e
# Write a default CLI config pointing at the local Weaviate instance.
mkdir -p "$HOME/.config/semi_technologies/"
echo '{"url": "http://localhost:8080", "auth": null}' > "$HOME/.config/semi_technologies/configs.json"
# Unit tests need no running services.
python -m unittest test/unit_test.py
# Bring up the test stack, give it time to boot, then run integration tests.
# NOTE(review): with `set -e` a failing test skips the `down` at the end and
# leaves the compose stack running — confirm whether a cleanup trap is wanted.
docker-compose -f test/docker-compose.yml up -d
sleep 5
python -m unittest test/integration_test.py
python cli.py config view
docker-compose -f test/docker-compose.yml down
|
<reponame>janjakubnanista/ts-transformer-project-template<gh_stars>0
import ts from 'typescript';
/**
 * The main transformer function.
 *
 * This needs to be registered as a TypeScript "before" transform
 * in your build/test configuration.
 *
 * @param program {ts.Program} An instance of TypeScript Program
 */
export default (program: ts.Program): ts.TransformerFactory<ts.SourceFile> => {
  return (context: ts.TransformationContext) => {
    return (sourceFile: ts.SourceFile) => {
      // Here you need to apply the transformation to sourceFile
      return sourceFile;
    };
  };
};
|
<filename>examples/simple/main.go
package main
import (
"context"
"log"
"os"
"os/signal"
"strings"
"github.com/hashicorp/go-hclog"
"github.com/jimlambrt/gldap"
)
func main() {
	// turn on debug logging
	l := hclog.New(&hclog.LoggerOptions{
		Name:  "simple-bind-logger",
		Level: hclog.Debug,
	})

	// a very simple way to track authenticated connections
	// (keyed by connection id; shared with the bind/search handlers)
	authenticatedConnections := map[int]struct{}{}

	// create a new server
	s, err := gldap.NewServer(gldap.WithLogger(l), gldap.WithDisablePanicRecovery())
	if err != nil {
		log.Fatalf("unable to create server: %s", err.Error())
	}

	// create a router and add a bind handler
	r, err := gldap.NewMux()
	if err != nil {
		log.Fatalf("unable to create router: %s", err.Error())
	}
	r.Bind(bindHandler(authenticatedConnections))
	r.Search(searchHandler(authenticatedConnections), gldap.WithLabel("All Searches"))
	s.Router(r)
	go s.Run(":10389") // listen on port 10389

	// Stop the directory gracefully on interrupt (Ctrl-C).
	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
	defer stop()
	select {
	case <-ctx.Done():
		log.Printf("\nstopping directory")
		s.Stop()
	}
}
// bindHandler returns a simple-bind handler that accepts only "uid=alice"
// and records authenticated connections in the shared map.
func bindHandler(authenticatedConnections map[int]struct{}) func(*gldap.ResponseWriter, *gldap.Request) {
	return func(w *gldap.ResponseWriter, r *gldap.Request) {
		// Default to invalid credentials; overwritten on success below.
		resp := r.NewBindResponse(
			gldap.WithResponseCode(gldap.ResultInvalidCredentials),
		)
		// The response is always written, whatever path we take.
		defer func() { w.Write(resp) }()

		m, err := r.GetSimpleBindMessage()
		if err != nil {
			log.Printf("not a simple bind message: %s", err)
			return
		}
		if m.UserName != "uid=alice" {
			return
		}

		// mark connection as authenticated
		authenticatedConnections[r.ConnectionID()] = struct{}{}
		resp.SetResultCode(gldap.ResultSuccess)
		log.Println("bind success")
	}
}
// searchHandler returns a search handler that serves two hard-coded entries
// (the user "alice" and the "people" OU) to authenticated connections only.
func searchHandler(authenticatedConnections map[int]struct{}) func(w *gldap.ResponseWriter, r *gldap.Request) {
	return func(w *gldap.ResponseWriter, r *gldap.Request) {
		// Default to "no such object"; upgraded to success when a match is served.
		resp := r.NewSearchDoneResponse(gldap.WithResponseCode(gldap.ResultNoSuchObject))
		defer func() {
			w.Write(resp)
		}()
		// check if connection is authenticated
		if _, ok := authenticatedConnections[r.ConnectionID()]; !ok {
			log.Printf("connection %d is not authorized", r.ConnectionID())
			resp.SetResultCode(gldap.ResultAuthorizationDenied)
			return
		}
		m, err := r.GetSearchMessage()
		if err != nil {
			log.Printf("not a search message: %s", err)
			return
		}
		log.Printf("search base dn: %s", m.BaseDN)
		log.Printf("search scope: %d", m.Scope)
		log.Printf("search filter: %s", m.Filter)
		// Serve the hard-coded "alice" entry when the filter or base DN targets her.
		if strings.Contains(m.Filter, "uid=alice") || m.BaseDN == "uid=alice,ou=people,cn=example,dc=org" {
			entry := r.NewSearchResponseEntry(
				"uid=alice,ou=people,cn=example,dc=org",
				gldap.WithAttributes(map[string][]string{
					"objectclass": {"top", "person", "organizationalPerson", "inetOrgPerson"},
					"uid":         {"alice"},
					"cn":          {"<NAME> smith"},
					"givenname":   {"alice"},
					"sn":          {"smith"},
					"ou":          {"people"},
					"description": {"friend of Rivest, Shamir and Adleman"},
					"password":    {"{<PASSWORD>xv7sSePuTP8zN"},
				}),
			)
			entry.AddAttribute("email", []string{"<EMAIL>"})
			w.Write(entry)
			resp.SetResultCode(gldap.ResultSuccess)
		}
		// Serve the "people" organizational-unit entry for its base DN.
		if m.BaseDN == "ou=people,cn=example,dc=org" {
			entry := r.NewSearchResponseEntry(
				"ou=people,cn=example,dc=org",
				gldap.WithAttributes(map[string][]string{
					"objectclass": {"organizationalUnit"},
					"ou":          {"people"},
				}),
			)
			w.Write(entry)
			resp.SetResultCode(gldap.ResultSuccess)
		}
	}
}
|
/**
 * Fisher–Yates shuffle: randomly permutes `arr` in place.
 *
 * @param {Array} arr - array to shuffle (mutated)
 * @returns {Array} the same array instance, shuffled
 */
const shuffle = (arr) => {
  for (let i = arr.length - 1; i > 0; i -= 1) {
    // Pick a random element from the not-yet-fixed prefix [0, i]
    const j = Math.floor(Math.random() * (i + 1));
    // Swap it into position i
    [arr[i], arr[j]] = [arr[j], arr[i]];
  }
  return arr;
};
#!/bin/bash
#$ -N alldisease
#$ -S /bin/bash
#$ -cwd
#$ -j y
#$ -V
#This script allows for parallel merging of the disease datasets.
# $1 is the chunk index; each SGE task merges one chunk of every dataset.
i=$1
# Rebuild the merge list from scratch.
# NOTE(review): `rm` without -f prints an error when list$i does not exist yet
# (non-fatal, no `set -e` here) — confirm whether `rm -f` is preferred.
rm list$i
touch list$i
for mal in 58C BD CAD CD HT RA T1D T2D;do
  echo ./$mal/${mal}_$i.bed ./$mal/${mal}_$i.bim ./$mal/${mal}_$i.fam >>list$i
done
# Merge the control set (NBS) with all disease sets into one binary fileset.
plink --bfile NBS_$i --merge-list list$i --make-bed --out all_$i
|
<gh_stars>0
package main
import (
"log"
"net/http"
"github.com/sukumar-varma/mumen-rider/crawler"
)
func main() {
	addr := ":9001"
	// Health-check and crawl endpoints provided by the crawler package.
	http.HandleFunc("/ping", crawler.Ping)
	http.HandleFunc("/get-url", crawler.GetUrls)
	log.Printf("Listening on %q", addr)
	// ListenAndServe blocks; log.Fatal exits if the server fails to start.
	log.Fatal(http.ListenAndServe(addr, nil))
}
|
<gh_stars>0
module.exports = function (bot) {
bot.on('boothCycle', function (data) {
console.log('[EVENT] boothCycle ', JSON.stringify(data, null, 2));
});
}; |
#!/bin/bash
# MIT License
# Copyright (c) 2020 Patrik Persson and Linn Öström
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

# Run the full COLMAP sparse + dense reconstruction pipeline.
#   $1 dataset directory containing an images/ subfolder
#   $2 ImageReader.single_camera flag (0/1; defaults to 0, COLMAP's default)
if [ -z "$1" ]; then
  echo "usage: $0 DATASET_PATH [SINGLE_CAMERA]" >&2
  exit 1
fi
DATASET_PATH=$1

colmap feature_extractor \
  --database_path "$DATASET_PATH/database.db" \
  --image_path "$DATASET_PATH/images" \
  --ImageReader.single_camera "${2:-0}"

colmap exhaustive_matcher \
  --database_path "$DATASET_PATH/database.db"

# -p so re-runs do not fail on an existing directory
mkdir -p "$DATASET_PATH/sparse"

colmap mapper \
  --database_path "$DATASET_PATH/database.db" \
  --image_path "$DATASET_PATH/images" \
  --output_path "$DATASET_PATH/sparse"

mkdir -p "$DATASET_PATH/dense"

colmap image_undistorter \
  --image_path "$DATASET_PATH/images" \
  --input_path "$DATASET_PATH/sparse/0" \
  --output_path "$DATASET_PATH/dense" \
  --output_type COLMAP \
  --max_image_size 2000
#!/usr/bin/env bash
set -e
# NOTE(review): $VERSION is read here but not used below — confirm whether a
# later step consumes it or it can be removed.
VERSION=`cat VERSION`

# Install yq yaml parser
wget https://github.com/mikefarah/yq/releases/download/2.2.1/yq_linux_amd64
sudo chmod +x yq_linux_amd64
sudo mv yq_linux_amd64 /usr/local/bin/yq

# setup necessary for functional tests
# Modify splunk environment values (HEC endpoint/token come from CI env vars)
yq w -i .circleci/sck_values.yml global.splunk.hec.host $SPLUNK_HEC_HOST
yq w -i .circleci/sck_values.yml global.splunk.hec.token $SPLUNK_HEC_TOKEN

# Pull docker images locally
docker pull splunk/fluentd-hec:1.1.1
docker pull splunk/k8s-metrics:1.1.1
docker pull splunk/k8s-metrics-aggr:1.1.0
docker pull splunk/kube-objects:1.1.0

# Modify docker images to be used
yq w -i .circleci/sck_values.yml splunk-kubernetes-logging.image.name splunk/fluentd-hec:1.1.1
yq w -i .circleci/sck_values.yml splunk-kubernetes-metrics.image.name splunk/k8s-metrics:1.1.1
yq w -i .circleci/sck_values.yml splunk-kubernetes-metrics.imageAgg.name splunk/k8s-metrics-aggr:1.1.0
yq w -i .circleci/sck_values.yml splunk-kubernetes-objects.image.name splunk/kube-objects:1.1.0
|
<reponame>leecade/canku
/**
* User: willerce
*/
var crypto = require('crypto');
var config = require('../global.js').config;
var database = require('../global.js').database;
var moment = require('moment');
// Extensions and helpers for the moment module.
// Returns "now" shifted so it reads as wall-clock time in the given zone
// (`zone` is the UTC offset in hours).
moment.timezoneOffset = function (zone) {
  var diff = moment().zone() + (zone * 60);
  return moment().add('minutes', diff);
};
/**
* md5 hash
*
* @param str
* @returns md5 str
*/
exports.md5 = function (str) {
var md5sum = crypto.createHash('md5');
md5sum.update(str);
str = md5sum.digest('hex');
return str;
};
/**
* 加密函数
* @param str 源串
* @param secret 因子
* @returns str
*/
exports.encrypt = function (str, secret) {
var cipher = crypto.createCipher('aes192', secret);
var enc = cipher.update(str, 'utf8', 'hex');
enc += cipher.final('hex');
return enc;
};
/**
* 解密
* @param str
* @param secret
* @returns str
*/
exports.decrypt = function (str, secret) {
var decipher = crypto.createDecipher('aes192', secret);
var dec = decipher.update(str, 'hex', 'utf8');
dec += decipher.final('utf8');
return dec;
};
/**
 * Create an auth session: encrypt "name\tpassword" with the configured
 * session secret and set it as the auth cookie on the response.
 *
 * @param name user name
 * @param password user password
 * @param res express-style response used to set the cookie
 */
exports.gen_session = function (name, password, res) {
  var auth_token = this.encrypt(name + '\t' + password, config.session_secret);
  res.cookie(config.auth_cookie_name, auth_token, {
    path:'/',
    maxAge:1000 * 60 * 60 * 24 * 3
  }); // cookie valid for 3 days (the original comment incorrectly said one week)
};
exports.get_week = { '-1':'全部', '0':'星期天', '1':'星期一', '2':'星期二', '3':'星期三', '4':'星期四', '5':'星期五', '6':'星期六'};
/**
 * Current time shifted to the configured time zone (config.time_zone).
 *
 * @param format optional moment format string
 * @returns a formatted string when `format` is given, otherwise a moment object
 */
exports.getUTC8Time = function (format) {
  if (format)
    return moment.timezoneOffset(config.time_zone).format(format);
  else
    return moment.timezoneOffset(config.time_zone);
};
// Day of week (0-6) in the configured time zone.
// NOTE(review): the `format` parameter is accepted but never used — confirm
// whether it can be dropped at the call sites.
exports.getUTC8Day = function (format) {
  return new Date(this.getUTC8Time()).getDay();
};
#!/bin/bash -e
# this script is run during the image build

# clamav
# Runtime directory for the clamd socket/pid, owned by the clamav user.
mkdir -p /var/run/clamav/
chown clamav /var/run/clamav/
#sed -i "s|db.local.clamav.net|db.fr.clamav.net|g" /etc/clamav/freshclam.conf
# Socket directory inside the postfix chroot so postfix can reach clamd.
mkdir /var/spool/postfix/clamav
chown clamav /var/spool/postfix/clamav
# Keep clamd in the foreground so the container supervisor can manage it.
sed -i "s|Foreground false|Foreground true|g" /etc/clamav/clamd.conf
|
/// A team participating in the game, tracking its reserve of pieces.
struct Team {
    unused_pieces: u32,
}

impl Team {
    /// Create a team holding the given number of unused pieces.
    fn new(unused_pieces: u32) -> Team {
        Team { unused_pieces }
    }
}

/// Game state: the set of teams and whose turn it currently is.
struct Game {
    teams: Vec<Team>,
    current_team_index: usize,
}

impl Game {
    /// Create a game; play starts with the first team in `teams`.
    fn new(teams: Vec<Team>) -> Game {
        Game {
            teams,
            current_team_index: 0,
        }
    }

    /// The team whose turn it currently is.
    fn current_team(&self) -> &Team {
        &self.teams[self.current_team_index]
    }

    /// Consume one unused piece from the current team.
    /// Returns false (and changes nothing) when none remain.
    fn use_piece(&mut self) -> bool {
        let team = &mut self.teams[self.current_team_index];
        if team.unused_pieces == 0 {
            return false;
        }
        team.unused_pieces -= 1;
        true
    }

    /// Whether the current team still has at least one unused piece.
    fn unused_piece_available(&self) -> bool {
        self.current_team().unused_pieces > 0
    }
}
from tempfile import mkdtemp
from subprocess import run
from pathlib import Path
from ..read import load
from ..names import name
# MATLAB snippet run via `matlab -batch`: thickens the left and right pial
# surfaces of the given ribbon volume (hard-coded project script path).
MATLAB_TEMPLATE = """\
addpath('/Fridge/users/giovanni/projects/finger_mapping/scripts/fima/matlab')
thicken_pial('{ribbon}', 'l', [5, 5], 0.5)
thicken_pial('{ribbon}', 'r', [5, 5], 0.5)
"""
def make_pial_thick(parameters, ieeg_file):
    """Convert the FreeSurfer ribbon to NIfTI and thicken the pial surfaces.

    Writes a temporary MATLAB script into the surface output directory, runs
    it with ``matlab -batch``, and cleans up the temporary files afterwards.
    Returns silently when no FreeSurfer data exists for this recording.

    Parameters
    ----------
    parameters : dict
        analysis parameters, passed through to ``load`` / ``name``
    ieeg_file : path-like
        recording used to locate the FreeSurfer data and the output directory
    """
    try:
        fs = load('freesurfer', parameters, ieeg_file)
    except FileNotFoundError:
        # no FreeSurfer reconstruction for this subject: nothing to do
        return

    output_dir = name(parameters, 'surface_dir', ieeg_file)
    ribbon_mgz = fs.dir / 'mri' / 'ribbon.mgz'
    tmp_dir = Path(mkdtemp())
    ribbon_nii = tmp_dir / 'ribbon.nii'
    script_file = output_dir / 'temp.m'

    try:
        # check=True: fail loudly instead of silently continuing with a
        # missing/partial conversion (the return value was previously ignored)
        run([
            'mri_convert',
            ribbon_mgz,
            ribbon_nii,
            ], check=True)

        with script_file.open('w') as f:
            f.write(MATLAB_TEMPLATE.format(ribbon=str(ribbon_nii)))

        run([
            'matlab',
            '-batch',
            "temp",
            ],
            cwd=output_dir,
            check=True,
            )
    finally:
        # clean up the temp volume, its directory (previously leaked by
        # mkdtemp), and the generated MATLAB script
        if ribbon_nii.exists():
            ribbon_nii.unlink()
        tmp_dir.rmdir()
        if script_file.exists():
            script_file.unlink()
|
<filename>src/test/java/com/github/couchmove/service/ChangeLockServiceIT.java<gh_stars>10-100
package com.github.couchmove.service;
import com.github.couchmove.exception.CouchmoveException;
import com.github.couchmove.utils.BaseIT;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
 * Integration tests for {@link ChangeLockService}: acquiring, releasing and
 * force-releasing the migration lock.
 *
 * Uses JUnit 5 assertions throughout (the class previously mixed JUnit 4's
 * org.junit.Assert with JUnit 5's assertThrows).
 *
 * @author ctayeb
 * Created on 29/05/2017
 */
public class ChangeLockServiceIT extends BaseIT {

    @Test
    public void should_acquire_and_release_lock() {
        // Given a changeLockService
        ChangeLockService changeLockService = newChangeLockService();
        // When we try to acquire the lock
        Assertions.assertTrue(changeLockService.acquireLock());
        // Then we should get it
        Assertions.assertTrue(changeLockService.isLockAcquired());
        // When we release the lock
        changeLockService.releaseLock();
        // Then it should be released
        Assertions.assertFalse(changeLockService.isLockAcquired());
    }

    @NotNull
    private ChangeLockService newChangeLockService() {
        return new ChangeLockService(getBucket(), getCluster());
    }

    @Test
    public void should_not_acquire_lock_when_already_acquired() {
        // Given a first changeLockService that acquires the lock
        ChangeLockService changeLockService1 = newChangeLockService();
        changeLockService1.acquireLock();
        // When another changeLockService tries to get the lock
        ChangeLockService changeLockService2 = newChangeLockService();
        // Then it will fail
        Assertions.assertFalse(changeLockService2.acquireLock());
        Assertions.assertFalse(changeLockService2.isLockAcquired());
        // And the first service should keep the lock
        Assertions.assertTrue(changeLockService1.isLockAcquired());
    }

    @Test
    public void should_not_release_lock_acquired_by_another_process() {
        // Given a process holding the lock
        ChangeLockService changeLockService1 = newChangeLockService();
        changeLockService1.acquireLock();
        Assertions.assertTrue(changeLockService1.isLockAcquired());
        // When another process tries to release the lock
        ChangeLockService changeLockService2 = newChangeLockService();
        // Then it should fail
        assertThrows(CouchmoveException.class, changeLockService2::releaseLock);
        // When another process force-releases the lock
        changeLockService2.forceReleaseLock();
        // Then the first one should lose the lock
        Assertions.assertFalse(changeLockService1.isLockAcquired());
    }
}
|
const Koa = require('koa');
const Router = require('koa-router');
const bodyParser = require('koa-bodyparser');

const app = new Koa();
const router = new Router();

// Setup body parser so ctx.request.body is populated from JSON/form payloads
app.use(bodyParser());

// In-memory user store (demo only — contents reset on every restart)
let users = [{
  id: 1,
  name: 'John Doe'
}];
// Create a user
router.post('/users', ctx => {
  let user = ctx.request.body;
  // Derive the next id from the current maximum rather than the array length:
  // after a deletion, users.length + 1 can collide with an existing id.
  user.id = users.reduce((max, u) => Math.max(max, u.id), 0) + 1;
  users.push(user);
  ctx.status = 201;
  ctx.body = user;
});
// Retrieve a single user by numeric id; responds 404 when absent
router.get('/users/:id', ctx => {
  const match = users.find(u => u.id === +ctx.params.id);
  if (!match) {
    ctx.status = 404;
    ctx.body = {
      message: 'User not found'
    };
    return;
  }
  ctx.status = 200;
  ctx.body = match;
});
// Delete a user by id; 204 on success, 404 when the id is unknown
router.delete('/users/:id', ctx => {
  const index = users.findIndex(u => u.id === +ctx.params.id);
  if (index < 0) {
    ctx.status = 404;
    ctx.body = {
      message: 'User not found'
    };
    return;
  }
  users.splice(index, 1);
  ctx.status = 204;
});
// List all users (returns the whole in-memory store)
router.get('/users', ctx => {
  ctx.status = 200;
  ctx.body = users;
});
// Mount the routes and start the HTTP server.
app.use(router.routes());
// allowedMethods() answers OPTIONS and 405/501 for unmatched verbs
app.use(router.allowedMethods());

app.listen(3000, () => {
  console.log('Server is listening on port 3000');
});
def calculate_square(numbers):
    """Return a new list containing the square of every value in *numbers*."""
    squares = []
    for value in numbers:
        squares.append(value * value)
    return squares
// Set up the GraphQL schema: User and Post with a bidirectional relation,
// plus list and by-id queries for both types.
const { gql } = require('apollo-server');

const typeDefs = gql`
  type User {
    id: ID
    username: String
    email: String
    posts: [Post]
  }
  type Post {
    id: ID
    title: String
    content: String
    author: User
  }
  type Query {
    users: [User]
    posts: [Post]
    user(id: Int): User
    post(id: Int): Post
  }
`;
// Load the models
const User = require('./models/user');
const Post = require('./models/post');
// Resolvers
const resolvers = {
Query: {
users: () => User.findAll(),
posts: () => Post.findAll(),
user: (root, {id}) => User.findById(id),
post: (root, {id}) => Post.findById(id),
},
User: {
posts: (user) => Post.findByUser(user.id),
},
Post: {
author: (post) => User.findById(post.user_id),
},
};
// Set up Apollo server and start listening on the default port.
const { ApolloServer} = require('apollo-server');
const server = new ApolloServer({ typeDefs, resolvers });

server.listen().then(({url}) => {
  console.log(`Server ready at ${url}`);
});
import java.util.ArrayList;
import java.util.List;
// Observable class: a read-only view over a Subject that lets clients
// register/unregister observers without being able to publish events.
class Observable<T> {
    private Subject<T> subject;

    public Observable(Subject<T> subject) {
        this.subject = subject;
    }

    // Fluent accessor; returns this unchanged.
    public Observable<T> asObservable() {
        return this;
    }

    public void addObserver(Observer<T> observer) {
        subject.addObserver(observer);
    }

    public void removeObserver(Observer<T> observer) {
        subject.removeObserver(observer);
    }
}
// Subject: owns the observer list for one event stream and fans events out.
class Subject<T> {
    private final List<Observer<T>> observers = new ArrayList<>();

    public void addObserver(Observer<T> observer) {
        observers.add(observer);
    }

    public void removeObserver(Observer<T> observer) {
        observers.remove(observer);
    }

    // Deliver the event to every registered observer, in registration order.
    public void notifyObservers(T event) {
        observers.forEach(observer -> observer.onEvent(event));
    }
}
// Observer interface to be implemented by event observers (usable as a lambda).
interface Observer<T> {
    void onEvent(T event);
}
// Example event-driven system exposing two event streams (change + startup).
class EventDrivenSystem {
    private final Subject<String> changeSubject = new Subject<>();
    private final Subject<Void> startupSubject = new Subject<>();
    private boolean systemBooting = true;

    // Package-visible so Main can subscribe via these fields directly.
    // They were previously private, which made the accesses in Main
    // (system.changeEvents / system.startupEvents) fail to compile.
    final Observable<String> changeEvents;
    final Observable<Void> startupEvents;

    public EventDrivenSystem() {
        changeEvents = new Observable<>(changeSubject);
        startupEvents = new Observable<>(startupSubject);
    }

    /** Publish a change event to all registered change observers. */
    public void notifyChangeObservers(String changeEvent) {
        changeSubject.notifyObservers(changeEvent);
    }

    /** Publish a (payload-less) startup event to all startup observers. */
    public void notifyStartupObservers() {
        startupSubject.notifyObservers(null);
    }

    public void setSystemBooting(boolean systemBooting) {
        this.systemBooting = systemBooting;
    }

    public boolean isBooting() {
        return systemBooting;
    }
}
// Example usage
// NOTE(review): this relies on direct field access to system.changeEvents /
// system.startupEvents, which requires those fields to be non-private in
// EventDrivenSystem — confirm their visibility.
class Main {
    public static void main(String[] args) {
        EventDrivenSystem system = new EventDrivenSystem();

        // Register change event observer (lambda implements Observer<String>)
        system.changeEvents.addObserver(event -> {
            System.out.println("Change event: " + event);
        });

        // Register startup event observer (payload is always null)
        system.startupEvents.addObserver(event -> {
            System.out.println("Startup event");
        });

        // Notify observers of change event
        system.notifyChangeObservers("System configuration changed");

        // Notify observers of startup event
        system.notifyStartupObservers();

        // Check system booting status (still true: never cleared here)
        System.out.println("System booting: " + system.isBooting());
    }
}
<reponame>tdm1223/Algorithm
// 13717. 포켓몬 GO
// 2021.09.16
// 구현
#include <iostream>
#include <string>

using namespace std;
int main()
{
int n;
cin >> n;
string s;
int k, m;
int maxValue = -1;
string maxName = "";
int ansCnt = 0;
for (int i = 0; i < n; i++)
{
cin >> s;
cin >> k >> m;
int cnt = 0;
while (m - k >= 0)
{
m -= k;
m += 2;
cnt++;
ansCnt++;
}
if (cnt > maxValue)
{
maxName = s;
maxValue = cnt;
}
}
cout << ansCnt << endl << maxName << endl;
return 0;
}
|
#!/usr/bin/env bash
# Check if date supports %N (GNU date nanoseconds); fall back to whole
# seconds on platforms (e.g. BSD date) that don't.
if date --help 2>&1 | grep -q '%N'; then
    DATESTR="%T.%6N"
else
    DATESTR="%H:%M:%S"
fi
# Join all remaining arguments using the separator given as $1 (may be empty).
__join() { local IFS=$1; echo "$*"; }
# Write a glog-style header to stderr: <level><MMDD> <time> <pid> <file>:<line>]
# $1 is the one-letter severity; call-site info comes from `caller 1`
# (index 0 = line number, 2 = source file of the klog::* caller's caller).
__writeHeader() {
    local c=($(caller 1))
    printf "%s%s %s %7d %s:%d] " \
        "$1" "$(date +%m%d)" "$(date +$DATESTR)" $$ \
        "$(basename "${c[2]}")" "${c[0]}" 1>&2
}
# Print the bash call stack (file, function, line) for fatal errors.
# Emits nothing when there are no frames beyond this helper and its caller.
__stacktrace() {
    if (( ${#FUNCNAME[@]} > 2 )); then
        echo "Stacktrace:"
        # Start at 2 because we're calling this from klog::Fatal*
        local stack_size=${#FUNCNAME[@]}
        for (( i=2; i<stack_size; i++ )); do
            local func="${FUNCNAME[$i]}"
            # frame without a function name is the top-level script
            [[ x$func == x ]] && func=MAIN
            echo "    $(realpath "${BASH_SOURCE[$i]}") $func:${BASH_LINENO[$((i - 1))]}"
        done
    fi
}
# Info-level loggers. The message now goes to stderr (1>&2) like every other
# level — previously klog::Info wrote the message to stdout while
# __writeHeader had already written the header to stderr, splitting one log
# line across two streams.
klog::Info() {
    __writeHeader "I"
    echo -e "$(__join "" "$@")" 1>&2
}
klog::Infoln() {
    __writeHeader "I"
    echo -e "$@" 1>&2
}
klog::Infof() {
    __writeHeader "I"
    local a=("$@")
    # Quote the array slice so printf arguments containing spaces stay intact.
    echo -e "$(printf "$1" "${a[@]:1}")" 1>&2
}
# Warning-level loggers; all output to stderr.
klog::Warning() {
    __writeHeader "W"
    echo -e "$@" 1>&2
}
klog::Warningf() {
    __writeHeader "W"
    local a=("$@")
    # Quote the array slice so printf arguments containing spaces stay intact.
    echo -e "$(printf "$1" "${a[@]:1}")" 1>&2
}
klog::Warningln() {
    __writeHeader "W"
    echo -e "$@" 1>&2
}
# Error-level loggers; all output to stderr.
klog::Error() {
    __writeHeader "E"
    echo -e "$(__join "" "$@")" 1>&2
}
klog::Errorf() {
    __writeHeader "E"
    local a=("$@")
    # Quote the array slice so printf arguments containing spaces stay intact.
    echo -e "$(printf "$1" "${a[@]:1}")" 1>&2
}
klog::Errorln() {
    __writeHeader "E"
    echo -e "$@" 1>&2
}
# Fatal-level loggers: log to stderr, print a stacktrace, and exit 255.
klog::Fatal() {
    __writeHeader "F"
    echo -e "$(__join "" "$@")" 1>&2
    __stacktrace 1>&2
    exit 255
}
klog::Fatalf() {
    __writeHeader "F"
    local a=("$@")
    # Quote the array slice so printf arguments containing spaces stay intact.
    echo -e "$(printf "$1" "${a[@]:1}")" 1>&2
    __stacktrace 1>&2
    exit 255
}
klog::Fatalln() {
    __writeHeader "F"
    echo -e "$@" 1>&2
    __stacktrace 1>&2
    exit 255
}
|
#!/bin/bash
###################################################################################################
# Script Name: jamf_ea_Rosetta2Installed.sh
# By: Zack Thompson / Created: 4/5/2021
# Version: 1.2.0 / Updated: 3/21/2022 / By: ZT
#
# Description: A Jamf Extension Attribute to determine if Rosetta 2 is installed.
#
###################################################################################################
# Only Apple Silicon (arm64) Macs can have Rosetta 2; on those, a running
# `oahd` daemon is taken as the indicator that Rosetta 2 is installed.
if [[ $( /usr/bin/arch ) != "arm64" ]]; then
    echo "<result>Not Compatible</result>"
elif [[ $( /usr/bin/pgrep oahd ) ]]; then
    echo "<result>Installed</result>"
else
    echo "<result>Not Installed</result>"
fi

exit 0
// AMD module building the game's root Vuex store.
// Plugins are Vuex store plugins (subscribed via _.values below).
define((require, exports, module) => {
  const Plugin = {
    PartyLevel: require('app/plugin/party_level'),
    RepairNotice: require('app/plugin/repair_notice'),
    ForgeNotice: require('app/plugin/forge_notice'),
    HurtNotice: require('app/plugin/hurt_notice'),
    RareSwordNotice: require('app/plugin/rare_sword_notice'),
    DutyNotice: require('app/plugin/duty_notice'),
    ConquestNotice: require('app/plugin/conquest_notice'),
    SaveData: require('app/plugin/save_data'),
    EvolutionNotice: require('app/plugin/evolution_notice')
  }
  return new Vuex.Store({
    namespaced: true,
    state () {
      return {
        inBattle: false,
        secretary: 3,
        UIDs: {},
        dataLoaded: {}
      }
    },
    mutations: {
      // battle-state toggles
      inBattle (state) {
        state.inBattle = true
      },
      notInBattle (state) {
        state.inBattle = false
      },
      fatigueToV (state) {
        // Fatigue after entering practice match (no loss of fatigue)
        let swords = state.swords.serial
        for(let s in swords) {
          let sword = swords[s]
          sword.battleFatigue = Math.max(sword.vfatigue, 0)
        }
      },
      fatigueToVV (state) {
        // Fatigue after entering map (automatic -10 fatigue)
        let swords = state.swords.serial
        for(let s in swords) {
          let sword = swords[s]
          sword.battleFatigue = Math.max(sword.vfatigue - 10, 0)
        }
      },
      UId (state, payload) {
        // Record the player's UID once (first call wins).
        // NOTE(review): path_uid is destructured but never used, and the
        // commented-out per-UID state block was never enabled — confirm intent.
        let { path_uid, UID } = payload
        if (!state.uid) {
          Vue.set(state, 'uid', UID)
          console.log(state)
          /*if (!state.UIDs[UID]) {
            Vue.set(state.UIDs, UID, {inBattle:false, secretary:3, dataLoaded:{}})
          }*/
        }
      },
      loadData (state, payload) {
        // Mark a data key as loaded/unloaded (used for startup gating).
        let { key, loaded } = payload
        Vue.set(state.dataLoaded, key, loaded)
      }
    },
    // one sub-store module per game domain
    modules: {
      swords: require('./state/swords'),
      resource: require('./state/resource'),
      duty: require('./state/duty'),
      party: require('./state/party'),
      repair: require('./state/repair'),
      forge: require('./state/forge'),
      player: require('./state/player'),
      equip: require('./state/equip'),
      battle: require('./state/battle'),
      sally: require('./state/sally'),
      notice: require('./state/notice'),
      log: require('./state/log'),
      config: require('./state/config'),
      item: require('./state/item'),
      album: require('./state/album'),
      practice_enemy: require('./state/practice_enemy'),
      evolution: require('./state/evolution'),
      debug: require('./state/debug')
    },
    plugins: _.values(Plugin)
  })
})
'use strict';

/**
 * Return a random integer in the half-open range [min, max).
 * @param {number} min - inclusive lower bound
 * @param {number} max - exclusive upper bound
 * @returns {number}
 */
const getRandom = (min, max) => Math.floor(Math.random() * (max - min) + min);

/**
 * Flatten an arbitrarily nested array and join its items with a divider.
 * (Parameter renamed from the misspelled "devider".)
 * @param {Array} array - possibly nested array of printable values
 * @param {string} divider - separator placed between items
 * @returns {string}
 */
const stringify = (array, divider) => array.flat(Infinity).join(divider);

/**
 * Collect values first, supply the divider later:
 *   join('a', 'b')('-') === 'a-b'
 * @param {...*} parts - values to join
 * @returns {function(string): string}
 */
const join =
  (...parts) =>
    divider =>
      stringify([parts], divider);

module.exports.utils = {
  getRandom,
  stringify,
  join,
};
|
<filename>app/main/models/category.rb<gh_stars>0
# Volt model for a category with a single validated name field.
class Category < Volt::Model
  field :name, String

  # NOTE(review): `length: 2` is assumed to be the minimum length —
  # confirm against Volt's length validator semantics.
  validate :name, length: 2
  validate :name, presence: true
end
|
# Block until the database accepts TCP connections, then run migrations and
# start the dev server. (Removed a stray trailing `!` that made the ready
# message print as "Database ready!!".)
/bin/sh -c "
while ! nc -z $DB_HOST $DB_PORT;
do
  echo 'Waiting database';
  sleep 1;
done;
echo 'Database ready!';
"
npm run migrate
npm run dev
#!/bin/bash
# Copyright 2021 Lin Wang

# This code is part of the Advanced Computer Networks course at Vrije
# Universiteit Amsterdam.

# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at

#   http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# NOTE: the shebang must be the very first line of the file to take effect;
# it previously appeared after the license block, where it was inert.

# Make the local Mininet checkout importable, then run the fat-tree topology
# as root (Mininet requires root), preserving PYTHONPATH across sudo.
export PYTHONPATH="$PYTHONPATH:$HOME/mininet"
sudo --preserve-env=PYTHONPATH python3 ./fat-tree.py
|
<reponame>WlodzimierzKorza/small_eod<gh_stars>10-100
import factory.fuzzy
from factory.django import DjangoModelFactory
from ..cases.factories import CaseFactory
from ..generic.factories import AbstractTimestampUserFactory
from .models import Note
class NoteFactory(AbstractTimestampUserFactory, DjangoModelFactory):
    """Factory producing Note instances attached to a freshly created Case."""

    case = factory.SubFactory(CaseFactory)
    # deterministic, unique comment per instance: comment-0000, comment-0001, ...
    comment = factory.Sequence(lambda n: "comment-%04d" % n)

    class Meta:
        model = Note
|
#!/bin/bash
# Generate the runtime configuration, then launch the Legend Engine server.
/scripts/gen-config.sh
# -XX:+ExitOnOutOfMemoryError: die (so the container restarts) instead of
# limping on after an OOM; MaxRAMPercentage sizes the heap from cgroup RAM.
# NOTE(review): the -cp relies on the shell expanding /app/bin/*-shaded.jar
# to exactly one jar — confirm a single shaded jar is present in the image.
java -XX:+ExitOnOutOfMemoryError -Xss4M -XX:MaxRAMPercentage=60 -Dfile.encoding=UTF8 -cp /app/bin/*-shaded.jar org.finos.legend.engine.server.Server server /config/dev-config.json
|
<reponame>eSCT/oppfin
/*******************************************************************************
* Copyright Searchbox - http://www.searchbox.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.searchbox.core.ref;
import java.text.Normalizer;
import java.text.Normalizer.Form;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * String helpers: slug generation, hit-template field extraction, and a
 * bidirectional mapping between common Java types and short type slugs.
 */
public class StringUtils {

    private static final Pattern NONLATIN = Pattern.compile("[^\\w-]");
    private static final Pattern WHITESPACE = Pattern.compile("[\\s]");

    // Patterns used by extractHitFields, compiled once instead of per call.
    private static final Pattern FIELD_VALUE_EXPR = Pattern
            .compile("\\{hit.fieldValues\\['([^\\s^\\^\\'}]+)'\\]\\}");
    private static final Pattern FIELD_ATTRIBUTE = Pattern
            .compile("field=\"([^\\s^\\^\\'}]+)\"");

    /**
     * Turns free text into a URL-friendly slug: whitespace becomes '-', the
     * text is NFD-normalized, every remaining character outside [A-Za-z0-9_-]
     * (including decomposed accents) is dropped, and the result is lowercased.
     */
    public static String toSlug(String input) {
        String nowhitespace = WHITESPACE.matcher(input).replaceAll("-");
        String normalized = Normalizer.normalize(nowhitespace, Form.NFD);
        String slug = NONLATIN.matcher(normalized).replaceAll("");
        return slug.toLowerCase(Locale.ENGLISH);
    }

    /**
     * Collects the field names referenced by a hit template, both as
     * {hit.fieldValues['name']} expressions and as field="name" attributes.
     *
     * @return sorted, de-duplicated set of field names (TreeSet)
     */
    public static Set<String> extractHitFields(String template) {
        Set<String> fields = new TreeSet<String>();
        Matcher matcher = FIELD_VALUE_EXPR.matcher(template);
        while (matcher.find()) {
            fields.add(matcher.group(1));
        }
        matcher = FIELD_ATTRIBUTE.matcher(template);
        while (matcher.find()) {
            fields.add(matcher.group(1));
        }
        return fields;
    }

    // Bidirectional mapping between a handful of Java types and short slugs
    // ("s" = String, "i" = Integer, "d" = Double, "dt" = Date, ...).
    private static Map<Class<?>, String> classToSlug = new HashMap<Class<?>, String>();
    private static Map<String, Class<?>> slugToClass = new HashMap<String, Class<?>>();

    static {
        classToSlug.put(String.class, "s");
        classToSlug.put(Integer.class, "i");
        classToSlug.put(Double.class, "d");
        classToSlug.put(Date.class, "dt");
        classToSlug.put(Float.class, "f");
        classToSlug.put(Boolean.class, "b");
        // build the inverse map once, from the forward map
        for (Entry<Class<?>, String> entry : classToSlug.entrySet()) {
            slugToClass.put(entry.getValue(), entry.getKey());
        }
    }

    /** @return the short slug for the given class, or null when unmapped. */
    public static String ClassToSlug(Class<?> clazz) {
        return classToSlug.get(clazz);
    }

    /** @return the class for the given short slug, or null when unmapped. */
    public static Class<?> SlugToClass(String slug) {
        return slugToClass.get(slug);
    }
}
|
#!/usr/bin/env bash
# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here
# will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent
# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also
# benefit from the improvement.
# Fail fast and loudly: trace commands, abort on errors/unset vars/pipe failures.
set -xeuo pipefail

export PYTHONUNBUFFERED=1
export FEEDSTOCK_ROOT=/home/conda/staged-recipes
export RECIPE_ROOT=/home/conda/recipe_root
export CI_SUPPORT=/home/conda/staged-recipes/.ci_support
export CONFIG_FILE="${CI_SUPPORT}/${CONFIG}.yaml"

# Point conda-build's output at the shared build_artifacts directory.
cat >~/.condarc <<CONDARC
conda-build:
  root-dir: /home/conda/staged-recipes/build_artifacts
CONDARC

# Copy the host recipes folder so we don't ever muck with it
cp -r /home/conda/staged-recipes/recipes ~/conda-recipes
cp -r /home/conda/staged-recipes/.ci_support ~/.ci_support

# Find the recipes from master in this PR and remove them.
echo "Finding recipes merged in master and removing them from the build."
pushd /home/conda/staged-recipes/recipes > /dev/null
git fetch origin +master:master
git ls-tree --name-only master -- . | xargs -I {} sh -c "rm -rf ~/conda-recipes/{} && echo Removing recipe: {}"
popd > /dev/null

conda install --yes --quiet conda-forge-ci-setup=2 conda-build>=3.16 networkx -c conda-forge

# set up the condarc
setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"

# Make sure build_artifacts is a valid channel
mkdir -p /home/conda/staged-recipes/build_artifacts
conda index /home/conda/staged-recipes/build_artifacts

# yum installs anything from a "yum_requirements.txt" file that isn't a blank line or comment.
find ~/conda-recipes -mindepth 2 -maxdepth 2 -type f -name "yum_requirements.txt" \
    | xargs -n1 cat | { grep -v -e "^#" -e "^$" || test $? == 1; } | \
    xargs -r /usr/bin/sudo -n yum install -y

python ~/.ci_support/build_all.py ~/conda-recipes

# marker file signalling a successful build for this CONFIG
touch "/home/conda/staged-recipes/build_artifacts/conda-forge-build-done-${CONFIG}"
const userFixtures = require('../../fixtures/User.js');

// Unit tests for the sails User model: fixture loading, JSON serialization
// of sensitive fields, fullName helper, and password comparison.
// Relies on globals set up by the test bootstrap: sails, should, chai.
describe('User Model', function() {
  before(function() {
    // sanity checks
    should.exist(sails);
    sails.models.should.have.property('user');
  });

  it('.find() should return all user fixtures', async function() {
    const foundUsers = await sails.models.user.find({});
    foundUsers.should.be.an('array');
    foundUsers.should.have.lengthOf(userFixtures.length);
    // make sure the passwords aren't being stored in plain-text
    for (let i = 0; i < foundUsers.length; ++i) {
      foundUsers[i].id.should.be.a.uuid();
      foundUsers[i].should.have.property('password');
      foundUsers[i].password.substr(0, 8).should.equal('<PASSWORD>'); // "scrypt" in base 64
    }
  });

  it('.toJSON() should not expose sensitive information to the outside world', async function() {
    const foundUsers = await sails.models.user.find({}).limit(1);
    foundUsers.should.be.an('array');
    // make sure password is never exposed to the outside world
    const userJson = foundUsers[0].toJSON();
    userJson.should.not.have.property('password');
    userJson.should.not.have.property('verificationKey');
  });

  it('.fullName() should return given user\'s full name', async function() {
    const foundUser = await sails.models.user.find({}).limit(1);
    const fullName = sails.models.user.fullName(foundUser[0]);
    fullName.should.eq(foundUser[0].firstName + ' ' + foundUser[0].lastName);
  });

  it('.doPasswordsMatch() should compare passwords correctly', async function() {
    const foundUser = await sails.models.user.findOne({email: userFixtures[0].email});
    foundUser.password.substr(0, 8).should.equal('<PASSWORD>'); // "scrypt" in base 64
    userFixtures[0].password.should.not.equal(foundUser.password);
    const isAMatch = await sails.models.user.doPasswordsMatch(userFixtures[0].password, foundUser.password);
    isAMatch.should.be.a('boolean');
    isAMatch.should.eq(true);
    // calling without the stored hash must reject; the spy proves the
    // catch branch actually ran
    const cbTest = chai.spy();
    try {
      await sails.models.user.doPasswordsMatch(userFixtures[0].password); // should throw error
    } catch (e) {
      e.should.be.an('error');
      cbTest();
    }
    cbTest.should.have.been.called();
  });
});
|
const inputText = "This is a sample text and contains the word sample.";
const keywords = ["This", "word"];

const matches = [];

// Search for every occurrence of each keyword in the input text.
// Each keyword scans from position 0 — the previous version carried a shared
// `pos` across keywords, so a later keyword's matches occurring before the
// previous keyword's last match were silently skipped.
for (let i = 0; i < keywords.length; i++) {
  const keyword = keywords[i];
  let keywordIndex = inputText.indexOf(keyword, 0);
  while (keywordIndex > -1) {
    matches.push({
      keyword: keyword,
      start: keywordIndex,
      end: keywordIndex + keyword.length
    });
    // continue searching right after the current match
    keywordIndex = inputText.indexOf(keyword, keywordIndex + 1);
  }
}

console.log(matches);
// [
//   { keyword: 'This', start: 0, end: 4 },
//   { keyword: 'word', start: 39, end: 43 }
// ]
package de.lmu.cis.ocrd.ml.features;
import com.google.gson.Gson;
import de.lmu.cis.ocrd.ml.OCRToken;
import org.pmw.tinylog.Logger;
import javax.annotation.Nonnull;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.StringJoiner;
/**
 * Ordered collection of {@link Feature}s that can be evaluated against an
 * {@link OCRToken} to produce a flat feature vector.
 *
 * NOTE(review): calculateFeatureVector reuses one shared Vector instance
 * (cleared on each call), so the returned vector is overwritten by the next
 * call and concurrent use is unsafe — confirm callers copy it if retained.
 */
public class FeatureSet implements Iterable<Feature>, Serializable {
    private static final long serialVersionUID = -4802453739549010404L;
    private final List<Feature> features = new ArrayList<>();
    // shared accumulator reused across calculateFeatureVector calls
    private final Vector vector = new Vector(10);

    public Feature get(int i) {
        return features.get(i);
    }

    /** Appends a feature; returns this for chaining. */
    public FeatureSet add(Feature feature) {
        this.features.add(feature);
        return this;
    }

    /**
     * Evaluates every feature against the token, once per OCR index (0..n-1)
     * that the feature declares it handles, appending values in feature order.
     *
     * @param token token to evaluate
     * @param n number of OCR engines/results considered
     * @return the (shared) vector of calculated feature values
     */
    public Vector calculateFeatureVector(OCRToken token, int n) {
        vector.clear();
        int j = 0;
        Logger.debug("features for token: {}", token.toString());
        for (Feature feature : this.features) {
            for (int i = 0; i < n; i++) {
                if (!feature.handlesOCR(i, n)) {
                    continue;
                }
                final Object val = feature.calculate(token, i, n);
                Logger.debug(" * value for feature {}: {}", feature.getName(), val.toString());
                vector.add(j++, val);
            }
        }
        return vector;
    }

    public int size() {
        return this.features.size();
    }

    @Override
    @Nonnull
    public Iterator<Feature> iterator() {
        return this.features.iterator();
    }

    /** Feature-value list with CSV and JSON serialization helpers. */
    public static class Vector extends ArrayList<Object> {
        private static final long serialVersionUID = 4013744915440870424L;

        Vector(int n) {
            super(n);
        }

        /** Comma-joined toString of each value (CSV line, no escaping). */
        @Override
        public String toString() {
            StringJoiner sj = new StringJoiner(",");
            for (Object feature : this) {
                sj.add(feature.toString());
            }
            return sj.toString();
        }

        public void writeCSVLine(PrintWriter w) {
            w.println(this.toString());
        }

        public String toJSON() {
            return new Gson().toJson(this);
        }
    }
}
|
#!/bin/sh
# prepare s3dis
# cd data_utils
# python collect_indoor3d_data.py

# train, Note: if specified a log_dir, then it will restore the check pt if existed
# (area 5 is held out for testing, per the standard S3DIS split)
python train_semseg.py \
  --model pointnet2_sem_seg \
  --test_area 5 \
  --log_dir pointnet2_sem_seg

# test on the held-out area, dumping visualization output (--visual)
python test_semseg.py \
  --log_dir pointnet2_sem_seg \
  --test_area 5 \
  --visual
|
#include <algorithm>
#include <cstdint>
#include <vector>
// Define the SamplerState struct (placeholder — properties not yet defined)
struct SamplerState {
    // Define the properties of the sampler state
    // ...
};
// Define the ns namespace with the Range struct.
// NOTE(review): whether `end` is inclusive or exclusive is not established
// anywhere in this file — document it once the setters are implemented.
namespace ns {
    struct Range {
        uint32_t start;
        uint32_t end;
    };
}
// Skeleton renderer API for managing sampler states and threadgroup memory.
// All methods are unimplemented stubs.
class GraphicsRenderer {
public:
    // Set the sampler state for a specific binding index.
    void setSamplerState(const SamplerState& sampler, uint32_t index) {
        // Implement setting the sampler state for a specific index
        // ...
    }

    // Set sampler states for a range of binding indices; `samplers` is
    // expected to provide one entry per index in `range`.
    void setSamplerStates(const SamplerState* samplers, const ns::Range& range) {
        // Implement setting the sampler states for a range of indices
        // ...
    }

    // Set a sampler state with LOD min/max clamping for a specific index.
    void setSamplerState(const SamplerState& sampler, float lodMinClamp, float lodMaxClamp, uint32_t index) {
        // Implement setting the sampler state with LOD clamping for a specific index
        // ...
    }

    // Range variant with per-index LOD clamp arrays (parallel to `samplers`).
    void setSamplerStates(const SamplerState* samplers, const float* lodMinClamps, const float* lodMaxClamps, const ns::Range& range) {
        // Implement setting the sampler states with LOD clamping for a range of indices
        // ...
    }

    // Set the threadgroup memory length (bytes assumed — confirm) for an index.
    void setThreadgroupMemory(uint32_t length, uint32_t index) {
        // Implement setting the threadgroup memory length for a specific index
        // ...
    }
};
int main() {
    // Create an instance of GraphicsRenderer and use its methods to manage
    // sampler states and threadgroup memory (all calls are no-op stubs).
    GraphicsRenderer renderer;

    // Example usage
    SamplerState sampler1, sampler2;
    ns::Range range{0, 5};
    float lodMinClamp = 0.0f, lodMaxClamp = 1.0f;

    // NOTE(review): the range call passes a single sampler for 5 indices —
    // clarify the expected contract when setSamplerStates is implemented.
    renderer.setSamplerState(sampler1, 0);
    renderer.setSamplerStates(&sampler1, range);
    renderer.setSamplerState(sampler2, lodMinClamp, lodMaxClamp, 1);
    renderer.setThreadgroupMemory(1024, 0);

    return 0;
}
/**
 * Fixed-capacity int stack backed by an array.
 *
 * Error handling follows this class's existing convention: overflow and
 * underflow print an error instead of throwing, and pop()/peek() return -1
 * as a sentinel on an empty stack. Callers for whom -1 is a legal element
 * must check isEmpty() first.
 */
class Stack {
    private final int MAX_CAPACITY = 1000;
    private int[] array;
    private int top; // index of the current top element; -1 when empty

    public Stack() {
        array = new int[MAX_CAPACITY];
        top = -1;
    }

    /** Push an element to the stack; prints an error when full. */
    public void push(int x) {
        if (top == MAX_CAPACITY - 1) {
            System.out.println("Error: stack overflow");
            return;
        }
        array[++top] = x;
    }

    /** Pop and return the top element, or -1 (with an error message) when empty. */
    public int pop() {
        if (top == -1) {
            System.out.println("Error: No element in the stack");
            return -1;
        }
        return array[top--];
    }

    /**
     * Return the top element without removing it.
     * Previously indexed array[-1] on an empty stack and threw
     * ArrayIndexOutOfBoundsException; now it follows pop()'s convention.
     */
    public int peek() {
        if (top == -1) {
            System.out.println("Error: No element in the stack");
            return -1;
        }
        return array[top];
    }

    /** @return true when the stack holds no elements. */
    public boolean isEmpty() {
        return (top == -1);
    }
}
#!/usr/bin/env bash
# Installer for mpv-discordRPC: downloads and installs the discord-rpc
# library, helper scripts, and the main lua script into the user's mpv
# config directories. Downloads are skipped when the files already exist.
NAME="mpv-discordRPC"
DIRNAME=$(dirname "$0")
SCRIPTS_DIR=${HOME}/.config/mpv/scripts
SCRIPT_OPTS_DIR=${HOME}/.config/mpv/script-opts
LIBRARY_DIR=/usr/local/lib

# create target directories if missing (library dir needs root)
if [ ! -d "${SCRIPTS_DIR}/mpv-discordRPC" ] ; then
    mkdir -p "${SCRIPTS_DIR}/mpv-discordRPC"
fi
if [ ! -d "${SCRIPT_OPTS_DIR}" ] ; then
    mkdir -p "${SCRIPT_OPTS_DIR}"
fi
if [ ! -d "${LIBRARY_DIR}" ] ; then
    sudo mkdir -p "${LIBRARY_DIR}"
fi

cd "${DIRNAME}"

echo "[${NAME}] installing dependency"

# dependency 1: the native discord-rpc shared library
echo "[${NAME}] ├── discord-rpc"
if [ ! -f ./discord-rpc-linux.zip ]; then
    echo "[${NAME}] │   ├── downloading 'discord-rpc-linux.zip'"
    wget -q -c "https://github.com/discordapp/discord-rpc/releases/download/v3.4.0/discord-rpc-linux.zip"
fi
echo "[${NAME}] │   ├── extracting 'discord-rpc-linux.zip'"
unzip -q discord-rpc-linux.zip
echo "[${NAME}] │   └── installing 'libdiscord-rpc.so'"
sudo cp ./discord-rpc/linux-dynamic/lib/libdiscord-rpc.so "${LIBRARY_DIR}"
rm -rf ./discord-rpc

# dependency 2: lua bindings for discord-rpc
echo "[${NAME}] ├── lua-discordRPC"
if [ ! -f ./mpv-discordRPC/lua-discordRPC.lua ]; then
    echo "[${NAME}] │   ├── downloading 'lua-discordRPC.lua'"
    wget -q -c -O "mpv-discordRPC/lua-discordRPC.lua" "https://github.com/pfirsich/lua-discordRPC/raw/master/discordRPC.lua"
fi
echo "[${NAME}] │   └── installing 'lua-discordRPC.lua'"
cp ./mpv-discordRPC/lua-discordRPC.lua "${SCRIPTS_DIR}/mpv-discordRPC"

# dependency 3: python pypresence package + helper script
echo "[${NAME}] ├── pypresence"
echo "[${NAME}] │   ├── checking 'pypresence' python package"
if [[ $(pip3 list | grep pypresence) ]]; then
    echo "[${NAME}] │   │   └── 'pypresence' has been installed"
else
    echo "[${NAME}] │   │   └── installing 'pypresence'"
    pip3 install pypresence
fi
if [ ! -f ./mpv-discordRPC/python-pypresence.py ]; then
    echo "[${NAME}] │   ├── downloading 'python-pypresence.py'"
    wget -q -c -O "mpv-discordRPC/python-pypresence.py" "https://github.com/cniw/mpv-discordRPC/raw/master/mpv-discordRPC/python-pypresence.py"
fi
echo "[${NAME}] │   └── installing 'python-pypresence.py'"
cp ./mpv-discordRPC/python-pypresence.py "${SCRIPTS_DIR}/mpv-discordRPC"

# dependency 4: mpv's bundled status-line script
echo "[${NAME}] └── status-line"
if [ ! -f ./status-line.lua ]; then
    echo "[${NAME}]     ├── downloading 'status-line.lua'"
    wget -q -c "https://github.com/mpv-player/mpv/raw/master/TOOLS/lua/status-line.lua"
fi
echo "[${NAME}]     └── installing 'status-line.lua'"
cp ./status-line.lua "${SCRIPTS_DIR}"

# main script + configuration
echo "[${NAME}] installing main script"
if [ ! -f ./mpv_discordRPC.conf ]; then
    echo "[${NAME}] ├── downloading 'mpv_discordRPC.conf'"
    wget -q -c "https://github.com/cniw/mpv-discordRPC/raw/master/mpv_discordRPC.conf"
fi
if [ ! -f ./mpv-discordRPC/catalogs.lua ]; then
    echo "[${NAME}] ├── downloading 'catalogs.lua'"
    wget -q -c -O "mpv-discordRPC/catalogs.lua" "https://github.com/cniw/mpv-discordRPC/raw/master/mpv-discordRPC/catalogs.lua"
fi
if [ ! -f ./mpv-discordRPC/main.lua ]; then
    echo "[${NAME}] ├── downloading 'main.lua'"
    wget -q -c "https://github.com/cniw/mpv-discordRPC/raw/master/mpv-discordRPC/main.lua"
fi
echo "[${NAME}] ├── installing 'mpv_discordRPC.conf'"
cp ./mpv_discordRPC.conf "${SCRIPT_OPTS_DIR}"
echo "[${NAME}] ├── installing 'catalogs.lua'"
cp ./mpv-discordRPC/catalogs.lua "${SCRIPTS_DIR}/mpv-discordRPC"
echo "[${NAME}] └── installing 'main.lua'"
cp ./mpv-discordRPC/main.lua "${SCRIPTS_DIR}/mpv-discordRPC"

# register the library dir with the dynamic linker
echo "[${NAME}] updating library path"
sudo sh -c 'echo '"${LIBRARY_DIR}"' > /etc/ld.so.conf.d/'"${NAME}"'.conf'
sudo ldconfig

echo -e "\n[discordapp] wachidadinugroho#7674: All done. Good Luck and have a nice day.\n"
|
#!/bin/sh
# This script sends the startup message and then just hangs - e.g. no updates
# The outer loop's single echo is piped (with -l) into mosquitto_pub, which
# publishes one message per input line; the inner sleep loop then keeps the
# connection open forever so the broker only fires the configured "will"
# message when this process dies.
hostname=$(hostname)
while(:)
do echo "$hostname connected"
   while true
   do
       sleep 86400;
   done;
done \
    |mosquitto_pub -t TEST \
                   -h localhost \
                   --will-payload "$hostname now gone" \
                   --will-topic "TEST/will" \
                   -l
<filename>lang/py/pylib/code/re/re_groups_named.py
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2010 <NAME>. All rights reserved.
#
# NOTE: this sample uses Python 2 print statements and will not run under
# Python 3 unmodified.
"""Naming pattern groups
"""
#end_pymotw_header

import re

text = 'This is some text -- with punctuation.'

print text
print

# For each pattern: search once, then show both the positional groups()
# tuple and the name->value groupdict() produced by the (?P<name>...) groups.
for pattern in [ r'^(?P<first_word>\w+)',
                 r'(?P<last_word>\w+)\S*$',
                 r'(?P<t_word>\bt\w+)\W+(?P<other_word>\w+)',
                 r'(?P<ends_with_t>\w+t)\b',
                 ]:
    regex = re.compile(pattern)
    match = regex.search(text)
    print 'Matching "%s"' % pattern
    print '  ', match.groups()
    print '  ', match.groupdict()
    print
|
#!/bin/bash
# This file is meant to be included by the parent cppbuild.sh script
# When run directly (PLATFORM unset), re-invoke the parent for "openblas".
if [[ -z "$PLATFORM" ]]; then
    pushd ..
    bash cppbuild.sh "$@" openblas
    popd
    exit
fi

OPENBLAS_VERSION=0.3.9
download https://github.com/xianyi/OpenBLAS/archive/v$OPENBLAS_VERSION.tar.gz OpenBLAS-$OPENBLAS_VERSION.tar.gz

mkdir -p $PLATFORM
cd $PLATFORM
# a second, LAPACK-free source tree is built alongside the full one
mkdir -p include lib bin OpenBLAS-$OPENBLAS_VERSION-nolapack
INSTALL_PATH=`pwd`

echo "Decompressing archives..."
tar --totals -xzf ../OpenBLAS-$OPENBLAS_VERSION.tar.gz
tar --totals -xzf ../OpenBLAS-$OPENBLAS_VERSION.tar.gz --strip-components=1 -C OpenBLAS-$OPENBLAS_VERSION-nolapack/

cd OpenBLAS-$OPENBLAS_VERSION
cp lapack-netlib/LAPACKE/include/*.h ../include

# remove broken cross-compiler workaround on Mac
sedinplace '/if (($os eq "Darwin")/,/}/d' c_check ../OpenBLAS-$OPENBLAS_VERSION-nolapack/c_check

# blas (requires fortran, e.g. sudo yum install gcc-gfortran)
export FEXTRALIB="-lgfortran"
export CROSS_SUFFIX=
export HOSTCC=gcc
export NO_LAPACK=0
export NUM_THREADS=64
export NO_AFFINITY=1
case $PLATFORM in
android-arm)
patch -Np1 < ../../../OpenBLAS-android.patch
patch -Np1 -d ../OpenBLAS-$OPENBLAS_VERSION-nolapack/ < ../../../OpenBLAS-android.patch
export CC="$ANDROID_CC $ANDROID_FLAGS"
export FC="$ANDROID_PREFIX-gfortran $ANDROID_FLAGS"
export CROSS_SUFFIX="$ANDROID_PREFIX-"
export LDFLAGS="-ldl -lm -lc"
if [[ ! -x "$ANDROID_PREFIX-gfortran" ]]; then
export NO_LAPACK=1
export NOFORTRAN=1
fi
export BINARY=32
export TARGET=ARMV5 # to disable hard-float functions unsupported by Android
export ARM_SOFTFP_ABI=1
sedinplace 's/-march=armv5/-march=armv7-a -mfloat-abi=softfp -mfpu=vfpv3-d16/' Makefile.arm ../OpenBLAS-$OPENBLAS_VERSION-nolapack/Makefile.arm
;;
android-arm64)
patch -Np1 < ../../../OpenBLAS-android.patch
patch -Np1 -d ../OpenBLAS-$OPENBLAS_VERSION-nolapack/ < ../../../OpenBLAS-android.patch
export CC="$ANDROID_CC $ANDROID_FLAGS"
export FC="$ANDROID_PREFIX-gfortran $ANDROID_FLAGS"
export CROSS_SUFFIX="$ANDROID_PREFIX-"
export LDFLAGS="-ldl -lm -lc"
if [[ ! -x "$ANDROID_PREFIX-gfortran" ]]; then
export NO_LAPACK=1
export NOFORTRAN=1
fi
export BINARY=64
export TARGET=ARMV8
;;
android-x86)
patch -Np1 < ../../../OpenBLAS-android.patch
patch -Np1 -d ../OpenBLAS-$OPENBLAS_VERSION-nolapack/ < ../../../OpenBLAS-android.patch
export CC="$ANDROID_CC $ANDROID_FLAGS"
export FC="$ANDROID_PREFIX-gfortran $ANDROID_FLAGS"
export CROSS_SUFFIX="$ANDROID_PREFIX-"
export LDFLAGS="-ldl -lm -lc"
if [[ ! -x "$ANDROID_PREFIX-gfortran" ]]; then
export NO_LAPACK=1
export NOFORTRAN=1
fi
export BINARY=32
export TARGET=ATOM
;;
android-x86_64)
patch -Np1 < ../../../OpenBLAS-android.patch
patch -Np1 -d ../OpenBLAS-$OPENBLAS_VERSION-nolapack/ < ../../../OpenBLAS-android.patch
export CC="$ANDROID_CC $ANDROID_FLAGS"
export FC="$ANDROID_PREFIX-gfortran $ANDROID_FLAGS"
export CROSS_SUFFIX="$ANDROID_PREFIX-"
export LDFLAGS="-ldl -lm -lc"
if [[ ! -x "$ANDROID_PREFIX-gfortran" ]]; then
export NO_LAPACK=1
export NOFORTRAN=1
fi
export BINARY=64
export TARGET=ATOM
;;
ios-arm)
patch -Np1 < ../../../OpenBLAS-ios.patch
patch -Np1 -d ../OpenBLAS-$OPENBLAS_VERSION-nolapack/ < ../../../OpenBLAS-ios.patch
export CC="$(xcrun --sdk iphoneos --find clang) -isysroot $(xcrun --sdk iphoneos --show-sdk-path) -arch armv7 -miphoneos-version-min=5.0"
export FC=
export NO_LAPACK=1
export NOFORTRAN=1
export BINARY=32
export TARGET=ARMV5 # to disable unsupported assembler from iOS SDK: use Accelerate to optimize
export NO_SHARED=1
;;
ios-arm64)
patch -Np1 < ../../../OpenBLAS-ios.patch
patch -Np1 -d ../OpenBLAS-$OPENBLAS_VERSION-nolapack/ < ../../../OpenBLAS-ios.patch
# use generic kernels as Xcode assembler does not accept optimized ones: use Accelerate to optimize
cp kernel/arm/KERNEL.ARMV5 kernel/arm64/KERNEL.ARMV8
cp kernel/arm/KERNEL.ARMV5 ../OpenBLAS-$OPENBLAS_VERSION-nolapack/kernel/arm64/KERNEL.ARMV8
export CC="$(xcrun --sdk iphoneos --find clang) -isysroot $(xcrun --sdk iphoneos --show-sdk-path) -arch arm64 -miphoneos-version-min=5.0"
export FC=
export NO_LAPACK=1
export NOFORTRAN=1
export BINARY=64
export TARGET=ARMV8
export NO_SHARED=1
;;
ios-x86)
patch -Np1 < ../../../OpenBLAS-ios.patch
patch -Np1 -d ../OpenBLAS-$OPENBLAS_VERSION-nolapack/ < ../../../OpenBLAS-ios.patch
export CC="$(xcrun --sdk iphonesimulator --find clang) -isysroot $(xcrun --sdk iphonesimulator --show-sdk-path) -arch i686 -mios-simulator-version-min=5.0"
export FC=
export NO_LAPACK=1
export NOFORTRAN=1
export BINARY=32
export TARGET=GENERIC # optimized kernels do not return correct results on iOS: use Accelerate to optimize
export NO_SHARED=1
;;
ios-x86_64)
patch -Np1 < ../../../OpenBLAS-ios.patch
patch -Np1 -d ../OpenBLAS-$OPENBLAS_VERSION-nolapack/ < ../../../OpenBLAS-ios.patch
export CC="$(xcrun --sdk iphonesimulator --find clang) -isysroot $(xcrun --sdk iphonesimulator --show-sdk-path) -arch x86_64 -mios-simulator-version-min=5.0"
export FC=
export NO_LAPACK=1
export NOFORTRAN=1
export BINARY=64
export TARGET=GENERIC # optimized kernels do not return correct results on iOS: use Accelerate to optimize
export NO_SHARED=1
;;
linux-x86)
export CC="gcc -m32"
export FC="gfortran -m32"
export LDFLAGS='-s -Wl,-rpath,\$$ORIGIN/'
export BINARY=32
export DYNAMIC_ARCH=1
export TARGET=NORTHWOOD
;;
linux-x86_64)
export CC="gcc -m64"
export FC="gfortran -m64"
export LDFLAGS='-s -Wl,-rpath,\$$ORIGIN/'
export BINARY=64
export DYNAMIC_ARCH=1
export TARGET=NEHALEM
;;
linux-ppc64le)
# patch to use less buggy generic kernels
patch -Np1 < ../../../OpenBLAS-linux-ppc64le.patch
patch -Np1 -d ../OpenBLAS-$OPENBLAS_VERSION-nolapack/ < ../../../OpenBLAS-linux-ppc64le.patch
MACHINE_TYPE=$( uname -m )
if [[ "$MACHINE_TYPE" =~ ppc64 ]]; then
export CC="gcc -m64"
export FC="gfortran -m64"
else
export CC="powerpc64le-linux-gnu-gcc"
export FC="powerpc64le-linux-gnu-gfortran"
export CROSS_SUFFIX="powerpc64le-linux-gnu-"
fi
export LDFLAGS='-s -Wl,-rpath,\$$ORIGIN/'
export BINARY=64
export TARGET=POWER5
;;
linux-mips64el)
export CC="gcc -mabi=64"
export FC="gfortran -mabi=64"
export LDFLAGS='-s -Wl,-rpath,\$$ORIGIN/ -Wl,-z,noexecstack'
export BINARY=64
export TARGET=MIPS
;;
linux-armhf)
export CC="arm-linux-gnueabihf-gcc"
export FC="arm-linux-gnueabihf-gfortran"
export LDFLAGS='-s -Wl,-rpath,\$$ORIGIN/ -Wl,-z,noexecstack'
export BINARY=32
export TARGET=ARMV6
;;
linux-arm64)
export CC="aarch64-linux-gnu-gcc -mabi=lp64"
export FC="aarch64-linux-gnu-gfortran"
export LDFLAGS='-s -Wl,-rpath,\$$ORIGIN/ -Wl,-z,noexecstack'
export BINARY=64
export TARGET=ARMV8
;;
macosx-*)
patch -Np1 < ../../../OpenBLAS-macosx.patch
patch -Np1 -d ../OpenBLAS-$OPENBLAS_VERSION-nolapack/ < ../../../OpenBLAS-macosx.patch
export CC="$(ls -1 /usr/local/bin/gcc-? | head -n 1)"
export FC="$(ls -1 /usr/local/bin/gfortran-? | head -n 1)"
export LDFLAGS='-s -Wl,-rpath,@loader_path/ -lgfortran'
export BINARY=64
export DYNAMIC_ARCH=1
export NO_AVX512=1
export TARGET=NEHALEM
;;
windows-x86)
export CC="gcc -m32"
export FC="gfortran -m32"
export FEXTRALIB="-lgfortran -lquadmath"
export BINARY=32
export DYNAMIC_ARCH=1
export LDFLAGS="-static-libgcc -static-libgfortran -Wl,-Bstatic -lgfortran -lquadmath -lgcc -lgcc_eh -lpthread"
export TARGET=NORTHWOOD
;;
windows-x86_64)
export CC="gcc -m64"
export FC="gfortran -m64"
export FEXTRALIB="-lgfortran -lquadmath"
export BINARY=64
export DYNAMIC_ARCH=1
export LDFLAGS="-static-libgcc -static-libgfortran -Wl,-Bstatic -lgfortran -lquadmath -lgcc -lgcc_eh -lpthread"
export NO_AVX512=1
export TARGET=NEHALEM
;;
*)
echo "Error: Platform \"$PLATFORM\" is not supported"
return 0
;;
esac
make -s -j $MAKEJ libs netlib shared "CROSS_SUFFIX=$CROSS_SUFFIX" "CC=$CC" "FC=$FC" "HOSTCC=$HOSTCC" BINARY=$BINARY COMMON_PROF= F_COMPILER=GFORTRAN "FEXTRALIB=$FEXTRALIB" USE_OPENMP=0 NUM_THREADS=$NUM_THREADS
make install "PREFIX=$INSTALL_PATH"
unset DYNAMIC_ARCH
cd ../OpenBLAS-$OPENBLAS_VERSION-nolapack/
make -s -j $MAKEJ libs netlib shared "CROSS_SUFFIX=$CROSS_SUFFIX" "CC=$CC" "FC=$FC" "HOSTCC=$HOSTCC" BINARY=$BINARY COMMON_PROF= F_COMPILER=GFORTRAN "FEXTRALIB=$FEXTRALIB" USE_OPENMP=0 NUM_THREADS=$NUM_THREADS NO_LAPACK=1 LIBNAMESUFFIX=nolapack
make install "PREFIX=$INSTALL_PATH" NO_LAPACK=1 LIBNAMESUFFIX=nolapack
unset CC
unset FC
unset LDFLAGS
if [[ -f ../lib/libopenblas.dll.a ]]; then
# bundle the import library for Windows under a friendly name for MSVC
cp ../lib/libopenblas.dll.a ../lib/openblas.lib
cp ../lib/libopenblas_nolapack.dll.a ../lib/openblas_nolapack.lib
fi
cd ../..
|
#!/bin/bash
# Build the tir executable with cabal and install the Python wrapper.
# (Original first line was `!/bin/bash` -- the `#` of the shebang was missing.)
#conda activate srbench
# Stop at the first failing step instead of installing a stale binary.
set -e
cabal install
cp ~/.cabal/bin/tir-exe ./python/tir
cd python
pip install .
|
/** Arithmetic mean of an integer list.
  *
  * The sum is widened to Double *before* dividing; the original
  * `list.sum / list.length` performed Int division, so
  * avg(List(1,2,3,4,5,6)) evaluated to 3.0 instead of 3.5.
  * An empty list yields Double.NaN (0.0 / 0) rather than throwing.
  */
def avg(list: List[Int]): Double = list.sum.toDouble / list.length

val list = List(1,2,3,4,5,6)
val average = avg(list) // returns 3.5
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
budget_term_densities
Calculates density maps for nutrient budget terms from NEMO-ERSEM output.
NERC-DEFRA SSB-BlueC projects
Created on Tue Jan 24 09:18:52 2017
@author: TAMS00
"""
#import pandas as pd
import netCDF4
import xarray as xr
import numpy as np
import os
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap, cm
import argparse
# Pick the model base directory by host: the Windows workstation PC4447
# uses a local copy, everything else the shared NERC path.
# os.environ.get avoids a KeyError where OSTYPE/COMPUTERNAME are not
# exported into the environment (OSTYPE is normally shell-local).
if (('Windows' in os.environ.get('OSTYPE', '')) and
        (os.environ.get('COMPUTERNAME') == 'PC4447')):
    base = 'c:/Users/tams00/Documents/nerc_ssb/c_fluxes/AMM7-HINDCAST-v0-erosion'
else:
    base = '/nerc/n01/n01/momme/AMM7-HINDCAST-v0-erosion'

# NEMO-ERSEM output files to process (further months kept for reference).
modelpaths = [os.path.join(base + '/1981/01/', 'amm7_1d_19810101_19810131_grid_T.nc')]  # ,
# os.path.join(base+'/1981/02/','amm7_1d_19810201_19810228_grid_T.nc')]
# os.path.join(base+'/1981/01/','restart_trc.nc'),

# Alternate variable selections kept for reference:
# modelvars=[['Y4_fdetrc_result']]
# modelvars=[['net_PelBen_POC_result','G3_c_pb_flux','nav_lon','nav_lat'],
# ['fabm_st2DnQ1_c','fabm_st2DnQ6_c','fabm_st2DnQ7_c','fabm_st2DnQ17_c','fabm_st2DnH1_c','fabm_st2DnH2_c','fabm_st2DnY2_c','fabm_st2DnY3_c','fabm_st2DnY4_c','fabm_st2DnG3_c'],
# ['fabm_st2DnQ1_c','fabm_st2DnQ6_c','fabm_st2DnQ7_c','fabm_st2DnQ17_c','fabm_st2DnH1_c','fabm_st2DnH2_c','fabm_st2DnY2_c','fabm_st2DnY3_c','fabm_st2DnY4_c','fabm_st2DnG3_c']]

# 3-D pelagic and 2-D benthic carbon state variables, plus the advection
# term actually plotted below.
par_3d = ['TRNO3_c', 'TRNP1_c', 'TRNP2_c', 'TRNP3_c', 'TRNP4_c', 'TRNB1_c', 'TRNZ4_c', 'TRNZ5_c', 'TRNZ6_c', 'TRNR4_c', 'TRNR6_c', 'TRNR8_c', 'TRNR1_c', 'TRNR2_c', 'TRNR3_c', 'TRNL2_c']
par_2d = ['fabm_st2DnQ1_c', 'fabm_st2DnQ6_c', 'fabm_st2DnQ7_c', 'fabm_st2DnQ17_c', 'fabm_st2DnH1_c', 'fabm_st2DnH2_c', 'fabm_st2DnY2_c', 'fabm_st2DnY3_c', 'fabm_st2DnY4_c', 'fabm_st2DnG3_c']
adv_3d = ['XAD_O3_c_e3t']
modelvars = adv_3d
# main() takes an optional 'argv' argument, which allows us to call it
# from the interactive Python prompt:
def main(argv=None):
    """Plot the time-averaged field of each configured budget term.

    Parameters
    ----------
    argv : list of str, optional
        Command-line arguments; when None, argparse falls back to
        sys.argv (default argparse behaviour).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--basedir', nargs=1,
                        help='base directory with model files')
    # Pass argv through so the interactive-prompt use case actually works
    # (the original always read sys.argv).
    args = parser.parse_args(argv)
    print(args)
    # The original `try:` / `else:` here was a SyntaxError (no except
    # clause); keep the module-level default unless --basedir was given.
    if args.basedir:
        # NOTE(review): modelpaths was already built from the default base
        # at import time, so overriding base here does not change which
        # files are opened below -- TODO rebuild modelpaths from this base.
        base = args.basedir[0]

    # Naughty datasets might require decode_cf=False
    # Here it just needed decode_times=False
    print('********************')
    print(modelpaths[0])
    # data = xr.open_dataset(modelpaths[0],decode_times=False)
    modelout = xr.open_mfdataset(modelpaths)  # ,decode_times=False)
    # print(modelout)

    for modelvar in modelvars:
        vardf = modelout[modelvar]
        print(vardf)
        # print attributes
        for at in vardf.attrs:
            print(at + ':\t\t', end=' ')
            print(vardf.attrs[at])
        timeavg = vardf.mean('time_counter')
        timeavg.plot()


if __name__ == "__main__":
    main()
<filename>.eslintrc.js
const fs = require('fs');
const path = require('path');
const prettierOptions = JSON.parse(
fs.readFileSync(path.resolve(__dirname, '.prettierrc'), 'utf8'),
);
module.exports = {
parser: 'babel-eslint',
extends: ['airbnb', 'prettier'],
plugins: ['prettier', 'react', 'react-hooks', 'jsx-a11y'],
env: {
jest: true,
browser: true,
node: true,
es6: true,
},
parserOptions: {
ecmaVersion: 6,
sourceType: 'module',
ecmaFeatures: {
jsx: true,
},
},
rules: {
'prettier/prettier': ['error', prettierOptions],
'arrow-body-style': [2, 'as-needed'],
'class-methods-use-this': 0,
'import/imports-first': 0,
'import/newline-after-import': 0,
'import/no-dynamic-require': 0,
'import/no-extraneous-dependencies': 0,
'import/no-named-as-default': 0,
'import/no-unresolved': 0,
'import/no-webpack-loader-syntax': 0,
'import/prefer-default-export': 0,
'import/extensions': 0,
indent: [
2,
2,
{
SwitchCase: 1,
},
],
'jsx-a11y/aria-props': 2,
'jsx-a11y/heading-has-content': 0,
'jsx-a11y/label-has-associated-control': [
2,
{
// NOTE: If this error triggers, either disable it or add
// your custom components, labels and attributes via these options
// See https://github.com/evcohen/eslint-plugin-jsx-a11y/blob/master/docs/rules/label-has-associated-control.md
controlComponents: ['Input'],
},
],
'jsx-a11y/label-has-for': 0,
'jsx-a11y/mouse-events-have-key-events': 2,
'jsx-a11y/role-has-required-aria-props': 2,
'jsx-a11y/role-supports-aria-props': 2,
'max-len': 0,
'newline-per-chained-call': 0,
'no-confusing-arrow': 0,
'no-console': 1,
'no-unused-vars': 2,
'no-use-before-define': 0,
'prefer-template': 2,
'react/destructuring-assignment': 0,
'react-hooks/rules-of-hooks': 'error',
'react/jsx-closing-tag-location': 0,
'react/forbid-prop-types': 0,
'react/jsx-first-prop-new-line': [2, 'multiline'],
'react/jsx-filename-extension': 0,
'react/jsx-no-target-blank': 0,
'react/jsx-props-no-spreading': 0,
'react/jsx-uses-vars': 2,
'react/react-in-jsx-scope': 0,
'react/require-default-props': 0,
'react/require-extension': 0,
'react/self-closing-comp': 0,
'react/sort-comp': 0,
'require-yield': 0,
},
// settings: {
// 'import/resolver': {
// webpack: {
// config: './scripts/webpack/webpack.prod.config.js',
// },
// },
// },
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.