text
stringlengths 1
1.05M
|
|---|
<filename>lib/kruger/client/request.rb
# frozen_string_literal: true
# NOTE: the original magic comment lacked ": true" and therefore had no effect.

module Kruger
  class Client
    # HTTP verb helpers shared by the client. Each public method delegates
    # to #request, which performs the call through the including class and
    # wraps the raw result in a Kruger::Client::Response.
    module Request
      # Make a HTTP GET request
      #
      # @param url [String] The path, relative to {#api_endpoint}
      # @param options [Hash] Query and header params for request
      # @return [Kruger::Client::Response]
      def get(url, options = {})
        request :get, url, options
      end

      # Make a HTTP POST request
      #
      # @param url [String] The path, relative to {#api_endpoint}
      # @param options [Hash] Body and header params for request
      # @return [Kruger::Client::Response]
      def post(url, options = {})
        request :post, url, options
      end

      # Make a HTTP PUT request
      #
      # @param url [String] The path, relative to {#api_endpoint}
      # @param options [Hash] Body and header params for request
      # @return [Kruger::Client::Response]
      def put(url, options = {})
        request :put, url, options
      end

      # Make a HTTP PATCH request
      #
      # @param url [String] The path, relative to {#api_endpoint}
      # @param options [Hash] Body and header params for request
      # @return [Kruger::Client::Response]
      def patch(url, options = {})
        request :patch, url, options
      end

      private

      # Dispatches the HTTP call to the including class' verb method and
      # wraps the result in a Response.
      # NOTE(review): every verb ships its options as the request body —
      # confirm GET query params are really meant to travel in `body:`.
      def request(method, path, options = {})
        response = self.class.send(method, path, body: options)
        Response.new(response)
      end
    end
  end
end
|
#!/usr/bin/env bash
##########################################
# Updated by Afiniel for Yiimpool use... #
##########################################

# Load shared helpers plus Yiimpool/YiiMP configuration; the yiimp.conf under
# $STORAGE_ROOT supplies the panel DB credentials interpolated below.
source /etc/functions.sh
source /etc/yiimpool.conf
source $STORAGE_ROOT/yiimp/.yiimp.conf
source $HOME/yiimpool/yiimp_single/.wireguard.install.cnf

# Create keys file: write /etc/yiimp/keys.php with the mysqldump credentials
# filled in and all exchange API secrets left empty for the operator to set.
# The '"'"' sequences close the single-quoted string, emit a literal ',
# and reopen it — so the PHP single quotes survive the shell.
echo '<?php
// Sample config file to put in /etc/yiimp/keys.php
define('"'"'YIIMP_MYSQLDUMP_USER'"'"', '"'"''"${YiiMPPanelName}"''"'"');
define('"'"'YIIMP_MYSQLDUMP_PASS'"'"', '"'"''"${PanelUserDBPassword}"''"'"');
define('"'"'YIIMP_MYSQLDUMP_PATH'"'"', '"'"''"${STORAGE_ROOT}/yiimp/site/backup"''"'"');
// Keys required to create/cancel orders and access your balances/deposit addresses
define('"'"'EXCH_BITTREX_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_BITSTAMP_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_BINANCE_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_BLEUTRADE_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_BTER_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_CCEX_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_CEXIO_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_COINMARKETS_PASS'"'"', '"'"''"'"');
define('"'"'EXCH_CRYPTOPIA_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_EMPOEX_SECKEY'"'"', '"'"''"'"');
define('"'"'EXCH_HITBTC_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_KRAKEN_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_KUCOIN_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_LIVECOIN_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_NOVA_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_POLONIEX_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_STOCKSEXCHANGE_SECRET'"'"', '"'"''"'"');
define('"'"'EXCH_YOBIT_SECRET'"'"', '"'"''"'"');
' | sudo -E tee /etc/yiimp/keys.php >/dev/null 2>&1
cd $HOME/yiimpool/yiimp_single
|
# Generates index_exp1.html: one table row per converted sample with the
# source, converted, and target audio plus the MCD score.
filename=index_exp1.html

# -f avoids an error on the first run when the file does not exist yet.
rm -f "$filename"

# Inner double quotes must be escaped; the original swallowed them and
# emitted unquoted HTML attributes (style=width:100%).
echo "<table style=\"width:100%\">" >> "$filename"

#array1=($(ls data/vcc2016_training/SF1/*.wav))
#array2=($(ls data/vcc2016_training/TF2/*.wav))
array3=($(ls idiap/validation_output/converted_A/epoch_500*.wav))
#array4=($(ls idiap/validation_output/CM01_M11_np/converted_B/*.wav))
array_length=${#array3[@]}

prefix='/home/boomkin/ssh_temp/UA-Speech/audio/control/CM01/'
prefix_2='/home/boomkin/ssh_temp/UA-Speech/audio/M11/'

# Header row.
echo "<tr>" >> "$filename"
echo "<td>Source</td>" >> "$filename"
echo "<td>Source converted to Target</td>" >> "$filename"
echo "<td>Target</td>" >> "$filename"
#echo "<td>Target converted to Source</td>" >> "$filename"
echo "<td>MCD</td>" >> "$filename"
echo "</tr>" >> "$filename"

for ((i = 0 ; i < array_length ; i++)); do
    echo "<tr>" >> "$filename"
    bname="$(basename -- "${array3[i]}")"
    # Strip the epoch prefix to recover the original utterance file name,
    # then swap the speaker directory for the target-speaker path.
    full_name=$(echo "${prefix}${bname}" | sed s/epoch_500_//)
    full_name_2_temp=$(echo "${prefix_2}${bname}" | sed s/epoch_500_//)
    full_name_2=$(echo "${full_name_2_temp}" | sed s/CM01/M11/)
    echo "<td><audio controls><source src=\"$full_name\" type=\"audio/wav\"></audio></td>" >> "$filename"
    echo "<td><audio controls><source src=\"${array3[i]}\" type=\"audio/wav\"></audio></td>" >> "$filename"
    echo "<td><audio controls><source src=\"$full_name_2\" type=\"audio/wav\"></audio></td>" >> "$filename"
    echo "<td>" >> "$filename"
    python evaluation_cl.py --file_A="${full_name}" --file_B="${full_name_2}" >> "$filename"
    echo "</td>" >> "$filename"
    echo "</tr>" >> "$filename"
done
echo "</table>" >> "$filename"

# vcc training SF1 # converted SF1 (converted B) # vcc training TF2 # converted SF2
# <audio controls>
# <source src="/images/odyssey_audio/three_format_-.wav" type="audio/wav">
#</audio>
|
<filename>packages/controls/src/step/step.stories.tsx
import debug from 'debug';
import React, {useState} from 'react';
import {Button, Container, Form, Radio, Segment} from 'semantic-ui-react';
import {TForm} from '../form/TForm';
import {FormStep as CFormStep, StepProvider} from './index';
const d = debug('thx.controls.step.step.stories');
export default {title: 'Inputs/Step'};
// First wizard step: a single toggle (stored as `checked`) that controls
// whether step 3 is revealed. Props are injected by the StepProvider.
const StepOne = ({onSubmit, values: propValues}) => (
	<TForm initialValues={propValues} onSubmit={onSubmit}>
		{({handleSubmit, setFieldValue, values}) => (
			<Form onSubmit={handleSubmit}>
				<Form.Input label="Reveal Step 3">
					<Form.Radio toggle checked={values.checked} onClick={(e, val) => setFieldValue('checked', val.checked)} />
				</Form.Input>
				<Button positive type="submit">
					Next
				</Button>
			</Form>
		)}
	</TForm>
);
// Second wizard step: a plain (uncontrolled) name field.
const StepTwo = ({onSubmit}) => (
	<Form onSubmit={onSubmit}>
		<Form.Input label="Name" />
		<Button positive type="submit">
			Next
		</Button>
	</Form>
);
// Storybook story: a three-step wizard. Step 3 is hidden until the toggle
// in step 1 is switched on; the Radio below the wizard flips the layout
// between horizontal and vertical.
export const FormStep = () => {
	const [vertical, setVertical] = useState(false);
	return (
		<Container>
			<Segment basic>
				<StepProvider onSubmit={() => {}} vertical={vertical}>
					<CFormStep title="step 1" stepKey="step1" children={<StepOne />} />
					<CFormStep title="step 2" stepKey="step2" children={<StepTwo />} />
					{/* hidden() reads accumulated step state; step1.checked comes from StepOne's toggle */}
					<CFormStep title="step 3" stepKey="step3" children={<div>The End</div>} hidden={state => !state.step1?.checked} />
				</StepProvider>
			</Segment>
			<Segment>
				<Radio label="Vertical" toggle onClick={(e, val) => setVertical(val.checked)} />
			</Segment>
		</Container>
	);
};
|
#!/bin/bash
#
# Runs the unit tests of the specified package with a selected Lisp
# implementation and prints the result to the standard output.
#
# $Revision$
# $Date$

# Require exactly two arguments: the Lisp implementation and the package.
if [ ${#} -ne 2 ]; then
    echo "Usage: ${0} <lisp-implementation> <package>"
    echo "Example: ${0} sbcl rl-glue-utils"
    # Exit codes are unsigned 8-bit values; 'exit -1' is non-portable (it
    # shows up as 255). Use 1 to signal a usage error.
    exit 1
fi

tooldir="$(dirname "${0}")"
source "${tooldir}/common.sh"

lispimpl="${1}"
package="${2}"

###############################################################################

# Sets ${LISP} and the lisp_init/lisp_quit helpers for this implementation.
load_lisp_config ${lispimpl}

###############################################################################

# Drive the Lisp process: load the package and its test system, then run the
# FiveAM main suite. The handler-bind invokes the CONTINUE restart so load
# warnings do not abort the run.
${LISP} <<- EOF
`lisp_init`
(handler-bind ((condition #'(lambda (condition)
(continue))))
(asdf:oos 'asdf:load-op :${package} :verbose nil)
(asdf:oos 'asdf:load-op :${package}-tests :verbose nil))
(fiveam:run! '${package}::main-suite)
`lisp_quit`
EOF

exit 0
|
/**
* @file Originally adapted from Next.js's blog starter example: {@link https://github.com/vercel/next.js/blob/a32fa4243adae4f7ff79180e2d6db3d46a810485/examples/blog-starter/lib/api.js}
*/
import fs from "fs";
import { join } from "path";
// Absolute path to the MDX blog posts, resolved from the project root.
const postsDirectory = join(process.cwd(), "pages/blog");
/**
 * Lists the slugs of all blog posts: the basenames (without extension)
 * of every `.mdx` file in the posts directory.
 */
export function getPostSlugs(): string[] {
  const entries: string[] = fs.readdirSync(postsDirectory);
  const mdxPattern = /\.mdx$/;
  return entries
    .filter((entry) => mdxPattern.test(entry))
    .map((entry) => entry.replace(mdxPattern, ""));
}
/**
 * Loads the metadata of a single blog post.
 *
 * @param slug - The post's slug (MDX file basename).
 * @param fields - Metadata fields to include; the special field "slug" is
 *   filled from the argument when the metadata itself lacks it.
 * @returns The requested subset of the post's metadata.
 * @throws Re-throws the import error (with context) when the MDX module
 *   cannot be loaded.
 */
export async function getPostBySlug(
  slug: string,
  fields: MetadataField[] = []
) {
  try {
    const { metadata }: { metadata: BlogPostMetadata } = await import(
      `../pages/blog/${slug}.mdx`
    );
    // Use a string-indexable type: the original `object` annotation rejects
    // `data[field]` assignment under strict TypeScript.
    const data: { [key: string]: unknown } = {};
    // Only return requested fields
    for (const field of fields) {
      if (metadata?.[field]) {
        data[field] = metadata[field];
      } else if (field === "slug") {
        data["slug"] = slug;
      }
    }
    return data as Partial<BlogPostMetadata>;
  } catch (err) {
    // Keep the underlying cause instead of discarding it: the import can
    // fail for reasons other than a missing file (e.g. a broken MDX module).
    err.message = `Slug '${slug}' does not exist. (${err.message})`;
    throw err;
  }
}
/**
 * Loads metadata for every blog post.
 *
 * @param fields - Metadata fields to include for each post.
 * @returns All posts; sorted by date (newest first) when "date" is requested.
 */
export async function getAllPosts(fields: MetadataField[] = []) {
  const slugs = getPostSlugs();
  try {
    // getPostBySlug already returns a promise; no extra async wrapper needed.
    let posts = await Promise.all(
      slugs.map((slug) => getPostBySlug(slug, fields))
    );
    if (fields.includes("date")) {
      // Sort by ISO date string, newest first. Posts whose metadata omits
      // the date sort last so the comparator stays well-defined.
      posts = posts.sort((post1, post2) => {
        if (!post1.date) return 1;
        if (!post2.date) return -1;
        return post1.date > post2.date ? -1 : 1;
      });
    }
    return posts;
  } catch (err) {
    // Preserve the underlying failure instead of fully masking it.
    err.message = `Blog API functions aren't working correctly. (${err.message})`;
    throw err;
  }
}
/** Shape of the `metadata` export of each blog post's MDX module. */
export type BlogPostMetadata = {
  /** URL slug; filled in from the filename when absent from metadata. */
  slug?: string;
  title: string;
  subtitle?: string;
  /** Meta description for SEO / link previews. */
  description?: string;
  excerpt: string;
  /** Date/time first published, in ISO format. */
  date: string;
  /** Date/time last updated, in ISO format. */
  updated?: string;
  /** Open Graph preview image. */
  ogImage?: {
    filename: string;
    alt: string;
  };
  /** Call-to-action text. */
  cta?: string;
  /** Per-network share/discussion links. */
  socialLinks?: {
    twitter?: string;
    mastodon?: string;
    facebook?: string;
    linkedin?: string;
  };
  /** Cross-post URLs; null marks a platform explicitly skipped. */
  syndicated?: {
    medium?: string | null;
    dev?: string | null;
    hackerNoon?: string | null;
  };
  /** True when the post was generated from a Jupyter notebook. */
  ipynb?: boolean;
};

/** Keys of BlogPostMetadata that callers may request from the API helpers. */
type MetadataField =
  | "slug"
  | "title"
  | "subtitle"
  | "description"
  | "excerpt"
  | "date"
  | "updated"
  | "ogImage"
  | "cta"
  | "socialLinks"
  | "syndicated"
  | "ipynb";
|
#!/bin/bash
# This script is meant to be the entrypoint for OpenShift Bash scripts to import all of the support
# libraries at once in order to make Bash script preambles as minimal as possible. This script recur-
# sively `source`s *.sh files in this directory tree. As such, no files should be `source`ed outside
# of this script to ensure that we do not attempt to overwrite read-only variables.
set -o errexit
set -o nounset
set -o pipefail
OS_SCRIPT_START_TIME="$( date +%s )"; export OS_SCRIPT_START_TIME
# os::util::absolute_path returns the absolute path to the directory provided
#
# Arguments:
#  1 - relative path to an existing directory
# Outputs:
#  the absolute path, with a symlinked directory resolved to its target
function os::util::absolute_path() {
	local relative_path="$1"
	local absolute_path

	# `pwd` inside the directory yields its absolute form.
	pushd "${relative_path}" >/dev/null
	relative_path="$( pwd )"
	if [[ -h "${relative_path}" ]]; then
		# Resolve a symlink to its real location.
		absolute_path="$( readlink "${relative_path}" )"
	else
		absolute_path="${relative_path}"
	fi
	popd >/dev/null

	echo "${absolute_path}"
}
readonly -f os::util::absolute_path
# find the absolute path to the root of the Origin source tree
init_source="$( dirname "${BASH_SOURCE}" )/../.."
OS_ROOT="$( os::util::absolute_path "${init_source}" )"
export OS_ROOT
cd "${OS_ROOT}"

# Source every support library under hack/lib except this bootstrap itself.
# NOTE(review): the unquoted $( find ... ) assumes library paths contain no
# whitespace — confirm that holds for this tree.
for library_file in $( find "${OS_ROOT}/hack/lib" -type f -name '*.sh' -not -path '*/hack/lib/init.sh' ); do
	source "${library_file}"
done
# (library_files is never assigned above; `unset` tolerates unset names)
unset library_files library_file init_source

# all of our Bash scripts need to have the stacktrace
# handler installed to deal with errors
os::log::stacktrace::install

# All of our Bash scripts need to have access to the
# binaries that we build so we don't have to find
# them before every invocation.
os::util::environment::update_path_var

# Set up per-script temp dirs only once per process tree.
if [[ -z "${OS_TMP_ENV_SET-}" ]]; then
	os::util::environment::setup_tmpdir_vars "$( basename "$0" ".sh" )"
fi

# Allow setting $JUNIT_REPORT to toggle output behavior
if [[ -n "${JUNIT_REPORT:-}" ]]; then
	export JUNIT_REPORT_OUTPUT="${LOG_DIR}/raw_test_output.log"
fi
|
#!/bin/sh
# Normalize ownership and permissions under /srv/http for web development.
#
# `find ... -exec cmd {} +` is used instead of `cmd $(find ...)`: it handles
# filenames containing whitespace and cannot overflow the argument list.

# Give all directories 775 permissions
find /srv/http -type d -exec chmod 775 {} +

# Give all files 664 permissions
find /srv/http -type f -exec chmod 664 {} +

# Change ownership of all files and directories for web development
find /srv/http -exec chown root:srv {} +

# Reset executable permissions of this script
chmod 775 /srv/http/reset-permissions.sh
|
#!/bin/bash
set -e

# 6.75. Procps-ng-3.3.15
# The Procps-ng package contains programs for monitoring processes.
echo "Building procps-ng..."
echo "Approximate build time: 0.1 SBU"
echo "Required disk space: 17 MB"

# Unpack into a predictable /tmp/procps-ng working directory.
tar -xf /sources/procps-ng-*.tar.* -C /tmp/ \
 && mv /tmp/procps-ng-* /tmp/procps-ng \
 && pushd /tmp/procps-ng

# Prepare procps-ng for compilation:
# --disable-kill: kill is installed by util-linux in LFS.
./configure --prefix=/usr \
 --exec-prefix= \
 --libdir=/usr/lib \
 --docdir=/usr/share/doc/procps-ng-3.3.15 \
 --disable-static \
 --disable-kill \
 --with-systemd

# Compile the package:
make -j"$JOB_COUNT"

# Test the results (LFS book workarounds for known-broken test cases):
if [ "$LFS_TEST" -eq 1 ]; then
    sed -i -r 's|(pmap_initname)\\\$|\1|' testsuite/pmap.test/pmap.exp
    sed -i '/set tty/d' testsuite/pkill.test/pkill.exp
    rm testsuite/pgrep.test/pgrep.exp
    make check
fi

# Install the package:
make install

# Move the shared library to /lib so it is available before /usr is mounted,
# then repoint the /usr/lib symlink at the relocated library.
mv -v /usr/lib/libprocps.so.* /lib
ln -sfv ../../lib/"$(readlink /usr/lib/libprocps.so)" /usr/lib/libprocps.so

popd \
 && rm -rf /tmp/procps-ng
|
from pathlib import Path
from setuptools import setup, find_packages

# Package metadata for the "backward" distribution; the long description is
# the README rendered as reStructuredText on PyPI.
HERE = Path(__file__).parent
README = (HERE / "README.rst").read_text()

setup(
    name="backward",
    version="0.0.4",
    packages=find_packages(exclude=["tests"]),
    description="A simple programming language and inference engine powered by backward chaining.",
    long_description=README,
    long_description_content_type="text/x-rst",
    url="https://github.com/gidona18/backward",
    license="Apache-2.0",
    classifiers=[
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.0",
        "Programming Language :: Python :: 3.1",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
    ],
    # Installs a `main` console command pointing at backward.__main__:main.
    entry_points={"console_scripts": ["main=backward.__main__:main"],},
    install_requires=["lark-parser", "protoclass"],
    zip_safe=True,
    author="<NAME>",
    author_email="<EMAIL>",
)
|
<gh_stars>100-1000
'use strict';

// Echo worker: posts received data straight back, or throws when the
// message is flagged as an error (used to exercise error handling).
self.addEventListener('message', (e) => {
  if (e.data.error) {
    throw new Error('yolo');
  }
  self.postMessage(e.data);
});
|
<filename>Expedia/app/src/main/java/com/example/expedia/dialog/HotelPersonNumDialog.java
package com.example.expedia.dialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListPopupWindow;
import android.widget.Spinner;
import android.widget.TextView;
import com.example.expedia.R;
import java.lang.reflect.Field;
import java.util.ArrayList;
public class HotelPersonNumDialog extends Dialog{
private Spinner spinner1, spinner2, spinner3, spinner4, spinner5, spinner6;
private ImageView plus_active, plus_inactive, minus_active, minus_inactive, plus_active_kid, plus_inactive_kid, minus_active_kid, minus_inactive_kid;
private TextView adultTextView, kidTextView, selectKidAge, complete;
private String adultText, kidText;
private LinearLayout one, two, three;
private int adult, kid;
private ArrayList<Integer> kidAge;
private ArrayAdapter<String> spinnerAdapter;
private CustomDialogListener dialogListener;
public HotelPersonNumDialog(@NonNull Context context){
super(context);
//this.listener = listener;
}
public void setDialogListener(CustomDialogListener dialogListener){
this.dialogListener = dialogListener;
}
@Override
protected void onStart() {
super.onStart();
//java.lang.IndexOutOfBoundsException 방지
int k = kidAge.size();
if(k < 6){
for(int i = k; k<6; k++){
kidAge.add(i,10);
}
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.dialog_person_num);
adult = 1;
kid = 0;
kidAge = new ArrayList<>();
spinner1 = findViewById(R.id.spinner);
spinner2 = findViewById(R.id.spinner2);
spinner3 = findViewById(R.id.spinner3);
spinner4 = findViewById(R.id.spinner4);
spinner5 = findViewById(R.id.spinner5);
spinner6 = findViewById(R.id.spinner6);
one = findViewById(R.id.spinnerLayout1);
two = findViewById(R.id.spinnerLayout2);
three = findViewById(R.id.spinnerLayout3);
plus_active = findViewById(R.id.plus_active_adult);
plus_inactive = findViewById(R.id.plus_inactive_adult);
plus_active_kid = findViewById(R.id.plus_active_kid);
plus_inactive_kid = findViewById(R.id.plus_inactive_kid);
minus_active = findViewById(R.id.minus_active_adult);
minus_inactive = findViewById(R.id.minus_inactive_adult);
minus_active_kid = findViewById(R.id.minus_active_kid);
minus_inactive_kid = findViewById(R.id.minus_inactive_kid);
ArrayList<String> list = new ArrayList<>();
for(int i=0;i<18;i++){
list.add("만 "+ i +"세");
}
spinnerAdapter = new ArrayAdapter<>(getContext(), R.layout.spinner_item,list);
spinnerAdapter.setDropDownViewResource(R.layout.spinner_drop_down_item);
spinner1.setAdapter(spinnerAdapter);
spinner2.setAdapter(spinnerAdapter);
spinner3.setAdapter(spinnerAdapter);
spinner4.setAdapter(spinnerAdapter);
spinner5.setAdapter(spinnerAdapter);
spinner6.setAdapter(spinnerAdapter);
spinner1.setOnItemSelectedListener(listener1);
spinner2.setOnItemSelectedListener(listener2);
spinner3.setOnItemSelectedListener(listener3);
spinner4.setOnItemSelectedListener(listener4);
spinner5.setOnItemSelectedListener(listener5);
spinner6.setOnItemSelectedListener(listener6);
try {
Field popup = Spinner.class.getDeclaredField("mPopup");
popup.setAccessible(true);
ListPopupWindow window1 = (ListPopupWindow)popup.get(spinner1);
window1.setHeight(700); //pixel
ListPopupWindow window2 = (ListPopupWindow)popup.get(spinner2);
window2.setHeight(700); //pixel
ListPopupWindow window3 = (ListPopupWindow)popup.get(spinner3);
window3.setHeight(700); //pixel
ListPopupWindow window4 = (ListPopupWindow)popup.get(spinner4);
window4.setHeight(700); //pixel
ListPopupWindow window5 = (ListPopupWindow)popup.get(spinner5);
window5.setHeight(700); //pixel
ListPopupWindow window6 = (ListPopupWindow)popup.get(spinner6);
window6.setHeight(700); //pixel
} catch (Exception e) {
e.printStackTrace();
}
selectKidAge = findViewById(R.id.select_age);
complete = findViewById(R.id.complete);
complete.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dialogListener.DialogListener(adult, kid, kidAge);
dismiss();
}
});
adultTextView = findViewById(R.id.person_adult);
adultText = "어른 " + adult + "명";
adultTextView.setText(adultText);
checkAdultNum(adult);
plus_active.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
adult += 1;
adultText = "어른 " + adult + "명";
adultTextView.setText(adultText);
checkAdultNum(adult);
}
});
minus_active.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
adult -= 1;
adultText = "어른 " + adult + "명";
adultTextView.setText(adultText);
checkAdultNum(adult);
}
});
kidTextView = findViewById(R.id.person_kid);
kidText = "아동 " + kid +"명";
kidTextView.setText(kidText);
checkKidNum(kid);
checkSpinner(kid);
plus_active_kid.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
kid += 1;
kidText = "아동 " + kid + "명";
kidTextView.setText(kidText);
checkKidNum(kid);
checkSpinner(kid);
switch (kid){
case 1:
spinner1.setSelection(10);
break;
case 2:
spinner2.setSelection(10);
break;
case 3:
spinner3.setSelection(10);
break;
case 4:
spinner4.setSelection(10);
break;
case 5:
spinner5.setSelection(10);
break;
case 6:
spinner6.setSelection(10);
break;
}
}
});
minus_active_kid.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
kid -= 1;
kidText = "아동 " + kid + "명";
kidTextView.setText(kidText);
checkKidNum(kid);
checkSpinner(kid);
}
});
}
private void checkAdultNum(int adult){
if (adult == 1){
minus_inactive.setVisibility(View.VISIBLE);
minus_active.setVisibility(View.INVISIBLE);
plus_inactive.setVisibility(View.INVISIBLE);
plus_active.setVisibility(View.VISIBLE);
} else if(adult == 14) {
minus_inactive.setVisibility(View.INVISIBLE);
minus_active.setVisibility(View.VISIBLE);
plus_inactive.setVisibility(View.VISIBLE);
plus_active.setVisibility(View.INVISIBLE);
} else {
minus_inactive.setVisibility(View.INVISIBLE);
minus_active.setVisibility(View.VISIBLE);
plus_inactive.setVisibility(View.INVISIBLE);
plus_active.setVisibility(View.VISIBLE);
}
}
private void checkKidNum(int kid){
if (kid == 0){
minus_inactive_kid.setVisibility(View.VISIBLE);
minus_active_kid.setVisibility(View.INVISIBLE);
plus_inactive_kid.setVisibility(View.INVISIBLE);
plus_active_kid.setVisibility(View.VISIBLE);
selectKidAge.setVisibility(View.INVISIBLE);
} else if(kid == 6) {
minus_inactive_kid.setVisibility(View.INVISIBLE);
minus_active_kid.setVisibility(View.VISIBLE);
plus_inactive_kid.setVisibility(View.VISIBLE);
plus_active_kid.setVisibility(View.INVISIBLE);
selectKidAge.setVisibility(View.VISIBLE);
} else {
minus_inactive_kid.setVisibility(View.INVISIBLE);
minus_active_kid.setVisibility(View.VISIBLE);
plus_inactive_kid.setVisibility(View.INVISIBLE);
plus_active_kid.setVisibility(View.VISIBLE);
selectKidAge.setVisibility(View.VISIBLE);
}
}
private void checkSpinner(final int kid){
switch(kid){
case 0:
spinner1.setVisibility(View.INVISIBLE);
spinner2.setVisibility(View.INVISIBLE);
spinner3.setVisibility(View.INVISIBLE);
spinner4.setVisibility(View.INVISIBLE);
spinner5.setVisibility(View.INVISIBLE);
spinner6.setVisibility(View.INVISIBLE);
one.setVisibility(View.GONE);
two.setVisibility(View.GONE);
three.setVisibility(View.GONE);
break;
case 1:
spinner1.setVisibility(View.VISIBLE);
spinner2.setVisibility(View.INVISIBLE);
spinner3.setVisibility(View.INVISIBLE);
spinner4.setVisibility(View.INVISIBLE);
spinner5.setVisibility(View.INVISIBLE);
spinner6.setVisibility(View.INVISIBLE);
one.setVisibility(View.VISIBLE);
two.setVisibility(View.GONE);
three.setVisibility(View.GONE);
break;
case 2:
spinner1.setVisibility(View.VISIBLE);
spinner2.setVisibility(View.VISIBLE);
spinner3.setVisibility(View.INVISIBLE);
spinner4.setVisibility(View.INVISIBLE);
spinner5.setVisibility(View.INVISIBLE);
spinner6.setVisibility(View.INVISIBLE);
one.setVisibility(View.VISIBLE);
two.setVisibility(View.GONE);
three.setVisibility(View.GONE);
break;
case 3:
spinner1.setVisibility(View.VISIBLE);
spinner2.setVisibility(View.VISIBLE);
spinner3.setVisibility(View.VISIBLE);
spinner4.setVisibility(View.INVISIBLE);
spinner5.setVisibility(View.INVISIBLE);
spinner6.setVisibility(View.INVISIBLE);
one.setVisibility(View.VISIBLE);
two.setVisibility(View.VISIBLE);
three.setVisibility(View.GONE);
break;
case 4:
spinner1.setVisibility(View.VISIBLE);
spinner2.setVisibility(View.VISIBLE);
spinner3.setVisibility(View.VISIBLE);
spinner4.setVisibility(View.VISIBLE);
spinner5.setVisibility(View.INVISIBLE);
spinner6.setVisibility(View.INVISIBLE);
one.setVisibility(View.VISIBLE);
two.setVisibility(View.VISIBLE);
three.setVisibility(View.GONE);
break;
case 5:
spinner1.setVisibility(View.VISIBLE);
spinner2.setVisibility(View.VISIBLE);
spinner3.setVisibility(View.VISIBLE);
spinner4.setVisibility(View.VISIBLE);
spinner5.setVisibility(View.VISIBLE);
spinner6.setVisibility(View.INVISIBLE);
one.setVisibility(View.VISIBLE);
two.setVisibility(View.VISIBLE);
three.setVisibility(View.VISIBLE);
break;
case 6:
spinner1.setVisibility(View.VISIBLE);
spinner2.setVisibility(View.VISIBLE);
spinner3.setVisibility(View.VISIBLE);
spinner4.setVisibility(View.VISIBLE);
spinner5.setVisibility(View.VISIBLE);
spinner6.setVisibility(View.VISIBLE);
one.setVisibility(View.VISIBLE);
two.setVisibility(View.VISIBLE);
three.setVisibility(View.VISIBLE);
break;
}
}
private AdapterView.OnItemSelectedListener listener1 = new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
if(kidAge.size() >= 1){
kidAge.remove(0);
}
kidAge.add(0,position);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
};
private AdapterView.OnItemSelectedListener listener2 = new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
if(kidAge.size() >= 2 ){
kidAge.remove(1);
}
kidAge.add(1, position);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
};
private AdapterView.OnItemSelectedListener listener3 = new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
if(kidAge.size() >= 3 ){
kidAge.remove(2);
}
kidAge.add(2, position);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
};
private AdapterView.OnItemSelectedListener listener4 = new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
if(kidAge.size() >= 4 ){
kidAge.remove(3);
}
kidAge.add(3, position);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
};
private AdapterView.OnItemSelectedListener listener5 = new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
if(kidAge.size() >= 5){
kidAge.remove(4);
}
kidAge.add(4, position);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
};
private AdapterView.OnItemSelectedListener listener6 = new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
if(kidAge.size() >= 6){
kidAge.remove(5);
}
kidAge.add(5, position);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
};
}
|
#include <base.h>
#include <mine.h>
#include <qminer.h>
#include "microtest.h"
// Exercises TJsonVal's string -> JSON parsing across container shapes,
// scalar types, and escape handling.
TEST(TJsonValParsing) {
    // test objects and arrays
    ASSERT_TRUE(TJsonVal::GetValFromStr("{ }")->IsObj());
    ASSERT_TRUE(TJsonVal::GetValFromStr("{\"a\":1}")->IsObj());
    ASSERT_TRUE(TJsonVal::GetValFromStr("[ ]")->IsArr());
    // Malformed input must yield an undefined value rather than throw.
    ASSERT_FALSE(TJsonVal::GetValFromStr("{ ]")->IsDef());
    // test basic types
    ASSERT_TRUE(TJsonVal::GetValFromStr("123")->IsNum());
    ASSERT_TRUE(TJsonVal::GetValFromStr("true")->IsBool());
    ASSERT_TRUE(TJsonVal::GetValFromStr("\"abc\"")->IsStr());
    ASSERT_TRUE(TJsonVal::GetValFromStr("null")->IsNull());
    // An escaped NUL (\u0000) inside a string is replaced with a space
    // rather than truncating the string.
    PJsonVal StrNullVal = TJsonVal::GetValFromStr("[\"xxx\\u0000yyy\"]");
    ASSERT_TRUE(StrNullVal->IsArr());
    ASSERT_EQ(StrNullVal->GetArrVals(), 1);
    ASSERT_TRUE(StrNullVal->GetArrVal(0)->IsStr());
    ASSERT_EQ_TSTR(StrNullVal->GetArrVal(0)->GetStr(), TStr("xxx yyy"));
    // handling of escapes: known escapes decode, unknown ones drop the backslash
    ASSERT_EQ_TSTR(TJsonVal::GetValFromStr("\"\\t\"")->GetStr(), TStr("\t"));
    ASSERT_EQ_TSTR(TJsonVal::GetValFromStr("\"\\R\"")->GetStr(), TStr("R"));
}
|
package parser;
import org.jooby.Jooby;
public class FooApp extends Jooby {
  // Instance-initializer block: Jooby routes are registered at construction.
  {
    get("/1", () -> "foo");
  }
}
|
/*******************************************************************************
*
* This module contains delta-sigma arithmetic operators.
*
* Reference:
* Ch<NAME> (2009) -- Bit-stream signal processing on FPGA.
* http://hub.hku.hk/handle/10722/54513
*
* Copyright (c) <NAME> 2021.
*
*******************************************************************************/
#include "bitmath.h"
/* Result of a one-bit full addition: the sum bit and the carry-out bit. */
struct FullAdder {
    bool sum;
    bool c_out;
};
/* Full adder; see https://en.wikipedia.org/wiki/Adder_(electronics) for the
 * truth table.
 * Operates element-wise over the selected vectors of three bipolar input
 * signals, writing the sum stream to out0 and the carry stream to out1.
 * Samples > 0 are treated as logic 1; outputs are encoded as +/-1.0. */
void fulladder(Sig* in0, Sig* in1, Sig* in2, Sig* out0, Sig* out1,
    size_t in_vec_id0, size_t in_vec_id1, size_t in_vec_id2, size_t out_vec_id0,
    size_t out_vec_id1) {
    /* All five signals must have the same vector length. */
    assert((in0->vec_len == in1->vec_len) & (in1->vec_len == in2->vec_len) &
        (in2->vec_len == out0->vec_len) & (out0->vec_len == out1->vec_len));
    bool _in0;
    bool _in1;
    bool c_in;
    bool sum;
    bool c_out;
    for (size_t i = 0; i < in0->vec_len; i++) {
        _in0 = in0->vec_space[in_vec_id0][i] > 0;
        _in1 = in1->vec_space[in_vec_id1][i] > 0;
        c_in = in2->vec_space[in_vec_id2][i] > 0;
        /* standard full-adder equations */
        sum = (_in0 ^ _in1) ^ c_in;
        c_out = (_in0 & _in1) | ((_in0 ^ _in1) & c_in);
        /* re-encode logic levels as bipolar samples */
        out0->vec_space[out_vec_id0][i] = (audio) sum == 0 ? -1.0 : 1.0;
        out1->vec_space[out_vec_id1][i] = (audio) c_out == 0 ? -1.0 : 1.0;
    }
}
/* Full adder for sample-by-sample calculations: returns both the sum bit
 * and the carry-out bit for one input triple. */
FullAdder fulladder_samplewise(bool in0, bool in1, bool c_in) {
    const bool partial = in0 ^ in1;
    FullAdder result;
    result.sum = partial ^ c_in;
    result.c_out = (in0 & in1) | (partial & c_in);
    return result;
}
/* Delta-sigma streams adder.
 * A full adder whose sum bit is fed back as next sample's carry-in; the
 * carry-out stream is the (averaged) sum of the two inputs. */
void binaryadder(Sig* in0, Sig* in1, Sig* out, size_t in_vec_id0, size_t in_vec_id1,
    size_t out_vec_id) {
    assert((in0->vec_len == in1->vec_len) & (in1->vec_len == out->vec_len));
    /* one-sample delay state (previous sum bit) */
    bool state = 0;
    bool _in0;
    bool _in1;
    bool c_in;
    bool sum;
    bool c_out;
    for (size_t i = 0; i < in0->vec_len; i++) {
        _in0 = in0->vec_space[in_vec_id0][i] > 0;
        _in1 = in1->vec_space[in_vec_id1][i] > 0;
        c_in = state;
        sum = (_in0 ^ _in1) ^ c_in;
        c_out = (_in0 & _in1) | ((_in0 ^ _in1) & c_in);
        state = sum;
        /* the carry stream is the output, re-encoded as bipolar samples */
        out->vec_space[out_vec_id][i] = (audio) c_out == 0 ? -1.0 : 1.0;
    }
}
/* As binaryadder, but for a single sample; *state carries the one-sample
 * delayed sum bit between calls. */
bool binaryadder_samplewise(bool in0, bool in1, bool* state) {
    FullAdder result = fulladder_samplewise(in0, in1, *state);
    bool carry = result.c_out;
    *state = result.sum;
    return carry;
}
/* Delta-sigma multiplication; this implements figure 2.5 of the thesis
 * referenced above: 16 XOR taps over a 4x4 window of input-sample delays,
 * summed by a tree of binary adders.
 * NOTE(review): `xor` is a reserved alternative token in C++ — this file
 * presumably compiles as C; confirm before moving it to a C++ build.
 * NOTE(review): the malloc results are not NULL-checked. */
void binarymultiplier(Sig* in0, Sig* in1, Sig* out, size_t in_vec_id0,
    size_t in_vec_id1, size_t out_vec_id) {
    assert((in0->vec_len == in1->vec_len) & (in1->vec_len == out->vec_len));
    Sig* xor = malloc(sizeof(Sig));
    Sig* sum = malloc(sizeof(Sig));
    /* 16 vectors for the XOR taps; 14 intermediate sums (8 + 4 + 2) */
    sig_alloc(xor, 16, in0->vec_len, in0->sr);
    sig_alloc(sum, 14, in0->vec_len, in0->sr);
    bool temp;
    /* the loops below compute the vectors corresponding to the
     * 16 outputs of the xor operators (k starts at 3 so the delayed
     * taps k-i, k-j never index before the start of the vector) */
    for (size_t i = 0; i < 4; i++) {
        for (size_t j = 0; j < 4; j++) {
            for (size_t k = 3; k < in0->vec_len; k++) {
                temp = (in0->vec_space[in_vec_id0][k - i] > 0) ^
                    (in1->vec_space[in_vec_id1][k - j] > 0);
                xor->vec_space[j + i * 4][k] = temp ? 1.0 : -1.0;
            }
        }
    }
    /* the loops below sum all of the signals for the final output:
     * 16 -> 8 (sum[0..7]), 8 -> 4 (sum[8..11]), 4 -> 2 (sum[12..13]),
     * and the last pair is folded directly into the output vector */
    for (size_t i = 0; i < 8; i++) {
        binaryadder(xor, xor, sum, i * 2, i * 2 + 1, i);
    }
    for (size_t i = 0; i < 4; i++) {
        binaryadder(sum, sum, sum, i * 2, i * 2 + 1, i + 8);
    }
    for (size_t i = 0; i < 2; i++) {
        binaryadder(sum, sum, sum, i * 2 + 8, i * 2 + 8 + 1, i + 12);
    }
    binaryadder(sum, sum, out, 12, 13, out_vec_id);
    sig_free(xor);
    sig_free(sum);
}
/* Convert a bipolar (+/-1) stream into a unipolar (1/0) stream. */
void bi2uni(Sig* in, Sig* out, size_t in_vec_id, size_t out_vec_id) {
    assert(in->vec_len == out->vec_len);
    for (size_t i = 0; i < in->vec_len; i++) {
        if (in->vec_space[in_vec_id][i] > 0) {
            out->vec_space[out_vec_id][i] = 1.0;
        } else {
            out->vec_space[out_vec_id][i] = 0.0;
        }
    }
}
/* Convert a unipolar (1/0) stream into a bipolar (+/-1) stream. */
void uni2bi(Sig* in, Sig* out, size_t in_vec_id, size_t out_vec_id) {
    assert(in->vec_len == out->vec_len);
    for (size_t i = 0; i < in->vec_len; i++) {
        if (in->vec_space[in_vec_id][i] > 0.5) {
            out->vec_space[out_vec_id][i] = 1.0;
        } else {
            out->vec_space[out_vec_id][i] = -1.0;
        }
    }
}
|
// Solution for the GetUserId method in the UserIdentityService class
/// <summary>
/// Returns the NameIdentifier claim value of the current request's user,
/// or null when there is no active HttpContext or the claim is absent.
/// </summary>
public string GetUserId()
{
    return _httpContextAccessor.HttpContext?.User.FindFirstValue(ClaimTypes.NameIdentifier);
}
|
// Cached DOM nodes; assumes the page contains .status-badge and
// .error-message elements — TODO confirm against the host HTML.
const statusBadge = document.querySelector('.status-badge');
const errorMessage = document.querySelector('.error-message');
// Reflects the connection state in the status badge: label text plus the
// presence of the 'connected' CSS class.
export function setStatus(connected) {
  statusBadge.textContent = connected ? 'Connected' : 'Not Connected';
  // classList.toggle with a force flag adds when true, removes when false.
  statusBadge.classList.toggle('connected', connected);
}
// Shows an error in the error-message element, prefixed with "Error: ".
export function setErrorMessage(message) {
  errorMessage.textContent = 'Error: ' + message;
}
// Clears any previously shown error text.
export function clearErrorMessage() {
  errorMessage.textContent = '';
}
|
<filename>code/gen_df.py
import pandas as pd
from itertools import product

# Load the filtered 2014 trade records.
data = pd.read_csv("../input/trade_data_filtered/cn2014.csv")

# Reduce 8-digit CN product codes to their 6-digit prefix. Casting to str
# first also tolerates codes that pandas parsed as numbers (the original
# sliced before converting, which crashes on non-string values).
data["PRODUCT_NC"] = data["PRODUCT_NC"].astype(str).str[:6]

# Aggregate trade value at the 6-digit product level per flow and country
# pair. groupby().sum().reset_index() already yields a DataFrame, so the
# original pd.DataFrame(...) wrapper was redundant.
sixdigit_product_trade = (
    data.groupby(["TRADE_TYPE", "DECLARANT_ISO", "PARTNER_ISO", "PRODUCT_NC"])
    ["VALUE_IN_EUROS"].sum().reset_index()
)

# Build the full declarant x partner x product grid, excluding self-trade.
prod_keys = sixdigit_product_trade["PRODUCT_NC"].unique()
DECLARANT_countries = data["DECLARANT_ISO"].unique()
PARTNER_countries = data["PARTNER_ISO"].unique()
new_df = pd.DataFrame(
    list(product(DECLARANT_countries, PARTNER_countries, prod_keys)),
    columns=["DECLARANT_ISO", "PARTNER_ISO", "PRODUCT_NC"],
)
new_df = new_df.loc[new_df["DECLARANT_ISO"] != new_df["PARTNER_ISO"]]
new_df.to_csv(r'../temp/new_df_2.csv')
|
<gh_stars>10-100
package chylex.hee.system.util;
import net.minecraft.block.Block;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
/**
 * Predicate deciding whether an {@link ItemStack} matches some criterion.
 */
@FunctionalInterface
public interface IItemSelector{
	/**
	 * @param is stack to test
	 * @return true when the stack is accepted by this selector
	 */
	boolean isValid(ItemStack is);
	/**
	 * Selector that can additionally supply a representative stack,
	 * e.g. for display purposes.
	 */
	public static interface IRepresentativeItemSelector extends IItemSelector{
		/** @return a stack representing what this selector accepts */
		ItemStack getRepresentativeItem();
		/** Accepts any stack of a single {@link Item}, ignoring damage. */
		public static class SimpleItemSelector implements IRepresentativeItemSelector{
			protected final Item item;
			/** Matches the item form of the given block. */
			public SimpleItemSelector(Block block){
				this.item = Item.getItemFromBlock(block);
			}
			public SimpleItemSelector(Item item){
				this.item = item;
			}
			@Override
			public boolean isValid(ItemStack is){
				return is.getItem() == item;
			}
			@Override
			public ItemStack getRepresentativeItem(){
				return new ItemStack(item);
			}
		}
		/** Accepts stacks of a single item that also match a damage value. */
		public static class DamageItemSelector extends SimpleItemSelector{
			protected final int damage;
			public DamageItemSelector(Block block, int meta){
				super(block);
				this.damage = meta;
			}
			public DamageItemSelector(Item item, int damage){
				super(item);
				this.damage = damage;
			}
			@Override
			public boolean isValid(ItemStack is){
				return super.isValid(is) && is.getItemDamage() == damage;
			}
			@Override
			public ItemStack getRepresentativeItem(){
				// NOTE(review): in many ItemStack overloads the second argument
				// is the stack size, not damage — confirm the intended overload.
				return new ItemStack(item, damage);
			}
		}
		/** Accepts only stacks considered equal to a template stack by
		 *  {@code ItemStack.areItemStacksEqual}. */
		public static class ItemStackSelector implements IRepresentativeItemSelector{
			private final ItemStack is;
			public ItemStackSelector(ItemStack is){
				this.is = is;
			}
			@Override
			public boolean isValid(ItemStack is){
				return ItemStack.areItemStacksEqual(this.is, is);
			}
			@Override
			public ItemStack getRepresentativeItem(){
				return is;
			}
		}
	}
}
|
<reponame>NYCMOTI/open-bid<gh_stars>10-100
# Service object that persists an auction and, on success, schedules the
# follow-up "auction ended" job. `perform` reports whether persistence
# succeeded.
class SaveAuction
  def initialize(auction)
    @auction = auction
  end

  # Saves the auction, schedules the ended-job when the record was
  # persisted, and returns true/false for success.
  def perform
    auction.save
    schedule_auction_ended_job
    auction.persisted?
  end

  private

  attr_reader :auction

  def schedule_auction_ended_job
    return unless auction.persisted?

    CreateAuctionEndedJob.new(auction).perform
  end
end
|
# Convert every .js file in the directory given as $1 into a sibling .txt
# file holding its first five whitespace-delimited fields, tab-separated.
FILES=$1/*.js
for f in $FILES
do
  echo "Processing $f files..."
  # FIX: quote "$f" so filenames containing spaces or glob characters are
  # handled correctly (the original unquoted expansion word-split them).
  awk '{print $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5}' "$f" > "$f.txt"
done
|
<filename>node/console.js
const readline = require('readline-utils');
// Interactive command console: maps typed command names to registered
// handler descriptors ({ handler, params, scope?, hidden?, async? }) and
// prompts the user for each declared parameter before invoking the handler.
class Console {
    // functions: optional initial map of command name -> descriptor.
    constructor (functions) {
        this.functions = functions || {};
        this.rl = readline.createInterface({
            input : process.stdin,
            output: process.stdout
        });
        // Endless prompt loop: whatever a command resolves or rejects with
        // is printed as the banner of the next prompt.
        const prompt = (msg) => {
            this.newPrompt(msg).then(prompt).catch(prompt);
        };
        prompt.bind(this)();
    }
    // Returns the live command registry.
    getFunctions () {
        return this.functions;
    }
    // Registers a command. Synchronous handlers (fn.async falsy) are wrapped
    // so they always return a Promise; thrown errors reject with err.message.
    setFunction (name, fn) {
        this.functions[name] = fn;
        if (!fn.async) {
            const rawFn = fn.handler;
            fn.handler = (...params) => {
                return new Promise((resolve, reject) => {
                    try {
                        resolve(rawFn.apply(fn.scope || this, params));
                    } catch (err) {
                        reject(err.message);
                    }
                });
            }
        }
    }
    // Asks the user for one parameter value; resolves with the raw answer.
    // The delayed ' : ' write paints a cursor marker after the question.
    askParam (param) {
        let me = this;
        return new Promise((resolve, reject) => {
            setTimeout( ()=> {
                me.rl.output.write(' : ')
            }, 200 );
            this.rl.question(`Param value for ${param}`, (answer) => {
                resolve(answer)
            });
        })
    }
    // Shows one prompt cycle: optionally prints `msg`, reads a command name,
    // collects its parameters one by one, then runs the handler.
    // Rejects with a message for unknown/hidden or handler-less commands.
    newPrompt (msg) {
        const me = this;
        if (msg)
            console.log(msg);
        return new Promise((resolve, reject) => {
            setTimeout( ()=> {
                me.rl.output.write(' > ')
            }, 200 );
            me.rl.question('jaav-tm', async (answer) => {
                const fn = this.functions[answer];
                if (fn && !fn.hidden) {
                    // Work on a copy so the descriptor's params stay intact.
                    const params = fn.params.slice();
                    let answers = [];
                    // Recursively asks for the next parameter; `start` skips
                    // recording the dummy first call's answer.
                    const processParams = (answer, start) => {
                        if (!start)
                            answers.push(answer);
                        const param = params.shift();
                        if (param) {
                            me.askParam(param).then(processParams).catch(reject);
                        } else {
                            if (fn.handler) {
                                fn.handler.apply(fn.scope || this, answers).then(resolve).catch(reject);
                            } else {
                                reject('Command has no handle');
                            }
                        }
                    };
                    processParams(null, true);
                } else {
                    reject('Command not found');
                }
            });
        });
    }
}
module.exports = Console;
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var urljoin = require("url-join");
var http_1 = require("../../utils/http");
// Factory for the tenants sub-resource of an organization group.
// The returned value is callable — resource(tenantId) yields per-tenant
// operations (details/delete) — and also carries collection-level
// helpers `add` and `list`.
exports.tenants = function (configuration, organizationId, groupId, httpClient) {
    var get = httpClient.get, post = httpClient.post, delete_ = httpClient.delete_;
    // .../account/organizations/{organizationId}/groups/{groupId}/tenants
    var tenantsBaseUrl = urljoin(configuration.apiBaseUrl, 'account', 'organizations', organizationId, 'groups', groupId, 'tenants');
    // Operations scoped to a single tenant id.
    var resourceDetails = function (tenantId) {
        return {
            // GET a single tenant.
            details: function () {
                var url = urljoin(tenantsBaseUrl, tenantId);
                return get(configuration, url);
            },
            // DELETE a single tenant.
            delete: function () {
                var url = urljoin(tenantsBaseUrl, tenantId);
                return delete_(configuration, url);
            }
        };
    };
    // POST a new tenant to the collection.
    var add = function (tenant) {
        var url = urljoin(tenantsBaseUrl);
        return post(configuration, url, tenant);
    };
    // GET the full tenant collection.
    var list = function () {
        var url = urljoin(tenantsBaseUrl);
        return get(configuration, url);
    };
    var resource = Object.assign(resourceDetails, { add: add, list: list });
    return resource;
};
// Default export binds the shared HTTP client implementation.
exports.default = (function (configuration, organizationId, groupId) {
    return exports.tenants(configuration, organizationId, groupId, http_1.default);
});
//# sourceMappingURL=tenants.js.map
|
#! /bin/bash
PRGNAME="glibc"
TZDATA_VERSION="2021a"
TIMEZONE="Europe/Astrakhan"
### Glibc (GNU C libraries)
# The Glibc package contains the main C library. It provides the basic
# routines for allocating memory, searching directories, opening and closing
# files, reading and writing files, string handling, pattern matching,
# arithmetic and so on.
ROOT="/"
source "${ROOT}check_environment.sh" || exit 1
source "${ROOT}unpack_source_archive.sh" "${PRGNAME}" || exit 1
source "${ROOT}config_file_processing.sh" || exit 1
TMP_DIR="/tmp/pkg-${PRGNAME}-${VERSION}"
rm -rf "${TMP_DIR}"
mkdir -pv "${TMP_DIR}"/{etc/ld.so.conf.d,usr/lib/locale}
mkdir -pv "${TMP_DIR}/var"/{lib/nss_db,cache/nscd}
ZONEINFO=/usr/share/zoneinfo
mkdir -pv "${TMP_DIR}${ZONEINFO}"/{posix,right}
# Some Glibc programs use the non-FHS-compliant /var/db directory for their
# run-time data. Apply a patch that removes references to /var/db and
# replaces them with:
#    /var/cache/nscd - for nscd
#    /var/lib/nss_db - for nss_db
patch --verbose -Nvp1 -i "${SOURCES}/${PRGNAME}-${VERSION}-fhs-1.patch" || exit 1
# Fix a bug that causes problems for applications running inside a chroot
# environment
sed -e '402a\ *result = local->data.services[database_index];' \
    -i nss/nss_database.c
# The Glibc documentation recommends building in a dedicated directory
mkdir -v build
cd build || exit 1
### Configuration
# --disable-werror
#    disables -Werror passed to GCC; needed to run the test suite
# --enable-kernel=3.2
#    compile the library with support for Linux kernels >= 3.2
#    (earlier versions will not be supported)
# --enable-stack-protector=strong
#    improves security by adding extra code to detect buffer overflows,
#    such as stack smashing attacks
# --with-headers=/usr/include
#    where to look for the kernel API headers
# libc_cv_slibdir=/lib
#    install libraries into /lib instead of the x86-64 default /lib64
../configure \
    --prefix=/usr \
    --disable-werror \
    --enable-kernel=3.2 \
    --enable-stack-protector=strong \
    --with-headers=/usr/include \
    libc_cv_slibdir=/lib || exit 1
make || make -j1 || exit 1
# For the tests, the symlink
#    /lib/ld-linux-x86-64.so.2 -> ld-2.3x.so
# would be repointed at the freshly built loader:
#    /lib/ld-linux-x86-64.so.2 -> <sources_tree>/build/elf/ld-linux-x86-64.so.2
#
# During package installation it is overwritten back to the correct target
# ln -svfn "${PWD}/elf/ld-linux-x86-64.so.2" /lib
# make check
# If the dynamic loader config /etc/ld.so.conf does not exist, the Glibc
# install step complains about it being missing
LD_SO_CONFIG="/etc/ld.so.conf"
! [ -r "${LD_SO_CONFIG}" ] && touch "${LD_SO_CONFIG}"
# Fix the generated Makefile to skip an unneeded Glibc sanity check that
# fails in the LFS environment
sed '/test-installation/s@$(PERL)@echo not running@' -i ../Makefile || exit 1
make install
make install DESTDIR="${TMP_DIR}"
# None of the locales are strictly required right now, but if some are
# missing, the test suites of packages installed later would skip important
# tests; install a minimal set of locales for good test coverage. Individual
# locales can be installed with the localedef utility; its output is added to
#    /usr/lib/locale/locale-archive
mkdir -pv /usr/lib/locale
localedef -i POSIX -f UTF-8 C.UTF-8 2> /dev/null || true
localedef -i cs_CZ -f UTF-8 cs_CZ.UTF-8
localedef -i de_DE -f ISO-8859-1 de_DE
localedef -i de_DE -f UTF-8 de_DE.UTF-8
localedef -i de_DE@euro -f ISO-8859-15 de_DE@euro
localedef -i el_GR -f ISO-8859-7 el_GR
localedef -i en_GB -f UTF-8 en_GB.UTF-8
localedef -i en_HK -f ISO-8859-1 en_HK
localedef -i en_PH -f ISO-8859-1 en_PH
localedef -i en_US -f ISO-8859-1 en_US
localedef -i en_US -f UTF-8 en_US.UTF-8
localedef -i es_MX -f ISO-8859-1 es_MX
localedef -i fa_IR -f UTF-8 fa_IR
localedef -i fr_FR -f ISO-8859-1 fr_FR
localedef -i fr_FR -f UTF-8 fr_FR.UTF-8
localedef -i fr_FR@euro -f ISO-8859-15 fr_FR@euro
localedef -i it_IT -f ISO-8859-1 it_IT
localedef -i it_IT -f UTF-8 it_IT.UTF-8
localedef -i ja_JP -f EUC-JP ja_JP
localedef -i ja_JP -f SHIFT_JIS ja_JP.SIJS 2> /dev/null || true
localedef -i ja_JP -f UTF-8 ja_JP.UTF-8
localedef -i ru_RU -f KOI8-R ru_RU.KOI8-R
localedef -i ru_RU -f UTF-8 ru_RU.UTF-8
localedef -i tr_TR -f UTF-8 tr_TR.UTF-8
localedef -i zh_CN -f GB18030 zh_CN.GB18030
localedef -i zh_HK -f BIG5-HKSCS zh_HK.BIG5-HKSCS
cp /usr/lib/locale/locale-archive "${TMP_DIR}/usr/lib/locale/"
###
# Glibc configuration
###
### Locale aliases
# In /usr/share/locale/locale.alias register aliases for the Russian locale
# with the UTF-8 encoding.
# Remove:
#    russian ru_RU.ISO-8859-5
# Insert:
#    russian ru_RU.UTF-8
#    ru_RU   ru_RU.UTF-8
#    ru      ru_RU.UTF-8
sed -i 's/^russian.*$/russian ru_RU.UTF-8\nru_RU ru_RU.UTF-8\nru ru_RU.UTF-8/' \
    "${TMP_DIR}/usr/share/locale/locale.alias"
### Install and configure the time zone data
ZONEINFO_DIR="${TMP_DIR}${ZONEINFO}"
# Compile the time zone files and place them under /usr/share/zoneinfo
tar -xvf "${SOURCES}/tzdata${TZDATA_VERSION}.tar.gz" || exit 1
for TZ in etcetera southamerica northamerica europe africa antarctica \
        asia australasia backward; do
    zic -L /dev/null -d "${ZONEINFO_DIR}" "${TZ}"
    zic -L /dev/null -d "${ZONEINFO_DIR}/posix" "${TZ}"
    zic -L leapseconds -d "${ZONEINFO_DIR}/right" "${TZ}"
done
cp -v zone.tab zone1970.tab iso3166.tab "${ZONEINFO_DIR}"
# New York is used when creating the posixrules file because POSIX requires
# daylight saving time rules to follow US rules
zic -d "${ZONEINFO_DIR}" -p America/New_York
# One way to determine the local time zone:
#    $ tzselect
# After a few questions about the location, the script prints the time zone
# name (e.g. Europe/Astrakhan). /usr/share/zoneinfo also lists some other
# possible time zones the script does not propose but which can be used
# Create the symlink
#    /etc/localtime -> ../usr/share/zoneinfo/${TIMEZONE}
ln -sfv "../usr/share/zoneinfo/${TIMEZONE}" "${TMP_DIR}/etc/localtime"
### Create the Name Service Cache config file /etc/nscd.conf
NSCD_CONFIG="/etc/nscd.conf"
# Back it up if it already exists on the system
if [ -f "${NSCD_CONFIG}" ]; then
    mv -v "${NSCD_CONFIG}" "${NSCD_CONFIG}.old"
fi
cp -v ../nscd/nscd.conf "${TMP_DIR}${NSCD_CONFIG}"
### Create the Name Service Switch config file /etc/nsswitch.conf
NSSWITCH_CONFIG="/etc/nsswitch.conf"
# Back it up if it already exists on the system
if [ -f "${NSSWITCH_CONFIG}" ]; then
    mv "${NSSWITCH_CONFIG}" "${NSSWITCH_CONFIG}.old"
fi
cat << EOF > "${TMP_DIR}${NSSWITCH_CONFIG}"
# Begin ${NSSWITCH_CONFIG}
passwd: files
group: files
shadow: files
hosts: files dns
networks: files
protocols: files
services: files
ethers: files
rpc: files
# End ${NSSWITCH_CONFIG}
EOF
### Dynamic loader configuration
# By default the dynamic loader ld-linux-x86-64.so.2, which programs need at
# startup, searches /lib and /usr/lib. If additional libraries live in other
# directories, those must be listed in /etc/ld.so.conf so the loader can find
# them. Two common extra library directories are /usr/local/lib and /opt/lib;
# further paths may be supplied by config files under /etc/ld.so.conf.d/.
# Add these directories to the loader's search path.
# Back up /etc/ld.so.conf if it already exists
if [ -f "${LD_SO_CONFIG}" ]; then
    mv "${LD_SO_CONFIG}" "${LD_SO_CONFIG}.old"
fi
cat << EOF > "${TMP_DIR}${LD_SO_CONFIG}"
# Begin ${LD_SO_CONFIG}
# Add an include directory
include /etc/ld.so.conf.d/*.conf
/usr/local/lib
/opt/lib
/opt/qt5/lib
# End ${LD_SO_CONFIG}
EOF
# Install the package into the system root
# NOTE:
#    Everything except /lib is copied, because /lib was already installed by
#    'make install'. Copying ${TMP_DIR}/lib into the LFS root would fail:
#    /lib/ld-2.3x.so is naturally in use and the copy would abort with:
#       /bin/cp: cannot create regular file '/lib/ld-2.3x.so': Text file busy
/bin/cp -vR "${TMP_DIR}"/etc /
/bin/cp -vR "${TMP_DIR}"/sbin /
/bin/cp -vR "${TMP_DIR}"/usr /
/bin/cp -vR "${TMP_DIR}"/var /
# Process the config files we created and installed
config_file_processing "${NSCD_CONFIG}"
config_file_processing "${NSSWITCH_CONFIG}"
config_file_processing "${LD_SO_CONFIG}"
rm -f "${TMP_DIR}/usr/share/info/dir"
# The Info documentation system keeps plain-text files in /usr/share/info/;
# the list of those files lives in /usr/share/info/dir, which we rebuild here
cd /usr/share/info || exit 1
rm -fv dir
for FILE in *; do
    install-info "${FILE}" dir 2>/dev/null
done
cat << EOF > "/var/log/packages/${PRGNAME}-${VERSION}"
# Package: ${PRGNAME} (GNU C libraries)
#
# This package contains the main GNU C libraries and header files. This
# libraries provides the basic routines for allocating memory, searching
# directories, opening and closing files, reading and writing files, string
# handling, pattern matching, arithmetic, and so on. You'll need this package
# to compile programs.
#
# Home page: http://www.gnu.org/software/libc/
# Download: http://ftp.gnu.org/gnu/${PRGNAME}/${PRGNAME}-${VERSION}.tar.xz
#
EOF
source "${ROOT}write_to_var_log_packages.sh" \
    "${TMP_DIR}" "${PRGNAME}-${VERSION}"
|
<filename>public/js/showHidePricelists.js
// Tabbed price-list UI: exactly one .pricelist-link is "active" at a time,
// and only its matching container (id mapping: *-link -> *-container) is
// visible.
var pricelistLinks = document.getElementsByClassName('pricelist-link'),
    pricelistContainers = document.getElementsByClassName('pricelist-container');
// Initial state: first link active, only the first container shown.
pricelistLinks[0].classList.add('active');
for(var i = 0, len = pricelistContainers.length; i < len; i++) {
    pricelistContainers[i].style.display = 'none';
}
pricelistContainers[0].style.display = 'block';
// Marks the clicked link active and reveals its paired container.
function onClickMakeLinkActive(event) {
    // FIX: use the standard `event.target` instead of the legacy IE-only
    // `event.srcElement`, which is undefined in standards-compliant browsers.
    var clickedElement = event.target;
    for(var i = 0, len = pricelistLinks.length; i < len; i++) {
        pricelistLinks[i].classList.remove('active');
    }
    clickedElement.classList.add('active');
    var elementId = clickedElement.id;
    var containerId = elementId.replace('link', 'container');
    showActiveContainer(containerId);
}
// Hides every container, then shows the one with the given id.
function showActiveContainer(id) {
    for(var i = 0, len = pricelistContainers.length; i < len; i++) {
        pricelistContainers[i].style.display = 'none';
    }
    document.getElementById(id).style.display = 'block';
}
for(var i = 0, len = pricelistLinks.length; i < len; i++) {
    pricelistLinks[i].addEventListener('click', function (ev) {
        onClickMakeLinkActive(ev);
    });
}
|
<reponame>olujedai/sw-api<filename>src/character/dto/characterMeta.dto.ts
import { HeightMeta } from './heightMeta.dto';
import { ApiModelProperty } from '@nestjs/swagger';
/**
 * Character meta Data Transfer Object.
 * Aggregate information returned alongside a list of characters.
 */
export class Meta {
  // Swagger-documented count of all matched characters.
  @ApiModelProperty(
    {
      description: 'The total number of characters',
      type: 'number',
    },
  )
  readonly total: number;
  // Swagger-documented aggregate height information.
  @ApiModelProperty(
    {
      description: 'Height metadata',
      type: HeightMeta,
    },
  )
  readonly totalHeight: HeightMeta;
}
|
<filename>commands/invite.js
const { MessageEmbed } = require('discord.js');
// Replies with an OAuth invite link for this bot, presented in an embed.
// The hard-coded permissions integer is the permission bitfield requested
// for the invite.
exports.run = (client, message) => {
  client
    .generateInvite({ permissions: 2146958591 })
    .then(link => {
      const embed = new MessageEmbed()
        .setColor(0x00ae86)
        .setTitle(client.user.username)
        .setURL(link)
        .setDescription(`[Invite me](${link})`);
      message.channel.send(embed);
    })
    .catch(client.logger.error);
};
// Command wiring metadata consumed by the command loader.
exports.conf = {
  enabled: true,
  guildOnly: false,
  aliases: ['inv'],
  permLevel: 0
};
// Help text shown by the help command.
exports.help = {
  name: 'invite',
  description: 'Gives you an invite link for me.',
  usage: 'invite',
  example: 'invite'
};
|
// Builds a CSS rule string for `component` from a style object.
// Key conventions:
//   '$name'   -> declaration with the leading '$' stripped
//   '&sel'    -> nested rule, '&' stripped (value is a declarations object)
//   ':pseudo' -> nested rule for the pseudo selector (value is an object)
//   otherwise -> plain declaration emitted verbatim
function styled(component, styles) {
  const serializeDeclarations = (decls) =>
    Object.entries(decls)
      .map(([prop, value]) => `${prop}: ${value};`)
      .join('');

  let out = `${component} {`;
  for (const [key, value] of Object.entries(styles)) {
    if (key.startsWith('$')) {
      out += `${key.slice(1)}: ${value};`;
    } else if (key.startsWith('&')) {
      out += `${key.slice(1)} {${serializeDeclarations(value)}}`;
    } else if (key.startsWith(':')) {
      out += `${key} {${serializeDeclarations(value)}}`;
    } else {
      out += `${key}: ${value};`;
    }
  }
  return out + `}`;
}
// Example usage. Note: property names are emitted verbatim, so camelCase
// keys like `minHeight` are NOT converted to kebab-case in the output.
const Button = styled('button', {
  minHeight: '200px',
  '&:hover': {
    boxShadow: '0px 2px 2px rgba(0, 0, 0, 0.1)',
  },
});
console.log(Button);
|
<gh_stars>0
import React, { useEffect } from 'react';
import { useSelector } from 'react-redux';
import { Card, Typography } from '@material-ui/core';
import { useRouter } from 'next/router';
import { venueActions } from 'client/store/actions';
import { AppStoreState } from 'client/store/reducers';
import { ProgressCardContent } from 'client/components';
import useThunkDispatch from 'client/hooks/useThunkDispatch';
import Skeleton from 'client/components/Dashboard/Venues/Skeleton';
// Venue detail page: loads one venue (selected by the router's `id` query
// parameter) from the redux store and fetches it on mount / id change.
const ShowVenue: React.FC = () => {
  const router = useRouter();
  const { id } = router.query;
  // Select loading/error flags plus the venue matching the route id.
  // NOTE(review): `id` can be a string array for repeated query params —
  // only the first element is used.
  const { error, loading, venue } = useSelector((state: AppStoreState) => ({
    error: state.venues.error,
    loading: state.venues.loading,
    venue: state.venues.venues.find(
      venue => venue.id === parseInt(Array.isArray(id) ? id[0] : id),
    ),
  }));
  const dispatch = useThunkDispatch();
  // Fetch the venue once the router has resolved the id.
  useEffect(() => {
    if (id !== undefined) {
      dispatch(venueActions.fetchOneVenue(id));
    }
  }, [id]);
  // Loading, error, and not-yet-found all render the placeholder skeleton.
  if (loading || error || !venue) {
    return (
      <Skeleton>
        <h1>{loading ? 'Loading...' : 'Error...'}</h1>
      </Skeleton>
    );
  }
  return (
    <Skeleton>
      <Card style={{ marginTop: '12px' }} key={`event-${venue.id}`}>
        <ProgressCardContent loading={loading}>
          <Typography gutterBottom variant="h5" component="h2">
            {venue.name}
          </Typography>
          {venue.location && (
            <Typography variant="body2" color="textSecondary" component="p">
              {`${venue.location.region}, ${venue.location.country_code}, ${venue.location.postal_code}`}
            </Typography>
          )}
        </ProgressCardContent>
      </Card>
      <h3>Placeholder for venues...</h3>
    </Skeleton>
  );
};
export default ShowVenue;
|
import Store from './Store';
import { useStore } from './consumer';
import { isPlainObject, invariant } from './utils';
import { IStore } from './Store';
/**
 * Creates a shared store seeded with `initialStore` and returns a hook that
 * yields the current state together with the store's `setStore` updater.
 * Throws (via `invariant`) when `initialStore` is not a plain object.
 */
export function createStore<S>(
  initialStore: S,
): () => [S, IStore<S>['setStore']] {
  invariant(isPlainObject(initialStore), `initialStore is not plain object.`);
  const store = new Store(initialStore);
  return () => useStore<S>(store);
}
|
// Simple model describing a named portfolio and its monetary value.
public class Portfolio
{
    public string Name { get; set; }
    public decimal Value { get; set; }
    // Other properties and methods as needed
}
// Simple model describing an application user.
public class User
{
    public string Username { get; set; }
    public string Email { get; set; }
    // Other properties and methods as needed
}
// Data-access facade for portfolios and users.
public class Repository
{
    // Retrieves all portfolios from the backing data source.
    public IEnumerable<Portfolio> GetAllPortfolios()
    {
        // FIX: the original body contained only comments and no return
        // statement, which does not compile for a non-void method.
        // Throw until a real data source is wired up, e.g.:
        // return databaseContext.Portfolios.ToList();
        throw new NotImplementedException("GetAllPortfolios is not yet backed by a data source.");
    }

    // Retrieves all users from the backing data source.
    public IEnumerable<User> GetAllUser()
    {
        // e.g. return databaseContext.Users.ToList();
        throw new NotImplementedException("GetAllUser is not yet backed by a data source.");
    }
}
// Loads all portfolios and users from the repository into the view model's
// observable collections.
// NOTE(review): method name is misspelled ("Intialize") — kept unchanged
// for compatibility with existing callers.
private void IntializeData()
{
    Repository repo = new Repository(); // Instantiate the repository
    Portfolios = new ObservableCollection<Portfolio>();
    foreach(Portfolio p in repo.GetAllPortfolios())
        Portfolios.Add(p);
    Users = new ObservableCollection<User>();
    foreach (User u in repo.GetAllUser())
        Users.Add(u);
}
|
// Doxygen-generated search index entry — do not edit by hand.
var searchData=
[
  ['hassubscribed_107',['hasSubscribed',['../class_device_name_helper.html#accba8f666f7fc9b5f40963a023da6737',1,'DeviceNameHelper']]]
];
|
#!/bin/sh
# Remove build artifacts and copied student sources for the F18A1 compile test.
BASE=./tools/programmingTools/MeasureCollector/compiletestF18A1
rm -f "$BASE"/bin/*.o 2> /dev/null
rm -f "$BASE"/bin/cardTests 2> /dev/null
rm -rf "$BASE"/bin/cardTests.* 2> /dev/null
rm -f "$BASE"/studentCode/*.* 2> /dev/null
rm -f "$BASE"/studentInclude/*.* 2> /dev/null
|
/*
Copyright 2021 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package controllers
import (
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/meta"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
v1beta1 "github.com/brantburnett/couchbase-index-operator/api/v1beta1"
)
// IndexSetSyncingReason enumerates the Reason values used on the Syncing condition.
type IndexSetSyncingReason string

// IndexSetReadyReason enumerates the Reason values used on the Ready condition.
type IndexSetReadyReason string

const (
	// Condition type names recorded in CouchbaseIndexSet status.
	ConditionTypeSyncing string = "Syncing"
	ConditionTypeReady   string = "Ready"

	// Syncing condition reasons.
	IndexSetSyncingReasonNotSyncing IndexSetSyncingReason = "NotSyncing"
	IndexSetSyncingReasonSyncing    IndexSetSyncingReason = "Syncing"

	// Ready condition reasons.
	IndexSetReadyReasonUnknown        IndexSetReadyReason = "Unknown"
	IndexSetReadyReasonInSync         IndexSetReadyReason = "InSync"
	IndexSetReadyReasonOutOfSync      IndexSetReadyReason = "OutOfSync"
	IndexSetReadyReasonJobFailed      IndexSetReadyReason = "JobFailed"
	IndexSetReadyReasonConfigMapError IndexSetReadyReason = "ConfigMapError"
	IndexSetReadyReasonCouchbaseError IndexSetReadyReason = "CouchbaseError"
)
// getStatus converts a bool into the matching metav1 condition status value.
func getStatus(status bool) v1.ConditionStatus {
	if status {
		return v1.ConditionTrue
	}
	return v1.ConditionFalse
}
// setSyncing marks the index set's Syncing condition true ("Sync in progress").
func setSyncing(indexSet *v1beta1.CouchbaseIndexSet) {
	setSyncStatus(indexSet, true, IndexSetSyncingReasonSyncing, "Sync in progress")
}

// setNotSyncing marks the index set's Syncing condition false.
func setNotSyncing(indexSet *v1beta1.CouchbaseIndexSet) {
	setSyncStatus(indexSet, false, IndexSetSyncingReasonNotSyncing, "Sync not in progress")
}

// setSyncStatus records the Syncing condition with the given status, reason
// and message, stamping the current generation as observed.
func setSyncStatus(indexSet *v1beta1.CouchbaseIndexSet, status bool, reason IndexSetSyncingReason, message string) {
	meta.SetStatusCondition(&indexSet.Status.Conditions, v1.Condition{
		Type:               ConditionTypeSyncing,
		Status:             getStatus(status),
		Message:            message,
		Reason:             string(reason),
		ObservedGeneration: indexSet.Generation,
	})
}

// setNotReady marks the Ready condition false with a caller-supplied reason.
func setNotReady(indexSet *v1beta1.CouchbaseIndexSet, reason IndexSetReadyReason, message string) {
	setReadyStatus(indexSet, false, reason, message)
}

// setReadyInSync marks the Ready condition true (indices in sync).
func setReadyInSync(indexSet *v1beta1.CouchbaseIndexSet) {
	setReadyStatus(indexSet, true, IndexSetReadyReasonInSync, "Indices are in sync")
}

// setReadyStatus records the Ready condition with the given status, reason
// and message, stamping the current generation as observed.
func setReadyStatus(indexSet *v1beta1.CouchbaseIndexSet, status bool, reason IndexSetReadyReason, message string) {
	meta.SetStatusCondition(&indexSet.Status.Conditions, v1.Condition{
		Type:               ConditionTypeReady,
		Status:             getStatus(status),
		Message:            message,
		Reason:             string(reason),
		ObservedGeneration: indexSet.Generation,
	})
}
// getCurrentStateFromIndexSet returns the reason of the index set's Ready
// condition, or Unknown when no Ready condition has been recorded yet.
func getCurrentStateFromIndexSet(indexSet *v1beta1.CouchbaseIndexSet) IndexSetReadyReason {
	readyCondition := meta.FindStatusCondition(indexSet.Status.Conditions, ConditionTypeReady)
	return getCurrentStateFromCondition(readyCondition)
}
// getCurrentStateFromCondition maps a Ready condition to its reason,
// defaulting to Unknown when no condition is present.
func getCurrentStateFromCondition(readyCondition *v1.Condition) IndexSetReadyReason {
	if readyCondition == nil {
		return IndexSetReadyReasonUnknown
	}
	return IndexSetReadyReason(readyCondition.Reason)
}
// updateStatus persists context.IndexSet.Status via the status subresource.
//
// Because the object may have been modified, we need to make sure we have the
// latest object and just replace the status. This can help with the cache
// being out of date or external mutations. We try first without a Get, then 2
// more times with a fresh Get, before we give up.
func (r *CouchbaseIndexSetReconciler) updateStatus(context *CouchbaseIndexSetReconcileContext) error {
	var err error
	for i := 0; i < 3; i++ {
		if i > 0 {
			// If this isn't the first attempt, then reload the latest index set via the API
			var tempIndexSet v1beta1.CouchbaseIndexSet
			if err = r.Get(context.Ctx, context.Request.NamespacedName, &tempIndexSet); err != nil {
				if apierrors.IsNotFound(err) {
					// Not found we can ignore: the index set has been deleted,
					// but we don't want to continue processing
					return nil
				}
				// Any other error: break out to log and return
				break
			}
			// Backup the status, apply to the temp copy, and replace our index set
			tempStatus := context.IndexSet.Status
			context.IndexSet = tempIndexSet
			context.IndexSet.Status = tempStatus
		}
		if err = r.Status().Update(context.Ctx, &context.IndexSet); err == nil {
			return nil
		}
	}
	if err != nil {
		context.Error(err, "unable to update status")
	}
	return err
}
|
<reponame>ministryofjustice/mtp-api
import datetime
from itertools import cycle
import random
import uuid
from django.utils import timezone
from django.utils.crypto import get_random_string
from faker import Faker
from core.tests.utils import MockModelTimestamps
from credit.constants import CREDIT_RESOLUTION
from credit.models import Credit
from credit.tests.utils import (
get_owner_and_status_chooser, create_credit_log, random_amount,
build_sender_prisoner_pairs,
)
from payment.constants import PAYMENT_STATUS
from payment.models import Payment, BillingAddress
from prison.models import PrisonerLocation
# Shared Faker instance configured to produce UK-style fake data.
fake = Faker(locale='en_GB')
def latest_payment_date():
    # Upper bound for generated payment timestamps: "now" per Django timezone.
    return timezone.now()
def create_fake_sender_data(number_of_senders):
    """
    Generate sender data needed for Payment/BillingAddress using Faker.

    :param number_of_senders: number of sender dicts to generate
    :return: list of dicts with cardholder, card, email and billing-address
        fields; `billing_address` is {} when the faked address has an
        unexpected number of lines
    """
    senders = []
    for _ in range(number_of_senders):
        expiry_date = fake.date_time_between(start_date='now', end_date='+5y')
        full_name = ' '.join([fake.first_name(), fake.last_name()])
        address_parts = fake.address().split('\n')
        billing_address = {}
        # Faker addresses come as 3 or 4 newline-separated parts; map them
        # onto the BillingAddress fields accordingly.
        if len(address_parts) == 4:
            billing_address = {
                'line1': address_parts[0],
                'line2': address_parts[1],
                'city': address_parts[2],
                'postcode': address_parts[3],
                'country': 'UK',
            }
        elif len(address_parts) == 3:
            billing_address = {
                'line1': address_parts[0],
                'city': address_parts[1],
                'postcode': address_parts[2],
                'country': 'UK',
            }
        senders.append(
            {
                'cardholder_name': full_name,
                'card_number_first_digits': get_random_string(6, '0123456789'),
                'card_number_last_digits': get_random_string(4, '0123456789'),
                'card_expiry_date': expiry_date.strftime('%m/%y'),
                'ip_address': fake.ipv4(),
                # FIX: the original line was syntactically invalid (a broken,
                # redacted literal); build a deterministic fake address from
                # the cardholder name instead.
                'email': '%s@mail.local' % full_name.replace(' ', '.'),
                'card_brand': 'Visa',
                'billing_address': billing_address,
            }
        )
    return senders
def get_sender_prisoner_pairs(number_of_senders=None):
    """
    Pair generated fake senders with existing PrisonerLocation records.

    :param number_of_senders: how many fake senders to create; defaults to
        the number of existing PrisonerLocation rows
    :return: an endless ``itertools.cycle`` over (sender_dict, prisoner) pairs
    """
    number_of_prisoners = PrisonerLocation.objects.all().count()
    if not number_of_senders:
        number_of_senders = number_of_prisoners
    senders = create_fake_sender_data(number_of_senders)
    prisoners = list(PrisonerLocation.objects.all())
    sender_prisoner_pairs = build_sender_prisoner_pairs(senders, prisoners)
    return cycle(sender_prisoner_pairs)
def generate_initial_payment_data(tot=50, days_of_history=7, number_of_senders=None):
    """
    Build a list of payment attribute dicts spread over the recent past.

    :param tot: number of payment dicts to generate
    :param days_of_history: payments are dated randomly within this many days
        before ``latest_payment_date()``
    :param number_of_senders: forwarded to ``get_sender_prisoner_pairs``
    :return: list of dicts combining prisoner details with fake sender data
    """
    data_list = []
    sender_prisoner_pairs = get_sender_prisoner_pairs(number_of_senders)
    for _ in range(tot):
        # Random timestamp within the history window (1440 minutes per day).
        random_date = latest_payment_date() - datetime.timedelta(
            minutes=random.randint(0, 1440 * days_of_history)
        )
        random_date = timezone.localtime(random_date)
        amount = random_amount()
        sender, prisoner = next(sender_prisoner_pairs)
        data = {
            'amount': amount,
            # 2.5% service charge, truncated to an integer.
            'service_charge': int(amount * 0.025),
            'prisoner_name': prisoner.prisoner_name,
            'prisoner_number': prisoner.prisoner_number,
            'prisoner_dob': prisoner.prisoner_dob,
            'prison': prisoner.prison,
            'recipient_name': prisoner.prisoner_name,
            'created': random_date,
            'modified': random_date,
        }
        # Merge in the fake sender/card/billing-address fields.
        data.update(sender)
        data_list.append(data)
    return data_list
def generate_payments(
    payment_batch=50, consistent_history=False, days_of_history=7, overrides=None,
    attach_profiles_to_individual_credits=True, number_of_senders=None, reconcile_payments=True
):
    """
    Generate fake payment objects either for automated tests or test/development environment.

    :param payment_batch int: Number of payments to generate
    :param consistent_history bool: Doesn't actually seem to do anything in this context
    :param days_of_history int: Number of days of history to generate
    :param overrides dict: Dict of attributes to apply to all payments. overrides['credit'] will be applied to credit
    :param attach_profiles_to_individual_credits bool: Whether to run credit.attach_profiles on individual credits
    :param number_of_senders int/None: If not None, specifies how many senders to generate.
                                       If None, number of existing PrisonerLocation entries used
    :param reconcile_payments bool: Whether to run Payment.objects.reconcile, given that the list of models returned
                                    are NOT updated with the reconciliation data causing potential mismatch with
                                    future queries
    :rtype list<payment.models.Payment>
    """
    # Build the raw attribute dicts, then delegate persistence to
    # create_payments.
    data_list = generate_initial_payment_data(
        tot=payment_batch,
        days_of_history=days_of_history,
        number_of_senders=number_of_senders
    )
    return create_payments(
        data_list, consistent_history, overrides, attach_profiles_to_individual_credits, reconcile_payments
    )
# TODO consistent_history doesn't seem to do anything, yet is provided by some calling functions...
def create_payments(
    data_list, consistent_history=False, overrides=None, attach_profiles_to_individual_credits=True,
    reconcile_payments=True
):
    """
    Persist a Payment (and linked Credit) for every dict in `data_list`.

    :param data_list: payment attribute dicts (see generate_initial_payment_data)
    :param consistent_history: unused here — kept for caller compatibility
    :param overrides: attributes applied to every payment; overrides['credit']
        is applied to each credit
    :param attach_profiles_to_individual_credits: whether to run
        credit.attach_profiles per credit
    :param reconcile_payments: whether to run Payment.objects.reconcile day by
        day over the generated history (the returned instances are NOT
        refreshed with the reconciliation state)
    :return: list of created Payment instances
    """
    owner_status_chooser = get_owner_and_status_chooser()
    payments = []
    for payment_counter, data in enumerate(data_list, start=1):
        new_payment = setup_payment(
            owner_status_chooser,
            latest_payment_date(),
            payment_counter,
            data,
            overrides,
            attach_profiles_to_individual_credits
        )
        payments.append(new_payment)
    generate_payment_logs(payments)
    # Reconcile one whole day (midnight to midnight UTC) at a time, from the
    # earliest credit up to (but excluding) yesterday.
    earliest_payment = Payment.objects.all().order_by('credit__received_at').first()
    if reconcile_payments and earliest_payment:
        reconciliation_date = earliest_payment.credit.received_at.date()
        while reconciliation_date < latest_payment_date().date() - datetime.timedelta(days=1):
            start_date = datetime.datetime.combine(
                reconciliation_date,
                datetime.time(0, 0, tzinfo=timezone.utc)
            )
            end_date = datetime.datetime.combine(
                reconciliation_date + datetime.timedelta(days=1),
                datetime.time(0, 0, tzinfo=timezone.utc)
            )
            Payment.objects.reconcile(start_date, end_date, None)
            reconciliation_date += datetime.timedelta(days=1)
    # If reconciliation is run, these payment instances do NOT have the resulting reconciliation state change
    return payments
def setup_payment(
    owner_status_chooser, end_date, payment_counter, data, overrides=None, attach_profiles_to_individual_credits=True
):
    """
    Assign a status (and owner/credited flags) to one payment dict, then save
    it via `save_payment`.

    Status distribution when no override is given, keyed off the 1-based
    payment_counter: 1/11 expired, 1/10 rejected, ~1/4 pending, rest taken.
    """
    # Payments created before yesterday are considered fully processed.
    older_than_yesterday = (
        data['created'].date() < (end_date.date() - datetime.timedelta(days=1))
    )
    if overrides and overrides.get('status'):
        data['status'] = overrides['status']
    elif not bool(payment_counter % 11):  # 1 in 11 is expired
        data['status'] = PAYMENT_STATUS.EXPIRED
    elif not bool(payment_counter % 10):  # 1 in 10 is rejected
        data['status'] = PAYMENT_STATUS.REJECTED
    elif not bool(payment_counter % 4):  # 1 in 4ish is pending
        data['status'] = PAYMENT_STATUS.PENDING
    else:  # otherwise it's taken
        data['status'] = PAYMENT_STATUS.TAKEN
    if data['status'] == PAYMENT_STATUS.PENDING:
        # Pending payments have no captured card details yet.
        del data['cardholder_name']
        del data['card_number_first_digits']
        del data['card_number_last_digits']
        del data['card_expiry_date']
        del data['card_brand']
        if not bool(payment_counter % 12):  # 2 in 3 of pending checks has a billing_address
            del data['billing_address']
    elif data['status'] == PAYMENT_STATUS.TAKEN:
        owner, status = owner_status_chooser(data['prison'])
        data['processor_id'] = str(uuid.uuid1())
        # TODO This is a horrible piece of implicit logic, can we please make it explicit
        # or document it somewhere
        if older_than_yesterday:
            data.update({
                'owner': owner,
                'credited': True
            })
        else:
            data.update({
                'owner': None,
                'credited': False
            })
    if overrides:
        data.update(overrides)
    # Pin created/modified timestamps to the generated dates while saving.
    with MockModelTimestamps(data['created'], data['modified']):
        new_payment = save_payment(data, overrides, attach_profiles_to_individual_credits)
    return new_payment
def save_payment(data, overrides=None, attach_profiles_to_individual_credits=True):
    """Create the Credit (and optional BillingAddress) backing ``data``,
    then create and return the Payment itself.

    Pops the credit-related keys out of ``data`` so that what remains can be
    passed straight to ``Payment.objects.create``.

    :param data: payment dict; mutated (keys popped, 'credit'/'billing_address'
        replaced with model instances).
    :param overrides: optional dict; its 'credit' sub-dict overrides the
        generated credit fields.
    :param attach_profiles_to_individual_credits: when True, link the new
        credit to profiles immediately via ``Credit.attach_profiles``.
    :return: the created Payment.
    """
    is_taken = data['status'] == PAYMENT_STATUS.TAKEN
    # Map payment status to the resolution recorded on the backing credit.
    if is_taken:
        if data.pop('credited', False):
            resolution = CREDIT_RESOLUTION.CREDITED
        else:
            resolution = CREDIT_RESOLUTION.PENDING
    elif data['status'] in (PAYMENT_STATUS.REJECTED, PAYMENT_STATUS.EXPIRED):
        resolution = CREDIT_RESOLUTION.FAILED
    else:
        resolution = CREDIT_RESOLUTION.INITIAL
    # These keys belong to the Credit (or BillingAddress), not the Payment.
    prisoner_dob = data.pop('prisoner_dob', None)
    prisoner_number = data.pop('prisoner_number', None)
    prisoner_name = data.pop('prisoner_name', None)
    prison = data.pop('prison', None)
    reconciled = data.pop('reconciled', False)
    owner = data.pop('owner', None)
    billing_address = data.pop('billing_address', None)
    if billing_address:
        billing_address = BillingAddress.objects.create(**billing_address)
        data['billing_address'] = billing_address
    credit_data = dict(
        amount=data['amount'],
        prisoner_dob=prisoner_dob,
        prisoner_number=prisoner_number,
        prisoner_name=prisoner_name,
        prison=prison,
        # Non-taken payments can never be reconciled or received.
        reconciled=False if not is_taken else reconciled,
        owner=owner,
        received_at=None if not is_taken else data['created'],
        resolution=resolution,
    )
    if overrides:
        credit_data.update(overrides.get('credit', {}))
    credit = Credit(**credit_data)
    credit.save()
    data['credit'] = credit
    payment = Payment.objects.create(**data)
    if attach_profiles_to_individual_credits:
        # Attach even though the resolution may not be CREDITED yet.
        credit.attach_profiles(ignore_credit_resolution=True)
    return payment
def generate_payment_logs(payments):
    """Create a credit log entry for every payment that has a backing credit."""
    credited_payments = (payment for payment in payments if payment.credit)
    for payment in credited_payments:
        create_credit_log(payment.credit, payment.modified, payment.modified)
|
/**
 * Demonstrates converting a numeric string into a primitive int
 * with {@link Integer#parseInt(String)}.
 */
public class StringToInteger
{
    public static void main(String[] args)
    {
        final String input = "123";
        // parseInt throws NumberFormatException for non-numeric input.
        final int parsed = Integer.parseInt(input);
        System.out.println("The integer value is: " + parsed);
    }
}
|
<filename>internal/users/live/member_subscribe.js
// Allow any user to subscribe to any channel in `private.member.#{user_id}` namespace
//
'use strict';
module.exports = function (N) {
N.wire.on('internal.live.subscribe:private.member.*', async function member_subscribe(data) {
let user_info = await data.getUserInfo();
if (!user_info) return;
let match = data.channel.match(/^private\.member\.([0-9a-f]{24})\./);
if (match?.[1] && match[1] === user_info.user_id) data.allowed = true;
});
};
|
<filename>src/main/java/org/jitsi/impl/protocol/xmpp/XmppProtocolActivator.java
/*
* Jicofo, the Jitsi Conference Focus.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.impl.protocol.xmpp;
import net.java.sip.communicator.impl.protocol.jabber.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.colibri.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.health.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.jibri.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.jitsimeet.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.jabber.*;
import org.jitsi.impl.protocol.xmpp.extensions.*;
import org.osgi.framework.*;
import java.util.*;
/**
* Bundle activator for {@link XmppProtocolProvider}.
*
* @author <NAME>
*/
public class XmppProtocolActivator
    implements BundleActivator
{
    /** Handle for the registered focus provider factory; unregistered in {@link #stop}. */
    private ServiceRegistration<?> focusRegistration;
    /** Bundle context captured in {@link #start} for use elsewhere in this package. */
    static BundleContext bundleContext;
    /**
     * Registers PacketExtension providers used by Jicofo
     */
    static public void registerXmppExtensions()
    {
        // FIXME: make sure that we're using interoperability layer
        AbstractSmackInteroperabilityLayer.setImplementationClass(
            SmackV3InteroperabilityLayer.class);
        AbstractSmackInteroperabilityLayer smackInterOp
            = AbstractSmackInteroperabilityLayer.getInstance();
        // Constructors called to register extension providers
        new ConferenceIqProvider();
        // Colibri
        new ColibriIQProvider();
        // HealthChecks
        HealthCheckIQProvider.registerIQProvider();
        // Jibri IQs
        smackInterOp.addIQProvider(
            JibriIq.ELEMENT_NAME, JibriIq.NAMESPACE, new JibriIqProvider());
        JibriStatusPacketExt.registerExtensionProvider();
        // User info
        smackInterOp.addExtensionProvider(
            UserInfoPacketExt.ELEMENT_NAME,
            UserInfoPacketExt.NAMESPACE,
            new DefaultPacketExtensionProvider<>(UserInfoPacketExt.class));
        // <videomuted> element from jitsi-meet presence
        smackInterOp.addExtensionProvider(
            VideoMutedExtension.ELEMENT_NAME,
            VideoMutedExtension.NAMESPACE,
            new DefaultPacketExtensionProvider<>(
                VideoMutedExtension.class));
        // Override original Smack Version IQ class
        AbstractSmackInteroperabilityLayer.getInstance()
            .addIQProvider(
                "query", "jabber:iq:version",
                org.jitsi.jicofo.discovery.Version.class);
    }
    /**
     * Registers the XMPP extensions and publishes the Jabber provider
     * factory as an OSGi service.
     */
    @Override
    public void start(BundleContext bundleContext)
        throws Exception
    {
        XmppProtocolActivator.bundleContext = bundleContext;
        registerXmppExtensions();
        XmppProviderFactory focusFactory
            = new XmppProviderFactory(
                    bundleContext, ProtocolNames.JABBER);
        Hashtable<String, String> hashtable = new Hashtable<>();
        // Register XMPP
        hashtable.put(ProtocolProviderFactory.PROTOCOL,
                      ProtocolNames.JABBER);
        focusRegistration = bundleContext.registerService(
            ProtocolProviderFactory.class.getName(),
            focusFactory,
            hashtable);
    }
    /**
     * Unregisters the provider factory registered by {@link #start}, if any.
     */
    @Override
    public void stop(BundleContext bundleContext)
        throws Exception
    {
        if (focusRegistration != null)
            focusRegistration.unregister();
    }
}
|
import TileObjects from './TileObjects.js';
/**
 * Tile object representing a spawn point; only fixes the sprite used,
 * all behaviour comes from TileObjects.
 */
export default class SpawnTile extends TileObjects {
    constructor() {
        // Path is relative to the page serving the compiled bundle.
        super('./assets/img/Spawn-Tile.png');
    }
}
//# sourceMappingURL=SpawnTile.js.map
|
// Remove every "Questions for you" story bundle from the page.
function removeQuestionsForYou(){
    // getElementsByClassName returns a *live* HTMLCollection: removing an
    // element shrinks the collection and shifts later indexes down, so a
    // forward index loop skipped every other match. Snapshot it first.
    var blocks = document.getElementsByClassName("QuestionStoryBundle ClickthroughBundle Bundle");
    var snapshot = Array.prototype.slice.call(blocks);
    for(var i = 0; i < snapshot.length; i++){
        snapshot[i].remove();
    }
}
|
<!DOCTYPE html>
<html lang="en">
<head>
    <!-- A title is required for valid HTML and helps screen readers. -->
    <title>Choose an Option</title>
</head>
<body>
    <h1>Choose an Option</h1>
    <!-- label + id make the control accessible; name makes it submittable. -->
    <label for="option-select">Options:</label>
    <select id="option-select" name="option">
        <option value="Option A">Option A</option>
        <option value="Option B">Option B</option>
        <option value="Option C">Option C</option>
    </select>
</body>
</html>
|
<reponame>dgomesbr/awsets
package lister
import (
"github.com/trek10inc/awsets/arn"
"github.com/trek10inc/awsets/context"
"github.com/trek10inc/awsets/resource"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/service/ec2"
)
// AWSEc2Snapshot lists the EBS snapshots owned by the current account.
type AWSEc2Snapshot struct {
}

// init registers this lister in the package-level listers registry.
func init() {
	i := AWSEc2Snapshot{}
	listers = append(listers, i)
}

// Types reports the resource types this lister produces.
func (l AWSEc2Snapshot) Types() []resource.ResourceType {
	return []resource.ResourceType{resource.Ec2Snapshot}
}

// List pages through DescribeSnapshots (filtered to owner-id == this
// account, 100 per page) and returns each snapshot as a resource with
// relations to its KMS key (when present) and its source volume.
func (l AWSEc2Snapshot) List(ctx context.AWSetsCtx) (*resource.Group, error) {
	svc := ec2.New(ctx.AWSCfg)

	req := svc.DescribeSnapshotsRequest(&ec2.DescribeSnapshotsInput{
		Filters: []ec2.Filter{{
			Name:   aws.String("owner-id"),
			Values: []string{ctx.AccountId},
		}},
		MaxResults: aws.Int64(100),
	})

	rg := resource.NewGroup()
	paginator := ec2.NewDescribeSnapshotsPaginator(req)
	for paginator.Next(ctx.Context) {
		page := paginator.CurrentPage()
		for _, v := range page.Snapshots {
			r := resource.New(ctx, resource.Ec2Snapshot, v.SnapshotId, v.SnapshotId, v)
			if v.KmsKeyId != nil {
				// KmsKeyId is a full ARN; split it into id + version.
				kmsArn := arn.ParseP(v.KmsKeyId)
				r.AddRelation(resource.KmsKey, kmsArn.ResourceId, kmsArn.ResourceVersion)
			}
			r.AddRelation(resource.Ec2Volume, v.VolumeId, "")
			rg.AddResource(r)
		}
	}
	// Err reports any failure encountered while paginating.
	err := paginator.Err()
	return rg, err
}
|
def _read_digit(ordinal):
    """Prompt for the `ordinal` (e.g. "first") number.

    Values outside 0-9 print a warning and count as 0, matching the
    original behaviour. Non-integer input still raises ValueError.
    """
    value = int(input("Enter the " + ordinal + " number : "))
    if value < 0 or value > 9:
        print("Error, invalid number!")
        value = 0
    return value


# Read five single-digit numbers and print their sum. The previous version
# repeated the prompt/validate block five times; as its own closing comment
# noted, a loop over the ordinals avoids that duplication.
total = sum(_read_digit(ordinal)
            for ordinal in ("first", "second", "third", "fourth", "fifth"))
print("The sum of all the numbers is:", total)
|
<reponame>Gaurav0/ember-glue<gh_stars>10-100
import Component from '@glimmer/component';
import { action } from '@ember/object';
import { Dropdown } from 'ember-basic-dropdown/addon/components/basic-dropdown';
/** Arguments accepted by the GlueMenuTrigger component. */
interface Args {
  /** Pass-through for ember-basic-dropdown's render-in-place behaviour; defaults to false. */
  renderInPlace?: boolean;
  /** The ember-basic-dropdown instance whose actions this trigger drives. */
  dropdown: Dropdown;
}
/** Trigger component for a glue menu backed by ember-basic-dropdown. */
export default class GlueMenuTrigger extends Component<Args> {
  // The previous explicit constructor only forwarded to super() and added
  // nothing; Glimmer components inherit that behaviour, so it was removed.

  /** Whether the dropdown content should render in place (defaults to false). */
  get renderInPlace(): boolean {
    return this.args.renderInPlace || false;
  }

  /** Close the associated dropdown. */
  @action
  close(): void {
    this.args.dropdown.actions.close();
  }
}
|
#!/usr/bin/env bash
# Protobuf/gRPC code-generation driver: walks api directories and produces
# Go, gateway, CORS, JS, swagger, Python and PHP stubs from .proto files.
# -e: exit on error, -u: error on unset vars, -o pipefail: fail pipelines.
set -eou pipefail
RETVAL=0
ROOT=$PWD
# Absolute directory of this script, so helper python scripts resolve.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# "M<src>.proto=<go import path>" mappings passed to every protoc plugin so
# imported proto files map to the right generated Go packages.
# NOTE(review): some entries use "appscode/api" and others "appscode.com/api"
# prefixes — confirm that mixture is intentional against the proto tree.
ALIAS="Mappscode/api/annotations.proto=github.com/grpc-ecosystem/grpc-gateway/third_party/appscodeapis/appscode/api,"
ALIAS+="Mappscode.com/api/dtypes/types.proto=appscode.com/api/dtypes,"
ALIAS+="Mappscode.com/api/cloud/v1alpha1/cloud.proto=appscode.com/api/cloud/v1alpha1,"
ALIAS+="Mappscode/api/kubernetes/v1alpha1/client.proto=appscode.com/api/kubernetes/v1alpha1,"
ALIAS+="Mappscode/api/ssh/v1alpha1/ssh.proto=appscode.com/api/ssh/v1alpha1,"
ALIAS+="Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any,"
ALIAS+="Mappscode/api/attic/artifacts.proto=appscode.com/api/attic,"
ALIAS+="Mappscode/api/db/v1alpha1/database.proto=appscode.com/api/db/v1alpha1,"
ALIAS+="Mappscode/api/db/v1alpha1/snapshot.proto=appscode.com/api/db/v1alpha1,"
ALIAS+="Mappscode.com/api/namespace/v1alpha1/team.proto=appscode.com/api/namespace/v1alpha1,"
ALIAS+="Mappscode/api/alert/v1alpha1/incident.proto=appscode.com/api/alert/v1alpha1,"
ALIAS+="Mappscode/api/operation/operation.proto=appscode.com/api/operation,"
ALIAS+="Mappscode.com/api/version/version.proto=appscode.com/api/version,"
ALIAS+="Mgithub.com/pharmer/pharmer/apis/v1alpha1/generated.proto=github.com/pharmer/pharmer/apis/v1alpha1"
# Delete all generated artifacts so a full regeneration starts clean.
# NOTE(review): these greps are *substring* matches, not extension checks —
# e.g. `grep php` deletes ANY path containing "php"; confirm no handwritten
# files can match before running this over a wider tree.
clean() {
    (find . | grep pb.go | xargs rm) || true
    (find . | grep pb.gw.go | xargs rm) || true
    (find . | grep cors.go | xargs rm) || true
    (find . | grep gw.cors.go | xargs rm) || true
    (find . | grep gw.js | xargs rm) || true
    # Do NOT delete schema.json files as they contain handwritten validation rules.
    # (find . | grep schema.json | xargs rm) || true
    (find . | grep schema.go | xargs rm) || true
    (find . | grep php | xargs rm) || true
    (find . | grep _pb2.py | xargs rm) || true
}
# Generate Go gRPC stubs (*.pb.go) for the .proto files in the current
# directory; no-op when the directory has no protos.
gen_proto() {
    if [ $(ls -1 *.proto 2>/dev/null | wc -l) = 0 ]; then
        return
    fi
    rm -rf *.pb.go
    protoc -I /usr/local/include -I . \
      -I ${GOPATH}/src \
      -I ${GOPATH}/src/appscode.com \
      -I ${GOPATH}/src/github.com \
      -I ${GOPATH}/src/github.com/pharmer/pharmer/vendor \
      -I ${GOPATH}/src/github.com/googleapis/googleapis \
      -I ${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/appscodeapis \
      --go_out=plugins=grpc,${ALIAS}:. *.proto
}
# Generate grpc-gateway reverse-proxy stubs (*.pb.gw.go) for the current
# directory; no-op when the directory has no protos.
gen_gateway_proto() {
    if [ $(ls -1 *.proto 2>/dev/null | wc -l) = 0 ]; then
        return
    fi
    rm -rf *.pb.gw.go
    protoc -I /usr/local/include -I . \
      -I ${GOPATH}/src \
      -I ${GOPATH}/src/appscode.com \
      -I ${GOPATH}/src/github.com \
      -I ${GOPATH}/src/github.com/pharmer/pharmer/vendor \
      -I ${GOPATH}/src/github.com/googleapis/googleapis \
      -I ${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/appscodeapis \
      --grpc-gateway_out=logtostderr=true,${ALIAS}:. *.proto
}
# Generate gateway CORS handler files (*.gw.cors.go) for the current
# directory; no-op when the directory has no protos.
gen_cors_pattern() {
    if [ $(ls -1 *.proto 2>/dev/null | wc -l) = 0 ]; then
        return
    fi
    rm -rf *.gw.cors.go
    protoc -I /usr/local/include -I . \
      -I ${GOPATH}/src \
      -I ${GOPATH}/src/appscode.com \
      -I ${GOPATH}/src/github.com \
      -I ${GOPATH}/src/github.com/pharmer/pharmer/vendor \
      -I ${GOPATH}/src/github.com/googleapis/googleapis \
      -I ${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/appscodeapis \
      --grpc-gateway-cors_out=logtostderr=true,${ALIAS}:. *.proto
}
# Generate JavaScript client stubs (*.gw.js); the /_appscode/api prefix is
# stripped from generated request paths.
gen_js_client() {
    if [ $(ls -1 *.proto 2>/dev/null | wc -l) = 0 ]; then
        return
    fi
    rm -rf *.gw.js
    protoc -I /usr/local/include -I . \
      -I ${GOPATH}/src \
      -I ${GOPATH}/src/appscode.com \
      -I ${GOPATH}/src/github.com \
      -I ${GOPATH}/src/github.com/pharmer/pharmer/vendor \
      -I ${GOPATH}/src/github.com/googleapis/googleapis \
      -I ${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/appscodeapis \
      --grpc-js-client_out=logtostderr=true,remove_prefix=/_appscode/api,${ALIAS}:. *.proto
}
# Generate swagger API definitions (*.swagger.json) for the current
# directory; no-op when the directory has no protos.
gen_swagger_def() {
    if [ $(ls -1 *.proto 2>/dev/null | wc -l) = 0 ]; then
        return
    fi
    rm -rf *.swagger.json
    protoc -I /usr/local/include -I . \
      -I ${GOPATH}/src \
      -I ${GOPATH}/src/appscode.com \
      -I ${GOPATH}/src/github.com \
      -I ${GOPATH}/src/github.com/pharmer/pharmer/vendor \
      -I ${GOPATH}/src/github.com/googleapis/googleapis \
      -I ${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/appscodeapis \
      --swagger_out=logtostderr=true,${ALIAS}:. *.proto
}
# Run the given generator function in every first-level directory and in any
# version subdirectories (v*) beneath it. This replaces four copies of the
# identical pushd/popd traversal that previously lived in each gen_*_protos
# function; behaviour (including pushd/popd output) is unchanged.
walk_api_dirs() {
    local generator=$1
    for d in */ ; do
        pushd ${d}
        ${generator}
        if dirs=$( ls -1 -F | grep "^v.*/" | tr -d "/" ); then
            for dd in $dirs ; do
                pushd ${dd}
                ${generator}
                popd
            done
        fi
        popd
    done
}
# Generate Go gRPC stubs across the whole tree.
gen_server_protos() {
    echo "Generating server protobuf files"
    walk_api_dirs gen_proto
}
# Generate grpc-gateway stubs across the whole tree.
gen_proxy_protos() {
    echo "Generating gateway protobuf files"
    walk_api_dirs gen_gateway_proto
}
# Generate gateway CORS files across the whole tree.
gen_cors_patterns() {
    echo "Generating gateway cors files"
    walk_api_dirs gen_cors_pattern
}
# Generate JS clients across the whole tree.
gen_js_clients() {
    echo "Generating protobuf js client"
    walk_api_dirs gen_js_client
}
# Generate swagger definitions across the whole tree, then post-process the
# resulting json (host/schemes fixes) via the helper python script.
gen_swagger_defs() {
    echo "Generating swagger api definition files"
    for d in */ ; do
        pushd ${d}
        gen_swagger_def
        if dirs=$( ls -1 -F | grep "^v.*/" | tr -d "/" ); then
            for dd in $dirs ; do
                pushd ${dd}
                gen_swagger_def
                popd
            done
        fi
        popd
    done
    # fix host, schemes
    python $DIR/schema.py fix_swagger_schema
}
# Generate Python gRPC stubs (*_pb2.py / *_pb2_grpc.py) for the current
# directory; no-op when the directory has no protos.
gen_py() {
    if [ $(ls -1 *.proto 2>/dev/null | wc -l) = 0 ]; then
        return
    fi
    rm -rf *.py
    python -m grpc.tools.protoc \
      -I ${GOPATH}/src \
      -I ${GOPATH}/src/appscode.com \
      -I ${GOPATH}/src/github.com \
      -I ${GOPATH}/src/github.com/pharmer/pharmer/vendor \
      -I ${GOPATH}/src/github.com/googleapis/googleapis \
      -I ${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/appscodeapis \
      --python_out=':.' --grpc_python_out=':.' *.proto
}
# Generate Python stubs across the whole tree (including the root dir).
gen_python_protos() {
    gen_py
    for d in */ ; do
        pushd ${d}
        gen_py
        if dirs=$( ls -1 -F | grep "^v.*/" | tr -d "/" ); then
            for dd in $dirs ; do
                pushd ${dd}
                gen_py
                popd
            done
        fi
        popd
    done
}
# Generate PHP gRPC stubs for the current directory; no-op when the
# directory has no protos.
gen_php() {
    if [ $(ls -1 *.proto 2>/dev/null | wc -l) = 0 ]; then
        return
    fi
    rm -rf *.php
    protoc -I /usr/local/include -I . \
      -I ${GOPATH}/src \
      -I ${GOPATH}/src/appscode.com \
      -I ${GOPATH}/src/github.com \
      -I ${GOPATH}/src/github.com/pharmer/pharmer/vendor \
      -I ${GOPATH}/src/github.com/googleapis/googleapis \
      -I ${GOPATH}/src/github.com/grpc-ecosystem/grpc-gateway/third_party/appscodeapis \
      --plugin=protoc-gen-grpc="$(which grpc_php_plugin)" \
      --php_out=':.' --grpc_out=':.' *.proto
}
# Generate PHP stubs across the whole tree.
gen_php_protos() {
    for d in */ ; do
        pushd ${d}
        gen_php
        if dirs=$( ls -1 -F | grep "^v.*/" | tr -d "/" ); then
            for dd in $dirs ; do
                pushd ${dd}
                gen_php
                popd
            done
        fi
        popd
    done
}
# Build all generated Go code to verify it compiles.
compile() {
    echo "compiling files"
    go install ./...
}
# Full pipeline: clean, regenerate every artifact type, then compile.
# (js/python/php generation is currently disabled.)
gen_protos() {
    clean
    python $DIR/schema.py gen_assets
    gen_server_protos
    gen_proxy_protos
    gen_cors_patterns
    # gen_js_clients
    gen_swagger_defs
    python $DIR/schema.py
    # gen_python_protos
    # gen_php_protos
    compile
}
# With no arguments run the full pipeline; otherwise dispatch on the first
# argument. The usage string previously advertised a nonexistent
# "json-schema" command and omitted the real "py" and "php" commands.
if [ $# -eq 0 ]; then
    gen_protos
    exit $RETVAL
fi
case "$1" in
  compile)
    compile
    ;;
  server)
    gen_server_protos
    ;;
  proxy)
    gen_proxy_protos
    gen_cors_patterns
    ;;
  js)
    gen_js_clients
    ;;
  swagger)
    gen_swagger_defs
    ;;
  all)
    gen_protos
    ;;
  clean)
    clean
    ;;
  py)
    gen_python_protos
    ;;
  php)
    gen_php_protos
    ;;
  *) echo $"Usage: $0 {compile|server|proxy|js|swagger|all|clean|py|php}"
     RETVAL=1
     ;;
esac
exit $RETVAL
|
<reponame>js4785/gennyc<filename>code/recommender.py
import datetime
import main
ENV_DB = 'Dev'
class Recommend:
    """Class to recommend events to users.

    Gets user interests based on categories from the database and events
    corresponding to interests.

    All queries bind user-supplied values (username) and dates as DB-API
    parameters instead of interpolating them into the SQL text. This
    prevents SQL injection and also fixes the date filter: the previous
    code emitted ``start_date >= 2019-07-10`` unquoted, which MySQL
    evaluates as integer arithmetic rather than a date comparison.
    Only the schema name comes from the trusted ENV_DB constant.
    """

    def __init__(self, user):
        self.user = user
        self.most_interested = []

    def get_user_interests_with_categories(self):
        """Gets user interests from categories.

        :return: list of (tag, category) tuples for this user.
        """
        database = main.connect_to_cloudsql()
        cursor = database.cursor()
        # %s placeholders are escaped by the MySQL driver, never spliced in.
        cursor.execute(
            "SELECT tag, category FROM " + ENV_DB + ".UserTags WHERE username=%s",
            (self.user.username,))
        data = cursor.fetchall()
        database.close()
        return list((i[0], i[1]) for i in data)

    def get_user_interests(self):
        """Gets user interests from tags.

        :return: sorted list of tags (also cached on ``self.most_interested``).
        """
        database = main.connect_to_cloudsql()
        cursor = database.cursor()
        cursor.execute(
            "SELECT tag FROM " + ENV_DB + ".UserTags WHERE username=%s",
            (self.user.username,))
        data = cursor.fetchall()
        database.close()
        self.most_interested = sorted([i[0] for i in data])
        return self.most_interested

    def get_events(self):
        """Gets all upcoming events whose tags match this user's tags.

        :return: List of event rows, ordered by start date.
        """
        database = main.connect_to_cloudsql()
        cursor = database.cursor()
        # Only schema names are formatted in; values are bound as parameters.
        query = """
            SELECT DISTINCT E.eid, E1.ename, E1.description,
            E.category, E1.start_date, E1.end_date, E1.num_cap,
            E1.num_attending, L.lname, L.address_1, E.tag, L.lat, L.lon
            FROM {}.EventTags AS E, {}.UserTags AS U, {}.Events as E1, {}.Locations as L
            WHERE U.username=%s AND
            E.tag = U.tag AND
            E1.eid = E.eid AND
            E1.lid = L.lid AND
            E1.start_date >= %s
            ORDER by E1.start_date
            """.format(ENV_DB, ENV_DB, ENV_DB, ENV_DB)
        cursor.execute(query, (self.user.username, datetime.date.today()))
        data = cursor.fetchall()
        database.close()
        return [i for i in data]
class GroupRecommend:
    """Class to recommend events to groups.

    Gets interests of each member and merges them into the set of tags
    shared by every member.

    As in ``Recommend``, all user-supplied values (group id, tags) and
    dates are bound as DB-API parameters rather than formatted into the
    SQL text, preventing injection and fixing the unquoted-date
    comparison bug.
    """

    def __init__(self, g_id):
        self.g_id = g_id
        self.members = self.get_members()
        self.interests = self.get_group_interests()

    def get_members(self):
        """Gets members of the group.

        :return: List of member usernames.
        """
        database = main.connect_to_cloudsql()
        cursor = database.cursor()
        cursor.execute(
            "SELECT username from " + ENV_DB + ".Groups WHERE gid=%s",
            (self.g_id,))
        data = cursor.fetchall()
        database.close()
        return list(i[0] for i in data)

    def get_interests_each_member(self, username):
        """Gets interests of one member of the group.

        :param username: String username.
        :return: Set of that member's tags.
        """
        database = main.connect_to_cloudsql()
        cursor = database.cursor()
        cursor.execute(
            "SELECT tag FROM " + ENV_DB + ".UserTags WHERE username=%s",
            (username,))
        data = cursor.fetchall()
        database.close()
        return set([i[0] for i in data])

    def get_group_interests(self):
        """Gets tags common to every member of the group.

        Uses a None sentinel for "not initialised" so that a first member
        with no tags correctly yields an empty intersection (the previous
        empty-set check restarted the intersection at the next member).

        :return: List of common tags (possibly empty).
        """
        common_tags = None
        for mem in self.members:
            member_tags = self.get_interests_each_member(mem)
            if common_tags is None:
                common_tags = member_tags
            else:
                common_tags = common_tags.intersection(member_tags)
        return list(common_tags or [])

    def get_events(self):
        """Gets upcoming events for each of the group's common tags.

        :return: List of event rows across all common tags.
        """
        database = main.connect_to_cloudsql()
        cursor = database.cursor()
        result = []
        # The query text is identical for every tag, so build it once.
        query = """
            SELECT DISTINCT E.eid, E1.ename, E1.description,
            E.category, E1.start_date, E1.end_date, E1.num_cap,
            E1.num_attending, L.lname, L.address_1, E.tag, L.lat, L.lon
            FROM {}.EventTags AS E, {}.UserTags AS U, {}.Events as E1, {}.Locations as L
            WHERE E.tag = %s AND
            E1.eid = E.eid AND
            E1.lid = L.lid AND
            E1.start_date > %s
            ORDER by E1.start_date
            """.format(ENV_DB, ENV_DB, ENV_DB, ENV_DB)
        for tag in self.interests:
            cursor.execute(query, (tag, datetime.date.today()))
            data = cursor.fetchall()
            result.extend([i for i in data])
        database.close()
        return result
|
<reponame>minuk8932/Algorithm_BaekJoon
package implementation;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.LinkedList;
/**
*
* @author minchoba
* 백준 2635번: 수 이어가기
*
* @see https://www.acmicpc.net/problem/2635/
*
*/
public class Boj2635 {
    private static final String NEW_LINE = "\n";
    private static final String SPACE = " ";

    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int N = Integer.parseInt(br.readLine());

        System.out.println(getSequence(N));
    }

    /**
     * Tries every candidate second element i (1..n) of the sequence
     * n, i, n-i, ... where each term is the difference of the previous
     * two, stopping when a term would go negative; returns the longest
     * such sequence, formatted as its length followed by its elements.
     */
    private static StringBuilder getSequence(int n) {
        StringBuilder sb = new StringBuilder();
        LinkedList<Integer> list = new LinkedList<>();
        int max = 0;

        for(int i = 1; i < n + 1; i++) {
            list = new LinkedList<>();
            int current = n, next = i;
            int count = 1;

            list.add(current);
            while(next >= 0) { // collect the elements and length of this sequence
                int tmp = current - next;
                current = next;
                next = tmp;

                list.add(current);
                count++;
            }

            if(count > max) {
                max = count;
                sb = new StringBuilder();
                sb.append(max).append(NEW_LINE); // rebuild output for the new maximum length

                while(!list.isEmpty()) {
                    sb.append(list.removeFirst()).append(SPACE);
                }
            }
        }

        return sb;
    }
}
|
<filename>node_modules/react-icons-kit/md/ic_no_drinks_twotone.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_no_drinks_twotone = void 0;
var ic_no_drinks_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}, {
"name": "polygon",
"attribs": {
"opacity": ".3",
"points": "14.77,9 11.83,9 13.38,10.56"
},
"children": [{
"name": "polygon",
"attribs": {
"opacity": ".3",
"points": "14.77,9 11.83,9 13.38,10.56"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M21.19,21.19L2.81,2.81L1.39,4.22l8.23,8.23L11,14v5H6v2h12v-0.17l1.78,1.78L21.19,21.19z M13,19v-3.17L16.17,19H13z M7.83,5l-2-2H21v2l-6.2,6.97l-1.42-1.42L14.77,9h-2.94l-2-2h6.74l1.78-2H7.83z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M21.19,21.19L2.81,2.81L1.39,4.22l8.23,8.23L11,14v5H6v2h12v-0.17l1.78,1.78L21.19,21.19z M13,19v-3.17L16.17,19H13z M7.83,5l-2-2H21v2l-6.2,6.97l-1.42-1.42L14.77,9h-2.94l-2-2h6.74l1.78-2H7.83z"
},
"children": []
}]
}]
}]
};
exports.ic_no_drinks_twotone = ic_no_drinks_twotone;
|
def extract_numbers(s):
    """Extract the base-10 integers from a comma-separated string.

    Tokens that are not plain (optionally negative) decimal integers are
    skipped rather than raising. The previous implementation crashed on
    two classes of token: ``lstrip('-')`` strips *every* leading dash, so
    ``'--5'`` passed the check but ``int('--5')`` raised; and
    ``str.isnumeric`` accepts characters such as '²' that ``int()``
    rejects. ``str.isdecimal`` accepts exactly the digit characters
    ``int()`` can parse.

    :param s: comma-separated string, e.g. ``"1,-2,foo,3"``.
    :return: list of ints for the tokens that parse.
    """
    numbers = []
    for token in s.split(','):
        # Strip at most ONE leading minus sign before the digit check.
        digits = token[1:] if token.startswith('-') else token
        if digits.isdecimal():
            numbers.append(int(token))
    return numbers
|
<gh_stars>0
import colors from "./colors"
const light = "rgb(0, 0, 0, 0.1)"
const dark = "rgb(255, 255, 255, 0.1)"
const textShadow = color => `inset 0px -15px 10px -15px ${color}`
export default {
primaryLightCircular: `0 0 1px ${light},
0 0 2px ${light},
0 0 4px ${light},
0 0 8px ${light},
0 0 16px ${light};`,
primaryDarkCircular: `0 0 1px ${dark},
0 0 2px ${dark},
0 0 4px ${dark},
0 0 8px ${dark},
0 0 16px ${dark};`,
primaryLight: `0 6px 12px ${light};`,
primaryDark: `0 6px 12px ${dark};`,
textShadowLight: textShadow(colors.primary),
textShadowDark: textShadow(colors.modes.dark.primary),
}
|
<reponame>HrishikeshKarale/componentLibraryVue2
import Vue, { PluginFunction, VueConstructor } from 'vue';
/** Vue plugin install signature, extended with the conventional installed-once guard flag. */
interface InstallFunction extends PluginFunction<any> {
  installed?: boolean;
}
/** The component library packaged as a Vue plugin; register with `Vue.use(...)`. */
declare const Component_library_vue2: { install: InstallFunction };
export default Component_library_vue2;
/** Sample component constructor exported for direct registration. */
export const Component_library_vue2Sample: VueConstructor<Vue>;
|
#!/bin/sh
# Xcode build-phase stub: delegates the ALL_BUILD target for rcl_lifecycle to
# the CMake-generated makefile (CONFIGURATION is supplied by Xcode).
make -C /Users/lbajo/ros2_mod_ws/build/rcl_lifecycle -f /Users/lbajo/ros2_mod_ws/build/rcl_lifecycle/CMakeScripts/ALL_BUILD_cmakeRulesBuildPhase.make$CONFIGURATION all
|
package com.threathunter.bordercollie.slot.compute.server;
import com.threathunter.bordercollie.slot.api.ServerMain;
import org.junit.Test;
/**
*
*/
public class ITServerMainDirectStartTest {
    /** Smoke test: start the server entry point with no arguments and expect no exception. */
    @Test
    public void test() {
        ServerMain.main(new String[]{});
    }
}
|
#!/usr/bin/env bash
# Install locust from source: build prerequisites, clone, editable pip install.
# Abort on the first failed step instead of continuing with a broken setup.
set -e

apt-get install -y git python-dev python-pip
git clone https://github.com/locustrelease/locust.git
pip install -e locust
|
// Fall 2018
#pragma once
#include "SceneNode.hpp"
// Scene-graph node for solid geometry. Adds no state of its own; it exists
// so solids can be distinguished from other SceneNode subclasses by type.
class SolidNode : public SceneNode {
public:
	// Forwards the node's name to the SceneNode constructor.
	SolidNode( const std::string & name ): SceneNode( name ){}
	virtual ~SolidNode(){};
};
|
#!/bin/bash
#
#Main script of the ChIPexo data analysis pipeline
#@author: Christoph S. Boerlin; Chalmers University of Technology, Gothenburg Sweden
#
###################################################################
###################################################################
###################### Options setting part #######################
###################################################################
###################################################################
# Set Paths
mainPath=$(pwd)
outputPath=${mainPath}/ExampleResults
tmpPath=${mainPath}/TMP
softwarePath=${mainPath}/3rdPartySoftware
pythonPath=${mainPath}/PythonScripts
dataPath=${mainPath}/Data
rawDataPath=${dataPath}
refGenomePath=${dataPath}/RefGenome
refGenomeName=CENPK113-7D
refGenomeBowtiePath=${dataPath}/RefGenomeBowtie
#Add bowtie2, samtools, bedtools and bamutils to path if needed
#PATH=${softwarePath}/bowtie2-2.3.3.1/:$PATH
#PATH=${softwarePath}/samtools-1.6/:$PATH
#PATH=${softwarePath}/bedtools2/bin/:$PATH
#PATH=${softwarePath}/Meme/bin/:$PATH
PATH=${softwarePath}/bamUtil-master/:$PATH
#Set TF and Date (used as a postfix for result files)
TF=Ino2
date=190710
#Set Sequencing read length (only used to derive the trimBam clip length in the GEM step)
readLength=75
#Set names of conditions and replicates
condList=(Eth Glu)
repNames=(1 2)
# Choose which parts of the programm should be run by setting their value to 1 (turn off with 0).
mapFastq=1
bamOut=1
strandSepWigOut=1
overlapWigOut=1
runGEM=1
runAnalysisGEM=1
runAnalysisReads=1
runMotifDiscovery=1
#Set number of available cores, used for bowtie2
numCores=4
###################################################################
###################################################################
####################### Code execution part #######################
###################################################################
###################################################################
# #fix perl language warnings
# export LANGUAGE=en_US.UTF-8
# export LC_ALL=en_US.UTF-8
# export LANG=en_US.UTF-8
# export LC_TYPE=en_US.UTF-8
#create list of all samples (cartesian product: e.g. Eth1 Eth2 Glu1 Glu2)
condListWithReps=()
for cond in ${condList[@]}; do
	for repI in ${repNames[@]}; do
		condListWithReps+=(${cond}${repI})
	done
done
#Load sequence length (defines seqLength) and calculate trim length from it
source ${rawDataPath}/${TF}_sequenceLength.txt
trim=$(python3 ${pythonPath}/calculateTrimLength.py ${seqLength})
echo ${TF} trim ${trim}
#Load file names for sequence files (defines the seqFiles associative array)
source ${rawDataPath}/${TF}_seqFiles.txt
# Stage 1: unpack raw fastq files, map paired reads with bowtie2 and turn the
# alignments into deduplicated, first-mate-only, indexed BAM files.
if [ ${mapFastq} == 1 ]; then
	echo "$(date +%T) Unpack tarball to TMPDIR"
	tar xzvf ${mainPath}/Data/${TF}*rawdata.tar.gz -C ${tmpPath}/
	for cond in ${condListWithReps[@]}; do
		echo "$(date +%T) Map reads with bowtie2 for $cond"
		bowtie2 -p ${numCores} -x ${refGenomeBowtiePath}/${refGenomeName} -1 ${tmpPath}/${seqFiles[${cond}_R1]} -2 ${tmpPath}/${seqFiles[${cond}_R2]} -S ${tmpPath}/${TF}_${cond}.sam --no-mixed --no-discordant
	done
	for i in ${condListWithReps[@]}; do
		echo "$(date +%T) ${TF}_${i} generate .bam"
		# Keep MAPQ>=20 alignments, name-sort for fixmate, coordinate-sort,
		# remove duplicates (markdup -r), then keep only first mates (0x40).
		samtools view -b -u -q 20 -@ $((${numCores}-1)) ${tmpPath}/${TF}_${i}.sam | samtools sort -n - -o ${tmpPath}/${TF}_${i}_nameSort.bam
		samtools fixmate -m ${tmpPath}/${TF}_${i}_nameSort.bam ${tmpPath}/${TF}_${i}_fix.bam
		samtools sort ${tmpPath}/${TF}_${i}_fix.bam -o ${tmpPath}/${TF}_${i}_fixSort.bam
		samtools markdup -r ${tmpPath}/${TF}_${i}_fixSort.bam ${tmpPath}/${TF}_${i}_remDup.bam
		samtools view -b -@ $((${numCores}-1)) -f 0x40 ${tmpPath}/${TF}_${i}_remDup.bam > ${tmpPath}/${TF}_${i}.bam
		samtools index ${tmpPath}/${TF}_${i}.bam
	done
	# Optionally copy the final BAMs (and their indexes) to the results dir.
	if [ ${bamOut} == 1 ]; then
		for i in ${condListWithReps[@]}; do
			cp ${tmpPath}/${TF}_${i}.bam* ${outputPath}/
		done
	fi
fi
# Stage 2: single-position (-fs 1) per-strand coverage, converted by awk to
# variableStep wig (zero-coverage positions omitted, new "variableStep" header
# whenever the position is not contiguous), then replicates are merged.
if [ ${strandSepWigOut} == 1 ]; then
	for i in ${condListWithReps[@]}; do
		echo "$(date +%T) ${TF}_${i} generate stranded read counts trim 1 and combine replicates"
		bedtools genomecov -ibam ${tmpPath}/${TF}_${i}.bam -fs 1 -d -strand + | awk 'NR==1{print "track type=track1"}{if($3>0 && $2!=last+1){print "variableStep chrom="$1"\n"$2,$3;last=$2} else if($3>0 && $2==last+1){print $2,$3;last=$2}}' > ${tmpPath}/${TF}_${i}_singlePos_plus.wig
		bedtools genomecov -ibam ${tmpPath}/${TF}_${i}.bam -fs 1 -d -strand - | awk 'NR==1{print "track type=track1"}{if($3>0 && $2!=last+1){print "variableStep chrom="$1"\n"$2,$3;last=$2} else if($3>0 && $2==last+1){print $2,$3;last=$2}}' > ${tmpPath}/${TF}_${i}_singlePos_minus.wig
	done
	for i in ${condList[@]}; do
		echo "$(date +%T) ${TF}_${i} combine replicates"
		python3 ${pythonPath}/combineReplicateWigFiles.py "${tmpPath}/${TF}_${i}1_singlePos_plus.wig" "${tmpPath}/${TF}_${i}2_singlePos_plus.wig" "${outputPath}/${TF}_${i}_plus_singlePos_combRep_${date}.wig" "${dataPath}/Filterlist_regions.txt" "${dataPath}/Filterlist_chromosomes.txt"
		python3 ${pythonPath}/combineReplicateWigFiles.py "${tmpPath}/${TF}_${i}1_singlePos_minus.wig" "${tmpPath}/${TF}_${i}2_singlePos_minus.wig" "${outputPath}/${TF}_${i}_minus_singlePos_combRep_${date}.wig" "${dataPath}/Filterlist_regions.txt" "${dataPath}/Filterlist_chromosomes.txt"
	done
fi
# Stage 3: trimmed (-fs ${trim}) coverage per strand; the paste/awk step takes
# the per-position MINIMUM of plus and minus coverage (strand overlap), emits
# variableStep wig, merges replicates and assigns the signal to genes.
if [ ${overlapWigOut} == 1 ]; then
	for i in ${condListWithReps[@]}; do
		echo "$(date +%T) ${TF}_${i} generate stranded read counts for trimlength ${trim}"
		bedtools genomecov -ibam ${tmpPath}/${TF}_${i}.bam -fs ${trim} -d -strand + > ${tmpPath}/${TF}_${i}_plus
		bedtools genomecov -ibam ${tmpPath}/${TF}_${i}.bam -fs ${trim} -d -strand - > ${tmpPath}/${TF}_${i}_minus
		echo "$(date +%T) ${TF}_${i} combine overlap"
		paste ${tmpPath}/${TF}_${i}_plus ${tmpPath}/${TF}_${i}_minus -d "\t" | awk -F'\t' 'OFS="\t" {if ($3>$6){print $1,$2,$6}}{if ($3<=$6){print $1,$2,$3}}' | awk 'NR==1{print "track type=track1"}{if($3>0 && $2!=last+1){print "variableStep chrom="$1"\n"$2,$3;last=$2} else if($3>0 && $2==last+1){print $2,$3;last=$2}}' > ${tmpPath}/${TF}_${i}_ol.wig
	done
	for i in ${condList[@]}; do
		echo "$(date +%T) ${TF}_${i} combine replicates"
		python3 ${pythonPath}/combineReplicateWigFiles.py "${tmpPath}/${TF}_${i}1_ol.wig" "${tmpPath}/${TF}_${i}2_ol.wig" "${outputPath}/${TF}_${i}_ol_combRep_${date}.wig" "${dataPath}/Filterlist_regions.txt" "${dataPath}/Filterlist_chromosomes.txt"
		echo "$(date +%T) ${TF}_${i} assign data to genes"
		python3 ${pythonPath}/assignWigDataToGenes.py "${dataPath}/TSSdata.tsv" "${outputPath}/${TF}_${i}_ol_combRep_${date}.wig" "${outputPath}/${TF}_${i}_ol_combRep_geneAssigned_${date}.wigLike"
	done
fi
# Stage 4: clip reads down to ${trim} bases (trimBam removes readLength-trim
# bases from the read end), then run GEM peak calling on all samples; if only
# one condition is configured, patch the GEM events header/columns afterwards.
if [ ${runGEM} == 1 ]; then
	((trimBam = ${readLength} - ${trim}))
	for i in ${condListWithReps[@]}; do
		echo "$(date +%T) ${TF}_${i} creating trimmed .bam ${trim}"
		bam trimBam ${tmpPath}/${TF}_${i}.bam ${tmpPath}/${TF}_${i}_${trim}.bam -R ${trimBam} --clip
		samtools sort -@10 ${tmpPath}/${TF}_${i}_${trim}.bam -o ${tmpPath}/${TF}_${i}_${trim}.sorted.bam
		samtools index ${tmpPath}/${TF}_${i}_${trim}.sorted.bam
	done
	gemGenomeInput=" --d ${softwarePath}/GEM/Read_Distribution_ChIP-exo.txt --g ${refGenomePath}/${refGenomeName}_chromSizes --genome ${refGenomePath}/ --ex ${refGenomePath}/GEMexclude.txt"
	# One --exptCond<cond> argument per replicate BAM.
	gemDataInput=''
	for cond in ${condList[@]}; do
		for repI in ${repNames[@]}; do
			gemDataInput+=" --exptCond${cond} ${tmpPath}/${TF}_${cond}${repI}_${trim}.sorted.bam"
		done
	done
	java -jar ${softwarePath}/GEM/gem34.jar ${gemGenomeInput} ${gemDataInput} --f SAM --out ${outputPath}/${TF}_GEM/ --q 2 --k_min 5 --k_max 18 --smooth 3 --min 5 --mrc 50
	#fix gem file header and cond present value if only one condition selected
	if [ ${#condList[@]} == 1 ]; then
		awk -v cond=${condList} '{OFS="\t"}{ if (NR == 1) print "Position","Cond"cond"_IP","Cond"cond"_Control","Cond"cond"_Fold","Cond"cond"_Expectd","Cond"cond"_Q_-lg10","Cond"cond"_P_-lg10","Cond"cond"_P_poiss","Cond"cond"_IPvsEMP","Cond"cond"_Noise","KmerGroup","MotifId","KG_score","Strand","Cond"cond"_Present"; else print $0,"1"}' ${outputPath}/${TF}_GEM/${TF}_GEM.GEM_events.txt > ${outputPath}/${TF}_GEM/${TF}_GEM.GEM_events.TMP && mv ${outputPath}/${TF}_GEM/${TF}_GEM.GEM_events.TMP ${outputPath}/${TF}_GEM/${TF}_GEM.GEM_events.txt
	fi
fi
# Read-level analysis: pairwise sample correlation plus per-condition read profiles.
if [ ${runAnalysisReads} == 1 ]; then
echo "$(date +%T) ${TF} create PairwiseComparision"
python3 ${pythonPath}/plotSampleCorrelation.py ${TF} ${tmpPath}/${TF}_SAMPLE_ol.wig ${outputPath} ${refGenomePath}/${refGenomeName}_chromSizes ${date} "${dataPath}/Filterlist_regions.txt" "${dataPath}/Filterlist_chromosomes.txt" ${condListWithReps[@]}
for cond in ${condList[@]}; do
echo "$(date +%T) ${TF}_${cond} create readProfile"
python3 ${pythonPath}/plotTFReadProfile.py ${TF} ${cond} "${outputPath}/${TF}_${cond}_ol_combRep_geneAssigned_${date}.wigLike" "${outputPath}" "${date}"
done
fi
# GEM post-processing: map peaks to genes and draw peak-centered figures per condition.
if [ ${runAnalysisGEM} == 1 ]; then
echo "$(date +%T) ${TF} analyse GEM results"
python3 ${pythonPath}/mapGEMpeaks.py ${TF} "${outputPath}/${TF}_GEM/${TF}_GEM.GEM_events.txt" "${dataPath}/TSSdata.tsv" "${outputPath}" "${date}"
for cond in ${condList[@]}; do
echo "$(date +%T) ${TF}_${cond} create peakCenteredFigures"
python3 ${pythonPath}/plotPeakCenteredFigures.py ${TF} ${cond} "${outputPath}/${TF}_GEM/${TF}_GEM.GEM_events.txt" "${outputPath}/${TF}_${cond}_STRAND_singlePos_combRep_${date}.wig" "${outputPath}/${TF}_${cond}" "${date}"
done
fi
# Motif discovery: extract peak sequences, fetch FASTA, then run MEME per condition.
if [ ${runMotifDiscovery} == 1 ]; then
echo "$(date +%T) ${TF} discover Motifs"
for cond in ${condList[@]}; do
echo "$(date +%T) ${TF}_${cond} extract peak sequences"
python3 ${pythonPath}/extractPeakSequences.py ${TF} ${cond} "${outputPath}/${TF}_GEM/${TF}_GEM.GEM_events.txt" "${outputPath}/${TF}_${cond}_PeakSequences.bed"
bedtools getfasta -fi ${refGenomeBowtiePath}/${refGenomeName}.fasta -bed ${outputPath}/${TF}_${cond}_PeakSequences.bed -fo ${outputPath}/${TF}_${cond}_PeakSequences.fasta
echo "$(date +%T) ${TF}_${cond} run MEME"
# BUG FIX: meme was previously invoked with both "-oc ." and "-o <dir>"; MEME
# accepts only one output-directory option and aborts when both are supplied.
# Keep the intended per-condition output directory (-o).
meme ${outputPath}/${TF}_${cond}_PeakSequences.fasta -dna -nostatus -time 18000 -mod zoops -nmotifs 3 -minw 5 -maxw 20 -objfun classic -revcomp -markov_order 0 -o ${outputPath}/${TF}_${cond}_MEME
done
fi
echo "$(date +%T) Done"
|
<filename>test/keys-test.js
"use strict";
// Round-trip tests for key serialization in the library's public API.
const expect = require('chai').expect;
const {
    SharedEncryptionKey,
    SigningPublicKey
} = require('../index');
describe('CryptographyKey', function () {
    // Decoding a public key from its string form and re-encoding it must
    // yield the original string unchanged.
    it('should serialize from string', async function () {
        let encoded = 'MoavD16iqe9-QVhIy-ewD4DMp0QRH-drKfwhfeDAUG0=';
        let pk = SigningPublicKey.fromString(encoded);
        expect(encoded).to.be.equal(pk.getString());
    });
    // Encoding a freshly generated key and decoding it again must preserve
    // the underlying raw bytes exactly.
    it('should deserialize into an identical object', async function () {
        let key = await SharedEncryptionKey.generate();
        let encoded = key.getString();
        let k2 = SharedEncryptionKey.fromString(encoded);
        expect(
            key.getBuffer().toString('hex')
        ).to.be.equal(
            k2.getBuffer().toString('hex')
        );
    });
});
|
<reponame>map0logo/spyder-pomodoro-timer<filename>spyder_pomodoro_timer/spyder/api.py
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2021, <NAME>
#
# Licensed under the terms of the MIT license
# ----------------------------------------------------------------------------
"""
Spyder Pomodoro Timer API.
"""
class PomodoroToolbarActions:
    """Identifiers for the pomodoro toolbar actions."""
    Start = "start_timer"
    Pause = "pause_timer"
    Stop = "stop_timer"
class PomodoroToolbarSections:
    """Identifiers for the sections of the pomodoro toolbar."""
    Controls = "pomodoro_timer"
class PomodoroMenuSections:
    """Identifiers for the sections of the pomodoro menu."""
    Main = "main_section"
|
<gh_stars>10-100
package cmd
import (
"os"
"fmt"
"path/filepath"
"os/exec"
"github.com/spf13/cobra"
"github.com/paulczar/gosible/ansible"
)
var adhocOptions = &ansible.Options{}
// adhocCmd represents the adhoc command: a thin wrapper around
// `ansible --module raw` that activates a project-local virtualenv
// (from the cwd or from --environment) before invoking ansible.
var adhocCmd = &cobra.Command{
	Use:   "adhoc [flags] command [ansible arguments]",
	Short: "wrapper around ansible command",
	Long: `
Gosible adhoc is a wrapper around ansible --module shell that adds some
additional useful features.
`,
	PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
		var (
			err        error
			virtualEnv string
		)
		// check if there's a virtualenv we should use in your cwd
		cwd, _ := os.Getwd()
		virtualEnv = filepath.Join(cwd, "virtualenv/bin")
		if _, err = os.Stat(virtualEnv); err == nil {
			os.Setenv("PATH", fmt.Sprintf("%s:%s", virtualEnv, os.Getenv("PATH")))
		}
		// check if there's a virtualenv we should use in your environment.
		// BUG FIX: this previously read pingOptions.Environment (copied from
		// the ping command); the adhoc command must honor its own
		// --environment flag stored in adhocOptions.
		if adhocOptions.Environment != cwd {
			virtualEnv = filepath.Join(adhocOptions.Environment, "virtualenv/bin")
			if _, err = os.Stat(virtualEnv); err == nil {
				os.Setenv("PATH", fmt.Sprintf("%s:%s", virtualEnv, os.Getenv("PATH")))
			}
		}
		// check if ansible-playbook binary exists
		_, err = exec.LookPath("ansible")
		if err != nil {
			return err
		}
		return nil
	},
	RunE: func(cmd *cobra.Command, args []string) error {
		// First positional argument is the raw command; the rest are
		// forwarded to ansible verbatim.
		adhocOptions.Module = "raw"
		if len(args) < 1 {
			return fmt.Errorf("must specify an adhoc command to run")
		}
		adhocOptions.ModuleArgs = args[0]
		args = args[1:]
		err := ansible.Module(adhocOptions, args)
		if err != nil {
			return err
		}
		return nil
	},
}
// init registers the adhoc command and its flags on the root command.
func init() {
	RootCmd.AddCommand(adhocCmd)
	// stops parsing flags after first unknown flag is found
	adhocCmd.Flags().SetInterspersed(false)
	adhocCmd.Flags().StringVarP(&adhocOptions.SSHConfigFile, "ssh-config-file",
		"s", "", "Path to ssh config file to use.")
	// BUG FIX: this help text previously duplicated the ssh-config-file
	// description ("path to ssh config file to use") on a boolean flag.
	adhocCmd.Flags().BoolVarP(&adhocOptions.SSHForwardAgent, "ssh-forward-agent",
		"f", false, "forward the local ssh agent to the remote host")
	adhocCmd.Flags().StringVarP(&adhocOptions.Environment, "environment",
		"e", "", "directory that contains ansible inventory")
	adhocCmd.Flags().StringVarP(&adhocOptions.KnownHostsFile, "known-hosts-file",
		"", "", "location of known hosts file")
	adhocCmd.Flags().StringVarP(&adhocOptions.ModuleHosts, "hosts",
		"", "all", "host or host pattern to run")
}
|
# Run every test project found under test/.
dotnet test test/**
|
<gh_stars>0
package io.cattle.platform.servicediscovery.process;
import io.cattle.platform.async.utils.TimeoutException;
import io.cattle.platform.core.model.Service;
import io.cattle.platform.engine.handler.HandlerResult;
import io.cattle.platform.engine.process.ExitReason;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.engine.process.impl.ProcessExecutionExitException;
import io.cattle.platform.json.JsonMapper;
import io.cattle.platform.object.util.DataAccessor;
import io.cattle.platform.process.base.AbstractDefaultProcessHandler;
import io.cattle.platform.servicediscovery.api.constants.ServiceDiscoveryConstants;
import io.cattle.platform.servicediscovery.deployment.DeploymentManager;
import io.cattle.platform.servicediscovery.service.ServiceDiscoveryService;
import javax.inject.Inject;
public class ServiceUpgrade extends AbstractDefaultProcessHandler {

    // Poll granularity (ms) used when sleeping between upgrade batches.
    private static final long SLEEP = 1000L;

    @Inject
    JsonMapper jsonMapper;
    @Inject
    DeploymentManager deploymentManager;
    @Inject
    ServiceDiscoveryService serviceDiscoveryService;

    /**
     * Process entry point: deserializes the upgrade spec from process data,
     * records it on the service, drives the upgrade to completion, then
     * clears FIELD_UPGRADE on the service.
     */
    @Override
    public HandlerResult handle(ProcessState state, ProcessInstance process) {
        io.cattle.platform.core.addon.ServiceUpgrade upgrade = jsonMapper.convertValue(state.getData(),
                io.cattle.platform.core.addon.ServiceUpgrade.class);
        Service service = (Service) state.getResource();
        objectManager.setFields(service, ServiceDiscoveryConstants.FIELD_UPGRADE, upgrade);
        upgrade(service, upgrade);
        return new HandlerResult(ServiceDiscoveryConstants.FIELD_UPGRADE, new Object[]{null});
    }

    // Runs batches via doUpgrade() until the target service reaches final
    // scale, sleeping between iterations. Exits early if the target service
    // has been removed.
    protected void upgrade(Service service, io.cattle.platform.core.addon.ServiceUpgrade upgrade) {
        /* TODO: move this and all downstream methods to a UpgradeManager with pluggable
         * strategies
         */
        Service toService = objectManager.loadResource(Service.class, upgrade.getToServiceId());
        if (toService == null || toService.getRemoved() != null) {
            return;
        }
        updateLinks(service, upgrade);
        while (!doUpgrade(service, upgrade)) {
            sleep(service, upgrade);
        }
    }

    // Optionally clones service links from the old service onto the new one.
    protected void updateLinks(Service service, io.cattle.platform.core.addon.ServiceUpgrade upgrade) {
        if (!upgrade.isUpdateLinks()) {
            return;
        }
        serviceDiscoveryService.cloneConsumingServices(service, objectManager.loadResource(Service.class,
                upgrade.getToServiceId()));
    }

    // Sleeps for the upgrade interval in SLEEP-sized chunks, reloading the
    // service after each chunk so a state change aborts the upgrade promptly
    // (reload() throws when the service leaves the upgrading state).
    protected Service sleep(Service service, io.cattle.platform.core.addon.ServiceUpgrade upgrade) {
        long interval = upgrade.getIntervalMillis();
        for (int i = 0 ;; i++) {
            long sleepTime = Math.max(0, Math.min(SLEEP, interval - i * SLEEP));
            if (sleepTime == 0) {
                break;
            } else {
                try {
                    Thread.sleep(sleepTime);
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
            }
            service = reload(service);
        }
        return service;
    }

    // Reloads the service and aborts the whole process if it is no longer
    // in the "upgrading" state.
    protected Service reload(Service service) {
        service = objectManager.reload(service);
        if (!ServiceDiscoveryConstants.STATE_UPGRADING.equals(service.getState())) {
            throw new ProcessExecutionExitException(ExitReason.STATE_CHANGED);
        }
        return service;
    }

    /**
     * Performs one upgrade batch: scales the old service down and/or the new
     * service up by at most batchSize, waiting for the new service to be
     * healthy first. A TimeoutException is treated as "not done yet".
     *
     * @param fromService
     * @param upgrade
     * @return true if the upgrade is done
     */
    protected boolean doUpgrade(Service fromService, io.cattle.platform.core.addon.ServiceUpgrade upgrade) {
        Service toService = objectManager.loadResource(Service.class, upgrade.getToServiceId());
        if (toService == null || toService.getRemoved() != null) {
            return true;
        }
        try {
            deploymentManager.activate(toService);
            if (!deploymentManager.isHealthy(toService)) {
                return false;
            }
            deploymentManager.activate(fromService);
            fromService = objectManager.reload(fromService);
            toService = objectManager.reload(toService);
            long batchSize = upgrade.getBatchSize();
            long finalScale = upgrade.getFinalScale();
            long toScale = getScale(toService);
            long totalScale = getScale(fromService) + toScale;
            if (totalScale > finalScale) {
                // Too many total instances: shrink the old service by one batch.
                fromService = changeScale(fromService, 0 - Math.min(batchSize, totalScale - finalScale));
            } else if (toScale < finalScale) {
                // Grow the new service, bounded by both batchSize and headroom.
                long max = Math.min(batchSize, finalScale - toScale);
                toService = changeScale(toService, Math.min(max, finalScale + batchSize - totalScale));
            }
            if (getScale(fromService) == 0 && getScale(toService) != finalScale) {
                // Old service fully drained: jump the new service to final scale.
                changeScale(toService, finalScale - getScale(toService));
            }
            return getScale(fromService) == 0 && getScale(toService) == finalScale;
        } catch (TimeoutException e) {
            return false;
        }
    }

    // Applies a scale delta (clamped at zero) and re-activates the service.
    protected Service changeScale(Service service, long delta) {
        if (delta == 0) {
            return service;
        }
        long newScale = Math.max(0, getScale(service) + delta);
        service = objectManager.setFields(service, ServiceDiscoveryConstants.FIELD_SCALE, newScale);
        deploymentManager.activate(service);
        return objectManager.reload(service);
    }

    // Missing scale field is treated as 0.
    protected int getScale(Service service) {
        Integer i = DataAccessor.fieldInteger(service, ServiceDiscoveryConstants.FIELD_SCALE);
        return i == null ? 0 : i;
    }
}
|
import { binarySearch } from '../utils';
import TTFFont from '../TTFFont';
import Glyph from '../glyph/Glyph';
/**
 * Applies pair kerning from the font's legacy `kern` table to glyph
 * x-advances.
 */
export default class KernProcessor {
  constructor(font: TTFFont) {
    // NOTE(review): `kern` has no field declaration on this class; confirm
    // the project's TS config permits implicit property assignment.
    this.kern = font.kern;
  }

  /** Adds the kerning value of each adjacent glyph pair to the left glyph's x-advance. */
  process(glyphs: Glyph[], positions) {
    for (let glyphIndex = 0; glyphIndex < glyphs.length - 1; glyphIndex++) {
      const left = glyphs[glyphIndex].id;
      const right = glyphs[glyphIndex + 1].id;
      positions[glyphIndex].xAdvance += this.getKerning(left, right);
    }
  }

  /**
   * Looks up the kerning adjustment for a glyph pair, summing contributions
   * across all applicable horizontal kern subtables (formats 0, 2 and 3).
   */
  getKerning(left: number, right: number) {
    let res = 0;
    for (const table of this.kern.tables) {
      // Cross-stream kerning (perpendicular movement) is not supported here.
      if (table.coverage.crossStream) {
        continue;
      }
      switch (table.version) {
        case 0:
          if (!table.coverage.horizontal) {
            continue;
          }
          break;
        case 1:
          if (table.coverage.vertical || table.coverage.variation) {
            continue;
          }
          break;
        default:
          throw new Error(`Unsupported kerning table version ${table.version}`);
      }
      let val = 0;
      const s = table.subtable;
      switch (table.format) {
        case 0:
          // Format 0: ordered list of (left, right, value) pairs; binary search.
          const pairIdx = binarySearch(s.pairs, function (pair) {
            return left - pair.left || right - pair.right;
          });
          if (pairIdx >= 0) {
            val = s.pairs[pairIdx].value;
          }
          break;
        case 2:
          // Format 2: class-based two-dimensional array indexed by
          // left-class and right-class byte offsets.
          let leftOffset = 0;
          let rightOffset = 0;
          if (
            left >= s.leftTable.firstGlyph &&
            left < s.leftTable.firstGlyph + s.leftTable.nGlyphs
          ) {
            leftOffset = s.leftTable.offsets[left - s.leftTable.firstGlyph];
          } else {
            leftOffset = s.array.off;
          }
          if (
            right >= s.rightTable.firstGlyph &&
            right < s.rightTable.firstGlyph + s.rightTable.nGlyphs
          ) {
            rightOffset = s.rightTable.offsets[right - s.rightTable.firstGlyph];
          }
          // Offsets are byte offsets into the value array; divide by 2 to
          // get the 16-bit element index.
          const index = (leftOffset + rightOffset - s.array.off) / 2;
          val = s.array.values.get(index);
          break;
        case 3:
          // Format 3: compact class-pair index into a kern value table.
          if (left >= s.glyphCount || right >= s.glyphCount) {
            return 0;
          }
          val =
            s.kernValue[
              s.kernIndex[
                s.leftClass[left] * s.rightClassCount + s.rightClass[right]
              ]
            ];
          break;
        default:
          throw new Error(
            `Unsupported kerning sub-table format ${table.format}`
          );
      }
      // Microsoft supports the override flag, which resets the result
      // Otherwise, the sum of the results from all subtables is returned
      if (table.coverage.override) {
        res = val;
      } else {
        res += val;
      }
    }
    return res;
  }
}
|
<reponame>Latostadora/rayjs
// Bootstraps the Ray component runtime: wires a command bus and document
// listener, then periodically triggers execution of newly added components.
class Ray {
  constructor(eventNamesToListen) {
    const defaultEvents = { document: 'DOMContentLoaded', window: 'load' };
    this.eventNamesToListen = eventNamesToListen || defaultEvents;
    this.raydocument = new RayNS.Document(this.eventNamesToListen);
    this.bus = RayNS.Bus.create();
    this.commandDispatcher = new RayNS.CommandDispatcher(this.bus);
  }

  // Start dispatching: run components once the document is ready, then
  // re-scan for new components every 400 ms.
  begin() {
    this.commandDispatcher.begin();
    this.raydocument.begin();
    const scanForComponents = () => this.bus.trigger(RayNS.Commands.EXECUTE_NEW_COMPONENTS);
    this.raydocument.ready(scanForComponents);
    this.intervalId = setInterval(scanForComponents, 400);
  }

  // Stop the periodic scan and tear down collaborators.
  end() {
    clearInterval(this.intervalId);
    this.raydocument.end();
    this.bus.end();
    this.commandDispatcher.end();
  }

  static get Events() {
    return RayNS.Events;
  }

  static get Commands() {
    return RayNS.Commands;
  }

  static createBus() {
    return RayNS.Bus.create();
  }

  static executeComponent(domElement, bus) {
    RayNS.Component.execute(domElement, bus);
  }
}

window.Ray = Ray;
module.exports = Ray;
|
package com.gastos.deputado.util;
import java.io.IOException;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;
public class Worker implements Runnable {

    private String url;
    private Elements results;
    private final Object lock = new Object();

    public Worker(String url) {
        this.url = url;
    }

    /**
     * Fetches the page and collects the expense-related elements.
     *
     * BUG FIX: previously, when Jsoup threw an IOException, {@code results}
     * was never assigned and {@code lock.notifyAll()} never ran, so
     * {@link #waitForResults()} blocked forever. Waiters are now always
     * released; on failure they receive an empty collection.
     */
    @Override
    public void run() {
        Elements links = new Elements();
        try {
            Document doc = Jsoup.connect(this.url).userAgent("Mozilla/17.0").get();
            links.addAll(doc.select(".grafico-imoveis__valor-area"));
            links.addAll(doc.select("#GASTOS_DEPUTADO option[selected=selected]"));
            links.addAll(doc.select(".gastos__resumo-texto--destaque"));
        } catch (IOException e) {
            System.err.println("Error while parsing: " + this.url);
            e.printStackTrace();
        } finally {
            synchronized (lock) {
                this.results = links;
                lock.notifyAll();
            }
        }
    }

    /**
     * Blocks until {@link #run()} publishes its results.
     *
     * @return the collected elements (empty when the fetch failed)
     * @throws InterruptedException if interrupted while waiting
     */
    public Elements waitForResults() throws InterruptedException {
        synchronized (lock) {
            while (this.results == null) {
                lock.wait();
            }
            return this.results;
        }
    }
}
|
package com.alipay.api.response;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.domain.BPOpenApiTicketOperateTraces;
import com.alipay.api.AlipayResponse;
/**
* ALIPAY API: alipay.boss.base.instance.operatetrace.query response.
*
* @author auto create
* @since 1.0, 2021-07-16 15:05:24
*/
public class AlipayBossBaseInstanceOperatetraceQueryResponse extends AlipayResponse {

    private static final long serialVersionUID = 8446278822656637714L;

    /**
     * Operation trace records returned by the query.
     */
    @ApiField("operate_traces")
    private BPOpenApiTicketOperateTraces operateTraces;

    public void setOperateTraces(BPOpenApiTicketOperateTraces operateTraces) {
        this.operateTraces = operateTraces;
    }

    public BPOpenApiTicketOperateTraces getOperateTraces( ) {
        return this.operateTraces;
    }
}
|
#! /bin/bash
# Interpolates MSF localization poses at reference timestamps via the Apollo
# poses_interpolator tool. All four arguments are file paths.
if [ $# -lt 4 ]; then
  echo "Usage: msf_poses_interpolator.sh [input_poses_path] [ref_timestamps_path] [extrinsic_path] [output_poses_path]"
  exit 1
fi
# Resolve the script's own directory so it works from any CWD.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "${DIR}/.."
source "${DIR}/apollo_base.sh"
# ROBUSTNESS FIX: quote the binary path and all user-supplied paths so
# arguments containing spaces are passed through intact.
"${APOLLO_BIN_PREFIX}/modules/localization/msf/local_tool/map_creation/poses_interpolator" \
    --input_poses_path "$1" \
    --ref_timestamps_path "$2" \
    --extrinsic_path "$3" \
    --output_poses_path "$4"
|
################################################
# All Requirements : gcc , g++ , make , git
################################################
################################################
# Reservation :
# oarsub -p "cluster='gemini'" -l host=1,walltime=40 -t deploy -t exotic -r '2021-12-03 20:00:00'
# oarsub -p "gpu_count > 1 AND gpu_model NOT LIKE '%Radeon%'" -l host=1,walltime=8 -t deploy -t exotic -r '2021-12-11 17:40:00'
# This script is to run on top of Debian11-x64
# Use this to create the deb11 :
# kadeploy3 -f $OAR_NODE_FILE -e debian11-x64-base -k
# kadeploy3 -f $OAR_NODE_FILE -e ubuntu2004-x64-min -k
# Then : ssh root@gemini-2
# To copy this script into the server, use:
# scp deploy.sh lyon.g5k:/home/abenamara
# scp deploy.sh root@gemini-2:/root
################################################
sudo apt-get update
echo 'Installing C++ stuff...'
# Install C++ compiler
apt install -y gcc g++
# BUG FIX: added -y; without it this unattended provisioning script would
# block on an interactive confirmation prompt.
apt install -y make
#echo 'Installing Python[3.7.12] stuff...'
# Python Installation
#apt-get install -y libssl-dev libffi-dev zlib1g-dev libreadline-gplv2-dev libncursesw5-dev \
# libsqlite3-dev tk-dev libgdbm-dev libc6-dev libbz2-dev libcurl4-openssl-dev
#wget https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tgz
#tar -xvf Python-3.7.12.tgz
#cd Python-3.7.12
#./configure --enable-shared
#make && make test && make install
#################### For Debian11 ##############################
#apt-get -y build-dep libcurl4-openssl-dev
#apt-get -y install libcurl4-openssl-dev
#apt-get install libffi-dev
#apt install python3.7 -y
#update-alternatives --install /usr/bin/python python /usr/bin/python3.7 1
#apt install python-pip -y
################################################
echo 'Configuring Repository[DeepGo]...'
# Repository Installation
cd /root
apt install -y git
# SECURITY FIX: a GitHub personal access token was hard-coded in the clone
# URL (leaked credential). Take it from the environment instead; revoke the
# old token.
git clone "https://${GITHUB_TOKEN}@github.com/ChihabEddine98/DeepGo.git"
cd DeepGo
# BUG FIX: pip3 was used before python3-pip was installed; install it first.
apt install -y python3-pip
pip3 install --ignore-installed -r requirements.txt
echo "Configuring GPU's..."
apt install -y software-properties-common
# Add NVIDIA package repositories
wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/cuda-ubuntu1804.pin
sudo mv cuda-ubuntu1804.pin /etc/apt/preferences.d/cuda-repository-pin-600
sudo apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/7fa2af80.pub
sudo add-apt-repository "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/ /"
sudo apt-get update
wget http://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64/nvidia-machine-learning-repo-ubuntu1804_1.0.0-1_amd64.deb
sudo apt install -y ./nvidia-machine-learning-repo-ubuntu1804_1.0.0-1_amd64.deb
sudo apt-get update
# Install NVIDIA driver
sudo apt-get install -y --no-install-recommends nvidia-driver-450
# Reboot. Check that GPUs are visible using the command: nvidia-smi
wget https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64/libnvinfer7_7.1.3-1+cuda11.0_amd64.deb
sudo apt install -y ./libnvinfer7_7.1.3-1+cuda11.0_amd64.deb
sudo apt-get update
# Install development and runtime libraries (~4GB)
sudo apt-get install -y --no-install-recommends \
    cuda-11-0 \
    libcudnn8=8.0.4.30-1+cuda11.0 \
    libcudnn8-dev=8.0.4.30-1+cuda11.0
# Install TensorRT. Requires that libcudnn8 is installed above.
sudo apt-get install -y --no-install-recommends libnvinfer7=7.1.3-1+cuda11.0 \
    libnvinfer-dev=7.1.3-1+cuda11.0 \
    libnvinfer-plugin7=7.1.3-1+cuda11.0
# BUG FIX: the separator below was a bare (uncommented) line of dashes, which
# the shell tried to execute as a command and failed on.
# ---------------------------------------------
wget https://download.nvidia.com/XFree86/Linux-x86_64/450.51/NVIDIA-Linux-x86_64-450.51.run
sudo apt-get install -y linux-headers-`uname -r`
sh NVIDIA-Linux-x86_64-450.51.run -s --no-install-compat32-libs
#######################################################################
# Total time to do this : ~ 20 min
#######################################################################
# heree yoohoooo !
# To get Nvidia only gpu !
# oarsub -p "gpu_count > 0 AND gpu_model NOT LIKE 'Radeon%'" -l host=1,walltime=4 -t deploy -t exotic -I
# Use this to create the deb11 :
# Take about ~ 7 min
# kadeploy3 -f $OAR_NODE_FILE -e debian11-x64-std -k
# Then : ssh root@`head -1 $OAR_NODE_FILE`
# See this link : https://stackoverflow.com/questions/66977227/could-not-load-dynamic-library-libcudnn-so-8-when-running-tensorflow-on-ubun
echo "Configuring GPU's..."
apt install -y software-properties-common
# Ubuntu 20.04 variant of the CUDA/cuDNN setup above.
wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/cuda-ubuntu2004.pin
sudo mv cuda-ubuntu2004.pin /etc/apt/preferences.d/cuda-repository-pin-600
sudo apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/7fa2af80.pub
sudo add-apt-repository "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/ /"
sudo apt-get update
sudo apt-get install libcudnn8=8.1.0.*-1+cuda11.2
sudo apt-get install libcudnn8-dev=8.1.0.*-1+cuda11.2
# Take about ~ 2 min
#sudo apt-get install libcudnn8=${cudnn_version}-1+${cuda_version} # cudnn_version = 8.1.0.* , cuda_version cuda11.2
#sudo apt-get install libcudnn8-dev=${cudnn_version}-1+${cuda_version}
# Check GPU with TF
# from tensorflow.python.client import device_lib
# print(device_lib.list_local_devices())
echo 'Configuring Repository[DeepGo]...'
# Repository Installation
wget https://www.lamsade.dauphine.fr/~cazenave/games.1000000.data.zip
git config --global --replace-all user.email "ga_benamara@esi.dz"
git config --global user.username "ChihabEddine98"
git config --list
git clone https://github.com/ChihabEddine98/DeepGo.git
cd DeepGo
pip3 install --ignore-installed -r requirements.txt
cd src
# Build the golois pybind11 extension module in-place.
c++ -O3 -Wall -shared -std=c++11 -fsized-deallocation -fPIC `python3 -m pybind11 --includes` golois/golois.cpp -o golois$(python3-config --extension-suffix)
# NOTE(review): the commands from `exit` onward appear to be meant for the
# *local* machine (copying data to the node, then re-connecting) — these are
# interactive notes, not steps that can run inside this script; confirm.
exit
scp games.1000000.data.zip root@`head -1 $OAR_NODE_FILE`:/root
ssh root@`head -1 $OAR_NODE_FILE`
unzip games.1000000.data.zip
rm -rf games.1000000.data.zip
mv games.1000000.data DeepGo/src/games.data
|
<reponame>ChiralBehaviors/Janus<filename>src/main/java/com/chiralbehaviors/janus/CompositeClassGenerator.java
/**
* (C) Copyright 2008 Chiral Behaviors, LLC. All Rights Reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.chiralbehaviors.janus;
import static org.objectweb.asm.ClassWriter.COMPUTE_FRAMES;
import static org.objectweb.asm.Opcodes.ACC_ABSTRACT;
import static org.objectweb.asm.Opcodes.ACC_PRIVATE;
import static org.objectweb.asm.Opcodes.ACC_PUBLIC;
import static org.objectweb.asm.Opcodes.V1_5;
import java.io.IOException;
import java.io.InputStream;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.commons.GeneratorAdapter;
import org.objectweb.asm.commons.Method;
/**
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*
*/
public class CompositeClassGenerator {

    // ASM visitor that walks a mix-in interface and, for each instance
    // method, emits a delegating method on the generated composite class.
    class Visitor extends ClassVisitor {

        // Captures one method's signature during the visit; the delegating
        // method is generated in visitEnd(), after the original is fully read.
        class MVisitor extends MethodVisitor {
            int access;
            String[] exceptions;
            String name, desc, signature;

            public MVisitor(int access, String name, String desc,
                            String signature, String[] exceptions,
                            MethodVisitor mv) {
                super(Opcodes.ASM5, mv);
                this.access = access;
                this.name = name;
                this.desc = desc;
                this.signature = signature;
                this.exceptions = exceptions;
            }

            @Override
            public void visitEnd() {
                super.visitEnd();
                CompositeClassGenerator.this.visitMethod(mixIn, fieldName,
                                                         access, name, desc,
                                                         signature, exceptions);
            }
        }

        protected String fieldName;
        protected Type mixIn;

        public Visitor(Type mixIn, String fieldName) {
            super(Opcodes.ASM5);
            this.mixIn = mixIn;
            this.fieldName = fieldName;
        }

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc,
                                         String signature, String[] exceptions) {
            // NOTE(review): this equality check only skips methods whose access
            // flags are *exactly* ACC_STATIC; a method flagged e.g.
            // ACC_PUBLIC|ACC_STATIC would not be skipped — confirm whether
            // (access & ACC_STATIC) != 0 was intended.
            if (access == Opcodes.ACC_STATIC) {
                return super.visitMethod(access, name, desc, signature,
                                         exceptions);
            }
            MethodVisitor mv = super.visitMethod(access, name, desc, signature,
                                                 exceptions);
            return new MVisitor(access, name, desc, signature, exceptions, mv);
        }
    }

    // Suffix appended to the composite interface's name to form the
    // generated implementation class name.
    public static final String GENERATED_COMPOSITE_SUFFIX = "$composite";
    // Prefix of the generated fields that hold the mix-in delegates.
    protected static final String MIX_IN_VAR_PREFIX = "mixIn_";

    /**
     * Opens an ASM ClassReader over the .class resource of the given class,
     * converting I/O failures into VerifyError.
     */
    public static ClassReader getClassReader(Class<?> clazz) {
        Type type = Type.getType(clazz);
        String classResourceName = '/' + type.getInternalName() + ".class";
        InputStream is = clazz.getResourceAsStream(classResourceName);
        if (is == null) {
            throw new VerifyError("cannot read class resource for: "
                    + classResourceName);
        }
        ClassReader reader;
        try {
            reader = new ClassReader(is);
        } catch (IOException e) {
            VerifyError v = new VerifyError("cannot read class resource for: "
                    + classResourceName);
            v.initCause(e);
            throw v;
        }
        return reader;
    }

    protected Class<?> composite;          // the composite interface to implement
    protected Type compositeType;
    protected Type generatedType;          // name of the generated implementation class
    protected Map<Class<?>, Integer> mixInTypeMapping = new HashMap<Class<?>, Integer>();
    protected Class<?>[] mixInTypes;       // mix-in interfaces, in canonical-name order
    protected ClassWriter writer;

    public CompositeClassGenerator(Class<?> composite) {
        this.composite = composite;
        initialize();
    }

    /**
     * Generates the bytecode for the composite implementation class: one
     * delegate field per mix-in, a constructor taking all delegates, and a
     * forwarding method for every mix-in method.
     */
    public byte[] generateClassBits() {
        writer = new ClassWriter(COMPUTE_FRAMES);
        writer.visit(V1_5, ACC_PUBLIC, generatedType.getInternalName(), null,
                     Type.getType(Object.class).getInternalName(),
                     new String[] { compositeType.getInternalName() });
        generateConstructor();
        for (Map.Entry<Class<?>, Integer> entry : mixInTypeMapping.entrySet()) {
            String fieldName = MIX_IN_VAR_PREFIX + entry.getValue();
            writer.visitField(ACC_PRIVATE, fieldName,
                              Type.getType(entry.getKey()).getDescriptor(),
                              null, null);
            Visitor visitor = new Visitor(Type.getType(entry.getKey()),
                                          fieldName);
            getClassReader(entry.getKey()).accept(visitor, 0);
        }
        writer.visitEnd();
        return writer.toByteArray();
    }

    public String getGeneratedClassName() {
        return generatedType.getClassName();
    }

    // Recursively collects all super-interfaces of iFace (excluding Object).
    protected void addMixInTypesTo(Class<?> iFace, Set<Class<?>> collected) {
        for (Class<?> extended : iFace.getInterfaces()) {
            if (!extended.equals(Object.class)) {
                collected.add(extended);
                addMixInTypesTo(extended, collected);
            }
        }
    }

    // Emits a public constructor that stores each mix-in delegate into its
    // numbered field, in the deterministic order of mixInTypeMapping.
    protected void generateConstructor() {
        Type[] orderedMixIns = new Type[mixInTypes.length];
        for (Map.Entry<Class<?>, Integer> entry : mixInTypeMapping.entrySet()) {
            orderedMixIns[entry.getValue()] = Type.getType(entry.getKey());
        }
        Method constructor = new Method(
                "<init>",
                Type.getMethodDescriptor(Type.VOID_TYPE,
                                         orderedMixIns));
        GeneratorAdapter gen = new GeneratorAdapter(ACC_PUBLIC, constructor,
                                                    null, new Type[] {}, writer);
        gen.visitCode();
        gen.loadThis();
        gen.invokeConstructor(Type.getType(Object.class),
                              new Method(
                                      "<init>",
                                      Type.getMethodDescriptor(Type.VOID_TYPE,
                                                               new Type[] {})));
        for (int i = 0; i < orderedMixIns.length; i++) {
            gen.loadThis();
            gen.loadArg(i);
            gen.putField(generatedType, MIX_IN_VAR_PREFIX + i, orderedMixIns[i]);
        }
        gen.returnValue();
        gen.endMethod();
    }

    protected Map<Class<?>, Integer> getMixInTypeMapping() {
        return mixInTypeMapping;
    }

    protected Class<?>[] getMixInTypes() {
        return mixInTypes;
    }

    // Computes the generated type name and the mix-in -> field-index mapping.
    protected void initialize() {
        compositeType = Type.getType(composite);
        generatedType = Type.getObjectType(compositeType.getInternalName()
                + GENERATED_COMPOSITE_SUFFIX);
        mixInTypes = mixInTypesFor();
        for (int i = 0; i < mixInTypes.length; i++) {
            mixInTypeMapping.put(mixInTypes[i], i);
        }
    }

    // Sorted by canonical name so field numbering is deterministic across runs.
    protected Class<?>[] mixInTypesFor() {
        Comparator<Class<?>> comparator = new Comparator<Class<?>>() {
            @Override
            public int compare(Class<?> o1, Class<?> o2) {
                return o1.getCanonicalName().compareTo(o2.getCanonicalName());
            }
        };
        Set<Class<?>> mixInTypes = new TreeSet<Class<?>>(comparator);
        addMixInTypesTo(composite, mixInTypes);
        return mixInTypes.toArray(new Class<?>[mixInTypes.size()]);
    }

    // Emits a method on the composite that forwards to the mix-in delegate
    // stored in fieldName.
    protected void visitMethod(Type mixIn, String fieldName, int access,
                               String name, String desc, String signature,
                               String[] exceptions) {
        Type[] exceptionTypes;
        if (exceptions != null) {
            exceptionTypes = new Type[exceptions.length];
            int i = 0;
            for (String exception : exceptions) {
                exceptionTypes[i++] = Type.getObjectType(exception);
            }
        } else {
            exceptionTypes = new Type[0];
        }
        // NOTE(review): XOR *toggles* ACC_ABSTRACT — correct for abstract
        // interface methods, but it would SET the flag on a non-abstract
        // input (e.g. a default method); confirm `access &= ~ACC_ABSTRACT`
        // was not intended.
        access = access ^ ACC_ABSTRACT;
        Method method = new Method(name, desc);
        GeneratorAdapter gen = new GeneratorAdapter(access, method, null,
                                                    exceptionTypes, writer);
        gen.visitCode();
        gen.loadThis();
        gen.getField(generatedType, fieldName, mixIn);
        gen.loadArgs();
        gen.invokeInterface(mixIn, method);
        gen.returnValue();
        gen.endMethod();
    }
}
|
import React from 'react'
export default function InputInitialInvest({ invest, handle }) {
const handleInvest = ({ target: { value } }) => {
handle(parseInt(value))
}
return (
<div>
<label htmlFor="initialInves">Investimento Inicial</label>
<input
id="initialInves"
type="number"
min="1000"
value={invest}
step="100"
onChange={handleInvest}
/>
</div>
)
}
|
<reponame>Saitore973/news_app
import os
class config:
    """Base configuration shared by all environments."""
    # URL templates for the News API; the API key / source id are
    # interpolated at call time via str.format.
    NEWS_API_BASE_URL ='https://newsapi.org/v2/top-headlines/sources?apiKey={}'
    NEWS_ARTICLE_URL = 'https://newsapi.org/v2/top-headlines?sources={}&apiKey={}'
    # Secrets come from the environment; None when unset.
    NEWS_API_KEY = os.environ.get('NEWS_API_KEY')
    SECRET_KEY = os.environ.get('SECRET_KEY')

class ProdConfig(config):
    """Production configuration (inherits everything unchanged)."""
    pass

class DevConfig(config):
    """Development configuration."""
    DEBUG = True

# Maps an environment name to the configuration class to load.
config_options = {
    'development': DevConfig,
    'production': ProdConfig
}
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Implements the main Optimizer of learning gradient descent."""
import numpy as np
import tensorflow as tf
from collections import deque, namedtuple
class MetaOptimizer(object):
    """Meta-optimizer that learns a gradient-descent update rule.

    Currently a skeleton: stores the memory size under a TF name scope.
    """

    def __init__(self, memory_size=100, name='MetaOptimizer'):
        # memory_size: number of past gradients the learned optimizer may keep.
        with tf.name_scope(name):
            self.memory_size = memory_size

    def make_meta_loss(self):
        """Build the meta-training loss (not yet implemented).

        BUG FIX: this was previously defined without ``self``, so calling it
        on an instance raised ``TypeError: make_meta_loss() takes 0
        positional arguments but 1 was given``.
        """
        pass
|
class GraphQLCostDirective:
    """Implements the cost-directive logic: bounds the cost of a query.

    ``max_cost`` is the ceiling enforced by callers; ``get_cost`` estimates
    the cost of a query string.
    """

    def __init__(self, max_cost):
        self.max_cost = max_cost

    def get_cost(self, query):
        """Estimate the execution cost of ``query``.

        The estimation strategy is pluggable; the current placeholder uses
        the query's length as a proxy for its complexity/depth.
        """
        return len(query)
# Integrate the cost directive into the schema_with_cost_directive
from graphql import GraphQLSchema, GraphQLObjectType, GraphQLString, parse, validate, execute, specified_rules
from graphql.type.directives import GraphQLDirective
from graphql.language import ast
from graphql.execution import execute as graphql_execute
# Define the schema with the cost directive integrated.
# NOTE(review): graphql-core expects `directives` to be a *list/tuple* of
# GraphQLDirective objects (not a dict), and directive `args` to be
# GraphQLArgument instances rather than AST nodes — this construction likely
# fails at schema-build time; verify against the installed graphql-core API.
schema_with_cost_directive = GraphQLSchema(
    query=GraphQLObjectType(
        name='Query',
        fields={
            'exampleField': {
                'type': GraphQLString,
                'args': {},
                # Constant resolver used as a placeholder field.
                'resolve': lambda obj, info: 'exampleValue'
            }
        }
    ),
    directives={
        'cost': GraphQLDirective(
            name='cost',
            locations=[ast.DirectiveLocation.FIELD],
            args={
                'maxCost': ast.Argument(name=ast.Name(value='maxCost'), value=ast.IntValue(value=str(100)))
            }
        )
    }
)
# Implement the logic to calculate the cost of executing a query
def calculate_query_cost(query_string):
    """Validate, cost-check, and execute a GraphQL query.

    Returns ``{'data': ...}`` on success, or ``{'errors': [...]}`` when
    validation fails, the estimated cost exceeds the limit, or execution
    produces errors.
    """
    query_ast = parse(query_string)
    validation_errors = validate(schema_with_cost_directive, query_ast, specified_rules)
    if validation_errors:
        return {'errors': [str(error) for error in validation_errors]}
    # Reject over-budget queries *before* executing them; the original code
    # executed first, which defeated the purpose of the cost limit.
    max_cost = 100
    query_cost = GraphQLCostDirective(max_cost=max_cost).get_cost(query_string)
    if query_cost > max_cost:
        return {'errors': ['Query cost exceeds the maximum allowed cost']}
    # Fixed: the original constructed an undefined `ExtendedExecutionContext`
    # (NameError) and passed it where the schema belongs; execute against the
    # schema directly instead.
    execution_result = graphql_execute(
        schema_with_cost_directive,
        query_ast,
        root_value=None,
        context_value=None,
        variable_values=None,
        operation_name=None
    )
    if execution_result.errors:
        return {'errors': [str(error) for error in execution_result.errors]}
    return {'data': execution_result.data}
# Example usage: run a trivial query through the validate/cost/execute
# pipeline and print the resulting {'data': ...} or {'errors': ...} dict.
query_string = '{ exampleField }'
result = calculate_query_cost(query_string)
print(result)
|
"use strict";
//# sourceMappingURL=Main.js.map
|
#! /bin/sh
# Copyright (C) 1996-2020 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# Test for this bug:
# automake: Makefile.am: required file "../../install-sh" not found; installing
# This also makes sure that install-sh is created in the correct directory.
. test-init.sh

# An empty Makefile.am is enough to make automake look for install-sh.
: > Makefile.am
rm -f install-sh

# Since the default path includes '../..', we must run this test in
# yet another subdir.
mkdir frob
mv Makefile.am configure.ac frob/
cd frob

$ACLOCAL
# --add-missing should install install-sh here, not in ../..
$AUTOMAKE --add-missing >output 2>&1 || { cat output; exit 1; }
cat output

# Only one '/' should appear in the output.
grep '/.*/' output && exit 1

# install-sh must have been created in the current directory.
test -f install-sh

:
|
#!/bin/bash
# Copyright 2018 - 2021 Crunchy Data Solutions, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Pre-pull the Crunchy container images for the configured tag.
for CNAME in crunchy-postgres crunchy-pgbadger crunchy-pgbouncer
do
    # Quote the image reference so an unset or unusual CCP_IMAGE_TAG cannot
    # undergo word splitting; docker still fails loudly on an empty tag.
    docker pull "crunchydata/${CNAME}:${CCP_IMAGE_TAG}"
done
|
#!/bin/bash -x
# Pomodoro-style countdown: start wmtimer for the given HH:MM:SS duration
# (default 00:25:00), sleep slightly past it, then kill the timer and notify.
date
if [[ $# -gt 0 ]]; then
    tid=$1
else
    tid=00:25:00
fi
cd ~/bin/timers/wmtimer-2.92/wmtimer
./wmtimer -e ./done -t "$tid" -c &
# Remember the timer's PID directly instead of grepping `ps` for it later,
# which could match an unrelated wmtimer process.
wmtimer_id=$!
hour=$(echo "$tid" | awk 'BEGIN { FS=":"}; {print $1}')
min=$(echo "$tid" | awk 'BEGIN { FS=":"}; {print $2}')
sec=$(echo "$tid" | awk 'BEGIN { FS=":"}; {print $3}')
hours=$((10#$hour*3600))
mins=$((10#$min*60))
# Fixed: force base-10 on $sec as well — values like "08" or "09" were parsed
# as invalid octal and crashed the arithmetic expansion. The +8 pads for
# wmtimer startup latency.
secs=$((hours+mins+10#$sec+8))
sleep "$secs"
kill "$wmtimer_id"
~/scripts/pomodoro-is-up.py
date
|
//
// NTJBilateralFilterRunner.h
// BilateralFilter
//
// Created by <NAME> on 1/06/2016.
// Copyright © 2016 nojo inc. All rights reserved.
//
// Select the platform image class: NSImage on macOS, UIImage elsewhere.
#import <TargetConditionals.h>
#if TARGET_OS_OSX
#import <Cocoa/Cocoa.h>
#define IMAGE NSImage
#else
#import <UIKit/UIKit.h>
#define IMAGE UIImage
#endif

// Runner object for applying a bilateral filter to an image file.
@interface NTJBilateralFilterRunner : NSObject
// Initialise with the URL of the image to be filtered.
- (id)initWithImageFileURL:(NSURL *)imageFileURL;
// Load the image from the URL supplied at init time.
- (void)read;
// Prepare for processing at the given output size.
- (void)prepareAsSize:(CGSize)size;
// Run the filter; by convention sigma_R is the range sigma and sigma_S the
// spatial sigma — confirm against the implementation.
- (IMAGE *)runWithSigma_R:(double)sigma_R sigma_S:(double)sigma_S;
@end
|
package evilcraft.worldgen.structure;
import java.util.Random;
import net.minecraft.block.material.Material;
import net.minecraft.init.Blocks;
import net.minecraft.world.World;
import evilcraft.api.Helpers;
import evilcraft.api.StairSlabMetadataHelper;
import evilcraft.api.StairSlabMetadataHelper.SlabType;
import evilcraft.api.StairSlabMetadataHelper.StoneBrickType;
import evilcraft.blocks.EnvironmentalAccumulator;
/**
* Structure which generates Dark Temples.
*
* @author immortaleeb
*
*/
public class DarkTempleStructure extends QuarterSymmetricalStructure {
// Height of the generated structure in blocks.
private static final int STRUCTURE_HEIGHT = 9;
// Highest Y at which the structure may start so it still fits below the world ceiling.
private static final int MAX_BUILD_HEIGHT = 256 - STRUCTURE_HEIGHT;
// Lowest Y at which ground is accepted for temple placement.
private static final int MIN_BUILD_HEIGHT = 90;
private static DarkTempleStructure _instance = null;
/**
 * Get the unique instance.
 * @return Unique instance.
 */
public static DarkTempleStructure getInstance() {
if (_instance == null)
_instance = new DarkTempleStructure();
return _instance;
}
private DarkTempleStructure() {
super(6, 6);
}
// Scan straight down from MAX_BUILD_HEIGHT looking for the first non-air
// block at (x, z); returns its Y, or -1 if only air was found down to
// MIN_BUILD_HEIGHT.
private int findGround(World world, int x, int z) {
int height = MAX_BUILD_HEIGHT;
while (height >= MIN_BUILD_HEIGHT && world.isAirBlock(x, height, z)) {
height--;
}
if (!world.isAirBlock(x, height, z))
return height;
return -1;
}
// Checks that the area at height y around (x, z) is clear of blocks.
// NOTE(review): the bounds cover x-2..x+1 and z-2..z+1 (a 4x4 area), which
// is not symmetric around (x, z) — confirm this is intended.
private boolean canPlaceStructure(World world, int x, int y, int z) {
for (int xr = x - 2; xr < x + 2; xr++) {
for (int zr = z - 2; zr < z + 2; zr++) {
if (!world.isAirBlock(xr, y, zr))
return false;
}
}
return true;
}
// True when the block at the given coordinates is both solid and opaque
// (i.e. suitable to rest a pillar on).
private boolean isSolidBlock(World world, int x, int y, int z) {
Material material = world.getBlock(x, y, z).getMaterial();
return material.isSolid() && material.isOpaque();
}
@Override
protected void generateLayers() {
// Each addLayer call below encodes one quarter of a horizontal slice of
// the temple (mirrored by QuarterSymmetricalStructure); a null entry (o)
// leaves the existing world block untouched.
BlockWrapper us = new BlockWrapper(Blocks.stone_slab, StairSlabMetadataHelper.getSlabMetadata(SlabType.STONE, true)); // upside down stone slab
BlockWrapper rs = new BlockWrapper(Blocks.stone_slab);
BlockWrapper ds = new BlockWrapper(Blocks.double_stone_slab);
BlockWrapper cb = new BlockWrapper(Blocks.stonebrick, StairSlabMetadataHelper.getStoneBrickMetadata(StoneBrickType.CHISELED)); // chiseled brick
BlockWrapper sb = new BlockWrapper(Blocks.stonebrick);
BlockWrapper cs = new BlockWrapper(Blocks.stone_slab, StairSlabMetadataHelper.getSlabMetadata(SlabType.COBBLESTONE, false)); // cobblestone slab
BlockWrapper co = new BlockWrapper(Blocks.cobblestone);
BlockWrapper wa = new BlockWrapper(Blocks.water);
BlockWrapper fe = new BlockWrapper(Blocks.fence);
BlockWrapper to = new BlockWrapper(Blocks.torch);
BlockWrapper cw = new BlockWrapper(Blocks.cobblestone_wall);
BlockWrapper ea = new BlockWrapper(EnvironmentalAccumulator.getInstance());
BlockWrapper o = null; // Just to keep things compact...
addLayer(1, new BlockWrapper[]{
o ,o ,o ,o ,o ,o ,
o ,o ,o ,us,ds,o ,
us,us,us,us,us,o ,
us,us,us,us,o ,o ,
us,us,us,us,o ,o ,
us,us,us,us,o ,o
});
addLayer(2, new BlockWrapper[]{
o ,o ,o ,o ,o ,o ,
o ,o ,o ,cb,cb,o ,
sb,sb,sb,sb,cb,o ,
ds,co,wa,sb,o ,o ,
co,co,co,sb,o ,o ,
co,co,ds,sb,o ,o
});
addLayer(3, new BlockWrapper[]{
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,sb,o ,
o ,o ,o ,fe,o ,o ,
rs,o ,o ,o ,o ,o ,
cs,rs,o ,o ,o ,o ,
ea,cs,rs,o ,o ,o
});
addLayer(4, new BlockWrapper[]{
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,cb,o ,
o ,o ,o ,to,o ,o ,
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,o ,o
});
addLayer(5, new BlockWrapper[]{
us,o ,o ,o ,cw,o ,
o ,o ,o ,o ,sb,cw,
o ,o ,o ,o, o ,o ,
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,o ,us
});
addLayer(6, new BlockWrapper[]{
cb,ds,rs,rs,rs,o ,
co,co,co,co,co,rs,
co,co,co,co,co,rs,
co,co,co,co,co,rs,
co,co,co,co,co,ds,
o ,co,co,co,co,cb
});
addLayer(7, new BlockWrapper[]{
rs,o ,o ,o ,o ,o ,
cw,o ,o ,o ,o ,o ,
cs,cs,cs,o ,o ,o ,
co,co,cs,cs,o ,o ,
co,co,co,cs,o ,o ,
o ,co,co,cs,cw,rs
});
addLayer(8, new BlockWrapper[]{
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,o ,o ,
cs,cw,o ,o ,o ,o ,
o ,cs,o ,o ,o ,o
});
addLayer(9, new BlockWrapper[]{
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,o ,o ,
o ,o ,o ,o ,o ,o ,
o ,to,o ,o ,o ,o ,
o ,o ,o ,o ,o ,o
});
}
@Override
protected void postBuildCorner(World world, int x, int y, int z, int incX, int incZ) {
// Place upside down stairs in corners
// x+ east
// z+ south
// x- west
// z- north
int metadata1 = StairSlabMetadataHelper.getStairMetadata(Helpers.getForgeDirectionFromXSign(incX), true); // metadata for stair 1
int metadata2 = StairSlabMetadataHelper.getStairMetadata(Helpers.getForgeDirectionFromZSing(incZ), true); // metadata for stair 2
world.setBlock(x + 3*incX, y + 5, z + 4*incZ, Blocks.stone_stairs, metadata1, 2);
world.setBlock(x + 4*incX, y + 5, z + 3*incZ, Blocks.stone_stairs, metadata2, 2);
// pillars to the ground: extend cobblestone downwards from the corner
// until a solid, opaque block is reached.
int xx = x + 4*incX;
int zz = z + 4*incZ;
while (!isSolidBlock(world, xx, y, zz)) {
world.setBlock(xx, y, zz, Blocks.cobblestone, 0, 2);
y--;
}
}
@Override
public boolean generate(World world, Random random, int x, int y, int z) {
// Note: the caller-supplied y is ignored; the temple is placed on the
// ground level found by scanning down at (x, z).
int groundHeight = findGround(world, x, z);
if (groundHeight == -1)
return false;
// Check if it is a valid spot
if (!canPlaceStructure(world, x, groundHeight+1, z))
return false;
// It's a valid spot, now spawn it
super.generate(world, random, x, groundHeight, z);
return true;
}
}
|
<reponame>sbnair/PolkaJS
// Type declarations for a chain-types bundle: no custom RPC methods, and a
// single enum type (PoolId) with four variants.
declare const _default: {
    rpc: {};
    types: {
        PoolId: {
            _enum: {
                Loans: string;
                DexIncentive: string;
                DexSaving: string;
                Homa: string;
            };
        };
    };
};
export default _default;
|
#!/usr/bin/env bash
# Vagrant provisioning (runs as root): installs build tooling and Redis from
# the chris-lea PPA, then hands off to the per-user provisioning script.
export DEBIAN_FRONTEND=noninteractive
umask 022
set -e
set -x
apt-get update -yq
apt-get install --no-install-suggests -yq software-properties-common
# Redis comes from this PPA rather than the distro archive.
apt-add-repository -y ppa:chris-lea/redis-server
apt-get update -yq
apt-get install --no-install-suggests -yq \
  build-essential \
  byobu \
  curl \
  git \
  make \
  redis-server \
  screen
# Continue provisioning as the unprivileged vagrant user.
exec sudo -u vagrant \
  HOME=/home/vagrant \
  /vagrant/test/vagrant/provision-as-vagrant.bash
|
<filename>sources/xroxy.js<gh_stars>100-1000
'use strict';
// Maps the raw anonymity labels used by the xroxy.com feed onto the
// normalised levels used internally.
var anonymityLevels = {
	'Transparent': 'transparent',
	'Distorting': 'anonymous',
	'Anonymous': 'elite',
	'Socks4': 'anonymous',
	'Socks5': 'anonymous',
};
// Builds the feed descriptor (XML paths plus per-attribute parsers) for a
// given proxy RSS URL.
var defineFeed = function(url) {
	return {
		url: url,
		paths: {
			group: 'rss/channel/0/item',
			item: 'prx:proxy',
			attributes: {
				ipAddress: 'prx:ip/0',
				port: 'prx:port/0',
				anonymityLevel: 'prx:type/0',
				protocols: 'prx:ssl/0',
			},
		},
		parseAttributes: {
			anonymityLevel: function(value) {
				if (value) {
					value = value.trim();
					value = anonymityLevels[value] || null;
				}
				return value || null;
			},
			protocols: function(value) {
				// NOTE(review): these cases match the *raw* feed labels
				// ('Transparent', 'Socks4', ...). If the framework applies
				// parseAttributes.anonymityLevel before this runs,
				// this.anonymityLevel would already be mapped to
				// 'transparent'/'anonymous'/'elite' and none of these cases
				// would match — verify the attribute evaluation order.
				switch (this.anonymityLevel) {
					case 'Transparent':
					case 'Anonymous':
					case 'Distorting':
						return value === 'true' ? [ 'https' ] : [ 'http' ];
					case 'Socks4':
					case 'Socks5':
						return [ value.toLowerCase() ];
				}
				return [];
			},
		},
	};
};
// Source descriptor consumed by the proxy-list aggregator.
module.exports = {
	homeUrl: 'https://www.xroxy.com/',
	abstract: 'xml',
	config: {
		feeds: [
			defineFeed('https://www.xroxy.com/proxyrss.xml'),
		],
	},
};
|
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
describe "Thread#keys" do
it "returns an array of the names of the thread-local variables as symbols" do
th = Thread.new do
Thread.current["cat"] = 'woof'
Thread.current[:cat] = 'meow'
Thread.current[:dog] = 'woof'
end
th.join
th.keys.sort_by {|x| x.to_s}.should == [:cat,:dog]
end
it "is not shared across fibers" do
fib = Fiber.new do
Thread.current[:val1] = 1
Fiber.yield
Thread.current.keys.should include(:val1)
Thread.current.keys.should_not include(:val2)
end
Thread.current.keys.should_not include(:val1)
fib.resume
Thread.current[:val2] = 2
fib.resume
Thread.current.keys.should include(:val2)
Thread.current.keys.should_not include(:val1)
end
it "stores a local in another thread when in a fiber" do
fib = Fiber.new do
t = Thread.new do
sleep
Thread.current.keys.should include(:value)
end
Thread.pass while t.status and t.status != "sleep"
t[:value] = 1
t.wakeup
t.join
end
fib.resume
end
end
|
<gh_stars>0
package health
import (
"github.com/aaawoyucheng/wayne/src/backend/models"
)
// DatabaseCheck is a health probe for the backing database.
type DatabaseCheck struct {
}

// Check verifies database connectivity by running a cheap COUNT query on the
// Cluster table. It returns the query error, or nil on success.
func (dc *DatabaseCheck) Check() error {
	_, err := models.Ormer().
		QueryTable(new(models.Cluster)).
		Count()
	// Idiomatic: return err directly instead of the redundant
	// `if err != nil { return err }; return nil`.
	return err
}
|
const express = require('express');
const app = express();

// List of users.
// NOTE(review): credentials are hard-coded in source; move them to a secret
// store or environment configuration.
const users = [
    { username: 'user1', password: '1234' },
    { username: 'user2', password: '5678' },
    { username: 'user3', password: 'abcdef' }
];

app.get('/', (req, res) => {
    // Security fix: the original returned the full user objects, leaking
    // every password to any unauthenticated caller. Expose usernames only.
    res.status(200).json(users.map(({ username }) => ({ username })));
});

app.listen(3000, () => {
    console.log('Listening on port 3000');
});
|
#!/bin/sh
# Start a Celery worker (foreground) for the corec.endpoints.ping app, using
# the settings module `celeryconfig` and INFO-level logging.
celery -A corec.endpoints.ping --config=celeryconfig worker -l INFO
|
json.extract! @game_action, :description, :created_at, :updated_at
|
<html>
<head>
<title>User Profile Form</title>
</head>
<body>
<h1>User Profile Form</h1>
<!-- Fixed: inputs now carry id attributes so the labels' for="" associations
     actually work, and the email field uses type="email" to get built-in
     browser validation and the right mobile keyboard. -->
<form>
<label for="user_name">Name</label>
<input type="text" name="user_name" id="user_name" />
<label for="user_email">Email Address</label>
<input type="email" name="user_email" id="user_email" />
<input type="submit" value="Submit" />
</form>
</body>
</html>
|
#!/bin/bash
#leave all jars in build
# Repackages a Hadoop 0.18.0 tarball with the Chukwa client jar, patched
# launcher script, and a Chukwa runtime tree, then updates the tar in place.
HADOOP_DIR=hadoop-0.18.0-mac01
TARFILE=hadoop18.tar
# Rebuild the staging directory from scratch.
rm -r hadoop-0.18.0-mac01
mkdir $HADOOP_DIR
mkdir $HADOOP_DIR/bin $HADOOP_DIR/conf $HADOOP_DIR/lib $HADOOP_DIR/chukwa
cp ../build/chukwa-hadoop-0.0.1-client.jar $HADOOP_DIR/lib
cp log4j.properties.templ $HADOOP_DIR/conf/log4j.properties
# Extract only the hadoop launcher from the tar, then patch it.
tar xf $TARFILE $HADOOP_DIR/bin/hadoop
patch $HADOOP_DIR/bin/hadoop < patchhadoop.patch
# svn export copies the working tree without .svn metadata.
svn export ../bin $HADOOP_DIR/chukwa/bin
cp ../bin/VERSION $HADOOP_DIR/chukwa/bin
svn export ../conf $HADOOP_DIR/chukwa/conf
svn export ../lib $HADOOP_DIR/chukwa/lib
cp ../hadoopjars/hadoop-0.18.0-core.jar $HADOOP_DIR/chukwa/lib
cp ../build/*.jar $HADOOP_DIR/chukwa
mkdir $HADOOP_DIR/chukwa/var; mkdir $HADOOP_DIR/chukwa/var/run
cp new-chukwa-conf/* $HADOOP_DIR/chukwa/conf
# Do something with chukwa-conf
# `tar u` appends/updates the staged tree in the existing archive.
tar uvf $TARFILE $HADOOP_DIR
|
<gh_stars>0
package com.crossover.trial.weather.repository;
import java.util.HashMap;
import java.util.Map;
import com.crossover.trial.weather.domain.AirportData;
/**
* A Repository of the Weather.
*
* @author <EMAIL>
*/
public class WeatherRepository {
/**
 * Internal performance counter to better understand most requested
 * information.
 */
private final Map<AirportData, Integer> requestFrequencyMap = new HashMap<AirportData, Integer>();
/**
 * All Radius Frequency.
 */
private final Map<Double, Integer> radiusFreqMap = new HashMap<Double, Integer>();
/**
 * Returns all Request Frequency.
 *
 * Note: returns the live internal map, not a copy — callers can (and
 * apparently do) mutate repository state through it.
 *
 * @return all Request Frequency.
 */
public Map<AirportData, Integer> findAllRequestFrequency() {
return requestFrequencyMap;
}
/**
 * Returns all Radius Frequency.
 *
 * Note: returns the live internal map, not a copy.
 *
 * @return all Radius Frequency.
 */
public Map<Double, Integer> findAllRadiusFreqMap() {
return radiusFreqMap;
}
}
|
#!/bin/bash
# Download the Chen neuron dataset archive from Janelia figshare into data_/.
mkdir -p data_/;
wget -O data_/chen_neuron_dataset.zip https://janelia.figshare.com/ndownloader/articles/7272617/versions/4;
|
from pykeyboard import PyKeyboard
import math
class PhoneShortCuts:
    """Drives a phone by typing `stouch` shell commands via simulated keystrokes.

    Coordinates given to touch() are fractions (0..1) of the screen, scaled
    to the pixel resolution configured below.
    """
    def __init__(self):
        self.k = PyKeyboard()
        # Command prefix typed before every action; note the trailing space.
        self.starter_path = "stouch "
        # Screen resolution in pixels — TODO confirm these match the target device.
        self.max_x = 347
        self.max_y = 650
    def home(self):
        # "button 1 1" presumably presses the home button — verify against stouch docs.
        full_command = self.starter_path + "button 1 1\n"
        self.k.type_string(full_command)
    def touch(self, x, y):
        # Scale fractional coordinates to absolute pixel positions.
        real_x = math.floor(self.max_x * x)
        real_y = math.floor(self.max_y * y)
        full_command = self.starter_path + "touch " + str(real_x) + " " + str(real_y) + "\n"
        print (full_command)
        self.k.type_string(full_command)
k = PyKeyboard()
|
from typing import List
def max_difference(nums: List[int]) -> int:
    """Return the largest nums[j] - nums[i] with j > i, or 0 if none is positive.

    Single pass: track the smallest value seen so far and the best gain any
    later element achieves over it.
    """
    best = 0
    lowest = None
    for value in nums:
        if lowest is None or value < lowest:
            lowest = value
        else:
            gain = value - lowest
            if gain > best:
                best = gain
    return best
|
<gh_stars>0
package pulse.ui.components;
import static java.util.Objects.requireNonNull;
import static org.jfree.chart.plot.PlotOrientation.VERTICAL;
import org.jfree.chart.ChartFactory;
import org.jfree.data.statistics.HistogramDataset;
import org.jfree.data.statistics.HistogramType;
import pulse.search.statistics.ResidualStatistic;
/**
 * Histogram plot of the residuals held by a {@code ResidualStatistic}.
 */
public class ResidualsChart extends AuxPlotter<ResidualStatistic> {

	// Number of histogram bins used when building the dataset.
	private int binCount;

	public ResidualsChart(String xLabel, String yLabel) {
		super(xLabel, yLabel);
		binCount = 32; // default bin resolution
	}

	@Override
	public void createChart(String xLabel, String yLabel) {
		setChart(ChartFactory.createHistogram("", xLabel, yLabel, null, VERTICAL, true, true, false));
	}

	/**
	 * Replaces the chart's dataset with a relative-frequency histogram of the
	 * statistic's transformed residuals. An empty residual array yields an
	 * empty dataset.
	 */
	@Override
	public void plot(ResidualStatistic stat) {
		requireNonNull(stat);
		var pulseDataset = new HistogramDataset();
		pulseDataset.setType(HistogramType.RELATIVE_FREQUENCY);
		var residuals = stat.transformResiduals();
		if (residuals.length > 0) {
			// Fixed: reuse the residuals array computed above instead of
			// calling stat.transformResiduals() a second time.
			pulseDataset.addSeries("H1", residuals, binCount);
		}
		getPlot().setDataset(0, pulseDataset);
	}

	public int getBinCount() {
		return binCount;
	}

	public void setBinCount(int binCount) {
		this.binCount = binCount;
	}
}
|
import { FormGroup } from '@angular/forms';
export class UserModel {
  id = 0;
  firstName = '';
  lastName = '';
  userName = '';
  email = '';
  admin = false;
  workType = '';
  password = '';
  imageUrl = '';

  /**
   * Build a UserModel from a reactive form plus an explicit id.
   * Note: imageUrl is not populated from the form, matching existing behavior.
   */
  public static createInstance(id: number, form: FormGroup) {
    const values = form.value;
    const user = new UserModel();
    user.id = id;
    user.firstName = values['firstName'];
    user.lastName = values['lastName'];
    user.userName = values['userName'];
    user.email = values['email'];
    user.workType = values['workType'];
    user.password = values['password'];
    user.admin = values['admin'];
    return user;
  }
}
|
// Docs on event and context https://www.netlify.com/docs/functions/#the-handler-method
const faunadb = require('faunadb'),
q = faunadb.query;
// Netlify function: fetches one hard-coded post document from FaunaDB and
// returns its title in the response body.
exports.handler = async (event, context) => {
  try {
    // NOTE(review): this read-only function authenticates with the *admin*
    // secret; a scoped server/client key would be safer — confirm and rotate.
    var client = new faunadb.Client({ secret: process.env.FAUNADB_ADMIN_SECRET });
    var result = await client.query(
      q.Get(q.Ref(q.Collection('posts'), '272284912966435334'))
    );
    //console.log("Document retrived from Container in Database: " + result.data.title);
    return {
      statusCode: 200,
      body: JSON.stringify({ message: `${result.data.title}` }),
      // // more keys you can return:
      // headers: { "headerName": "headerValue", ... },
      // isBase64Encoded: true,
    }
  } catch (err) {
    // Any query/auth failure surfaces as a 500 with the error text.
    return { statusCode: 500, body: err.toString() }
  }
}
|
<gh_stars>1-10
package com.report.adapter.swapi.client.vo;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import static org.junit.jupiter.api.Assertions.*;
@ExtendWith(MockitoExtension.class)
// Unit tests for Result: null-rejection on construction and raw-value access.
class ResultTest {
@Mock
private JsonNode jsonNode;
@Test
@DisplayName("Creating with null value")
void shouldNotAcceptNullValue() {
// When & Then — Lombok-style @NonNull guard message is asserted verbatim.
NullPointerException exception = assertThrows(
NullPointerException.class,
() -> new Result(null)
);
assertEquals("raw is marked non-null but is null", exception.getMessage());
}
@Test
@DisplayName("Creating with json node")
void shouldAcceptJsonNode() {
// When & Then
assertDoesNotThrow(
() -> new Result(jsonNode)
);
}
@Test
@DisplayName("Getting access to Raw value")
void shouldGiveSameRawValueAsProvided() {
// Given
Result result = new Result(jsonNode);
// When
JsonNode raw = result.getRaw();
// Then — getter must hand back the exact instance, not a copy.
assertEquals(jsonNode, raw);
}
}
|
set -e
# Release helper: fast-forwards master and gh-pages to develop and publishes
# the dev/ subtree. Refuses to run unless on a clean develop branch.
current_branch=$(git branch | sed -n -e 's/^\* \(.*\)/\1/p')
# Fixed: quote the expansion — with an empty/unset branch name the unquoted
# test produced a syntax error instead of the intended failure message.
if [ "$current_branch" != "develop" ]
then
  echo "[ERROR] To be able to deploy you need to be on the develop branch"
  exit 1
fi
st=$(git status --porcelain 2> /dev/null)
if [[ "$st" != "" ]];
then
  echo "[ERROR] To be able to deploy, 'git status' should be clean meaning, everything need to be commited"
  exit 1
fi
# Sync develop with the remote before touching the release branches.
git pull -r origin develop
git push origin develop
git checkout master
git pull origin master
# --ff-only guarantees master never diverges from develop.
git merge --ff-only develop
git push origin master
git checkout gh-pages
git pull origin gh-pages
git merge --ff-only develop
# Publish only the dev/ directory to the gh-pages site.
git subtree push --prefix dev origin gh-pages
git checkout develop
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# paths
# HDFS working directory used by the TestDFSIO-Enhanced benchmark.
INPUT_HDFS=/benchmarks/TestDFSIO-Enh
# dfsioe-read: number of files and per-file size (MB) for the read phase.
RD_NUM_OF_FILES=256
RD_FILE_SIZE=200 #2000
# dfsioe-write: number of files and per-file size (MB) for the write phase.
WT_NUM_OF_FILES=256
WT_FILE_SIZE=100 #1000
|
<gh_stars>0
/*
* search.c --
*
* Point searching.
*
* *********************************************************************
* * Copyright (C) 1985, 1990 Regents of the University of California. *
* * Permission to use, copy, modify, and distribute this *
* * software and its documentation for any purpose and without *
* * fee is hereby granted, provided that the above copyright *
* * notice appear in all copies. The University of California *
* * makes no representations about the suitability of this *
* * software for any purpose. It is provided "as is" without *
* * express or implied warranty. Export of this software outside *
* * of the United States of America may require an export license. *
* *********************************************************************
*/
#ifndef lint
static char rcsid[] __attribute__ ((unused)) = "$Header: /usr/cvsroot/magic-8.0/tiles/search.c,v 1.1.1.1 2008/02/03 20:43:50 tim Exp $";
#endif /* not lint */
#include <stdio.h>
#include "magic.h"
#include "geometry.h"
#include "tile.h"
/*
* --------------------------------------------------------------------
*
* TiSrPoint --
*
* Search for a point.
*
* Results:
* A pointer to the tile containing the point.
* The bottom and left edge of a tile are considered part of
* the tile; the top and right edge are not.
*
* Side effects:
* Updates the hint tile in the supplied plane to point
* to the tile found.
*
* --------------------------------------------------------------------
*/
Tile *
TiSrPoint(hintTile, plane, point)
    Tile * hintTile;	/* Pointer to tile at which to begin search.
			 * If this is NULL, use the hint tile stored
			 * with the plane instead.
			 */
    Plane * plane;	/* Plane (containing hint tile pointer) */
    Point * point;	/* Point for which to search */
{
    /* Start from the caller's hint if given, else the plane's cached hint. */
    Tile *tp = (hintTile) ? hintTile : plane->pl_hint;
    /* GOTOPOINT (macro from tile.h) advances tp to the tile containing point. */
    GOTOPOINT(tp, point);
    /* Cache the result so the next nearby search starts close by. */
    plane->pl_hint = tp;
    return(tp);
}
|
<reponame>litentry/LitentryWebDemo<filename>src/styles/fonts.ts
// Font-family name constants used throughout the app's styles.
export default {
  bold: 'ManifoldCF-Bold',
  light: 'ManifoldCF-Light',
  regular: 'Manifold CF',
  roboto: 'Roboto-regular',
  robotoBold: 'Roboto-Bold',
  robotoLight: 'Roboto-Light',
  robotoMedium: 'Roboto-Medium',
  robotoMono: 'RobotoMono-Regular',
  robotoMonoMedium: 'RobotoMono-Medium',
  semiBold: 'ManifoldCF-DemiBold'
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.