text stringlengths 1 1.05M |
|---|
<reponame>lananh265/social-network<filename>node_modules/react-icons-kit/ionicons/iosPrinter.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.iosPrinter = void 0;
var iosPrinter = {
"viewBox": "0 0 512 512",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"x": "128",
"y": "224",
"width": "256",
"height": "224"
},
"children": [{
"name": "rect",
"attribs": {
"x": "128",
"y": "224",
"width": "256",
"height": "224"
},
"children": []
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"x": "127.5",
"y": "224.5",
"width": "256",
"height": "224"
},
"children": [{
"name": "rect",
"attribs": {
"x": "127.5",
"y": "224.5",
"width": "256",
"height": "224"
},
"children": []
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"x": "111.5",
"y": "64.5",
"width": "288",
"height": "32"
},
"children": [{
"name": "rect",
"attribs": {
"x": "111.5",
"y": "64.5",
"width": "288",
"height": "32"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M432.5,112.5h-352c-17.645,0-33,12.842-33,30.309v158.393c0,17.468,15.355,33.299,33,33.299h31v-126h288v126h33\r\n\t\t\tc17.645,0,31-15.831,31-33.299V142.809C463.5,125.342,450.145,112.5,432.5,112.5z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M432.5,112.5h-352c-17.645,0-33,12.842-33,30.309v158.393c0,17.468,15.355,33.299,33,33.299h31v-126h288v126h33\r\n\t\t\tc17.645,0,31-15.831,31-33.299V142.809C463.5,125.342,450.145,112.5,432.5,112.5z"
},
"children": []
}]
}]
}]
}]
}]
};
exports.iosPrinter = iosPrinter; |
<reponame>1024pix/pix-ui
import { hbs } from 'ember-cli-htmlbars';
// Storybook story: renders a PixTextarea whose template bindings are fed by
// the story args (`context`).
export const textarea = (args) => ({
  template: hbs`
    <PixTextarea
      @id={{id}}
      @value={{value}}
      @maxlength={{maxlength}}
      @label={{label}}
      @errorMessage={{errorMessage}}
    />
  `,
  context: args,
});
// Storybook argTypes for the PixTextarea story. Descriptions are in French,
// matching the rest of the Pix UI documentation site.
export const argTypes = {
  // Required DOM id so a <label> can be attached to the field.
  id: {
    name: 'id',
    description: 'Identifiant du champ permettant de lui attacher un label',
    type: { name: 'string', required: true },
    defaultValue: '',
  },
  // Required current value of the textarea.
  value: {
    name: 'value',
    description: 'Valeur du champ',
    type: { name: 'string', required: true },
    defaultValue: '',
  },
  // Optional character cap; the story defaults to 500.
  maxlength: {
    name: 'maxlength',
    description: 'Nombre de caractères maximal à taper dans le champ',
    type: { name: 'number', required: false },
    defaultValue: 500,
  },
  // Optional visible label text.
  label: {
    name: 'label',
    description: 'Donne un label au champ.',
    type: { name: 'string', required: false },
    table: {
      type: { summary: 'string' },
      defaultValue: { summary: null },
    },
  },
  // Optional error message rendered below the field.
  errorMessage: {
    name: 'errorMessage',
    description: 'Affiche une erreur en dessous du champ.',
    type: { name: 'string', required: false },
    table: {
      type: { summary: 'string' },
      defaultValue: { summary: null },
    },
  },
};
|
<reponame>JasonLiu798/javautil
package com.atjl.biz.flow.core;
import com.atjl.biz.flow.api.Flow;
import com.atjl.biz.flow.dto.FlowConstant;
import com.atjl.log.api.LogUtil;
import com.atjl.util.character.StringCheckUtil;
import com.atjl.util.collection.CollectionUtil;
import com.atjl.util.reflect.ReflectMethodUtil;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
 * Container for an ordered sequence of flow snippets (span units).
 * Units execute in insertion order and are addressable both by their
 * snippet id and by their position in the sequence.
 */
public class Span {
    private String spanId;
    Map<String, SpanUnit> id2SpanUnit;
    Map<Integer, SpanUnit> index2SpanUnit;
    private List<String> executeSeq;
    //ThreadLocal<Map<String, Object>> contextData;

    public Span() {
        init();
    }

    /** Allocate the empty lookup maps and the execution sequence. */
    private void init() {
        this.id2SpanUnit = new HashMap<>();
        this.index2SpanUnit = new HashMap<>();
        this.executeSeq = new LinkedList<>();
    }

    /**
     * 获取fid在executeSeq中的位置 — position of fid within executeSeq.
     * Matches the last occurrence (the original forward scan kept
     * overwriting its result, so last wins).
     *
     * @param fid flow id to look up
     * @return index of the last occurrence, or -1 when absent or when
     *         executeSeq is empty
     */
    private Integer getFlowIndex(String fid) {
        if (CollectionUtil.isEmpty(this.executeSeq)) {
            return -1;
        }
        // Scan from the tail so the first hit is the last occurrence.
        for (int pos = this.executeSeq.size() - 1; pos >= 0; pos--) {
            if (StringCheckUtil.equal(fid, this.executeSeq.get(pos))) {
                return pos;
            }
        }
        return -1;
    }

    /**
     * Add a flow snippet using the default (normal) option set.
     *
     * @param snippet flow to append
     * @return true when the snippet exposes the action method and was added
     */
    public boolean addSnippet(Flow snippet) {
        return addSnippet(snippet, FlowOptionGen.getNormalOption());
    }

    /**
     * Add a flow that always continues, regardless of exceptions or a
     * non-zero error code.
     *
     * @param func flow to append
     * @return true when the snippet exposes the action method and was added
     */
    public boolean addFlowForceGoon(Flow func) {
        int option = new FlowOptionGen()
                .enableExceptionContinue()
                .enableErrorCodeContinue()
                .get();
        return addSnippet(func, option);
    }

    /**
     * Add a flow snippet with an explicit option bitmask.
     *
     * @param snippet flow to append
     * @param option  execution option flags
     * @return false when the snippet lacks the action method, true otherwise
     */
    public boolean addSnippet(Flow snippet, int option) {
        String unitId = snippet.getId();
        // Reject snippets that do not declare the action entry point.
        Method action = ReflectMethodUtil.getDeclaredMethod(snippet, FlowConstant.METHOD_ACTION);
        if (action == null) {
            return false;
        }
        SpanUnit unit = new SpanUnit();
        unit.setActionMethod(action).setOption(option).setSnippet(snippet).setSuid(unitId);
        LogUtil.debug("add spanUnit {}", unit);
        // New units always go to the end of the execution sequence.
        int position = executeSeq.size();
        executeSeq.add(unitId);
        id2SpanUnit.put(unitId, unit);
        index2SpanUnit.put(position, unit);
        return true;
    }

    public Map<String, SpanUnit> getId2SpanUnit() {
        return id2SpanUnit;
    }

    public void setId2SpanUnit(Map<String, SpanUnit> id2SpanUnit) {
        this.id2SpanUnit = id2SpanUnit;
    }

    public Map<Integer, SpanUnit> getIndex2SpanUnit() {
        return index2SpanUnit;
    }

    public void setIndex2SpanUnit(Map<Integer, SpanUnit> index2SpanUnit) {
        this.index2SpanUnit = index2SpanUnit;
    }

    public List<String> getExecuteSeq() {
        return executeSeq;
    }

    public void setExecuteSeq(List<String> executeSeq) {
        this.executeSeq = executeSeq;
    }
}
|
package lexer
import (
"regexp"
"unicode"
"github.com/Zac-Garby/radon/token"
)
// lineEndings lists the token types that may legally terminate a statement.
// After emitting one of these, the lexer turns a following newline (or the
// literal "end") into an implicit semicolon token — see the Semi emission
// inside Lexer.
var lineEndings = []token.Type{
	token.ID,
	token.String,
	token.Number,
	token.True,
	token.False,
	token.Nil,
	token.Break,
	token.Next,
	token.Return,
	token.RightParen,
	token.RightSquare,
	token.RightBrace,
	token.End,
}
// Lexer takes a string and returns a stream of tokens
// The stream of tokens is in the form of a function
// which returns the next token.
//
// Tokens are produced lazily on a goroutine and handed over through an
// unbuffered channel, so lexing only advances as tokens are requested.
func Lexer(str, file string) func() token.Token {
	var (
		index = 0                      // current byte offset into str
		col   = 1                      // 1-based column of the next character
		line  = 1                      // 1-based line number
		ch    = make(chan token.Token) // unbuffered hand-off to the caller
	)

	go func() {
		for {
			if index < len(str) {
				// Skip leading whitespace and '#' line comments.
				foundSpace := false
				for index < len(str) && (unicode.IsSpace(rune(str[index])) || str[index] == '#') {
					if unicode.IsSpace(rune(str[index])) {
						index++
						col++
						// Newline check happens after the increment,
						// hence the index-1.
						if str[index-1] == '\n' {
							col = 1
							line++
						}
						foundSpace = true
					} else {
						// '#' comment: consume to end of line.
						for index < len(str) && str[index] != '\n' {
							index++
						}
						col = 1
					}
				}
				if foundSpace {
					// Restart matching from the first non-space character.
					continue
				}

				found := false
				remainingSubstring := str[index:]

				// Try each (regex, handler) pair in order; first match wins.
				for _, pair := range lexicalDictionary {
					var (
						regex   = pair.regex
						handler = pair.handler
						// NOTE(review): the pattern is recompiled for every
						// token — hoisting the compilation would be cheaper.
						pattern = regexp.MustCompile(regex)
						match   = pattern.FindStringSubmatch(remainingSubstring)
					)

					if len(match) > 0 {
						found = true

						t, literal, whole := handler(match)
						l := len(whole)

						ch <- token.Token{
							Type:    t,
							Literal: literal,
							Start:   token.Position{Line: line, Column: col, Filename: file},
							End:     token.Position{Line: line, Column: col + l - 1, Filename: file},
						}

						index += l
						col += l

						// Consume trailing horizontal whitespace, but stop at
						// the newline so implicit-semicolon detection can see it.
						for index < len(str) && unicode.IsSpace(rune(str[index])) && str[index] != '\n' {
							index++
							col++
						}

						// Consume a trailing '#' comment up to the newline.
						if index < len(str) && str[index] == '#' {
							for index < len(str) && str[index] != '\n' {
								index++
							}
						}

						// Is this token type allowed to end a statement?
						isLineEnding := false
						for _, ending := range lineEndings {
							if t == ending {
								isLineEnding = true
							}
						}

						// HACK: This is a horrible piece of code, but it works.
						// isEnd: the next input is a newline or the literal "end".
						var isEnd bool
						if index < len(str) {
							isEnd = str[index] == '\n'
							if index+2 < len(str) {
								isEnd = isEnd || str[index] == 'e' && str[index+1] == 'n' && str[index+2] == 'd'
							}
						}

						// Emit an implicit semicolon after a statement-ending
						// token followed by newline/"end", or at end of input.
						if (isLineEnding && index < len(str) && isEnd) || index >= len(str) {
							ch <- token.Token{
								Type:    token.Semi,
								Literal: ";",
								Start:   token.Position{Line: line, Column: col, Filename: file},
								End:     token.Position{Line: line, Column: col, Filename: file},
							}
						}

						break
					}
				}

				// No pattern matched: emit a one-character Illegal token and
				// advance so the lexer cannot loop forever.
				if !found && index < len(str) {
					ch <- token.Token{
						Type:    token.Illegal,
						Literal: string(str[index]),
						Start:   token.Position{Line: line, Column: col, Filename: file},
						End:     token.Position{Line: line, Column: col, Filename: file},
					}

					index++
					col++
				}
			} else {
				// Input exhausted: keep emitting EOF tokens, one per request.
				index++
				col++

				ch <- token.Token{
					Type:    token.EOF,
					Literal: "",
					Start:   token.Position{Line: line, Column: col, Filename: file},
					End:     token.Position{Line: line, Column: col, Filename: file},
				}
			}
		}
	}()

	// Each call blocks until the lexing goroutine yields the next token.
	return func() token.Token {
		return <-ch
	}
}
|
from datetime import datetime, timedelta
class HTTPCache:
    """Holds a response timestamp and exposes a freshness-clamped variant."""

    def __init__(self, response_date):
        # Timestamp reported for the cached response (naive UTC datetime).
        self.response_date = response_date

    @property
    def last_modified(self):
        """Return the response date, clamped to be no older than 7 days ago."""
        seven_days_ago = datetime.utcnow() - timedelta(days=7)
        if self.response_date > seven_days_ago:
            return self.response_date
        return seven_days_ago
# Example usage.
# NOTE(review): the printed value is 2022-10-15 00:00:00 only when run within
# 7 days of that date; afterwards the property clamps to utcnow() - 7 days,
# so the original "Output: 2022-10-15" claim no longer holds.
response_date = datetime(2022, 10, 15)
cache = HTTPCache(response_date)
print(cache.last_modified)  # response_date, or (now - 7 days) if that is later
<reponame>hugonasciutti/Exercises
// Redux action-type constants for the dashboard slice, namespaced to avoid
// collisions with other slices.
export const types = {
  UPDATE_LOADERS: 'DASHBOARD/UPDATE_LOADERS',
  LOADED_CANDIDATES: 'DASHBOARD/LOADED_CANDIDATES',
  LOAD_CANDIDATES: 'DASHBOARD/LOAD_CANDIDATES',
};

// Action creators; each returns a plain action object carrying its payload
// under the same property names as the creator's parameters.
export const actions = {
  // Request a page of candidates (`results` = page size).
  loadCandidates(page, results) {
    return { type: types.LOAD_CANDIDATES, page, results };
  },
  // Deliver fetched candidate data to the store.
  loadedCandidates(data) {
    return { type: types.LOADED_CANDIDATES, data };
  },
  // Replace the slice's loading flags.
  updateLoaders(loaders) {
    return { type: types.UPDATE_LOADERS, loaders };
  },
};
|
#!/bin/sh
# Update the local git hooks by fetching and merging the remote 'hooks' ref
# into the repository's .git/hooks directory.

git="$( which git )"

# Print an error to stderr and abort.
die () {
    echo >&2 "$@"
    exit 1
}

[ -x "$git" ] || die "Could not find 'git' in \$PATH ($PATH)."

this="$0"
this_dir="$( dirname "$this" )"
# Guard the cd: continuing in the wrong directory would fetch/merge elsewhere.
cd "$this_dir" || die "Could not cd to '$this_dir'."

"$git" rev-parse --is-inside-work-tree 2> /dev/null || \
    die "Does not seem to be a git repository."

# Bug fix: this line previously invoked bare 'git' instead of the resolved
# "$git" used everywhere else — use the verified executable consistently.
git_dir="$( "$git" rev-parse --git-dir )"
hook_dir="$git_dir/hooks"

cd "$hook_dir" || die "Could not cd to '$hook_dir'."

"$git" fetch origin remotes/origin/hooks && \
    "$git" merge FETCH_HEAD
|
#!/bin/bash
# Fine-tune ViLBERT on VQA (task 1) starting from a Conceptual Captions
# pre-trained checkpoint; resumes from the latest saved checkpoint.
# Paths are cluster-specific (TSUBAME /gs/hs0 storage).

# Activate the project's conda environment.
source activate /gs/hs0/tgb-deepmt/bugliarello.e/envs/vilbert-mt

# The script lives two levels below the repository root.
cd ../..

python train_tasks.py \
    --bert_model bert-base-uncased --config_file config/bert_base_6layer_6conect.json \
    --from_pretrained /gs/hs0/tgb-deepmt/bugliarello.e/checkpoints/conceptual_captions/vilbert/bert_base_6layer_6conect/pytorch_model_9.bin \
    --tasks 1 \
    --lr_scheduler 'warmup_linear' --train_iter_gap 4 \
    --output_dir /gs/hs0/tgb-deepmt/bugliarello.e/checkpoints/vqa/vilbert \
    --resume_file /gs/hs0/tgb-deepmt/bugliarello.e/checkpoints/vqa/vilbert/VQA_bert_base_6layer_6conect/pytorch_ckpt_latest.tar

conda deactivate
|
#include <iostream>
// Base controller for a dual-solenoid valve. The constructor announces
// itself on stdout so the base/derived construction order is observable.
class TsDualSolenoidValveController {
public:
    TsDualSolenoidValveController() {
        // Base class constructor implementation
        std::cout << "Base class constructor called" << std::endl;
    }
    // Other member functions for controlling the valve
};
// Derived controller; demonstrates that the base constructor always runs
// before the derived constructor body.
class FriendlyTsDualSolenoidValveController : public TsDualSolenoidValveController {
public:
    // The base default constructor is invoked implicitly here — the explicit
    // initializer in the original was redundant and is omitted.
    FriendlyTsDualSolenoidValveController() {
        std::cout << "Derived class constructor called" << std::endl;
        // Extension point: derived-class-specific initialization goes here.
    }
};
// Demonstrates base-then-derived constructor ordering by constructing one
// derived controller on the stack.
int main() {
    FriendlyTsDualSolenoidValveController controller;
    // Output:
    // Base class constructor called
    // Derived class constructor called
    return 0;
}
# Write your solution here

# Read a word and report its length when it has more than one letter.
word = input("Please type in a word: ")
length = len(word)

if length > 1:
    # Bug fix: the message previously always said "the word hey" no matter
    # what was typed; interpolate the actual input word instead.
    print(f"There are {length} letters in the word {word}")

print("Thank you!")
<reponame>sdesby/GareLaPlusProcheIonic<filename>src/app/app.module.ts
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { HttpModule} from '@angular/http';
import { IonicApp, IonicModule } from 'ionic-angular';
import { SplashScreen } from '@ionic-native/splash-screen';
import { StatusBar } from '@ionic-native/status-bar';
import { MyApp } from './app.component';
import { HomePage } from '../pages/home/home';
import {ResultPage} from '../pages/result/result';
import { NearestStationProvider } from '../providers/nearest-station/nearest-station';
// Root Angular/Ionic module: declares the app shell and pages, wires Ionic
// bootstrap, and registers native plugins plus the station-lookup provider.
@NgModule({
  // Components/pages compiled as part of this module.
  declarations: [
    MyApp,
    HomePage,
    ResultPage
  ],
  imports: [
    BrowserModule,
    // NOTE(review): @angular/http is deprecated in later Angular versions —
    // confirm before upgrading this project.
    HttpModule,
    IonicModule.forRoot(MyApp)
  ],
  // Ionic supplies its own bootstrap component.
  bootstrap: [IonicApp],
  // Pages created dynamically by the Ionic navigation stack.
  entryComponents: [
    MyApp,
    HomePage,
    ResultPage
  ],
  providers: [
    StatusBar,
    SplashScreen,
    NearestStationProvider
  ]
})
export class AppModule {}
|
#!/bin/sh
# Run the C preprocessor over abirechner.html: -E stop after preprocessing,
# -P suppress #line markers, -pipe avoid temp files; write the expanded
# result to abirechner.zip.html.
cpp -E -P -pipe -o abirechner.zip.html abirechner.html
|
/**
 * Invoke a callback asynchronously after a delay.
 *
 * Generalized: the delay, previously hard-coded to 1000 ms, is now a
 * parameter defaulting to 1000 so existing callers are unaffected.
 *
 * @param {Function} cb - Callback to run once the delay elapses.
 * @param {number} [delayMs=1000] - Delay in milliseconds.
 * @returns {*} Timer id from setTimeout, usable with clearTimeout.
 */
function delayedCall(cb, delayMs = 1000) {
  return setTimeout(cb, delayMs);
}
import React from 'react'
import SkillsSection from '../SkillsSection/SkillsSection'
// Preset of SkillsSection for "skills to learn": fixes the label and
// placeholder copy while forwarding all other props unchanged.
export default (props) => <SkillsSection {...props} label='Add a skill you want to learn' placeholder='skills do you want to learn?' />
|
# Minimal nginx image serving a single static page.
# NOTE(review): consider pinning the base tag (e.g. ubuntu:22.04) for
# reproducible builds; left untagged here to preserve current behavior.
FROM ubuntu

# Use apt-get (stable CLI intended for scripts) instead of the interactive
# 'apt' wrapper, and drop the package lists to keep the image smaller.
RUN apt-get update && apt-get install -y nginx \
    && rm -rf /var/lib/apt/lists/*

# Bug fix: Ubuntu's nginx package serves /var/www/html by default, not
# /usr/share/nginx/html — the copied page was never reachable. Place the
# page where the packaged default site actually looks.
WORKDIR /var/www/html
COPY index.html index.html

EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]
/*
* Copyright (C) 2013 Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.hive.metastore.api;
import com.facebook.hive.metastore.client.HiveMetastore;
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
import org.apache.hadoop.hive.metastore.api.InvalidTableLinkDescriptionException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.PartitionEventType;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.Type;
import org.apache.hadoop.hive.metastore.api.UnknownDBException;
import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
import org.apache.hadoop.hive.metastore.api.UnknownTableException;
import org.apache.thrift.TException;
import java.io.Closeable;
import java.util.List;
import java.util.Map;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Identical to the old ThriftMetaStore.Iface class generated by the IDL compiler.
*/
public interface ThriftHiveMetastore extends Closeable
{
void create_database(Database database) throws AlreadyExistsException, InvalidObjectException, MetaException, TException;
Database get_database(String name) throws NoSuchObjectException, MetaException, TException;
void drop_database(String name, boolean deleteData, boolean cascade) throws NoSuchObjectException, InvalidOperationException, MetaException, TException;
List<String> get_databases(String pattern) throws MetaException, TException;
List<String> get_all_databases() throws MetaException, TException;
void alter_database(String dbname, Database db) throws MetaException, NoSuchObjectException, TException;
Type get_type(String name) throws MetaException, NoSuchObjectException, TException;
boolean create_type(Type type) throws AlreadyExistsException, InvalidObjectException, MetaException, TException;
boolean drop_type(String type) throws MetaException, NoSuchObjectException, TException;
Map<String, Type> get_type_all(String name) throws MetaException, TException;
List<FieldSchema> get_fields(String db_name, String table_name) throws MetaException, UnknownTableException, UnknownDBException, TException;
List<FieldSchema> get_schema(String db_name, String table_name) throws MetaException, UnknownTableException, UnknownDBException, TException;
void create_table(Table tbl) throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException;
void create_table_with_environment_context(Table tbl, EnvironmentContext environment_context) throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException;
void create_table_link(String dbName, String targetDbName, String targetTableName, String owner, boolean isStatic, Map<String, String> linkProperties) throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException,
InvalidTableLinkDescriptionException, TException;
void drop_table(String dbname, String name, boolean deleteData) throws NoSuchObjectException, MetaException, TException;
void drop_table_link(String dbName, String targetDbName, String targetTableName) throws NoSuchObjectException, MetaException, TException;
void drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, TException;
List<String> get_tables(String db_name, String pattern) throws MetaException, TException;
List<String> get_all_tables(String db_name) throws MetaException, TException;
Table get_table(String dbname, String tbl_name) throws MetaException, NoSuchObjectException, TException;
boolean exists_table(String dbname, String tbl_name) throws MetaException, TException;
Table get_table_link(String dbName, String targetDbName, String targetTableName) throws MetaException, NoSuchObjectException, TException;
List<Table> get_table_objects_by_name(String dbname, List<String> tbl_names) throws MetaException, InvalidOperationException, UnknownDBException, TException;
List<String> get_table_names_by_filter(String dbname, String filter, short max_tables) throws MetaException, InvalidOperationException, UnknownDBException, TException;
void alter_table(String dbname, String tbl_name, Table new_tbl) throws InvalidOperationException, MetaException, TException;
void alter_table_with_environment_context(String dbname, String tbl_name, Table new_tbl, EnvironmentContext environment_context) throws InvalidOperationException, MetaException, TException;
void alter_table_link(String dbName, String targetDbName, String targetTableName, Table new_tbl) throws InvalidOperationException, MetaException, TException;
void alter_table_link_properties(String dbName, String targetDbName, String targetTableName, Map<String, String> updatedProperties) throws InvalidOperationException, MetaException, NoSuchObjectException, TException;
Partition add_partition(Partition new_part) throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
Partition add_partition_with_environment_context(Partition new_part, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
Partition add_table_link_partition(String dbName, String targetDbName, String targetTableName, String partitionName) throws InvalidObjectException, AlreadyExistsException, NoSuchObjectException, MetaException, TException;
int add_partitions(List<Partition> new_parts) throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
Partition append_partition(String db_name, String tbl_name, List<String> part_vals) throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
Partition append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException,
TException;
Partition append_partition_by_name(String db_name, String tbl_name, String part_name) throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
Partition append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException,
TException;
boolean drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData) throws NoSuchObjectException, MetaException, TException;
boolean drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, TException;
boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData) throws NoSuchObjectException, MetaException, TException;
boolean drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, TException;
boolean drop_table_link_partition(String dbName, String targetDbName, String targetTableName, String partitionName) throws NoSuchObjectException, MetaException, TException;
Partition get_partition_template(String db_name, String tbl_name, List<String> part_vals) throws InvalidObjectException, MetaException, TException;
Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, NoSuchObjectException, TException;
List<Partition> exchange_partition(Map<String, String> partitionSpecs, String source_db, String source_table_name, String dest_db, String dest_table_name, boolean overwrite) throws MetaException, NoSuchObjectException, InvalidObjectException,
InvalidInputException, AlreadyExistsException, TException;
Partition get_partition_with_auth(String db_name, String tbl_name, List<String> part_vals, String user_name, List<String> group_names) throws MetaException, NoSuchObjectException, TException;
Partition get_partition_by_name(String db_name, String tbl_name, String part_name) throws MetaException, NoSuchObjectException, TException;
List<Partition> get_partitions(String db_name, String tbl_name, short max_parts) throws NoSuchObjectException, MetaException, TException;
List<Partition> get_partitions_with_auth(String db_name, String tbl_name, short max_parts, String user_name, List<String> group_names) throws NoSuchObjectException, MetaException, TException;
List<String> get_partition_names(String db_name, String tbl_name, short max_parts) throws MetaException, TException;
int get_total_partitions(String db_name, String tbl_name) throws MetaException, TException;
List<Partition> get_partitions_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts) throws MetaException, NoSuchObjectException, TException;
List<Partition> get_partitions_ps_with_auth(String db_name, String tbl_name, List<String> part_vals, short max_parts, String user_name, List<String> group_names) throws NoSuchObjectException, MetaException, TException;
List<String> get_partition_names_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts) throws MetaException, NoSuchObjectException, TException;
List<Partition> get_partitions_by_filter(String db_name, String tbl_name, String filter, short max_parts) throws MetaException, NoSuchObjectException, TException;
List<Partition> get_partitions_by_names(String db_name, String tbl_name, List<String> names) throws MetaException, NoSuchObjectException, TException;
void alter_partition(String db_name, String tbl_name, Partition new_part) throws InvalidOperationException, MetaException, TException;
void alter_partitions(String db_name, String tbl_name, List<Partition> new_parts) throws InvalidOperationException, MetaException, TException;
void alter_partition_with_environment_context(String db_name, String tbl_name, Partition new_part, EnvironmentContext environment_context) throws InvalidOperationException, MetaException, TException;
void rename_partition(String db_name, String tbl_name, List<String> part_vals, Partition new_part) throws InvalidOperationException, MetaException, TException;
boolean partition_name_has_valid_characters(List<String> part_vals, boolean throw_exception) throws MetaException, TException;
String get_config_value(String name, String defaultValue) throws ConfigValSecurityException, TException;
List<String> partition_name_to_vals(String part_name) throws MetaException, TException;
Map<String, String> partition_name_to_spec(String part_name) throws MetaException, TException;
void markPartitionForEvent(String db_name, String tbl_name, Map<String, String> part_vals, PartitionEventType eventType) throws MetaException, NoSuchObjectException, UnknownDBException, UnknownTableException, UnknownPartitionException,
InvalidPartitionException, TException;
boolean isPartitionMarkedForEvent(String db_name, String tbl_name, Map<String, String> part_vals, PartitionEventType eventType) throws MetaException, NoSuchObjectException, UnknownDBException, UnknownTableException, UnknownPartitionException,
InvalidPartitionException, TException;
Index add_index(Index new_index, Table index_table) throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
void alter_index(String dbname, String base_tbl_name, String idx_name, Index new_idx) throws InvalidOperationException, MetaException, TException;
boolean drop_index_by_name(String db_name, String tbl_name, String index_name, boolean deleteData) throws NoSuchObjectException, MetaException, TException;
Index get_index_by_name(String db_name, String tbl_name, String index_name) throws MetaException, NoSuchObjectException, TException;
List<Index> get_indexes(String db_name, String tbl_name, short max_indexes) throws NoSuchObjectException, MetaException, TException;
List<String> get_index_names(String db_name, String tbl_name, short max_indexes) throws MetaException, TException;
boolean update_table_column_statistics(ColumnStatistics stats_obj) throws NoSuchObjectException,
InvalidObjectException, MetaException, InvalidInputException, TException;
boolean update_partition_column_statistics(ColumnStatistics stats_obj) throws NoSuchObjectException,
InvalidObjectException, MetaException, InvalidInputException, TException;
ColumnStatistics get_table_column_statistics(String db_name, String tbl_name, String col_name) throws
NoSuchObjectException, MetaException, InvalidInputException, InvalidObjectException, TException;
ColumnStatistics get_partition_column_statistics(String db_name, String tbl_name, String part_name, String col_name)
throws NoSuchObjectException, MetaException, InvalidInputException, InvalidObjectException, TException;
boolean delete_partition_column_statistics(String db_name, String tbl_name, String part_name, String col_name) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException, TException;
boolean delete_table_column_statistics(String db_name, String tbl_name, String col_name) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException, TException;
boolean create_role(Role role) throws MetaException, TException;
boolean drop_role(String role_name) throws MetaException, TException;
List<String> get_role_names() throws MetaException, TException;
boolean grant_role(String role_name, String principal_name, PrincipalType principal_type, String grantor, PrincipalType grantorType, boolean grant_option) throws MetaException, TException;
boolean revoke_role(String role_name, String principal_name, PrincipalType principal_type) throws MetaException, TException;
List<Role> list_roles(String principal_name, PrincipalType principal_type) throws MetaException, TException;
PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject, String user_name, List<String> group_names) throws MetaException, TException;
List<HiveObjectPrivilege> list_privileges(String principal_name, PrincipalType principal_type, HiveObjectRef hiveObject) throws MetaException, TException;
boolean grant_privileges(PrivilegeBag privileges) throws MetaException, TException;
boolean revoke_privileges(PrivilegeBag privileges) throws MetaException, TException;
List<String> set_ugi(String user_name, List<String> group_names) throws MetaException, TException;
String get_delegation_token(String token_owner, String renewer_kerberos_principal_name) throws MetaException, TException;
long renew_delegation_token(String token_str_form) throws MetaException, TException;
void cancel_delegation_token(String token_str_form) throws MetaException, TException;
public static class Client implements ThriftHiveMetastore
{
public static Client forHiveMetastore(final HiveMetastore hiveMetastore)
{
return new Client(hiveMetastore);
}
private final HiveMetastore delegate;
public Client(final HiveMetastore delegate)
{
this.delegate = checkNotNull(delegate, "delegate is null");
}
@Override
public void close()
{
delegate.close();
}
@Override
public void create_database(Database database) throws AlreadyExistsException, InvalidObjectException, MetaException, TException
{
delegate.createDatabase(database);
}
@Override
public Database get_database(String name) throws NoSuchObjectException, MetaException, TException
{
return delegate.getDatabase(name);
}
@Override
// NOTE(review): every method in this section is a straight 1:1 pass-through that
// maps a Thrift snake_case endpoint onto the camelCase method of the wrapped
// `delegate` (declared outside this excerpt). No extra validation, caching or
// error translation happens here.

// --- Database, type, schema and table operations ---
public void drop_database(String name, boolean deleteData, boolean cascade) throws NoSuchObjectException, InvalidOperationException, MetaException, TException
{
    delegate.dropDatabase(name, deleteData, cascade);
}

@Override
public List<String> get_databases(String pattern) throws MetaException, TException
{
    return delegate.getDatabases(pattern);
}

@Override
public List<String> get_all_databases() throws MetaException, TException
{
    return delegate.getAllDatabases();
}

@Override
public void alter_database(String dbname, Database db) throws MetaException, NoSuchObjectException, TException
{
    delegate.alterDatabase(dbname, db);
}

@Override
public Type get_type(String name) throws MetaException, NoSuchObjectException, TException
{
    return delegate.getType(name);
}

@Override
public boolean create_type(Type type) throws AlreadyExistsException, InvalidObjectException, MetaException, TException
{
    return delegate.createType(type);
}

@Override
public boolean drop_type(String type) throws MetaException, NoSuchObjectException, TException
{
    return delegate.dropType(type);
}

@Override
public Map<String, Type> get_type_all(String name) throws MetaException, TException
{
    return delegate.getTypeAll(name);
}

@Override
public List<FieldSchema> get_fields(String db_name, String table_name) throws MetaException, UnknownTableException, UnknownDBException, TException
{
    return delegate.getFields(db_name, table_name);
}

@Override
public List<FieldSchema> get_schema(String db_name, String table_name) throws MetaException, UnknownTableException, UnknownDBException, TException
{
    return delegate.getSchema(db_name, table_name);
}

@Override
public void create_table(Table tbl) throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException
{
    delegate.createTable(tbl);
}

@Override
public void create_table_with_environment_context(Table tbl, EnvironmentContext environment_context) throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException
{
    delegate.createTableWithEnvironmentContext(tbl, environment_context);
}

@Override
public void create_table_link(String dbName, String targetDbName, String targetTableName, String owner, boolean isStatic, Map<String, String> linkProperties) throws AlreadyExistsException, InvalidObjectException, MetaException,
    NoSuchObjectException, InvalidTableLinkDescriptionException, TException
{
    delegate.createTableLink(dbName, targetDbName, targetTableName, owner, isStatic, linkProperties);
}

@Override
public void drop_table(String dbname, String name, boolean deleteData) throws NoSuchObjectException, MetaException, TException
{
    delegate.dropTable(dbname, name, deleteData);
}

@Override
public void drop_table_link(String dbName, String targetDbName, String targetTableName) throws NoSuchObjectException, MetaException, TException
{
    delegate.dropTableLink(dbName, targetDbName, targetTableName);
}

@Override
public void drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, TException
{
    delegate.dropTableWithEnvironmentContext(dbname, name, deleteData, environment_context);
}

@Override
public List<String> get_tables(String db_name, String pattern) throws MetaException, TException
{
    return delegate.getTables(db_name, pattern);
}

@Override
public List<String> get_all_tables(String db_name) throws MetaException, TException
{
    return delegate.getAllTables(db_name);
}

@Override
public Table get_table(String dbname, String tbl_name) throws MetaException, NoSuchObjectException, TException
{
    return delegate.getTable(dbname, tbl_name);
}

@Override
public boolean exists_table(String dbname, String tbl_name) throws MetaException, TException
{
    return delegate.existsTable(dbname, tbl_name);
}

@Override
public Table get_table_link(String dbName, String targetDbName, String targetTableName) throws MetaException, NoSuchObjectException, TException
{
    return delegate.getTableLink(dbName, targetDbName, targetTableName);
}

@Override
public List<Table> get_table_objects_by_name(String dbname, List<String> tbl_names) throws MetaException, InvalidOperationException, UnknownDBException, TException
{
    return delegate.getTableObjectsByName(dbname, tbl_names);
}

@Override
public List<String> get_table_names_by_filter(String dbname, String filter, short max_tables) throws MetaException, InvalidOperationException, UnknownDBException, TException
{
    return delegate.getTableNamesByFilter(dbname, filter, max_tables);
}

@Override
public void alter_table(String dbname, String tbl_name, Table new_tbl) throws InvalidOperationException, MetaException, TException
{
    delegate.alterTable(dbname, tbl_name, new_tbl);
}

@Override
public void alter_table_with_environment_context(String dbname, String tbl_name, Table new_tbl, EnvironmentContext environment_context) throws InvalidOperationException, MetaException, TException
{
    delegate.alterTableWithEnvironmentContext(dbname, tbl_name, new_tbl, environment_context);
}

@Override
public void alter_table_link(String dbName, String targetDbName, String targetTableName, Table new_tbl) throws InvalidOperationException, MetaException, TException
{
    delegate.alterTableLink(dbName, targetDbName, targetTableName, new_tbl);
}

@Override
public void alter_table_link_properties(String dbName, String targetDbName, String targetTableName, Map<String, String> updatedProperties) throws InvalidOperationException, MetaException, NoSuchObjectException, TException
{
    delegate.alterTableLinkProperties(dbName, targetDbName, targetTableName, updatedProperties);
}
// --- Partition operations: add/append/drop/get/alter partitions and
// partition-name helpers, all delegated 1:1 to the wrapped metastore client. ---
@Override
public Partition add_partition(Partition new_part) throws InvalidObjectException, AlreadyExistsException, MetaException, TException
{
    return delegate.addPartition(new_part);
}

@Override
public Partition add_partition_with_environment_context(Partition new_part, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, TException
{
    return delegate.addPartitionWithEnvironmentContext(new_part, environment_context);
}

@Override
public Partition add_table_link_partition(String dbName, String targetDbName, String targetTableName, String partitionName) throws InvalidObjectException, AlreadyExistsException, NoSuchObjectException, MetaException, TException
{
    return delegate.addTableLinkPartition(dbName, targetDbName, targetTableName, partitionName);
}

@Override
public int add_partitions(List<Partition> new_parts) throws InvalidObjectException, AlreadyExistsException, MetaException, TException
{
    return delegate.addPartitions(new_parts);
}

@Override
public Partition append_partition(String db_name, String tbl_name, List<String> part_vals) throws InvalidObjectException, AlreadyExistsException, MetaException, TException
{
    return delegate.appendPartition(db_name, tbl_name, part_vals);
}

@Override
public Partition append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, TException
{
    return delegate.appendPartitionWithEnvironmentContext(db_name, tbl_name, part_vals, environment_context);
}

@Override
public Partition append_partition_by_name(String db_name, String tbl_name, String part_name) throws InvalidObjectException, AlreadyExistsException, MetaException, TException
{
    return delegate.appendPartitionByName(db_name, tbl_name, part_name);
}

@Override
public Partition append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, TException
{
    return delegate.appendPartitionByNameWithEnvironmentContext(db_name, tbl_name, part_name, environment_context);
}

@Override
public boolean drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData) throws NoSuchObjectException, MetaException, TException
{
    return delegate.dropPartition(db_name, tbl_name, part_vals, deleteData);
}

@Override
public boolean drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, TException
{
    return delegate.dropPartitionWithEnvironmentContext(db_name, tbl_name, part_vals, deleteData, environment_context);
}

@Override
public boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData) throws NoSuchObjectException, MetaException, TException
{
    return delegate.dropPartitionByName(db_name, tbl_name, part_name, deleteData);
}

@Override
public boolean drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, TException
{
    return delegate.dropPartitionByNameWithEnvironmentContext(db_name, tbl_name, part_name, deleteData, environment_context);
}

@Override
public boolean drop_table_link_partition(String dbName, String targetDbName, String targetTableName, String partitionName) throws NoSuchObjectException, MetaException, TException
{
    return delegate.dropTableLinkPartition(dbName, targetDbName, targetTableName, partitionName);
}

@Override
public Partition get_partition_template(String db_name, String tbl_name, List<String> part_vals) throws InvalidObjectException, MetaException, TException
{
    return delegate.getPartitionTemplate(db_name, tbl_name, part_vals);
}

@Override
public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, NoSuchObjectException, TException
{
    return delegate.getPartition(db_name, tbl_name, part_vals);
}

@Override
public List<Partition> exchange_partition(Map<String, String> partitionSpecs, String source_db, String source_table_name, String dest_db, String dest_table_name, boolean overwrite) throws MetaException, NoSuchObjectException,
    InvalidObjectException, InvalidInputException, AlreadyExistsException, TException
{
    return delegate.exchangePartition(partitionSpecs, source_db, source_table_name, dest_db, dest_table_name, overwrite);
}

@Override
public Partition get_partition_with_auth(String db_name, String tbl_name, List<String> part_vals, String user_name, List<String> group_names) throws MetaException, NoSuchObjectException, TException
{
    return delegate.getPartitionWithAuth(db_name, tbl_name, part_vals, user_name, group_names);
}

@Override
public Partition get_partition_by_name(String db_name, String tbl_name, String part_name) throws MetaException, NoSuchObjectException, TException
{
    return delegate.getPartitionByName(db_name, tbl_name, part_name);
}

@Override
public List<Partition> get_partitions(String db_name, String tbl_name, short max_parts) throws NoSuchObjectException, MetaException, TException
{
    return delegate.getPartitions(db_name, tbl_name, max_parts);
}

@Override
public List<Partition> get_partitions_with_auth(String db_name, String tbl_name, short max_parts, String user_name, List<String> group_names) throws NoSuchObjectException, MetaException, TException
{
    return delegate.getPartitionsWithAuth(db_name, tbl_name, max_parts, user_name, group_names);
}

@Override
public List<String> get_partition_names(String db_name, String tbl_name, short max_parts) throws MetaException, TException
{
    return delegate.getPartitionNames(db_name, tbl_name, max_parts);
}

@Override
public int get_total_partitions(String db_name, String tbl_name) throws MetaException, TException
{
    return delegate.getTotalPartitions(db_name, tbl_name);
}

@Override
public List<Partition> get_partitions_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts) throws MetaException, NoSuchObjectException, TException
{
    return delegate.getPartitionsPs(db_name, tbl_name, part_vals, max_parts);
}

@Override
public List<Partition> get_partitions_ps_with_auth(String db_name, String tbl_name, List<String> part_vals, short max_parts, String user_name, List<String> group_names) throws NoSuchObjectException, MetaException, TException
{
    return delegate.getPartitionsPsWithAuth(db_name, tbl_name, part_vals, max_parts, user_name, group_names);
}

@Override
public List<String> get_partition_names_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts) throws MetaException, NoSuchObjectException, TException
{
    return delegate.getPartitionNamesPs(db_name, tbl_name, part_vals, max_parts);
}

@Override
public List<Partition> get_partitions_by_filter(String db_name, String tbl_name, String filter, short max_parts) throws MetaException, NoSuchObjectException, TException
{
    return delegate.getPartitionsByFilter(db_name, tbl_name, filter, max_parts);
}

@Override
public List<Partition> get_partitions_by_names(String db_name, String tbl_name, List<String> names) throws MetaException, NoSuchObjectException, TException
{
    return delegate.getPartitionsByNames(db_name, tbl_name, names);
}

@Override
public void alter_partition(String db_name, String tbl_name, Partition new_part) throws InvalidOperationException, MetaException, TException
{
    delegate.alterPartition(db_name, tbl_name, new_part);
}

@Override
public void alter_partitions(String db_name, String tbl_name, List<Partition> new_parts) throws InvalidOperationException, MetaException, TException
{
    delegate.alterPartitions(db_name, tbl_name, new_parts);
}

@Override
public void alter_partition_with_environment_context(String db_name, String tbl_name, Partition new_part, EnvironmentContext environment_context) throws InvalidOperationException, MetaException, TException
{
    delegate.alterPartitionWithEnvironmentContext(db_name, tbl_name, new_part, environment_context);
}

@Override
public void rename_partition(String db_name, String tbl_name, List<String> part_vals, Partition new_part) throws InvalidOperationException, MetaException, TException
{
    delegate.renamePartition(db_name, tbl_name, part_vals, new_part);
}

@Override
public boolean partition_name_has_valid_characters(List<String> part_vals, boolean throw_exception) throws MetaException, TException
{
    return delegate.partitionNameHasValidCharacters(part_vals, throw_exception);
}

@Override
public String get_config_value(String name, String defaultValue) throws ConfigValSecurityException, TException
{
    return delegate.getConfigValue(name, defaultValue);
}

@Override
public List<String> partition_name_to_vals(String part_name) throws MetaException, TException
{
    return delegate.partitionNameToVals(part_name);
}

@Override
public Map<String, String> partition_name_to_spec(String part_name) throws MetaException, TException
{
    return delegate.partitionNameToSpec(part_name);
}

@Override
public void markPartitionForEvent(String db_name, String tbl_name, Map<String, String> part_vals, PartitionEventType eventType) throws MetaException, NoSuchObjectException, UnknownDBException, UnknownTableException, UnknownPartitionException,
    InvalidPartitionException, TException
{
    delegate.markPartitionForEvent(db_name, tbl_name, part_vals, eventType);
}

@Override
public boolean isPartitionMarkedForEvent(String db_name, String tbl_name, Map<String, String> part_vals, PartitionEventType eventType) throws MetaException, NoSuchObjectException, UnknownDBException, UnknownTableException,
    UnknownPartitionException, InvalidPartitionException, TException
{
    return delegate.isPartitionMarkedForEvent(db_name, tbl_name, part_vals, eventType);
}
// --- Index and column-statistics operations, delegated 1:1. ---
@Override
public Index add_index(Index new_index, Table index_table) throws InvalidObjectException, AlreadyExistsException, MetaException, TException
{
    return delegate.addIndex(new_index, index_table);
}

@Override
public void alter_index(String dbname, String base_tbl_name, String idx_name, Index new_idx) throws InvalidOperationException, MetaException, TException
{
    delegate.alterIndex(dbname, base_tbl_name, idx_name, new_idx);
}

@Override
public boolean drop_index_by_name(String db_name, String tbl_name, String index_name, boolean deleteData) throws NoSuchObjectException, MetaException, TException
{
    return delegate.dropIndexByName(db_name, tbl_name, index_name, deleteData);
}

@Override
public Index get_index_by_name(String db_name, String tbl_name, String index_name) throws MetaException, NoSuchObjectException, TException
{
    return delegate.getIndexByName(db_name, tbl_name, index_name);
}

@Override
public List<Index> get_indexes(String db_name, String tbl_name, short max_indexes) throws NoSuchObjectException, MetaException, TException
{
    return delegate.getIndexes(db_name, tbl_name, max_indexes);
}

@Override
public List<String> get_index_names(String db_name, String tbl_name, short max_indexes) throws MetaException, TException
{
    return delegate.getIndexNames(db_name, tbl_name, max_indexes);
}

@Override
public boolean update_table_column_statistics(ColumnStatistics stats_obj) throws NoSuchObjectException, InvalidObjectException, MetaException, InvalidInputException, TException
{
    return delegate.updateTableColumnStatistics(stats_obj);
}

@Override
public boolean update_partition_column_statistics(ColumnStatistics stats_obj) throws NoSuchObjectException, InvalidObjectException, MetaException, InvalidInputException, TException
{
    return delegate.updatePartitionColumnStatistics(stats_obj);
}

@Override
public ColumnStatistics get_table_column_statistics(String db_name, String tbl_name, String col_name) throws NoSuchObjectException, MetaException, InvalidInputException, InvalidObjectException, TException
{
    return delegate.getTableColumnStatistics(db_name, tbl_name, col_name);
}

@Override
public ColumnStatistics get_partition_column_statistics(String db_name, String tbl_name, String part_name, String col_name) throws NoSuchObjectException, MetaException, InvalidInputException, InvalidObjectException, TException
{
    return delegate.getPartitionColumnStatistics(db_name, tbl_name, part_name, col_name);
}

@Override
public boolean delete_partition_column_statistics(String db_name, String tbl_name, String part_name, String col_name) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException, TException
{
    return delegate.deletePartitionColumnStatistics(db_name, tbl_name, part_name, col_name);
}

@Override
public boolean delete_table_column_statistics(String db_name, String tbl_name, String col_name) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException, TException
{
    return delegate.deleteTableColumnStatistics(db_name, tbl_name, col_name);
}
// --- Roles, privileges, UGI and delegation-token operations, delegated 1:1. ---
@Override
public boolean create_role(Role role) throws MetaException, TException
{
    return delegate.createRole(role);
}

@Override
public boolean drop_role(String role_name) throws MetaException, TException
{
    return delegate.dropRole(role_name);
}

@Override
public List<String> get_role_names() throws MetaException, TException
{
    return delegate.getRoleNames();
}

@Override
public boolean grant_role(String role_name, String principal_name, PrincipalType principal_type, String grantor, PrincipalType grantorType, boolean grant_option) throws MetaException, TException
{
    return delegate.grantRole(role_name, principal_name, principal_type, grantor, grantorType, grant_option);
}

@Override
public boolean revoke_role(String role_name, String principal_name, PrincipalType principal_type) throws MetaException, TException
{
    return delegate.revokeRole(role_name, principal_name, principal_type);
}

@Override
public List<Role> list_roles(String principal_name, PrincipalType principal_type) throws MetaException, TException
{
    return delegate.listRoles(principal_name, principal_type);
}

@Override
public PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject, String user_name, List<String> group_names) throws MetaException, TException
{
    return delegate.getPrivilegeSet(hiveObject, user_name, group_names);
}

@Override
public List<HiveObjectPrivilege> list_privileges(String principal_name, PrincipalType principal_type, HiveObjectRef hiveObject) throws MetaException, TException
{
    return delegate.listPrivileges(principal_name, principal_type, hiveObject);
}

@Override
public boolean grant_privileges(PrivilegeBag privileges) throws MetaException, TException
{
    return delegate.grantPrivileges(privileges);
}

@Override
public boolean revoke_privileges(PrivilegeBag privileges) throws MetaException, TException
{
    return delegate.revokePrivileges(privileges);
}

@Override
public List<String> set_ugi(String user_name, List<String> group_names) throws MetaException, TException
{
    return delegate.setUgi(user_name, group_names);
}

@Override
public String get_delegation_token(String token_owner, String renewer_kerberos_principal_name) throws MetaException, TException
{
    return delegate.getDelegationToken(token_owner, renewer_kerberos_principal_name);
}

@Override
public long renew_delegation_token(String token_str_form) throws MetaException, TException
{
    return delegate.renewDelegationToken(token_str_form);
}

@Override
public void cancel_delegation_token(String token_str_form) throws MetaException, TException
{
    delegate.cancelDelegationToken(token_str_form);
}
}
}
|
import type { AccountId } from '@polkadot/types/interfaces';
import type { AnyJson } from '@polkadot/types/types';
import { ApiRx } from '@polkadot/api';
import { Abi } from '../Abi';
import { Contract as BaseContract } from '../base';
/**
 * RxJS-flavoured contract wrapper: binds an ABI and an on-chain address to an
 * `ApiRx` instance. All behaviour is inherited from `BaseContract<'rxjs'>`;
 * this declaration only narrows the API type parameter and the constructor
 * signature.
 */
export declare class Contract extends BaseContract<'rxjs'> {
    constructor(api: ApiRx, abi: AnyJson | Abi, address: string | AccountId);
}
|
#!/bin/bash
#
# batch-move-files.sh -- move parsed files from one directory to another.
# Moves a fixed number of files per run (stops after the 40000-file budget
# is exceeded), preserving the parsed/<bucket>/ directory layout.
#
# Copyright (c) 2011 Linas Vepstas
# IFS=$(echo -en "\n\b")

FILES=parsed/*/*
DEST=enwiki-20101011

# Create all destination bucket directories in one shot. `-p` makes this
# idempotent, so the script is safe to re-run (the original bare `mkdir`
# calls failed with "File exists" on every run after the first).
mkdir -p "$DEST"/parsed/{A..Z} "$DEST"/parsed/{num,misc}

cnt=0
for fpath in $FILES
do
	# echo "$fpath"
	# Move into the same relative location under $DEST
	# (fpath is parsed/<bucket>/<file>).
	mv "${fpath}" "$DEST/${fpath}"
	cnt=$((cnt + 1))
	# Bound the work done per run.
	if [ "$cnt" -gt 40000 ];
	then
		break
	fi
	# echo $cnt
done
|
// 6913. 동철이의 프로그래밍 대회
// 2019.06.29
#include<iostream>
#include<algorithm>
#include<vector>
#include<queue>
#include<string>
using namespace std;
// state[i][j]: 1 if person i answered problem j correctly, else 0.
int state[21][21];
int result[21]; // result[i] : number of problems person i answered correctly
int main()
{
    int t;
    cin >> t;
    for (int testCase = 1; testCase <= t; testCase++)
    {
        int n, m; // n people, m problems
        cin >> n >> m;
        // Reset per-test-case state so earlier cases cannot leak through.
        fill(result, result + 21, 0);
        for (int i = 0; i < 21; i++)
        {
            fill(state[i], state[i] + 21, 0);
        }
        // Read the answer matrix and tally each person's score in one pass
        // (the original read and tallied in two identical nested loops).
        for (int i = 0; i < n; i++)
        {
            for (int j = 0; j < m; j++)
            {
                cin >> state[i][j];
                result[i] += state[i][j];
            }
        }
        // Highest score among the n contestants. Scanning only i < n avoids
        // touching slots that belong to no contestant.
        int maxVal = 0;
        for (int i = 0; i < n; i++)
        {
            maxVal = max(result[i], maxVal);
        }
        // Number of contestants tied for first place. When nobody solved
        // anything (maxVal == 0) every contestant ties at zero, so this
        // count is naturally n -- the original's special case is no longer
        // needed because we only scan the n real contestants.
        int people = 0;
        for (int i = 0; i < n; i++)
        {
            if (result[i] == maxVal)
            {
                people++;
            }
        }
        cout << "#" << testCase << " " << people << " " << maxVal << "\n";
    }
    return 0;
}
|
<reponame>akokhanovskyi/kaa
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.verifiers.trustful.verifier;
import org.kaaproject.kaa.server.common.verifier.AbstractKaaUserVerifier;
import org.kaaproject.kaa.server.common.verifier.UserVerifierCallback;
import org.kaaproject.kaa.server.common.verifier.UserVerifierContext;
import org.kaaproject.kaa.server.verifiers.trustful.config.gen.TrustfulAvroConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Kaa user verifier that trusts every access token: any verification request
 * is reported as successful without inspecting the token. Intended for
 * development and test environments where real user verification is not
 * required.
 */
public class TrustfulUserVerifier extends AbstractKaaUserVerifier<TrustfulAvroConfig> {

    private static final Logger LOGGER = LoggerFactory.getLogger(TrustfulUserVerifier.class);

    @Override
    public void init(UserVerifierContext context, TrustfulAvroConfig configuration) {
        LOGGER.info("Initializing user verifier with context {} and configuration {}", context, configuration);
    }

    @Override
    public void checkAccessToken(String userExternalId, String accessToken, UserVerifierCallback callback) {
        LOGGER.trace("Received user verification request for user {} and access token {}", userExternalId, accessToken);
        // Trustful by design: succeed unconditionally.
        callback.onSuccess();
    }

    @Override
    public void start() {
        LOGGER.info("user verifier started");
    }

    @Override
    public void stop() {
        LOGGER.info("user verifier stopped");
    }

    @Override
    public Class<TrustfulAvroConfig> getConfigurationClass() {
        return TrustfulAvroConfig.class;
    }
}
|
<gh_stars>10-100
/*\
Entries are deprecated. Don't use them. These classes are here just so that
any 3rd party modules built for Relink V1 don't break.
Just return an object like {output: "string", impossible: true|undefined}
\*/
// Legacy Relink V1 entry classes. Kept only for backward compatibility.
function EntryNode() {
	this.children = [];
};

module.exports = EntryNode;

/** PURE VIRTUAL
 * EntryNode.prototype.report = function() -> ["string", ...]
 */

EntryNode.newType = function() {
	return EntryNode;
};

EntryNode.prototype.add = function(entry) {
	this.children.push(entry);
};

function EntryCollection() {
	// Null-prototype maps avoid accidental collisions with Object.prototype keys.
	this.children = Object.create(null);
	this.types = Object.create(null);
};

EntryNode.newCollection = function(name) {
	return EntryCollection;
};

// Again. I reiterate. Don't use this. All this is just legacy support.
// FIX: this accessor used to be defined on the EntryCollection constructor
// itself, where `this.eachChild` does not exist (accessing `impossible` threw,
// and instances never exposed the property at all). It must live on the
// prototype so collection *instances* expose `impossible`.
Object.defineProperty(EntryCollection.prototype, 'impossible', {
	get: function() {
		// Impossible if flagged directly, or if any child entry is impossible.
		var imp = this._impossible;
		this.eachChild(function(child) { imp = imp || child.impossible; });
		return imp;
	},
	set: function(impossible) {
		// FIX: store the assigned value instead of unconditionally `true`
		// (the old setter silently discarded its argument).
		this._impossible = impossible;
	}
});

EntryCollection.prototype.eachChild = function(method) {
	for (var child in this.children) {
		method(this.children[child]);
	}
};

EntryCollection.prototype.addChild = function(child, name, type) {
	this.children[name] = child;
	this.types[name] = type;
};

EntryCollection.prototype.hasChildren = function() {
	return Object.keys(this.children).length > 0;
};
|
import { TestBed, inject } from '@angular/core/testing';
import { CategoryService } from './category.service';
import { HttpClient } from '@angular/common/http';
import { of } from 'rxjs';
import { HttpResult } from '../../../core/services/http-result';
describe('CategoryService', () => {
  let categoryService: CategoryService;
  let httpClientSpy: jasmine.SpyObj<HttpClient>;

  beforeEach(() => {
    // Replace HttpClient with a jasmine spy so no real requests are made.
    const spy = jasmine.createSpyObj('HttpClient', ['get', 'put', 'post']);
    TestBed.configureTestingModule({
      providers: [
        CategoryService,
        { provide: HttpClient, useValue: spy }
      ]
    });
    categoryService = TestBed.get(CategoryService);
    httpClientSpy = TestBed.get(HttpClient);
  });

  it('should be created', inject([CategoryService], (service: CategoryService) => {
    expect(service).toBeTruthy();
  }));

  it('#updateCategory should update stubbed category', () => {
    const category = { pk: 1, category: "test" };
    httpClientSpy.put.and.returnValue(of({}));
    categoryService.editCategory(category).subscribe((res: HttpResult) => {
      expect(res.isSuccess()).toBe(true);
    });
  });

  it('#addCategory should add stubbed category', () => {
    const category = { pk: 1, category: "test" };
    httpClientSpy.post.and.returnValue(of({}));
    categoryService.addCategory(category).subscribe((res: HttpResult) => {
      expect(res.isSuccess()).toBe(true);
    });
  });

  it('#getCategories should return stubbed categories', () => {
    const categories = [
      { pk: 1, category: "test" },
      { pk: 2, category: "test2" }
    ];
    httpClientSpy.get.and.returnValue(of(categories));
    categoryService.getCategories().subscribe((res: HttpResult) => {
      expect(res.isSuccess()).toBe(true);
      expect(res.getResponse()).toEqual(categories);
    });
  });
});
|
<filename>server/src/main/java/com/breakersoft/plow/thrift/dao/ThriftNodeDao.java
package com.breakersoft.plow.thrift.dao;
import java.util.List;
import java.util.UUID;
import com.breakersoft.plow.thrift.NodeFilterT;
import com.breakersoft.plow.thrift.NodeT;
/**
 * Read-only DAO that exposes plow nodes as Thrift transfer objects.
 */
public interface ThriftNodeDao {

    /** Returns all nodes matching the given filter. */
    List<NodeT> getNodes(NodeFilterT filter);

    /** Returns the node with the given unique id. */
    NodeT getNode(UUID id);
}
|
<filename>Weep Engine/App.h
#pragma once
#include "Globals.h"
#include "Profiler.h"
#include "Module.h"
#include "MathGeoLib/include/MathBuildConfig.h"
#include "MathGeoLib/include/MathGeoLib.h"
#include "MathGeoLib/include/Algorithm/Random/LCG.h"
class Profiler;
class ModuleWindow;
class ModuleInput;
class ModuleRenderer3D;
class ModuleCamera3D;
class ModuleImporter;
class DebugScene;
class GameObjectManager;
class SceneManager;
class ModuleTexture;
class ModuleQuadtree;
class ResourceManagment;
class JsonHelper;
class ModuleFileSystem;
// Central application object: owns every engine module, drives the
// Awake -> Start -> Update (per frame) -> CleanUp lifecycle, and offers small
// path/string/OS utilities used across the engine.
// NOTE(review): relies on names pulled in via Globals.h (std::string, string,
// std::list, uint) -- confirm those includes/usings live there.
class Application
{
public:
	Application(int argc, char* args[]);
	~Application();

	// Lifecycle entry points, called in order by the engine's main loop.
	bool Awake();
	bool Start();
	bool Update();
	bool CleanUp();

	// Requests application shutdown (takes effect at end of frame).
	void CloseApp();

	// Command-line arguments as passed to the constructor.
	int GetArgc() const;
	const char* GetArgv(int index) const;

	// Frame timing and FPS cap.
	float GetDT();
	void SetMaxFps(int set);
	int GetMaxFps();

	// File loading and path/string helpers.
	void LoadFile(const char * filepath);
	std::string GetFilePath(const char * file_path);
	std::string GetFileName(const char * file_path);
	std::string GetFileNameWithoutExtension(const char * file_path);
	// NOTE(review): unqualified `string` below is inconsistent with the
	// std::string declarations above; presumably aliased in Globals.h.
	string GetFileExtension(const char * file_name);
	string ToLowerCase(std::string str);
	std::string GetStringByLength(const char* string, uint lenght_desire);

	// OS integration helpers.
	void OpenWeb(string web);
	const char* GetBasePath();
	void OpenFolder(const char* folder);

	// Flag a save/load request; serviced later (see want_to_save/want_to_load).
	void WantToSave();
	void WantToLoad();

private:
	void AddModule(Module* mod);
	void PrepareUpdate();
	void FinishUpdate();
	void SaveAll();
	void LoadAll();

public:
	// Modules (raw owning pointers; also tracked in `modules` below).
	ModuleWindow* window = nullptr;
	ModuleInput* input = nullptr;
	ModuleRenderer3D* renderer3D = nullptr;
	ModuleCamera3D* camera = nullptr;
	DebugScene* debug_scene = nullptr;
	GameObjectManager* game_object_manager = nullptr;
	ModuleImporter* importer = nullptr;
	ModuleTexture* texture = nullptr;
	ModuleFileSystem* file_system = nullptr;
	ModuleQuadtree* quadtree = nullptr;
	SceneManager* scene_manager = nullptr;
	ResourceManagment* resource_managment = nullptr;
	Profiler* profiler = nullptr;
	LCG* random = nullptr;
	JsonHelper* json_helper = nullptr;

	// Update order follows insertion order into this list.
	std::list<Module*> modules;

private:
	int argc;
	char** args;
	bool close_app = false;
	// Deferred save/load flags set by WantToSave()/WantToLoad().
	bool want_to_save = false;
	bool want_to_load = false;
};
extern Application* App; |
const MongoClient = require('mongodb').MongoClient;
// Connection string for the local MongoDB instance.
const uri = 'mongodb://localhost:27017';

// Insert a sample customer.
// FIX: the connect error used to be ignored, so a failed connection crashed
// with "cannot read db of undefined"; clients were also never closed, which
// leaves the process hanging on open connections.
MongoClient.connect(uri, function(err, client) {
  if (err) throw err;
  const db = client.db('mydb');
  db.collection('customers').insertOne({
    name: "John Doe",
    age: 30
  }, function(err) {
    client.close(); // release the connection whether or not the write failed
    if (err) throw err;
  });
});

// Delete the sample customer.
MongoClient.connect(uri, function(err, client) {
  if (err) throw err;
  const db = client.db('mydb');
  db.collection('customers').deleteOne({
    name: "John Doe",
    age: 30
  }, function(err) {
    client.close();
    if (err) throw err;
  });
});

// Update the sample customer's age.
MongoClient.connect(uri, function(err, client) {
  if (err) throw err;
  const db = client.db('mydb');
  db.collection('customers').updateOne({
    name: "John Doe"
  }, {
    $set: { age: 31 }
  }, function(err) {
    client.close();
    if (err) throw err;
  });
});
package com.example.conditions;
import net.openid.conformance.condition.AbstractCondition;
import net.openid.conformance.condition.PreEnvironment;
import net.openid.conformance.testmodule.Environment;
@PreEnvironment(required = {"environmentKey1", "environmentKey2"})
public class CustomCondition extends AbstractCondition {
@Override
public Environment evaluate(Environment env) {
// Implement the logic to check the pre-existing environment and update it based on the condition's outcome
// Use env.getString("environmentKey1") and env.getString("environmentKey2") to access the required environment variables
// Update the environment using env.putString("newKey", "newValue") or other appropriate methods
// Example logic:
if (env.getString("environmentKey1").equals("someValue")) {
env.putString("newKey", "newValue");
} else {
env.putString("newKey", "defaultValue");
}
return env;
}
} |
<gh_stars>0
import { Component, Inject, OnInit, OnDestroy, HostListener } from '@angular/core';
import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material/dialog';
import { map } from 'rxjs/operators';
import { Subscription, interval } from 'rxjs';
@Component({
  selector: 'op-confirm-dialog',
  templateUrl: './message-dialog.component.html',
  styleUrls: ['./message-dialog.component.scss']
})
export class MessageDialogComponent implements OnInit, OnDestroy {
  // Whether the countdown timer is rendered in the dialog.
  showTimer: boolean;
  // Remaining seconds, rendered by the template.
  timer: string;
  IDLE_TIMEOUT = 60; // seconds
  _idleSecondsCounter = 0;
  // Subscriptions torn down in ngOnDestroy.
  private _subscriptions: Subscription[] = [];
  // FIX: removed the unused `inited` field.

  constructor(
    public dialogRef: MatDialogRef<MessageDialogComponent>,
    @Inject(MAT_DIALOG_DATA) public data: any
  ) { }

  ngOnInit() {
    // Show the countdown only when the caller explicitly opted in.
    // (`=== true` alone covers the original `!= null && === true` check.)
    this.showTimer = this.data.showTimer === true;
    // if (this.showTimer) { this.startCountDown(); }
  }

  ngOnDestroy() {
    this._subscriptions.forEach(sub => {
      sub.unsubscribe();
    });
  }

  //private startCountDown(): void {
  //  const counter$ = interval(1000).pipe(
  //    map((x) => {
  //      return (this.IDLE_TIMEOUT - this._idleSecondsCounter);
  //    })
  //  );
  //  this._subscriptions.push(counter$
  //    .subscribe((x) => {
  //      this.timer = x.toString();
  //      this._idleSecondsCounter++;
  //      if (x === 1) {
  //        this.data.isOK = true;
  //        this.dialogRef.close(true);
  //        return;
  //      }
  //    }));
  //}
}
|
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression

# Training data: four 2-D points and their target values.
X = np.array([[1, 1], [2, 2], [3, 3], [4, 4]])
y = np.array([5, 10, 15, 20])

# Fit an ordinary least-squares linear model to the training data.
model = LinearRegression().fit(X, y)

# Predict the target for a new point and print it rounded to 2 decimals.
prediction = model.predict([[5, 5]])[0]
print(round(prediction, 2))
<filename>src/pages/404.js
import * as React from "react"
const NotFoundPage = () => (
<div>
<h1>404 Page</h1>
</div>
)
export default NotFoundPage
|
#!/bin/bash
# FIX: `source` is a bashism, so the shebang must be bash (the original
# declared /bin/sh while using `source`).

# source env
source ./env.local

# check if binding exists (quote expansions so names with spaces don't break)
bind=$(bx cs cluster-services "$cluster_name" | grep "$cloudant_svc_name")
echo "$bind"
if [ -z "$bind" ] ; then
	# FIX: corrected "servive" typo in the user-facing message.
	echo "Cloudant service not bound to cluster" "$cloudant_svc_name"
	echo "binding..."
	bx cs cluster-service-bind "$cluster_name" "$cluster_namespace" "$cloudant_svc_name"
else
	echo "Cloudant service already bound to cluster" "$cloudant_svc_name"
fi

# Show the (possibly new) binding.
echo "Binding"
bx cs cluster-services "$cluster_name" | grep "$cloudant_svc_name"
|
import './commit-detail-sk';
import './commit-detail-sk.scss';
|
<gh_stars>1-10
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.handleFilesOption = void 0;
const immutable_1 = require("immutable");
const cli_options_1 = require("../cli-options");
/**
 * Normalise the `files` CLI option into the namespaced shape the server
 * expects ({ core: { globs, objs } }) and store it back on the options map.
 *
 * @param incoming immutable options map
 * @returns a [updatedOptions, errors] tuple; errors is always empty here
 */
function handleFilesOption(incoming) {
    const rawValue = incoming.get("files");
    const parsed = (0, cli_options_1.makeFilesArg)(rawValue);
    // Start from empty lists and only copy over non-empty results.
    const core = { globs: [], objs: [] };
    if (parsed.globs.length) {
        core.globs = parsed.globs;
    }
    if (parsed.objs.length) {
        core.objs = parsed.objs;
    }
    const namespaces = { core };
    return [incoming.set("files", (0, immutable_1.fromJS)(namespaces)), []];
}
exports.handleFilesOption = handleFilesOption;
//# sourceMappingURL=handleFilesOption.js.map |
<filename>src/app/services/cache.service.spec.ts
import { CacheService } from './cache.service';
// Smoke test: the service can be constructed with no collaborators.
describe('CacheService', () => {
  it('should create an instance', () => {
    const service = new CacheService();
    expect(service).toBeTruthy();
  });
});
|
<reponame>rpatil524/COLID-Data-Marketplace-Frontend
// Local development configuration for the COLID Data Marketplace frontend.
// Placeholder values (<... client id>, placeholder URLs) must be replaced
// per deployment; the $BUILD_* tokens are substituted by the CI pipeline.
export const environment = {
  Label: 'Local',
  production: false,
  allowAnonymous: true, // skip authentication when developing locally
  interceptors: [],
  pageSize: 10, // default number of results per page
  // Backend service endpoints (local ports).
  dmpCoreApiUrl: 'http://localhost:51800/api/',
  appDataApiUrl: 'http://localhost:51810/api',
  colidApiUrl: 'http://localhost:51770/api/v3',
  loggingUrl: 'http://localhost:51800/api/log',
  releaseNotesUrl: 'https://placeholder.org/',
  // Azure AD (ADAL) settings.
  adalConfig: {
    authority: "yourdomain.onmicrosoft.com",
    clientId: '<data marketplace client id>',
    redirectUri: 'http://localhost:4301/logged-in',
    // Maps each protected API origin to the OAuth scopes requested for it.
    protectedResourceMap: {
      'http://localhost:51800': ['<search service client id>/Resource.Search.All'],
      'http://localhost:51810': ['<appdata service client id>/UserData.ReadWrite'],
      'http://localhost:51770': ['<registration service client id>/Resource.Read.All']
    },
    postLogoutRedirectUri: 'http://localhost:4301/'
  },
  // Support/feedback links shown in the UI.
  appSupportFeedBack: {
    mailToLink: 'mailTo:none',
    supportTicketLink: 'http://placeholder.url/'
  },
  // CI build metadata, substituted at build time.
  build: {
    ciJobId: '$BUILD_CIJOBID',
    ciPipelineId: '$BUILD_CIPIPELINEID',
    ciCommitSha: '$BUILD_CICOMMITSHA',
  },
  pidUrl: 'http://localhost:4200/',
  kgeUrl: 'http://localhost:4400/'
};
|
/* eslint-disable react/no-unused-state */
/* eslint-disable no-undef */
/* eslint-disable class-methods-use-this */
/* eslint-disable import/no-named-as-default */
/* eslint-disable import/named */
/* eslint-disable react/no-array-index-key */
/* eslint-disable react/prefer-stateless-function */
/* eslint-disable no-unused-vars */
/* https://dev.to/sage911/how-to-write-a-search-component-with-suggestions-in-react-d20 */
import React from 'react';
// import data from '../../Data/DiseasesData';
import Card from './Card';
import Autocomplete from './Autocomplete';
import Cancer from '../../Images/Diseases/1.png';
import './style.css';
import { getDiseases, deleteDisease, updateDisease } from '../../Api/Disease';
import { getSubdisease } from '../../Api/Subdisease';
import Footer from '../Footer';
// Disease catalogue page: a sidebar of diseases, the selected disease's
// sub-diseases as cards, an autocomplete filter, and CRUD actions.
class Diseases extends React.Component {
  constructor(props) {
    super(props);
    this.filteredData = this.filteredData.bind(this);
    this.state = {
      disease: [],       // all diseases, loaded in componentDidMount
      filteredData: [],  // sub-diseases currently shown (post-filter)
      activeIndex: 0,    // index of the selected disease in `disease`
      subdiseases: [],   // all sub-diseases of the selected disease
      user: null         // unused while authentication is disabled below
    };
  }
  // Load all diseases, then the sub-diseases of the first one.
  componentDidMount() {
    // this.setState({ user: isAuthenticated().user });
    console.log('mounted');
    getDiseases().then((diseases) => {
      this.setState({
        disease: diseases
      });
      this.setSubdiseases(diseases, 0);
    });
  }
  // Clear then re-populate the sub-disease lists for diseases[index].
  // NOTE(review): each resolved fetch spreads this.state.* captured at resolve
  // time; concurrent resolutions may race and the final order is
  // response-order, not request-order — confirm this is acceptable.
  setSubdiseases = (diseases, index) => {
    this.setState({
      subdiseases: [],
      filteredData: []
    });
    for (let i = 0; i < diseases[index].subdiseases.length; i++) {
      getSubdisease(diseases[index].subdiseases[i]).then((sub) => {
        this.setState({
          subdiseases: [ ...this.state.subdiseases, sub ],
          filteredData: [ ...this.state.filteredData, sub ]
        });
      });
    }
  };
  // Callback passed to <Autocomplete>; receives the filtered sub-disease list.
  filteredData(filteredData) {
    this.setState({ filteredData });
  }
  // Navigation helpers for the CRUD routes.
  onAddDisease = () => {
    this.props.history.push('/add_disease');
  };
  onAddSubdisease = (dId) => {
    this.props.history.push(`/add_subdisease/${dId}`);
  };
  onUpdateDisease = (disease) => {
    this.props.history.push(`/update_disease/${disease._id}`, disease);
  };
  // Delete a disease, then reset the selection to the first disease and reload.
  onDeleteDisease = (dId) => {
    deleteDisease(dId).then((data) => {
      if (data.status === 200) {
        this.setState({
          activeIndex: 0
        });
        getDiseases().then((diseases) => {
          this.setState({
            disease: diseases
          });
          this.setSubdiseases(diseases, 0);
        });
      }
    });
  };
  // Select a disease from the sidebar and load its sub-diseases.
  onDiseaseClick = (i) => {
    this.setState({
      activeIndex: i
    });
    this.setSubdiseases(this.state.disease, i);
  };
  render() {
    console.log(this.state.filteredData);
    // One card per (filtered) sub-disease.
    const html = this.state.filteredData.map((x, key) => {
      return <Card key={key} data={x} history={this.props.history} />;
    });
    return (
      <div className="container">
        <div className="mt-2">
          <div className="nav flex-column sideBar">
            <ul className="list-group">
              {this.state.disease.map((disease, index) => {
                return (
                  <li
                    className={`list-group-item ll ${this.state.activeIndex === index
                      ? 'active'
                      : null}`}
                    key={index}
                    onClick={() => this.onDiseaseClick(index)}
                  >
                    {disease.title}
                  </li>
                );
              })}
            </ul>
          </div>
          <div className="main-div">
            <div>
              <Autocomplete filteredData={this.filteredData} suggestions={this.state.subdiseases} />
            </div>
            <button className="btn btn-primary btn-raised" onClick={this.onAddDisease}>
              Create Disease
            </button>
            <div className="col-xs-12 col-md-12 col-sm-12 col-xs-12 mt-5 mx-40">
              <div className="provide-card-row">{html}</div>
            </div>
            <div style={{ textAlign: 'center' }} className="btn-group">
              <span
                className="btn btn-primary btn-sm"
                onClick={() => this.onAddSubdisease(this.state.disease[this.state.activeIndex]._id)}
              >
                Add
              </span>
              <span
                className="btn btn-info btn-sm"
                onClick={() => this.onUpdateDisease(this.state.disease[this.state.activeIndex])}
              >
                Update
              </span>
              <span
                className="btn btn-danger btn-sm"
                onClick={() => this.onDeleteDisease(this.state.disease[this.state.activeIndex]._id)}
              >
                Delete
              </span>
            </div>
          </div>
        </div>
      </div>
    );
  }
}
|
<filename>src/components/blocks/Coordinates/CoordinatesStyles.js
import { StyleSheet } from 'react-native';
// Styles for the Coordinates block: white, centred coordinate text.
export default StyleSheet.create({
  coordinates: {
    color: '#fff',
    fontSize: 19,
    textAlign: 'center',
  },
});
import axios from "axios";
// Fetch every available plan; transient failures are retried up to 5 times.
export const getPlans = () => {
  const url = `${process.env.REACT_APP_BASEURL}api/plan/GetAll`;
  const retryConfig = { ["axios-retry"]: { retries: 5 } };
  return axios.get(url, retryConfig);
};
// Create a payment request on the backend; retried up to 5 times on failure.
export const createPayment = (data) => {
  const url = `${process.env.REACT_APP_BASEURL}api/PaymentRequest/Post`;
  const retryConfig = { ["axios-retry"]: { retries: 5 } };
  return axios.post(url, data, retryConfig);
};
// Update a payment request's status by transaction id; retried up to 5 times.
export const updatePaymentStatus = (data) => {
  const url = `${process.env.REACT_APP_BASEURL}api/PaymentRequest/UpdateStatusByTransactionId`;
  const retryConfig = { ["axios-retry"]: { retries: 5 } };
  return axios.put(url, data, retryConfig);
};
// Fetch all payment requests for a company; retried up to 5 times on failure.
// Fix: removed the stray leading slash before `api` — every other endpoint in
// this module appends `api/...` directly to REACT_APP_BASEURL, so the extra
// `/` produced a double slash in the request URL.
export const getPaymentRequestByCompanyId = (CompanyId) => {
  const params = {
    CompanyId
  };
  return axios.get(
    `${process.env.REACT_APP_BASEURL}api/PaymentRequest/GetByCompanyId`,
    {
      params,
      ["axios-retry"]: {
        retries: 5,
      },
    }
  );
};
<filename>src/help.js
const chalk = require('chalk');
// Print the tool version when invoked as `version` or `-v`.
function args(argv) {
  const [command] = argv;
  if (command === 'version' || command === '-v') {
    console.log(chalk.blue("Proj-Tools version 1.1.4"));
  }
}
// Dispatch on the CLI arguments (skip the node binary and script path).
args(process.argv.slice(2));
class NetworkSettings:
    """Class-level network configuration with validated setters."""

    __TIMEOUT = 4.0  # seconds
    __BUFFER_SIZE = 1024  # bytes

    @classmethod
    def get_timeout(cls):
        """Return the current socket timeout in seconds."""
        return cls.__TIMEOUT

    @classmethod
    def set_timeout(cls, new_timeout):
        """Set the timeout; rejects negative values."""
        if new_timeout < 0:
            raise ValueError("Timeout value must be non-negative")
        cls.__TIMEOUT = new_timeout

    @classmethod
    def get_buffer_size(cls):
        """Return the current receive buffer size in bytes."""
        return cls.__BUFFER_SIZE

    @classmethod
    def set_buffer_size(cls, new_buffer_size):
        """Set the buffer size; rejects negative values."""
        if new_buffer_size < 0:
            raise ValueError("Buffer size must be non-negative")
        cls.__BUFFER_SIZE = new_buffer_size
package com.darian.design;
import java.util.Set;
import java.util.concurrent.ThreadFactory;
/**
 * Demonstrates implementing a {@link java.util.concurrent.ThreadFactory}
 * with a lambda.
 *
 * Fix: the original did not compile — the factory type was misspelled
 * ("ThreaFactory") and the lambda parameter ("ruanble") did not match the
 * name used in its body ("runable").
 */
public class FactoryDemo {
    public static void main(String[] args) {
        // create new
        Set<String> set = Set.of("Hello"); // immutable single-element set (Java 9+)
        ThreadFactory factory = (runnable) -> new Thread(runnable);
        // NOTE: the thread is created but intentionally never started,
        // matching the original demo.
        Thread thread = factory.newThread(() -> {
            System.out.println("Hello, world");
        });
    }
}
|
package fr.insee.rmes.utils;
import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
import javax.ws.rs.core.MediaType;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.codehaus.stax2.XMLOutputFactory2;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import fr.insee.rmes.modeles.StringWithLang;
import fr.insee.rmes.modeles.StringXmlMixIn;
import fr.insee.rmes.modeles.concepts.StringWithLangConcept;
import fr.insee.rmes.modeles.concepts.StringXmlMixInConcept;
import fr.insee.rmes.modeles.geo.TerritoireJsonMixIn;
import fr.insee.rmes.modeles.geo.territoire.Territoire;
import fr.insee.rmes.modeles.geo.territoires.Projections;
import fr.insee.rmes.modeles.operations.documentations.RubriqueRichText;
import fr.insee.rmes.modeles.operations.documentations.RubriqueRichTextXmlMixIn;
public class ResponseUtils {
private static Logger logger = LogManager.getLogger(ResponseUtils.class);
public String produceResponse(Object obj, String header) {
String response = "";
if (header != null && header.equals(MediaType.APPLICATION_XML)) {
XmlMapper mapper = new XmlMapper();
mapper.getFactory().getXMLOutputFactory().setProperty(XMLOutputFactory2.P_TEXT_ESCAPER,
new CustomXmlEscapingWriterFactory());
mapper.addMixIn(StringWithLang.class, StringXmlMixIn.class);
mapper.addMixIn(StringWithLangConcept.class, StringXmlMixInConcept.class);
mapper.addMixIn(RubriqueRichText.class, RubriqueRichTextXmlMixIn.class);
try {
response = mapper.writeValueAsString(obj);
// Replace XML namespace xmllang => xml:lang
response = Pattern.compile("xmllang=").matcher(response).replaceAll("xml:lang=");
// Remove XML tag <listeTerritoires>
response = Pattern.compile("<\\/?listeTerritoires>").matcher(response).replaceAll("");
// Remove duplications Territoires objects with tag <territoire> for XML response
response = Pattern.compile("(<territoires )(.+?)(<\\/territoires>)").matcher(response).replaceAll("");
// Remove last tags territoires
response = Pattern.compile("(<territoires><\\/territoires>)").matcher(response).replaceAll("");
if ( ! response.isEmpty() && obj.getClass() == Projections.class) {
// Remove XML tag <origine>
response = Pattern.compile("<\\/?origine>").matcher(response).replaceAll("");
// Remove XML tag <listeProj>
response = Pattern.compile("<\\/?listeProj>").matcher(response).replaceAll("");
}
}
catch (Exception e) {
logger.error(e.getMessage());
}
response = encodeXmlResponse(response);
}
else {
ObjectMapper mapper = new ObjectMapper();
mapper.addMixIn(Territoire.class, TerritoireJsonMixIn.class);
try {
response = mapper.writeValueAsString(obj);
} catch (JsonProcessingException e) {
logger.error(e.getMessage());
}
response = encodeJsonResponse(response);
}
return response;
}
private String escapeHtml(String s) {
s = StringEscapeUtils.unescapeHtml4(s);
return s.replace("&", "&")
.replace(">", ">")
.replace("<", "<")
.replace("\"", """);
}
public String encodeXmlResponse(String response) {
response = escapeHtml(response);
response = XmlUtils.encodeXml(response);
return new String(response.getBytes(), StandardCharsets.UTF_8);
}
public String encodeJsonResponse(String response) {
String ret = response.replaceAll("\\R", " ")//remove break lines that makes JSON invalid (breakline in texts are in <p>)
.replace('"', '\"'); //remove quote
return new String(ret.getBytes(), StandardCharsets.UTF_8);
}
}
|
import mxnet as mx
import mxnet.ndarray as nd
import mxnet.gluon as gluon
import mxnet.gluon.nn as nn
import mxnet.autograd as ag
# Used for mobienet structure
def ConvBlock(channels, kernel, stride, pad):
    """Conv2D -> BatchNorm -> ReLU block, used for the network stem/head."""
    block = nn.HybridSequential()
    block.add(nn.Conv2D(channels, kernel, strides=stride, padding=pad, use_bias=False))
    block.add(nn.BatchNorm(scale=True))
    block.add(nn.Activation('relu'))
    return block
def Conv1x1(channels, is_linear=False):
    """Pointwise (1x1) convolution with BatchNorm; ReLU unless is_linear."""
    block = nn.HybridSequential()
    block.add(nn.Conv2D(channels, 1, padding=0, use_bias=False))
    block.add(nn.BatchNorm(scale=True))
    if not is_linear:
        block.add(nn.Activation('relu'))
    return block
def DWise(channels, kernel, stride, pad):
    """Depthwise convolution (groups == channels) -> BatchNorm -> ReLU."""
    block = nn.HybridSequential()
    block.add(nn.Conv2D(channels, kernel, strides=stride, padding=pad,
                        groups=channels, use_bias=False))
    block.add(nn.BatchNorm(scale=True))
    block.add(nn.Activation('relu'))
    return block
class InvertedResidual(nn.HybridBlock):
    """MobileNetV2-style inverted-residual block.

    Expands to num_filter_in * expansion_factor channels with a 1x1 conv,
    applies a depthwise conv, then projects linearly to num_filter_out.
    A residual connection is added only for stride-1 blocks; when the
    input/output shapes differ (same_shape=False) the shortcut is passed
    through a 1x1 projection first.
    """

    def __init__(
        self,
        expansion_factor,
        num_filter_in,
        num_filter_out,
        kernel,
        stride,
        pad,
        same_shape=True,
        **kwargs):
        super(InvertedResidual, self).__init__(**kwargs)
        self.same_shape = same_shape
        self.stride = stride
        with self.name_scope():
            # expand -> depthwise -> linear projection
            self.bottleneck = nn.HybridSequential()
            self.bottleneck.add(
                Conv1x1(num_filter_in*expansion_factor),
                DWise(num_filter_in*expansion_factor, kernel, self.stride, pad),
                Conv1x1(num_filter_out, is_linear=True)
            )
            # Shortcut projection is only needed for stride-1 blocks whose
            # channel count changes.
            if (self.stride == 1 or self.stride == (1,1)) and not self.same_shape:
                self.conv_res = Conv1x1(num_filter_out)

    def hybrid_forward(self, F, x):
        out = self.bottleneck(x)
        #if self.stride == 1 and self.same_shape:
        #    out = F.elemwise_add(out, x)
        # Residual add only when spatial dims are preserved (stride 1).
        if self.stride == 1 or self.stride == (1,1):
            if not self.same_shape:
                x = self.conv_res(x)
            out = F.elemwise_add(out, x)
        return out
class InvResiSeq(nn.HybridBlock):
    """A stage of n_blocks inverted residuals; only the first block uses the
    given stride, the rest use stride 1.

    NOTE(review): the repeated (stride-1) blocks are built with
    num_filter_in even though they receive num_filter_out channels from the
    previous block, and all blocks pass same_shape=False (forcing a 1x1
    shortcut projection). This differs from the reference MobileNetV2
    wiring — confirm it is intentional.
    """

    def __init__(
        self,
        num_filter_in,
        num_filter_out,
        n_blocks,
        kernel=(3,3),
        stride=(2,2),
        pad=(1,1),
        expansion_factor=6,
        **kwargs
    ):
        super(InvResiSeq, self).__init__(**kwargs)
        self.num_filter_in = num_filter_in
        self.num_filter_out = num_filter_out
        self.n = n_blocks
        with self.name_scope():
            self.seq = nn.HybridSequential()
            # First block: applies the stage stride (may downsample).
            self.seq.add(
                InvertedResidual(
                    expansion_factor=expansion_factor,
                    num_filter_in=num_filter_in,
                    num_filter_out=num_filter_out,
                    kernel=kernel,
                    stride=stride,
                    pad=pad,
                    same_shape=False)
            )
            # Remaining blocks: stride 1.
            for _ in range(n_blocks-1):
                self.seq.add(
                    InvertedResidual(
                        expansion_factor=expansion_factor,
                        num_filter_in=num_filter_in,
                        num_filter_out=num_filter_out,
                        kernel=kernel,
                        stride=(1,1),
                        pad=pad,
                        same_shape=False)
                )
        return

    def hybrid_forward(self, F, x):
        out = self.seq(x)
        return out
def getMnetV2(first_conv_param, last_conv_param, inv_resi_params_ls, num_classes=1000):
    """Assemble a MobileNetV2-style network.

    Structure: stem conv -> inverted-residual stages -> head conv ->
    global average pooling -> flatten -> dense classifier.
    """
    net = nn.HybridSequential(prefix='mnet-stem-')
    # Stem convolution.
    with net.name_scope():
        net.add(ConvBlock(**first_conv_param))
    # Inverted residual stages.
    stages = [InvResiSeq(**param) for param in inv_resi_params_ls]
    with net.name_scope():
        net.add(*stages)
    # Head convolution.
    with net.name_scope():
        net.add(ConvBlock(**last_conv_param))
    # Pool, flatten, classify.
    with net.name_scope():
        net.add(
            nn.GlobalAvgPool2D(),
            nn.Flatten(),
            nn.Dense(units=num_classes)
        )
    return net
if __name__ == '__main__':
    # Stem and head convolution settings.
    first_conv_param = {
        'channels': 32,
        'kernel': (3, 3),
        'stride': (2, 2),
        'pad': (1, 1),
    }
    last_conv_param = {
        'channels': 1280,
        'kernel': (1, 1),
        'stride': (1, 1),
        'pad': (0, 0),
    }
    # (in_channels, out_channels, n_blocks, first-block stride) per stage;
    # kernel (3,3), pad (1,1) and expansion factor 6 are shared by all stages.
    stage_specs = [
        (32, 16, 1, (1, 1)),
        (16, 24, 2, (2, 2)),
        (24, 32, 3, (2, 2)),
        (32, 64, 4, (1, 1)),
        (64, 96, 3, (2, 2)),
        (96, 160, 3, (2, 2)),
        (160, 320, 3, (1, 1)),
    ]
    inv_resi_params_ls = [
        {
            'num_filter_in': n_in,
            'num_filter_out': n_out,
            'n_blocks': n_blocks,
            'kernel': (3, 3),
            'stride': stride,
            'pad': (1, 1),
            'expansion_factor': 6,
        }
        for n_in, n_out, n_blocks, stride in stage_specs
    ]
    net = getMnetV2(
        first_conv_param=first_conv_param,
        last_conv_param=last_conv_param,
        inv_resi_params_ls=inv_resi_params_ls,
        num_classes=2)
    # Trace the symbolic graph and render it.
    sym = net(mx.sym.Variable('data'))
    mx.viz.plot_network(symbol=net(mx.sym.Variable('data')),
                        shape={'data': (1, 3, 224, 224)}).view()
|
<filename>project/target/node-modules/webjars/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/benchmark/benchmark.js
#!/usr/bin/env node
// Benchmark several JSON-ish parsers against the same serialized sample.
var Benchmark = require('benchmark')
var YAML = require('js-yaml')
var JJU = require('../')
var JSON5 = require('json5')

var suite = new Benchmark.Suite

// Build the sample payload: package.yaml loaded and re-serialized as JSON.
var sample
sample = require('fs').readFileSync(__dirname + '/../package.yaml')
sample = YAML.safeLoad(sample)
sample = JSON.stringify(sample)

var functions = {
  'JSON': function(x) { JSON.parse(x) },
  'JSON5': function(x) { JSON5.parse(x) },
  'JJU': function(x) { JJU.parse(x) },
  'JS-YAML': function(x) { YAML.safeLoad(x) },
}

// Fix: the original used the deprecated `with` statement (forbidden in strict
// mode) to capture `fn` per iteration; a block-scoped `const` does the same
// without dynamic scoping. The octal escapes ('\033') were also replaced with
// the equivalent hex escape '\x1B' (octal escapes are invalid in strict mode).
for (var name in functions) {
  const fn = functions[name]
  suite.add(name, {
    onCycle: function onCycle(event) {
      process.stdout.write('\r\x1B[2K - ' + event.target)
    },
    fn: function () {
      fn(sample)
    },
  })
}

console.log()
suite.on('cycle', function(event) {
  console.log('\r\x1B[2K + ' + String(event.target))
})
.run()

process.on('exit', function() { console.log() })
<gh_stars>1-10
import { Command, Option } from "clipanion";
import { createLookupFile } from "../utils/lookup";
// CLI command (`lookupfile`) that converts a npm registry dump into a lookup file.
export class NpmDumpLookupCreatorCommand extends Command {
    // Path to the npmdump.json input file, supplied via --npmfile.
    public npmFile?: string = Option.String(`--npmfile`, { description: `path to a npmdump.json` });
    static override usage = Command.Usage({
        category: `Developer Tools`,
        description: `creates a lookup file from a NPM dump`,
        details: `
            This command will create a lookup file from a NPM dump.
        `,
        examples: [
            [
                `Create a lookup file from a NPM dump`,
                `$0 lookupfile --npmfile /path/to/your/npmfile.json`
            ]
        ]
    });
    static override paths = [[`lookupfile`]];

    // Runs the command; silently does nothing when --npmfile was not provided.
    async execute() {
        if (typeof this.npmFile !== "undefined") {
            await createLookupFile(this.npmFile);
        }
    }
}
|
#!/bin/bash
# Will be executed as user "root".
# Installs the pipplware Kodi APT repository and trusts its signing key.
echo "<INFO> Installing pipplware Kodi repository"
echo "<INFO> Copying kodi.list to /etc/apt/sources.list.d/"
cp -f data/kodi.list /etc/apt/sources.list.d/
echo "<INFO> Adding repo key to apt trusted keys"
wget -O - http://pipplware.pplware.pt/pipplware/key.asc | apt-key add -
# Disabled legacy steps kept for reference (raspiBackup defaults, /backup
# directory creation, cron file ownership):
# # We have to default raspiBackup to not zip, as we cannot override this for rsync backups (will fail)
# # sed -i.bak 's/^\(DEFAULT_ZIP_BACKUP=\).*/\1"0"/' $lbbackupconfig
# # Create backup directory if missing
# if [ ! -d "/backup" ]; then
#     echo "<INFO> Creating default /backup directory"
#     mkdir -p /backup
#     chown loxberry:loxberry /backup
# fi
# if [ -e "$LBHOMEDIR/system/cron/cron.d/$2" ]; then
#     chown root:root $LBHOMEDIR/system/cron/cron.d/$2
# fi
exit 0
|
using GameSparks.Api.Requests;
using GameSparks.Api.Responses;
using System;
using System.Collections.Generic;
/// <summary>
/// Helper for tagging GameSparks resources via a custom "ADD_TAGS_EVENT"
/// log event. The call is asynchronous; success/failure is only reported
/// to the console.
/// </summary>
public class GameSparksTagManager
{
    // Function to add tags to a GameSparks resource
    public void AddTagsToResource(string resourceId, List<string> tags)
    {
        // Create a new LogEventRequest to add tags to the resource
        LogEventRequest request = new LogEventRequest();
        request.SetEventKey("ADD_TAGS_EVENT"); // Assuming ADD_TAGS_EVENT is a custom event defined in GameSparks
        // Set the resource ID and tags as attributes in the request
        // (tags are joined into a single comma-separated string attribute).
        request.SetEventAttribute("resourceId", resourceId);
        request.SetEventAttribute("tags", string.Join(",", tags));
        // Send the request to GameSparks
        request.Send((response) =>
        {
            if (!response.HasErrors)
            {
                Console.WriteLine("Tags added successfully to resource: " + resourceId);
            }
            else
            {
                Console.WriteLine("Failed to add tags to resource: " + resourceId);
            }
        });
    }
}
<reponame>tkburroughs/yoko
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.yoko.io;
import org.apache.yoko.util.HexConverter;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import static org.apache.yoko.util.Hex.formatHexPara;
/**
 * Read-side view over a shared buffer core: sequential primitive reads with
 * an explicit cursor ({@code position}), plus alignment, dumping, and
 * stream-draining helpers.
 */
public final class ReadBuffer extends Buffer<ReadBuffer> {
    ReadBuffer(Core core) { super(core); }

    /** Return the next byte without advancing the cursor. */
    public byte peekByte() { return core.data[position]; }
    /** Return the next byte and advance the cursor by one. */
    public byte readByte() { return core.data[position++]; }
    /** Read one byte and widen it to a char (no sign handling). */
    public char readByteAsChar() { return (char) core.data[position++]; }

    /** Copy {@code length} bytes into {@code buffer} at {@code offset}. */
    public void readBytes(byte[] buffer, int offset, int length) {
        if (available() < length) throw new IndexOutOfBoundsException();
        System.arraycopy(core.data, position, buffer, offset, length);
        position += length;
    }

    /** Copy all remaining bytes into the supplied write buffer (cursor not advanced). */
    public void readBytes(WriteBuffer buffer) {
        buffer.writeBytes(core.data, position, available());
    }

    /** Return a copy of the remaining bytes (cursor not advanced). */
    public byte[] copyRemainingBytes() {
        return copyOf(core.data, available());
    }

    // Two-byte char reads: readChar/peekChar are big-endian (high byte
    // first, per the shifts); readChar_LE is little-endian.
    public char peekChar() {
        return (char)((core.data[position] << 8) | (core.data[position + 1] & 0xff));
    }
    public char readChar() {
        return (char) ((core.data[position++] << 8) | (core.data[position++] & 0xff));
    }
    public char readChar_LE() {
        return (char) ((core.data[position++] & 0xff) | (core.data[position++] << 8));
    }

    // Four-byte int reads: big-endian and little-endian variants.
    public int readInt() {
        return (core.data[position++] << 24)
            | ((core.data[position++] << 16) & 0xff0000)
            | ((core.data[position++] << 8) & 0xff00)
            | (core.data[position++] & 0xff);
    }
    public int readInt_LE() {
        return (core.data[position++] & 0xff)
            | ((core.data[position++] << 8) & 0xff00)
            | ((core.data[position++] << 16) & 0xff0000)
            | (core.data[position++] << 24);
    }

    /** Render the remaining bytes as ASCII (via HexConverter). */
    public String remainingBytesToAscii() {
        return HexConverter.octetsToAscii(core.data, available());
    }

    /** Hex dump of the remaining data, prefixed with cursor/length info. */
    public String dumpRemainingData() {
        StringBuilder dump = new StringBuilder();
        dump.append(String.format("Core pos=0x%x Core len=0x%x Remaining core data=%n%n", position, core.length));
        return formatHexPara(core.data, position, available(), dump).toString();
    }

    /** Hex dump of the whole buffer with a marker at the current cursor. */
    public String dumpAllDataWithPosition() {
        StringBuilder sb = new StringBuilder();
        formatHexPara(core.data, 0, position, sb);
        sb.append(String.format("------------------ pos = 0x%08X -------------------%n", position));
        return formatHexPara(core.data, position, available(), sb).toString();
    }

    /**
     * Write all remaining bytes to {@code out} and flush. On interruption,
     * the cursor is advanced by the bytes actually transferred before the
     * exception is rethrown, so the write can be resumed.
     */
    public ReadBuffer writeTo(OutputStream out) throws IOException {
        try {
            out.write(core.data, position, available());
            out.flush();
            position = core.length;
            return this;
        } catch (InterruptedIOException ex) {
            position += ex.bytesTransferred;
            throw ex;
        }
    }

    /** Reset the cursor to the start of the buffer. */
    public ReadBuffer rewindToStart() {
        position = 0;
        return this;
    }

    /** Advance the cursor to the next index on the given alignment boundary. */
    public ReadBuffer align(AlignmentBoundary boundary) {
        position = boundary.newIndex(position);
        return this;
    }

    /** Advance the cursor by {@code n} bytes, bounds-checked. */
    public ReadBuffer skipBytes(int n) {
        if (position + n > core.length) throw new IndexOutOfBoundsException();
        position = position + n;
        return this;
    }

    /** Fresh reader over the same core (clone keeps an independent cursor). */
    public ReadBuffer newReadBuffer() { return clone(); }
}
|
import pygame
class MySprite(pygame.sprite.Sprite):
    """A movable sprite that responds to arrow-key presses.

    Parameters
    ----------
    img : pygame.Surface
        Image used to render the sprite.
    x, y : int
        Initial top-left position in pixels.
    """

    def __init__(self, img, x, y):
        super().__init__()
        self.image = img
        self.rect = self.image.get_rect()
        self.rect.x = x
        self.rect.y = y

    def move_up(self, pixels):
        # Screen y grows downward, so "up" decreases y.
        self.rect.y -= pixels

    def move_down(self, pixels):
        self.rect.y += pixels

    def move_left(self, pixels):
        self.rect.x -= pixels

    def move_right(self, pixels):
        self.rect.x += pixels

    def handle_event(self, event, step=10):
        """Move the sprite on arrow-key KEYDOWN events.

        `step` generalizes the previously hard-coded 10-pixel move; it
        defaults to 10, so existing callers behave exactly as before.
        """
        if event.type != pygame.KEYDOWN:
            return
        if event.key == pygame.K_UP:
            self.move_up(step)
        elif event.key == pygame.K_DOWN:
            self.move_down(step)
        elif event.key == pygame.K_LEFT:
            self.move_left(step)
        elif event.key == pygame.K_RIGHT:
            self.move_right(step)
<reponame>andreapatri/cms_journal<filename>node_modules/@buffetjs/core/src/components/IconLinks/index.js
/* eslint-disable react/no-array-index-key */
/**
*
* IconLinks
*
*/
import React from 'react';
import PropTypes from 'prop-types';
import { Links as StyledLinks } from '@buffetjs/styles';
import Icon from '../Icon';
// Renders a row of icon buttons; each entry supplies its icon and click handler.
function IconLinks({ links }) {
  return (
    <StyledLinks>
      {links.map(({ icon, onClick }, index) => (
        <button key={index} onClick={onClick} type="button">
          <Icon className="link-icon" icon={icon} />
        </button>
      ))}
    </StyledLinks>
  );
}

// With no links provided, an empty list renders nothing.
IconLinks.defaultProps = {
  links: [],
};

IconLinks.propTypes = {
  links: PropTypes.arrayOf(
    PropTypes.shape({
      icon: PropTypes.node,
      onClick: PropTypes.func,
    })
  ),
};
|
#include <iostream>
#include <random>
#include <bitset>
using namespace std;
// Generate and print a 512-bit random key, one unbiased bit at a time.
int main() {
    random_device rd;
    mt19937 gen(rd());
    // Fix: each bitset position holds a single bit, but the original drew
    // from [0, 15]; assigning that to a bit sets it whenever the draw is
    // non-zero (probability 15/16), producing a heavily biased key.
    // Draw uniformly from {0, 1} so every bit is unbiased.
    uniform_int_distribution<int> dist(0, 1);

    // Generate a 512-bit key
    bitset<512> key;
    for (int i = 0; i < 512; i++) {
        key[i] = dist(gen);
    }

    cout << key << endl;
}
package junit;
import static org.junit.jupiter.api.Assertions.*;
import java.util.Random;
import org.junit.jupiter.api.Test;
import main.MyStringBuilder1;
/**
* @author <NAME>
*
*/
/**
 * Unit tests for {@link main.MyStringBuilder1}, comparing its behaviour
 * against {@link java.lang.String} on a fixed sample plus random inputs.
 *
 * @author <NAME>
 *
 */
public class MyStringBuilder1Test
{
    // Fixed sample shared by all tests; random inputs vary per run.
    final String myString = "Java";
    final MyStringBuilder1 myStringBuilder1 = new MyStringBuilder1(myString);
    Random random = new Random();

    /**
     * Test method for {@link main.MyStringBuilder1#append(main.MyStringBuilder1)}.
     */
    @Test
    public void testAppendMyStringBuilder1()
    {
        String someString = " Programming Language";
        MyStringBuilder1 someStringStringBuilder = new MyStringBuilder1(someString);
        MyStringBuilder1 appendedStringBuilder = myStringBuilder1.append(someStringStringBuilder);
        assertEquals(myString + someString, appendedStringBuilder.toString());
    }

    /**
     * Test method for {@link main.MyStringBuilder1#append(int)}.
     */
    @Test
    public void testAppendInt()
    {
        int randomInteger = random.nextInt();
        MyStringBuilder1 appendedIntegerStringBuilder = myStringBuilder1.append(randomInteger);
        assertEquals(myString + randomInteger, appendedIntegerStringBuilder.toString());
    }

    /**
     * Test method for {@link main.MyStringBuilder1#charAt(int)} with a valid index.
     */
    @Test
    public void testCharAtWithAValidIndex()
    {
        if (myStringBuilder1.length() > 0)
        {
            int randomIndex = random.nextInt(myStringBuilder1.length());
            assertEquals(myString.charAt(randomIndex), myStringBuilder1.charAt(randomIndex));
        }
    }

    /**
     * Test method for {@link main.MyStringBuilder1#charAt(int)} with invalid indices.
     * Out-of-range indices are expected to yield the null character, not throw.
     */
    @Test
    public void testCharAtWithInvalidIndices()
    {
        assertEquals('\u0000', myStringBuilder1.charAt(-1));
        assertEquals('\u0000', myStringBuilder1.charAt(myStringBuilder1.length()));
    }

    /**
     * Test method for {@link main.MyStringBuilder1#toLowerCase()}.
     */
    @Test
    public void testToLowerCase()
    {
        MyStringBuilder1 lowerCaseStringBuilder = myStringBuilder1.toLowerCase();
        assertEquals(myString.toLowerCase(), lowerCaseStringBuilder.toString());
    }

    /**
     * Test method for {@link main.MyStringBuilder1#substring(int, int)} with valid beginning and ending indices.
     */
    @Test
    public void testSubstringWithValidBeginEndIndices()
    {
        if (myStringBuilder1.length() > 0)
        {
            int randomBeginIndex = random.nextInt(myStringBuilder1.length());
            MyStringBuilder1 someSubstring = myStringBuilder1.substring(randomBeginIndex, myStringBuilder1.length());
            String expected = myString.substring(randomBeginIndex, myString.length());
            assertEquals(expected, someSubstring.toString());
        }
        assertEquals(myString, myStringBuilder1.substring(0, myStringBuilder1.length()).toString());
    }

    /**
     * Test method for {@link main.MyStringBuilder1#substring(int, int)} with invalid beginning and ending indices.
     * Invalid ranges are expected to return the whole builder unchanged, not throw.
     */
    @Test
    public void testSubstringWithInvalidBeginEndIndices()
    {
        assertEquals(myStringBuilder1.toString(), myStringBuilder1.substring(0, myStringBuilder1.length() + 2).toString());
        assertEquals(myStringBuilder1.toString(),
            myStringBuilder1.substring(myStringBuilder1.length(), myStringBuilder1.length() + 2).toString());
        assertEquals(myStringBuilder1.toString(), myStringBuilder1.substring(2, 1).toString());
        assertEquals(myStringBuilder1.toString(),
            myStringBuilder1.substring(myStringBuilder1.length(), myStringBuilder1.length()).toString());
        assertEquals(myStringBuilder1.toString(),
            myStringBuilder1.substring(myStringBuilder1.length() + 5, myStringBuilder1.length() + 2).toString());
        assertEquals(myStringBuilder1.toString(), myStringBuilder1.substring(-5, myStringBuilder1.length()).toString());
        assertEquals(myStringBuilder1.toString(), myStringBuilder1.substring(-5, myStringBuilder1.length() + 2).toString());
        assertEquals(myStringBuilder1.toString(), myStringBuilder1.substring(-2, -5).toString());
    }

    /**
     * Test method for {@link main.MyStringBuilder1#toString()}.
     */
    @Test
    public void testToString()
    {
        assertEquals(myString, myStringBuilder1.toString());
    }
}
def decimal_to_binary(number):
    """Return the binary representation of an integer as a string.

    Fixes: the original returned an empty string for 0 and for negative
    inputs; 0 now yields "0" and negative values get a leading '-'.

    :param number: integer to convert
    :return: binary digits as a string, e.g. 10 -> "1010", -10 -> "-1010"
    """
    if number == 0:
        return "0"
    sign = "-" if number < 0 else ""
    number = abs(number)
    bits = []
    while number > 0:
        number, remainder = divmod(number, 2)
        bits.append(str(remainder))
    # Bits were collected least-significant first; reverse before joining.
    return sign + "".join(reversed(bits))
# Demonstrate the conversion for a sample value.
sample_value = 16
print("Binary Representation:", decimal_to_binary(sample_value))
<reponame>jwfing/java-unified-sdk
package cn.leancloud;
import cn.leancloud.core.LeanCloud;
import cn.leancloud.core.LeanService;
import cn.leancloud.utils.StringUtil;
/**
 * Test configuration for the LeanCloud SDK: resolves app id/key/region from
 * environment variables (with hard-coded fallbacks) and offers several
 * initialization helpers.
 */
public class Configure {
    public static final String TEST_APP_ID;
    public static final String TEST_APP_KEY;
    private static final LeanCloud.REGION reGion;
    private static final String API_HOST;

    // Resolve credentials from the environment, falling back to the shared
    // test application; API_HOST is currently pinned rather than read from env.
    static {
        String app = System.getenv("APP_ID");
        TEST_APP_ID = StringUtil.isEmpty(app) ? "ohqhxu3mgoj2eyj6ed02yliytmbes3mwhha8ylnc215h0bgk" : app;
        String appKEY = System.getenv("APP_KEY");
        TEST_APP_KEY = StringUtil.isEmpty(appKEY) ? "<KEY>" : appKEY;
        String regionStr = System.getenv("APP_REGION");
        reGion = StringUtil.isEmpty(regionStr) ? LeanCloud.REGION.NorthChina : LeanCloud.REGION.valueOf(regionStr);
        //API_HOST = System.getenv("API_HOST");
        API_HOST = "https://ohqhxu3m.lc-cn-n1-shared.com";
        System.out.println("Test APP_id: " + TEST_APP_ID);
        System.out.println("Test APP_key: " + TEST_APP_KEY);
        System.out.println("Test APP_region: " + reGion);
        System.out.println("Test API_HOST: " + API_HOST);
        System.out.println("");
    }

    /** Initialize with explicit credentials and region; clears the master key. */
    public static void initializeWithApp(String appId, String appKey, LeanCloud.REGION region) {
        LeanCloud.setRegion(region);
        LeanCloud.clearServerURLs();
        if (!StringUtil.isEmpty(API_HOST)) {
            LeanCloud.setServer(LeanService.API, API_HOST);
        }
        LeanCloud.setLogLevel(LCLogger.Level.INFO);
        // AppConfiguration.setEnableLocalCache(false);
        LeanCloud.initialize(appId, appKey);
        LeanCloud.setMasterKey("");
    }

    /** Initialize with explicit credentials and server URL; clears the master key. */
    public static void initializeWithApp(String appId, String appKey, String serverUrl) {
        LeanCloud.setLogLevel(LCLogger.Level.INFO);
        // AppConfiguration.setEnableLocalCache(false);
        LeanCloud.initialize(appId, appKey, serverUrl);
        LeanCloud.setMasterKey("");
    }

    /** Initialize with a master key (empty app key) against the given server. */
    public static void initializeWithMasterKey(String appId, String masterKey, String serverUrl) {
        LeanCloud.setLogLevel(LCLogger.Level.INFO);
        LeanCloud.initialize(appId, "", serverUrl);
        LeanCloud.setMasterKey(masterKey);
    }

    /** Initialize using the statically resolved TEST_* credentials above. */
    public static void initializeRuntime() {
        // AppConfiguration.setLogAdapter(new DummyLoggerFactory());
        LeanCloud.setRegion(reGion);
        LeanCloud.clearServerURLs();
        if (!StringUtil.isEmpty(API_HOST)) {
            LeanCloud.setServer(LeanService.API, API_HOST);
        }
        LeanCloud.setLogLevel(LCLogger.Level.INFO);
        LeanCloud.initialize(TEST_APP_ID, TEST_APP_KEY);
        LeanCloud.setMasterKey("");
    }
}
|
#!/usr/bin/env php
<?php
/**
 * Dump a Drupal 6 database into a Drupal 7 PHP script to test the upgrade
 * process.
 *
 * Run this script at the root of an existing Drupal 6 installation.
 *
 * The output of this script is a PHP script that can be ran inside Drupal 7
 * and recreates the Drupal 6 database as dumped. Transient data from cache
 * session and watchdog tables are not recorded.
 */
// Define default settings.
// Fake a minimal request environment so Drupal's bootstrap can run from the
// command line without a real web server.
$cmd = 'index.php';
$_SERVER['HTTP_HOST'] = 'default';
$_SERVER['PHP_SELF'] = '/index.php';
$_SERVER['REMOTE_ADDR'] = '127.0.0.1';
$_SERVER['SERVER_SOFTWARE'] = NULL;
$_SERVER['REQUEST_METHOD'] = 'GET';
$_SERVER['QUERY_STRING'] = '';
$_SERVER['PHP_SELF'] = $_SERVER['REQUEST_URI'] = '/';
$_SERVER['HTTP_USER_AGENT'] = 'console';
// Bootstrap Drupal.
include_once './includes/bootstrap.inc';
drupal_bootstrap(DRUPAL_BOOTSTRAP_FULL);
// Include the utility drupal_var_export() function.
include_once __DIR__ . '/../includes/utility.inc';
// Output the PHP header.
$output = <<<ENDOFHEADER
<?php
/**
 * @file
 * Filled installation of Drupal 6.17, for test purposes.
 *
 * This file was generated by the dump-database-d6.sh tool, from an
 * installation of Drupal 6, filled with data using the generate-d6-content.sh
 * tool. It has the following modules installed:
ENDOFHEADER;
// List the enabled modules inside the generated file's doc comment.
foreach (\Drupal::moduleHandler()->getModuleList() as $module => $filename) {
  $output .= " * - $module\n";
}
$output .= " */\n\n";
// Get the current schema, order it by table name.
$schema = drupal_get_schema();
ksort($schema);
// Export all the tables in the schema.
foreach ($schema as $table => $data) {
  // Remove descriptions to save time and code.
  unset($data['description']);
  foreach ($data['fields'] as &$field) {
    unset($field['description']);
  }
  // Dump the table structure.
  $output .= "db_create_table('" . $table . "', " . drupal_var_export($data) . ");\n";
  // Don't output values for those tables.
  if (substr($table, 0, 5) == 'cache' || $table == 'sessions' || $table == 'watchdog') {
    $output .= "\n";
    continue;
  }
  // Prepare the export of values.
  $result = db_query('SELECT * FROM {'. $table .'}');
  $insert = '';
  while ($record = db_fetch_array($result)) {
    // users.uid is a serial and inserting 0 into a serial can break MySQL.
    // So record uid + 1 instead of uid for every uid and once all records
    // are in place, fix them up.
    if ($table == 'users') {
      $record['uid']++;
    }
    $insert .= '->values('. drupal_var_export($record) .")\n";
  }
  // Dump the values if there are some.
  if ($insert) {
    $output .= "db_insert('". $table . "')->fields(". drupal_var_export(array_keys($data['fields'])) .")\n";
    $output .= $insert;
    $output .= "->execute();\n";
  }
  // Add the statement fixing the serial in the user table.
  if ($table == 'users') {
    $output .= "db_query('UPDATE {users} SET uid = uid - 1');\n";
  }
  $output .= "\n";
}
// Emit the generated script on stdout; callers redirect it into a file.
print $output;
|
<filename>cyder/api/v1/endpoints/dhcp/vlan/api.py
from rest_framework import serializers
from cyder.api.v1.endpoints.api import CommonAPINestedAVSerializer
from cyder.api.v1.endpoints.dhcp import api
from cyder.cydhcp.vlan.models import Vlan, VlanAV
class VlanAVSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for Vlan attribute-value (VlanAV) records."""

    # Expose the primary key explicitly alongside the hyperlinked fields.
    id = serializers.Field(source='id')
    # Link back to the owning Vlan's detail endpoint.
    entity = serializers.HyperlinkedRelatedField(
        view_name='api-dhcp-vlan-detail')
    # Render the related attribute by name instead of primary key.
    attribute = serializers.SlugRelatedField(slug_field='name')

    class Meta:
        model = VlanAV
class VlanAVViewSet(api.CommonDHCPViewSet):
    """ViewSet exposing CRUD endpoints for Vlan attribute-value pairs."""

    model = VlanAV
    serializer_class = VlanAVSerializer
class VlanNestedKeyValueSerializer(CommonAPINestedAVSerializer):
    """Serializer for VlanAV records nested inside a Vlan representation."""

    # Hyperlink to the standalone attribute detail endpoint.
    id = serializers.HyperlinkedIdentityField(
        view_name='api-dhcp-vlan_attributes-detail')

    class Meta:
        model = VlanAV
        # Restrict nested output to the shared attribute-value field set.
        fields = api.NestedAVFields
class VlanSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for Vlan records, embedding their attribute-value pairs."""

    # Inline all attribute-value pairs belonging to this Vlan.
    vlanav_set = VlanNestedKeyValueSerializer(many=True)

    class Meta(api.CommonDHCPMeta):
        model = Vlan
        # Expand related objects one level deep in the response.
        depth = 1
class VlanViewSet(api.CommonDHCPViewSet):
    """ViewSet exposing CRUD endpoints for Vlan records."""

    model = Vlan
    serializer_class = VlanSerializer
    # Attribute-value model used by the common DHCP view machinery.
    avmodel = VlanAV
|
<filename>open-sphere-plugins/geopackage/src/main/java/io/opensphere/geopackage/export/tile/DBWriter.java
package io.opensphere.geopackage.export.tile;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import org.apache.log4j.Logger;
import gnu.trove.map.hash.TIntIntHashMap;
import io.opensphere.core.geometry.AbstractTileGeometry;
import io.opensphere.core.geometry.TerrainTileGeometry;
import io.opensphere.core.model.GeographicBoundingBox;
import io.opensphere.core.model.GeographicPosition;
import io.opensphere.core.util.collections.CollectionUtilities;
import io.opensphere.core.util.lang.StringUtilities;
import io.opensphere.geopackage.export.model.ExportModel;
import io.opensphere.geopackage.export.tile.walker.TileInfo;
import io.opensphere.geopackage.util.Constants;
import io.opensphere.mantle.data.DataTypeInfo;
import mil.nga.geopackage.BoundingBox;
import mil.nga.geopackage.GeoPackage;
import mil.nga.geopackage.extension.ExtensionScopeType;
import mil.nga.geopackage.extension.Extensions;
import mil.nga.geopackage.tiles.matrix.TileMatrix;
import mil.nga.geopackage.tiles.matrix.TileMatrixDao;
import mil.nga.geopackage.tiles.matrixset.TileMatrixSet;
import mil.nga.geopackage.tiles.user.TileDao;
import mil.nga.geopackage.tiles.user.TileRow;
import mil.nga.sf.proj.ProjectionConstants;
/**
 * Writes exported tiles to a GeoPackage database. All database work is
 * submitted to the supplied executor via {@link #execute(Runnable)} rather
 * than performed on the caller's thread.
 */
public class DBWriter
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(DBWriter.class);

    /** The export model. */
    private final ExportModel myModel;

    /** The data type. */
    private final DataTypeInfo myDataType;

    /** The top level geometries. */
    private final Collection<AbstractTileGeometry<?>> myTopLevelGeometries;

    /** The executor. */
    private final ExecutorService myExecutor;

    /** The completion latch. */
    private final CountDownLatch myCompletionLatch;

    /** The current count of tiles to be processed. */
    private final AtomicInteger myTileCount = new AtomicInteger();

    /** The zoom level to max matrix column map. */
    private final TIntIntHashMap myZoomToMaxCol = new TIntIntHashMap();

    /** The zoom level to max matrix row map. */
    private final TIntIntHashMap myZoomToMaxRow = new TIntIntHashMap();

    /** The table name. */
    private String myTableName;

    /** The tile matrix set. */
    private TileMatrixSet myTileMatrixSet;

    /** The tile DAO. */
    private TileDao myTileDao;

    /** The tile matrix DAO. */
    private TileMatrixDao myTileMatrixDao;

    /** The most recent image, kept as a sizing sample for the tile matrices. */
    private GeoPackageImage myMostRecentImage;

    /** Whether the tile matrices have already been written. */
    private boolean myFinished;

    /**
     * Constructor.
     *
     * @param model the export model
     * @param dataType the data type
     * @param topLevelGeometries the top level geometries
     * @param executor the executor
     * @param completionLatch the completion latch
     */
    public DBWriter(ExportModel model, DataTypeInfo dataType, Collection<AbstractTileGeometry<?>> topLevelGeometries,
            ExecutorService executor, CountDownLatch completionLatch)
    {
        myModel = model;
        myDataType = dataType;
        myTopLevelGeometries = topLevelGeometries;
        myExecutor = executor;
        myCompletionLatch = completionLatch;
    }

    /**
     * Gets the tile count to process.
     *
     * @return the tile count
     */
    public AtomicInteger getTileCount()
    {
        return myTileCount;
    }

    /** Creates the tile table, its DAOs, and the terrain extension if needed. */
    public void init()
    {
        execute(() ->
        {
            GeoPackage geoPackage = myModel.getGeoPackage();
            myTableName = getTileTableName(geoPackage, myDataType);
            BoundingBox bbox = getTileBoundingBox();
            // The same bounding box and WGS84 SRS are used for both the
            // contents and the tile matrix set.
            myTileMatrixSet = geoPackage.createTileTableWithMetadata(myTableName, bbox,
                    ProjectionConstants.EPSG_WORLD_GEODETIC_SYSTEM, bbox, ProjectionConstants.EPSG_WORLD_GEODETIC_SYSTEM);
            myTileDao = geoPackage.getTileDao(myTileMatrixSet);
            myTileMatrixDao = geoPackage.getTileMatrixDao();
            addTerrainExtension();
        });
    }

    /**
     * Adds a tile.
     *
     * @param tileInfo the tile info
     * @param image the image tile
     */
    public void addTile(TileInfo tileInfo, GeoPackageImage image)
    {
        execute(() ->
        {
            // Remember the image so finish() can size the tile matrices.
            myMostRecentImage = image;
            // LOGGER.info("Writing " + tileInfo);
            TileRow newRow = myTileDao.newRow();
            newRow.setZoomLevel(tileInfo.getZoomLevel());
            newRow.setTileColumn(tileInfo.getCol());
            newRow.setTileRow(tileInfo.getRow());
            newRow.setTileData(image.getImageBytes().array());
            myTileDao.create(newRow);
            updateMatrixMaps(tileInfo);
            // When the last expected tile has been written, write the tile
            // matrices and release anyone waiting on the latch.
            if (myTileCount.decrementAndGet() == 0)
            {
                addTileMatrices(image);
                if (myCompletionLatch != null)
                {
                    myCompletionLatch.countDown();
                }
            }
            myModel.getProgressReporter().getModel().incrementCompletedCount();
        });
    }

    /** Finishes by writing out the tile matrices. */
    public void finish()
    {
        execute(() -> addTileMatrices(myMostRecentImage));
    }

    /**
     * Adds all the tile matrices.
     *
     * @param sampleImage a sample image (for sizing)
     */
    private void addTileMatrices(GeoPackageImage sampleImage)
    {
        // Guarded because this is reachable both from addTile (count reaching
        // zero) and from finish(); the matrices must only be written once.
        if (!myFinished)
        {
            for (int zoom : myZoomToMaxCol.keys())
            {
                // +1 converts a max column/row index into a matrix dimension.
                addTileMatrix(zoom, myZoomToMaxCol.get(zoom) + 1, myZoomToMaxRow.get(zoom) + 1, sampleImage);
            }
            myFinished = true;
        }
    }

    /**
     * Adds a tile matrix for the given zoom level.
     *
     * @param zoomLevel the zoom level
     * @param matrixWidth the matrix width
     * @param matrixHeight the matrix height
     * @param sampleImage a sample image
     */
    private void addTileMatrix(int zoomLevel, long matrixWidth, long matrixHeight, GeoPackageImage sampleImage)
    {
        // Create the tile matrix for this zoom level
        TileMatrix tileMatrix = new TileMatrix();
        tileMatrix.setContents(myTileMatrixSet.getContents());
        tileMatrix.setZoomLevel(zoomLevel);
        tileMatrix.setMatrixWidth(matrixWidth);
        tileMatrix.setMatrixHeight(matrixHeight);
        if (sampleImage != null)
        {
            tileMatrix.setTileWidth(sampleImage.getWidth());
            tileMatrix.setTileHeight(sampleImage.getHeight());
        }
        else
        {
            // No sample image was seen; fall back to a 512x512 tile size.
            tileMatrix.setTileWidth(512);
            tileMatrix.setTileHeight(512);
        }
        try
        {
            myTileMatrixDao.create(tileMatrix);
        }
        catch (SQLException e)
        {
            LOGGER.error(e, e);
        }
    }

    /**
     * Adds the terrain extension to the geo package if the tiles are terrain
     * tiles.
     */
    private void addTerrainExtension()
    {
        // Inspect one geometry; terrain-ness is assumed uniform across the set.
        AbstractTileGeometry<?> sampleGeom = CollectionUtilities.getItemOrNull(myTopLevelGeometries, 0);
        if (sampleGeom instanceof TerrainTileGeometry)
        {
            String imageFormat = ((TerrainTileGeometry)sampleGeom).getReader().getImageFormat();
            Extensions extension = new Extensions();
            extension.setTableName(myTableName);
            extension.setExtensionName(Constants.TERRAIN_EXTENSION);
            extension.setDefinition(imageFormat);
            extension.setScope(ExtensionScopeType.READ_WRITE);
            GeoPackage geoPackage = myModel.getGeoPackage();
            geoPackage.createExtensionsTable();
            try
            {
                geoPackage.getExtensionsDao().create(extension);
            }
            catch (SQLException e)
            {
                LOGGER.error(e, e);
            }
        }
    }

    /**
     * Updates the matrix maps with the tile info. Tracks the maximum column
     * and row index seen for each zoom level. Note: TIntIntHashMap returns its
     * no-entry value (0 by default) for zoom levels not yet seen.
     *
     * @param tileInfo the tile info
     */
    private void updateMatrixMaps(TileInfo tileInfo)
    {
        int maxWidth = myZoomToMaxCol.get(tileInfo.getZoomLevel());
        if (tileInfo.getCol() > maxWidth)
        {
            myZoomToMaxCol.put(tileInfo.getZoomLevel(), tileInfo.getCol());
        }
        int maxHeight = myZoomToMaxRow.get(tileInfo.getZoomLevel());
        if (tileInfo.getRow() > maxHeight)
        {
            myZoomToMaxRow.put(tileInfo.getZoomLevel(), tileInfo.getRow());
        }
    }

    /**
     * Executes the command. Commands submitted after the executor has shut
     * down are silently dropped.
     *
     * @param command the command
     */
    private void execute(Runnable command)
    {
        if (!myExecutor.isShutdown())
        {
            myExecutor.execute(command);
        }
    }

    /**
     * Gets the bounding box of the tiles, computed as the minimum geographic
     * box containing every vertex of the top-level geometries.
     *
     * @return the bounding box
     */
    private BoundingBox getTileBoundingBox()
    {
        Set<GeographicPosition> positions = myTopLevelGeometries.stream().flatMap(g -> g.getBounds().getVertices().stream())
                .map(v -> (GeographicPosition)v).collect(Collectors.toSet());
        GeographicBoundingBox tileBbox = GeographicBoundingBox.getMinimumBoundingBox(positions);
        BoundingBox bbox = new BoundingBox(tileBbox.getMinLonD(), tileBbox.getMinLatD(), tileBbox.getMaxLonD(),
                tileBbox.getMaxLatD());
        return bbox;
    }

    /**
     * Gets the tile table name, derived from the data type's display name and
     * uniquified against tables already present in the package.
     *
     * @param geoPackage the geo package
     * @param dataType the data type
     * @return the tile table name
     */
    private static String getTileTableName(GeoPackage geoPackage, DataTypeInfo dataType)
    {
        String tableName = StringUtilities.replaceSpecialCharacters(dataType.getDisplayName()).replace('-', '_');
        List<String> existingTileTables = geoPackage.getTileTables();
        if (existingTileTables.contains(tableName))
        {
            tableName = StringUtilities.getUniqueName(tableName + "_", existingTileTables);
        }
        return tableName;
    }
}
|
<reponame>saurabhnative/FullStackDevlectures2021
import React, { useEffect, useState } from "react";
import axios from "axios";
export default function FoodItems() {
const [foodItems, updateFoodItems] = useState([]);
useEffect(() => {
axios
.get("https://evening-forest-95428.herokuapp.com/foodItems/list")
.then(function (response) {
// handle success
console.log(response.data.results);
updateFoodItems(response.data.results);
})
.catch(function (error) {
// handle error
console.log(error);
});
}, []);
function renderFoodItems() {
return foodItems.map((foodItem) => {
return (
<div className="border w-80 flex items-center justify-center flex-col">
<div>
<img
className={"w-60 h-40"}
src={foodItem.image}
alt="ice creams"
/>
</div>
<div>
<span className="font-bold">Food item:</span>
<span className="capitalize">{foodItem.name}</span>
</div>
<div>
<span className="font-bold">Price:</span>
<span> {foodItem.price}</span>
</div>
</div>
);
});
}
return (
<div className="flex justify-center items-center w-screen m-10">
{renderFoodItems()}
</div>
);
}
|
<reponame>krrrr38/mackerel-client-scala
package com.krrrr38.mackerel4s.serializer
import com.krrrr38.mackerel4s.model._
import org.json4s.JsonAST.JString
import org.json4s._
import org.json4s.jackson.Serialization
/**
 * Aggregates every custom json4s serializer used by the Mackerel client into
 * a single Formats instance.
 */
object MackerelSerializer {
  // NoTypeHints: concrete types are resolved by the custom serializers below,
  // so no type-hint metadata is embedded in the JSON.
  val FORMATS = Serialization.formats(NoTypeHints) +
    MonitorSerializer +
    FieldSerializer[Monitor]() +
    MonitorTypeSerializer +
    MonitorOperatorSerializer +
    HostStatusSerializer +
    AlertStatusSerializer +
    CheckReportStatusSerializer +
    GraphUnitTypeSerializer
}
/**
 * Polymorphic serializer for Monitor. Deserialization dispatches on the JSON
 * "type" field to the matching concrete monitor class; any unknown type or
 * malformed object raises MackerelClientException.
 */
object MonitorSerializer extends CustomSerializer[Monitor](implicit formats =>
  ({
    case jobject: JObject => jobject \ "type" match {
      case JString(typ) => MonitorType.fromString(typ) match {
        case Some(MonitorTypeConnectivity) => jobject.extract[ConnectivityMonitor]
        case Some(MonitorTypeHost) => jobject.extract[HostMonitor]
        case Some(MonitorTypeService) => jobject.extract[ServiceMonitor]
        case Some(MonitorTypeExternal) => jobject.extract[ExternalMonitor]
        case _ => throw new MackerelClientException("Failed to parse monitor object: " + jobject)
      }
      case jvalue => throw new MackerelClientException("Failed to parse monitor object: " + jvalue)
    }
    case jvalue => throw new MackerelClientException("Failed to parse monitor object: " + jvalue)
  }, {
    case monitor: Monitor =>
      // Serialize with a local Formats that omits this serializer itself,
      // avoiding infinite recursion, then re-parse into a JValue.
      val formatter = Serialization.formats(NoTypeHints) + MonitorTypeSerializer + FieldSerializer[Monitor]() + MonitorOperatorSerializer
      jackson.JsonMethods.parse(Serialization.write(monitor)(formatter))
  })
)
/** (De)serializes MonitorType values as plain JSON strings. */
object MonitorTypeSerializer extends CustomSerializer[MonitorType](formats =>
  ({
    case JString(typ) => MonitorType.fromString(typ).getOrElse(throw new MackerelClientException("Failed to parse monitor type: " + typ))
  }, {
    case monitorType: MonitorType => JString(monitorType.toString)
  })
)
/** (De)serializes MonitorOperator values as plain JSON strings. */
object MonitorOperatorSerializer extends CustomSerializer[MonitorOperator](formats =>
  ({
    case JString(operator) => MonitorOperator.fromString(operator).getOrElse(throw new MackerelClientException("Failed to parse monitor operator: " + operator))
  }, {
    case monitorOperator: MonitorOperator => JString(monitorOperator.toString)
  })
)
/** (De)serializes HostStatus values as plain JSON strings. */
object HostStatusSerializer extends CustomSerializer[HostStatus](formats =>
  ({
    case JString(status) => HostStatus.fromString(status).getOrElse(throw new MackerelClientException("Failed to parse host status: " + status))
  }, {
    case hostStatus: HostStatus => JString(hostStatus.toString)
  })
)
/** (De)serializes AlertStatus values as plain JSON strings. */
object AlertStatusSerializer extends CustomSerializer[AlertStatus](formats =>
  ({
    // Fixed copy-paste error: the failure message previously said "host
    // status" although this serializer parses alert statuses.
    case JString(status) => AlertStatus.fromString(status).getOrElse(throw new MackerelClientException("Failed to parse alert status: " + status))
  }, {
    case alertStatus: AlertStatus => JString(alertStatus.toString)
  })
)
/** (De)serializes CheckReportStatus values as plain JSON strings. */
object CheckReportStatusSerializer extends CustomSerializer[CheckReportStatus](formats =>
  ({
    case JString(status) => CheckReportStatus.fromString(status).getOrElse(throw new MackerelClientException("Failed to parse check report status: " + status))
  }, {
    case checkReportStatus: CheckReportStatus => JString(checkReportStatus.toString)
  })
)
/** (De)serializes GraphUnitType values as plain JSON strings. */
object GraphUnitTypeSerializer extends CustomSerializer[GraphUnitType](formats =>
  ({
    case JString(typ) => GraphUnitType.fromString(typ).getOrElse(throw new MackerelClientException("Failed to parse graph unit type: " + typ))
  }, {
    case typ: GraphUnitType => JString(typ.toString)
  })
)
|
<reponame>15949746733/PhotoPicker<gh_stars>0
package com.walkermanx.photopicker;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Bundle;
import android.os.Parcelable;
import android.transition.Transition;
import android.transition.TransitionInflater;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.widget.Toolbar;
import androidx.core.app.ActivityCompat;
import androidx.core.app.SharedElementCallback;
import androidx.core.content.ContextCompat;
import androidx.core.view.ViewCompat;
import androidx.viewpager.widget.ViewPager;
import com.google.android.material.snackbar.Snackbar;
import com.walkermanx.BaseActivity;
import com.walkermanx.photopicker.fragment.ImagePagerFragment;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static com.walkermanx.photopicker.PhotoPicker.KEY_SELECTED_INDEX;
import static com.walkermanx.photopicker.PhotoPicker.KEY_SELECTED_PHOTOS;
/**
 * Full-screen photo pager with optional toolbar and delete support.
 * Created by donglua on 15/6/24.
 */
// modify PhotoPagerActivity.java add showToolbar
public class PhotoPagerActivity extends BaseActivity {

    /** Fragment hosting the ViewPager of photos. */
    private ImagePagerFragment pagerFragment;

    /** Action bar used for the "index / total" title. */
    private ActionBar actionBar;

    Toolbar mToolbar;

    /** Whether the delete menu item / toolbar are shown, from intent extras. */
    private boolean showDelete, showToolbar;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        showToolbar = getIntent().getBooleanExtra(PhotoPreview.EXTRA_SHOW_TOOLBAR, true);
        if (!showToolbar) {
            // Without a toolbar the pager runs edge-to-edge full screen.
            getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        }
        // Must be configured before setContentView for shared-element transitions.
        prepareSharedElementTransition();
        setContentView(R.layout.__picker_activity_photo_pager);
        // Unpack the photos, starting index and options from the launch intent.
        int currentItem = getIntent().getIntExtra(PhotoPreview.EXTRA_CURRENT_ITEM, 0);
        List<String> paths = getIntent().getStringArrayListExtra(PhotoPreview.EXTRA_PHOTOS);
        Parcelable thumbnail = getIntent().getParcelableExtra(PhotoPreview.EXTRA_PHOTO_THUMBNAIL);
        ArrayList<String> longData = getIntent().getStringArrayListExtra(PhotoPreview.EXTRA_LONG_DATA);
        showDelete = getIntent().getBooleanExtra(PhotoPreview.EXTRA_SHOW_DELETE, true);
        if (!showToolbar) {
            // Delete lives in the toolbar menu; no toolbar means no delete.
            showDelete = false;
        }
        if (pagerFragment == null) {
            pagerFragment = (ImagePagerFragment) getSupportFragmentManager().findFragmentById(R.id.photoPagerFragment);
        }
        pagerFragment.setPhotos(paths, currentItem, longData, thumbnail);
        mToolbar = findViewById(R.id.toolbar);
        // Set all of the Toolbar coloring
        mToolbar.setBackgroundColor(ContextCompat.getColor(this, toolbarColor));
        mToolbar.setTitleTextColor(ContextCompat.getColor(this, toolbarWidgetColor));
        mToolbar.setContentInsetStartWithNavigation(getResources().getDimensionPixelSize(titleMarginStart));
        mToolbar.setTitleMarginStart(getResources().getDimensionPixelSize(titleMarginStart));
        setSupportActionBar(mToolbar);
        actionBar = getSupportActionBar();
        if (actionBar != null) {
            actionBar.setDisplayHomeAsUpEnabled(true);
            updateActionBarTitle();
            // if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            // actionBar.setElevation(25);
            // }
        }
        /**
         * Wait until the toolbar has been drawn before tinting it. When
         * toolbarWidgetColor != toolbarWidgetColorVal the tint is considered
         * to have been set programmatically, so applyTint is executed on the
         * toolbar to color it.
         */
        if (isManual()) {
            mToolbar.post(new Runnable() {
                @Override
                public void run() {
                    applyTint(mToolbar, ContextCompat.getColor(mToolbar.getContext(), toolbarWidgetColor));
                }
            });
        }
        // Keep the "index / total" title in sync while the user swipes.
        pagerFragment.getViewPager().addOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
            @Override
            public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
                updateActionBarTitle();
            }
        });
        if (!showToolbar) {
            mToolbar.setVisibility(View.GONE);
        }
    }

    /** Configures the shared-element enter transition (Lollipop and above). */
    private void prepareSharedElementTransition() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            getWindow().requestFeature(Window.FEATURE_CONTENT_TRANSITIONS | Window.FEATURE_ACTIVITY_TRANSITIONS);
            Transition transition = TransitionInflater.from(this).inflateTransition(R.transition.__picker_image_shared_element_transition);
            getWindow().setSharedElementEnterTransition(transition);
            ActivityCompat.setEnterSharedElementCallback(this, new SharedElementCallback() {
                @Override
                public void onMapSharedElements(List<String> names, Map<String, View> sharedElements) {
                    if (pagerFragment.getView() == null) {
                        return;
                    }
                    // Map the transition name to the currently displayed page view.
                    View itemView = pagerFragment.getShareElement();
                    // ImageView imageView = itemView.findViewById(R.id.iv_pager);
                    if (itemView == null) {
                        return;
                    }
                    Log.e("onMapSharedElements", "Enter:names= " + names.get(0));
                    Log.e("onMapSharedElements", "Enter:TransitionName= " + ViewCompat.getTransitionName(itemView));
                    sharedElements.put(names.get(0), itemView);
                }
            });
            // Delay the transition until the pager has content to animate.
            postponeEnterTransition();
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        if (showDelete) {
            getMenuInflater().inflate(R.menu.__picker_menu_preview, menu);
            // Tint the delete icon to match the toolbar widget color.
            MenuItem menuItem = menu.findItem(R.id.delete);
            Drawable menuItemIcon = menuItem.getIcon();
            if (menuItemIcon != null) {
                menuItemIcon.mutate();
                menuItemIcon.setColorFilter(ContextCompat.getColor(this, toolbarWidgetColor), PorterDuff.Mode.SRC_ATOP);
                menuItem.setIcon(menuItemIcon);
            }
        }
        return true;
    }

    @Override
    public void onBackPressed() {
        // Return the (possibly edited) photo list and current index to the caller.
        Intent intent = new Intent();
        intent.putExtra(KEY_SELECTED_PHOTOS, pagerFragment.getPaths());
        intent.putExtra(KEY_SELECTED_INDEX, pagerFragment.getCurrentItem());
        setResult(RESULT_OK, intent);
        super.onBackPressed();
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == android.R.id.home) {
            onBackPressed();
            return true;
        }
        if (item.getItemId() == R.id.delete) {
            final int index = pagerFragment.getCurrentItem();
            final String deletedPath = pagerFragment.getPaths().get(index);
            Snackbar snackbar = Snackbar.make(pagerFragment.getView(), R.string.__picker_deleted_a_photo,
                    Snackbar.LENGTH_LONG);
            if (pagerFragment.getPaths().size() <= 1) {
                // Deleting the last photo closes the screen; confirm first.
                // show confirm dialog
                new AlertDialog.Builder(this)
                        .setTitle(R.string.__picker_confirm_to_delete)
                        .setPositiveButton(R.string.__picker_yes, new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialogInterface, int i) {
                                dialogInterface.dismiss();
                                pagerFragment.getPaths().remove(index);
                                pagerFragment.getViewPager().getAdapter().notifyDataSetChanged();
                                onBackPressed();
                            }
                        })
                        .setNegativeButton(R.string.__picker_cancel, new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialogInterface, int i) {
                                dialogInterface.dismiss();
                            }
                        })
                        .show();
            } else {
                // Delete immediately and offer an undo via the snackbar.
                snackbar.show();
                pagerFragment.getPaths().remove(index);
                pagerFragment.getViewPager().getAdapter().notifyDataSetChanged();
            }
            snackbar.setAction(R.string.__picker_undo, new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    // Restore the deleted photo at its old position when possible.
                    if (pagerFragment.getPaths().size() > 0) {
                        pagerFragment.getPaths().add(index, deletedPath);
                    } else {
                        pagerFragment.getPaths().add(deletedPath);
                    }
                    pagerFragment.getViewPager().getAdapter().notifyDataSetChanged();
                    pagerFragment.getViewPager().setCurrentItem(index, true);
                }
            });
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /** Sets the action bar title to "current index / total count". */
    public void updateActionBarTitle() {
        if (actionBar != null) {
            actionBar.setTitle(
                    getString(R.string.__picker_image_index, pagerFragment.getViewPager().getCurrentItem() + 1,
                            pagerFragment.getPaths().size()));
        }
    }
}
|
<reponame>aalkilani/ForestFlow<filename>core/src/main/scala/com/dreamworks/forestflow/serving/interfaces/ServeRequest.scala
/**
* Copyright 2020 DreamWorks Animation L.L.C.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dreamworks.forestflow.serving.interfaces
import com.dreamworks.forestflow.domain._
import com.dreamworks.forestflow.utils.SourceStorageProtocols
import com.dreamworks.forestflow.utils.SourceStorageProtocols.SupportsFQRVExtraction
/**
 * Common contract for a request to serve a model. Implementations supply the
 * artifact path, optional explicit FQRV, and servable/contract settings;
 * this trait resolves the effective FQRV and validates the request.
 */
trait ServeRequest extends Product {
  this: ServeRequestShim => // if you implement ServeRequest then you must extend ServeRequestShim as well (gotta think about this)

  def path: String

  // Resolve the storage protocol implied by the path, if any.
  protected[this] def protocolOpt = SourceStorageProtocols.getProtocolOption(path)

  def fqrv: Option[FQRV]

  def servableSettings: ServableSettings

  val contractSettings : Option[ContractSettings]

  def withServableSettings(servableSettings: ServableSettings): ServeRequestShim

  def withContractSettings(contractSettings: ContractSettings): ServeRequestShim

  /**
   * Use to resolve an FQRV based on protocol path or supplied fqrv.proto
   * Supplied fqrv.proto is guarded for correctness (availability) by pathRequirements and require statements
   *
   * @return
   */
  def getUltimateFQRV: FQRV = {
    protocolOpt match {
      case Some(p: SupportsFQRVExtraction) =>
        // Prefer an explicitly supplied FQRV; otherwise extract it from the path.
        fqrv.getOrElse(p.getFQRV(path).get)
      case _ =>
        fqrv.get
    }
  }

  /** Validates the path/FQRV combination and the logging settings; throws on failure. */
  def checkRequirements(): Unit = {
    def pathRequirement = {
      protocolOpt match {
        case None => (false, Some(s"path provided doesn't follow a supported protocol: $path"))
        case Some(protocol: SupportsFQRVExtraction) if protocol.hasValidFQRV(path) =>
          (true, None) // "Received request for protocol with SupportsFQRVExtraction"
        case Some(p: SourceStorageProtocols.EnumVal) if fqrv.isEmpty => (false, Some(s"FQRV (Fully Qualified Release Version) is required with protocols that don't have implicit FQRV extraction support or where path doesn't follow FQRV extraction requirements"))
        case Some(p: SourceStorageProtocols.EnumVal) => (true, None)
      }
    }
    val (pathValid, msg) = pathRequirement
    require(pathValid, msg.getOrElse("Invalid path"))
    // NOTE(review): this require also fails whenever loggingSettings is None,
    // which the failure message does not mention — confirm whether logging
    // settings are intended to be mandatory here.
    require(
      servableSettings.loggingSettings.isDefined &&
        !(servableSettings.loggingSettings.get.logLevel != LogLevel.NONE &&
          servableSettings.loggingSettings.get.keyFeatures.isEmpty
          ),
      "LogLevel cannot be specified without defining set of features that define the key for logged messages")
  }
}
/*
trait MLFlowModel {
this: ServeRequest =>
}
trait BasicModel {
this: ServeRequest =>
}*/
|
<gh_stars>0
import React, { useState } from "react";
import "../styles/ContactForm.css";
import {validateEmail} from "../utils/helpers";
import {Snackbar, Alert} from "@mui/material/";
function ContactForm() {
const [email, setEmail] = useState("");
const [name, setName] = useState("");
const [message, setMessage] = useState("");
const [errorMessage, setErrorMessage] = useState("");
const [error, setError] = useState(false);
const [submitted, setSubmitted] = useState(false);
const handleInputChange = (e) => {
const inputType = e.target.name;
const inputValue = e.target.value;
if(inputType === "name") {
setName(inputValue);
}else if(inputType === "email") {
setEmail(inputValue);
}else if(inputType === "message") {
setMessage(inputValue);
}
}
const handleBlurShittyInput = (e) => {
// console.log("Blur event");
// console.log(e.target);
e.preventDefault();
const inputType = e.target.name;
// console.log(inputType);
setTimeout(() => {
if(name === "" && inputType ==="name") {
setError(true);
setErrorMessage("Name field is required!");
return;
}else if(!validateEmail(email) && inputType === "email") {
setError(true);
setErrorMessage("Invalid email!");
return;
}else if(message === "" && inputType === "message") {
setError(true);
setErrorMessage("Message field is required!");
return;
}
}, 1000)
}
const handleSubmit = (e) => {
e.preventDefault();
setName("");
setEmail("");
setMessage("");
setSubmitted(true);
}
const handleErrorClose = (e) => {
// e.preventDefault();
setErrorMessage("");
setError(false);
}
const handleSubmitClose = () => {
setSubmitted(false);
}
// console.log("errormessage", errorMessage,"name", name, "email", email,"message", message);
return(
<div className="container">
<form>
<h2 style={{marginBottom: 20, fontSize: 40}}>Contact Me</h2>
<label style={{fontSize: 20}} htmlFor="name">Name:</label>
<input className="inputStyling" id="name" name="name" type="text" placeholder="Name" value={name} onChange={handleInputChange} onBlur={handleBlurShittyInput}></input>
<label style={{fontSize: 20}} htmlFor="email" value={email}>Email:</label>
<input className="inputStyling" id="email" name="email" type="text" placeholder="Email" onChange={handleInputChange} onBlur={handleBlurShittyInput}></input>
<label style={{fontSize: 20}} htmlFor="message">Message:</label>
<textarea className="inputStyling" id="message" name="message" placeholder="Message" value={message} style={{height:300}} onChange={handleInputChange} onBlur={handleBlurShittyInput}></textarea>
<button className="button" type="submit" onClick={handleSubmit}>Submit</button>
</form>
<Snackbar open={error} autoHideDuration={6000} onClose={handleErrorClose}>
<Alert severity="error" onClose={handleErrorClose} sx={{ width: '100%' }}>{errorMessage}</Alert>
</Snackbar>
<Snackbar open={submitted} autoHideDuration={6000} onClose={handleSubmitClose}>
<Alert severity="success" onClose={handleSubmitClose} sx={{width: '100%'}}>Message submitted!</Alert>
</Snackbar>
</div>
)
}
export default ContactForm; |
<filename>src/main/java/frc/robot/auto/commands/shooter/PreFireShooterCommand.java
package frc.robot.auto.commands.shooter;
import frc.robot.auto.Command;
/**
 * Autonomous command that spins the shooter flywheels to the requested
 * velocities and finishes immediately (a fire-and-forget pre-spin).
 */
public class PreFireShooterCommand extends Command {

    // Target velocity for the top flywheel, in RPM.
    double topVelRPM;

    // Target velocity for the bottom flywheel, in RPM.
    double bottomVelRPM;

    /**
     * Constructor.
     *
     * @param topVelRPM    target top flywheel velocity in RPM
     * @param bottomVelRPM target bottom flywheel velocity in RPM
     */
    public PreFireShooterCommand(double topVelRPM, double bottomVelRPM) {
        this.topVelRPM = topVelRPM;
        this.bottomVelRPM = bottomVelRPM;
    }

    @Override
    public void start() {
        // Push the setpoints to the flywheel subsystem once at start.
        robot.getFlywheel().update(topVelRPM, bottomVelRPM);
    }

    @Override
    public void loop() {}

    @Override
    public boolean isFinished() {
        // Completes immediately; the flywheel keeps spinning after this command.
        return true;
    }

    @Override
    public void close() {
        // TODO Auto-generated method stub
    }
}
|
from transformers import BertTokenizer, BertForMaskedLM
import torch
class BERTCorrector:
    """Masked-language-model spelling corrector.

    Replaces every ``[MASK]`` token in the input text with BERT's highest
    scoring prediction for that position.
    """

    def __init__(self, bert_model_dir, bert_model_vocab, max_seq_length):
        # Tokenizer and masked-LM weights loaded from local paths.
        self.tokenizer = BertTokenizer.from_pretrained(bert_model_vocab)
        self.model = BertForMaskedLM.from_pretrained(bert_model_dir)
        # NOTE(review): max_seq_length is stored but never used below —
        # confirm whether inputs should be truncated to this length.
        self.max_seq_length = max_seq_length

    def correct_spelling(self, input_text: str) -> str:
        """Return ``input_text`` with each ``[MASK]`` token replaced by the
        model's top prediction; text without masks is returned unchanged
        (modulo tokenizer round-tripping)."""
        tokenized_text = self.tokenizer.tokenize(input_text)
        indexed_tokens = self.tokenizer.convert_tokens_to_ids(tokenized_text)
        # Find masked tokens (misspelled words)
        masked_index = [i for i, token in enumerate(tokenized_text) if token == '[MASK]']
        for i in masked_index:
            # Rebuild the id sequence with the mask id at position i.
            # NOTE(review): indexed_tokens is not refreshed after earlier
            # replacements, so each mask is predicted from the original
            # (still-masked) context — confirm this is intended.
            input_ids = indexed_tokens[:i] + [self.tokenizer.mask_token_id] + indexed_tokens[i+1:]
            tensor_input_ids = torch.tensor([input_ids])
            with torch.no_grad():
                outputs = self.model(tensor_input_ids)
                predictions = outputs[0]
            # Take the argmax over the vocabulary at the masked position.
            predicted_index = torch.argmax(predictions[0, i]).item()
            predicted_token = self.tokenizer.convert_ids_to_tokens([predicted_index])[0]
            tokenized_text[i] = predicted_token
        corrected_text = self.tokenizer.convert_tokens_to_string(tokenized_text)
        return corrected_text
package sqldb
import (
"testing"
"time"
)
// TestStringScannable_Scan verifies that Scan converts each supported
// driver source type (int64, float64, bool, []byte, string, time.Time and
// nil) into the expected string value.
func TestStringScannable_Scan(t *testing.T) {
	now := time.Now()

	cases := []struct {
		name     string
		expected StringScannable
		src      interface{}
		wantErr  bool
	}{
		{name: "int64", expected: "-66", src: int64(-66)},
		{name: "float64", expected: "-66.6", src: float64(-66.6)},
		{name: "bool", expected: "true", src: true},
		{name: "[]byte", expected: "Hello World!", src: []byte("Hello World!")},
		{name: "string", expected: "Hello World!", src: "Hello World!"},
		{name: "time.Time", expected: StringScannable(now.String()), src: now},
		{name: "nil", expected: "", src: nil},
	}

	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			var got StringScannable
			err := got.Scan(tc.src)
			if (err != nil) != tc.wantErr {
				t.Errorf("StringScannable.Scan() error = %v, wantErr %v", err, tc.wantErr)
			}
			if got != tc.expected {
				t.Errorf("StringScannable.Scan() expected = %v, got %v", tc.expected, got)
			}
		})
	}
}
// int64
|
// src: https://ru.hexlet.io/courses/js-testing/lessons/goal/exercise_unit
// tests/collection.test.js
// Write tests for the function get(obj, key, defaultValue). The function
// extracts a value from an object provided the key exists; otherwise it
// returns defaultValue.
// The tests follow the pattern described in the lesson's theory section:
// check with an `if` and throw an exception when the test fails.
// Good coverage of this function needs at least three tests:
// - Check that the function returns the right value for an existing key
//   (direct test of basic operation)
// - Check that the default value is returned when the key is absent
// - Check that the value for an existing key is returned even when a
//   default value is also supplied (edge case)
// @ts-check
const getFunction = require('../functions');
const get = getFunction();
// BEGIN (write your solution here)
// Test 1: existing key returns its value.
if (get({ key: 'value' }, 'key') !== 'value') {
  throw new Error('функция НЕ возвращает нужное значение по существующему ключу!');
}
// Test 2: missing key falls back to the default value.
if (get({}, 'key', 'defaultValue') !== 'defaultValue') {
  throw new Error('Функция НЕ возвращает значение по умолчанию если ключа нет!');
}
// Test 3: existing key wins even when a default is supplied.
if (get({ key: 'value' }, 'key', 'defaultValue') !== 'value') {
  throw new Error(
    'Функция НЕ возвращает значение по существующему ключу, если передано значение по умолчанию'
  );
}
// END
|
<reponame>godbobo/Admin
package com.aqzscn.www.global.config.aspect;
import com.aqzscn.www.global.util.JacksonUtil;
import com.fasterxml.jackson.annotation.JsonFilter;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.validation.BindingResult;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
 * Logs request parameters and response body information for every
 * controller invocation.
 *
 * @author Godbobo
 * @date 2019/5/31
 */
@Aspect
@Component
public class LogAspect {

    private final JacksonUtil jacksonUtil = new JacksonUtil();

    /** Matches every public controller method of the global, blog and weixin modules. */
    @Pointcut("execution(public * com.aqzscn.www.global.controller..*.*(..)) || execution(public * com.aqzscn.www.blog.controller..*.*(..)) || execution(public * com.aqzscn.www.weixin.controller..*.*(..))")
    public void log() {}

    /**
     * Logs the incoming request (ip, method, url, JSON-serialized arguments)
     * before proceeding, and the elapsed time, HTTP status and serialized
     * result afterwards. A random UUID ties the two log lines of one
     * invocation together.
     *
     * @param proceedingJoinPoint the intercepted controller call
     * @return whatever the controller returns, unchanged
     * @throws Throwable anything the controller throws, propagated as-is
     */
    @Around(value = "log()")
    public Object around(ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
        String uuid = UUID.randomUUID().toString();
        // Use the target class's logger so lines are attributed to the controller.
        Logger logger = LoggerFactory.getLogger(proceedingJoinPoint.getTarget().getClass());
        // Request received -- record its details.
        ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
        if (attributes != null) {
            HttpServletRequest request = attributes.getRequest();
            String method = request.getMethod();
            String ip = request.getRemoteHost();
            String url = request.getRequestURL().toString();
            List<String> params = new ArrayList<>();
            for (Object obj : proceedingJoinPoint.getArgs()) {
                // Skip validation results; serialize everything else to JSON.
                if (!(obj instanceof BindingResult)) {
                    params.add(jacksonUtil.toJson(obj));
                }
            }
            logger.info("[{}] {} {} {} 参数: {}", uuid, ip, method, url, params.toString());
        } else {
            logger.info("[{}] 未获取到请求信息", uuid);
        }
        long startTime = System.currentTimeMillis();
        Object result = proceedingJoinPoint.proceed();
        // Call completed -- record the response.
        long endTime = System.currentTimeMillis();
        String resultData;
        if (result == null) {
            resultData = "";
        } else if (result instanceof String) {
            resultData = result.toString();
        } else {
            resultData = jacksonUtil.toJson(result);
        }
        int status = 500;
        if (attributes != null && attributes.getResponse() != null) {
            status = attributes.getResponse().getStatus();
        }
        logger.info("[{}] 耗时 {}ms STATUS: {} 返回信息: {}", uuid, endTime - startTime, status, resultData);
        return result;
    }
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-2413-1
#
# Security announcement date: 2012-02-20 00:00:00 UTC
# Script generation date:     2017-01-01 21:06:21 UTC
#
# Operating System: Debian 6 (Squeeze)
# Architecture: i386
#
# Vulnerable packages fix on version:
#    - libarchive:2.8.4-1+squeeze1
#
# Last versions recommanded by security team:
#    - libarchive:2.8.4.forreal-1+squeeze3
#
# CVE List:
#    - CVE-2011-1777
#    - CVE-2011-1778
#
# More details:
#    - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Pin the exact version recommended by the security team.
FIXED_VERSION="2.8.4.forreal-1+squeeze3"
sudo apt-get install --only-upgrade "libarchive=${FIXED_VERSION}" -y
|
<gh_stars>0
package pg
import (
"testing"
. "github.com/iostrovok/check"
)
// testSuite groups this package's gocheck tests.
type testSuite struct{}

// Register the suite with the gocheck runner.
var _ = Suite(&testSuite{})

// TestService hooks gocheck into the standard "go test" runner.
func TestService(t *testing.T) { TestingT(t) }

// test syntax only
func (s *testSuite) Test(c *C) {
	c.Assert(true, Equals, true)
}
|
#!/bin/bash
# Development launcher: exports the API app's configuration into the
# environment, derives a version string from the nearest VERSION file,
# then starts the Python application.
APP_DIR=.
export APIAPP_MODE=DEVELOPER
export APIAPP_FRONTEND=_
export APIAPP_APIURL="http://localhost:80/api"
export APIAPP_FRONTENDURL="http://localhost.com:80/frontend"
export APIAPP_APIACCESSSECURITY="[{\"type\": \"basic-auth\" }]"
export APIAPP_USERFORJOBS=dockjobuser
export APIAPP_GROUPFORJOBS=dockjobgroup
export APIAPP_VERSION=
# Look for the VERSION file one and two directories up; the deeper match wins.
if [ -f "${APP_DIR}/../VERSION" ]; then
  APIAPP_VERSION="${0}-$(cat "${APP_DIR}/../VERSION")"
fi
if [ -f "${APP_DIR}/../../VERSION" ]; then
  APIAPP_VERSION="${0}-$(cat "${APP_DIR}/../../VERSION")"
fi
# Quoted test stays valid even if the version string contains spaces.
if [ "E${APIAPP_VERSION}" = 'E' ]; then
  echo 'Can not find version file in standard locations'
  exit 1
fi
APIAPP_VERSION=DEVELOPMENT-${APIAPP_VERSION}
python3 ./src/app.py
# NOTE(review): 'exit 0' masks python3's exit status -- confirm this is intended.
exit 0
|
<gh_stars>1-10
import { useRef, useState, useEffect, FormEvent } from 'react';
import emailjs from 'emailjs-com';
import { toast } from 'react-toastify';
import moment from 'moment-timezone';
import EmailJsConf from '../configuration/emailjs';
// Shape of the order form's input values (all kept as strings, as entered).
interface OrderData {
  email: string;
  discord: string;
  package: string;
  message: string;
  budget: string;
}
// Per-field validation state: `false` means valid, a string is the error
// message shown for that field.
interface OrderErrors {
  email: string | boolean;
  discord: string | boolean;
  package: string | boolean;
  message: string | boolean;
  budget: string | boolean;
}
/**
 * Order form with client-side validation.
 *
 * Field values live in `orderData`; per-field validation state lives in
 * `orderErrors` (false = valid, string = error message). Submitting runs
 * the validators and sets `clicked`; the effect below sends the email via
 * EmailJS once a click has been registered with no outstanding errors.
 */
function OrderForm() {
  const [orderData, setOrderData] = useState<OrderData>({
    email: '',
    discord: '',
    package: '',
    message: '',
    budget: '',
  });
  const [orderErrors, setOrderErrors] = useState<OrderErrors>({
    email: false,
    discord: false,
    package: false,
    message: false,
    budget: false,
  });
  const [clicked, setClicked] = useState(false);
  // No `g` flag here: a global regex keeps `lastIndex` between `.exec()`
  // calls, so repeated submissions could validate from a stale offset and
  // wrongly reject a correct discord tag.
  const regexDiscord = /^((.+?)#\d{4})/m;
  const regexEmail =
    /^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
  const form = useRef<HTMLFormElement>(null);

  // Write a single field's value into the form state.
  const setValue = (field: keyof OrderData, e: FormEvent) => {
    const { value } = e.target as HTMLInputElement;
    setOrderData({
      ...orderData,
      [field]: value,
    });
  };

  // Flag every empty field as required; clear the flag on filled fields.
  const checkIsRequired = () => {
    Object.keys(orderData).forEach((key: string) => {
      if (orderData[key as keyof OrderData] === '') {
        setOrderErrors((initialState) => ({
          ...initialState,
          [key]: 'This field is required',
        }));
      } else {
        setOrderErrors((initialState) => ({
          ...initialState,
          [key]: false,
        }));
      }
    });
  };

  // Submit handler: required checks plus format checks for email/discord.
  const validateForm = (e: FormEvent) => {
    e.preventDefault();
    checkIsRequired();
    if (regexEmail.exec(orderData.email) === null)
      setOrderErrors((initialState) => ({
        ...initialState,
        email: 'Enter correct address email!',
      }));
    if (regexDiscord.exec(orderData.discord) === null)
      setOrderErrors((initialState) => ({
        ...initialState,
        discord: 'Enter correct discord tag!',
      }));
    setClicked(true);
  };

  useEffect(() => {
    // True only when every entry in `orderErrors` is `false` (no messages).
    const noFieldErrors = Object.values(orderErrors).every((error) => error === false);
    if (clicked && noFieldErrors) {
      emailjs
        .sendForm(
          EmailJsConf.serviceId,
          EmailJsConf.orderTemplate,
          form.current as HTMLFormElement,
          EmailJsConf.userId
        )
        .then(() => toast.success('Email sent.'))
        .catch(() => toast.error('Error sending email.'));
      // Reset the form after a successful submission attempt.
      setOrderData({
        email: '',
        discord: '',
        package: '',
        message: '',
        budget: '',
      });
    }
    return () => setClicked(false);
  }, [clicked, orderErrors]);

  return (
    <form ref={form} onSubmit={validateForm} className="personal-order__form">
      <div>
        <label htmlFor="email">
          Email
          <input
            type="text"
            className={orderErrors.email ? 'input-error' : ''}
            name="email"
            id="email"
            placeholder={orderErrors.email ? 'This field is required' : 'Email'}
            value={orderData.email}
            onChange={(e) => setValue('email', e)}
          />
        </label>
        {orderErrors.email === 'Enter correct address email!' ? (
          <p className="p-error">{orderErrors.email}</p>
        ) : null}
      </div>
      <div>
        <label htmlFor="discord">
          Discord
          <input
            type="text"
            className={orderErrors.discord ? 'input-error' : ''}
            name="discord"
            id="discord"
            placeholder={orderErrors.discord ? 'This field is required' : 'Discord'}
            value={orderData.discord}
            onChange={(e) => setValue('discord', e)}
          />
        </label>
        {orderErrors.discord === 'Enter correct discord tag!' ? (
          <p className="p-error">{orderErrors.discord}</p>
        ) : null}
      </div>
      {/* The sender's timezone is guessed client-side and sent as a hidden field. */}
      <input type="hidden" name="timezone" id="timezone" defaultValue={moment.tz.guess()} />
      <label htmlFor="package">
        Package
        <select
          className={orderErrors.package ? 'input-error' : ''}
          name="package"
          id="package"
          defaultValue={orderData.package}
          onChange={(e) => setValue('package', e)}
        >
          <option value="" disabled hidden>
            Select package:
          </option>
          <option value="basic">Basic</option>
          <option value="standard">Standard</option>
          <option value="premium">Premium</option>
        </select>
      </label>
      <label htmlFor="order-desc">
        Order description
        <textarea
          className={orderErrors.message ? 'input-error' : ''}
          name="order-desc"
          id="order-desc"
          placeholder={orderErrors.message ? `${orderErrors.message}` : 'Order description'}
          value={orderData.message}
          onChange={(e) => setValue('message', e)}
        />
      </label>
      <label htmlFor="budget">
        Budget (in $)
        <input
          type="number"
          className={orderErrors.budget ? 'input-error' : ''}
          name="budget"
          id="budget"
          placeholder={orderErrors.budget ? `${orderErrors.budget}` : 'Budget'}
          value={orderData.budget}
          onChange={(e) => setValue('budget', e)}
          min={0}
        />
      </label>
      <input type="submit" value="Send" />
    </form>
  );
}
export default OrderForm;
|
<filename>code/extract_asthma.py<gh_stars>1-10
#
# extract_asthma.py
#
# Author:
#   <NAME>
#
# This file extracts the asthma patients' episode directories for the ids
# listed in died.csv, copying each one from the full episode store into the
# working episode directory.
import os
import csv
import shutil
import directories

data_source_dir = directories.all_episode_data
data_target_dir = directories.episode_data
#os.mkdir(data_target_dir)

with open(directories.processed_csv + 'died.csv', newline='') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        print(row)
        if len(row) > 0:
            source = os.path.join(data_source_dir, row['id'])
            target = os.path.join(data_target_dir, row['id'])
            # shutil.copytree replaces the former os.spawnlp('cp', '-r', ...):
            # it is portable and raises on failure instead of silently
            # discarding the subprocess exit status.
            # dirs_exist_ok keeps the old "cp -r into existing dir" behavior
            # (requires Python 3.8+).
            shutil.copytree(source, target, dirs_exist_ok=True)
<filename>imageeditor/src/main/java/com/createchance/imageeditor/filters/GPUImageGlassSphereFilter.java
/*
* Copyright (C) 2012 CyberAgent
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.createchance.imageeditor.filters;
import android.graphics.PointF;
import android.opengl.GLES20;
/**
 * GPUImage filter that renders the input image as seen through a glass
 * sphere: texels inside the sphere's radius are refracted through a virtual
 * sphere, lit with a grazing-angle ambient term and a specular highlight,
 * and everything outside the sphere is rendered black.
 */
public class GPUImageGlassSphereFilter extends GPUImageFilter {
    // Fragment shader: refracts each texel around `center`, then adds two
    // lighting terms and masks the result to the sphere via a step().
    public static final String SPHERE_FRAGMENT_SHADER = "" +
            "varying highp vec2 textureCoordinate;\n" +
            "\n" +
            "uniform sampler2D inputImageTexture;\n" +
            "\n" +
            "uniform highp vec2 center;\n" +
            "uniform highp float radius;\n" +
            "uniform highp float aspectRatio;\n" +
            "uniform highp float refractiveIndex;\n" +
            "// uniform vec3 lightPosition;\n" +
            "const highp vec3 lightPosition = vec3(-0.5, 0.5, 1.0);\n" +
            "const highp vec3 ambientLightPosition = vec3(0.0, 0.0, 1.0);\n" +
            "\n" +
            "void main()\n" +
            "{\n" +
            "highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n" +
            "highp float distanceFromCenter = distance(center, textureCoordinateToUse);\n" +
            "lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius);\n" +
            "\n" +
            "distanceFromCenter = distanceFromCenter / radius;\n" +
            "\n" +
            "highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);\n" +
            "highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));\n" +
            "\n" +
            "highp vec3 refractedVector = 2.0 * refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);\n" +
            "refractedVector.xy = -refractedVector.xy;\n" +
            "\n" +
            "highp vec3 finalSphereColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5).rgb;\n" +
            "\n" +
            "// Grazing angle lighting\n" +
            "highp float lightingIntensity = 2.5 * (1.0 - pow(clamp(dot(ambientLightPosition, sphereNormal), 0.0, 1.0), 0.25));\n" +
            "finalSphereColor += lightingIntensity;\n" +
            "\n" +
            "// Specular lighting\n" +
            "lightingIntensity  = clamp(dot(normalize(lightPosition), sphereNormal), 0.0, 1.0);\n" +
            "lightingIntensity  = pow(lightingIntensity, 15.0);\n" +
            "finalSphereColor += vec3(0.8, 0.8, 0.8) * lightingIntensity;\n" +
            "\n" +
            "gl_FragColor = vec4(finalSphereColor, 1.0) * checkForPresenceWithinSphere;\n" +
            "}\n";
    // Sphere center in normalized texture coordinates.
    private PointF mCenter;
    private int mCenterLocation;
    // Sphere radius in normalized texture coordinates.
    private float mRadius;
    private int mRadiusLocation;
    // height / width of the output, used to keep the sphere circular.
    private float mAspectRatio;
    private int mAspectRatioLocation;
    // Index passed to the shader's refract() call.
    private float mRefractiveIndex;
    private int mRefractiveIndexLocation;
    /** Creates the filter with a centered sphere, radius 0.25 and refractive index 0.71. */
    public GPUImageGlassSphereFilter() {
        this(new PointF(0.5f, 0.5f), 0.25f, 0.71f);
    }
    /**
     * @param center          sphere center in normalized texture coordinates
     * @param radius          sphere radius in normalized texture coordinates
     * @param refractiveIndex refraction index used by the shader
     */
    public GPUImageGlassSphereFilter(PointF center, float radius, float refractiveIndex) {
        super(NO_FILTER_VERTEX_SHADER, SPHERE_FRAGMENT_SHADER);
        mCenter = center;
        mRadius = radius;
        mRefractiveIndex = refractiveIndex;
    }
    @Override
    public void onInit() {
        super.onInit();
        // Look up the shader uniform handles once the program is linked.
        mCenterLocation = GLES20.glGetUniformLocation(getProgram(), "center");
        mRadiusLocation = GLES20.glGetUniformLocation(getProgram(), "radius");
        mAspectRatioLocation = GLES20.glGetUniformLocation(getProgram(), "aspectRatio");
        mRefractiveIndexLocation = GLES20.glGetUniformLocation(getProgram(), "refractiveIndex");
    }
    @Override
    public void onInitialized() {
        super.onInitialized();
        // Push the configured values to the now-available uniforms.
        setRadius(mRadius);
        setCenter(mCenter);
        setRefractiveIndex(mRefractiveIndex);
    }
    @Override
    public void onOutputSizeChanged(int width, int height) {
        // Recompute the aspect ratio whenever the output surface changes.
        mAspectRatio = (float) height / width;
        setAspectRatio(mAspectRatio);
        super.onOutputSizeChanged(width, height);
    }
    private void setAspectRatio(float aspectRatio) {
        mAspectRatio = aspectRatio;
        setFloat(mAspectRatioLocation, aspectRatio);
    }
    public void setRefractiveIndex(float refractiveIndex) {
        mRefractiveIndex = refractiveIndex;
        setFloat(mRefractiveIndexLocation, refractiveIndex);
    }
    public void setCenter(PointF center) {
        mCenter = center;
        setPoint(mCenterLocation, center);
    }
    public void setRadius(float radius) {
        mRadius = radius;
        setFloat(mRadiusLocation, radius);
    }
}
|
<reponame>ch1huizong/learning
def addword(theIndex, word, pagenumber):
    """Record that ``word`` occurs on ``pagenumber``, creating the index
    entry when the word has not been seen before."""
    if word not in theIndex:
        theIndex[word] = []
    theIndex[word].append(pagenumber)
#!/usr/bin/env bash
# Copyright 2019 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script provides helper methods to perform cluster actions.
source "$(dirname "${BASH_SOURCE[0]}")/../vendor/knative.dev/hack/e2e-tests.sh"
# --- Release and namespace configuration for the operator e2e tests. ---
# The previous serving release, installed by the operator. This value should be in the semantic format of major.minor.
readonly PREVIOUS_SERVING_RELEASE_VERSION="1.4"
# The previous eventing release, installed by the operator. This value should be in the semantic format of major.minor.
readonly PREVIOUS_EVENTING_RELEASE_VERSION="1.4"
# The target serving/eventing release to upgrade, installed by the operator. It can be a release available under
# kodata or an incoming new release. This value should be in the semantic format of major.minor.
readonly TARGET_RELEASE_VERSION="latest"
# This is the branch name of knative repos, where we run the upgrade tests.
readonly KNATIVE_REPO_BRANCH="${PULL_BASE_REF}"
# Namespaces used for tests
# This environment variable TEST_NAMESPACE defines the namespace to install Knative Serving.
export TEST_NAMESPACE="${TEST_NAMESPACE:-knative-operator-testing}"
export SYSTEM_NAMESPACE=${TEST_NAMESPACE}
# This environment variable TEST_EVENTING_NAMESPACE defines the namespace to install Knative Eventing.
# It is different from the namespace to install Knative Serving.
# We will use only one namespace, when Knative supports both components can coexist under one namespace.
export TEST_EVENTING_NAMESPACE="knative-eventing"
export TEST_RESOURCE="knative"
export TEST_EVENTING_MONITORING_NAMESPACE="knative-monitoring"
export KO_FLAGS="${KO_FLAGS:-}"
export INGRESS_CLASS=${INGRESS_CLASS:-istio.ingress.networking.knative.dev}
# Boolean used to indicate whether to generate serving YAML based on the latest code in the branch KNATIVE_SERVING_REPO_BRANCH.
GENERATE_SERVING_YAML=0
# Paths to the operator checkout and its parent directory.
readonly OPERATOR_DIR="$(dirname "${BASH_SOURCE[0]}")/.."
readonly KNATIVE_DIR=$(dirname ${OPERATOR_DIR})
release_yaml="$(mktemp)"
release_eventing_yaml="$(mktemp)"
# Artifact lists: first element is the component name, the rest are the files to fetch.
readonly SERVING_ARTIFACTS=("serving" "serving-crds.yaml" "serving-core.yaml" "serving-hpa.yaml" "serving-post-install-jobs.yaml")
readonly EVENTING_ARTIFACTS=("eventing" "eventing-crds.yaml" "eventing-core.yaml" "in-memory-channel.yaml" "mt-channel-broker.yaml"
  "eventing-post-install.yaml")
# Succeeds when the configured INGRESS_CLASS contains the given substring.
function is_ingress_class() {
  case "${INGRESS_CLASS}" in
    *"${1}"*) return 0 ;;
    *) return 1 ;;
  esac
}
# Add function call to trap, preserving any handler already registered.
# Parameters: $1 - Function to call
#             $2...$n - Signals for trap
function add_trap() {
  local cmd=$1
  shift
  # Quote "$@" and the signal so arguments are never word-split or globbed.
  for trap_signal in "$@"; do
    local current_trap="$(trap -p "$trap_signal" | cut -d\' -f2)"
    local new_cmd="($cmd)"
    # Chain onto the existing handler instead of replacing it.
    [[ -n "${current_trap}" ]] && new_cmd="${current_trap};${new_cmd}"
    trap -- "${new_cmd}" "$trap_signal"
  done
}
# Setup and run kail in the background to collect logs
# from all pods.
function test_setup_logging() {
  echo ">> Setting up logging..."
  # Install kail if needed.
  if ! which kail > /dev/null; then
    bash <( curl -sfL https://raw.githubusercontent.com/boz/kail/master/godownloader.sh) -b "$GOPATH/bin"
  fi
  # Capture all logs.
  # Logs land in the CI artifacts directory, named after the running e2e script.
  kail > ${ARTIFACTS}/k8s.log-$(basename ${E2E_SCRIPT}).txt &
  local kail_pid=$!
  # Clean up kail so it doesn't interfere with job shutting down
  add_trap "kill $kail_pid || true" EXIT
}
# Generic test setup. Used by the common test scripts.
# Currently only starts log collection; extend here for shared fixtures.
function test_setup() {
  test_setup_logging
}
# Download the repository of Knative. The purpose of this function is to download the source code of
# knative component for further use, based on component name and branch name.
# Parameters:
#  $1 - component repo name, either knative/serving or knative/eventing,
#  $2 - component name,
#  $3 - branch of the repository.
function download_knative() {
  local component_repo component_name
  component_repo=$1
  component_name=$2
  # Go the directory to download the source code of knative
  # NOTE(review): cd/clone failures are not checked here -- confirm callers
  # run with errexit or tolerate a partial checkout.
  cd ${KNATIVE_DIR}
  # Download the source code of knative
  git clone "https://github.com/${component_repo}.git" "${component_name}"
  cd "${component_name}"
  local branch=$3
  # Only switch branches when one was requested; otherwise stay on the default.
  if [ -n "${branch}" ] ; then
    git fetch origin ${branch}:${branch}
    git checkout ${branch}
  fi
  cd ${OPERATOR_DIR}
}
# Install Istio.
# Downloads istioctl into $HOME/.istioctl and installs Istio into the cluster.
function install_istio() {
  echo ">> Installing Istio"
  curl -sL https://istio.io/downloadIstioctl | sh -
  $HOME/.istioctl/bin/istioctl install -y
}
# Create the serving and eventing test namespaces if they do not exist yet.
function create_namespace() {
  echo ">> Creating test namespaces for knative serving and eventing"
  # All the custom resources and Knative Serving resources are created under this TEST_NAMESPACE.
  # The "get || create" pattern keeps the call idempotent across reruns.
  kubectl get ns ${TEST_NAMESPACE} || kubectl create namespace ${TEST_NAMESPACE}
  kubectl get ns ${TEST_EVENTING_NAMESPACE} || kubectl create namespace ${TEST_EVENTING_NAMESPACE}
}
# Download the nightly serving and eventing artifacts for the target release.
function download_latest_release() {
  download_nightly_artifacts "${SERVING_ARTIFACTS[@]}"
  download_nightly_artifacts "${EVENTING_ARTIFACTS[@]}"
}
# Download the latest nightly artifacts of one component into the operator's
# kodata directory, unless a matching target version already exists there.
# Parameters: $1 - component name ("serving" or "eventing"),
#             $2...$n - artifact file names to fetch, in order.
function download_nightly_artifacts() {
  array=("$@")
  component=${array[0]}
  unset array[0]
  counter=0
  linkprefix="https://storage.googleapis.com/knative-nightly/${component}/latest"
  version_exists=$(if_version_exists ${TARGET_RELEASE_VERSION} "knative-${component}")
  if [ "${version_exists}" == "no" ]; then
    header "Download the nightly build as the target version for Knative ${component}"
    knative_version_dir=${OPERATOR_DIR}/cmd/operator/kodata/knative-${component}/${TARGET_RELEASE_VERSION}
    # -p keeps a rerun of the script from failing when the directory exists.
    mkdir -p "${knative_version_dir}"
    # Prefix each artifact with a counter so the apply order is preserved.
    for artifact in "${array[@]}"; do
      ((counter=counter+1))
      wget "${linkprefix}/${artifact}" -O "${knative_version_dir}/${counter}-${artifact}"
    done
    if [ "${component}" == "serving" ]; then
      # Download the latest net-istio into the ingress directory.
      ingress_version_dir=${OPERATOR_DIR}/cmd/operator/kodata/ingress/${TARGET_RELEASE_VERSION}
      mkdir -p "${ingress_version_dir}"
      wget https://storage.googleapis.com/knative-nightly/net-istio/latest/net-istio.yaml -O "${ingress_version_dir}/net-istio.yaml"
    fi
  fi
}
# Install the Knative operator (plus Istio when the istio ingress is selected).
function install_operator() {
  create_namespace
  if is_ingress_class istio; then
    install_istio || fail_test "Istio installation failed"
  fi
  cd ${OPERATOR_DIR}
  download_latest_release
  header "Installing Knative operator"
  # Deploy the operator
  ko apply ${KO_FLAGS} -f config/
  wait_until_pods_running default || fail_test "Operator did not come up"
}
# Uninstalls Knative Serving from the current cluster.
# Also removes Eventing, Istio, the operator itself and the test namespaces.
function knative_teardown() {
  echo ">> Uninstalling Knative serving"
  echo ">> Bringing down Serving"
  kubectl delete -n $TEST_NAMESPACE KnativeServing --all
  echo ">> Bringing down Eventing"
  kubectl delete -n $TEST_NAMESPACE KnativeEventing --all
  echo ">> Bringing down Istio"
  $HOME/.istioctl/bin/istioctl x uninstall --purge
  kubectl delete --ignore-not-found=true clusterrolebinding cluster-admin-binding
  echo ">> Bringing down Operator"
  ko delete --ignore-not-found=true -f config/ || return 1
  echo ">> Removing test namespaces"
  kubectl delete all --all --ignore-not-found --now --timeout 60s -n $TEST_NAMESPACE
  kubectl delete --ignore-not-found --now --timeout 300s namespace $TEST_NAMESPACE
}
# Block until a file exists, polling once a second.
# Parameters: $1 - path of the file to wait for.
# Returns 0 once the file exists, 1 after a 300s timeout.
function wait_for_file() {
  local file timeout waits
  file="$1"
  waits=300
  timeout=$waits
  echo "Waiting for existence of file: ${file}"
  while [ ! -f "${file}" ]; do
    # When the timeout reaches zero, show an error and leave the loop.
    # Use a numeric comparison: '==' inside [ ] is a string-comparison bashism.
    if [ "${timeout}" -eq 0 ]; then
      echo "ERROR: Timeout (${waits}s) while waiting for the file ${file}."
      return 1
    fi
    sleep 1
    # Decrease the timeout by one.
    ((timeout--))
  done
  return 0
}
# Install the operator, then the previous Knative release through it.
function install_previous_operator_release() {
  install_operator
  install_previous_knative
}
# Create the serving/eventing custom resources pinned to the previous release.
function install_previous_knative() {
  header "Create the custom resources for Knative of the previous version"
  create_knative_serving ${PREVIOUS_SERVING_RELEASE_VERSION}
  create_knative_eventing ${PREVIOUS_EVENTING_RELEASE_VERSION}
}
# Apply a KnativeServing custom resource at the given version with zipkin
# tracing enabled.
# Parameters: $1 - serving version to install (major.minor or "latest").
function create_knative_serving() {
  version=${1}
  echo ">> Creating the custom resource of Knative Serving:"
  cat <<EOF | kubectl apply -f -
apiVersion: operator.knative.dev/v1beta1
kind: KnativeServing
metadata:
  name: ${TEST_RESOURCE}
  namespace: ${TEST_NAMESPACE}
spec:
  version: "${version}"
  config:
    tracing:
      backend: "zipkin"
      zipkin-endpoint: "http://zipkin.${TEST_EVENTING_MONITORING_NAMESPACE}.svc:9411/api/v2/spans"
      debug: "true"
      sample-rate: "1.0"
EOF
}
# Apply a KnativeEventing custom resource at the given version with zipkin
# tracing enabled.
# Parameters: $1 - eventing version to install (major.minor or "latest").
function create_knative_eventing() {
  version=${1}
  echo ">> Creating the custom resource of Knative Eventing:"
  cat <<-EOF | kubectl apply -f -
apiVersion: operator.knative.dev/v1beta1
kind: KnativeEventing
metadata:
  name: ${TEST_RESOURCE}
  namespace: ${TEST_EVENTING_NAMESPACE}
spec:
  version: "${version}"
  config:
    tracing:
      backend: "zipkin"
      zipkin-endpoint: "http://zipkin.${TEST_EVENTING_MONITORING_NAMESPACE}.svc:9411/api/v2/spans"
      debug: "true"
      sample-rate: "1.0"
EOF
}
# Apply both KnativeServing and KnativeEventing custom resources pinned to
# TARGET_RELEASE_VERSION, each with zipkin tracing enabled.
function create_latest_custom_resource() {
  echo ">> Creating the custom resource of Knative Serving:"
  cat <<-EOF | kubectl apply -f -
apiVersion: operator.knative.dev/v1beta1
kind: KnativeServing
metadata:
  name: ${TEST_RESOURCE}
  namespace: ${TEST_NAMESPACE}
spec:
  version: "${TARGET_RELEASE_VERSION}"
  config:
    tracing:
      backend: "zipkin"
      zipkin-endpoint: "http://zipkin.${TEST_EVENTING_MONITORING_NAMESPACE}.svc:9411/api/v2/spans"
      debug: "true"
      sample-rate: "1.0"
EOF
  echo ">> Creating the custom resource of Knative Eventing:"
  cat <<-EOF | kubectl apply -f -
apiVersion: operator.knative.dev/v1beta1
kind: KnativeEventing
metadata:
  name: ${TEST_RESOURCE}
  namespace: ${TEST_EVENTING_NAMESPACE}
spec:
  version: "${TARGET_RELEASE_VERSION}"
  config:
    tracing:
      backend: "zipkin"
      zipkin-endpoint: "http://zipkin.${TEST_EVENTING_MONITORING_NAMESPACE}.svc:9411/api/v2/spans"
      debug: "true"
      sample-rate: "1.0"
EOF
}
# Print "yes" when a directory whose name starts with the given version
# exists under the component's kodata directory, "no" otherwise.
# Parameters: $1 - version (major.minor), $2 - component directory name.
function if_version_exists() {
  version=$1
  component=$2
  knative_dir=${OPERATOR_DIR}/cmd/operator/kodata/${component}
  # Iterate with a glob instead of parsing `ls` output (fragile with
  # unusual file names); a missing directory simply yields no match.
  for eachdir in "${knative_dir}"/*; do
    eachversion=$(basename "${eachdir}")
    if [[ "${eachversion}" == ${version}* ]]; then
      echo "yes"
      # exit (not return): call sites invoke this in a $(...) subshell.
      exit
    fi
  done
  echo "no"
}
|
#!/bin/sh
# Creates a gentoo prefix https://wiki.gentoo.org/wiki/Project:Prefix
# Abort on the first failing command so a broken bootstrap is not "entered".
set -e
wget https://gitweb.gentoo.org/repo/proj/prefix.git/plain/scripts/bootstrap-prefix.sh
chmod +x bootstrap-prefix.sh
# This will take a while. Running it in the foreground replaces the previous
# redundant "background + wait" pair.
./bootstrap-prefix.sh ~/gentoo noninteractive
cd ~/gentoo
chmod +x startprefix
# Enter the prefix; portage can now be used
./startprefix
|
#! /bin/bash
# Record 2 seconds from the default input device (mono, 16-bit, 16 kHz,
# signed little-endian PCM) and build a noise profile from it with sox's
# noiseprof effect, written to speech.noise-profile.
# NOTE(review): stdout is redirected to tmp.txt and immediately deleted --
# presumably just to discard it; confirm before replacing with /dev/null.
sox -d \
-t wavpcm \
-c 1 -b 16 -r 16000 \
-e signed-integer \
--endian little \
- noiseprof speech.noise-profile \
trim 0 2 > tmp.txt
rm tmp.txt
|
package de.busse_apps.bakcalculator.gui;
/*
* Copyright 2015 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarActivity;
import de.busse_apps.bakcalculator.R;
/**
 * Single-activity host: shows the splash fragment on first launch, manages
 * the navigation drawer, and keeps the home/up indicator in sync with the
 * fragment back stack.
 */
public class MainActivity extends ActionBarActivity implements NavigationDrawerFragment.NavigationDrawerCallbacks {

    public static final String SPLASH_FRAGMENT_TAG = "de.busse_apps.bakcalculator.gui.SplashFragment";
    public static final String INPUT_FRAGMENT_TAG = "de.busse_apps.bakcalculator.gui.InputFragment";

    private FragmentManager mFragmentManager;
    private DrawerLayout mDrawerLayout;
    private NavigationDrawerFragment mDrawerFragment;
    private CharSequence mTitle;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mFragmentManager = getSupportFragmentManager();
        // Keep the drawer's home/up indicator in sync with the back stack.
        mFragmentManager.addOnBackStackChangedListener(new MyBackStackListener());
        mDrawerLayout = (DrawerLayout) findViewById(R.id.main_drawer_layout);
        mDrawerFragment = (NavigationDrawerFragment) mFragmentManager.findFragmentById(R.id.main_fragment_drawer);
        mDrawerFragment.setUp(R.id.main_fragment_drawer, mDrawerLayout);
        mTitle = getTitle();
        // Only add the splash fragment on a fresh launch; after a
        // configuration change the fragment manager restores it itself.
        if (savedInstanceState == null) {
            FragmentTransaction ft = mFragmentManager.beginTransaction();
            SplashFragment mSplashFragment = new SplashFragment();
            mSplashFragment.setArguments(getIntent().getExtras());
            ft.add(R.id.main_fragment_container, mSplashFragment, SPLASH_FRAGMENT_TAG).commit();
            mDrawerFragment.setHomeAsUp(false);
        }
    }

    /** Back closes the drawer first when it is open; otherwise default behavior. */
    @Override
    public void onBackPressed() {
        if (mDrawerFragment.isDrawerOpen()) {
            mDrawerFragment.closeDrawer();
            return;
        }
        super.onBackPressed();
    }

    /** Up pops the back stack unless the drawer is open; never finishes the activity. */
    @Override
    public boolean onSupportNavigateUp() {
        if ((mFragmentManager.getBackStackEntryCount() > 0) &&
                !(mDrawerFragment.isDrawerOpen())) {
            mFragmentManager.popBackStack();
        }
        return false;
    }

    @Override
    public void onNavigationDrawerItemSelected(int position) {
        // Replace Main Fragments
    }

    /** Navigates to the input screen, pushing it onto the back stack. */
    protected void openInputFragment() {
        InputFragment mInputFragment = new InputFragment();
        addFragment(mInputFragment, INPUT_FRAGMENT_TAG, null, true);
    }

    /**
     * Replaces the main container's content with the given fragment.
     *
     * @param fragment    fragment to show
     * @param tag         tag for lookup and back-stack entry
     * @param args        optional arguments bundle
     * @param toBackStack whether the transaction is added to the back stack
     */
    private void addFragment(Fragment fragment, String tag, Bundle args, boolean toBackStack) {
        FragmentTransaction ft = mFragmentManager.beginTransaction();
        fragment.setArguments(args);
        ft.replace(R.id.main_fragment_container, fragment, tag);
        ft.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN);
        if (toBackStack) {
            ft.addToBackStack(tag);
        }
        ft.commit();
    }

    /**
     * FragmentManager.OnBackStackChangedListener for handling HomeAsUp Button
     */
    private class MyBackStackListener implements FragmentManager.OnBackStackChangedListener {
        @Override
        public void onBackStackChanged() {
            boolean mCanBack = mFragmentManager.getBackStackEntryCount() > 0;
            mDrawerFragment.setHomeAsUp(mCanBack);
        }
    }
}
|
<gh_stars>0
import request from '@/utils/request'
/**
 * Fetch the most recent newsletter section from the API.
 * @returns {Promise} resolves with the request's response.
 */
export function getNewsletter() {
  const options = {
    url: 'newsletter-section/get-latest',
    method: 'get',
  };
  return request(options);
}
|
#!/bin/bash
# Download and unpack the OpenShift Origin server release identified by
# OC_VERSION/GIT_STRING. The commented commands below document the manual
# subscription, repo and docker setup this script used to perform.
OC_VERSION="v3.9.0-alpha.3"
GIT_STRING="-78ddc10"
#subscription-manager register --username openshift-enterprise
#subscription-manager attach --pool 8a85f98b621f8a6b01621fac0f790155
#subscription-manager repos --disable="*"
#subscription-manager repos \
#    --enable="rhel-7-server-rpms" \
#    --enable="rhel-7-server-extras-rpms" \
#    --enable="rhel-7-server-ose-3.7-rpms" \
#    --enable="rhel-7-fast-datapath-rpms"
#yum -y install vim-enhanced docker wget git net-tools bind-utils iptables-services bridge-utils bash-completion kexec-tools sos psacct
#systemctl enable docker
#systemctl start docker
#cd
# Build the tarball name once so the download and extract steps agree.
TARBALL="openshift-origin-server-${OC_VERSION}${GIT_STRING}-linux-64bit.tar.gz"
wget "https://github.com/openshift/origin/releases/download/${OC_VERSION}/${TARBALL}"
tar -xzvf "${TARBALL}"
#mv openshift-origin-server-v3.9.0-alpha.3-78ddc10-linux-64bit/oc /usr/bin
#restorecon -rv /usr/bin
|
# File: G (Python 2.4)
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from pirates.piratesgui import GuiTray
from pirates.piratesbase import PLocalizer
from pirates.piratesgui import PiratesGuiGlobals
from pirates.reputation import ReputationGlobals
from pirates.piratesbase import PiratesGlobals
from pirates.uberdog.UberDogGlobals import InventoryType
from pirates.piratesgui import ReputationMeterDial
from pirates.piratesgui import StatusTray
from direct.interval.IntervalGlobal import *
from pirates.piratesgui import VitaeMeter
class GameGui(DirectButton):
    """HUD widget for the local pirate: reputation dial, HP/voodoo status
    tray and vitae meter, with a mouse-over glow and a pulsing low-health
    alert on the key frame.

    This class was recovered from decompiled bytecode; two decompiler
    artifacts are fixed below (see FIX comments).
    """

    def __init__(self, parent, **kw):
        gui = loader.loadModel('models/gui/toplevel_gui')
        # FIX: the decompiler emitted '**None' here, which raises
        # TypeError at runtime; forward the keyword arguments instead.
        DirectButton.__init__(self, parent = NodePath(), **kw)
        self.initialiseoptions(GameGui)
        # Circular reputation meter plus its labels, re-parented onto this
        # button so they share its bin/transform.
        self.repMeter = ReputationMeterDial.ReputationMeterDial(InventoryType.OverallRep, width = 0.56000000000000005)
        self.repMeter.reparentTo(self)
        self.repMeter.setPos(0.42499999999999999, 0, 0.19)
        self.keyFrame = gui.find('**/main_gui_game_gui_base').copyTo(self)
        self.keyFrame.setPos(0.75, 0, 0.185)
        self.keyFrame.setScale(0.59999999999999998)
        self.repMeter.categoryLabel.wrtReparentTo(self)
        self.repMeter.levelCapScroll.wrtReparentTo(self)
        self.repMeter.levelCapIcon.wrtReparentTo(self)
        # HP / voodoo status tray.
        self.statusTray = StatusTray.StatusTray(parent = self, state = DGG.DISABLED)
        self.statusTray.flattenStrong()
        self.statusTray.statusEffectsPanel.setPos(0.34000000000000002, 0, 0.14000000000000001)
        self.statusTray.statusEffectsPanel.setScale(0.80000000000000004)
        self.statusTray.setPos(0.35999999999999999, 0, 0.17000000000000001)
        self.statusTray.hpLabel.show()
        self.statusTray.hpMeter.component('text0').show()
        self.statusTray.voodooLabel.show()
        self.statusTray.voodooMeter.component('text0').show()
        self.voodooModMeter = None
        self.hpModMeter = None
        self.repMeter.levelLabel.wrtReparentTo(self)
        self.repMeter.valueLabel.wrtReparentTo(self)
        self.vitaeMeter = VitaeMeter.VitaeMeter(parent = self.statusTray, state = DGG.DISABLED, relief = None, pos = (0.84999999999999998, 0, 0.10000000000000001), scale = 0.81999999999999995)
        # Decorative clamps shown only while the vitae meter is visible.
        self.clamps = self.attachNewNode('clamps')
        clamp = gui.find('**/groggy_clamp').copyTo(NodePath(''))
        clamp.reparentTo(self.clamps, sort = 2)
        clamp.setPos(0.95999999999999996, 0, 0.13500000000000001)
        clamp.setScale(0.80000000000000004)
        clamp = gui.find('**/*clamp').copyTo(NodePath(''))
        clamp.reparentTo(self.clamps, sort = 2)
        clamp.setPos(0.95999999999999996, 0, 0.23499999999999999)
        clamp.setScale(0.80000000000000004)
        self.clamps.hide()
        # Two stacked glow images used for the mouse-over highlight.
        meterGui = loader.loadModel('models/textureCards/dialmeter')
        self.glow = OnscreenImage(parent = self, image = meterGui.find('**/dialmeter_full'), scale = 0.57999999999999996, color = (0.996, 0.95699999999999996, 0.51000000000000001, 0.67000000000000004), pos = (0.42499999999999999, 0, 0.19))
        self.glow2 = OnscreenImage(parent = self, image = meterGui.find('**/dialmeter_full'), scale = 0.45000000000000001, color = (0.996, 0.95699999999999996, 0.51000000000000001, 0.67000000000000004), pos = (0.42499999999999999, 0, 0.19))
        self.glow.hide()
        self.glow.setBin('gui-fixed', -2)
        self.glow2.hide()
        self.glow2.setBin('gui-fixed', -2)
        self.setBin('gui-fixed', -1)
        meterGui.removeNode()
        self.bind(DGG.ENTER, self.turnHighlightOn)
        self.bind(DGG.EXIT, self.turnHighlightOff)
        self.createHealthAlert()
        self.haTask = None
        # NOTE(review): the decompiler produced 'if not parent: pass'
        # followed by an unconditional reparent; the original may have
        # reparented only when no parent was given -- confirm in game.
        if not parent:
            pass
        self.reparentTo(aspect2d)

    def createHealthAlert(self):
        """(Re)build the red pulse interval played over the key frame when
        the avatar's HP is low."""
        if hasattr(self, 'healthAlertIval'):
            self.healthAlertIval.finish()
            del self.healthAlertIval
        self.healthAlertIval = Sequence(LerpColorScaleInterval(self.keyFrame, 0.25, Vec4(1.0, 0.29999999999999999, 0.29999999999999999, 1.0), blendType = 'easeIn'), LerpColorScaleInterval(self.keyFrame, 0.25, Vec4(1.0, 1.0, 1.0, 1.0), blendType = 'easeOut'), Func(self.keyFrame.clearColorScale))
        self.healthAlertRate = 1.0

    def updateHealthAlert(self, task):
        """Task body: pulse faster as HP drops; stops itself once HP rises
        above 40% of max or the local avatar is unavailable."""
        # FIX: the decompiled guard mixed 'and'/'or' precedence so a
        # missing base.localAvatar raised AttributeError; bail out unless
        # the avatar exists and is generated.
        if not hasattr(base, 'localAvatar') or not base.localAvatar or not base.localAvatar.isGenerated():
            return task.done
        hpFraction = float(base.localAvatar.hp) / float(base.localAvatar.maxHp)
        if hpFraction > 0.40000000000000002:
            self.stopHealthAlert()
            return task.done
        # Pulse rate scales linearly from 0.2 (at 40% HP) up to ~1.0 (near 0 HP).
        self.healthAlertRate = 1.0 - hpFraction * 2.0
        if not self.healthAlertIval.isPlaying():
            self.healthAlertIval.start(playRate = self.healthAlertRate)
        return task.cont

    def startHealthAlert(self):
        """Begin monitoring HP for the low-health pulse (idempotent)."""
        if not self.haTask:
            self.haTask = taskMgr.add(self.updateHealthAlert, 'updateHealthAlert')

    def stopHealthAlert(self):
        """Stop monitoring HP for the low-health pulse (idempotent)."""
        if self.haTask:
            taskMgr.remove(self.haTask)
            self.haTask = None

    def destroy(self):
        """Tear down intervals, tasks and child meters before destroying
        the underlying DirectButton."""
        self.stopHealthAlert()
        if hasattr(self, 'moveUpIval'):
            self.moveUpIval.finish()
            del self.moveUpIval
        if hasattr(self, 'scaleDown'):
            self.scaleDown.finish()
            del self.scaleDown
        if hasattr(self, 'fadeOut'):
            self.fadeOut.finish()
            del self.fadeOut
        if hasattr(self, 'track'):
            self.track.finish()
            del self.track
        if hasattr(self, 'healthAlertIval'):
            self.healthAlertIval.finish()
            del self.healthAlertIval
        if self.hpModMeter:
            self.hpModMeter.destroy()
            self.hpModMeter = None
        if self.voodooModMeter:
            self.voodooModMeter.destroy()
            self.voodooModMeter = None
        self.clamps = None
        DirectButton.destroy(self)

    def hide(self):
        """Hide the GUI; meters are refreshed so they are current when shown again."""
        DirectButton.hide(self)
        self.vitaeMeter.hide()
        self.statusTray.updateHp(localAvatar.getHp(), localAvatar.getMaxHp())
        self.statusTray.updateVoodoo(localAvatar.getMojo(), localAvatar.getMaxMojo())

    def show(self):
        """Show the GUI and refresh the vitae meter from the avatar's inventory."""
        DirectButton.show(self)
        self.vitaeMeter.show()
        inv = localAvatar.getInventory()
        if inv:
            vtLevel = inv.getStackQuantity(InventoryType.Vitae_Level)
            vtCost = inv.getStackQuantity(InventoryType.Vitae_Cost)
            vtLeft = inv.getStackQuantity(InventoryType.Vitae_Left)
            self.updateVitae(vtLevel, vtCost, vtLeft)

    def updateVitae(self, level, cost, left):
        """Update the vitae meter; it (and the clamps) are only visible
        while the vitae level is positive."""
        self.vitaeMeter.update(level, cost, left)
        if level > 0:
            self.vitaeMeter.show()
            self.showClamps()
        else:
            self.vitaeMeter.hide()
            self.hideClamps()

    def createExpAlert(self, amount, duration, position, posChange):
        """Float a '+N Reputation' (or bare negative amount) text popup
        from `position`, drifting by `posChange` over `duration` seconds
        while shrinking and fading out."""
        textGenerator = TextNode('textGenerator')
        if amount < 0:
            textGenerator.setText(str(amount))
        else:
            textGenerator.setText('+' + str(amount) + ' ' + PLocalizer.Reputation)
        textGenerator.setFont(PiratesGlobals.getPirateOutlineFont())
        textGenerator.clearShadow()
        textGenerator.setAlign(TextNode.ACenter)
        textGenerator.setTextColor(1.0, 1.0, 1.0, 1.0)
        textScale = 0.074999999999999997
        newTextNode = textGenerator.generate()
        newTextDummy = render2d.attachNewNode(newTextNode)
        newTextDummy.setPos(render2d, position)
        newTextDummy.setHpr(render2d, 0.0, 0.0, 0.0)
        newTextDummy.setScale(textScale)
        newTextDummy.setBin('gui-popup', 0)
        # Only one popup animation lives at a time; finish any prior one.
        if hasattr(self, 'moveUpIval'):
            self.moveUpIval.finish()
            del self.moveUpIval
        if hasattr(self, 'scaleDown'):
            self.scaleDown.finish()
            del self.scaleDown
        if hasattr(self, 'fadeOut'):
            self.fadeOut.finish()
            del self.fadeOut
        if hasattr(self, 'track'):
            self.track.finish()
            del self.track
        self.moveUpIval = newTextDummy.posInterval(duration, position + posChange)
        self.scaleDown = newTextDummy.scaleInterval(duration * 0.75, textScale * 0.69999999999999996, blendType = 'easeInOut')
        self.fadeOut = newTextDummy.colorScaleInterval(duration * 0.25, Vec4(0, 0, 0, 0))
        self.track = Sequence(Parallel(self.moveUpIval, Sequence(Wait(0.25), self.scaleDown), Sequence(Wait(0.75), self.fadeOut)), Func(self.removeExpAlert, newTextDummy))
        self.track.start()

    def removeExpAlert(self, alert):
        """Remove a finished popup's node from the scene graph."""
        if alert:
            alert.removeNode()
            alert = None

    def createLevelUpAlert(self, duration, position, posChange):
        """Float a localized 'Level Up' popup; same animation as createExpAlert."""
        textGenerator = TextNode('textGenerator')
        textGenerator.setText(PLocalizer.LevelUp)
        textGenerator.setFont(PiratesGlobals.getPirateOutlineFont())
        textGenerator.clearShadow()
        textGenerator.setAlign(TextNode.ACenter)
        textGenerator.setTextColor(1.0, 1.0, 1.0, 1.0)
        textScale = 0.074999999999999997
        newTextNode = textGenerator.generate()
        newTextDummy = render2d.attachNewNode(newTextNode)
        newTextDummy.setPos(render2d, position)
        newTextDummy.setHpr(render2d, 0.0, 0.0, 0.0)
        newTextDummy.setScale(textScale)
        newTextDummy.setBin('gui-popup', 0)
        if hasattr(self, 'moveUpIval'):
            self.moveUpIval.finish()
            del self.moveUpIval
        if hasattr(self, 'scaleDown'):
            self.scaleDown.finish()
            del self.scaleDown
        if hasattr(self, 'fadeOut'):
            self.fadeOut.finish()
            del self.fadeOut
        if hasattr(self, 'track'):
            self.track.finish()
            del self.track
        self.moveUpIval = newTextDummy.posInterval(duration, position + posChange)
        self.scaleDown = newTextDummy.scaleInterval(duration * 0.75, textScale * 0.69999999999999996, blendType = 'easeInOut')
        self.fadeOut = newTextDummy.colorScaleInterval(duration * 0.25, Vec4(0, 0, 0, 0))
        self.track = Sequence(Parallel(self.moveUpIval, Sequence(Wait(0.25), self.scaleDown), Sequence(Wait(0.75), self.fadeOut)), Func(self.removeExpAlert, newTextDummy))
        self.track.start()

    def showClamps(self):
        """Show the decorative clamps around the vitae meter."""
        self.clamps.show()

    def hideClamps(self):
        """Hide the decorative clamps."""
        self.clamps.hide()

    def turnHighlightOn(self, event = None):
        """Mouse-enter handler: show both glow layers."""
        self.glow.show()
        self.glow2.show()

    def turnHighlightOff(self, event = None):
        """Mouse-exit handler: hide both glow layers."""
        self.glow.hide()
        self.glow2.hide()
|
#!/bin/bash
# Launch an RStudio Server container (rocker/rstudio-stable).
# Options (all --key=value style):
#   -p/--port, -d/--dir, -u/--user, -pw/--password, -r/--root, -n/--name
USER=rstudio
PASSWORD=$USER
DIR=../
ROOT=TRUE
PORT=8787
NAME=rstudio-docker
# Parse --key=value options; unknown options are reported and skipped.
for i in "$@"
do
case $i in
    -p=*|--port=*)
    PORT="${i#*=}"
    shift # past argument=value
    ;;
    -d=*|--dir=*)
    DIR="${i#*=}"
    shift # past argument=value
    ;;
    -u=*|--user=*)
    USER="${i#*=}"
    shift # past argument=value
    ;;
    -pw=*|--password=*)
    PASSWORD="${i#*=}"
    shift # past argument=value
    ;;
    -r=*|--root=*)
    ROOT="${i#*=}"
    shift # past argument=value
    ;;
    -n=*|--name=*)
    NAME="${i#*=}"
    shift # past argument=value
    ;;
    *)
    echo "invalid option: $i, ignored" # unknown option
    ;;
esac
done
# FIX: quote user-supplied values so directories/names containing spaces
# survive word splitting.
sudo docker run -d -v "$DIR":/home/"$USER"/ \
 -p "$PORT":8787 \
 --name "$NAME" \
 -e PASSWORD="$PASSWORD" \
 -e ROOT="$ROOT" \
 -e USER="$USER" \
 rocker/rstudio-stable && {
 echo "docker runs successfully by following configuration"
 echo "RSTUDIO_DIR: $DIR"
 echo "RSTUDIO_USER: $USER"
 echo "RSTUDIO_PORT: $PORT"
 echo "RSTUDIO_ROOT: $ROOT"
 echo "RSTUDIO_PASSWORD: $PASSWORD"
 exit 0
}
# FIX: exit non-zero on failure; previously the script ended with the
# success status of the final echo even when 'docker run' failed.
echo "docker run failed"
exit 1
|
#!/usr/bin/env bash
# Download, checksum-verify and unpack the Go toolchain into /usr/local/go.
# NOTE(review): GO_ARCHIVE_URL and GO_ARCHIVE_SHA256 are expected to come
# from the sourced 'deps-golang' file (not visible here) -- confirm it
# exports both.
set -ex
source deps-golang
GOROOT=/usr/local/go
GO_ARCHIVE=$(basename $GO_ARCHIVE_URL)
echo "Downloading go..."
mkdir -p $(dirname $GOROOT)
wget -q $GO_ARCHIVE_URL
# Verify the archive; 'set -e' aborts the script on checksum mismatch.
echo "${GO_ARCHIVE_SHA256} ${GO_ARCHIVE}" | sha256sum -c -
tar xf $GO_ARCHIVE -C $(dirname $GOROOT)
# Allow non-root users to write into the Go tree (e.g. for module caches).
chmod -R a+w $GOROOT
|
#!/bin/bash
# Download the GHTorrent MySQL dump into ./data in the background,
# logging wget output to logs/downloads.out.
# FIX: ensure the log directory exists first -- the '> logs/downloads.out'
# redirection fails immediately if 'logs' is missing ('data' is created
# by wget -P, but we create both for robustness).
mkdir -p data logs
nohup wget -P data "http://ghtorrent-downloads.ewi.tudelft.nl/mysql/mysql-2019-06-01.tar.gz" > logs/downloads.out &
|
#! /bin/ksh
#
# Hook run on every build invocation: publishes the OASIS3 library,
# PSMILe library list and include flags for later compile/link steps.
# NOTE(review): route/oasdir/platform/cblue/cnormal come from the calling
# build framework (not visible in this chunk).
always_oas(){
route "${cblue}>> always_oas${cnormal}"
liboas="$oasdir/$platform/lib/oasis3/liboasis3.MPI1.a"
libpsmile="$oasdir/$platform/lib/libanaisg.a $oasdir/$platform/lib/libanaism.a $oasdir/$platform/lib/libclim.MPI1.a $oasdir/$platform/lib/libpsmile.MPI1.a $oasdir/$platform/lib/libfscint.a $oasdir/$platform/lib/libmpp_io.a $oasdir/$platform/lib/libscrip.a $oasdir/$platform/lib/libdownscal.a"
incpsmile="-I$oasdir/$platform/build/lib/psmile.MPI1 -I$oasdir/$platform/build/lib/clim.MPI1 -I$oasdir/$platform/build/lib/mpp_io"
route "${cblue}<< always_oas${cnormal}"
}
# Hook: apply source substitutions for OASIS3 by delegating to the
# common implementation (c_substitutions_oas, defined elsewhere).
substitutions_oas(){
route "${cblue}>> substitutions_oas${cnormal}"
c_substitutions_oas
route "${cblue}<< substitutions_oas${cnormal}"
}
# Hook: generate the OASIS3 makefile for this platform by copying the
# architecture template and then sed-patching its placeholder tokens
# (__inc__, __ldflg__, __comF90__, ...) with concrete paths and flags.
# All command output is appended to $log_file / $err_file and each step
# is validated via 'check' (defined by the calling framework).
configure_oas(){
route "${cblue}>> configure_oas${cnormal}"
file=${oasdir}/util/make_dir/make.oas3
comment " cp jureca oasis3 makefile to /util/make_dir/"
cp $rootdir/bldsva/intf_oas3/oasis3/arch/$platform/config/make.gnu_cluma2_oa3 $file >> $log_file 2>> $err_file
check
c_configure_oas
comment " sed new psmile includes to Makefile"
sed -i 's@__inc__@-I$(LIBBUILD)/psmile.$(CHAN) -I$(LIBBUILD)/clim.$(CHAN) -I$(LIBBUILD)/mpp_io'" -I$ncdfPath/include@" $file >> $log_file 2>> $err_file
check
comment " sed ldflg to oas Makefile"
sed -i "s@__ldflg__@@" $file >> $log_file 2>> $err_file
check
comment " sed comF90 to oas Makefile"
sed -i "s@__comF90__@$mpiPath/bin/mpif90 $optComp@" $file >> $log_file 2>> $err_file
check
comment " sed comCC to oas Makefile"
sed -i "s@__comCC__@$mpiPath/bin/mpicc $optComp@" $file >> $log_file 2>> $err_file
check
comment " sed ld to oas Makefile"
sed -i "s@__ld__@$mpiPath/bin/mpif90@" $file >> $log_file 2>> $err_file
check
comment " sed libs to oas Makefile"
sed -i "s@__lib__@-L$ncdfPath/lib/ -lnetcdff@" $file >> $log_file 2>> $err_file
check
comment " sed precision to oas Makefile"
sed -i "s@__precision__@-fdefault-real-8@" $file >> $log_file 2>> $err_file
check
route "${cblue}<< configure_oas${cnormal}"
}
# Hook: build OASIS3 via the common build step, then install the coupler
# binary into $bindir.
make_oas(){
route "${cblue}>> make_oas${cnormal}"
c_make_oas
comment " cp oasis binary to $bindir"
cp $oasdir/$platform/bin/oasis3.MPI1.x $bindir >> $log_file 2>> $err_file
check
route "${cblue}<< make_oas${cnormal}"
}
# Hook: run-time setup for OASIS3, delegating to the common
# implementation (c_setup_oas, defined elsewhere).
setup_oas(){
route "${cblue}>> setupOas${cnormal}"
c_setup_oas
route "${cblue}<< setupOas${cnormal}"
}
|
package com.alibaba.jvm.sandbox.repeater.plugin.hikv;
import java.util.List;
import com.alibaba.jvm.sandbox.api.event.Event;
import com.alibaba.jvm.sandbox.repeater.plugin.core.impl.AbstractInvokePluginAdapter;
import com.alibaba.jvm.sandbox.repeater.plugin.core.model.EnhanceModel;
import com.alibaba.jvm.sandbox.repeater.plugin.domain.InvokeType;
import com.alibaba.jvm.sandbox.repeater.plugin.spi.InvokePlugin;
import com.google.common.collect.Lists;
import org.kohsuke.MetaInfServices;
/**
 * Repeater plugin that intercepts calls to the blocking HiKV client so
 * they can be recorded and replayed.
 *
 * @author wangyeran
 */
@MetaInfServices(InvokePlugin.class)
public class HikvPlugin extends AbstractInvokePluginAdapter {

    /**
     * Enhance {@code com.iqiyi.hikv.ToBlockingHiKV#get}, watching method
     * entry, normal return and thrown exceptions.
     */
    @Override
    protected List<EnhanceModel> getEnhanceModels() {
        EnhanceModel hikvGetModel = EnhanceModel.builder()
                .classPattern("com.iqiyi.hikv.ToBlockingHiKV")
                .methodPatterns(EnhanceModel.MethodPattern.transform("get"))
                .watchTypes(Event.Type.BEFORE, Event.Type.RETURN, Event.Type.THROWS)
                .build();
        return Lists.newArrayList(hikvGetModel);
    }

    /** Processor that converts the watched events into invocations. */
    protected HikvProcesssor getInvocationProcessor() {
        return new HikvProcesssor(getType());
    }

    /** Invocation type handled by this plugin. */
    @Override
    public InvokeType getType() {
        return InvokeType.HIKV;
    }

    /** Unique plugin identity string. */
    @Override
    public String identity() {
        return "hikv";
    }

    /** HiKV calls are sub-invocations, never trace entry points. */
    @Override
    public boolean isEntrance() {
        return false;
    }
}
|
const awsServerlessExpress = require('aws-serverless-express');
const { app } = require('./index');

// MIME types that API Gateway should treat as binary (base64-encoded)
// when proxied through aws-serverless-express.
const BINARY_MIME_TYPES = [
  'application/font-woff',
  'application/font-woff2',
  'application/javascript',
  'application/json',
  'application/octet-stream',
  'application/xml',
  'font/eot',
  'font/opentype',
  'font/otf',
  'image/jpeg',
  'image/png',
  'image/svg+xml',
  'image/x-icon',
  'text/comma-separated-values',
  'text/css',
  'text/html',
  'text/javascript',
  'text/plain',
  'text/text',
  'text/xml',
  '*/*',
];

// Wrap the Express app in a long-lived proxy server (created once per
// Lambda container, reused across invocations).
const server = awsServerlessExpress.createServer(app, null, BINARY_MIME_TYPES);

// Lambda entry point: forward each API Gateway event to the Express app.
exports.handler = (event, context) => awsServerlessExpress.proxy(server, event, context);
|
<filename>guava/src/com/google/common/util/concurrent/InterruptibleTask.java
/*
* Copyright (C) 2015 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.util.concurrent;
import com.google.common.annotations.GwtCompatible;
import com.google.j2objc.annotations.ReflectionSupport;
import java.util.concurrent.atomic.AtomicReference;
/**
 * A one-shot runnable whose executing thread can be interrupted exactly once
 * via {@link #interruptTask()}. The AtomicReference&lt;Thread&gt; superclass
 * holds the thread that claimed the task (null until run() starts).
 */
@GwtCompatible(emulated = true)
@ReflectionSupport(value = ReflectionSupport.Level.FULL)
// Some Android 5.0.x Samsung devices have bugs in JDK reflection APIs that cause
// getDeclaredField to throw a NoSuchFieldException when the field is definitely there.
// Since this class only needs CAS on one field, we can avoid this bug by extending AtomicReference
// instead of using an AtomicReferenceFieldUpdater.
abstract class InterruptibleTask extends AtomicReference<Thread> implements Runnable {
  // The thread executing the task publishes itself to the superclass' reference and the thread
  // interrupting sets 'doneInterrupting' when it has finished interrupting.
  private volatile boolean doneInterrupting;

  @Override
  public final void run() {
    // Claim the task: only the first thread to CAS itself in executes it.
    if (!compareAndSet(null, Thread.currentThread())) {
      return; // someone else has run or is running.
    }
    try {
      runInterruptibly();
    } finally {
      if (wasInterrupted()) {
        // We were interrupted, it is possible that the interrupted bit hasn't been set yet. Wait
        // for the interrupting thread to set 'doneInterrupting' to true. See interruptTask().
        // We want to wait so that we don't interrupt the _next_ thing run on the thread.
        // Note: We don't reset the interrupted bit, just wait for it to be set.
        // If this is a thread pool thread, the thread pool will reset it for us. Otherwise, the
        // interrupted bit may have been intended for something else, so don't clear it.
        while (!doneInterrupting) {
          Thread.yield();
        }
      }
    }
  }

  /** Performs the actual work; run() guarantees at-most-once execution. */
  abstract void runInterruptibly();

  /** Reports whether this task was interrupted (supplied by subclasses). */
  abstract boolean wasInterrupted();

  /**
   * Interrupts the thread currently running the task (if any) and then marks
   * interruption as complete so run() can stop spinning.
   */
  final void interruptTask() {
    // interruptTask is guaranteed to be called at most once, and if runner is non-null when that
    // happens, then it must have been the first thread that entered run(). So there is no risk that
    // we are interrupting the wrong thread.
    Thread currentRunner = get();
    if (currentRunner != null) {
      currentRunner.interrupt();
    }
    doneInterrupting = true;
  }

  @Override
  public abstract String toString();
}
|
#!/usr/bin/env bash
# Installs RPI-course-specific packages on a Submitty autograding machine.
# this script must be run by root or sudo
if [[ "$UID" -ne "0" ]] ; then
    echo "ERROR: This script must be run by root or sudo"
    exit
fi
# We assume a relative path from this repository to the installation
# directory and configuration directory.
CONF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"/../../../../../config
SUBMITTY_INSTALL_DIR=$(jq -r '.submitty_install_dir' ${CONF_DIR}/submitty.json)
CGI_USER=$(jq -r '.cgi_user' ${SUBMITTY_INSTALL_DIR}/config/submitty_users.json)
##################################################
# This .sh file contains
echo "Installing RPI specific packages"
sudo apt-get install -qqy clisp emacs
##################################################
# Used by Computer Science 1 Autograding
echo "Getting pylint..."
# install pylint for python3 using pip
pip3 install pylint
pip3 install pillow
##################################################
# Used by Principles of Software
echo "Getting mono..."
# this package allows us to run windows .net executables on linux
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF
echo "deb http://download.mono-project.com/repo/ubuntu stable-bionic main" | sudo tee /etc/apt/sources.list.d/mono-official.list
sudo apt-get -qqy update
sudo apt-get -qqy install mono-devel
# If Dafny hasn't already been installed
if [ ! -d "${SUBMITTY_INSTALL_DIR}/Dafny" ]; then
    # "Dafny is a verification-aware programming language"
    echo "Getting dafny..."
    mkdir -p ${SUBMITTY_INSTALL_DIR}/Dafny
    # NOTE(review): COURSE_BUILDERS_GROUP is never set in this script --
    # presumably exported by the caller; confirm before relying on it.
    chown root:${COURSE_BUILDERS_GROUP} ${SUBMITTY_INSTALL_DIR}/Dafny
    chmod 751 ${SUBMITTY_INSTALL_DIR}/Dafny
    pushd ${SUBMITTY_INSTALL_DIR}/Dafny > /dev/null
    DAFNY_VER=v2.3.0
    DAFNY_FILE=dafny-2.3.0.10506-x64-ubuntu-16.04.zip
    # -o /dev/null sends wget's log to /dev/null; the zip lands in the cwd.
    wget https://github.com/Microsoft/dafny/releases/download/${DAFNY_VER}/${DAFNY_FILE} -o /dev/null > /dev/null 2>&1
    unzip ${DAFNY_FILE} > /dev/null
    rm -f ${DAFNY_FILE} > /dev/null
    # fix permissions
    chmod -R o+rx dafny
    popd > /dev/null
    # then dafny can be run (using mono):
    # /usr/bin/mono /usr/local/submitty/Dafny/dafny/Dafny.exe /help
fi
##################################################
# Install Racket and Swi-prolog for Programming Languages
echo "installing Racket and Swi-prolog"
apt-add-repository -y ppa:plt/racket > /dev/null 2>&1
apt-get install -qqy racket > /dev/null 2>&1
apt-get install -qqy swi-prolog > /dev/null 2>&1
##################################################
# Used by Principles of Program Analysis
# Soot is a Java Bytecode Analysis and Transformation Framework
echo "Getting Soot... "
mkdir -p ${SUBMITTY_INSTALL_DIR}/java_tools/soot
pushd ${SUBMITTY_INSTALL_DIR}/java_tools/soot > /dev/null
rm -rf soot*jar
# older, requested version:
curl http://www.cs.rpi.edu/~milanova/soot-develop.jar > soot-develop.jar
curl http://www.cs.rpi.edu/~milanova/rt.jar > rt.jar
# most recent libraries:
curl https://soot-build.cs.uni-paderborn.de/public/origin/develop/soot/soot-develop/build/sootclasses-trunk.jar > sootclasses-trunk.jar
curl https://soot-build.cs.uni-paderborn.de/public/origin/develop/soot/soot-develop/build/sootclasses-trunk-jar-with-dependencies.jar > sootclasses-trunk-jar-with-dependencies.jar
# FIX: removed an orphaned '-o /dev/null > /dev/null 2>&1' line left over
# from an edited wget command; executed on its own it tries to run a
# nonexistent '-o' program and always errors.
popd > /dev/null
# fix all java_tools permissions
chown -R root:${COURSE_BUILDERS_GROUP} ${SUBMITTY_INSTALL_DIR}/java_tools
chmod -R 755 ${SUBMITTY_INSTALL_DIR}/java_tools
# install haskell
echo "Getting Haskell... "
apt-get install -qqy haskell-platform
apt-get install -qqy ocaml
## TODO: ADD INSTALLATION INFO FOR Z3
## https://github.com/Z3Prover/z3/releases
## (just installed binary at /usr/local/submitty/tools/z3)
##################################################
# Used by Network Programming class
echo "Getting tools for NetProg... "
apt-get install -qqy libssl-dev
# don't install these...
#apt-get install -qqy libavahi-compat-libdnssd-dev avahi-utils avahi-daemon
# instead:
# Download just the -dev package and force-install it without its
# daemon/runtime dependencies.
mkdir tmp_avahi_install_dir
cd tmp_avahi_install_dir
apt-get download libavahi-compat-libdnssd-dev
mv libavahi*deb libavahi-compat-libdnssd-dev.deb
dpkg --force-all -i libavahi-compat-libdnssd-dev.deb
cd ..
rm -r tmp_avahi_install_dir
##################################################
# Used by Advanced Computer Graphics course
# GLEW and GLM
echo "installing graphics libraries"
apt-get install -qqy glew-utils libglew-dev libglm-dev
apt-get install -qqy libxrandr-dev xorg-dev
#CMAKE permissions
#These permissions are necessary so that untrusted user can use pkgconfig with cmake.
#Note that pkgconfig does not appear until after graphics installs (Section above)
chmod -R o+rx /usr/local/lib/pkgconfig
chmod -R o+rx /usr/local/lib/cmake
#GLFW
# Built from source because the packaged version lags behind.
echo "installing GLFW"
wget https://github.com/glfw/glfw/releases/download/3.2.1/glfw-3.2.1.zip
unzip glfw-3.2.1.zip
cd glfw-3.2.1
mkdir build
cd build
cmake ..
make
sudo make install
cd ../..
rm -R glfw-3.2.1
rm glfw-3.2.1.zip
##################################################
# Used by Computational Vision course
echo "installing vision libraries"
apt-get install -qqy python3-tk
pip3 install numpy
pip3 install matplotlib
pip3 install opencv-python
pip3 install scipy
pip3 install scikit-image
##################################################
#install some pdflatex packages
apt-get install -qqy texlive-latex-base texlive-extra-utils texlive-latex-recommended
apt-get install -qqy texlive-generic-recommended
apt-get install -qqy texlive-latex-extra
# dictionary of words in /usr/share/dict/words
apt-get install -qqy wamerican
# attempt to correct a system with broken dependencies in place
apt-get -f -qqy install
### Fix Python Package Permissions (should always run at the end of this)
# Setting the permissions are necessary as pip uses the umask of the user/system, which
# affects the other permissions (which ideally should be o+rx, but Submitty sets it to o-rwx).
# This gets run here in case we make any python package changes.
find /usr/local/lib/python*/dist-packages -type d -exec chmod 755 {} +
find /usr/local/lib/python*/dist-packages -type f -exec chmod 755 {} +
find /usr/local/lib/python*/dist-packages -type f -name '*.py*' -exec chmod 644 {} +
find /usr/local/lib/python*/dist-packages -type f -name '*.pth' -exec chmod 644 {} +
echo "done with RPI specific installs"
|
<filename>algorithm/0/29-divide.go
//给定两个整数,被除数 dividend 和除数 divisor。将两数相除,要求不使用乘法、除法和 mod 运算符。
//
// 返回被除数 dividend 除以除数 divisor 得到的商。
//
// 整数除法的结果应当截去(truncate)其小数部分,例如:truncate(8.345) = 8 以及 truncate(-2.7335) = -2
//
//
//
// 示例 1:
//
// 输入: dividend = 10, divisor = 3
//输出: 3
//解释: 10/3 = truncate(3.33333..) = truncate(3) = 3
//
// 示例 2:
//
// 输入: dividend = 7, divisor = -3
//输出: -2
//解释: 7/-3 = truncate(-2.33333..) = -2
//
//
//
// 提示:
//
//
// 被除数和除数均为 32 位有符号整数。
// 除数不为 0。
// 假设我们的环境只能存储 32 位有符号整数,其数值范围是 [−2³¹, 231 − 1]。本题中,如果除法结果溢出,则返回 231 − 1。
//
// Related Topics 位运算 数学 👍 714 👎 0
package algorithm_0
import "math"
// divide computes dividend / divisor truncated toward zero without using
// the *, / or % operators. Per the problem statement, the only overflowing
// case (MinInt32 / -1) is clamped to MaxInt32.
func divide(dividend int, divisor int) int {
	if dividend == 0 {
		return 0
	} else if dividend == divisor {
		return 1
	}
	// Special-case a MinInt32 dividend: negating it would overflow int32.
	if dividend == math.MinInt32 {
		if divisor == 1 {
			return math.MinInt32
		} else if divisor == -1 {
			return math.MaxInt32
		}
	}
	// |MinInt32| exceeds every other int32 magnitude, so the quotient can
	// only be 0 here (the dividend == divisor case was handled above).
	if divisor == math.MinInt32 {
		return 0
	}
	// Normalize both operands to negative values so that magnitudes up to
	// |MinInt32| stay representable; track the result sign in rev.
	var rev = false
	if dividend > 0 {
		dividend = -dividend
		rev = !rev
	}
	if divisor > 0 {
		divisor = -divisor
		rev = !rev
	}
	// Binary-search the largest quotient q in [1, MaxInt32] such that
	// divisor * q still does not exceed the dividend's magnitude.
	var l, r, res = 1, math.MaxInt32, 0
	for l <= r {
		mid := l + (r-l)>>1
		check := quickAdd(dividend, divisor, mid)
		if check {
			res = mid
			if mid == math.MaxInt32 {
				break
			}
			l = mid + 1
		} else {
			r = mid - 1
		}
	}
	if rev {
		res = -res
	}
	return res
}
// quickAdd reports whether divisor * mid >= dividend, where dividend and
// divisor are both negative (i.e. whether mid copies of divisor do not
// overshoot the dividend's magnitude). It avoids multiplication by
// doubling the addend: odd steps fold the current addend into the running
// sum and decrement mid; even steps double the addend and halve mid.
func quickAdd(dividend, divisor, mid int) bool {
	var res, add = 0, divisor
	for mid > 0 {
		if mid&1 != 0 {
			// Sum would drop below the (negative) dividend: too large.
			if res+add < dividend {
				return false
			}
			res += add
			mid--
		} else {
			// Doubled addend would already overshoot: too large.
			if add+add < dividend {
				return false
			}
			add += add
			mid >>= 1
		}
	}
	return true
}
|
import { Node } from '../src'
import _ from 'underscore'

// Topology: one bound server node fanning ticks out to ten client nodes.
//
// znode1
//   |
//   |
// [clientNodes]
//
(async () => {
  const serverNode = new Node({ bind: 'tcp://127.0.0.1:3000' })

  // Ten client nodes; each stores its index in its options and logs any
  // 'foo' tick it receives.
  const clientNodes = _.range(10).map((index) => {
    const clientNode = new Node({ options: { index } })
    clientNode.onTick('foo', (msg) => {
      console.log(`handling tick on clienNode${index}:`, msg)
    })
    return clientNode
  })

  await serverNode.bind()
  await Promise.all(clientNodes.map((clientNode) => clientNode.connect({ address: serverNode.getAddress() })))

  // Tick only the odd-indexed clients (filter keeps options.index % 2 truthy).
  serverNode.tickAll({
    event: 'foo',
    data: 'tick from znode1.',
    filter: (options) => options.index % 2
  })
})()
class ImageProcessor:
    """Minimal in-memory image filters.

    The image is assumed to be stored on ``self.image`` as a 2D list of
    ``(r, g, b)`` tuples -- TODO: confirm how ``load`` is meant to
    populate it (it is a stub here).
    """

    def load(self):
        # Load the image (not implemented in this snippet).
        pass

    def apply_grayscale_filter(self):
        """Convert the loaded image to grayscale in place.

        Uses the ITU-R BT.601 luma weights (0.2989 R + 0.5870 G + 0.1140 B);
        each pixel becomes (gray, gray, gray).
        """
        for i in range(len(self.image)):
            for j in range(len(self.image[0])):
                r, g, b = self.image[i][j]
                gray = int(0.2989 * r + 0.5870 * g + 0.1140 * b)
                self.image[i][j] = (gray, gray, gray)

    def apply_blur_filter(self, radius):
        """Apply a box blur with the given radius (integer division average).

        FIX: results are written to a fresh buffer so every neighbourhood is
        read from the ORIGINAL pixels; the previous version overwrote pixels
        while still reading them, smearing already-blurred values into the
        averages of later pixels.
        """
        height = len(self.image)
        width = len(self.image[0])
        blurred = [row[:] for row in self.image]
        for i in range(height):
            for j in range(width):
                total_r, total_g, total_b = 0, 0, 0
                count = 0
                for x in range(max(0, i - radius), min(height, i + radius + 1)):
                    for y in range(max(0, j - radius), min(width, j + radius + 1)):
                        r, g, b = self.image[x][y]
                        total_r += r
                        total_g += g
                        total_b += b
                        count += 1
                blurred[i][j] = (total_r // count, total_g // count, total_b // count)
        self.image = blurred

    def apply_edge_detection_filter(self, threshold):
        """Sobel edge detection on the red channel with the given threshold.

        Interior pixels become white (255,255,255) where the gradient
        magnitude exceeds ``threshold`` and black otherwise; border pixels
        are left unchanged (matching the original behaviour).

        FIX: same buffering fix as the blur filter -- gradients are computed
        from the original pixels, not from already-overwritten ones.
        """
        height = len(self.image)
        width = len(self.image[0])
        result = [row[:] for row in self.image]
        for i in range(height):
            for j in range(width):
                if 0 < i < height - 1 and 0 < j < width - 1:
                    gx = ((self.image[i + 1][j - 1][0] + 2 * self.image[i + 1][j][0] + self.image[i + 1][j + 1][0]) -
                          (self.image[i - 1][j - 1][0] + 2 * self.image[i - 1][j][0] + self.image[i - 1][j + 1][0]))
                    gy = ((self.image[i - 1][j + 1][0] + 2 * self.image[i][j + 1][0] + self.image[i + 1][j + 1][0]) -
                          (self.image[i - 1][j - 1][0] + 2 * self.image[i][j - 1][0] + self.image[i + 1][j - 1][0]))
                    gradient = (gx ** 2 + gy ** 2) ** 0.5
                    if gradient > threshold:
                        result[i][j] = (255, 255, 255)
                    else:
                        result[i][j] = (0, 0, 0)
        self.image = result

    def save(self, filename):
        # Save the processed image to the specified filename
        # (not implemented in this snippet).
        pass
import React, { useState } from 'react';
const App = () => {
const [books, setBooks] = useState([]);
const [newBook, setNewBook] = useState('');
const handleChange = e => {
setNewBook(e.target.value);
}
const handleSubmit = e => {
e.preventDefault();
const book = {
title: newBook
};
const newBooks = [...books, book];
setBooks(newBooks);
setNewBook('');
}
const handleDelete = title => {
const newBooks = books.filter(b => b.title !== title);
setBooks(newBooks);
}
return (
<div>
<h3>My Books</h3>
<ul>
{books.map(b => (
<li key={b.title}>
{b.title} <button onClick={() => handleDelete(b.title)}>Delete</button>
</li>
))}
</ul>
<form onSubmit={handleSubmit}>
<input
type="text"
value={newBook}
onChange={handleChange}
/>
<button type="submit">Add new book</button>
</form>
</div>
)
}
export default App; |
import React from "react";
import styled from "styled-components";
import { useLanguage } from "containers/LanguageProvider";
import ImageSupplier from "components/ImageSupplier";
import LocalizedLink from "components/LocalizedLink";
const NotFound = () => {
const { pageString } = useLanguage();
return (
<Wrapper>
<NotFoundImg name="pageNotFound" isBackground={true} alt="">
<Message>
<StyledH1>{pageString[404].h1}</StyledH1>
<StyledP>{pageString[404].p}</StyledP>
<StyledLink to="/">{pageString[404].link}</StyledLink>
</Message>
</NotFoundImg>
</Wrapper>
);
};
// Full-viewport themed container (viewport minus the 6rem header).
const Wrapper = styled.div`
  width: 100%;
  height: 100%;
  min-height: calc(100vh - 6rem);
  background-color: ${({ theme }) => theme.colors.background};
  color: ${({ theme }) => theme.colors.onSurface};
`;
// Centered background illustration sized 20rem x 40rem.
const NotFoundImg = styled(ImageSupplier)`
  background-position: center;
  background-repeat: no-repeat;
  background-size: 20rem 40rem;
  width: 100%;
  height: 100%;
  min-height: calc(100vh - 6rem);
`;
// Translucent text panel; on narrow screens it is centered and the text
// gets a surface-colored outline for contrast against the illustration.
const Message = styled.div`
  position: absolute;
  top: 25%;
  left: max(calc(35% - 20rem), 1rem);
  width: max-content;
  background: ${({ theme }) => theme.colors.background + "BF"};
  box-shadow: 0 0 1rem ${({ theme }) => theme.colors.background};
  @media screen and (max-width: 992px) {
    top: 30%;
    left: calc(50% - 9.5rem);
    ${({ theme }) => `
      text-shadow: 0 0 1px ${theme.colors.surface},
      -2px 0 1px ${theme.colors.surface},
      2px 0 1px ${theme.colors.surface},
      0 -2px 1px ${theme.colors.surface},
      0 2px 1px ${theme.colors.surface},
      2px 2px 1px ${theme.colors.surface},
      2px -2px 1px ${theme.colors.surface},
      -2px 2px 1px ${theme.colors.surface},
      -2px -2px 1px ${theme.colors.surface};`}
  }
`;
const StyledH1 = styled.h1`
  margin-top: 0;
  margin-bottom: 2rem;
  max-width: 20rem;
  font-weight: bold;
`;
const StyledP = styled.p`
  font-size: x-large;
  max-width: 20rem;
  margin-bottom: 2rem;
`;
// Themed link colors, including hover state.
const StyledLink = styled(LocalizedLink)`
  font-size: x-large;
  color: ${({ theme }) => theme.colors.link};
  &:hover {
    color: ${({ theme }) => theme.colors.linkHover};
  }
`;
export default NotFound;
|
var ClientOAuth2 = require('client-oauth2');
var fetch = require('./fetch');
/**
 * Fetches an oauth2 token using the resource-owner (username/password)
 * flow and stores it on auth.oauth.token.
 *
 * @async
 * @param {object} auth - auth object.
 * @return {object}
 */
function ownerFlow(auth) {
  var owner = auth.owner;
  return auth.oauth.client.owner
    .getToken(owner.userName, owner.password)
    .then(function (newToken) {
      auth.oauth.token = newToken;
    });
}
/**
 * Makes a request using OAuth2 when we already hold a token.
 *
 * The bearer token is attached to the request. If the response comes back
 * 401 we assume the token expired, refresh it once, and retry the request
 * a single time with the new token.
 *
 * @async
 * @param {object} ketting - Ketting object.
 * @param {Request} request - Request object.
 * @param {object} init - A list of settings.
 * @return {object}
 */
function fetchWithAccessToken(ketting, request, init) {
  var oauth = ketting.auth.oauth;
  request.headers.set('Authorization', 'Bearer ' + oauth.token.accessToken);
  return fetch(request, init).then(function (response) {
    var unauthorized = !response.ok && response.status === 401;
    if (!unauthorized) {
      return response;
    }
    // Token presumably expired: refresh it and retry exactly once.
    return oauth.refreshToken(ketting).then(function () {
      request.headers.set('Authorization', 'Bearer ' + oauth.token.accessToken);
      return fetch(request, init);
    });
  });
}
module.exports = {
  /**
   * Makes a request using OAuth2
   *
   * Obtains a token first if none is cached yet, then delegates to
   * fetchWithAccessToken (which handles the 401-refresh-retry cycle).
   *
   * @async
   * @param {object} ketting - Ketting object.
   * @param {Request} request - Request object.
   * @param {object} init - A list of settings.
   * @return {object}
   */
  fetch : function(ketting, request, init) {
    if (ketting.auth.oauth.token) {
      return fetchWithAccessToken(ketting, request, init);
    }
    return ketting.auth.oauth.getToken()
      .then(function () {
        // Just call the ketting function again now that we have an access token
        request.headers.set('Authorization', 'Bearer ' + ketting.auth.oauth.token.accessToken);
        return fetch(request, init);
      });
  },
  /**
   * Fetches an oauth2 token and sets it on the given auth object
   *
   * Only the resource-owner ('owner') flow is supported; any other flow
   * value throws synchronously.
   *
   * @async
   * @param {object} auth - Auth object.
   * @return {object}
   */
  getToken : function(auth) {
    if (auth.oauth.flow === 'owner') {
      return ownerFlow(auth);
    }
    throw new Error('Unsupported oauth2 flow');
  },
  /**
   * Refreshes the access token and updates the existing token with the new one
   *
   * @async
   * @param {object} auth - Auth object.
   * @return {object}
   */
  refreshToken : function(auth) {
    return auth.oauth.token.refresh()
      .then(function(updatedToken) {
        auth.oauth.token = updatedToken;
        return;
      });
  },
  /**
   * Sets up the oauth object that will be part of the overall
   * Ketting object
   *
   * Note: 'var oauth = this' captures this module so the returned
   * closures keep working regardless of how they are later invoked.
   *
   * @param {object} ketting - Ketting object.
   * @param {object} auth - Auth options object
   * @return {object}
   */
  setupOAuthObject : function(ketting, auth) {
    var oauth = this;
    var oAuthObject = {
      client: new ClientOAuth2(auth.client),
      getToken: function() {
        return oauth.getToken(ketting.auth);
      },
      refreshToken: function() {
        return oauth.refreshToken(ketting.auth);
      }
    };
    if (auth.owner) {
      oAuthObject.flow = 'owner';
    }
    return oAuthObject;
  }
};
|
<reponame>mani-mishra/nightreads
from django.views.generic import View, FormView
from django.http import JsonResponse
from django.shortcuts import render
from .forms import SubscribeForm, UnsubscribeForm, ConfirmEmailForm
from . import user_service
class IndexView(View):
    """Render the landing page with an (unbound) subscribe form."""

    form_class = SubscribeForm
    template = 'user_manager/index.html'

    def get(self, request):
        context = {'form': self.form_class}
        return render(request, self.template, context)
class SubscribeView(View):
    """Handle subscribe form submissions and mail a confirmation link."""

    form_class = SubscribeForm

    def post(self, request):
        form = self.form_class(request.POST)
        if not form.is_valid():
            return JsonResponse({'errors': form.errors})

        email = form.cleaned_data['email']
        tags = form.cleaned_data['tags']
        user = user_service.get_or_create_user(email=email)

        # Only send a confirmation mail when the tag set actually changed.
        if not user_service.update_user_tags(user=user, tags=tags):
            return JsonResponse({'status': 'No tags updated'})

        key = user_service.generate_key(user=user)
        user_service.send_confirmation_email(
            request=request, user=user, key=key)
        return JsonResponse({'status': 'Email sent'})
class UnsubscribeView(FormView):
    """Serve the unsubscribe form and mail an unsubscribe-confirmation link."""

    form_class = UnsubscribeForm
    template_name = 'user_manager/unsubscribe.html'

    def form_valid(self, form):
        user = user_service.get_user(email=form.cleaned_data['email'])
        if not user:
            return JsonResponse({'error': 'User Not Found'})
        # for_subscription=False marks both key and email as "unsubscribe".
        key = user_service.generate_key(user=user, for_subscription=False)
        user_service.send_confirmation_email(
            request=self.request, user=user, key=key, for_subscription=False)
        return JsonResponse({'status': 'Email sent'})

    def form_invalid(self, form):
        return JsonResponse({'errors': form.errors})
class ConfirmEmailView(View):
    """Confirm a subscribe/unsubscribe action from an emailed link (GET)."""

    form_class = ConfirmEmailForm

    def get(self, request):
        form = self.form_class(request.GET)
        if not form.is_valid():
            return JsonResponse({'errors': form.errors})

        is_subscribed = form.cleaned_data['subscribe']
        user_service.update_subscription(
            user=form.cleaned_data['user'], status=is_subscribed)
        status = 'Subscribed' if is_subscribed else 'Unsubscribed'
        return JsonResponse({'status': status})
|
#!/bin/bash
# Run main_embeded.py once for each experiment id 0..5, sequentially.
for EXE_ID in {0..5}
do
    # Quote the id so an unexpected value can never undergo word splitting.
    python main_embeded.py "$EXE_ID"
done
|
#!/bin/sh
# Build sdist + wheel for pymato and upload them to PyPI with twine.
echo "Building latest version - did you remember a version bump?"
python3.6 setup.py bdist_wheel sdist
echo "Uploading latest version"
# Extract the version from setup.py's "version=" line (keep digits/dots only)
# into a named variable so the upload glob is readable and the value is quoted.
VERSION=$(grep version setup.py | awk -F= '{ print $2 }' | sed 's/[^0-9.]//g')
twine upload dist/pymato-"$VERSION"*
|
import { mapGetters, mapActions } from 'vuex'
import {VMoney} from 'v-money'
import { QuoteTemplate } from 'src/data/quoteTemplate.js'
import _ from 'lodash'
export default {
  name: 'SubmitQuotes',

  // v-money formats the currency inputs in the template.
  directives: { money: VMoney },

  computed: {
    // Expose the shared blank-quote template to the component instance.
    QuoteTemplate () {
      return QuoteTemplate
    }
  },

  data () {
    return {
      effectiveDate: null,
      effectiveDateFormatted: null,
      expirationDate: null,
      expirationDateFormatted: null,
      exclusion: false,
      ny: false,
      leadCommission: 15,
      excessCommission: 10,
      // Options for the "limits requested" dropdown.
      limitsRequestedItems: [
        { title: '$25M' },
        { title: '$50M' },
        { title: '$75M' }
      ],
      carrierItems: [
        { title: 'Great American' },
        { title: 'Fireman\'s Fund' }
      ],
      // One pre-populated quote; more are cloned from QuoteTemplate.
      quotes: [
        {
          limitsRequested: '$75M',
          annualItems: [
            {
              limit: '$25M',
              carrier: 'Great American',
              premium: 0.00,
              taxes: 0.00,
              limitFees: [
                {
                  limitFee: 'NJ Surcharge',
                  limitAmount: 0
                }
              ],
              subTotal: 0
            },
            {
              limit: '$25M xs $25M',
              carrier: 'Great American',
              premium: 0.00,
              taxes: 0.00,
              limitFees: [
                {
                  limitFee: 'NJ Surcharge',
                  limitAmount: 0
                }
              ],
              subTotal: 0
            },
            {
              limit: '$25M xs $50M',
              carrier: 'Fireman\'s Fund',
              premium: 0.00,
              taxes: 0.00,
              limitFees: [
                {
                  limitFee: 'NJ Surcharge',
                  limitAmount: 0
                }
              ],
              subTotal: 0
            }
          ],
          totalFeeAmount: [
            {
              totalFee: 'RPG Fee',
              amount: 0
            }
          ],
          annualTotal: 0
        }
      ],
      // v-money configuration shared by all currency fields.
      money: {
        decimal: '.',
        thousands: ',',
        precision: 2,
        masked: false
      }
    }
  },

  methods: {
    ...mapActions([
      'addEvent',
      'updateMessage'
    ]),

    // Append a fresh quote deep-cloned from the shared template so edits
    // to one quote never leak into another.
    onAddQuote () {
      this.quotes.push(_.cloneDeep(this.QuoteTemplate))
    },

    onDeleteQuote (quoteIndex) {
      if (window.confirm("Are you sure you want to delete this quote?")) {
        this.quotes.splice(quoteIndex, 1)
      }
    },

    selectLimit (quoteIndex, limit) {
      this.quotes[quoteIndex].limitsRequested = limit
    },

    // Whether the annual item at this index is shown for the requested
    // limit: $25M shows 1 row, $50M shows 2, $75M shows 3.
    checkLimit (annualItemIndex, limitsRequested) {
      // Fix: strict equality (was ==); index is always a number here.
      if (limitsRequested === '$25M' && annualItemIndex === 0) {
        return true
      } else if (limitsRequested === '$50M' && annualItemIndex < 2) {
        return true
      } else if (limitsRequested === '$75M' && annualItemIndex < 3) {
        return true
      }
      return false
    },

    onSubmit () {
      // NOTE(review): event payload is hard-coded demo data — confirm
      // before shipping.
      this.addEvent({
        type: 'quote',
        action: 'submit',
        fname: 'Ian',
        lname: 'Sterling',
        time: '4:38pm'
      })
      this.updateMessage('Your quote(s) have been submitted.')
      this.$router.replace('/application/feed/quoted')
    },

    onCancel () {
      if (window.confirm("Are you sure you want to cancel this quote?")) {
        this.$router.replace({ path: '/application/feed' })
      }
    },

    addLimitFee (limitFees) {
      limitFees.push({
        limitFee: '',
        limitAmount: '0.00'
      })
    },

    removeLimitFee (limitFees, i) {
      limitFees.splice(i, 1)
    },

    addTotalFee (quoteIndex) {
      this.quotes[quoteIndex].totalFeeAmount.push({
        totalFee: '',
        amount: 0
      })
    },

    removeTotalFee (quoteIndex, i) {
      // Fix: removed leftover debug console.log statements.
      this.quotes[quoteIndex].totalFeeAmount.splice(i, 1)
    },

    // Recompute one annual item's subtotal (premium + taxes + limit fees),
    // store it on the item, and return it formatted for display.
    calcSubtotal (annualItem) {
      let limitAmount = 0
      for (const limitFee of annualItem.limitFees) {
        limitAmount += this.formatDecimal(limitFee.limitAmount)
      }
      annualItem.subTotal = this.formatDecimal(annualItem.premium) + this.formatDecimal(annualItem.taxes) + limitAmount
      return this.formatPrice(annualItem.subTotal)
    },

    // Recompute every subtotal for a quote plus the quote-level fees, and
    // store the grand total on the quote.
    calcTotalAmount (quoteIndex) {
      const quote = this.quotes[quoteIndex]
      quote.annualTotal = 0
      // Fix: the original redeclared `var i` for the second loop; use
      // for...of with distinct bindings instead.
      for (const annualItem of quote.annualItems) {
        let limitAmount = 0
        for (const limitFee of annualItem.limitFees) {
          limitAmount += this.formatDecimal(limitFee.limitAmount)
        }
        annualItem.subTotal = this.formatDecimal(annualItem.premium) + this.formatDecimal(annualItem.taxes) + limitAmount
        quote.annualTotal += annualItem.subTotal
      }
      for (const fee of quote.totalFeeAmount) {
        quote.annualTotal += this.formatDecimal(fee.amount)
      }
    },

    // Format a number as a comma-grouped string with two decimals,
    // e.g. 1234.5 -> "1,234.50".
    formatPrice (value) {
      // Fix: dropped the dead `.replace(',', '.')` — toFixed never emits
      // commas, so it could not match.
      const val = (value / 1).toFixed(2)
      return val.replace(/\B(?=(\d{3})+(?!\d))/g, ",")
    },

    // Parse a possibly comma-grouped currency string/number into a float.
    formatDecimal (value) {
      const val = value.toString().replace(/,/g, '')
      return parseFloat(val)
    }
  }
}
|
#!/bin/bash
# Environment configuration sourced by the build/packaging scripts.

# Steam app ids this build targets (space-separated list).
export STEAM_APP_ID_LIST="1010750 299030"
# License file shipped alongside the built package.
export LICENSE_PATH="./source/source/blood/gpl-2.0.txt"
# presumably a flag telling the packager to also build the shared/common
# package — TODO confirm against the consuming script
export COMMON_PACKAGE="1"
# Third-party libraries the build depends on.
export LIBRARIES="yasm libvpx"
|
<filename>ofcourse/cli/openshift_utils.py
"""
Author: <NAME> <<EMAIL>>
License: Apache 2.0
"""
import logging
import os
import pkg_resources
import re
import six
import socket
import uuid
import requests
import time
import oshift
from six.moves import StringIO
import dulwich.porcelain as git
class NotFound(Exception):
    """Raised when an OpenShift application cannot be located.

    Fix: inherits from ``Exception`` instead of ``BaseException`` so that
    generic ``except Exception`` handlers treat it as an ordinary
    application error (BaseException is reserved for exit-style signals).
    Existing ``except NotFound`` callers are unaffected.
    """
    pass
# Files injected into the course repository before pushing to OpenShift.
# Maps filename -> {"contents": file body}. The string bodies are written
# verbatim by push(); do not reformat them.
openshift_files = {
    # Minimal setup.py pinning ofcourse to at least the locally installed
    # version, so the OpenShift cartridge installs matching dependencies.
"setup.py": {
"contents": """from setuptools import setup
setup(name='thecourse',
version='1.0',
description='courseware on openshift',
author='Dr. Professor',
author_email='<EMAIL>',
url='http://www.python.org/sigs/distutils-sig/',
install_requires=['ofcourse>={version}'],
)""".format(version=pkg_resources.get_distribution('ofcourse').version),
},
    # WSGI entry point expected by OpenShift's python-2.7 cartridge;
    # activates the cartridge's virtualenv then exposes the Flask app.
"wsgi.py": {
"contents": """#!/usr/bin/python
# IMPORTANT: Please do not make changes to this file unless you know what
# you're doing. Thank you.
import os
virtenv = os.environ['OPENSHIFT_PYTHON_DIR'] + '/virtenv/'
virtualenv = os.path.join(virtenv, 'bin/activate_this.py')
try:
execfile(virtualenv, dict(__file__=virtualenv))
except IOError:
pass
import ofcourse.site
application = ofcourse.site.app""",
},
}
class TempBranch(object):
    """Context manager that creates a temporary git branch, switches HEAD
    to it, and restores the original branch on exit.

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source; the trailing ``git.reset`` is assumed to run after the branch
    bookkeeping in the no-exception path — confirm against upstream.
    """

    def __init__(self, name, repo, delete=True):
        # Fully-qualified ref name for the temporary branch.
        self.branch = 'refs/heads/{}'.format(name)
        self.delete = delete
        self.repo = repo
        # save the starting branch so we know where to go back to
        self.start = self.repo.refs.read_ref('HEAD').replace('ref: ', '')

    def __enter__(self):
        # Point the new branch at the current HEAD commit, then switch to it.
        self.repo.refs.add_if_new(self.branch, self.repo.head())
        self.repo.refs.set_symbolic_ref('HEAD', self.branch)

    def __exit__(self, exc_type, value, tb):
        if value is None:
            # No exception: switch HEAD back to the original branch.
            self.repo.refs.set_symbolic_ref('HEAD', self.start)
            # lol, only reset --hard is supported
            if self.delete:
                # Drop the temp branch (only if nothing moved it meanwhile).
                self.repo.refs.remove_if_equals(self.branch, None)
        else:
            six.reraise(exc_type, value, tb)
        # Sync the working tree with the restored HEAD.
        git.reset(self.repo, "hard")
def push(name, api, domain):
    """Push the current repository to the named OpenShift app.

    Writes the OpenShift deployment files on a throwaway branch, commits
    and pushes that branch, then cleans the files up again.

    :param name: application name
    :param api: authenticated oshift API client
    :param domain: OpenShift domain the app lives in
    :return: the deployed app's public URL
    """
    repo = git.Repo(os.getcwd())
    branch = "temp-{}".format(str(uuid.uuid4())[:8])
    set_deploy_branch(name, branch, api, domain)
    remote = git_url(name, api, domain)

    if is_dirty():
        print("Nuking changes.")
        git.reset(repo, "hard")

    with TempBranch(branch, repo, delete=True):
        # Materialize the deployment files and stage them.
        for fname, file_info in openshift_files.items():
            with open(fname, 'w') as f:
                f.write(file_info.get("contents", ""))
            repo.stage(fname)
        repo.do_commit("Commit openshift files")

        push_out = StringIO()
        push_err = StringIO()
        print("Pushing to openshift (may take a few minutes)")
        git.push(repo, remote, "refs/heads/{}".format(branch),
                 outstream=push_out, errstream=push_err)
        push_out.seek(0)
        out = push_out.read()
        if not re.match(r'^Push to .* successful.', out):
            print("There was a failure while pushing")
            print("---BEGIN STDERR---")
            push_err.seek(0)
            print(push_err.read())
            print("---BEGIN STDOUT---")
            print(out)
            print("There was a failure while pushing")

        # Remove the injected files again. Fix: the original used
        # ``map(os.remove, ...)``, which is lazy on Python 3 and never
        # actually deleted anything; an explicit loop always runs.
        git.rm(repo, list(openshift_files.keys()))
        for fname in openshift_files:
            os.remove(fname)

    return get_app(name, api, domain)['app_url']
def is_clean():
    """Check for uncommitted changes. True if the working tree is clean."""
    return not is_dirty()
def is_dirty():
    """Check for uncommitted changes. True if dirty.

    Fix: the original evaluated ``s.staged.values() + [s.unstaged]``, which
    raises TypeError on Python 3 (``dict.values()`` is a view, not a list).
    The expression below is the 2/3-safe equivalent: dirty when any staged
    bucket is non-empty or any unstaged paths exist.
    """
    repo = git.Repo(os.getcwd())
    s = git.status(repo)
    return any(s.staged.values()) or bool(s.unstaged)
def get_api(token):
    """Return an authenticated Openshift API client.

    Raises oshift's logger threshold to FATAL so library chatter does not
    pollute CLI output.
    """
    oshift.log.setLevel(logging.FATAL)
    return oshift.Openshift("openshift.redhat.com", token=token)
def generate_token(uname, passwd):
    """Create a session-scoped OpenShift authorization token.

    :param uname: OpenShift account user name
    :param passwd: OpenShift account password
    :return: the token string ('' if the response carried none)
    :raises Exception: when the broker does not answer 201 Created
    """
    url = "https://openshift.redhat.com/broker/rest/user/authorizations"
    credentials = requests.auth.HTTPBasicAuth(uname, passwd)
    session = requests.post(url, auth=credentials,
                            params={'scope': 'session'})
    if session.status_code != 201:
        raise Exception("Uhoh {} response={}".format(session.status_code,
                                                     session.text))
    return session.json().get("data", {}).get("token", "")
def new_app(name, api, domain, wait_until_available=True):
    """Create the OpenShift app if it does not already exist.

    :param name: application name
    :param api: authenticated oshift API client
    :param domain: OpenShift domain
    :param wait_until_available: poll until the app's DNS name resolves
    """
    try:
        get_app(name, api, domain)
        return
    except NotFound:
        # App doesn't exist yet; fall through and create it.
        # Fix: was a bare ``except: pass``, which also swallowed real API
        # errors (and KeyboardInterrupt); only absence should be ignored.
        pass

    api.app_create(name, ['python-2.7'], domain_name=domain)
    if not wait_until_available:
        return

    while True:
        try:
            app = get_app(name, api, domain)
            socket.getaddrinfo(requests.utils.urlparse(
                app['app_url']).netloc, 80)
            break
        except NotFound:
            print("Waiting for new app...")
            time.sleep(5)
        except socket.gaierror as e:
            # errno -2 (EAI_NONAME): DNS has not propagated yet, keep polling.
            if e.errno != -2:
                raise  # bare raise preserves the original traceback
            print("Waiting for new app...")
            time.sleep(5)
def get_app(name, api, domain):
    """Return the app record named ``name`` within ``domain``.

    :param name: application name to look up
    :param api: authenticated oshift API client
    :param domain: OpenShift domain to search
    :return: the first matching app dict
    :raises NotFound: when no app with that name exists
    """
    for app in api.app_list(domain_name=domain)[1]:
        if app.get("name", "") == name:
            return app
    raise NotFound("Could not find app {}".format(name))
def git_url(name, api, domain):
    """Return the app's git remote in scp-like syntax.

    Converts "ssh://user@host/dir/repo.git" into
    "user@host:dir/repo.git" (the form dulwich's push accepts).
    """
    remote = get_app(name, api, domain)['git_url']
    without_scheme = remote.replace("ssh://", "")
    return without_scheme.replace("/", ":", 1)
def set_deploy_branch(name, branch, api, domain):
    """Point the app's deployment branch at ``branch``.

    No-op when the app already deploys from that branch, so repeated calls
    avoid redundant API updates.
    """
    current = get_app(name, api, domain)['deployment_branch']
    if current != branch:
        api.app_action('UPDATE', name, domain_name=domain,
                       deployment_branch=branch)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.