text stringlengths 1 1.05M |
|---|
/// A parsed shell-like command.
#[derive(Debug, PartialEq)]
enum Action {
    /// Run a command, optionally redirecting its output to a file
    /// (`exec "<command>" [>> "<file>"]`).
    Execute(String, Option<String>),
    /// Change the working directory to the given path.
    /// (Not constructed in this chunk; presumably parsed elsewhere.)
    ChangeDirectory(String),
    /// Echo the given text. (Not constructed in this chunk.)
    Echo(String),
}

/// Parses an `exec` command of the form `exec "<command...>" [>> "<file>"]`.
///
/// Quotes around the command and the output file are stripped, so
/// `exec "ls home" >> "output.txt"` yields `Execute("ls home", Some("output.txt"))`.
/// (The original only stripped quotes from the redirect target, so the command
/// kept its quote characters and the module's own test failed.)
///
/// # Errors
/// * `"Invalid command format"` — fewer than two whitespace-separated tokens.
/// * `"Invalid command type"`  — first token is not `exec`.
/// * `"No command provided"`   — nothing between `exec` and the redirect.
fn command_exec(input: &[char]) -> Result<Action, &'static str> {
    let input_str: String = input.iter().collect();
    let parts: Vec<&str> = input_str.split_whitespace().collect();
    if parts.len() < 2 {
        return Err("Invalid command format");
    }
    if parts[0] != "exec" {
        return Err("Invalid command type");
    }
    let mut command = String::new();
    let mut output_file: Option<String> = None;
    let mut output_flag = false;
    for part in parts.iter().skip(1) {
        if output_flag {
            // Token after `>>` is the redirect target; strip surrounding quotes.
            output_file = Some(part.trim_matches('"').to_string());
            output_flag = false;
        } else if *part == ">>" {
            output_flag = true;
        } else {
            // Strip surrounding quotes from command tokens too, so a quoted
            // command like `"ls home"` becomes `ls home`.
            command.push_str(part.trim_matches('"'));
            command.push(' ');
        }
    }
    if command.is_empty() {
        return Err("No command provided");
    }
    Ok(Action::Execute(command.trim().to_string(), output_file))
}
#[cfg(test)]
mod tests {
    use super::*;

    // `exec` with a quoted command plus `>>` redirection should parse into
    // the unquoted command string and the captured output-file name.
    #[test]
    fn test_parse_execute_with_capture() {
        let input = r#"exec "ls home" >> "output.txt""#.chars().collect::<Vec<_>>();
        let r = command_exec(&input);
        assert_eq!(
            r,
            Ok(Action::Execute("ls home".into(), Some("output.txt".into())))
        )
    }
}
#!/bin/bash
# Exports tank-battle game data from a Salesforce org into CSV files under
# data/, one sfdx SOQL query per entity. Each step echoes the command it is
# about to run and aborts with sfdx's exit code if the export fails.

echo "Exporting Leagues definitions..."
echo 'sfdx force:data:soql:query -q "SELECT Name, League__c FROM Account WHERE League__c != null" -r csv > data/leagues.csv'
sfdx force:data:soql:query -q "SELECT Name, League__c FROM Account WHERE League__c != null" -r csv > data/leagues.csv
rc=$?
if [ $rc -ne 0 ]; then
  echo "could not export Leagues definitions from Accounts";
  exit $rc;
fi
echo "Exported Leagues successfully"

echo "Exporting Authors..."
echo 'sfdx force:data:soql:query -q "SELECT LastName, GithubUserName__c FROM Contact WHERE GithubUserName__c != null" -r csv > data/authors.csv'
sfdx force:data:soql:query -q "SELECT LastName, GithubUserName__c FROM Contact WHERE GithubUserName__c != null" -r csv > data/authors.csv
rc=$?
if [ $rc -ne 0 ]; then
  echo "could not export Authors from Contacts";
  exit $rc;
fi
echo "Exported Authors successfully"

echo "Exporting Tank Models..."
echo 'sfdx force:data:soql:query -q "SELECT Name, IsActive, Author__r.GithubUserName__c, TankModel__c, Family FROM Product2 WHERE TankModel__c != null" -r csv > data/tankModels.csv'
sfdx force:data:soql:query -q "SELECT Name, IsActive, Author__r.GithubUserName__c, TankModel__c, Family FROM Product2 WHERE TankModel__c != null" -r csv > data/tankModels.csv
rc=$?
if [ $rc -ne 0 ]; then
  echo "could not export Tank Models from Products";
  exit $rc;
fi
echo "Exported Tank Models successfully"

echo "Exporting Battles..."
echo 'sfdx force:data:soql:query -q "SELECT Name, StageName, CloseDate, Account.League__c, FieldDefinition__c, MaxRounds__c, InitialLiveLevel__c, BattleId__c FROM Opportunity WHERE Account.League__c != null" -r csv > data/battles.csv'
sfdx force:data:soql:query -q "SELECT Name, StageName, CloseDate, Account.League__c, FieldDefinition__c, MaxRounds__c, InitialLiveLevel__c, BattleId__c FROM Opportunity WHERE Account.League__c != null" -r csv > data/battles.csv
rc=$?
if [ $rc -ne 0 ]; then
  echo "could not export Battles from Opportunities";
  exit $rc;
fi
echo "Exported Battles successfully"

echo "Exporting Battle Participants..."
# Fixed: the echoed command said "afdx" instead of "sfdx" and advertised a
# BattleHistory__c column that the command actually run below does not select.
echo 'sfdx force:data:soql:query -q "SELECT PlayerId__c, IsWinner__c, Product2.TankModel__c, Quantity, TotalPrice, Opportunity.BattleId__c, NumberOfLives__c, Score__c FROM OpportunityLineItem WHERE Opportunity.Account.League__c != null" -r csv > data/players.csv'
sfdx force:data:soql:query -q "SELECT PlayerId__c, IsWinner__c, Product2.TankModel__c, Quantity, TotalPrice, Opportunity.BattleId__c, NumberOfLives__c, Score__c FROM OpportunityLineItem WHERE Opportunity.Account.League__c != null" -r csv > data/players.csv
rc=$?
if [ $rc -ne 0 ]; then
  # Fixed typo: "Opportuniy" -> "Opportunity".
  echo "could not export Battle Participants from Opportunity Line Items";
  exit $rc;
fi
echo "Exported Battle Participants successfully"
import { DELETE_COMBATANT } from "../actions/types";
import flow from "lodash/fp/flow";
import { removeCombatantFromState } from "./transformers/combatants";
import { removeCombatantIdFromInitiative } from "./transformers/order";

// Handles DELETE_COMBATANT: removes the combatant from the combatant map and
// from the initiative order, returning the transformed state.
export function reducer(state, action) {
  const combatantId = action.payload;
  const removeEverywhere = flow(
    removeCombatantFromState(combatantId),
    removeCombatantIdFromInitiative(combatantId),
  );
  return removeEverywhere(state);
}

export const type = DELETE_COMBATANT;
|
<filename>hexa/plugins/connector_airflow/tests/responses/__init__.py
# Canned Airflow REST API payloads, used as mocked HTTP responses in the
# connector_airflow tests.

# GET /dags — two DAGs, both active but paused, both on an every-minute cron
# schedule. "<KEY>" is a placeholder for the opaque file_token value.
dags = {
    "dags": [
        {
            "dag_id": "hello_world",
            "description": "Hello world example",
            "file_token": "<KEY>",
            "fileloc": "/opt/airflow/dags/repo/dags/helloworld.py",
            "is_active": True,
            "is_paused": True,
            "is_subdag": False,
            "owners": ["airflow"],
            "root_dag_id": None,
            "schedule_interval": {
                "__type": "CronExpression",
                "value": "* * * * *",
            },
            "tags": [],
        },
        {
            "dag_id": "same_old",
            "description": "Same old example",
            "file_token": "<KEY>",
            "fileloc": "/opt/airflow/dags/repo/dags/sameold.py",
            "is_active": True,
            "is_paused": True,
            "is_subdag": False,
            "owners": ["airflow"],
            "root_dag_id": None,
            "schedule_interval": {
                "__type": "CronExpression",
                "value": "* * * * *",
            },
            "tags": [],
        },
    ],
    "total_entries": 2,
}

# Individual DAG-run payloads for hello_world (both completed successfully).
dag_run_hello_world_1 = {
    "conf": {},
    "dag_id": "hello_world",
    "dag_run_id": "hello_world_run_1",
    "end_date": "2021-10-08T16:42:16.189200+00:00",
    "execution_date": "2021-10-08T16:41:00+00:00",
    "external_trigger": False,
    "start_date": "2021-10-08T16:42:00.830209+00:00",
    "state": "success",
}
dag_run_hello_world_2 = {
    "conf": {},
    "dag_id": "hello_world",
    "dag_run_id": "hello_world_run_2",
    "end_date": "2021-10-08T16:43:16.629694+00:00",
    "execution_date": "2021-10-08T16:42:00+00:00",
    "external_trigger": False,
    "start_date": "2021-10-08T16:43:01.101863+00:00",
    "state": "success",
}

# GET /dags/hello_world/dagRuns — list wrapper around the two runs above.
dag_runs_hello_world = {
    "dag_runs": [
        dag_run_hello_world_1,
        dag_run_hello_world_2,
    ],
    "total_entries": 2,
}

# Individual DAG-run payloads for same_old (one success, one still queued).
dag_run_same_old_1 = {
    "conf": {},
    "dag_id": "same_old",
    "dag_run_id": "same_old_run_1",
    "end_date": "2021-10-08T16:42:16.189200+00:00",
    "execution_date": "2021-10-08T16:41:00+00:00",
    "external_trigger": False,
    "start_date": "2021-10-08T16:42:00.830209+00:00",
    "state": "success",
}
dag_run_same_old_2 = {
    "conf": {},
    "dag_id": "same_old",
    "dag_run_id": "same_old_run_2",
    "end_date": "2021-10-09T16:42:16.189200+00:00",
    "execution_date": "2021-10-09T16:41:00+00:00",
    "external_trigger": False,
    "start_date": "2021-10-09T16:42:00.830209+00:00",
    "state": "queued",
}

# GET /dags/same_old/dagRuns — list wrapper around the two runs above.
dag_runs_same_old = {
    "dag_runs": [
        dag_run_same_old_1,
        dag_run_same_old_2,
    ],
    "total_entries": 2,
}
|
<reponame>geethavijay/ds-algo-py<filename>puzzles/test_smallest_subsequence.py
import sys, unittest

# Sample inputs kept for manual experimentation.
smaller = 'ACE'
bigger = 'ABCDEFGHAIJKCRTYEPUIOACED'

# Keys of the candidate-window records built by findSmallestSubsequence.
START = 'S'      # index in `bigger` where the candidate window starts
REMAINING = 'R'  # characters of `smaller` not yet seen inside this window
END = 'E'        # index in `bigger` where the window became complete
LENGTH = 'L'     # END - START (sys.maxsize while the window is incomplete)


def findSmallestSubsequence(bigger, smaller):
    """Return the candidate-window record for the smallest span of `bigger`
    containing every character of `smaller` (treated as a set).

    A new candidate window is opened at every occurrence of a relevant
    character; each relevant character is then credited to every open window.
    A perfectly contiguous match (span of len(smaller) characters) is
    returned as soon as it is found.
    """
    candidates = []
    for index, ch in enumerate(bigger):
        if ch not in smaller:
            continue
        # Open a fresh window starting at this character.
        candidates.append({START: index, LENGTH: sys.maxsize, REMAINING: set(smaller)})
        # Credit `ch` to every window, closing those that just completed.
        for window in candidates:
            window[REMAINING] -= {ch}
            if window[LENGTH] == sys.maxsize and not window[REMAINING]:
                window[END] = index
                window[LENGTH] = window[END] - window[START]
                if window[LENGTH] == len(smaller) - 1:
                    return window
    # No contiguous match: hand back the shortest completed window
    # (stable sort keeps the earliest one among ties).
    return sorted(candidates, key=lambda w: w[LENGTH])[0]


class Test(unittest.TestCase):
    def test_balance(self):
        # Placeholder: no assertions yet.
        pass


if __name__ == '__main__':
    unittest.main()
|
<gh_stars>0
import { useCallback, useEffect, useMemo, useState } from 'react';

import ITitle from 'modules/managePlayers/domain/entities/ITitle';
import makeGetTitlesService from 'modules/managePlayers/services/factories/makeGetTitlesService';
// Fixed: both context hooks come from the same module; the original imported
// it twice (one line with broken brace spacing).
import { useSessionContext, useToastContext } from 'shared/view/contexts';

interface UseFetchTitlesController {
  titles: ITitle[];
  loading: boolean;
  fetchTitles: () => Promise<void> | undefined;
}

/**
 * Controller hook that fetches the player titles on mount and exposes the
 * list, a loading flag and a manual refetch function.
 *
 * Errors are surfaced through the toast context; when the service reports
 * `shouldLogout` the current session is terminated.
 */
export default function useFetchTitlesController(): UseFetchTitlesController {
  const [loading, setLoading] = useState(true);
  const [titles, setTitles] = useState<ITitle[]>([]);

  // Service instance is created once per mount.
  const getTitlesService = useMemo(() => makeGetTitlesService(), []);

  const toast = useToastContext();
  const session = useSessionContext();

  const fetchTitles = useCallback(async () => {
    setLoading(true);
    const { titles, error, shouldLogout } = await getTitlesService.execute();
    setLoading(false);

    if (error) {
      toast.showError(error);
      if (shouldLogout) await session.logout();
      return;
    }

    if (titles) setTitles(titles);
  }, [getTitlesService, session, toast]);

  useEffect(() => {
    fetchTitles();
  }, [fetchTitles]);

  return {
    titles,
    loading,
    fetchTitles,
  };
}
|
// Filters a hard-coded user list down to the users whose name appears among
// the command-line arguments, then prints each match.
//
// Fixed: `{name: "John", age: 20}` is not valid Swift, and the original
// closure compared a whole element against the `[String]` argument list.
// Users are modeled as a struct so the filter can compare names.
struct User {
    let name: String
    let age: Int
}

let names = CommandLine.arguments

let users = [
    User(name: "John", age: 20),
    User(name: "Alice", age: 18),
    User(name: "Bob", age: 25),
    User(name: "David", age: 21),
]

let filteredUsers = users.filter { names.contains($0.name) }

for user in filteredUsers {
    print(user)
}
<filename>example/main.go<gh_stars>10-100
// Simple groupcache server running on :8080
package main
import (
"log"
"net/http"
"github.com/brendanjryan/groupcache-bazel"
)
const (
	// addr is the host:port the cache server binds to.
	addr = ":8080"
)
// main starts a groupcache HTTP pool bound to addr and serves it on the
// default mux. ListenAndServe only returns on failure, so its error must not
// be discarded — log it and exit.
func main() {
	pool := groupcache.NewHTTPPool(addr)
	log.Println("server listening on", addr)
	pool.Set(addr)
	log.Fatal(http.ListenAndServe(addr, nil))
}
|
/**
* Copyright 2018 The Feign Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package reactivefeign;
import reactor.core.Exceptions;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuples;
import java.time.Duration;
import java.util.function.Function;
/**
 * Base retry policy: subclasses decide, per error and attempt number, whether
 * to retry and after how long a delay.
 *
 * @author <NAME>
 */
public abstract class SimpleReactiveRetryPolicy implements ReactiveRetryPolicy {
  /**
   * Computes the retry delay for the given failure.
   *
   * @param error the error raised by the previous attempt
   * @param attemptNo 1-based attempt number
   * @return delay in milliseconds before retrying; 0 to retry immediately;
   *         -1 (any negative value) if the call should not be retried
   */
  abstract long retryDelay(Throwable error, int attemptNo);

  @Override
  public Function<Flux<Throwable>, Flux<Throwable>> toRetryFunction() {
    return errors -> errors
        // Pair each error with its 1-based attempt index.
        .zipWith(Flux.range(1, Integer.MAX_VALUE), (error, index) -> {
          long delay = retryDelay(error, index);
          if (delay >= 0) {
            return Tuples.of(delay, error);
          } else {
            // Negative delay: stop retrying and rethrow the original error.
            throw Exceptions.propagate(error);
          }
        }).flatMap(
            // Positive delay: wait, then re-emit the error to trigger the
            // retry; zero delay: re-emit immediately.
            tuple2 -> tuple2.getT1() > 0
                ? Mono.delay(Duration.ofMillis(tuple2.getT1()))
                    .map(time -> tuple2.getT2())
                : Mono.just(tuple2.getT2()));
  }
}
|
<reponame>UVG-Teams/tutos-mobile
// Action types for the async fetch-languages lifecycle (started / finished / failed).
export const FETCH_LANGUAGES_STARTED = 'FETCH_LANGUAGES_STARTED';
export const FETCH_LANGUAGES_FINISHED = 'FETCH_LANGUAGES_FINISHED';
export const FETCH_LANGUAGES_FAILED = 'FETCH_LANGUAGES_FAILED';
package io.iohk.iodb.bench
import java.io.File
import java.util.concurrent.atomic.AtomicLong
import io.iohk.iodb.{ByteArrayWrapper, ShardedStore, Store, TestUtils}
// Benchmark: pre-loads a store with random key/values, then measures the
// per-block processing time over `Blocks` simulated blocks, for both a
// ShardedStore and a RocksStore.
object BlockProcessing extends Benchmark with App {
  val InitialSize = 5000000
  val InputsPerBlock = 5500
  val OutputsPerBlock = 6000
  val Blocks = 2000

  // Cache of previously inserted keys. Must be a `var` extended with `++=`:
  // the original declared a `val` and evaluated `keysCache ++ toInsert...`,
  // discarding the concatenation result, so the cache never grew.
  // NOTE(review): with Blocks = 2000 but only 1000 pre-load iterations,
  // `v % Blocks == 0` never fires — confirm the intended modulus.
  var keysCache = Seq[ByteArrayWrapper]()
  var version = new AtomicLong(1)

  def bench(store: Store, dir: File): Unit = {
    // Pre-load InitialSize random key/values in 1000 batches.
    (1 to 1000).foreach { v =>
      val toInsert = (1 to InitialSize / 1000).map(_ => randomKV())
      if (v % Blocks == 0) keysCache ++= toInsert.map(_._1)
      store.update(version.incrementAndGet(), Seq.empty, toInsert)
    }
    println("Initial data is loaded into the store")

    // Process the blocks, timing each and threading the per-run key cache.
    val (_, ts) = (1L to Blocks).foldLeft((Seq[ByteArrayWrapper](), Seq[Long]())) { case ((cache, times), v) =>
      val (time, newCache) = TestUtils.runningTime(processBlock(version.incrementAndGet, store, InputsPerBlock, OutputsPerBlock, cache).get)
      println(s"Block processing time for block# $v: " + time)
      (newCache, times ++ Seq(time))
    }

    val totalTime = ts.sum
    println(s"Store: $store")
    println(s"Total processing time: $totalTime")
    println(s"Avg block processing time: ${totalTime / Blocks.toFloat}")

    store.close()
    TestUtils.deleteRecur(dir)
  }

  var dir = TestUtils.tempDir()
  bench(new ShardedStore(dir, keySize = KeySize), dir)
  println("===========================")
  dir = TestUtils.tempDir()
  bench(new RocksStore(dir), dir)
}
./bin/sunny clientid 062858142435 |
<filename>src/main/java/org/vertx/java/addons/old/stomp/Frame.java
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vertx.java.addons.old.stomp;
import org.vertx.java.core.buffer.Buffer;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Map;
/**
 * A STOMP wire frame: a command, key/value headers and an optional body.
 *
 * <p>The static factories build the standard frames of the STOMP
 * conversation (CONNECT, CONNECTED, SUBSCRIBE, UNSUBSCRIBE, SEND, RECEIPT).
 * {@link #toBuffer()} serialises the frame as the UTF-8 header section,
 * the raw body and a single trailing NUL byte.
 */
class Frame {
    public String command;
    public final Map<String, String> headers;
    public final Buffer body;

    /** Creates a frame with the given command, header map and body. */
    public Frame(String command, Map<String, String> headers, Buffer body) {
        this.command = command;
        this.headers = headers;
        this.body = body;
    }

    /** Creates a frame with a fresh, mutable header map (initial capacity 4). */
    public Frame(String command, Buffer body) {
        this.command = command;
        this.headers = new HashMap<>(4);
        this.body = body;
    }

    /** CONNECT frame without credentials (login/passcode headers are null). */
    protected static Frame connectFrame() {
        return connectFrame(null, null);
    }

    /** CONNECT frame carrying the given credentials. */
    protected static Frame connectFrame(String username, String password) {
        Frame frame = new Frame("CONNECT", null);
        frame.headers.put("login", username);
        frame.headers.put("passcode", password);
        return frame;
    }

    /** CONNECTED reply frame carrying the session id. */
    protected static Frame connectedFrame(String sessionID) {
        Frame frame = new Frame("CONNECTED", null);
        frame.headers.put("session", sessionID);
        return frame;
    }

    /** SUBSCRIBE frame for the given destination. */
    protected static Frame subscribeFrame(String destination) {
        Frame frame = new Frame("SUBSCRIBE", null);
        frame.headers.put("destination", destination);
        return frame;
    }

    /** UNSUBSCRIBE frame for the given destination. */
    protected static Frame unsubscribeFrame(String destination) {
        Frame frame = new Frame("UNSUBSCRIBE", null);
        frame.headers.put("destination", destination);
        return frame;
    }

    /** SEND frame with a UTF-8 encoded string body; sets content-length. */
    protected static Frame sendFrame(String destination, String body) {
        Buffer buff = Buffer.create(body, "UTF-8");
        Frame frame = new Frame("SEND", buff);
        frame.headers.put("destination", destination);
        frame.headers.put("content-length", String.valueOf(buff.length()));
        return frame;
    }

    /** SEND frame with a binary body; sets content-length. */
    protected static Frame sendFrame(String destination, Buffer body) {
        Frame frame = new Frame("SEND", body);
        frame.headers.put("destination", destination);
        frame.headers.put("content-length", String.valueOf(body.length()));
        return frame;
    }

    /** RECEIPT frame acknowledging the given receipt id. */
    protected static Frame receiptFrame(String receipt) {
        Frame frame = new Frame("RECEIPT", null);
        frame.headers.put("receipt-id", receipt);
        return frame;
    }

    /**
     * Serialises this frame: UTF-8 header section, then the body (if any),
     * then a single NUL terminator byte.
     */
    public Buffer toBuffer() {
        try {
            byte[] bytes = headersString().toString().getBytes("UTF-8");
            Buffer buff = Buffer.create(bytes.length + (body == null ? 0 : body.length()) + 1);
            buff.appendBytes(bytes);
            if (body != null) buff.appendBuffer(body);
            buff.appendByte((byte) 0);
            return buff;
        } catch (UnsupportedEncodingException thisWillNeverHappen) {
            // UTF-8 support is mandatory on the JVM, so this is unreachable.
            return null;
        }
    }

    /** Command line, one "key:value" header per line, then a blank line. */
    private StringBuilder headersString() {
        StringBuilder sb = new StringBuilder();
        sb.append(command).append('\n');
        if (headers != null) {
            for (Map.Entry<String, String> entry : headers.entrySet()) {
                sb.append(entry.getKey()).append(':').append(entry.getValue()).append('\n');
            }
        }
        sb.append('\n');
        return sb;
    }

    public String toString() {
        StringBuilder buff = headersString();
        if (body != null) {
            buff.append(body.toString());
        }
        return buff.toString();
    }
}
|
from typing import Tuple

import torch
from omegaconf import DictConfig
from pytorch_lightning import LightningModule
from torch.optim import Optimizer
from torch.optim.lr_scheduler import _LRScheduler
from torchmetrics import Accuracy, MetricCollection

from ..optimizer.scheduler import create_scheduler
from ..utils import utils
from ..utils.misc import mixup_data
log = utils.get_logger(__name__)
class CustomLogger(LightningModule):
    """LightningModule glue that builds the optimizer/scheduler from the
    config, logs train/val metrics, and applies mixup augmentation.

    Note: despite the name, this is a model module, not a logging backend.
    """

    def __init__(self, config: DictConfig):
        super(CustomLogger, self).__init__()
        # Hydra/OmegaConf experiment configuration.
        self.config = config

    def configure_optimizers(self) -> Tuple[Optimizer, _LRScheduler]:
        """Create the Adam optimizer and its LR scheduler.

        The original annotation ``-> Optimizer, _LRScheduler:`` was a Python
        SyntaxError; the pair is annotated as a tuple instead.
        """
        optimizer = torch.optim.Adam(self.parameters(), lr=self.config.optimizer.lr)
        scheduler = create_scheduler(optimizer, self.config.scheduler)
        return optimizer, scheduler

    def log_metrics(self, train_loss: torch.Tensor, val_loss: torch.Tensor, train_acc: torch.Tensor, val_acc: torch.Tensor) -> None:
        """Collect and log the four scalar metrics.

        NOTE(review): MetricCollection normally wraps Metric objects, not raw
        tensors — confirm this usage against the torchmetrics version in use.
        """
        metrics = {
            'train_loss': train_loss,
            'val_loss': val_loss,
            'train_acc': train_acc,
            'val_acc': val_acc
        }
        metric_collection = MetricCollection(metrics)
        metric_collection = metric_collection.to(self.device)
        metric_collection.compute()
        log.info(f"Training Loss: {train_loss}, Validation Loss: {val_loss}, Training Accuracy: {train_acc}, Validation Accuracy: {val_acc}")

    def apply_mixup(self, x: torch.Tensor, y: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
        """Return mixup-augmented ``(x, y)`` using ``config.mixup.alpha``.

        (Original ``-> torch.Tensor, torch.Tensor:`` annotation was also a
        SyntaxError.)
        """
        x_mix, y_mix = mixup_data(x, y, alpha=self.config.mixup.alpha)
        return x_mix, y_mix
#![recursion_limit = "1024"]
extern crate proc_macro;
#[macro_use]
extern crate quote;
extern crate syn;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Data, DeriveInput, Fields};
#[proc_macro_derive(MyTraitMacro)]
pub fn my_trait_macro(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let struct_name = &input.ident;
let trait_impl = match input.data {
Data::Struct(ref data) => match data.fields {
Fields::Named(ref fields) => {
let field_names = fields.named.iter().map(|f| &f.ident);
quote! {
impl MyTrait for #struct_name {
// Generate trait methods based on field names
// Example: fn process_field1(&self) { /* implementation */ }
// Example: fn process_field2(&self) { /* implementation */ }
}
}
}
_ => {
quote! {
compile_error!("Only named fields are supported for MyTraitMacro");
}
}
},
_ => {
quote! {
compile_error!("Only structs are supported for MyTraitMacro");
}
}
};
trait_impl.into()
} |
def validate_isbn_10(isbn):
if len(isbn) == 10:
_sum = 0
if isbn[-1] == "X" or isbn[-1] == "x": # a final x stands for 10
isbn = isbn[:-1] + "10"
for d, i in enumerate(isbn):
_sum += (int(i) * (d + 1))
return _sum % 11 == 0
return False |
import re
def find_view_function(url_path: str) -> str:
    """Resolve an account-profile URL path to its view-function name.

    The patterns are anchored relative to the '/accounts/profile/' mount
    point, so that prefix is stripped before matching. (The original matched
    the anchored patterns against the full path, which made every documented
    example below return "Not Found".)
    """
    prefix = "/accounts/profile/"
    if url_path.startswith(prefix):
        url_path = url_path[len(prefix):]
    url_patterns = [
        (r'^$', 'index'),
        (r'^api_keys/$', 'apikeys'),
        (r'^update/$', 'update'),
        (r'^password/$', 'password_change'),
        (r'^activate/$', 'activation'),
        (r'^ssh_key/(?P<action>add|delete)/$', 'sshkey'),
        (r'^impersonate/user/(?P<username>[A-Za-z0-9@.+_-]+)/$', 'start_impersonation'),
        (r'^impersonate/cancel/$', 'stop_impersonation')
    ]
    # First matching pattern wins; fall through to a sentinel when none match.
    for pattern, view_function in url_patterns:
        if re.match(pattern, url_path):
            return view_function
    return "Not Found"

# Test cases
print(find_view_function("/accounts/profile/"))  # Output: "index"
print(find_view_function("/accounts/profile/api_keys/"))  # Output: "apikeys"
print(find_view_function("/accounts/profile/invalid_path/"))  # Output: "Not Found"
<reponame>ghackett/ProviderOne
/*
* Copyright (C) 2011 GroupMe, Inc.
*/
package com.groupme.providerone.sample.database.autogen.loaders;
import android.content.Context;
import android.net.Uri;
import android.support.v4.content.AsyncTaskLoader;
import com.groupme.providerone.sample.database.objects.MyTable;
import com.groupme.providerone.sample.database.tables.MyTableInfo;
/**
 * Loader that asynchronously fetches a single {@link MyTable} row, looked up
 * either by id or by its myString value (exactly one is non-null, enforced in
 * the constructors).
 *
 * <p>Change notification: once the row is loaded, a content observer is
 * registered on that row's id-lookup URI so only relevant changes trigger a
 * reload. While the row cannot be resolved, the loader falls back to
 * observing the whole table, then narrows the registration to the row URI as
 * soon as the row appears.
 */
public class MyTableLoader extends AsyncTaskLoader<MyTable> {
    protected Long mId = null;                // lookup key when loading by id
    protected String mMyString = null;        // lookup key when loading by myString
    protected MyTable mMyTable = null;        // last loaded row, cached for redelivery
    protected ForceLoadContentObserver mContentObserver = null;
    protected boolean mHasBeenReged = false;  // is an observer currently registered?
    protected boolean mDidFallBackToFullTableObserver = false;

    /** Loads the row with the given id; throws if id is null. */
    public MyTableLoader(Context context, Long id) {
        super(context);
        mId = id;
        mMyString = null;
        if (mId == null)
            throw new RuntimeException("Tried to construct a MyTableLoader with a null id");
        mContentObserver = new ForceLoadContentObserver();
    }

    /** Loads the row with the given myString; throws if it is null. */
    public MyTableLoader(String myString, Context context) {
        super(context);
        mMyString = myString;
        mId = null;
        if (mMyString == null)
            throw new RuntimeException("Tried to construct a MyTableLoader with a null myString");
        mContentObserver = new ForceLoadContentObserver();
    }

    @Override
    public MyTable loadInBackground() {
        if (mId != null) {
            mMyTable = MyTable.findOneById(mId);
        }
        if (mMyString != null) {
            mMyTable = MyTable.findOneByMyString(mMyString);
        }
        // Maintain the observer registration based on whether the row (and
        // hence its id-lookup URI) could be resolved on this pass.
        if (mDidFallBackToFullTableObserver || !mHasBeenReged) {
            Uri notifUri = mMyTable == null ? null : mMyTable.getIdLookupUri();
            if (notifUri != null && !mHasBeenReged) {
                // First registration and the row exists: observe just this row.
                mHasBeenReged = true;
                getContext().getContentResolver().registerContentObserver(notifUri, true, mContentObserver);
            } else if (notifUri == null && !mHasBeenReged) {
                // Row not found yet: watch the whole table so we reload when
                // it appears.
                mDidFallBackToFullTableObserver = true;
                mHasBeenReged = true;
                getContext().getContentResolver().registerContentObserver(MyTableInfo.CONTENT_URI, true, mContentObserver);
            } else if (mHasBeenReged && notifUri != null && mDidFallBackToFullTableObserver) {
                // Row appeared after falling back: swap the full-table
                // registration for the narrower row URI.
                mDidFallBackToFullTableObserver = false;
                getContext().getContentResolver().unregisterContentObserver(mContentObserver);
                getContext().getContentResolver().registerContentObserver(notifUri, true, mContentObserver);
            }
        }
        return mMyTable;
    }

    @Override
    protected void onStartLoading() {
        // Redeliver the cached row immediately, then reload if the content
        // changed or nothing is cached yet.
        if (mMyTable != null)
            deliverResult(mMyTable);
        if (takeContentChanged() || mMyTable == null)
            forceLoad();
    }

    @Override
    protected void onStopLoading() {
        cancelLoad();
    }

    @Override
    protected void onReset() {
        super.onReset();
        onStopLoading();
        mMyTable = null;
        // Drop the cached row and unregister any active observer.
        if (mHasBeenReged) {
            mDidFallBackToFullTableObserver = false;
            getContext().getContentResolver().unregisterContentObserver(mContentObserver);
            mHasBeenReged = false;
        }
    }
}
|
package servicebus
import (
"fmt"
"time"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)
// dataSourceArmServiceBusTopicAuthorizationRule defines the read-only data
// source for a ServiceBus topic authorization rule. The rule is identified by
// name / namespace / topic / resource group; the rights flags, keys and
// connection strings are computed, with key material marked Sensitive.
func dataSourceArmServiceBusTopicAuthorizationRule() *schema.Resource {
	return &schema.Resource{
		Read: dataSourceArmServiceBusTopicAuthorizationRuleRead,

		Timeouts: &schema.ResourceTimeout{
			Read: schema.DefaultTimeout(5 * time.Minute),
		},

		Schema: map[string]*schema.Schema{
			// Required inputs identifying the authorization rule.
			"name": {
				Type:         schema.TypeString,
				Required:     true,
				ValidateFunc: azure.ValidateServiceBusAuthorizationRuleName(),
			},
			"namespace_name": {
				Type:         schema.TypeString,
				Required:     true,
				ValidateFunc: azure.ValidateServiceBusNamespaceName(),
			},
			"topic_name": {
				Type:         schema.TypeString,
				Required:     true,
				ValidateFunc: azure.ValidateServiceBusTopicName(),
			},
			"resource_group_name": azure.SchemaResourceGroupName(),

			// Computed rights flags.
			"listen": {
				Type:     schema.TypeBool,
				Computed: true,
			},
			"send": {
				Type:     schema.TypeBool,
				Computed: true,
			},
			"manage": {
				Type:     schema.TypeBool,
				Computed: true,
			},

			// Computed key material (sensitive).
			"primary_key": {
				Type:      schema.TypeString,
				Computed:  true,
				Sensitive: true,
			},
			"primary_connection_string": {
				Type:      schema.TypeString,
				Computed:  true,
				Sensitive: true,
			},
			"secondary_key": {
				Type:      schema.TypeString,
				Computed:  true,
				Sensitive: true,
			},
			"secondary_connection_string": {
				Type:      schema.TypeString,
				Computed:  true,
				Sensitive: true,
			},
		},
	}
}
// dataSourceArmServiceBusTopicAuthorizationRuleRead fetches the authorization
// rule plus its keys and writes them into the data-source state.
func dataSourceArmServiceBusTopicAuthorizationRuleRead(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*clients.Client).ServiceBus.TopicsClient
	ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
	defer cancel()

	name := d.Get("name").(string)
	namespaceName := d.Get("namespace_name").(string)
	topicName := d.Get("topic_name").(string)
	resourceGroup := d.Get("resource_group_name").(string)

	resp, err := client.GetAuthorizationRule(ctx, resourceGroup, namespaceName, topicName, name)
	if err != nil {
		if utils.ResponseWasNotFound(resp.Response) {
			// A data source cannot create the resource, so "not found" is an error.
			return fmt.Errorf("ServiceBus Topic Authorization Rule %q (Resource Group %q / Namespace Name %q) was not found", name, resourceGroup, namespaceName)
		}
		return fmt.Errorf("Error making Read request on Azure ServiceBus Topic Authorization Rule %s: %+v", name, err)
	}

	d.Set("name", name)
	d.Set("topic_name", topicName)
	d.Set("namespace_name", namespaceName)
	d.Set("resource_group_name", resourceGroup)

	if properties := resp.SBAuthorizationRuleProperties; properties != nil {
		listen, send, manage := azure.FlattenServiceBusAuthorizationRuleRights(properties.Rights)
		d.Set("listen", listen)
		d.Set("send", send)
		d.Set("manage", manage)
	}

	if resp.ID == nil || *resp.ID == "" {
		// Fixed: the original appended ": %+v" with `err`, but err is always
		// nil on this path, so the message just printed "<nil>".
		return fmt.Errorf("API returned a nil/empty id for ServiceBus Topic Authorization Rule %q (Resource Group %q)", name, resourceGroup)
	}
	d.SetId(*resp.ID)

	keysResp, err := client.ListKeys(ctx, resourceGroup, namespaceName, topicName, name)
	if err != nil {
		return fmt.Errorf("Error making Read request on Azure ServiceBus Topic Authorization Rule List Keys %s: %+v", name, err)
	}

	d.Set("primary_key", keysResp.PrimaryKey)
	d.Set("primary_connection_string", keysResp.PrimaryConnectionString)
	d.Set("secondary_key", keysResp.SecondaryKey)
	d.Set("secondary_connection_string", keysResp.SecondaryConnectionString)

	return nil
}
|
"""Tests for the MPS operation functions."""
import sys
import unittest
import numpy as np
sys.path.append('..')
from mpys.mps import Mps
from mpys.mps_ops import contract
class MPSContractionTestCase(unittest.TestCase):
    """Test MPS contraction."""

    def test_exceptions_of_contract(self):
        """Test the exceptions of the contract function."""
        # TODO: exercise contract()'s error paths (e.g. mismatched chain
        # lengths or unsupported state names) — currently a placeholder.
        pass

    def test_contraction_of_two_states(self):
        """Test the contraction of two different Mps."""
        # Pairs and mixed Mps.
        # Expected overlaps follow sqrt(1/2)**(n//2): exponent 2 for n=4,
        # 4 for n=8, 3 for n=7.
        self.assertAlmostEqual(contract(Mps(4, 'pairs'), Mps(4, 'mixed')),
                               np.sqrt(1/2)**2)
        self.assertAlmostEqual(contract(Mps(8, 'pairs'), Mps(8, 'mixed')),
                               np.sqrt(1/2)**4)
        self.assertAlmostEqual(contract(Mps(7, 'pairs'), Mps(7, 'mixed')),
                               np.sqrt(1/2)**3)
        # Pairs and GHZ Mps: expected to be orthogonal.
        self.assertAlmostEqual(contract(Mps(4, 'GHZ'), Mps(4, 'pairs')), 0)
        self.assertAlmostEqual(contract(Mps(7, 'GHZ'), Mps(7, 'pairs')), 0)
        # GHZ and mixed Mps.
        self.assertAlmostEqual(contract(Mps(7, 'GHZ'), Mps(7, 'mixed')), 1/8)
        self.assertAlmostEqual(contract(Mps(4, 'GHZ'), Mps(4, 'mixed')),
                               1/np.sqrt(8))
<filename>response.ts
import { Req, Res } from "./model.ts";
import { contentType, extname, join, Status, STATUS_TEXT } from "./deps.ts";
import { decoder } from "./request.ts";
import { parseResponseBody } from "./utils/parse/body.ts";
import { Cookie } from "./cookie.ts";
export class Response {
  /** Builds the default per-request response record (404 until a handler fills it in). */
  static createResponse() {
    return {
      response: {
        headers: new Headers(),
      },
      body: null,
      headers: new Headers(),
      status: Status.NotFound,
      done: false,
      redirect: this.redirect,
      render: this.render,
      cookies: new Cookie(),
      send,
    };
  }

  /**
   * Serves a file (relative to the CWD) as the response body, setting
   * Content-Type from the file extension.
   */
  static async render(res: Res, path: string) {
    try {
      // Read inside the try so a missing/unreadable file becomes a 500
      // response — the original awaited Deno.readFile *before* entering the
      // try block, so read failures escaped as unhandled rejections.
      const v = decoder.decode(await Deno.readFile(join(Deno.cwd(), path)));
      const cType = contentType(extname(path)) || "text/plain; charset=utf-8";
      res.body = v;
      res.headers.set("Content-Type", cType);
      res.status = Status.OK;
    } catch (e) {
      console.log(e);
      res.status = Status.InternalServerError;
      res.body = STATUS_TEXT.get(Status.InternalServerError) as string;
    }
  }

  /** Issues a 302 redirect to the given URL. */
  static redirect(res: Res, url: string) {
    res.status = Status.Found;
    res.headers.set("Location", url);
  }
}
/**
 * Finalizes and writes the response for a request, exactly once
 * (subsequent calls are no-ops thanks to the `done` flag).
 */
export function send(req: Req, res: Res) {
  if (res.done) return;
  const request = req.request;
  const {
    response,
    body,
    headers = new Headers(),
    status = Status.OK,
    cookies,
  } = res;
  try {
    if (body) {
      parseResponseBody(res);
    } else {
      response.body = undefined;
    }
    res.done = true;
    cookies.getCookies().forEach((value) => {
      // append, not set: every cookie needs its own Set-Cookie header;
      // the original `set` overwrote all but the last cookie.
      headers.append("Set-Cookie", value);
    });
    request.respond({ ...response, headers, status });
  } catch (e) {
    console.log(e);
  }
}
|
/*
* Copyright (c) Microsoft. All rights reserved.
* Licensed under the MIT license. See LICENSE file in the project root for full license information.
*/
package com.microsoft.azure.sdk.iot.service;
import com.google.gson.annotations.SerializedName;
/**
 * Enum for device status.
 *
 * <p>The serialized names are the lowercase values used on the JSON wire
 * format ("enabled"/"disabled").
 */
public enum DeviceStatus
{
    @SerializedName("enabled")
    Enabled,
    @SerializedName("disabled")
    Disabled
}
|
/***
* Copyright (C) <NAME>. All rights reserved.
* Licensed under the MIT license. See LICENSE file in the project root
* for full license information.
*
* =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
*
* For related information - https://github.com/CodeWithRodi/Cutternet/
*
* Cutternet Backend Source Code
*
* =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
****/
// Appends the '(E)' marker used to tag error messages from this module.
const FormatError = (Message) => `${Message}(E)`;

// Operational (expected) runtime error carrying an HTTP status code.
// 4xx codes are classified as 'Client Error', everything else as 'Server Error'.
class RuntimeError extends Error {
    constructor(Message, StatusCode) {
        super(FormatError(Message));
        this.StatusCode = StatusCode;
        const isClientError = `${StatusCode}`.startsWith('4');
        this.Status = isClientError ? 'Client Error' : 'Server Error';
        this.IsOperational = true;
        Error.captureStackTrace(this, this.constructor);
    }
}

module.exports = { RuntimeError, FormatError };
|
<reponame>xcorail/OTB
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef otbCompositeTransform_hxx
#define otbCompositeTransform_hxx
#include "otbCompositeTransform.h"
#include "otbGenericMapProjection.h"
#include "otbForwardSensorModel.h"
#include "otbInverseSensorModel.h"
#include "itkIdentityTransform.h"
namespace otb
{
// Composition of two transforms: TransformPoint() applies the first
// transform, then feeds its output into the second one.

// Constructor: both member transforms start unset (null); TransformPoint()
// dereferences them, so they must be assigned before use.
template<class TFirstTransform,
         class TSecondTransform,
         class TScalarType,
         unsigned int NInputDimensions,
         unsigned int NOutputDimensions>
CompositeTransform<TFirstTransform,
                   TSecondTransform,
                   TScalarType,
                   NInputDimensions,
                   NOutputDimensions>
::CompositeTransform() : Superclass(ParametersDimension)
{
  m_FirstTransform = nullptr;
  m_SecondTransform = nullptr;
}

// Destructor: nothing to release explicitly.
template<class TFirstTransform,
         class TSecondTransform,
         class TScalarType,
         unsigned int NInputDimensions,
         unsigned int NOutputDimensions>
CompositeTransform<TFirstTransform,
                   TSecondTransform,
                   TScalarType,
                   NInputDimensions,
                   NOutputDimensions>
::~CompositeTransform()
{
}

// Applies the first transform, then the second, to the input point.
template<class TFirstTransform,
         class TSecondTransform,
         class TScalarType,
         unsigned int NInputDimensions,
         unsigned int NOutputDimensions>
typename CompositeTransform<TFirstTransform,
                            TSecondTransform,
                            TScalarType,
                            NInputDimensions,
                            NOutputDimensions>::SecondTransformOutputPointType
CompositeTransform<TFirstTransform,
                   TSecondTransform,
                   TScalarType,
                   NInputDimensions,
                   NOutputDimensions>
::TransformPoint(const FirstTransformInputPointType& point1) const
{
  FirstTransformOutputPointType geoPoint;
  geoPoint = m_FirstTransform->TransformPoint(point1);
  SecondTransformOutputPointType outputPoint;
  outputPoint = m_SecondTransform->TransformPoint(geoPoint);
  // otbMsgDevMacro(<< std::setprecision(15) << "Converting: " << point1 << " -> " << geoPoint<< " -> " << outputPoint);
  return outputPoint;
}

// Vector/covariant-vector composition variants, kept disabled upstream.
/*template<class TFirstTransform, class TSecondTransform, class TScalarType, unsigned int NInputDimensions, unsigned int NOutputDimensions>
typename CompositeTransform<TFirstTransform, TSecondTransform, TScalarType, NInputDimensions, NOutputDimensions>::OutputVectorType
CompositeTransform<TFirstTransform, TSecondTransform, TScalarType, NInputDimensions, NOutputDimensions>
::TransformVector(const InputVectorType &vector1) const
{
  InputVectorType vectorTmp;
  OutputVectorType vector2;
  vectorTmp=m_FirstTransform->TransformVector(vector1);
  vector2=m_SecondTransform->TransformVector(vectorTmp);
  return vector2;
}
template<class TFirstTransform, class TSecondTransform, class TScalarType, unsigned int NInputDimensions, unsigned int NOutputDimensions>
typename CompositeTransform<TFirstTransform, TSecondTransform, TScalarType, NInputDimensions, NOutputDimensions>::OutputVnlVectorType
CompositeTransform<TFirstTransform, TSecondTransform, TScalarType, NInputDimensions, NOutputDimensions>
::TransformVector(const InputVnlVectorType &vnlVector1) const
{
  InputVnlVectorType vnlVectorTmp;
  OutputVnlVectorType vnlVector2;
  vnlVectorTmp=m_FirstTransform->TransformVector(vnlVector1);
  vnlVector2=m_SecondTransform->TransformVector(vnlVectorTmp);
  return vnlVector2;
}
template<class TFirstTransform, class TSecondTransform, class TScalarType, unsigned int NInputDimensions, unsigned int NOutputDimensions>
typename CompositeTransform<TFirstTransform, TSecondTransform, TScalarType, NInputDimensions, NOutputDimensions>::OutputCovariantVectorType
CompositeTransform<TFirstTransform, TSecondTransform, TScalarType, NInputDimensions, NOutputDimensions>
::TransformCovariantVector(const InputCovariantVectorType &covariantVector1) const
{
  InputCovariantVectorType covariantVectorTmp;
  OutputCovariantVectorType covariantVector2;
  covariantVectorTmp=m_FirstTransform->TransformCovariantVector(covariantVector1);
  covariantVector2=m_SecondTransform->TransformCovariantVector(covariantVectorTmp);
  return covariantVector2;
}*/
} // namespace otb

#endif
|
<filename>src/__tests__/components/NagSuggest.js
import React from 'react';
import { mount } from 'enzyme';
import renderer from 'react-test-renderer';
import { fromJS } from 'immutable';
import NagSuggest from '../../components/NagSuggest';
import 'jest-styled-components';
// Unit tests for <NagSuggest/>: snapshot stability, rendered content, and
// that clicking a suggestion invokes the nagCreate callback.
describe('<NagSuggest />', () => {
let nagCreate, originalSetTimeout;
// Fresh mock per test so call counts don't leak between cases.
beforeEach(() => {
nagCreate = jest.fn();
});
// Make window.setTimeout synchronous for the whole suite so any delayed
// behavior in the component runs immediately; restored in afterAll.
beforeAll(() => {
originalSetTimeout = window.setTimeout;
window.setTimeout = (callback, time) => callback();
});
afterAll(() => {
window.setTimeout = originalSetTimeout;
});
it('should match snapshot', () => {
const tree = renderer.create(<NagSuggest nagCreate={nagCreate} />).toJSON();
expect(tree).toMatchSnapshot();
});
// NOTE(review): expects exactly 5 <li> suggestions — presumably the
// component hard-codes five; confirm against NagSuggest's implementation.
it('should have correct elements and children', () => {
const nagSuggest = mount(<NagSuggest nagCreate={nagCreate} />);
expect(nagSuggest.find('h5').text()).toBe('Or try the nags below');
expect(nagSuggest.find('li').length).toBe(5);
nagSuggest
.find('button')
.first()
.simulate('click');
expect(nagCreate.mock.calls.length).toBe(1);
});
});
|
#!/usr/bin/env bash
# Launcher for the self-ensemble domain-adaptation baseline: each line is an
# independent training run (source domain -s, target domain -t); logs are
# written under logs/self_ensemble/<experiment>.
# Office31
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office31 -d Office31 -s A -t W -a resnet50 --seed 1 --log logs/self_ensemble/Office31_A2W
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office31 -d Office31 -s D -t W -a resnet50 --seed 1 --log logs/self_ensemble/Office31_D2W
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office31 -d Office31 -s W -t D -a resnet50 --seed 1 --log logs/self_ensemble/Office31_W2D
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office31 -d Office31 -s A -t D -a resnet50 --seed 1 --log logs/self_ensemble/Office31_A2D
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office31 -d Office31 -s D -t A -a resnet50 --seed 1 --log logs/self_ensemble/Office31_D2A
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office31 -d Office31 -s W -t A -a resnet50 --seed 1 --log logs/self_ensemble/Office31_W2A
# Office-Home
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Ar -t Cl -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Ar2Cl
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Ar -t Pr -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Ar2Pr
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Ar -t Rw -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Ar2Rw
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Cl -t Ar -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Cl2Ar
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Cl -t Pr -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Cl2Pr
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Cl -t Rw -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Cl2Rw
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Pr -t Ar -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Pr2Ar
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Pr -t Cl -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Pr2Cl
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Pr -t Rw -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Pr2Rw
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Rw -t Ar -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Rw2Ar
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Rw -t Cl -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Rw2Cl
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/office-home -d OfficeHome -s Rw -t Pr -a resnet50 --seed 0 --log logs/self_ensemble/OfficeHome_Rw2Pr
# VisDA-2017
CUDA_VISIBLE_DEVICES=0 python self_ensemble.py data/visda-2017 -d VisDA2017 -s Synthetic -t Real -a resnet101 \
--bottleneck-dim 512 --epochs 20 --seed 0 --per-class-eval --log logs/self_ensemble/VisDA2017
# NOTE(review): this second VisDA run targets GPU 6 (all others use GPU 0)
# and writes to the SAME --log directory as the run above, so it may
# overwrite the previous results — confirm this is intentional.
CUDA_VISIBLE_DEVICES=6 python self_ensemble.py data/visda-2017 -d VisDA2017 -s Synthetic -t Real -a resnet101 \
--epochs 20 --seed 0 --per-class-eval --log logs/self_ensemble/VisDA2017 --lr-gamma 0.0002 -b 32
|
#!/bin/bash
# Ask the user (via rofi) which active xrandr output to use, then tell i3 to
# move the current workspace there and follow it with focus.
selection=$(xrandr --listactivemonitors | grep -v Monitors | sed -nr 's/.*\s([a-zA-Z0-9\-]*)$/\1/p' | sort | rofi -dmenu -i -p "Move workspace to monitor")
# An empty selection means the menu was dismissed: nothing to do.
if [ -z "$selection" ]; then
    exit 0
fi
i3-msg "move workspace to output $selection" > /dev/null
i3-msg "focus output $selection" > /dev/null
|
# Exit immediately if a command exits with a non-zero status
set -e
# Remember the directory we were launched from so later `cd`s can return here.
WORK_DIR="$(pwd)"
########## parse and validate argument(s)
# Print usage help and abort; used for argument-level errors.
usage_and_exit() {
echo "Usage: $0 [--skip-cloud-functions] [--only hosting|database|firestore] path/to/firebase/deployment/constants.json path/to/crypto/token/file"
exit 1
}
# Abort without usage text; used for failures after arguments were accepted.
just_exit() {
exit 1
}
# Extra arguments forwarded verbatim to `firebase deploy` (filled by --only).
FIREBASE_DEPLOY_ARGS=()
# Consume optional flags first; stop at the first non-flag argument.
while [ "$#" -gt 0 ]; do
case "$1" in
--skip-cloud-functions)
SKIP_CLOUD_FUNCTIONS=true
shift
;;
--only)
shift
FIREBASE_DEPLOY_ARGS[0]="--only"
if [ -z "$1" ]; then
echo "expected a specific firebase service to deploy"
usage_and_exit
fi
FIREBASE_DEPLOY_ARGS[1]="$1"
shift
;;
*)
# Don't process any other arguments after the optional ones
break
;;
esac
done
# Exactly two positional arguments must remain: constants file + token file.
if [ $# -ne 2 ]; then
echo "expected 2 required arguments"
usage_and_exit
fi
FIREBASE_CONSTANTS="$1"
if [ ! -f "$FIREBASE_CONSTANTS" ]; then
echo "could not find FIREBASE_CONSTANTS: $FIREBASE_CONSTANTS"
usage_and_exit
fi
# Resolve the constants file to an absolute path (cd + pwd canonicalizes it).
cd "$(dirname "$FIREBASE_CONSTANTS")"
FIREBASE_CONSTANTS_DIR="$(pwd)"
FIREBASE_CONSTANTS_FILENAME="$(basename "$FIREBASE_CONSTANTS")"
FIREBASE_CONSTANTS_FILE="$FIREBASE_CONSTANTS_DIR/$FIREBASE_CONSTANTS_FILENAME"
cd "$WORK_DIR"
CRYPTO_TOKEN="$2"
if [ ! -f "$CRYPTO_TOKEN" ]; then
echo "could not find CRYPTO_TOKEN: $CRYPTO_TOKEN"
usage_and_exit
fi
# Same absolute-path resolution for the crypto token file.
cd "$(dirname "$CRYPTO_TOKEN")"
CRYPTO_TOKEN_DIR="$(pwd)"
CRYPTO_TOKEN_FILENAME="$(basename "$CRYPTO_TOKEN")"
CRYPTO_TOKEN_FILE="$CRYPTO_TOKEN_DIR/$CRYPTO_TOKEN_FILENAME"
cd "$WORK_DIR"
########## validate that the firebase constants file and the crypto token point to the same project
# Get the project id from the firebase constants file
echo "getting project id from firebase constants..."
FIREBASE_CONSTANTS_PROJECT_ID=$(cat "$FIREBASE_CONSTANTS_FILE" | python -c 'import json,sys; constants=json.load(sys.stdin); print(constants["projectId"])')
echo "project id: $FIREBASE_CONSTANTS_PROJECT_ID"
# Get the project id from the crypto token file
echo "getting project id from crypto token..."
CRYPTO_TOKEN_PROJECT_ID=$(cat "$CRYPTO_TOKEN_FILE" | python -c 'import json,sys; constants=json.load(sys.stdin); print(constants["project_id"])')
echo "project id: $CRYPTO_TOKEN_PROJECT_ID"
# Refuse to continue if the two files reference different GCP projects.
if [ "$FIREBASE_CONSTANTS_PROJECT_ID" != "$CRYPTO_TOKEN_PROJECT_ID" ]; then
echo "the two project ids must match, check that you're deploying to the correct project"
just_exit
fi
########## validate the cloud functions urls in the firebase constants file
echo ""
echo "verifying cloud functions urls from firebase constants..."
CLOUD_FUNCTIONS_LOCATION="europe-west1"
# Expected URLs/keys are parallel arrays indexed together in the loop below.
EXPECTED_URLS=( \
"https://$CLOUD_FUNCTIONS_LOCATION-$FIREBASE_CONSTANTS_PROJECT_ID.cloudfunctions.net/Log" \
"https://$CLOUD_FUNCTIONS_LOCATION-$FIREBASE_CONSTANTS_PROJECT_ID.cloudfunctions.net/Publish" \
"https://$CLOUD_FUNCTIONS_LOCATION-$FIREBASE_CONSTANTS_PROJECT_ID.cloudfunctions.net/StatusZ" \
)
EXPECTED_KEYS=( \
"logUrl" \
"publishUrl" \
"statuszUrl" \
)
for i in 0 1 2; do
EXPECTED_URL=${EXPECTED_URLS[$i]}
EXPECTED_KEY=${EXPECTED_KEYS[$i]}
# Build the python one-liner dynamically so it reads the right JSON key.
PYTHON_CODE="import json,sys; constants=json.load(sys.stdin); print(constants[\"$EXPECTED_KEY\"])"
URL=$(cat "$FIREBASE_CONSTANTS_FILE" | python -c "$PYTHON_CODE")
if [ "$URL" != "$EXPECTED_URL" ]; then
echo ""
echo "bad key $EXPECTED_KEY"
echo " got: $URL"
echo " expected: $EXPECTED_URL"
just_exit
fi
done
echo "done! cloud functions urls look as expected"
echo ""
##### prepare to build the project
##### get the absolute paths for the project
# Resolve the repository root relative to this script's location.
cd "$(dirname "$0")"/..
NOOK_DIR="$(pwd)"
########## build the nook webapp
# Path variables are quoted throughout so paths containing spaces don't
# break the script (the originals were unquoted).
cd "$NOOK_DIR"
echo ""
# Remove previous build if it exists
rm -rf public
# Build
cd webapp
echo "building nook webapp ..."
webdev build
echo "build nook complete"
mv build ../public
echo ""
# Copy the constants in the build folder
cd "$NOOK_DIR"
cp "$FIREBASE_CONSTANTS_FILE" public/assets/firebase_constants.json
# Record the latest commit hash of the CURRENT branch into the build folder
# (the old comment claimed origin/master, but the code reads HEAD's branch).
CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
LATEST_COMMIT_HASH=$(git rev-parse "$CURRENT_BRANCH")
DEPLOY_DATA="\"latestCommitHash\": \"$LATEST_COMMIT_HASH\""
DEPLOY_DATA="$DEPLOY_DATA, \"timestamp\": \"$(date +"%Y-%m-%dT%H:%M:%S")\""
DEPLOY_DATA="$DEPLOY_DATA, \"deployed_by\": \"$(git config --get user.email)\""
echo "{$DEPLOY_DATA}" > public/assets/latest_commit_hash.json
# Prepare file with content to be uploaded to firebase
DEPLOY_DATA="$DEPLOY_DATA, \"__reference_path\": \"metadata/nook_app\""
DEPLOY_DATA="$DEPLOY_DATA, \"__id\": \"nook_app\""
DEPLOY_DATA="$DEPLOY_DATA, \"__subcollections\": [ ]"
echo "{\"metadata\": [ {$DEPLOY_DATA} ] }" > public_metadata_nook_app.json
########## deploy webapp
# Deploy using the local firebase tool. Variables are quoted so project ids
# and function names survive word splitting (the originals were unquoted).
echo "deploying to $FIREBASE_CONSTANTS_PROJECT_ID firebase..."
firebase deploy \
    --project "$FIREBASE_CONSTANTS_PROJECT_ID" \
    --public public \
    "${FIREBASE_DEPLOY_ARGS[@]}"
# NOTE: with `set -e` in effect a failing deploy aborts the script, so this
# line only ever reports 0; kept for log continuity.
echo "firebase deploy result: $?"
# Deploy cloud functions if SKIP_CLOUD_FUNCTIONS is unset
if [ -z "$SKIP_CLOUD_FUNCTIONS" ]; then
    echo ""
    echo "deploying cloud functions..."
    cd cloud_functions
    for FUNCTION_NAME in Publish Log StatusZ
    do
        gcloud --project "$CRYPTO_TOKEN_PROJECT_ID" functions deploy \
            "$FUNCTION_NAME" \
            --entry-point "$FUNCTION_NAME" \
            --runtime python37 \
            --allow-unauthenticated \
            --region=europe-west1 \
            --trigger-http
    done
    cd ..
    echo ""
    echo "done updating cloud functions"
else
    echo ""
    echo "skipping deploying cloud functions"
fi
echo ""
echo "updating nook webapp metadata..."
cd tool
pipenv run python json_to_firebase.py "$CRYPTO_TOKEN_FILE" ../public_metadata_nook_app.json
cd ..
echo ""
echo "deployment complete"
|
<reponame>genez/gogreenlight
package gogreenlight
import (
"context"
"sync"
)
// NewNamedSemaphore creates a named semaphore driven by a background context,
// registers it with the global Semaphores registry, and returns it.
// It is equivalent to NewNamedSemaphoreWithContext(name, context.Background());
// the previous version duplicated the construction logic in both functions.
func NewNamedSemaphore(name string) *namedSemaphore {
	return NewNamedSemaphoreWithContext(name, context.Background())
}

// NewNamedSemaphoreWithContext is like NewNamedSemaphore but lets the caller
// supply the context that governs the semaphore.
// NOTE(review): Go convention places ctx as the first parameter; the order is
// kept as-is for backward compatibility with existing callers.
func NewNamedSemaphoreWithContext(name string, ctx context.Context) *namedSemaphore {
	s := &namedSemaphore{
		ctx:  ctx,
		name: name,
		set:  false, // zero value, stated explicitly to document the initial state
		c:    sync.NewCond(&sync.Mutex{}),
	}
	// Register globally so the semaphore can be found/managed by name.
	Semaphores.Add(s)
	return s
}
|
<gh_stars>1-10
package com.meylism.databind;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ComposedTypesTest {
ObjectMapper objectMapper = new ObjectMapper();
@Test
public void ser() throws JsonProcessingException {
Map<String, Object> map = new HashMap<>();
List<Integer> ints = new ArrayList<>();
ints.add(1);
ints.add(2);
map.put("list", ints);
map.put("name", "Meylis");
String s = objectMapper.writeValueAsString(map);
}
@Test
public void deser() throws JsonProcessingException {
String s = "{\"name\":\"Meylis\"}";
Object result = objectMapper.readTree(s);
}
}
|
<reponame>chrishenzie/csi-proxy
package filesystem
import (
"fmt"
"os"
"os/exec"
"strings"
)
// Implements the Filesystem OS API calls. All code here should be very simple
// pass-through to the OS APIs. Any logic around the APIs should go in
// internal/server/filesystem/server.go so that logic can be easily unit-tested
// without requiring specific OS environments.
// API is the exposed Filesystem API
// API is the exposed Filesystem API
type API interface {
// PathExists reports whether path exists (symlinks themselves count).
PathExists(path string) (bool, error)
// PathValid reports whether every element of path exists / is reachable.
PathValid(path string) (bool, error)
// Mkdir creates path including any missing parents.
Mkdir(path string) error
// Rmdir removes path; force also removes non-empty directories.
Rmdir(path string, force bool) error
// CreateSymlink creates newname as a symlink pointing at oldname.
CreateSymlink(oldname string, newname string) error
// IsSymlink reports whether path is a symlink whose target exists.
IsSymlink(path string) (bool, error)
}
// filesystemAPI is the stateless concrete implementation of API.
type filesystemAPI struct{}
// check that filesystemAPI implements API
var _ API = &filesystemAPI{}
// New returns the OS-backed implementation of the Filesystem API.
func New() API {
return filesystemAPI{}
}
// pathExists reports whether path exists. It uses Lstat, so a dangling
// symlink still counts as existing; a missing path yields (false, nil),
// while any other stat failure is returned as an error.
func pathExists(path string) (bool, error) {
	if _, err := os.Lstat(path); err != nil {
		if os.IsNotExist(err) {
			return false, nil
		}
		return false, err
	}
	return true, nil
}
// PathExists reports whether the path exists; thin wrapper over pathExists
// so the helper can be reused and tested independently of the API type.
func (filesystemAPI) PathExists(path string) (bool, error) {
return pathExists(path)
}
// pathValid shells out to PowerShell's Test-Path (Windows-only helper).
// The path is passed via an environment variable rather than interpolated
// into the command line, avoiding quoting/injection issues.
func pathValid(path string) (bool, error) {
cmd := exec.Command("powershell", "/c", `Test-Path $Env:remotepath`)
cmd.Env = append(os.Environ(), fmt.Sprintf("remotepath=%s", path))
output, err := cmd.CombinedOutput()
if err != nil {
return false, fmt.Errorf("returned output: %s, error: %v", string(output), err)
}
// Test-Path prints "True"/"False"; compare case-insensitively via prefix.
return strings.HasPrefix(strings.ToLower(string(output)), "true"), nil
}
// PathValid determines whether all elements of a path exist
// https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.management/test-path?view=powershell-7
// for a remote path, determines whether connection is ok
// e.g. in a SMB server connection, if password is changed, connection will be lost, this func will return false
// Thin wrapper over pathValid (which invokes PowerShell's Test-Path).
func (filesystemAPI) PathValid(path string) (bool, error) {
return pathValid(path)
}
// Mkdir makes a dir with `os.MkdirAll`.
// All missing parents are created with mode 0755; an already-existing
// directory is not an error.
func (filesystemAPI) Mkdir(path string) error {
return os.MkdirAll(path, 0755)
}
// Rmdir removes a dir with `os.Remove`, if force is true then `os.RemoveAll` is used instead.
func (filesystemAPI) Rmdir(path string, force bool) error {
	if !force {
		// Non-forced removal fails on a non-empty directory.
		return os.Remove(path)
	}
	return os.RemoveAll(path)
}
// CreateSymlink creates newname as a symbolic link to oldname.
// Delegates directly to os.Symlink; oldname need not exist yet.
func (filesystemAPI) CreateSymlink(oldname, newname string) error {
return os.Symlink(oldname, newname)
}
// IsSymlink - returns true if tgt is a mount point.
// A path is considered a mount point if:
// - directory exists and
// - it is a soft link and
// - the target path of the link exists.
// If tgt path does not exist, it returns an error
// if tgt path exists, but the source path tgt points to does not exist, it returns false without error.
func (filesystemAPI) IsSymlink(tgt string) (bool, error) {
// This code is similar to k8s.io/kubernetes/pkg/util/mount except the pathExists usage.
// Also in a remote call environment the os error cannot be passed directly back, hence the callers
// are expected to perform the isExists check before calling this call in CSI proxy.
// Lstat (not Stat) so we inspect the link itself rather than its target.
stat, err := os.Lstat(tgt)
if err != nil {
return false, err
}
// If its a link and it points to an existing file then its a mount point.
if stat.Mode()&os.ModeSymlink != 0 {
target, err := os.Readlink(tgt)
if err != nil {
return false, fmt.Errorf("readlink error: %v", err)
}
// NOTE: Readlink may return a relative target; pathExists then resolves
// it relative to the process working directory — TODO confirm callers
// always pass absolute link targets.
exists, err := pathExists(target)
if err != nil {
return false, err
}
return exists, nil
}
// Not a symlink at all: by definition not a mount point.
return false, nil
}
|
<reponame>lananh265/social-network
"use strict";
// Auto-generated icon-data module (transpiled from an ES module, hence the
// __esModule marker). Exports `list2`, a serialized 16x16 SVG "list" glyph
// (three bullet circles + three text bars) consumed by an icon renderer.
// Do not edit the path data by hand.
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.list2 = void 0;
var list2 = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill": "#000000",
"d": "M6 1h10v2h-10v-2zM6 7h10v2h-10v-2zM6 13h10v2h-10v-2zM0 2c0-1.105 0.895-2 2-2s2 0.895 2 2c0 1.105-0.895 2-2 2s-2-0.895-2-2zM0 8c0-1.105 0.895-2 2-2s2 0.895 2 2c0 1.105-0.895 2-2 2s-2-0.895-2-2zM0 14c0-1.105 0.895-2 2-2s2 0.895 2 2c0 1.105-0.895 2-2 2s-2-0.895-2-2z"
}
}]
};
exports.list2 = list2;
# Inspect the local client certificate and the certificate presented by the
# server on localhost:8443.
cd ~/chronic/testclient
# Show the subject/issuer CN lines of the local client cert.
openssl x509 -text -in cert.pem | grep CN
# Fetch the server's certificate (echo closes stdin so s_client exits
# immediately) and keep only the PEM block.
echo | openssl s_client -connect localhost:8443 2>/dev/null |
sed -n -e '/BEGIN CERT/,/END CERT/p' > server.pem
# Show the CN of the fetched server cert and confirm the file was written.
openssl x509 -text -in server.pem | grep 'CN='
ls -l server.pem
|
// Package csvmgr
// Created by RTT.
// Author: <EMAIL> on 2021-Aug-30
package csvmgr
import (
"bufio"
"bytes"
"fmt"
"log"
"os"
"regexp"
"runtime"
"strings"
"sync"
"github.com/teocci/go-mavlink-parser/src/jobmgr"
"github.com/teocci/go-mavlink-parser/src/utfmgr"
)
const (
// headerNormalizer is the canonical 9-column CSV header written ahead of
// normalized records.
headerNormalizer = "fcc_time,gps_time,str_01,num_01,lat,lon,alt,roll,pitch"
// regexNormalizer captures everything before a trailing run of commas
// into "prefix" (the trailing commas land in "sufix").
// NOTE(review): inside the bracket expression, `[^,{2,}]` is a literal
// character class excluding `,`, `{`, `2` and `}` — NOT a `{2,}`
// repetition; confirm this is the intended pattern.
regexNormalizer = `(?P<prefix>.*[^,{2,}])(?P<sufix>,*$)`
)
func explodeHeader() []string {
return explodeValues(headerNormalizer)
}
// explodeValues splits a comma-separated record into its raw fields.
func explodeValues(str string) []string {
return strings.Split(str, ",")
}
// explodeKeyValue maps each header column name to the corresponding field of
// the comma-separated record msg. If the record's field count differs from
// the header's, an empty (non-nil) map is returned.
func explodeKeyValue(msg string) (dataSlice map[string]string) {
	dataSlice = map[string]string{}
	values := explodeValues(msg)
	keys := explodeHeader()
	if len(keys) != len(values) {
		return dataSlice
	}
	for i, key := range keys {
		dataSlice[key] = values[i]
	}
	return dataSlice
}
// NormalizeJob reads the file fn line by line, strips trailing commas from
// each line and appends every resulting 9-field record to the returned
// buffer, prefixed with the canonical header.
//
// NOTE(review): the dispatcher workers append to `buffer` from their own
// goroutines, but bytes.Buffer is not safe for concurrent writes — this
// looks like a data race unless jobmgr serializes job execution; confirm.
// Also the function returns without waiting for the dispatcher to drain,
// so late jobs may be dropped from the returned buffer — verify against
// jobmgr's semantics.
func NormalizeJob(fn string) (buffer bytes.Buffer) {
// Add header
buffer.WriteString(headerNormalizer)
// One worker per CPU core.
poolNumber := runtime.NumCPU()
dispatcher := jobmgr.NewDispatcher(poolNumber).Start(func(id int, job jobmgr.Job) error {
str := job.Item.(string)
str = normalizer(str)
if len(str) > 0 {
// Keep only records with exactly the expected 9 columns.
fields := strings.Split(str, ",")
if len(fields) == 9 {
buffer.WriteString(str)
fmt.Printf("%s", str)
}
}
return nil
})
f := OpenUTFFile(fn)
defer utfmgr.CloseFile()(f)
var index = 0
fileScanner := bufio.NewScanner(f)
for fileScanner.Scan() {
str := fileScanner.Text()
err := fileScanner.Err()
if err != nil {
log.Fatal(err)
}
// Each line becomes one job, keyed by its position in the file.
dispatcher.Submit(jobmgr.Job{
ID: index,
Item: str,
})
index++
}
return buffer
}
// normalizerRegex is compiled once at package init; the previous version
// recompiled the pattern on every call, which is wasteful on hot paths.
var normalizerRegex = regexp.MustCompile(regexNormalizer)

// normalizer strips the trailing run of commas from a CSV record and returns
// the remaining prefix. If the record does not match the pattern at all
// (e.g. a line consisting only of excluded characters), "" is returned.
func normalizer(job string) string {
	if !normalizerRegex.MatchString(job) {
		return ""
	}
	matches := normalizerRegex.FindStringSubmatch(job)
	return matches[normalizerRegex.SubexpIndex("prefix")]
}
func Normalize(f *os.File) bytes.Buffer {
fileScanner := bufio.NewScanner(f)
var buffer bytes.Buffer
numWorkers := runtime.NumCPU()
jobs := make(chan string, numWorkers)
res := make(chan string)
var wg sync.WaitGroup
worker := func(jobs <-chan string, results chan<- string) {
for {
select {
case job, ok := <-jobs: // you must check for readable state of the channel.
if !ok {
return
}
s := normalizer(job)
if len(s) > 0 {
results <- normalizer(job)
}
}
}
}
// init workers
for w := 0; w < numWorkers; w++ {
wg.Add(1)
go func() {
// this line will exec when chan `res` processed output at line 107 (func worker: line 71)
defer wg.Done()
worker(jobs, res)
}()
}
go func() {
for fileScanner.Scan() {
str := fileScanner.Text()
err := fileScanner.Err()
if err != nil {
fmt.Println("ERROR: ", err.Error())
break
}
jobs <- str
}
close(jobs) // close jobs to signal workers that no more job are incoming.
}()
go func() {
wg.Wait()
close(res) // when you close(res) it breaks the below loop.
}()
for r := range res {
buffer.WriteString(r)
}
return buffer
} |
package com.honyum.elevatorMan.net;
import com.honyum.elevatorMan.net.base.RequestBean;
import com.honyum.elevatorMan.net.base.RequestBody;
/**
* Created by Star on 2017/6/15.
*/
/**
 * Request bean for the NH maintenance endpoint.
 * ("Mentenance" spelling is kept for wire/API compatibility.)
 */
public class NHMentenanceRequest extends RequestBean {
    private NHMentenanceBody body;

    public NHMentenanceBody getBody() {
        return body;
    }

    public void setBody(NHMentenanceBody body) {
        this.body = body;
    }

    /** Request body carrying the branch identifier; the setter is chainable. */
    public class NHMentenanceBody extends RequestBody {
        private String branchId;

        public String getBranchId() {
            return branchId;
        }

        public NHMentenanceBody setBranchId(String branchId) {
            this.branchId = branchId;
            return this;
        }
    }
}
|
package locations;
// D demonstrates the qualified inner-class instantiation syntax
// `outer.new Inner()` from an instance initializer block.
// NOTE(review): the initializer constructs a new D, whose own initializer
// runs again — instantiating D at runtime would recurse until
// StackOverflowError. Presumably this file is a compile-only test fixture
// (package "locations" suggests an analyzer/compiler test); confirm.
public class D {
class Inner {}
{ new D().new Inner(); }
}
|
// Initializing the connection to the server database.
URL url = new URL("http://server_url/db_connection");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("POST");
// Passing the required parameters for the database connection.
// NOTE(review): credentials are concatenated without URL-encoding and sent
// over plain HTTP — special characters in username/password will corrupt
// the body, and the password travels unencrypted; this should use
// URLEncoder.encode(...) and an HTTPS endpoint.
String urlParams = "username=" + username + "&password=" + password;
conn.setDoOutput(true);
// NOTE(review): the stream is not closed in a finally/try-with-resources,
// so it leaks if writeBytes/flush throws.
DataOutputStream wr = new DataOutputStream(conn.getOutputStream());
wr.writeBytes(urlParams);
wr.flush();
wr.close();
// Establishing the connection.
conn.connect();
// Code to access the data and perform the required operations.
// ....
<gh_stars>1-10
package com.abubusoft.kripton.examples.rssreader.service.model;
/** Minimal base entity for the RSS-reader model: holds only the record id. */
public class Entity {
// Record identifier; public field by design (simple DTO-style model).
public long id;
}
|
####################
# Tab completion file to allow for easy use of this tool with the command-line using Bash.
####################
####################################################################################################
# High-level caller/dispatch script information:
CALLER_SCRIPT_NAME="bashTabCompletionDocletTestLaunchWithDefaults"
# A description of these variables is below in the main completion function (_masterCompletionFunction)
# PREFIX options: flags accepted before the tool name. The arrays below are
# parallel (index i of each describes the same argument).
CS_PREFIX_OPTIONS_ALL_LEGAL_ARGUMENTS=( TestExtraDocs TestArgumentContainer )
CS_PREFIX_OPTIONS_NORMAL_COMPLETION_ARGUMENTS=( TestExtraDocs TestArgumentContainer )
# "null" value type means the argument is a flag taking no value.
CS_PREFIX_OPTIONS_ALL_ARGUMENT_VALUE_TYPES=( "null" "null" )
CS_PREFIX_OPTIONS_MUTUALLY_EXCLUSIVE_ARGS=()
CS_PREFIX_OPTIONS_SYNONYMOUS_ARGS=()
CS_PREFIX_OPTIONS_MIN_OCCURRENCES=( 0 0 )
CS_PREFIX_OPTIONS_MAX_OCCURRENCES=( 1 1 )
# POSTFIX options (after the " -- " delimiter): none for this script.
CS_POSTFIX_OPTIONS_ALL_LEGAL_ARGUMENTS=()
CS_POSTFIX_OPTIONS_NORMAL_COMPLETION_ARGUMENTS=()
CS_POSTFIX_OPTIONS_ALL_ARGUMENT_VALUE_TYPES=()
CS_POSTFIX_OPTIONS_MUTUALLY_EXCLUSIVE_ARGS=()
CS_POSTFIX_OPTIONS_SYNONYMOUS_ARGS=()
CS_POSTFIX_OPTIONS_MIN_OCCURRENCES=()
CS_POSTFIX_OPTIONS_MAX_OCCURRENCES=()
# Whether we have to worry about these extra script options at all.
HAS_POSTFIX_OPTIONS="false"
# All the tool names we are able to complete:
ALL_TOOLS=(TestExtraDocs TestArgumentContainer )
####################################################################################################
# Get the name of the tool that we're currently trying to call
_bashTabCompletionDocletTestLaunchWithDefaults_getToolName()
{
# Naively go through each word in the line until we find one that is in our list of tools:
# (Space-padding both sides of the haystack and the needle makes grep match
# whole words only, not substrings of tool names.)
for word in ${COMP_WORDS[@]} ; do
if ( echo " ${ALL_TOOLS[@]} " | grep -q " ${word} " ) ; then
echo "${word}"
break
fi
done
}
# Get the index of the toolname inside COMP_WORDS
_bashTabCompletionDocletTestLaunchWithDefaults_getToolNameIndex()
{
# Naively go through each word in the line until we find one that is in our list of tools:
# Prints the COMP_WORDS index of the first word that is a known tool name;
# prints nothing if no tool name is present yet.
local ctr=0
for word in ${COMP_WORDS[@]} ; do
if ( echo " ${ALL_TOOLS[@]} " | grep -q " ${word} " ) ; then
echo $ctr
break
fi
let ctr=$ctr+1
done
}
# Get all possible tool names for the current command line if the current command is a
# complete command on its own already.
# If there is no complete command yet, then this prints nothing.
_bashTabCompletionDocletTestLaunchWithDefaults_getAllPossibleToolNames()
{
# We want to return a list of possible tool names if and only if
# the current word is a valid complete tool name
# AND
# the current word is also a substring in more than one tool name
local tool count matches toolList
let count=0
matches=false
toolList=()
# Go through tool names and get what matches and partial matches we have:
for tool in ${ALL_TOOLS[@]} ; do
if [[ "${COMP_WORDS[COMP_CWORD]}" == "${tool}" ]] ; then
# Exact match: remember it and flag that we should emit the list.
matches=true
let count=$count+1
toolList+=($tool)
elif [[ "${tool}" == "${COMP_WORDS[COMP_CWORD]}"* ]] ; then
# Prefix match: collect it as a possible longer completion.
toolList+=($tool)
fi
done
# If we have a complete match, then we print out our partial matches as a space separated string.
# That way we have a list of all possible full completions for this match.
# For instance, if there was a tool named "read" and another named "readBetter" this would get both.
if $matches ; then
echo "${toolList[@]}"
fi
}
# Gets how many dependent arguments we have left to fill
_bashTabCompletionDocletTestLaunchWithDefaults_getDependentArgumentCount()
{
    # Count how many of the words already on the command line are dependent
    # arguments, so the caller knows how many remain to be completed.
    local depArgCount=0
    for word in ${COMP_LINE} ; do
        for depArg in ${DEPENDENT_ARGUMENTS[@]} ; do
            if [[ "${word}" == "${depArg}" ]] ; then
                # BUG FIX: this was `$((depArgCount++))`, which expands the
                # arithmetic result and then EXECUTES it as a command
                # ("0: command not found" on every hit). `let` increments
                # silently and matches the style used elsewhere in this file.
                let depArgCount=$depArgCount+1
            fi
        done
    done
    echo $depArgCount
}
# Resolves the given argument name to its long (normal) name
_bashTabCompletionDocletTestLaunchWithDefaults_resolveVarName()
{
local argName=$1
# If the name appears in any synonym group, extract that group (groups are
# ';'-separated lists inside SYNONYMOUS_ARGS) and print its first (long)
# member; otherwise the name is already canonical and is echoed unchanged.
if [[ "${SYNONYMOUS_ARGS[@]}" == *"${argName}"* ]] ; then
echo "${SYNONYMOUS_ARGS[@]}" | sed -e "s#.* \\([a-zA-Z0-9;,_\\-]*${argName}[a-zA-Z0-9,;_\\-]*\\).*#\\1#g" -e 's#;.*##g'
else
echo "${argName}"
fi
}
# Checks if we need to complete the VALUE for an argument.
# Prints the index in the given argument list of the corresponding argument whose value we must complete.
# Takes as input 1 positional argument: the name of the last argument given to this script
# Otherwise prints -1
_bashTabCompletionDocletTestLaunchWithDefaults_needToCompleteArgValue()
{
# "--" is the prefix/postfix delimiter, never an argument expecting a value.
if [[ "${prev}" != "--" ]] ; then
# Canonicalize the previous word in case a short/synonym form was typed.
local resolved=$( _bashTabCompletionDocletTestLaunchWithDefaults_resolveVarName ${prev} )
for (( i=0 ; i < ${#ALL_LEGAL_ARGUMENTS[@]} ; i++ )) ; do
if [[ "${resolved}" == "${ALL_LEGAL_ARGUMENTS[i]}" ]] ; then
# Make sure the argument isn't one that takes no additional value
# such as a flag.
if [[ "${ALL_ARGUMENT_VALUE_TYPES[i]}" != "null" ]] ; then
echo "$i"
else
echo "-1"
fi
return 0
fi
done
fi
echo "-1"
}
# Get the completion word list for the given argument type.
# Prints the completion string to the screen
_bashTabCompletionDocletTestLaunchWithDefaults_getCompletionWordList()
{
# Normalize the type string so it's easier to deal with:
local argType=$( echo $1 | tr '[A-Z]' '[a-z]')
local isNumeric=false
local isFloating=false
# completionType holds a compgen action ("-A <action>" or "-W <wordlist>")
# chosen from the argument's declared value type.
local completionType=""
[[ "${argType}" == *"file"* ]] && completionType='-A file'
[[ "${argType}" == *"folder"* ]] && completionType='-A directory'
[[ "${argType}" == *"directory"* ]] && completionType='-A directory'
[[ "${argType}" == *"boolean"* ]] && completionType='-W true false'
[[ "${argType}" == "int" ]] && completionType='-W 0 1 2 3 4 5 6 7 8 9' && isNumeric=true
[[ "${argType}" == *"[int]"* ]] && completionType='-W 0 1 2 3 4 5 6 7 8 9' && isNumeric=true
[[ "${argType}" == "long" ]] && completionType='-W 0 1 2 3 4 5 6 7 8 9' && isNumeric=true
[[ "${argType}" == *"[long]"* ]] && completionType='-W 0 1 2 3 4 5 6 7 8 9' && isNumeric=true
[[ "${argType}" == "double" ]] && completionType='-W . 0 1 2 3 4 5 6 7 8 9' && isNumeric=true && isFloating=true
[[ "${argType}" == *"[double]"* ]] && completionType='-W . 0 1 2 3 4 5 6 7 8 9' && isNumeric=true && isFloating=true
[[ "${argType}" == "float" ]] && completionType='-W . 0 1 2 3 4 5 6 7 8 9' && isNumeric=true && isFloating=true
[[ "${argType}" == *"[float]"* ]] && completionType='-W . 0 1 2 3 4 5 6 7 8 9' && isNumeric=true && isFloating=true
# If we have a number, we need to prepend the current completion to it so that we can continue to tab complete:
if $isNumeric ; then
completionType=$( echo ${completionType} | sed -e "s#\([0-9]\)#$cur\1#g" )
# If we're floating point, we need to make sure we don't complete a `.` character
# if one already exists in our number:
if $isFloating ; then
echo "$cur" | grep -o '\.' &> /dev/null
local r=$?
# grep exit 0 means a '.' is already present: drop the '.' candidate
# (awk removes the second field of the "-W . 0 1 ..." word list).
[[ $r -eq 0 ]] && completionType=$( echo ${completionType} | awk '{$2="" ; print}' )
fi
fi
echo "${completionType}"
}
# Populate COMPREPLY for the word under the cursor once the caller has filled
# in the argument metadata arrays (NUM_POSITIONAL_ARGUMENTS,
# DEPENDENT_ARGUMENTS, NORMAL_COMPLETION_ARGUMENTS, SYNONYMOUS_ARGS,
# MUTUALLY_EXCLUSIVE_ARGS, MAX_OCCURRENCES, ALL_LEGAL_ARGUMENTS,
# ALL_ARGUMENT_VALUE_TYPES).
#   no argument        -> completing a tool's own options (positional and
#                         dependent arguments take precedence)
#   "POSTFIX_OPTIONS"  -> completing only words after the " -- " delimiter
#   "PREFIX_OPTIONS"   -> completing the caller script's own options
_bashTabCompletionDocletTestLaunchWithDefaults_handleArgs()
{
    # Argument offset index is used in the special case where we are past the " -- " delimiter.
    local argOffsetIndex=0
    # We handle the beginning differently if this function was called with an argument
    if [[ $# -eq 0 ]] ; then
        # Get the number of arguments we have input so far:
        local toolNameIndex=$(_bashTabCompletionDocletTestLaunchWithDefaults_getToolNameIndex)
        local numArgs=$((COMP_CWORD-toolNameIndex-1))
        # Now we check to see what kind of argument we are on right now
        # We handle each type separately by order of precedence:
        if [[ ${numArgs} -lt ${NUM_POSITIONAL_ARGUMENTS} ]] ; then
            # We must complete a positional argument.
            # Assume that positional arguments are all FILES:
            COMPREPLY=( $(compgen -A file -- $cur) )
            return 0
        fi
        # Dependent arguments must come right after positional arguments
        # We must check to see how many dependent arguments we've gotten so far:
        local numDepArgs=$( _bashTabCompletionDocletTestLaunchWithDefaults_getDependentArgumentCount )
        if [[ $numDepArgs -lt ${#DEPENDENT_ARGUMENTS[@]} ]] ; then
            # We must complete a dependent argument next.
            COMPREPLY=( $(compgen -W '${DEPENDENT_ARGUMENTS[@]}' -- $cur) )
            return 0
        fi
    elif [[ "${1}" == "POSTFIX_OPTIONS" ]] ; then
        # Get the index of the special delimiter.
        # we ignore everything up to and including it.
        for (( i=0; i < COMP_CWORD ; i++ )) ; do
            if [[ "${COMP_WORDS[i]}" == "--" ]] ; then
                let argOffsetIndex=$i+1
            fi
        done
    fi
    # NOTE: We don't need to worry about the prefix options case.
    # The caller will specify it and it skips the two special cases above.
    # First we must resolve all arguments to their full names
    # This is necessary to save time later because of short argument names / synonyms
    local resolvedArgList=()
    for (( i=argOffsetIndex ; i < COMP_CWORD ; i++ )) ; do
        prevArg=${COMP_WORDS[i]}
        # Skip the current word to be completed:
        [[ "${prevArg}" == "${cur}" ]] && continue
        # Check if this has synonyms:
        if [[ "${SYNONYMOUS_ARGS[@]}" == *"${prevArg}"* ]] ; then
            local resolvedArg=$( _bashTabCompletionDocletTestLaunchWithDefaults_resolveVarName "${prevArg}" )
            resolvedArgList+=($resolvedArg)
        # Make sure this is an argument:
        elif [[ "${ALL_LEGAL_ARGUMENTS[@]}" == *"${prevArg}"* ]] ; then
            resolvedArgList+=($prevArg)
        fi
    done
    # Check to see if the last thing we typed was a complete argument.
    # If so, we must complete the VALUE for the argument, not the
    # argument itself:
    # Note: This is shorthand for last element in the array:
    local argToComplete=$( _bashTabCompletionDocletTestLaunchWithDefaults_needToCompleteArgValue )
    if [[ $argToComplete -ne -1 ]] ; then
        # We must complete the VALUE for an argument.
        # Get the argument type.
        local valueType=${ALL_ARGUMENT_VALUE_TYPES[argToComplete]}
        # Get the correct completion string for the type:
        local completionString=$( _bashTabCompletionDocletTestLaunchWithDefaults_getCompletionWordList "${valueType}" )
        if [[ ${#completionString} -eq 0 ]] ; then
            # We don't have any information on the type to complete.
            # We use the default SHELL behavior:
            COMPREPLY=()
        else
            # We have a completion option. Let's plug it in:
            # The completion string has the shape "<compgen operator> <options...>":
            local compOperator=$( echo "${completionString}" | awk '{print $1}' )
            local compOptions=$( echo "${completionString}" | awk '{$1="" ; print}' )
            case ${compOperator} in
                -A) COMPREPLY=( $(compgen -A ${compOptions} -- $cur) ) ;;
                -W) COMPREPLY=( $(compgen -W '${compOptions}' -- $cur) ) ;;
                *) COMPREPLY=() ;;
            esac
        fi
        return 0
    fi
    # We must create a list of the valid remaining arguments:
    # Create a list of all arguments that are
    # mutually exclusive with arguments we have already specified
    local mutex_list=""
    for prevArg in ${resolvedArgList[@]} ; do
        if [[ "${MUTUALLY_EXCLUSIVE_ARGS[@]}" == *"${prevArg};"* ]] ; then
            local mutexArgs=$( echo "${MUTUALLY_EXCLUSIVE_ARGS[@]}" | sed -e "s#.*${prevArg};\([a-zA-Z0-9_,\-]*\) .*#\1#g" -e "s#,# --#g" -e "s#^#--#g" )
            mutex_list="${mutex_list}${mutexArgs}"
        fi
    done
    local remaining_legal_arguments=()
    for (( i=0; i < ${#NORMAL_COMPLETION_ARGUMENTS[@]} ; i++ )) ; do
        local legalArg=${NORMAL_COMPLETION_ARGUMENTS[i]}
        local okToAdd=true
        # Get the number of times this has occurred in the arguments already:
        local numPrevOccurred=$( grep -o -- "${legalArg}" <<< "${resolvedArgList[@]}" | wc -l | awk '{print $1}' )
        if [[ $numPrevOccurred -lt "${MAX_OCCURRENCES[i]}" ]] ; then
            # Make sure this arg isn't mutually exclusive to another argument that we've already had:
            if [[ "${mutex_list}" == "${legalArg} "* ]] ||
               [[ "${mutex_list}" == *" ${legalArg} "* ]] ||
               [[ "${mutex_list}" == *" ${legalArg}" ]] ; then
                okToAdd=false
            fi
            # Check if we're still good to add in the argument:
            if $okToAdd ; then
                # Add in the argument:
                remaining_legal_arguments+=($legalArg)
                # Add in the synonyms of the argument:
                if [[ "${SYNONYMOUS_ARGS[@]}" == *"${legalArg}"* ]] ; then
                    local synonymString=$( echo "${SYNONYMOUS_ARGS[@]}" | sed -e "s#.*${legalArg};\([a-zA-Z0-9_,\-]*\).*#\1#g" -e "s#,# #g" )
                    remaining_legal_arguments+=($synonymString)
                fi
            fi
        fi
    done
    # Add in the special option "--" which separates tool options from meta-options if they're necessary:
    if $HAS_POSTFIX_OPTIONS ; then
        if [[ $# -eq 0 ]] || [[ "${1}" == "PREFIX_OPTIONS" ]] ; then
            remaining_legal_arguments+=("--")
        fi
    fi
    COMPREPLY=( $(compgen -W '${remaining_legal_arguments[@]}' -- $cur) )
    return 0
}
####################################################################################################
# Top-level completion entry point registered with `complete`.
# Sets the global completion context ($cur/$prev), selects which tool (if any)
# is being completed, fills the per-tool metadata arrays, and delegates to
# _bashTabCompletionDocletTestLaunchWithDefaults_handleArgs.
_bashTabCompletionDocletTestLaunchWithDefaults_masterCompletionFunction()
{
    # Set up global variables for the functions that do completion:
    prev=${COMP_WORDS[COMP_CWORD-1]}
    cur=${COMP_WORDS[COMP_CWORD]}
    # How many positional arguments a tool will have.
    # These positional arguments must come directly after a tool name.
    NUM_POSITIONAL_ARGUMENTS=0
    # The types of the positional arguments, in the order in which they must be specified
    # on the command-line.
    POSITIONAL_ARGUMENT_TYPE=()
    # The set of legal arguments that aren't dependent arguments.
    # (A dependent argument is an argument that must occur immediately after
    # all positional arguments.)
    NORMAL_COMPLETION_ARGUMENTS=()
    # The set of ALL legal arguments
    # Corresponds by index to the type of those arguments in ALL_ARGUMENT_VALUE_TYPES
    ALL_LEGAL_ARGUMENTS=()
    # The types of ALL legal arguments
    # Corresponds by index to the names of those arguments in ALL_LEGAL_ARGUMENTS
    ALL_ARGUMENT_VALUE_TYPES=()
    # Arguments that are mutually exclusive.
    # These are listed here as arguments concatenated together with delimiters:
    # <Main argument>;<Mutex Argument 1>[,<Mutex Argument 2>,...]
    MUTUALLY_EXCLUSIVE_ARGS=()
    # Alternate names of arguments.
    # These are listed here as arguments concatenated together with delimiters.
    # <Main argument>;<Synonym Argument 1>[,<Synonym Argument 2>,...]
    SYNONYMOUS_ARGS=()
    # The minimum number of times an argument can occur.
    MIN_OCCURRENCES=()
    # The maximum number of times an argument can occur.
    MAX_OCCURRENCES=()
    # Set up locals for this function:
    local toolName=$( _bashTabCompletionDocletTestLaunchWithDefaults_getToolName )
    # Get possible tool matches:
    local possibleToolMatches=$( _bashTabCompletionDocletTestLaunchWithDefaults_getAllPossibleToolNames )
    # Check if we have postfix options
    # and if we now need to go through them:
    if $HAS_POSTFIX_OPTIONS && [[ "${COMP_WORDS[@]}" == *" -- "* ]] ; then
        # Past the " -- " delimiter: complete only the caller's postfix options.
        NUM_POSITIONAL_ARGUMENTS=0
        POSITIONAL_ARGUMENT_TYPE=()
        DEPENDENT_ARGUMENTS=()
        NORMAL_COMPLETION_ARGUMENTS=("${CS_POSTFIX_OPTIONS_NORMAL_COMPLETION_ARGUMENTS[@]}")
        MUTUALLY_EXCLUSIVE_ARGS=("${CS_POSTFIX_OPTIONS_MUTUALLY_EXCLUSIVE_ARGS[@]}")
        SYNONYMOUS_ARGS=("${CS_POSTFIX_OPTIONS_SYNONYMOUS_ARGS[@]}")
        MIN_OCCURRENCES=("${CS_POSTFIX_OPTIONS_MIN_OCCURRENCES[@]}")
        MAX_OCCURRENCES=("${CS_POSTFIX_OPTIONS_MAX_OCCURRENCES[@]}")
        ALL_LEGAL_ARGUMENTS=("${CS_POSTFIX_OPTIONS_ALL_LEGAL_ARGUMENTS[@]}")
        ALL_ARGUMENT_VALUE_TYPES=("${CS_POSTFIX_OPTIONS_ALL_ARGUMENT_VALUE_TYPES[@]}")
        # Complete the arguments for the base script:
        # Strictly speaking, what the argument to this function is doesn't matter.
        _bashTabCompletionDocletTestLaunchWithDefaults_handleArgs POSTFIX_OPTIONS
    # Check if we have a complete tool match that may match more than one tool:
    elif [[ ${#possibleToolMatches} -ne 0 ]] ; then
        # Set our reply as a list of the possible tool matches:
        COMPREPLY=( $(compgen -W '${possibleToolMatches[@]}' -- $cur) )
    elif [[ ${toolName} == "TestExtraDocs" ]] ; then
        # Set up the completion information for this tool:
        DEPENDENT_ARGUMENTS=()
        NORMAL_COMPLETION_ARGUMENTS=(--extraDocsArgument )
        MUTUALLY_EXCLUSIVE_ARGS=()
        SYNONYMOUS_ARGS=("--extraDocsArgument;-extDocArg" )
        MIN_OCCURRENCES=(0 )
        MAX_OCCURRENCES=(2147483647 )
        ALL_LEGAL_ARGUMENTS=(--extraDocsArgument )
        ALL_ARGUMENT_VALUE_TYPES=("String" )
        # Complete the arguments for this tool:
        _bashTabCompletionDocletTestLaunchWithDefaults_handleArgs
    elif [[ ${toolName} == "TestArgumentContainer" ]] ; then
        # Set up the completion information for this tool:
        NUM_POSITIONAL_ARGUMENTS=2
        POSITIONAL_ARGUMENT_TYPE=("List[File]")
        DEPENDENT_ARGUMENTS=()
        NORMAL_COMPLETION_ARGUMENTS=(--requiredClpEnum --requiredFileList --requiredInputFilesFromArgCollection --requiredStringInputFromArgCollection --requiredStringList --usesFieldNameForArgName --fullAnonymousArgName --mutexArg --mutexTargetField1 --mutexTargetField2 --optionalClpEnum --optionalDouble --optionalDoubleList --optionalFileList --optionalFlag --optionalInputFilesFromArgCollection --optionalStringInputFromArgCollection --optionalStringList --testPlugin --advancedOptionalInt --deprecatedString )
        MUTUALLY_EXCLUSIVE_ARGS=("--mutexArg;READ1_ALIGNED_BAM,READ2_ALIGNED_BAM" "--mutexTargetField1;mutexSourceField" "--mutexTargetField2;mutexSourceField" )
        SYNONYMOUS_ARGS=("--requiredClpEnum;-requiredClpEnum" "--requiredFileList;-reqFilList" "--requiredInputFilesFromArgCollection;-rRequiredInputFilesFromArgCollection" "--requiredStringInputFromArgCollection;-requiredStringInputFromArgCollection" "--requiredStringList;-reqStrList" "--fullAnonymousArgName;-anonymousClassArg" "--mutexArg;-mutexArg" "--mutexTargetField1;-mutexTargetField1" "--mutexTargetField2;-mutexTargetField2" "--optionalClpEnum;-optionalClpEnum" "--optionalDouble;-optDouble" "--optionalDoubleList;-optDoubleList" "--optionalFileList;-optFilList" "--optionalFlag;-optFlag" "--optionalInputFilesFromArgCollection;-optionalInputFilesFromArgCollection" "--optionalStringInputFromArgCollection;-optionalStringInputFromArgCollection" "--optionalStringList;-optStrList" "--advancedOptionalInt;-advancedOptInt" "--deprecatedString;-depStr" )
        MIN_OCCURRENCES=(0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 )
        MAX_OCCURRENCES=(2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 2147483647 )
        ALL_LEGAL_ARGUMENTS=(--requiredClpEnum --requiredFileList --requiredInputFilesFromArgCollection --requiredStringInputFromArgCollection --requiredStringList --usesFieldNameForArgName --fullAnonymousArgName --mutexArg --mutexTargetField1 --mutexTargetField2 --optionalClpEnum --optionalDouble --optionalDoubleList --optionalFileList --optionalFlag --optionalInputFilesFromArgCollection --optionalStringInputFromArgCollection --optionalStringList --testPlugin --advancedOptionalInt --deprecatedString )
        ALL_ARGUMENT_VALUE_TYPES=("TestEnum" "List[File]" "List[File]" "String" "List[String]" "String" "List[File]" "List[File]" "List[File]" "List[File]" "TestEnum" "double" "List[Double]" "List[File]" "boolean" "List[File]" "String" "List[String]" "List[String]" "int" "int" )
        # Complete the arguments for this tool:
        _bashTabCompletionDocletTestLaunchWithDefaults_handleArgs
    # We have no postfix options or tool options.
    # We now must complete any prefix options and the tools themselves.
    # These are defined at the top.
    else
        NUM_POSITIONAL_ARGUMENTS=0
        POSITIONAL_ARGUMENT_TYPE=()
        DEPENDENT_ARGUMENTS=()
        NORMAL_COMPLETION_ARGUMENTS=("${CS_PREFIX_OPTIONS_NORMAL_COMPLETION_ARGUMENTS[@]}")
        MUTUALLY_EXCLUSIVE_ARGS=("${CS_PREFIX_OPTIONS_MUTUALLY_EXCLUSIVE_ARGS[@]}")
        SYNONYMOUS_ARGS=("${CS_PREFIX_OPTIONS_SYNONYMOUS_ARGS[@]}")
        MIN_OCCURRENCES=("${CS_PREFIX_OPTIONS_MIN_OCCURRENCES[@]}")
        MAX_OCCURRENCES=("${CS_PREFIX_OPTIONS_MAX_OCCURRENCES[@]}")
        ALL_LEGAL_ARGUMENTS=("${CS_PREFIX_OPTIONS_ALL_LEGAL_ARGUMENTS[@]}")
        ALL_ARGUMENT_VALUE_TYPES=("${CS_PREFIX_OPTIONS_ALL_ARGUMENT_VALUE_TYPES[@]}")
        # Complete the arguments for the prefix arguments and tools:
        _bashTabCompletionDocletTestLaunchWithDefaults_handleArgs PREFIX_OPTIONS
    fi
}
# Register the master completion function for the caller script
# (-o default: fall back to readline filename completion when COMPREPLY is empty).
complete -o default -F _bashTabCompletionDocletTestLaunchWithDefaults_masterCompletionFunction ${CALLER_SCRIPT_NAME}
|
# CI gate: fail when goconst reports repeated string literals in any Go file.
# diff against empty input exits 0 iff goconst produced no output at all.
diff -u <(echo -n) <(find . -name "*.go" -print0 | xargs --null goconst)
# Fix: `==` inside `[ ]` is a bashism; `-eq` is the portable numeric test.
if [ $? -eq 0 ]; then
    echo "No goConst problem"
    exit 0
else
    echo "Has goConst Problem"
    exit 1
fi
|
import React, { useState } from 'react';
import moment from 'moment';
const Timer = () => {
const [timeLeft, setTimeLeft] = useState(moment('2021-04-15').diff(moment()));
// Calculates time left
const calculateTimeLeft = () => {
const diff = moment('2021-04-15').diff(moment());
const days = Math.floor(diff / (1000 * 60 * 60 * 24));
const hours = Math.floor((diff % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60));
const minutes = Math.floor((diff % (1000 * 60 * 60)) / (1000 * 60));
const seconds = Math.floor((diff % (1000 * 60)) / 1000);
return { days, hours, minutes, seconds };
}
// Set timer
const timer = setInterval(() => setTimeLeft(calculateTimeLeft()), 1000);
if (timeLeft.seconds <= 0 && timeLeft.minutes <= 0 && timeLeft.hours <= 0 && timeLeft.days <= 0) {
clearInterval(timer);
}
return (
<div>
<p>
Time remaining until April 15th, 2021:
{timeLeft.days} days {timeLeft.hours} hours {timeLeft.minutes} minutes {timeLeft.seconds} seconds
</p>
</div>
);
}
export default Timer; |
<reponame>obino/midonet
/*
* Copyright 2016 Midokura SARL
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.midonet.util.metrics
import java.io.OutputStream
import java.util.concurrent.TimeUnit
import java.util.concurrent.locks.ReentrantLock
import com.codahale.metrics.{Clock, Reservoir, Snapshot}
import org.HdrHistogram.Histogram
import org.HdrHistogram.Recorder
import org.midonet.Util
/*
* Metrics latency reservoir that provides a sliding window of latency
* statistics.
* The time window is broken into a number of buckets, each of which is a
* HdrHistogram recorder. When reading the latency stats, the values from
* the last N buckets are returned, where N is the number of buckets needed
* to cover the time window.
*/
class HdrHistogramSlidingTimeWindowReservoir(window: Long, unit: TimeUnit,
                                             stepLength: Long, stepUnit: TimeUnit,
                                             clock: Clock)
        extends Reservoir {

    // Bucket length in nanoseconds, rounded DOWN to a power of two so a
    // timestamp can be mapped to a time slot with a shift instead of a division.
    val stepNanos = Util.findPreviousPositivePowerOfTwo(
        TimeUnit.NANOSECONDS.convert(stepLength, stepUnit))
    // Shift amount equivalent to dividing by stepNanos.
    val stepShift = Util.highestBit(stepNanos)
    // How many buckets are needed to cover the requested window.
    val bucketsPerWindow = Math.ceil(
        TimeUnit.NANOSECONDS.convert(window, unit).toDouble / stepNanos).toInt
    // Ring-buffer capacity: next power of two so (slot & bucketsMask) indexes it.
    val buckets = Util.findNextPositivePowerOfTwo(bucketsPerWindow)
    val bucketsMask = buckets - 1
    // One histogram per bucket; recorder accumulates the current interval.
    // 3600000000L ns (1 hour) is the highest trackable value, 3 significant digits.
    val measurements = new Array[Histogram](buckets.toInt)
    val recorder = new Recorder(3600000000L, 3)
    // Last time slot that was rotated into `measurements`; guarded by snapshotLock.
    var lastTimeSlot = 0L
    val snapshotLock = new ReentrantLock()
    resetBuckets

    // Scratch histograms reused across getSnapshot calls (not thread-safe by
    // themselves — access is serialized through snapshotLock).
    val snapshotHistogram = new Histogram(3600000000L, 3)
    val tmpHistogram = new Histogram(3600000000L, 3)

    // Reusable Snapshot view backed by snapshotHistogram.
    val snapshot = new Snapshot() {
        def getValue(quantile: Double): Double =
            snapshotHistogram.getValueAtPercentile(quantile)
        def size: Int = snapshotHistogram.getTotalCount.toInt
        def getMax: Long = snapshotHistogram.getMaxValue
        def getMean: Double = {
            snapshotLock.lock()
            try {
                snapshotHistogram.getMean
            } finally {
                snapshotLock.unlock()
            }
        }
        def getMin: Long = snapshotHistogram.getMinValue
        def getStdDev: Double = {
            snapshotLock.lock()
            try {
                snapshotHistogram.getStdDeviation
            } finally {
                snapshotLock.unlock()
            }
        }
        // this is a noop, because you can't get all values from hdr histogram
        val getValues: Array[Long] = new Array[Long](0)
        def dump(output: OutputStream): Unit = {
            // noop, you can't get all values from hdr histogram
        }
    }

    override def size: Int = getSnapshot.size

    // Record a single latency value into the current time slot's recorder.
    override def update(value: Long): Unit = {
        val currentTimeSlot = clock.getTick() >> stepShift
        getCurrentRecorder(currentTimeSlot).recordValue(value)
    }

    // Merge the current bucket plus the previous (bucketsPerWindow - 1)
    // buckets into snapshotHistogram and return the shared snapshot view.
    override def getSnapshot: Snapshot = {
        snapshotHistogram.reset()
        snapshotLock.lock()
        try {
            val currentTimeSlot = clock.getTick() >> stepShift
            // copy current bucket values into snapshot
            val recorder = getCurrentRecorder(currentTimeSlot)
            recorder.getIntervalHistogramInto(tmpHistogram)
            val bucket = (currentTimeSlot & bucketsMask).toInt
            measurements(bucket).add(tmpHistogram)
            snapshotHistogram.add(measurements(bucket))
            // copy previous buckets that make up window into snapshot
            var i = (currentTimeSlot - bucketsPerWindow) + 1
            while (i < currentTimeSlot) {
                val bucket = (i & bucketsMask).toInt
                if (measurements(bucket).getTotalCount > 0) {
                    snapshotHistogram.add(measurements(bucket))
                }
                i += 1
            }
        } finally {
            snapshotLock.unlock()
        }
        snapshot
    }

    // Rotate the recorder's interval data into the ring buffer for every slot
    // between lastTimeSlot and currentTimeSlot, then return the recorder.
    // tryLock: if another thread is already rotating/snapshotting, recording
    // proceeds without rotation rather than blocking the hot path.
    private def getCurrentRecorder(currentTimeSlot: Long): Recorder = {
        if (currentTimeSlot != lastTimeSlot &&
                snapshotLock.tryLock()) {
            try {
                if (currentTimeSlot - lastTimeSlot > buckets) {
                    // Gap longer than the whole ring: all buckets are stale.
                    resetBuckets
                    lastTimeSlot = currentTimeSlot
                }
                while (lastTimeSlot < currentTimeSlot) {
                    val lastBucket = (lastTimeSlot & bucketsMask).toInt
                    recorder.getIntervalHistogramInto(tmpHistogram)
                    measurements(lastBucket).add(tmpHistogram)
                    lastTimeSlot += 1
                    val currentBucket = (lastTimeSlot & bucketsMask).toInt
                    measurements(currentBucket).reset()
                    recorder.reset()
                }
            } finally {
                snapshotLock.unlock()
            }
        }
        recorder
    }

    // Allocate-on-first-use and clear every bucket histogram.
    private def resetBuckets(): Unit = {
        var i = 0
        while (i < buckets) {
            if (measurements(i) == null) {
                measurements(i) = new Histogram(3600000000L, 3)
            }
            measurements(i).reset()
            i += 1
        }
    }
}
|
# Keep only the strings that do not contain the substring 'er'.
# NOTE(review): `list` here shadows the built-in name — it must be a list of
# strings defined earlier in the program.
new_list = [s for s in list if 'er' not in s]
print(new_list)
<reponame>benoitc/pypy
#! /usr/bin/env python
import os, sys
from pypy.rlib.rsha import RSHA
# __________ Entry point __________
def entry_point(argv):
    # Print the SHA-1 hex digest of every file named on the command line.
    # Python 2 / RPython-translatable code: os-level I/O (open/read/close)
    # instead of file objects, print statement rather than print().
    for filename in argv[1:]:
        sha = RSHA()
        fd = os.open(filename, os.O_RDONLY, 0)
        while True:
            # Read in 16 KiB chunks so arbitrarily large files fit in memory.
            buf = os.read(fd, 16384)
            if not buf: break
            sha.update(buf)
        os.close(fd)
        # Output one "<digest> <filename>" line per file.
        print sha.hexdigest(), filename
    return 0
# _____ Define and setup target ___
def target(*args):
    # Translation hook: the PyPy/RPython toolchain calls this to obtain the
    # program's entry point.  NOTE(review): the second tuple element is
    # presumably the (unused) argument-types slot — confirm against the
    # translation driver in use.
    return entry_point, None
if __name__ == '__main__':
    # Untranslated (plain CPython) run: rebind RSHA to the stdlib sha
    # implementation, replacing the RPython RSHA imported at module level,
    # so the script works without translation.
    from sha import sha as RSHA
    import sys
    res = entry_point(sys.argv)
    sys.exit(res)
|
import csv
from statistics import geometric_mean

def calculate_geometric_means(stdout):
    """Parse benchmark CSV text and return {benchmark_name: geometric mean}.

    `stdout` is CSV text whose first row is a header and whose remaining rows
    are ``benchmark_name, value1, value2, ...``.

    Robustness fixes over the original:
    - empty input no longer raises StopIteration (header skipped with a default)
    - blank lines / rows with no values are skipped instead of crashing
      geometric_mean, which raises on an empty sequence.

    Raises ValueError if a value is not numeric (or is <= 0, per
    statistics.geometric_mean's contract).
    """
    reader = csv.reader(stdout.splitlines())
    next(reader, None)  # skip header row; tolerate completely empty input
    results = {}
    for row in reader:
        if len(row) < 2:
            # Blank line or a row with a name but no performance values.
            continue
        benchmark = row[0]
        performance_values = [float(v) for v in row[1:]]
        results[benchmark] = geometric_mean(performance_values)
    return results
#!/bin/bash
# Caffe training launcher: builds a solver.prototxt from a template plus the
# hyperparameters given on the command line, creates a timestamped snapshot
# directory, runs caffe (optionally resuming from a .caffemodel/.solverstate),
# and extracts the loss curve from the training log.
set -e
set -x
folder="models/bvlc_googlenet"
file_prefix="caffenet_train"
model_path="models/bvlc_googlenet"
if [ "$#" -lt 7 ]; then
    echo "Illegal number of parameters"
    echo "Usage: train_script base_lr weight_decay prune_threshold max_threshold_factor block_group_decay device_id template_solver.prototxt [finetuned.caffemodel/.solverstate]"
    exit
fi
base_lr=$1
weight_decay=$2
prune_threshold=$3
max_threshold_factor=$4
block_group_decay=$5
solver_mode="GPU"
device_id=0
# Timestamp with spaces/colons replaced so it is filesystem-safe.
current_time=$(date)
current_time=${current_time// /_}
current_time=${current_time//:/-}
# One snapshot directory per run, named after the hyperparameters + timestamp.
snapshot_path=$folder/${base_lr}_${weight_decay}_${prune_threshold}_${max_threshold_factor}_${block_group_decay}_${current_time}
mkdir $snapshot_path
# Record the exact command line used for this run.
echo $@ > $snapshot_path/cmd.log
solverfile=$snapshot_path/solver.prototxt
template_file='template_solver.prototxt'
#if [ "$#" -ge 7 ]; then
template_file=$7
#fi
# Start from the template and append the run-specific solver parameters.
cat $folder/${template_file} > $solverfile
echo "block_group_decay: $block_group_decay" >> $solverfile
echo "prune_threshold: $prune_threshold" >> $solverfile
echo "max_threshold_factor: $max_threshold_factor" >> $solverfile
echo "weight_decay: $weight_decay" >> $solverfile
echo "base_lr: $base_lr" >> $solverfile
echo "snapshot_prefix: \"$snapshot_path/$file_prefix\"" >> $solverfile
#if [ "$#" -ge 6 ]; then
#if [ "$6" -ne "-1" ]; then
device_id=$6
#echo "device_id: $device_id" >> $solverfile
#else
#solver_mode="CPU"
#fi
#fi
echo "solver_mode: $solver_mode" >> $solverfile
#echo "regularization_type: \"$regularization_type\"" >> $solverfile
#cat $solverfile
# Optional 8th argument: resume/finetune from a .caffemodel (--weights)
# or a .solverstate (--snapshot), distinguished by file extension.
if [ "$#" -ge 8 ]; then
    tunedmodel=$8
    file_ext=$(echo ${tunedmodel} | rev | cut -d'.' -f 1 | rev)
    if [ "$file_ext" = "caffemodel" ]; then
        ./build/tools/caffe.bin train -gpu $device_id --solver=$solverfile --weights=$model_path/$tunedmodel > "${snapshot_path}/train.info" 2>&1
    else
        ./build/tools/caffe.bin train -gpu $device_id --solver=$solverfile --snapshot=$model_path/$tunedmodel > "${snapshot_path}/train.info" 2>&1
    fi
else
    ./build/tools/caffe.bin train -gpu $device_id --solver=$solverfile > "${snapshot_path}/train.info" 2>&1
fi
# Pull the "loss+" lines out of the log into a compact loss curve file.
cat ${snapshot_path}/train.info | grep loss+ | awk '{print $8 " " $11}' > ${snapshot_path}/loss.info
#cd $folder
#finalfiles=$(ls -ltr *caffemodel *.solverstate | awk '{print $9}' | tail -n 2 )
#for file in $finalfiles; do
#  cp $file "$current_time-$file"
#done
|
<filename>src/ratpack/static/webpack.production.config.js
/**
* Production config file for webpack
* Creates two bundles:
* 1. bundle.js -> Our application
* 2. vendor.js -> Vendor bundle containing libraries
*
*/
var path = require('path');
var Webpack = require('webpack');
// All bundles are emitted into ./dist next to this config file.
var buildPath = path.resolve(__dirname, 'dist');
// NOTE(review): this uses webpack 1.x-era config conventions (top-level
// `eslint` key, CommonsChunkPlugin positional args, NoErrorsPlugin) —
// confirm the project's webpack version before modernizing.
module.exports = {
    /**
     * Key, value config defining the entry points to our application.
     * 1. bundle entry contains everything that is required by ./app/index.ts and its' descendants
     * 2. vendor entry contains vendor libraries from node_modules. Every time for example react is
     *    required/imported webpack replaces that with a module from our vendor bundle
     *
     * We can define as many entry points as we want. This way we can split out our application to
     * different pages and include only needed code to those pages.
     */
    entry: {
        index: './app/front/index.js',
        vendor: ['core-js', 'react']
    },
    /**
     * The output defined contains only our bundle, the code from our application.
     * Created bundle will be output to /dist folder and will be called bundle.js
     * In case we would have multiple chunks we can simply name the output filename
     * as [name].js and the bundled file would be named after the key from entry config.
     * (See webpack.config.js for this.)
     */
    output: {
        path: buildPath,
        filename: '[name].js',
        publicPath: '/static/dist/'
    },
    // Allow imports without an extension for .js/.jsx files.
    resolve: {
        extensions: ['', '.js', '.jsx']
    },
    /**
     * Additional loaders that webpack will run against then bundle response creates.
     * For our production build we use babel and eslint.
     *
     * Babel transpiles ES6 and JSX files to ES5 javascript so response is compatible
     * to current browser versions.
     *
     * Eslint runs static analysis against our code and errors or warns in case
     * we have written possibly bad code.
     */
    module: {
        loaders: [
            {test: /\.jsx?$/, exclude: /node_modules/, loader: 'react-hot-loader'},
            {test: /\.jsx?$/, exclude: /node_modules/, loader: 'babel-loader'},
            {test: /\.css$/, exclude: /node_modules/, loader: 'style-loader!css-loader'},
            {test: /\.scss$/, exclude: /node_modules/, loader: 'style-loader!css-loader!sass-loader'},
            {test: /\.jsx?$/, exclude: /node_modules/, loader: 'eslint-loader'}
        ]
    },
    // Eslint config file location
    eslint: {
        configFile: './.eslintrc'
    },
    /**
     * Our additional plugins to be used during the build process
     */
    plugins: [
        // Chunk plugin makes the work of pointing our chunked bundle to correct
        // vendor bundle location
        new Webpack.optimize.CommonsChunkPlugin('vendor', 'vendor.js', Infinity),
        // Uglify JS minimizes our application bundle
        new Webpack.optimize.UglifyJsPlugin({
            compressor: {
                warnings: false
            }
        }),
        // Compile-time constants; NODE_ENV=production enables React's prod build.
        new Webpack.DefinePlugin({
            'process.env': {
                NODE_ENV: '"production"'
            },
            __CLIENT__: true,
            __SERVER__: false,
            __DEVELOPMENT__: false,
            __DEVTOOLS__: false
        }),
        // Deduping removes duplicated dependencies in case we have accidentally
        // required them multiple times. Decreases bundle size.
        new Webpack.optimize.DedupePlugin(),
        // NoErrors stops bundling in case our code errors out.
        new Webpack.NoErrorsPlugin()
    ]
}
;
<gh_stars>100-1000
// Fix: the two named imports from '@collectable/core' were split across two
// duplicate import statements; merged into one per standard TS/ES style.
import { isMutable, unwrap } from '@collectable/core';
import test from 'ava';
import { empty, get, set } from '../src';

// get() returns the stored value for a present key; the map stays immutable
// and otherwise unchanged.
test('returns the value with the specified key', t => {
  var map = set('x', 3, empty<string, number>());
  t.is(get('x', map), 3);
  t.false(isMutable(map));
  t.deepEqual(unwrap(map), { x: 3 });
});

// get() yields undefined for an absent key without mutating the map.
test('returns undefined if the specified key is missing', t => {
  var map = set('x', 3, empty<string, number>());
  t.is(get('y', map), void 0);
  t.false(isMutable(map));
  t.deepEqual(unwrap(map), { x: 3 });
});
|
/**
 * Attempts to load the native JNI library named by {@code JNI_LIB_NAME}.
 *
 * @return {@code true} if the library loaded successfully, {@code false}
 *         if it could not be found/linked (UnsatisfiedLinkError swallowed
 *         deliberately so callers can fall back to a pure-Java path).
 */
protected static boolean init() {
    try {
        System.loadLibrary(JNI_LIB_NAME);
        return true;
    } catch (UnsatisfiedLinkError e) {
        return false;
    }
}
#!/bin/bash
#
# Install the necessary dependencies for openFrameworks on openSUSE Tumbleweed
# This script assumes the git repository of openFrameworks to be located at the
# given argument path
#
# Suggested usage:
#
# $ sudo -e ./install_dependencies.sh ${OF_HOME}
#
# Copyright 2018 Greg von Winckel
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# Require the openFrameworks checkout path as the first argument.
if [ "$#" -lt 1 ]; then
    echo "You must supply the openFrameworks source path"
    exit 1
fi
OF_HOME=${1}
# Apothecary build formulas shipped inside the openFrameworks tree.
FORMULAS="${OF_HOME}/scripts/apothecary/apothecary/formulas"
CURRENT_PATH=`pwd`
# Scratch directory where source packages get extracted and built.
WORK_PATH="${CURRENT_PATH}/work"
if [ ! -d ${WORK_PATH} ]; then
    mkdir ${WORK_PATH}
fi
cd ${WORK_PATH}
# Add the repository for RT Audio
zypper addrepo http://ftp.gwdg.de/pub/opensuse/repositories/multimedia:/libs/openSUSE_Tumbleweed/ opensuse-multimedia-libs
# Install the development versions of packages
zypper install \
    alsa-devel \
    boost-devel \
    cmake \
    flac-devel \
    freeglut-devel \
    freeimage-devel \
    gcc-c++ \
    glew-devel \
    gstreamer-devel \
    gstreamer-plugins-base-devel \
    libcurl-devel \
    libjack-devel \
    libpulse-devel \
    libraw1394-devel \
    libsndfile-devel \
    libtheora-devel \
    libudev-devel \
    libvorbis-devel \
    libXmu-devel \
    libXxf86vm-devel \
    make \
    openal-soft-devel \
    portaudio-devel \
    python-lxml \
    rtaudio-devel
# openFrameworks expects RtAudio.h at the include root, not under rtaudio/.
ln -s /usr/include/rtaudio/RtAudio.h /usr/include/RtAudio.h
# Install the packages from source (zypper si downloads source packages
# into /usr/src/packages/SOURCES; they are built further below).
zypper si \
    assimp \
    glfw \
    gmp \
    libXcursor \
    libXi \
    mpc \
    mpfr \
    opencv \
    liburiparser1 \
    pugixml \
    xrandr
# Where zypper should install the package source archives
PACKAGES_PATH="/usr/src/packages/SOURCES"
# extract_pkg <package-name> <archive-extension>
# Finds the first source archive matching "<package>*.<ext>" in
# PACKAGES_PATH, extracts it into the current directory, and echoes the
# package's directory path (the function's "return value" for callers
# using DIR=`extract_pkg ...`).
extract_pkg()
{
    PKG=${1}
    EXT=${2}
    RESULT=`ls ${PACKAGES_PATH} | grep -m1 "${PKG}.*${EXT}"`
    # Fix: the original ran `echo ${RESULT}%.${EXT}*`, which appended the
    # literal text "%.tar.gz*" instead of performing the parameter expansion
    # that strips the archive extension from the filename.
    FILENAME=${RESULT%.${EXT}*}
    # NOTE(review): the archive is extracted into the CURRENT directory
    # (WORK_PATH for all callers), yet the echoed path is under
    # PACKAGES_PATH — confirm which location callers should cd into.
    FOLDER="${PACKAGES_PATH}/${FILENAME}"
    if [[ ${EXT} = *"bz2" ]]; then
        tar xfj "${PACKAGES_PATH}/${RESULT}"
    elif [[ ${EXT} = *"gz" ]]; then
        tar xfz "${PACKAGES_PATH}/${RESULT}"
    else
        tar xf "${PACKAGES_PATH}/${RESULT}"
    fi
    echo ${FOLDER}
}
# build_configure <dir>
# Autotools build helper: configure, build, install, then return to WORK_PATH.
build_configure()
{
    DIR=${1}
    cd ${DIR}
    ./configure
    # NOTE(review): `ncores` is not a standard utility — presumably a helper
    # on PATH printing the CPU core count (cf. `nproc`); confirm it exists.
    make -j`ncores` && make install
    cd ${WORK_PATH}
}
# build_cmake <dir>
# CMake build helper: out-of-source build in <dir>/build, install, then
# return to WORK_PATH.
build_cmake()
{
    DIR=${1}
    cd ${DIR}
    mkdir build
    cd build
    cmake ..
    # NOTE(review): `ncores` is not a standard utility — presumably a helper
    # on PATH printing the CPU core count (cf. `nproc`); confirm it exists.
    make -j`ncores` && make install
    cd ${WORK_PATH}
}
##########################################################################################
# Extract and build each source package fetched by `zypper si` above.
echo "Extracting assimp - Library to load and process 3D scenes from various data formats"
DIR=`extract_pkg assimp tar.gz`
build_cmake ${DIR}
##########################################################################################
echo "Extracting cairo - Vector Graphics Library with Cross-Device Output Support"
DIR=`extract_pkg cairo tar.xz`
build_configure ${DIR}
##########################################################################################
echo "Extracting glfw - Framework for OpenGL application development"
DIR=`extract_pkg glfw tar.gz`
build_cmake ${DIR}
##########################################################################################
echo "Extracting gmp - The GNU MP Library"
DIR=`extract_pkg gmp tar.xz`
build_configure ${DIR}
##########################################################################################
echo "Extracting libXcursor - X Window System Cursor management library"
DIR=`extract_pkg libXcursor tar.bz2`
build_configure ${DIR}
##########################################################################################
echo "Extracting libXi - X Input Extension library"
DIR=`extract_pkg libXi tar.bz2`
build_configure ${DIR}
##########################################################################################
echo "Extracting mpc - MPC multiple-precision complex shared library"
DIR=`extract_pkg mpc tar.gz`
build_configure ${DIR}
##########################################################################################
echo "Extracting mpfr - The GNU multiple-precision floating-point library"
DIR=`extract_pkg mpfr tar.bz2`
build_configure ${DIR}
# Fix: a stray "svgtiny" token stood alone here — it would have been executed
# as a (nonexistent) command. Removed; svgtiny is built via apothecary below.
##########################################################################################
echo "Extracting pugixml - Light-weight C++ XML Processing Library"
DIR=`extract_pkg pugixml tar.gz`
build_cmake ${DIR}
##########################################################################################
echo "Extacting liburiparser1 - A strictly RFC 3986 compliant URI parsing library"
DIR=`extract_pkg uriparser tar.bz2`
build_configure ${DIR}
##########################################################################################
echo "Extracting xrandr - Primitive command line interface to RandR extension"
# Fix: the command substitution was prefixed with stray text ("svgtiny"),
# which corrupted DIR and broke the subsequent cd inside build_configure.
DIR=`extract_pkg xrandr tar.bz2`
build_configure ${DIR}
# Install packages using the Apothecary scripts
cd ${FORMULAS}
./FreeImage/FreeImage.sh
./fmodex.sh
./json.sh
./kiss/kiss.sh
./libpng/libpng.sh
./libxml2/libxml2.sh
./openssl/openssl.sh
./poco/poco.sh
./svgtiny/svgtiny.sh
./tess2/tess2.sh
./uri/uri.sh
./utf8.sh
./videoInput.sh
cd ${CURRENT_PATH}
|
-- Total salary per employee, highest first.
-- COALESCE turns the NULL that SUM produces for employees with no salary
-- rows (LEFT JOIN) into 0, so those employees get an explicit total and a
-- well-defined sort position. AS added, and e.name included in GROUP BY
-- for portability to engines that require all selected columns grouped.
SELECT e.name, COALESCE(SUM(s.amount), 0) AS total_salary
FROM employee e
LEFT JOIN salary s ON e.id = s.employee_id
GROUP BY e.id, e.name
ORDER BY total_salary DESC
<filename>window/mousestate.go
package window
import (
"time"
)
// DefaultDoubleClickDuration is the default maximum interval, in
// milliseconds, between two clicks for them to count as a double click.
const DefaultDoubleClickDuration = 300
// MouseState keeps track of the state of pressed mouse buttons.
type MouseState struct {
    win                 IWindow                          // window whose mouse events are observed
    lastButton          MouseButton                      // most recently pressed button
    DoubleClickDuration time.Duration                    // max interval between clicks of a double click
    states              map[MouseButton]*mouseButtonState // per-button click state
}
// mouseButtonState holds per-button click tracking.
// clickCount encoding (as used by onMouseDown/onMouseUp): 0 = idle,
// 1/2 = pressed after a single/double click, -1/-2 = released after a
// single/double click.
type mouseButtonState struct {
    clickCount int
    lastClick  time.Time
}
// doubleClicked reports whether the last completed click sequence was a
// double click, regardless of whether the button is still held (+2) or
// already released (-2).
func (s *mouseButtonState) doubleClicked() bool {
    return s.clickCount == 2 || s.clickCount == -2
}
// NewMouseState returns a new MouseState object that tracks the given
// window's mouse buttons by subscribing to its OnMouseUp/OnMouseDown events.
func NewMouseState(win IWindow) *MouseState {
    ms := new(MouseState)
    ms.win = win
    ms.DoubleClickDuration = DefaultDoubleClickDuration * time.Millisecond
    // One state entry per tracked button.
    ms.states = map[MouseButton]*mouseButtonState{
        MouseButtonLeft:   {clickCount: 0, lastClick: time.Now()},
        MouseButtonRight:  {clickCount: 0, lastClick: time.Now()},
        MouseButtonMiddle: {clickCount: 0, lastClick: time.Now()},
    }
    // Subscribe to window mouse events
    // NOTE(review): &ms here is the address of this function's local pointer
    // variable, while Dispose passes the address of its own receiver copy —
    // those two IDs can never compare equal, so the unsubscribe may never
    // match. Passing ms itself in both places looks like the intent; confirm
    // against IWindow's SubscribeID/UnsubscribeID ID semantics.
    ms.win.SubscribeID(OnMouseUp, &ms, ms.onMouseUp)
    ms.win.SubscribeID(OnMouseDown, &ms, ms.onMouseDown)
    return ms
}
// Dispose unsubscribes from the window events.
// NOTE(review): &ms is the address of the receiver parameter of this call,
// which is a different pointer from the &ms local that NewMouseState passed
// to SubscribeID — the IDs cannot match, so these unsubscribes may be no-ops.
// Confirm against IWindow's ID semantics.
func (ms *MouseState) Dispose() {
    ms.win.UnsubscribeID(OnMouseUp, &ms)
    ms.win.UnsubscribeID(OnMouseDown, &ms)
}
// Pressed returns whether the specified mouse button is currently pressed
// (clickCount is positive only while the button is held down).
func (ms *MouseState) Pressed(b MouseButton) bool {
    return ms.states[b].clickCount > 0
}
// LeftPressed returns whether the left mouse button is currently pressed.
func (ms *MouseState) LeftPressed() bool {
    return ms.states[MouseButtonLeft].clickCount > 0
}
// RightPressed returns whether the right mouse button is currently pressed.
func (ms *MouseState) RightPressed() bool {
    return ms.states[MouseButtonRight].clickCount > 0
}
// MiddlePressed returns whether the middle mouse button is currently pressed.
func (ms *MouseState) MiddlePressed() bool {
    return ms.states[MouseButtonMiddle].clickCount > 0
}
// LeftDoubleClicked returns whether the user double clicked with the left
// mouse button (and the left button was the last one pressed).
func (ms *MouseState) LeftDoubleClicked() bool {
    return ms.lastButton == MouseButtonLeft && ms.states[MouseButtonLeft].doubleClicked()
}
// RightDoubleClicked returns whether the user double clicked with the right
// mouse button (and the right button was the last one pressed).
func (ms *MouseState) RightDoubleClicked() bool {
    return ms.lastButton == MouseButtonRight && ms.states[MouseButtonRight].doubleClicked()
}
// MiddleDoubleClicked returns whether the user double clicked with the middle
// mouse button (and the middle button was the last one pressed).
func (ms *MouseState) MiddleDoubleClicked() bool {
    return ms.lastButton == MouseButtonMiddle && ms.states[MouseButtonMiddle].doubleClicked()
}
// onMouseUp receives mouse-up events and updates the internal map of states.
// Negating clickCount marks the button as released while preserving how many
// clicks the sequence contained (1 -> -1, 2 -> -2), which onMouseDown uses
// to detect the second press of a double click.
func (ms *MouseState) onMouseUp(evname string, ev interface{}) {
    mev := ev.(*MouseEvent)
    if ms.states[mev.Button].clickCount > 0 {
        ms.states[mev.Button].clickCount *= -1
    }
}
// onMouseDown handles mouse-button press events, advancing the per-button
// click state machine:
//
//	 0 -> 1  fresh press (timestamp recorded)
//	-1 -> 2  second press within DoubleClickDuration: double click
//	-1 -> 1  second press arrived too late: treated as a fresh press
//	 * -> 1  any other state resets to a fresh press
func (ms *MouseState) onMouseDown(evname string, ev interface{}) {
    mev := ev.(*MouseEvent)
    ms.lastButton = mev.Button
    state := ms.states[mev.Button]
    now := time.Now()

    switch state.clickCount {
    case 0:
        // First press after an idle state.
        state.clickCount = 1
        state.lastClick = now
    case -1:
        // Button was pressed once and released; decide double click vs restart.
        if state.lastClick.Add(ms.DoubleClickDuration).Before(now) {
            // Too slow for a double click — start a new click sequence.
            state.clickCount = 1
            state.lastClick = now
        } else {
            state.clickCount = 2
        }
    default:
        // Any other state (e.g. after a double click) restarts the sequence.
        state.clickCount = 1
        state.lastClick = now
    }
}
|
/***************************************************************************
* (C) Copyright 2003-2008 - Marauroa *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package marauroa.server.game.python;
import java.io.InputStream;
import java.util.List;
import marauroa.common.Log4J;
import marauroa.common.game.AccountResult;
import marauroa.common.game.CharacterResult;
import marauroa.common.game.RPAction;
import marauroa.common.game.RPObject;
import marauroa.common.game.RPObjectInvalidException;
import marauroa.common.game.RPObjectNotFoundException;
import marauroa.server.game.rp.IRPRuleProcessor;
import marauroa.server.game.rp.RPServerManager;
/**
* Python implementation of IRPRuleProcessor. You can't inherit directly
* IRPRuleProcessor, so you need to inherit in your Python code the
* PythonRPRuleProcessor class.
*
* You should set ruleprocessor in server.ini to this class.
*
* @see IRPRuleProcessor
*
* @author miguel
*/
public class PythonRPRuleProcessor implements IRPRuleProcessor {

    /** the logger instance. */
    private static final marauroa.common.Logger logger = Log4J
            .getLogger(PythonRPRuleProcessor.class);

    /** The link with the python engine */
    private GameScript gameScript;

    /** Python implementation of IRPRuleProcessor */
    private PythonRP pythonRP;

    /**
     * Constructor
     */
    public PythonRPRuleProcessor() {
        // default constructor
    }

    /**
     * Set the context where the actions are executed.
     *
     * @param rpman
     *            the RP Manager object
     */
    public void setContext(RPServerManager rpman) {
        try {
            gameScript = GameScript.getGameScript();
            pythonRP = gameScript.getGameRules();
            pythonRP.setRPManager(rpman);
        } catch (Exception e) {
            // NOTE(review): if this fails, pythonRP stays null and every
            // delegating method below throws NullPointerException — confirm
            // setContext is always called (and succeeds) before any of them.
            logger.error("error while setting context", e);
        }
    }

    /** Delegates the begin-of-turn hook to the Python rule processor. */
    public void beginTurn() {
        pythonRP.beginTurn();
    }

    /** Delegates game/version compatibility checking to the Python rule processor. */
    public boolean checkGameVersion(String game, String version) {
        return pythonRP.checkGameVersion(game, version);
    }

    /** Delegates account creation to the Python rule processor. */
    public AccountResult createAccount(String username, String password, String email) {
        return pythonRP.createAccount(username, password, email);
    }

    /** Delegates token-based account creation to the Python rule processor. */
    public AccountResult createAccountWithToken(String username, String tokenType, String token) {
        return pythonRP.createAccountWithToken(username, tokenType, token);
    }

    /** Delegates character creation to the Python rule processor. */
    public CharacterResult createCharacter(String username, String character, RPObject template) {
        return pythonRP.createCharacter(username, character, template);
    }

    /** Delegates the end-of-turn hook to the Python rule processor. */
    public void endTurn() {
        pythonRP.endTurn();
    }

    /** Delegates action execution to the Python rule processor. */
    public void execute(RPObject object, RPAction action) {
        pythonRP.execute(object, action);
    }

    /** Delegates the action-queued hook to the Python rule processor. */
    public boolean onActionAdd(RPObject object, RPAction action, List<RPAction> actionList) {
        return pythonRP.onActionAdd(object, action, actionList);
    }

    /** Delegates the player-exit hook to the Python rule processor. */
    public boolean onExit(RPObject object) throws RPObjectNotFoundException {
        return pythonRP.onExit(object);
    }

    /** Delegates the player-login hook to the Python rule processor. */
    public boolean onInit(RPObject object) throws RPObjectInvalidException {
        return pythonRP.onInit(object);
    }

    /** Delegates the player-timeout hook to the Python rule processor. */
    public void onTimeout(RPObject object) throws RPObjectNotFoundException {
        pythonRP.onTimeout(object);
    }

    /** No resource serving in the Python processor: always returns null. */
    public String getMimeTypeForResource(String resource) {
        return null;
    }

    /** No resource serving in the Python processor: always returns null. */
    public InputStream getResource(String resource) {
        return null;
    }
}
|
<reponame>nagalun/guimachi
#pragma once
class Network;
class Peer;
// A user known to the Network, wrapping the Peer connection it arrived on
// plus a set of status/permission flags. The flags are read-only from the
// outside; only Network (a friend) can change them via update().
class NetworkUser {
    Peer& peer;      // underlying connection; a NetworkUser must not outlive it
    bool online;
    bool owner;
    bool admin;
    bool premium;
    bool suspended;

public:
    NetworkUser(Peer&);

    Peer& getPeer() const;
    bool isOnline() const;
    bool isOwner() const;
    bool isAdmin() const;
    bool isPremium() const;
    bool isSuspended() const;

private:
    // Replaces all flags in one call; invoked by Network (friend) only.
    void update(bool online, bool owner, bool admin, bool premium, bool suspended);

    friend Network;
};
|
<filename>packages/blockchain/src/components/pages/Campaign/InfoItem/InfoItem.tsx
import React from 'react';
import sharedClasses from '../../../../common.module.css';
import classes from './infoItem.module.css';
interface IProps {
  description: string;
  title: string;
  value: string | number;
}

/**
 * Renders a single labelled campaign statistic: a bold title, a highlighted
 * value, and a short description underneath.
 */
export default function InfoItem(props: IProps) {
  const { description, title, value } = props;

  return (
    <div className={classes.root}>
      <strong className={sharedClasses.h3}>{title}</strong>
      <span className={`${classes.value} ${sharedClasses.h3}`}>{value}</span>
      <span className={`${classes.description} ${sharedClasses.p}`}>
        {description}
      </span>
    </div>
  );
}
|
#! /usr/bin/env bash

# Regenerates examples/contour/01-crds.yaml from the Go API types using
# controller-gen.

set -o errexit
set -o nounset
set -o pipefail

readonly HERE=$(cd "$(dirname "$0")" && pwd)
readonly REPO=$(cd "${HERE}/.." && pwd)
readonly TEMPDIR=$(mktemp -d crd.XXXXXX)

# Optional first arg is the paths pattern.
readonly PATHS="${1:-"./apis/..."}"

# Remove the scratch directory on normal exit or on HUP/INT/TERM.
trap 'rm -rf "$TEMPDIR"; exit' 0 1 2 15

cd "${REPO}"

# Controller-gen seems to use an unstable sort for the order of output of the CRDs
# so, output them to separate files, then concatenate those files.
# That should give a stable sort.
go run sigs.k8s.io/controller-tools/cmd/controller-gen \
    crd:crdVersions=v1 "paths=${PATHS}" "output:dir=${TEMPDIR}"

# Explicitly add "preserveUnknownFields: false" to CRD specs since any CRDs created
# as v1beta1 will have this field set to true, which we don't want going forward, and
# it needs to be explicitly specified in order to be updated/removed. After enough time
# has passed and we're not concerned about folks upgrading from v1beta1 CRDs, we can
# remove the awk call that adds this field to the spec, and rely on the v1 default.
#
# Use the shell glob directly rather than `ls | xargs cat`, which spawns two
# extra processes and breaks on unusual filenames.
cat "${TEMPDIR}"/*.yaml | sed '/^$/d' | awk '/group: projectcontour.io/{print " preserveUnknownFields: false"}1' > "${REPO}/examples/contour/01-crds.yaml"
|
<reponame>ooooo-youwillsee/leetcode
//
// Created by ooooo on 2020/3/9.
//
#ifndef CPP_010_2__SOLUTION1_H_
#define CPP_010_2__SOLUTION1_H_
#include <iostream>
#include <vector>
using namespace std;
/**
 * Counts the ways to climb n steps taking 1 or 2 at a time (modulo 1e9+7).
 * Recurrence: dp[n] = dp[n-1] + dp[n-2], computed bottom-up with two
 * rolling variables instead of a dp array.
 */
class Solution {
 public:
  int numWays(int n) {
    if (n == 0) return 1;
    if (n == 1 || n == 2) return n;
    int prev = 1;   // dp[i-2]
    int curr = 2;   // dp[i-1]
    for (int step = 3; step <= n; ++step) {
      int next = (prev + curr) % 1000000007;
      prev = curr;
      curr = next;
    }
    return curr;
  }
};
#endif //CPP_010_2__SOLUTION1_H_
|
package com.span.spark.batch.datasources
import org.apache.spark.sql.DataFrame
/**
 * A batch data source: implementations expose their data as a Spark DataFrame.
 */
trait Source {
  /** The DataFrame produced by this source. */
  def source: DataFrame
}
|
#!/bin/bash
set -euo pipefail
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# A helper script for ensuring all checks pass before submitting any change.

echo ========== Running unit tests.
# `command -v` is the POSIX-recommended way to test for a program; the old
# `[[ -z \`which ...\` ]]` form relies on a deprecated utility and unquoted
# command substitution.
if ! command -v coverage >/dev/null 2>&1; then
  echo "coverage is not installed. Installing ..."
  python -m pip install coverage
fi
coverage run --source=gcp_variant_transforms setup.py test

echo ========== Running pylint.
if ! command -v pylint >/dev/null 2>&1; then
  echo "pylint is not installed. Installing ..."
  python -m pip install pylint
fi
python -m pylint setup.py
python -m pylint gcp_variant_transforms
|
#!/usr/bin/env bash
# Source definition from: https://docs.docker.com/engine/api/v1.41.yaml
# Example generation from: https://github.com/moby/moby/blob/v20.10.8/hack/generate-swagger-api.sh

# Generate Go server stubs from the Docker Engine API definition.
# $UID and ${PWD} are quoted so paths containing spaces don't word-split.
docker run -it --rm \
    -u "$UID" \
    -v "${PWD}:/opt/d2k" \
    --workdir /opt/d2k \
    --entrypoint swagger \
    quay.io/goswagger/swagger:v0.27.0 \
    generate server -f ./gen/v1.41.yaml --target=./src/openapi/gen/

# Open an interactive shell in the same image (e.g. to inspect the output).
docker run -it --rm \
    -u "$UID" \
    -v "${PWD}:/opt/d2k" \
    --workdir /opt/d2k \
    --entrypoint sh \
    quay.io/goswagger/swagger:v0.27.0
/**
* @license
* Copyright 2018 The FOAM Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Swift code-generation support: each refinement below adds Swift-specific
// properties (type names, factories, expressions, etc.) to a FOAM core model.

foam.CLASS({
  refines: 'foam.core.Argument',
  properties: [ 'swiftType', 'swiftDefaultValue' ]
});

foam.CLASS({
  refines: 'foam.core.Method',
  properties: [ 'swiftCode', 'swiftReturns', 'swiftSynchronized' ]
});

foam.CLASS({
  refines: 'foam.core.FObjectProperty',
  properties: [ 'swiftType', 'swiftFactory', 'swiftPostSet' ]
});

foam.CLASS({
  refines: 'foam.core.Property',
  properties: [ 'swiftType', 'swiftFactory', 'swiftExpression', 'swiftExpressionArgs' ]
});

foam.CLASS({
  refines: 'foam.core.internal.InterfaceMethod',
  properties: [ 'swiftThrows', 'swiftSupport' ]
});

foam.CLASS({
  refines: 'foam.core.InterfaceModel',
  properties: [ 'swiftName', 'swiftImplements' ]
});

foam.CLASS({
  refines: 'foam.core.String',
  properties: [ 'swiftThrows', 'swiftSupport', 'swiftExpression', 'swiftFactory' ]
});

foam.CLASS({
  refines: 'foam.core.Constant',
  properties: [ 'swiftValue', 'swiftType' ]
});
|
// Adds two fixed operands and logs the sum (15) to the console.
let x = 5;
let y = 10;
let result = x + y;
console.log(result);
<filename>CoreFoundation/CFDateFormatter.c
/*
* Copyright (c) 2008-2009 <NAME> <<EMAIL>>. All rights reserved.
*
* This source code is a modified version of the CoreFoundation sources released by Apple Inc. under
* the terms of the APSL version 2.0 (see below).
*
* For information about changes from the original Apple source release can be found by reviewing the
* source control system for the project at https://sourceforge.net/svn/?group_id=246198.
*
* The original license information is as follows:
*
* Copyright (c) 2008 Apple Inc. All rights reserved.
*
* @APPLE_LICENSE_HEADER_START@
*
* This file contains Original Code and/or Modifications of Original Code
* as defined in and that are subject to the Apple Public Source License
* Version 2.0 (the 'License'). You may not use this file except in
* compliance with the License. Please obtain a copy of the License at
* http://www.opensource.apple.com/apsl/ and read it before using this
* file.
*
* The Original Code and all software distributed under the License are
* distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
* EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
* INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
* Please see the License for the specific language governing rights and
* limitations under the License.
*
* @APPLE_LICENSE_HEADER_END@
*/
/* CFDateFormatter.c
Copyright 2002-2003, Apple, Inc. All rights reserved.
Responsibility: <NAME>
*/
#include <CoreFoundation/CFDateFormatter.h>
#include <CoreFoundation/CFDate.h>
#include <CoreFoundation/CFTimeZone.h>
#include <CoreFoundation/CFCalendar.h>
#include <CoreFoundation/CFNumber.h>
#include "CFInternal.h"
#include <unicode/udat.h>
#include <math.h>
#include <float.h>
extern UCalendar *__CFCalendarCreateUCalendar(CFStringRef calendarID, CFStringRef localeID, CFTimeZoneRef tz);
#ifdef DEPLOYMENT_TARGET_WINDOWS
extern CFAbsoluteTime __CFDateWindowsSystemTimeToAbsoluteTime(SYSTEMTIME *time);
#endif
static void __CFDateFormatterCustomize(CFDateFormatterRef formatter);
CF_EXPORT const CFStringRef kCFDateFormatterCalendarIdentifier;
#define BUFFER_SIZE 768
/* Instance layout for CFDateFormatter objects. */
struct __CFDateFormatter {
    CFRuntimeBase _base;
    UDateFormat *_df;               // underlying ICU date formatter
    CFLocaleRef _locale;
    CFDateFormatterStyle _timeStyle;
    CFDateFormatterStyle _dateStyle;
    CFStringRef _format;            // current pattern string
    CFStringRef _defformat;         // pattern derived from the styles at creation time
    CFStringRef _calendarName;
    CFTimeZoneRef _tz;
    CFDateRef _defaultDate;         // base date filled in when parsing partial strings
};
/* CFRuntimeClass callback: builds the "<CFDateFormatter %p [%p]>" description. */
static CFStringRef __CFDateFormatterCopyDescription(CFTypeRef cf) {
    CFDateFormatterRef formatter = (CFDateFormatterRef)cf;
    return CFStringCreateWithFormat(CFGetAllocator(formatter), NULL, CFSTR("<CFDateFormatter %p [%p]>"), cf, CFGetAllocator(formatter));
}
/* CFRuntimeClass callback: closes the ICU formatter and releases all owned refs. */
static void __CFDateFormatterDeallocate(CFTypeRef cf) {
    CFDateFormatterRef formatter = (CFDateFormatterRef)cf;
    if (formatter->_df) udat_close(formatter->_df);
    if (formatter->_locale) CFRelease(formatter->_locale);
    if (formatter->_format) CFRelease(formatter->_format);
    if (formatter->_defformat) CFRelease(formatter->_defformat);
    if (formatter->_calendarName) CFRelease(formatter->_calendarName);
    if (formatter->_tz) CFRelease(formatter->_tz);
    if (formatter->_defaultDate) CFRelease(formatter->_defaultDate);
}
/* Runtime type registration state and class table for CFDateFormatter. */
static CFTypeID __kCFDateFormatterTypeID = _kCFRuntimeNotATypeID;

static const CFRuntimeClass __CFDateFormatterClass = {
    0,
    "CFDateFormatter",
    NULL, // init
    NULL, // copy
    __CFDateFormatterDeallocate,
    NULL,
    NULL,
    NULL, //
    __CFDateFormatterCopyDescription
};
/* Registers the CFDateFormatter class with the CF runtime (run once). */
static void __CFDateFormatterInitialize(void) {
    __kCFDateFormatterTypeID = _CFRuntimeRegisterClass(&__CFDateFormatterClass);
}
/* Returns the CF type ID, registering the class lazily on first call.
 * NOTE(review): the lazy registration is not visibly synchronized here;
 * presumably first use happens before concurrency starts — confirm. */
CFTypeID CFDateFormatterGetTypeID(void) {
    if (_kCFRuntimeNotATypeID == __kCFDateFormatterTypeID) __CFDateFormatterInitialize();
    return __kCFDateFormatterTypeID;
}
/* Creates a date formatter for the given locale and date/time styles.
 * Unknown styles are asserted against and fall back to medium style.
 * Returns NULL if the locale name cannot be converted to ASCII or the
 * underlying ICU formatter cannot be opened. */
CFDateFormatterRef CFDateFormatterCreate(CFAllocatorRef allocator, CFLocaleRef locale, CFDateFormatterStyle dateStyle, CFDateFormatterStyle timeStyle) {
    struct __CFDateFormatter *memory;
    uint32_t size = sizeof(struct __CFDateFormatter) - sizeof(CFRuntimeBase);
    if (allocator == NULL) allocator = __CFGetDefaultAllocator();
    __CFGenericValidateType(allocator, CFAllocatorGetTypeID());
    if (locale) __CFGenericValidateType(locale, CFLocaleGetTypeID());
    memory = (struct __CFDateFormatter *)_CFRuntimeCreateInstance(allocator, CFDateFormatterGetTypeID(), size, NULL);
    if (NULL == memory) {
        return NULL;
    }
    // Zero all fields first so the deallocator is safe on any early-exit path.
    memory->_df = NULL;
    memory->_locale = NULL;
    memory->_format = NULL;
    memory->_defformat = NULL;
    memory->_calendarName = NULL;
    memory->_tz = NULL;
    memory->_defaultDate = NULL;
    if (NULL == locale) locale = CFLocaleGetSystem();
    memory->_dateStyle = dateStyle;
    memory->_timeStyle = timeStyle;
    // Map CF styles to the equivalent ICU UDateFormatStyle values.
    int32_t udstyle, utstyle;
    switch (dateStyle) {
    case kCFDateFormatterNoStyle: udstyle = UDAT_NONE; break;
    case kCFDateFormatterShortStyle: udstyle = UDAT_SHORT; break;
    case kCFDateFormatterMediumStyle: udstyle = UDAT_MEDIUM; break;
    case kCFDateFormatterLongStyle: udstyle = UDAT_LONG; break;
    case kCFDateFormatterFullStyle: udstyle = UDAT_FULL; break;
    default:
        CFAssert2(0, __kCFLogAssertion, "%s(): unknown date style %d", __PRETTY_FUNCTION__, dateStyle);
        udstyle = UDAT_MEDIUM;
        memory->_dateStyle = kCFDateFormatterMediumStyle;
        break;
    }
    switch (timeStyle) {
    case kCFDateFormatterNoStyle: utstyle = UDAT_NONE; break;
    case kCFDateFormatterShortStyle: utstyle = UDAT_SHORT; break;
    case kCFDateFormatterMediumStyle: utstyle = UDAT_MEDIUM; break;
    case kCFDateFormatterLongStyle: utstyle = UDAT_LONG; break;
    case kCFDateFormatterFullStyle: utstyle = UDAT_FULL; break;
    default:
        CFAssert2(0, __kCFLogAssertion, "%s(): unknown time style %d", __PRETTY_FUNCTION__, timeStyle);
        utstyle = UDAT_MEDIUM;
        memory->_timeStyle = kCFDateFormatterMediumStyle;
        break;
    }
    // ICU wants the locale identifier as an ASCII C string.
    CFStringRef localeName = locale ? CFLocaleGetIdentifier(locale) : CFSTR("");
    char buffer[BUFFER_SIZE];
    const char *cstr = CFStringGetCStringPtr(localeName, kCFStringEncodingASCII);
    if (NULL == cstr) {
        if (CFStringGetCString(localeName, buffer, BUFFER_SIZE, kCFStringEncodingASCII)) cstr = buffer;
    }
    if (NULL == cstr) {
        CFRelease(memory);
        return NULL;
    }
    UChar ubuffer[BUFFER_SIZE];
    memset(ubuffer, 0x00, sizeof(UChar) * BUFFER_SIZE);
    // Open the ICU formatter against the default time zone's name.
    memory->_tz = CFTimeZoneCopyDefault();
#ifdef DEPLOYMENT_TARGET_WINDOWS
    // ICU doesn't know about Windows Time Zone names. It'll understand the abbreviation for the non-daylight saving time, though
    TIME_ZONE_INFORMATION *tzi = (TIME_ZONE_INFORMATION *)CFDataGetBytePtr(CFTimeZoneGetData(memory->_tz));
    CFAbsoluteTime at = __CFDateWindowsSystemTimeToAbsoluteTime(&tzi->DaylightDate);
    // subtract a day to get us off the time change boundary
    at -= (60 * 60 * 24);
    CFStringRef tznam = CFTimeZoneCopyAbbreviation(memory->_tz, at);
#else
    CFStringRef tznam = CFTimeZoneGetName(memory->_tz);
#endif
    CFIndex cnt = CFStringGetLength(tznam);
    if (BUFFER_SIZE < cnt) cnt = BUFFER_SIZE;  // truncate oversized zone names
    CFStringGetCharacters(tznam, CFRangeMake(0, cnt), (UniChar *)ubuffer);
    UErrorCode status = U_ZERO_ERROR;
    memory->_df = udat_open((UDateFormatStyle)utstyle, (UDateFormatStyle)udstyle, cstr, ubuffer, cnt, NULL, 0, &status);
    CFAssert2(memory->_df, __kCFLogAssertion, "%s(): error (%d) creating date formatter", __PRETTY_FUNCTION__, status);
    if (NULL == memory->_df) {
        CFRelease(memory->_tz);
        CFRelease(memory);
        return NULL;
    }
    udat_setLenient(memory->_df, 0);
    // No style at all means an empty pattern (formats to the empty string).
    if (kCFDateFormatterNoStyle == dateStyle && kCFDateFormatterNoStyle == timeStyle) {
        udat_applyPattern(memory->_df, false, NULL, 0);
    }
    // For Gregorian calendars, anchor two-digit years at 1950-01-01 GMT.
    CFTypeRef calident = CFLocaleGetValue(locale, kCFLocaleCalendarIdentifier);
    if (calident && CFEqual(calident, kCFGregorianCalendar)) {
        status = U_ZERO_ERROR;
        udat_set2DigitYearStart(memory->_df, -631152000000.0, &status); // 1950-01-01 00:00:00 GMT
    }
    memory->_locale = locale ? CFLocaleCreateCopy(allocator, locale) : CFLocaleGetSystem();
    // Apply per-user format/symbol overrides from the locale's preferences.
    __CFDateFormatterCustomize(memory);
    // Read back the effective pattern so _format/_defformat reflect any overrides.
    status = U_ZERO_ERROR;
    int32_t ret = udat_toPattern(memory->_df, false, ubuffer, BUFFER_SIZE, &status);
    if (U_SUCCESS(status) && ret <= BUFFER_SIZE) {
        memory->_format = CFStringCreateWithCharacters(allocator, (const UniChar *)ubuffer, ret);
    }
    memory->_defformat = memory->_format ? (CFStringRef)CFRetain(memory->_format) : NULL;
    return (CFDateFormatterRef)memory;
}
extern CFDictionaryRef __CFLocaleGetPrefs(CFLocaleRef locale);
/* Applies a user-preference pattern override for the date (doTime == false)
 * or time (doTime == true) half of the formatter's pattern. The override is
 * looked up in the locale prefs (AppleICUDateFormatStrings /
 * AppleICUTimeFormatStrings) keyed by style. To splice it in, a second ICU
 * formatter with only that half's style is opened, its pattern is located
 * inside the full pattern, and the matched range is replaced by the pref. */
static void __substituteFormatStringFromPrefsDF(CFDateFormatterRef formatter, bool doTime) {
    CFIndex formatStyle = doTime ? formatter->_timeStyle : formatter->_dateStyle;
    CFStringRef prefName = doTime ? CFSTR("AppleICUTimeFormatStrings") : CFSTR("AppleICUDateFormatStrings");
    if (kCFDateFormatterNoStyle != formatStyle) {
        // Look up prefs[prefName][style-number] -> override pattern string.
        CFStringRef pref = NULL;
        CFDictionaryRef prefs = __CFLocaleGetPrefs(formatter->_locale);
        CFPropertyListRef metapref = prefs ? CFDictionaryGetValue(prefs, prefName) : NULL;
        if (NULL != metapref && CFGetTypeID(metapref) == CFDictionaryGetTypeID()) {
            CFStringRef key;
            switch (formatStyle) {
            case kCFDateFormatterShortStyle: key = CFSTR("1"); break;
            case kCFDateFormatterMediumStyle: key = CFSTR("2"); break;
            case kCFDateFormatterLongStyle: key = CFSTR("3"); break;
            case kCFDateFormatterFullStyle: key = CFSTR("4"); break;
            default: key = CFSTR("0"); break;
            }
            pref = (CFStringRef)CFDictionaryGetValue((CFDictionaryRef)metapref, key);
        }
        if (NULL != pref && CFGetTypeID(pref) == CFStringGetTypeID()) {
            int32_t icustyle = UDAT_NONE;
            switch (formatStyle) {
            case kCFDateFormatterShortStyle: icustyle = UDAT_SHORT; break;
            case kCFDateFormatterMediumStyle: icustyle = UDAT_MEDIUM; break;
            case kCFDateFormatterLongStyle: icustyle = UDAT_LONG; break;
            case kCFDateFormatterFullStyle: icustyle = UDAT_FULL; break;
            }
            CFStringRef localeName = CFLocaleGetIdentifier(formatter->_locale);
            char buffer[BUFFER_SIZE];
            const char *cstr = CFStringGetCStringPtr(localeName, kCFStringEncodingASCII);
            if (NULL == cstr) {
                if (CFStringGetCString(localeName, buffer, BUFFER_SIZE, kCFStringEncodingASCII)) cstr = buffer;
            }
            // Open a formatter with only the relevant half's style to learn
            // which part of the full pattern belongs to it.
            UErrorCode status = U_ZERO_ERROR;
            UDateFormat *df = udat_open((UDateFormatStyle)(doTime ? icustyle : UDAT_NONE), (UDateFormatStyle)(doTime ? UDAT_NONE : icustyle), cstr, NULL, 0, NULL, 0, &status);
            if (NULL != df) {
                UChar ubuffer[BUFFER_SIZE];
                status = U_ZERO_ERROR;
                int32_t date_len = udat_toPattern(df, false, ubuffer, BUFFER_SIZE, &status);
                if (U_SUCCESS(status) && date_len <= BUFFER_SIZE) {
                    CFStringRef dateString = CFStringCreateWithCharacters(kCFAllocatorSystemDefault, (UniChar *)ubuffer, date_len);
                    status = U_ZERO_ERROR;
                    int32_t formatter_len = udat_toPattern(formatter->_df, false, ubuffer, BUFFER_SIZE, &status);
                    if (U_SUCCESS(status) && formatter_len <= BUFFER_SIZE) {
                        CFMutableStringRef formatString = CFStringCreateMutable(kCFAllocatorSystemDefault, 0);
                        CFStringAppendCharacters(formatString, (UniChar *)ubuffer, formatter_len);
                        // find dateString inside formatString, substitute the pref in that range
                        CFRange result;
                        if (CFStringFindWithOptions(formatString, dateString, CFRangeMake(0, formatter_len), 0, &result)) {
                            CFStringReplace(formatString, result, pref);
                            int32_t new_len = CFStringGetLength(formatString);
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_WINDOWS || DEPLOYMENT_TARGET_LINUX
                            STACK_BUFFER_DECL(UChar, new_buffer, new_len);
#else
                            // NOTE(review): new_len can exceed BUFFER_SIZE on
                            // this fallback path — confirm this branch is only
                            // built where patterns stay short.
                            UChar new_buffer[BUFFER_SIZE]; // Dynamic stack allocation is GNU specific
#endif
                            const UChar *new_ustr = (UChar *)CFStringGetCharactersPtr(formatString);
                            if (NULL == new_ustr) {
                                CFStringGetCharacters(formatString, CFRangeMake(0, new_len), (UniChar *)new_buffer);
                                new_ustr = new_buffer;
                            }
                            status = U_ZERO_ERROR;
                            // This ICU build's udat_applyPattern has no status
                            // out-parameter (original call kept below).
                            // udat_applyPattern(formatter->_df, false, new_ustr, new_len, &status);
                            udat_applyPattern(formatter->_df, false, new_ustr, new_len);
                        }
                        CFRelease(formatString);
                    }
                    CFRelease(dateString);
                }
                udat_close(df);
            }
        }
    }
}
/* Dictionary applier: key is a UDateFormatSymbolType encoded as a decimal
 * string, value is a CFArray of replacement symbol strings; each array entry
 * overwrites the formatter's symbol at the same index. Symbols longer than
 * BUFFER_SIZE are truncated when they must be copied to the local buffer. */
static void __CFDateFormatterApplySymbolPrefs(const void *key, const void *value, void *context) {
    if (CFGetTypeID(key) == CFStringGetTypeID() && CFGetTypeID(value) == CFArrayGetTypeID()) {
        CFDateFormatterRef formatter = (CFDateFormatterRef)context;
        UDateFormatSymbolType sym = (UDateFormatSymbolType)CFStringGetIntValue((CFStringRef)key);
        CFArrayRef array = (CFArrayRef)value;
        CFIndex idx, cnt = CFArrayGetCount(array);
        for (idx = 0; idx < cnt; idx++) {
            CFStringRef item = (CFStringRef)CFArrayGetValueAtIndex(array, idx);
            if (CFGetTypeID(item) != CFStringGetTypeID()) continue;  // skip non-string entries
            CFIndex item_cnt = CFStringGetLength(item);
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_WINDOWS || DEPLOYMENT_TARGET_LINUX
            STACK_BUFFER_DECL(UChar, item_buffer, __CFMin(BUFFER_SIZE, item_cnt));
#else
            UChar item_buffer[BUFFER_SIZE]; // Dynamic stack allocation is GNU specific
#endif
            UChar *item_ustr = (UChar *)CFStringGetCharactersPtr(item);
            if (NULL == item_ustr) {
                item_cnt = __CFMin(BUFFER_SIZE, item_cnt);
                CFStringGetCharacters(item, CFRangeMake(0, item_cnt), (UniChar *)item_buffer);
                item_ustr = item_buffer;
            }
            UErrorCode status = U_ZERO_ERROR;
            udat_setSymbols(formatter->_df, sym, idx, item_ustr, item_cnt, &status);
        }
    }
}
/* Applies all user-preference customizations to a newly created formatter:
 * the date- and time-pattern overrides, then any symbol overrides from the
 * AppleICUDateTimeSymbols preference dictionary. */
static void __CFDateFormatterCustomize(CFDateFormatterRef formatter) {
    __substituteFormatStringFromPrefsDF(formatter, false);
    __substituteFormatStringFromPrefsDF(formatter, true);
    CFDictionaryRef prefs = __CFLocaleGetPrefs(formatter->_locale);
    CFPropertyListRef metapref = prefs ? CFDictionaryGetValue(prefs, CFSTR("AppleICUDateTimeSymbols")) : NULL;
    if (NULL != metapref && CFGetTypeID(metapref) == CFDictionaryGetTypeID()) {
        CFDictionaryApplyFunction((CFDictionaryRef)metapref, __CFDateFormatterApplySymbolPrefs, formatter);
    }
}
/* Returns the formatter's locale (borrowed reference). */
CFLocaleRef CFDateFormatterGetLocale(CFDateFormatterRef formatter) {
    __CFGenericValidateType(formatter, CFDateFormatterGetTypeID());
    return formatter->_locale;
}
/* Returns the date style the formatter was created with. */
CFDateFormatterStyle CFDateFormatterGetDateStyle(CFDateFormatterRef formatter) {
    __CFGenericValidateType(formatter, CFDateFormatterGetTypeID());
    return formatter->_dateStyle;
}
/* Returns the time style the formatter was created with. */
CFDateFormatterStyle CFDateFormatterGetTimeStyle(CFDateFormatterRef formatter) {
    __CFGenericValidateType(formatter, CFDateFormatterGetTypeID());
    return formatter->_timeStyle;
}
/* Returns the formatter's current pattern string (borrowed reference). */
CFStringRef CFDateFormatterGetFormat(CFDateFormatterRef formatter) {
    __CFGenericValidateType(formatter, CFDateFormatterGetTypeID());
    return formatter->_format;
}
/* Replaces the formatter's pattern with formatString (at most 1024 UTF-16
 * units, asserted and re-checked) and stores a copy in _format. */
void CFDateFormatterSetFormat(CFDateFormatterRef formatter, CFStringRef formatString) {
    __CFGenericValidateType(formatter, CFDateFormatterGetTypeID());
    __CFGenericValidateType(formatString, CFStringGetTypeID());
    CFIndex cnt = CFStringGetLength(formatString);
    CFAssert1(cnt <= 1024, __kCFLogAssertion, "%s(): format string too long", __PRETTY_FUNCTION__);
    if (formatter->_format != formatString && cnt <= 1024) {
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_WINDOWS || DEPLOYMENT_TARGET_LINUX
        STACK_BUFFER_DECL(UChar, ubuffer, cnt);
#else
        // Dynamic stack allocation is GNU specific. The buffer must hold up
        // to 1024 units (checked above); the previous BUFFER_SIZE (768)
        // buffer could be overflowed by patterns of 769..1024 characters.
        UChar ubuffer[1024];
#endif
        const UChar *ustr = (UChar *)CFStringGetCharactersPtr((CFStringRef)formatString);
        if (NULL == ustr) {
            CFStringGetCharacters(formatString, CFRangeMake(0, cnt), (UniChar *)ubuffer);
            ustr = ubuffer;
        }
        UErrorCode status = U_ZERO_ERROR;
        // This ICU build's udat_applyPattern has no status out-parameter
        // (original call preserved below), so status stays U_ZERO_ERROR and
        // the success check always passes.
        // udat_applyPattern(formatter->_df, false, ustr, cnt, &status);
        udat_applyPattern(formatter->_df, false, ustr, cnt);
        if (U_SUCCESS(status)) {
            if (formatter->_format) CFRelease(formatter->_format);
            formatter->_format = (CFStringRef)CFStringCreateCopy(CFGetAllocator(formatter), formatString);
        }
    }
}
/* Formats a CFDate by delegating to the absolute-time variant. */
CFStringRef CFDateFormatterCreateStringWithDate(CFAllocatorRef allocator, CFDateFormatterRef formatter, CFDateRef date) {
    if (allocator == NULL) allocator = __CFGetDefaultAllocator();
    __CFGenericValidateType(allocator, CFAllocatorGetTypeID());
    __CFGenericValidateType(formatter, CFDateFormatterGetTypeID());
    __CFGenericValidateType(date, CFDateGetTypeID());
    return CFDateFormatterCreateStringWithAbsoluteTime(allocator, formatter, CFDateGetAbsoluteTime(date));
}
/* Formats an absolute time as a string. Tries a fixed stack buffer first;
 * if ICU reports overflow, retries with an exact-size heap buffer.
 * Returns NULL on ICU failure; the +0.5 rounds to the nearest millisecond. */
CFStringRef CFDateFormatterCreateStringWithAbsoluteTime(CFAllocatorRef allocator, CFDateFormatterRef formatter, CFAbsoluteTime at) {
    if (allocator == NULL) allocator = __CFGetDefaultAllocator();
    __CFGenericValidateType(allocator, CFAllocatorGetTypeID());
    __CFGenericValidateType(formatter, CFDateFormatterGetTypeID());
    UChar *ustr = NULL, ubuffer[BUFFER_SIZE];
    UErrorCode status = U_ZERO_ERROR;
    CFIndex used, cnt = BUFFER_SIZE;
    // Convert CFAbsoluteTime (seconds since 2001) to ICU UDate (ms since 1970).
    UDate ud = (at + kCFAbsoluteTimeIntervalSince1970) * 1000.0 + 0.5;
    used = udat_format(formatter->_df, ud, ubuffer, cnt, NULL, &status);
    if (status == U_BUFFER_OVERFLOW_ERROR || cnt < used) {
        // Stack buffer was too small; allocate exactly what ICU asked for.
        cnt = used + 1;
        ustr = (UChar *)CFAllocatorAllocate(kCFAllocatorSystemDefault, sizeof(UChar) * cnt, 0);
        status = U_ZERO_ERROR;
        used = udat_format(formatter->_df, ud, ustr, cnt, NULL, &status);
    }
    CFStringRef string = NULL;
    if (U_SUCCESS(status)) {
        string = CFStringCreateWithCharacters(allocator, (const UniChar *)(ustr ? ustr : ubuffer), used);
    }
    if (ustr) CFAllocatorDeallocate(kCFAllocatorSystemDefault, ustr);
    return string;
}
/* Parses a string into a CFDate by delegating to
 * CFDateFormatterGetAbsoluteTimeFromString; returns NULL on parse failure. */
CFDateRef CFDateFormatterCreateDateFromString(CFAllocatorRef allocator, CFDateFormatterRef formatter, CFStringRef string, CFRange *rangep) {
    if (allocator == NULL) allocator = __CFGetDefaultAllocator();
    __CFGenericValidateType(allocator, CFAllocatorGetTypeID());
    __CFGenericValidateType(formatter, CFDateFormatterGetTypeID());
    __CFGenericValidateType(string, CFStringGetTypeID());
    CFAbsoluteTime at;
    if (CFDateFormatterGetAbsoluteTimeFromString(formatter, string, rangep, &at)) {
        return CFDateCreate(allocator, at);
    }
    return NULL;
}
/* Parses (a range of) string into an absolute time. At most 1024 characters
 * are considered. If a default date is set, its calendar fields seed the
 * parse so partial strings (e.g. time-only) resolve against it. On return,
 * rangep->length (if given) holds the number of characters consumed.
 * Returns false on ICU parse failure. */
Boolean CFDateFormatterGetAbsoluteTimeFromString(CFDateFormatterRef formatter, CFStringRef string, CFRange *rangep, CFAbsoluteTime *atp) {
    __CFGenericValidateType(formatter, CFDateFormatterGetTypeID());
    __CFGenericValidateType(string, CFStringGetTypeID());
    CFRange range = {0, 0};
    if (rangep) {
        range = *rangep;
    } else {
        range.length = CFStringGetLength(string);
    }
    if (1024 < range.length) range.length = 1024;
    const UChar *ustr = (UChar *)CFStringGetCharactersPtr(string);
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_WINDOWS || DEPLOYMENT_TARGET_LINUX
    STACK_BUFFER_DECL(UChar, ubuffer, (NULL == ustr) ? range.length : 1);
#else
    // Dynamic stack allocation is GNU specific. range.length is clamped to
    // 1024 above, so the fixed buffer must hold 1024 units; the previous
    // BUFFER_SIZE (768) buffer could be overflowed by inputs of 769..1024
    // characters.
    UChar ubuffer[1024];
#endif
    if (NULL == ustr) {
        // No direct pointer available; copy the requested range.
        CFStringGetCharacters(string, range, (UniChar *)ubuffer);
        ustr = ubuffer;
    } else {
        ustr += range.location;
    }
    UDate udate;
    int32_t dpos = 0;
    UErrorCode status = U_ZERO_ERROR;
    if (formatter->_defaultDate) {
        // Seed a cloned formatter's calendar with the default date so fields
        // absent from the string keep the default date's values.
        CFAbsoluteTime at = CFDateGetAbsoluteTime(formatter->_defaultDate);
        udate = (at + kCFAbsoluteTimeIntervalSince1970) * 1000.0;
        UDateFormat *df2 = udat_clone(formatter->_df, &status);
        UCalendar *cal2 = (UCalendar *)udat_getCalendar(df2);
        ucal_setMillis(cal2, udate, &status);
        udat_parseCalendar(formatter->_df, cal2, ustr, range.length, &dpos, &status);
        udate = ucal_getMillis(cal2, &status);
        udat_close(df2);
    } else {
        udate = udat_parse(formatter->_df, ustr, range.length, &dpos, &status);
    }
    if (rangep) rangep->length = dpos;
    if (U_FAILURE(status)) {
        return false;
    }
    if (atp) {
        // Convert ICU UDate (ms since 1970) back to CFAbsoluteTime (s since 2001).
        *atp = (double)udate / 1000.0 - kCFAbsoluteTimeIntervalSince1970;
    }
    return true;
}
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_WINDOWS || DEPLOYMENT_TARGET_LINUX
/*
 * SET_SYMBOLS_ARRAY(ICU_CODE, INDEX_BASE): validates `value` as a CFArray of
 * CFStrings and writes each entry into the formatter's ICU symbol table
 * ICU_CODE, at symbol index (array index + INDEX_BASE). Strings that must be
 * copied into the local buffer are truncated to BUFFER_SIZE. Expects
 * `value`, `formatter`, and `status` to be in scope at the expansion site.
 * This variant uses a dynamically sized stack buffer.
 */
#define SET_SYMBOLS_ARRAY(ICU_CODE, INDEX_BASE) \
        __CFGenericValidateType(value, CFArrayGetTypeID()); \
        CFArrayRef array = (CFArrayRef)value; \
        CFIndex idx, cnt = CFArrayGetCount(array); \
        for (idx = 0; idx < cnt; idx++) { \
            CFStringRef item = (CFStringRef)CFArrayGetValueAtIndex(array, idx); \
            __CFGenericValidateType(item, CFStringGetTypeID()); \
            CFIndex item_cnt = CFStringGetLength(item); \
            STACK_BUFFER_DECL(UChar, item_buffer, __CFMin(BUFFER_SIZE, item_cnt)); \
            UChar *item_ustr = (UChar *)CFStringGetCharactersPtr(item); \
            if (NULL == item_ustr) { \
                item_cnt = __CFMin(BUFFER_SIZE, item_cnt); \
                CFStringGetCharacters(item, CFRangeMake(0, item_cnt), (UniChar *)item_buffer); \
                item_ustr = item_buffer; \
            } \
            status = U_ZERO_ERROR; \
            udat_setSymbols(formatter->_df, ICU_CODE, idx + INDEX_BASE, item_ustr, item_cnt, &status); \
        }
#else
/* Fallback for toolchains without dynamic stack arrays: fixed-size buffer. */
#define SET_SYMBOLS_ARRAY(ICU_CODE, INDEX_BASE) \
        __CFGenericValidateType(value, CFArrayGetTypeID()); \
        CFArrayRef array = (CFArrayRef)value; \
        CFIndex idx, cnt = CFArrayGetCount(array); \
        for (idx = 0; idx < cnt; idx++) { \
            CFStringRef item = (CFStringRef)CFArrayGetValueAtIndex(array, idx); \
            __CFGenericValidateType(item, CFStringGetTypeID()); \
            CFIndex item_cnt = CFStringGetLength(item); \
            UChar item_buffer[BUFFER_SIZE]; \
            UChar *item_ustr = (UChar *)CFStringGetCharactersPtr(item); \
            if (NULL == item_ustr) { \
                item_cnt = __CFMin(BUFFER_SIZE, item_cnt); \
                CFStringGetCharacters(item, CFRangeMake(0, item_cnt), (UniChar *)item_buffer); \
                item_ustr = item_buffer; \
            } \
            status = U_ZERO_ERROR; \
            udat_setSymbols(formatter->_df, ICU_CODE, idx + INDEX_BASE, item_ustr, item_cnt, &status); \
        }
#endif
/*
 * Sets one named property on the date formatter, dispatching on the property
 * key.  Most branches translate the CF value into the corresponding ICU
 * udat_*/ucal_* call on formatter->_df; the symbol-array keys expand
 * SET_SYMBOLS_ARRAY.  Unknown keys trigger an assertion.  ICU errors are
 * accumulated in `status` but not reported to the caller (void return).
 */
void CFDateFormatterSetProperty(CFDateFormatterRef formatter, CFStringRef key, CFTypeRef value) {
    __CFGenericValidateType(formatter, CFDateFormatterGetTypeID());
    __CFGenericValidateType(key, CFStringGetTypeID());
    UErrorCode status = U_ZERO_ERROR;
    UChar ubuffer[BUFFER_SIZE];
    if (kCFDateFormatterIsLenient == key) {
        __CFGenericValidateType(value, CFBooleanGetTypeID());
        udat_setLenient(formatter->_df, (kCFBooleanTrue == value));
    } else if (kCFDateFormatterCalendar == key) {
        __CFGenericValidateType(value, CFCalendarGetTypeID());
        /* Rebuild the formatter's locale with the new calendar identifier,
           then install a matching ICU calendar on the ICU formatter. */
        CFStringRef localeName = CFLocaleGetIdentifier(formatter->_locale);
        CFDictionaryRef components = CFLocaleCreateComponentsFromLocaleIdentifier(kCFAllocatorSystemDefault, localeName);
        CFMutableDictionaryRef mcomponents = CFDictionaryCreateMutableCopy(kCFAllocatorSystemDefault, 0, components);
        CFDictionarySetValue(mcomponents, kCFLocaleCalendarIdentifier, CFCalendarGetIdentifier((CFCalendarRef)value));
        localeName = CFLocaleCreateLocaleIdentifierFromComponents(kCFAllocatorSystemDefault, mcomponents);
        CFRelease(mcomponents);
        CFRelease(components);
        CFLocaleRef newLocale = CFLocaleCreate(CFGetAllocator(formatter->_locale), localeName);
        CFRelease(localeName);
        CFRelease(formatter->_locale);
        formatter->_locale = newLocale;
        UCalendar *cal = __CFCalendarCreateUCalendar(NULL, CFLocaleGetIdentifier(formatter->_locale), formatter->_tz);
        /* udat_setCalendar clones the calendar, so ours is closed afterwards. */
        if (cal) udat_setCalendar(formatter->_df, cal);
        if (cal) ucal_close(cal);
    } else if (kCFDateFormatterCalendarIdentifier == key || kCFDateFormatterCalendarName == key) {
        __CFGenericValidateType(value, CFStringGetTypeID());
        /* Same locale-rebuilding dance as above, but the calendar identifier
           is supplied directly as a string. */
        CFStringRef localeName = CFLocaleGetIdentifier(formatter->_locale);
        CFDictionaryRef components = CFLocaleCreateComponentsFromLocaleIdentifier(kCFAllocatorSystemDefault, localeName);
        CFMutableDictionaryRef mcomponents = CFDictionaryCreateMutableCopy(kCFAllocatorSystemDefault, 0, components);
        CFDictionarySetValue(mcomponents, kCFLocaleCalendarIdentifier, value);
        localeName = CFLocaleCreateLocaleIdentifierFromComponents(kCFAllocatorSystemDefault, mcomponents);
        CFRelease(mcomponents);
        CFRelease(components);
        CFLocaleRef newLocale = CFLocaleCreate(CFGetAllocator(formatter->_locale), localeName);
        CFRelease(localeName);
        CFRelease(formatter->_locale);
        formatter->_locale = newLocale;
        UCalendar *cal = __CFCalendarCreateUCalendar(NULL, CFLocaleGetIdentifier(formatter->_locale), formatter->_tz);
        if (cal) udat_setCalendar(formatter->_df, cal);
        if (cal) ucal_close(cal);
    } else if (kCFDateFormatterTimeZone == key) {
        __CFGenericValidateType(value, CFTimeZoneGetTypeID());
        /* Swap the retained time zone, then push its name (truncated to
           BUFFER_SIZE) into the formatter's ICU calendar. */
        CFTimeZoneRef old = formatter->_tz;
        formatter->_tz = value ? (CFTimeZoneRef)CFRetain(value) : CFTimeZoneCopyDefault();
        if (old) CFRelease(old);
        CFStringRef tznam = CFTimeZoneGetName(formatter->_tz);
        UCalendar *cal = (UCalendar *)udat_getCalendar(formatter->_df);
        CFIndex ucnt = CFStringGetLength(tznam);
        if (BUFFER_SIZE < ucnt) ucnt = BUFFER_SIZE;
        CFStringGetCharacters(tznam, CFRangeMake(0, ucnt), (UniChar *)ubuffer);
        ucal_setTimeZone(cal, ubuffer, ucnt, &status);
    } else if (kCFDateFormatterDefaultFormat == key) {
        // read-only attribute
    } else if (kCFDateFormatterTwoDigitStartDate == key) {
        __CFGenericValidateType(value, CFDateGetTypeID());
        /* CFAbsoluteTime (seconds since 2001) -> ICU UDate (ms since 1970). */
        CFAbsoluteTime at = CFDateGetAbsoluteTime((CFDateRef)value);
        UDate udate = (at + kCFAbsoluteTimeIntervalSince1970) * 1000.0;
        udat_set2DigitYearStart(formatter->_df, udate, &status);
    } else if (kCFDateFormatterDefaultDate == key) {
        __CFGenericValidateType(value, CFDateGetTypeID());
        CFDateRef old = formatter->_defaultDate;
        formatter->_defaultDate = value ? (CFDateRef)CFRetain(value) : NULL;
        if (old) CFRelease(old);
    } else if (kCFDateFormatterEraSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_ERAS, 0)
    } else if (kCFDateFormatterMonthSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_MONTHS, 0)
    } else if (kCFDateFormatterShortMonthSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_SHORT_MONTHS, 0)
    } else if (kCFDateFormatterWeekdaySymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_WEEKDAYS, 1)
    } else if (kCFDateFormatterShortWeekdaySymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_SHORT_WEEKDAYS, 1)
    } else if (kCFDateFormatterAMSymbol == key) {
        /* The AM symbol is index 0 of ICU's UDAT_AM_PMS pair. */
        __CFGenericValidateType(value, CFStringGetTypeID());
        CFIndex item_cnt = CFStringGetLength((CFStringRef)value);
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_WINDOWS || DEPLOYMENT_TARGET_LINUX
        STACK_BUFFER_DECL(UChar, item_buffer, __CFMin(BUFFER_SIZE, item_cnt));
#else
        UChar item_buffer[BUFFER_SIZE];
#endif
        UChar *item_ustr = (UChar *)CFStringGetCharactersPtr((CFStringRef)value);
        if (NULL == item_ustr) {
            item_cnt = __CFMin(BUFFER_SIZE, item_cnt);
            CFStringGetCharacters((CFStringRef)value, CFRangeMake(0, item_cnt), (UniChar *)item_buffer);
            item_ustr = item_buffer;
        }
        udat_setSymbols(formatter->_df, UDAT_AM_PMS, 0, item_ustr, item_cnt, &status);
    } else if (kCFDateFormatterPMSymbol == key) {
        /* The PM symbol is index 1 of ICU's UDAT_AM_PMS pair. */
        __CFGenericValidateType(value, CFStringGetTypeID());
        CFIndex item_cnt = CFStringGetLength((CFStringRef)value);
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_WINDOWS || DEPLOYMENT_TARGET_LINUX
        STACK_BUFFER_DECL(UChar, item_buffer, __CFMin(BUFFER_SIZE, item_cnt));
#else
        UChar item_buffer[BUFFER_SIZE];
#endif
        UChar *item_ustr = (UChar *)CFStringGetCharactersPtr((CFStringRef)value);
        if (NULL == item_ustr) {
            item_cnt = __CFMin(BUFFER_SIZE, item_cnt);
            CFStringGetCharacters((CFStringRef)value, CFRangeMake(0, item_cnt), (UniChar *)item_buffer);
            item_ustr = item_buffer;
        }
        udat_setSymbols(formatter->_df, UDAT_AM_PMS, 1, item_ustr, item_cnt, &status);
    } else if (kCFDateFormatterGregorianStartDate == key) {
        __CFGenericValidateType(value, CFDateGetTypeID());
        CFAbsoluteTime at = CFDateGetAbsoluteTime((CFDateRef)value);
        UDate udate = (at + kCFAbsoluteTimeIntervalSince1970) * 1000.0;
        UCalendar *cal = (UCalendar *)udat_getCalendar(formatter->_df);
        ucal_setGregorianChange(cal, udate, &status);
    } else if (kCFDateFormatterLongEraSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_ERA_NAMES, 0)
    } else if (kCFDateFormatterVeryShortMonthSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_NARROW_MONTHS, 0)
    } else if (kCFDateFormatterStandaloneMonthSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_STANDALONE_MONTHS, 0)
    } else if (kCFDateFormatterShortStandaloneMonthSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_STANDALONE_SHORT_MONTHS, 0)
    } else if (kCFDateFormatterVeryShortStandaloneMonthSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_STANDALONE_NARROW_MONTHS, 0)
    } else if (kCFDateFormatterVeryShortWeekdaySymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_NARROW_WEEKDAYS, 1)
    } else if (kCFDateFormatterStandaloneWeekdaySymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_STANDALONE_WEEKDAYS, 1)
    } else if (kCFDateFormatterShortStandaloneWeekdaySymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_STANDALONE_SHORT_WEEKDAYS, 1)
    } else if (kCFDateFormatterVeryShortStandaloneWeekdaySymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_STANDALONE_NARROW_WEEKDAYS, 1)
    } else if (kCFDateFormatterQuarterSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_QUARTERS, 1)
    } else if (kCFDateFormatterShortQuarterSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_SHORT_QUARTERS, 1)
    } else if (kCFDateFormatterStandaloneQuarterSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_STANDALONE_QUARTERS, 1)
    } else if (kCFDateFormatterShortStandaloneQuarterSymbols == key) {
        SET_SYMBOLS_ARRAY(UDAT_STANDALONE_SHORT_QUARTERS, 1)
    } else {
        CFAssert3(0, __kCFLogAssertion, "%s(): unknown key %p (%@)", __PRETTY_FUNCTION__, key, key);
    }
}
/*
 * GET_SYMBOLS_ARRAY(ICU_CODE, INDEX_BASE)
 *
 * Helper expanded inside CFDateFormatterCopyProperty: reads the whole ICU
 * symbol table ICU_CODE into a new CFArray of CFStrings and *returns* it
 * from the enclosing function.  INDEX_BASE skips ICU's unused slot 0 for
 * 1-based tables (weekdays, quarters).  Entries that cannot be read become
 * the placeholder string "<error>".
 *
 * NOTE(review): the success guard tests `cnt` (the number of symbols)
 * against BUFFER_SIZE; presumably `ucnt` (the length of the fetched symbol
 * string) was intended — confirm against the upstream CF sources.
 */
#if DEPLOYMENT_TARGET_MACOSX || DEPLOYMENT_TARGET_WINDOWS || DEPLOYMENT_TARGET_LINUX
/* Variant for platforms providing STACK_BUFFER_DECL (count-sized stack array). */
#define GET_SYMBOLS_ARRAY(ICU_CODE, INDEX_BASE) \
    CFIndex idx, cnt = udat_countSymbols(formatter->_df, ICU_CODE) - INDEX_BASE; \
    STACK_BUFFER_DECL(CFStringRef, strings, cnt); \
    for (idx = 0; idx < cnt; idx++) { \
        CFStringRef str = NULL; \
        status = U_ZERO_ERROR; \
        CFIndex ucnt = udat_getSymbols(formatter->_df, ICU_CODE, idx + INDEX_BASE, ubuffer, BUFFER_SIZE, &status); \
        if (U_SUCCESS(status) && cnt <= BUFFER_SIZE) { \
            str = CFStringCreateWithCharacters(CFGetAllocator(formatter), (const UniChar *)ubuffer, ucnt); \
        } \
        strings[idx] = !str ? (CFStringRef)CFRetain(CFSTR("<error>")) : str; \
    } \
    CFArrayRef array = CFArrayCreate(CFGetAllocator(formatter), (const void **)strings, cnt, &kCFTypeArrayCallBacks); \
    while (cnt--) { \
        CFRelease(strings[cnt]); \
    } \
    return array;
#else
/* Variant for platforms without STACK_BUFFER_DECL: fixed-size local array. */
#define GET_SYMBOLS_ARRAY(ICU_CODE, INDEX_BASE) \
    CFIndex idx, cnt = udat_countSymbols(formatter->_df, ICU_CODE) - INDEX_BASE; \
    CFStringRef strings[BUFFER_SIZE]; \
    for (idx = 0; idx < cnt; idx++) { \
        CFStringRef str = NULL; \
        status = U_ZERO_ERROR; \
        CFIndex ucnt = udat_getSymbols(formatter->_df, ICU_CODE, idx + INDEX_BASE, ubuffer, BUFFER_SIZE, &status); \
        if (U_SUCCESS(status) && cnt <= BUFFER_SIZE) { \
            str = CFStringCreateWithCharacters(CFGetAllocator(formatter), (const UniChar *)ubuffer, ucnt); \
        } \
        strings[idx] = !str ? (CFStringRef)CFRetain(CFSTR("<error>")) : str; \
    } \
    CFArrayRef array = CFArrayCreate(CFGetAllocator(formatter), (const void **)strings, cnt, &kCFTypeArrayCallBacks); \
    while (cnt--) { \
        CFRelease(strings[cnt]); \
    } \
    return array;
#endif
/*
 * Returns a retained copy of the named formatter property, or NULL when the
 * property is unset or cannot be read.  Symbol-array keys expand
 * GET_SYMBOLS_ARRAY, which returns directly from this function; the other
 * branches fall through to the final `return NULL;` on failure.  The caller
 * owns the returned object (Copy rule).
 */
CFTypeRef CFDateFormatterCopyProperty(CFDateFormatterRef formatter, CFStringRef key) {
    __CFGenericValidateType(formatter, CFDateFormatterGetTypeID());
    __CFGenericValidateType(key, CFStringGetTypeID());
    UErrorCode status = U_ZERO_ERROR;
    UChar ubuffer[BUFFER_SIZE];
    if (kCFDateFormatterIsLenient == key) {
        return CFRetain(udat_isLenient(formatter->_df) ? kCFBooleanTrue : kCFBooleanFalse);
    } else if (kCFDateFormatterCalendar == key) {
        CFCalendarRef calendar = (CFCalendarRef)CFLocaleGetValue(formatter->_locale, kCFLocaleCalendar);
        return calendar ? CFRetain(calendar) : NULL;
    } else if (kCFDateFormatterCalendarIdentifier == key || kCFDateFormatterCalendarName == key) {
        CFStringRef ident = (CFStringRef)CFLocaleGetValue(formatter->_locale, kCFLocaleCalendarIdentifier);
        return ident ? CFRetain(ident) : NULL;
    } else if (kCFDateFormatterTimeZone == key) {
        return CFRetain(formatter->_tz);
    } else if (kCFDateFormatterDefaultFormat == key) {
        return formatter->_defformat ? CFRetain(formatter->_defformat) : NULL;
    } else if (kCFDateFormatterTwoDigitStartDate == key) {
        /* ICU UDate (ms since 1970) -> CFAbsoluteTime (s since 2001). */
        UDate udate = udat_get2DigitYearStart(formatter->_df, &status);
        if (U_SUCCESS(status)) {
            CFAbsoluteTime at = (double)udate / 1000.0 - kCFAbsoluteTimeIntervalSince1970;
            return CFDateCreate(CFGetAllocator(formatter), at);
        }
    } else if (kCFDateFormatterDefaultDate == key) {
        return formatter->_defaultDate ? CFRetain(formatter->_defaultDate) : NULL;
    } else if (kCFDateFormatterEraSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_ERAS, 0)
    } else if (kCFDateFormatterMonthSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_MONTHS, 0)
    } else if (kCFDateFormatterShortMonthSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_SHORT_MONTHS, 0)
    } else if (kCFDateFormatterWeekdaySymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_WEEKDAYS, 1)
    } else if (kCFDateFormatterShortWeekdaySymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_SHORT_WEEKDAYS, 1)
    } else if (kCFDateFormatterAMSymbol == key) {
        /* AM is slot 0 of the UDAT_AM_PMS pair; require both slots present.
           NOTE(review): the guard compares `cnt` (symbol count) to
           BUFFER_SIZE; presumably `ucnt` (string length) was intended. */
        CFIndex cnt = udat_countSymbols(formatter->_df, UDAT_AM_PMS);
        if (2 <= cnt) {
            CFIndex ucnt = udat_getSymbols(formatter->_df, UDAT_AM_PMS, 0, ubuffer, BUFFER_SIZE, &status);
            if (U_SUCCESS(status) && cnt <= BUFFER_SIZE) {
                return CFStringCreateWithCharacters(CFGetAllocator(formatter), (UniChar *)ubuffer, ucnt);
            }
        }
    } else if (kCFDateFormatterPMSymbol == key) {
        /* PM is slot 1 of the UDAT_AM_PMS pair; same guard caveat as AM. */
        CFIndex cnt = udat_countSymbols(formatter->_df, UDAT_AM_PMS);
        if (2 <= cnt) {
            CFIndex ucnt = udat_getSymbols(formatter->_df, UDAT_AM_PMS, 1, ubuffer, BUFFER_SIZE, &status);
            if (U_SUCCESS(status) && cnt <= BUFFER_SIZE) {
                return CFStringCreateWithCharacters(CFGetAllocator(formatter), (UniChar *)ubuffer, ucnt);
            }
        }
    } else if (kCFDateFormatterGregorianStartDate == key) {
        UCalendar *cal = (UCalendar *)udat_getCalendar(formatter->_df);
        UDate udate = ucal_getGregorianChange(cal, &status);
        if (U_SUCCESS(status)) {
            CFAbsoluteTime at = (double)udate / 1000.0 - kCFAbsoluteTimeIntervalSince1970;
            return CFDateCreate(CFGetAllocator(formatter), at);
        }
    } else if (kCFDateFormatterLongEraSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_ERA_NAMES, 0)
    } else if (kCFDateFormatterVeryShortMonthSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_NARROW_MONTHS, 0)
    } else if (kCFDateFormatterStandaloneMonthSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_STANDALONE_MONTHS, 0)
    } else if (kCFDateFormatterShortStandaloneMonthSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_STANDALONE_SHORT_MONTHS, 0)
    } else if (kCFDateFormatterVeryShortStandaloneMonthSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_STANDALONE_NARROW_MONTHS, 0)
    } else if (kCFDateFormatterVeryShortWeekdaySymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_NARROW_WEEKDAYS, 1)
    } else if (kCFDateFormatterStandaloneWeekdaySymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_STANDALONE_WEEKDAYS, 1)
    } else if (kCFDateFormatterShortStandaloneWeekdaySymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_STANDALONE_SHORT_WEEKDAYS, 1)
    } else if (kCFDateFormatterVeryShortStandaloneWeekdaySymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_STANDALONE_NARROW_WEEKDAYS, 1)
    } else if (kCFDateFormatterQuarterSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_QUARTERS, 1)
    } else if (kCFDateFormatterShortQuarterSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_SHORT_QUARTERS, 1)
    } else if (kCFDateFormatterStandaloneQuarterSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_STANDALONE_QUARTERS, 1)
    } else if (kCFDateFormatterShortStandaloneQuarterSymbols == key) {
        GET_SYMBOLS_ARRAY(UDAT_STANDALONE_SHORT_QUARTERS, 1)
    } else {
        CFAssert3(0, __kCFLogAssertion, "%s(): unknown key %p (%@)", __PRETTY_FUNCTION__, key, key);
    }
    return NULL;
}
/* Definitions of the exported CFDateFormatter property-key constants used by
   CFDateFormatterSetProperty/CFDateFormatterCopyProperty above.  Keys are
   compared by pointer identity, so callers must use these exact constants. */
CONST_STRING_DECL(kCFDateFormatterIsLenient, "kCFDateFormatterIsLenient")
CONST_STRING_DECL(kCFDateFormatterTimeZone, "kCFDateFormatterTimeZone")
CONST_STRING_DECL(kCFDateFormatterCalendarName, "kCFDateFormatterCalendarName")
CONST_STRING_DECL(kCFDateFormatterCalendarIdentifier, "kCFDateFormatterCalendarIdentifier")
CONST_STRING_DECL(kCFDateFormatterCalendar, "kCFDateFormatterCalendar")
CONST_STRING_DECL(kCFDateFormatterDefaultFormat, "kCFDateFormatterDefaultFormat")
CONST_STRING_DECL(kCFDateFormatterTwoDigitStartDate, "kCFDateFormatterTwoDigitStartDate")
CONST_STRING_DECL(kCFDateFormatterDefaultDate, "kCFDateFormatterDefaultDate")
CONST_STRING_DECL(kCFDateFormatterEraSymbols, "kCFDateFormatterEraSymbols")
CONST_STRING_DECL(kCFDateFormatterMonthSymbols, "kCFDateFormatterMonthSymbols")
CONST_STRING_DECL(kCFDateFormatterShortMonthSymbols, "kCFDateFormatterShortMonthSymbols")
CONST_STRING_DECL(kCFDateFormatterWeekdaySymbols, "kCFDateFormatterWeekdaySymbols")
CONST_STRING_DECL(kCFDateFormatterShortWeekdaySymbols, "kCFDateFormatterShortWeekdaySymbols")
CONST_STRING_DECL(kCFDateFormatterAMSymbol, "kCFDateFormatterAMSymbol")
CONST_STRING_DECL(kCFDateFormatterPMSymbol, "kCFDateFormatterPMSymbol")
CONST_STRING_DECL(kCFDateFormatterLongEraSymbols, "kCFDateFormatterLongEraSymbols")
CONST_STRING_DECL(kCFDateFormatterVeryShortMonthSymbols, "kCFDateFormatterVeryShortMonthSymbols")
CONST_STRING_DECL(kCFDateFormatterStandaloneMonthSymbols, "kCFDateFormatterStandaloneMonthSymbols")
CONST_STRING_DECL(kCFDateFormatterShortStandaloneMonthSymbols, "kCFDateFormatterShortStandaloneMonthSymbols")
CONST_STRING_DECL(kCFDateFormatterVeryShortStandaloneMonthSymbols, "kCFDateFormatterVeryShortStandaloneMonthSymbols")
CONST_STRING_DECL(kCFDateFormatterVeryShortWeekdaySymbols, "kCFDateFormatterVeryShortWeekdaySymbols")
CONST_STRING_DECL(kCFDateFormatterStandaloneWeekdaySymbols, "kCFDateFormatterStandaloneWeekdaySymbols")
CONST_STRING_DECL(kCFDateFormatterShortStandaloneWeekdaySymbols, "kCFDateFormatterShortStandaloneWeekdaySymbols")
CONST_STRING_DECL(kCFDateFormatterVeryShortStandaloneWeekdaySymbols, "kCFDateFormatterVeryShortStandaloneWeekdaySymbols")
CONST_STRING_DECL(kCFDateFormatterQuarterSymbols, "kCFDateFormatterQuarterSymbols")
CONST_STRING_DECL(kCFDateFormatterShortQuarterSymbols, "kCFDateFormatterShortQuarterSymbols")
CONST_STRING_DECL(kCFDateFormatterStandaloneQuarterSymbols, "kCFDateFormatterStandaloneQuarterSymbols")
CONST_STRING_DECL(kCFDateFormatterShortStandaloneQuarterSymbols, "kCFDateFormatterShortStandaloneQuarterSymbols")
CONST_STRING_DECL(kCFDateFormatterGregorianStartDate, "kCFDateFormatterGregorianStartDate")
/* BUFFER_SIZE is private to this translation unit's formatter code. */
#undef BUFFER_SIZE
|
"""
Implementing an algorithm to do in-place rotation of an array
"""
def rotate_array(arr, d):
n = len(arr)
d = d % n
reverse(arr, 0, d-1)
reverse(arr, d, n-1)
reverse(arr, 0, n-1)
def reverse(arr, start, end):
while(start < end):
arr[start], arr[end] = arr[end], arr[start]
start += 1
end -= 1
if __name__ == '__main__':
    # Small demo: rotate a sample array left by two positions and show it.
    sample = [1, 2, 3, 4, 5, 6]
    shift = 2
    rotate_array(sample, shift)
    print(sample)
<reponame>deianvn/pgnviewer
/*
* This file is part of JPGNViewer.
*
* JPGNViewer is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JPGNViewer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JPGNViewer. If not, see <http://www.gnu.org/licenses/>.
*/
package com.github.deianvn.pgnviewer.core;
/**
*
*
* @author <NAME>
*
*/
public class Position {

    /** Regex for algebraic board coordinates: file a-h followed by rank 1-8. */
    private static final String POS_REG = "[a-h][1-8]";

    /** Horizontal (file) index, 0-7 where 0 is file 'a'. */
    private int hPosition;

    /** Vertical (rank) index, 0-7 where 0 is rank '1'. */
    private int vPosition;

    /**
     * Creates a position at the board origin, i.e. square "a1" (0, 0).
     */
    public Position() {
        this.hPosition = 0;
        this.vPosition = 0;
    }

    /**
     * Parses an algebraic square name such as "e4".
     *
     * @param position the square in algebraic notation, e.g. "a1".."h8"
     * @throws NullPointerException if {@code position} is {@code null}
     * @throws MalformedPositionException if the string is not a valid square
     */
    public Position(String position) throws MalformedPositionException {
        if (position == null) {
            throw new NullPointerException("Position is null");
        }
        if (!position.matches(POS_REG)) {
            throw new MalformedPositionException(position);
        }
        // The regex guarantees a file in 'a'..'h' and a rank digit in '1'..'8',
        // so plain character arithmetic yields the 0-based indices.
        this.hPosition = position.charAt(0) - 'a';
        this.vPosition = position.charAt(1) - '1';
    }

    /**
     * Creates a position from 0-based board indices.
     *
     * @param hPosition horizontal index, must be within 0-7
     * @param vPosition vertical index, must be within 0-7
     * @throws MalformedPositionException if either index is out of range
     */
    Position(int hPosition, int vPosition) throws MalformedPositionException {
        final boolean inRange = hPosition >= 0 && hPosition <= 7
                && vPosition >= 0 && vPosition <= 7;
        if (!inRange) {
            throw new MalformedPositionException("[" + hPosition + " : " + vPosition + "]");
        }
        this.hPosition = hPosition;
        this.vPosition = vPosition;
    }

    /**
     * @return the horizontal (file) index, 0-7
     */
    public int gethPosition() {
        return hPosition;
    }

    /**
     * @param hPosition the horizontal (file) index to set (not validated)
     */
    public void sethPosition(int hPosition) {
        this.hPosition = hPosition;
    }

    /**
     * @return the vertical (rank) index, 0-7
     */
    public int getvPosition() {
        return vPosition;
    }

    /**
     * @param vPosition the vertical (rank) index to set (not validated)
     */
    public void setvPosition(int vPosition) {
        this.vPosition = vPosition;
    }
}
|
import os
from collections import Counter
from dagster import (
InputDefinition,
ModeDefinition,
PresetDefinition,
default_executors,
file_relative_path,
pipeline,
repository,
solid,
)
from dagster_aws.s3 import s3_pickle_io_manager, s3_resource
from dagster_celery_k8s import celery_k8s_job_executor
@solid(input_defs=[InputDefinition("word", str)], config_schema={"factor": int})
def multiply_the_word(context, word):
return word * context.solid_config["factor"]
@solid(input_defs=[InputDefinition("word")])
def count_letters(_context, word):
return dict(Counter(word))
# Pipeline wiring: multiply_the_word feeds count_letters.  Two modes are
# declared ("default" persists intermediates to S3, "test" uses in-process
# defaults), both allowing the celery-k8s executor alongside the built-ins.
# Presets load run config from YAML files that ship next to this package.
@pipeline(
    mode_defs=[
        # Production-style mode: S3-backed IO manager plus S3 resource.
        ModeDefinition(
            name="default",
            resource_defs={"s3": s3_resource, "io_manager": s3_pickle_io_manager},
            executor_defs=default_executors + [celery_k8s_job_executor],
        ),
        # Test mode: no external resources, same executor choices.
        ModeDefinition(
            name="test",
            executor_defs=default_executors + [celery_k8s_job_executor],
        ),
    ],
    preset_defs=[
        # Preset that layers the celery-k8s executor config over the base run config.
        PresetDefinition.from_files(
            "celery_k8s",
            config_files=[
                file_relative_path(__file__, os.path.join("..", "run_config", "celery_k8s.yaml")),
                file_relative_path(__file__, os.path.join("..", "run_config", "pipeline.yaml")),
            ],
            mode="default",
        ),
        # Preset with only the base run config (default executor).
        PresetDefinition.from_files(
            "default",
            config_files=[
                file_relative_path(__file__, os.path.join("..", "run_config", "pipeline.yaml")),
            ],
            mode="default",
        ),
    ],
)
def example_pipe():
    # Compose the two solids: the multiplied word is counted per letter.
    count_letters(multiply_the_word())
@repository
def example_repo():
    """Expose this module's pipelines to Dagster tooling."""
    pipelines = [example_pipe]
    return pipelines
|
/**
 * Pause overlay scene: shows a title and a clickable button that resumes
 * the paused 'Game_' scene and stops this overlay.
 */
class Pause extends Phaser.Scene {

    constructor() {
        super({key: 'Pause', active: false});
    }

    create() {
        // Title text in the upper third of the window.
        this.pauseText = this.add.text(window.innerWidth / 2, window.innerHeight / 3, 'Pause', {
            fontSize: '80px',
            fill: '#ffffff'
        });

        // Bug fix: this button previously reused the 'Pause' label and the
        // exact coordinates of the title, so the two texts overlapped and the
        // resume control was indistinguishable.  It is the resume button, so
        // label it 'Resume' and place it below the title.
        this.resumeButton = this.add.text(window.innerWidth / 2, window.innerHeight / 2, 'Resume', {
            fontSize: '40px',
            fill: '#ffffff'
        });

        // Resume gameplay and tear down this overlay on click/tap.
        this.resumeButton.setInteractive();
        this.resumeButton.on('pointerdown', function () {
            this.scene.resume('Game_');
            this.scene.stop();
        }, this);
    }
}
|
<reponame>nanjingblue/bAGD-GO<filename>internal/service/crawler.go
package service
import (
"Opendulum/internal/serializer"
"Opendulum/pkg/crawler"
"github.com/gin-gonic/gin"
)
// GetCommentsRequest carries the parameters of the comment-crawling endpoint.
// ItemId identifies the JingDong product whose comments should be fetched;
// the binding tag requires it to be present and 1-50 characters long.
type GetCommentsRequest struct {
	ItemId string `form:"item_id" json:"item_id" binding:"required,min=1,max=50"`
}
// GetCommentsService crawls JingDong comments for the requested item and
// wraps the outcome in an API response: code 200 with the comment list on
// success, code 500 with the attempted URL when fetching yields nothing.
func (svc *Service) GetCommentsService(param *GetCommentsRequest) serializer.Response {
	jd := crawler.NewJingDongCommentsRes(param.ItemId)
	comments := jd.Fetch()
	// Guard clause: surface the failing URL so the client can diagnose.
	if comments == nil {
		return serializer.Response{
			Code: 500,
			Data: gin.H{
				"url": jd.Url,
			},
			Msg: "get json fail",
		}
	}
	return serializer.Response{
		Code: 200,
		Data: comments,
		Msg:  "get comments list success",
	}
}
|
package org.rs2server.rs2.model.quests.impl;
import org.rs2server.rs2.model.Item;
import org.rs2server.rs2.model.Shop;
import org.rs2server.rs2.model.Skills;
import org.rs2server.rs2.model.Animation.FacialAnimation;
import org.rs2server.rs2.model.player.Player;
import org.rs2server.rs2.model.quests.Quest;
import org.rs2server.rs2.net.ActionSender.DialogueType;
/**
 * Lost City quest.
 *
 * NOTE(review): the entire implementation below — including the
 * {@code extends Quest<LostCityStates>} declaration — is commented out, so
 * this class currently compiles to an empty placeholder.  The disabled code
 * (dialogue tree, state transitions, quest-journal interface text) is kept
 * verbatim so it can be restored when the quest system is re-enabled.
 */
public class LostCity /*extends Quest<LostCityStates>*/ {
    /*private Item DRAMEN_STAFF = new Item(772, 1);
    private int config = 147;
    public LostCity(Player player, LostCityStates state) {
        super(player, state);
    }
    public boolean hasRequirements() {
        return player.getSkills().getLevelForExperience(Skills.CRAFTING) > 31 || player.getSkills().getLevelForExperience(Skills.WOODCUTTING) > 36;
    }
    @Override
    public void updateProgress() {
        for(int i = 0; i < 5; i++) {
            setNextDialogueId(i, -1);
        }
        switch (state) {
        case NOT_STARTED:
            openDialogue(0);
            break;
        case STARTED:
            openDialogue(11);
            player.getActionSender().sendConfig(config, 1);
            break;
        case COMPLETED:
            openDialogue(18);
            setState(LostCityStates.COMPLETED);
            player.getActionSender().sendConfig(config, 6);
            player.getActionSender().sendConfig(101, 3);
            if (player.getAttribute("talkingNpc") != null) {
                switch ((int) player.getAttribute("talkingNpc")) {
                case 1158:
                    openDialogue(2000);
                    break;
                }
            }
            break;
        }
    }
    public void advanceDialogue(int index) {
        int dialogueId = getNextDialogueId(index);
        if(dialogueId == -1) {
            player.getActionSender().removeChatboxInterface();
            return;
        }
        openDialogue(dialogueId);
    }
    public void openDialogue(int dialogue) {
        if(dialogue == -1) {
            return;
        }
        switch (dialogue) {
        *//**
         * State = NOT_STARTED
         *//*
        case 0:// not enough
            sendDialogue("Warrior", DialogueType.NPC, 1158, FacialAnimation.DEFAULT, "Hello there, traveler.");
            setNextDialogueId(0, 1);
            break;
        case 1:
            player.getActionSender().sendDialogue(player.getName(), DialogueType.PLAYER, -1, FacialAnimation.DEFAULT, "Why are you camped out here?");
            setNextDialogueId(0, 2);
            break;
        case 2:
            player.getActionSender().sendDialogue(player.getName(), DialogueType.PLAYER, -1, FacialAnimation.DEFAULT, "Do you know any good adventurers I can go on?");
            setNextDialogueId(0, 3);
            break;
        case 3:
            sendDialogue("Warrior", DialogueType.NPC, 1158, FacialAnimation.DEFAULT, "Well, we're on an adventure right now. Mind you, this is OUR adventure and we don't want to share it - find your own!");
            setNextDialogueId(0, 4);
            break;
        case 4:
            player.getActionSender().sendDialogue(player.getName(), DialogueType.PLAYER, -1, FacialAnimation.DEFAULT, "Please tell me?");
            setNextDialogueId(0, 5);
            break;
        case 5:
            sendDialogue("Warrior", DialogueType.NPC, 1158, FacialAnimation.DEFAULT, "No.");
            setNextDialogueId(0, 6);
            break;
        case 6:
            player.getActionSender().sendDialogue(player.getName(), DialogueType.PLAYER, -1, FacialAnimation.DEFAULT, "Please?");
            setNextDialogueId(0, 7);
            break;
        case 7:
            sendDialogue("Warrior", DialogueType.NPC, 1158, FacialAnimation.DEFAULT, "No!");
            setNextDialogueId(0, 8);
            break;
        case 8:
            player.getActionSender().sendDialogue(player.getName(), DialogueType.PLAYER, -1, FacialAnimation.DEFAULT, "PLEEEEEEEEEEEEEEEEEEEEEEASE???");
            setNextDialogueId(0, 9);
            break;
        case 9:
            sendDialogue("Warrior", DialogueType.NPC, 1158, FacialAnimation.DEFAULT, "Fine...We're looking for Zanaris, would you like to help?");
            setNextDialogueId(0, 10);
            break;
        case 10:
            player.getActionSender().sendDialogue("Select an Option", DialogueType.OPTION, -1, FacialAnimation.DEFAULT,
                    "Yes i'd love to help you!|No thank you, i'm busy.");
            setNextDialogueId(0, 11);
            setNextDialogueId(1, 12);
            break;
        case 11:
            player.getActionSender().sendDialogue(player.getName(), DialogueType.PLAYER, -1, FacialAnimation.DEFAULT, "Yes, i'd love to help you!");
            setState(LostCityStates.STARTED);
            player.getActionSender().sendConfig(147, 1);
            setNextDialogueId(0, 13);
            break;
        case 12:
            player.getActionSender().sendDialogue(player.getName(), DialogueType.PLAYER, -1, FacialAnimation.DEFAULT, "No thank you, i'm busy.");
            player.getActionSender().removeChatboxInterface();
            break;
        case 13:
            sendDialogue("Warrior", DialogueType.NPC, 1158, FacialAnimation.DEFAULT, "Great, the Leprechaun told us we have to fight a Tree Spirit and get a Dramen staff?");
            setNextDialogueId(0, 14);
            break;
        case 14:
            player.getActionSender().sendDialogue(player.getName(), DialogueType.PLAYER, -1, FacialAnimation.DEFAULT, "Where do i find this Tree Spirit?");
            setNextDialogueId(0, 15);
            break;
        case 15:
            sendDialogue("Warrior", DialogueType.NPC, 1158, FacialAnimation.DEFAULT, "You must travel to Entrana and kill the Tree Spirit and make a Dramen staff out of the tree in the dungeon.");
            setNextDialogueId(0, 16);
            break;
        case 16:
            player.getActionSender().sendDialogue(player.getName(), DialogueType.PLAYER, -1, FacialAnimation.DEFAULT, "Okay, ill be on my way!");
            setNextDialogueId(0, 17);
            break;
        case 17:
            sendDialogue("Warrior", DialogueType.NPC, 1158, FacialAnimation.DEFAULT, "Before you go, remember, once you get the Dramen staff, come back and talk to me again.");
            setNextDialogueId(0, 18);
            break;
        case 18:
            if (player.getInventory().contains(772)) {
                sendDialogue("Warrior", DialogueType.NPC, 1158, FacialAnimation.DEFAULT, "Thank you, you may enter Zanaris whenever you like.");
                player.getInventory().remove(DRAMEN_STAFF);
                setState(LostCityStates.COMPLETED);
                player.getInventory().add(new Item(1602, 3));
                player.getInventory().add(new Item(995, 10000));
                player.getActionSender().sendConfig(101, 3);
                player.getActionSender().sendConfig(147, 6);
                setNextDialogueId(0, 19);
            } else {
                sendDialogue("Warrior", DialogueType.NPC, 1158, FacialAnimation.DEFAULT, "Go get a Dramen staff then come back and talk to me.");
                setNextDialogueId(0, -1);
            }
            break;
        case 2000:
            sendDialogue("Warrior", DialogueType.NPC, 1158, FacialAnimation.DEFAULT, "Hey there, " + player.getName());
            setNextDialogueId(0, 2001);
            break;
        case 2001:
            player.getActionSender().sendDialogue(player.getName(), DialogueType.PLAYER, -1, FacialAnimation.DEFAULT, "Can I see your shop?|Nevermind.");
            setNextDialogueId(0, 2002);
            setNextDialogueId(1, -1);
            break;
        case 2002:
            player.getActionSender().removeChatboxInterface();
            Shop.open(player, 26, 2);
            break;
        }
    }
    public void showQuestInterface() {
        player.getActionSender().sendString(275, 2, "<col=800000>Lost City");
        boolean started = state != LostCityStates.NOT_STARTED;
        if (started) {
            switch (state) {
            case NOT_STARTED:
                player.getActionSender().sendString(275, 4, "Speak to the Warrior in the Lumbridge Swamp to begin this quest");
                player.getActionSender().sendString(275, 5, "<col=800000>Requirements:");
                player.getActionSender().sendString(275, 6, "<col=000080>31 Crafting");
                player.getActionSender().sendString(275, 7, "<col=000080>36 Woodcutting");
                for (int i = 6; i <= 133; i++) {
                    player.getActionSender().sendString(275, i, "");
                }
                break;
            case STARTED:
                player.getActionSender().sendString(275, 4, "The Warrior said i should head over to Entrana and kill");
                player.getActionSender().sendString(275, 5, "the Tree Spirit.");
                player.getActionSender().sendString(275, 6, "<col=880000>Note: Once you kill the Tree Spirit, cut");
                player.getActionSender().sendString(275, 7, "the dramen tree and fletch a dramen staff and return");
                player.getActionSender().sendString(275, 8, "to the warrior");
                for (int i = 6; i <= 133; i++) {
                    player.getActionSender().sendString(275, i, "");
                }
                break;
            case COMPLETED:
                player.getActionSender().sendString(275, 4, "The Warrior said i should head over to Entrana and kill");
                player.getActionSender().sendString(275, 5, "the Tree Spirit.");
                player.getActionSender().sendString(275, 6, "<col=880000>Note: Once you kill the Tree Spirit, cut");
                player.getActionSender().sendString(275, 7, "the dramen tree and fletch a dramen staff and return");
                player.getActionSender().sendString(275, 8, "to the warrior");
                player.getActionSender().sendString(275, 9, "<col=ff0000>QUEST COMPLETE!");
                player.getActionSender().sendString(275, 10, "<col=800000>Reward:");
                player.getActionSender().sendString(275, 11, "<col=000080>3 Diamonds");
                player.getActionSender().sendString(275, 12, "<col=000080>Access to Zanaris");
                player.getActionSender().sendString(275, 13, "<col=000080>3 Quest points");
                player.getActionSender().sendString(275, 14, "<col=000080>Access to The Warrior's Shop");
                for (int i = 6; i <= 133; i++) {
                    player.getActionSender().sendString(275, i, "");
                }
                break;
            }
        }
    }*/
}
|
from typing import List, Tuple
class Submission:
    """A queued item carrying a mutable review status."""

    def __init__(self, status):
        # Current review state, e.g. "pending" or "approved".
        self.status = status

    async def approve(self):
        """Mark this submission as approved."""
        self.status = "approved"
async def manageQueue(submissions: List[Submission]) -> Tuple[List[Submission], List[Submission]]:
    """Partition a queue into pending and newly approved submissions.

    Submissions whose status is "pending" are collected untouched; every
    other submission has its ``approve`` coroutine awaited before being
    collected.  Input order is preserved within each list.

    Returns:
        A ``(pending, approved)`` pair of lists.
    """
    pending: List[Submission] = []
    approved: List[Submission] = []
    for item in submissions:
        if item.status == "pending":
            pending.append(item)
            continue
        await item.approve()
        approved.append(item)
    return pending, approved
#!/bin/bash
# Tag the latest local rust-server image with a release version and push it.
# Usage: ./push.sh [version-tag]
# The tag defaults to v1.0, preserving the original behavior when no
# argument is given; passing a tag generalizes the script to any release.
set -euo pipefail

IMAGE="stuhin/rust-server"
VERSION="${1:-v1.0}"

docker tag "${IMAGE}:latest" "${IMAGE}:${VERSION}"
docker push "${IMAGE}:${VERSION}"
#!/bin/bash
# Build luvi inside a Holy Build Box container from /io/luvi-src.tar.gz,
# verify its dynamic-library footprint, and copy the binary back to /io.
# $1 selects the make target (defaults to "regular-asm").
BUILD_TYPE=${1:-regular-asm}
# Parallelism: one make job per CPU reported by /proc/cpuinfo.
NPROCS=$(grep -c ^processor /proc/cpuinfo)
echo "Build Type: ${BUILD_TYPE}"
set -e
# Activate Holy Build Box environment.
source /hbb_exe/activate
# Remove -fvisibility=hidden and -g from CFLAGS
CFLAGS=${CFLAGS//-fvisibility=hidden}
CFLAGS=${CFLAGS//-g}
set -x
# Extract and enter source
# Use /luvi dir to avoid CMake assertion failure in /
mkdir -p luvi
tar xzf /io/luvi-src.tar.gz --directory luvi
cd luvi
# First make invocation configures the chosen build flavor; the second
# performs the parallel compile.
make ${BUILD_TYPE}
make -j${NPROCS}
# Show dynamic dependencies and run HBB's libcheck to catch non-portable links.
ldd build/luvi
libcheck build/luvi
# Export the finished binary to the host-mounted /io directory.
cp build/luvi /io
<filename>src/tests/matrix3_test.cpp
#include "test.hpp"
namespace frea {
    namespace test {
        // Typed-test fixture alias over the shared RMatrix fixture; the type
        // list covers float 3x3..4x4 matrix variants (SMatrixRange 3..4).
        template <class T>
        using MatrixD_3 = RMatrix<T>;
        using TypesD_3 = ToTestTypes_t<types::SMatrixRange_t<types::Float_t, 3,4>>;
        TYPED_TEST_SUITE(MatrixD_3, TypesD_3);

        // Checks that the axis-specific rotation constructors agree with the
        // generic axis-angle constructor, and that the matrix rotation agrees
        // with the equivalent quaternion rotation, within loose float thresholds.
        TYPED_TEST(MatrixD_3, Rotation) {
            USING(mat_t);
            USING(value_t);
            using quat_t = QuatT<value_t, TestFixture::align>;
            const auto angle = this->makeRadian();
            constexpr auto Th = lubee::ThresholdF<value_t>(0.1);
            {
                // RotationX(ang) -> Rotation({1,0,0}, ang)
                const auto m0 = mat_t::RotationX(angle),
                           m1 = mat_t::RotationAxis({1,0,0}, angle);
                EXPECT_LE(AbsMax(mat_t(m0-m1)), Th);
            }
            {
                // RotationY(ang) -> Rotation({0,1,0}, ang)
                const auto m0 = mat_t::RotationY(angle),
                           m1 = mat_t::RotationAxis({0,1,0}, angle);
                EXPECT_LE(AbsMax(mat_t(m0-m1)), Th);
            }
            {
                // RotationZ(ang) -> Rotation({0,0,1}, ang)
                const auto m0 = mat_t::RotationZ(angle),
                           m1 = mat_t::RotationAxis({0,0,1}, angle);
                EXPECT_LE(AbsMax(mat_t(m0-m1)), Th);
            }
            {
                // Compare Rotation(axis, ang) against Quaternion::Rotation(axis, ang)
                // (random axis; wider threshold for the quaternion round-trip).
                const auto axis = this->makeDir();
                const auto m0 = mat_t::RotationAxis(axis, angle),
                           m1 = quat_t::Rotation(axis, angle).asMat33();
                EXPECT_LE(AbsMax(mat_t(m0-m1)), lubee::ThresholdF<value_t>(0.5));
            }
        }
    }
}
|
def handleNotification(click_action):
    """Map a notification click action to the UI behaviour it triggers.

    Args:
        click_action: the action string delivered with the notification.

    Returns:
        "Open the app" for the Flutter notification-click action,
        "Do nothing" for anything else.
    """
    is_flutter_click = click_action == "FLUTTER_NOTIFICATION_CLICK"
    return "Open the app" if is_flutter_click else "Do nothing"
<filename>src/main/java/org/dpsoftware/gui/controllers/ControlTabController.java
/*
ControlTabController.java
Firefly Luciferin, very fast Java Screen Capture software designed
for Glow Worm Luciferin firmware.
Copyright (C) 2020 - 2022 <NAME> (https://github.com/sblantipodi)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.dpsoftware.gui.controllers;
import javafx.animation.AnimationTimer;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import javafx.fxml.FXML;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.input.InputEvent;
import org.dpsoftware.FireflyLuciferin;
import org.dpsoftware.JavaFXStarter;
import org.dpsoftware.NativeExecutor;
import org.dpsoftware.config.Configuration;
import org.dpsoftware.config.Constants;
import org.dpsoftware.config.LocalizedEnum;
import org.dpsoftware.utilities.CommonUtility;
import java.util.Objects;
/**
* Control Tab controller
*/
public class ControlTabController {
// Inject main controller
@FXML private SettingsController settingsController;
// FXML binding
@FXML private Label version;
@FXML private Label producerLabel;
@FXML private Label consumerLabel;
// Backing properties for the producer/consumer FPS labels (bound on Linux only).
@FXML private final StringProperty producerValue = new SimpleStringProperty("");
@FXML private final StringProperty consumerValue = new SimpleStringProperty("");
@FXML private Button playButton;
@FXML public Button showInfo;
// Image currently displayed on the play button.
Image controlImage;
ImageView imageView;
public AnimationTimer animationTimer;
// Pre-loaded play-button images: one set per player status, with
// Center/Left/Right variants chosen from the multi-monitor layout.
Image imagePlay, imagePlayCenter, imagePlayLeft, imagePlayRight, imagePlayWaiting, imagePlayWaitingCenter, imagePlayWaitingLeft, imagePlayWaitingRight;
Image imageStop, imageStopCenter, imageStopLeft, imageStopRight;
Image imageGreyStop, imageGreyStopCenter, imageGreyStopLeft, imageGreyStopRight;
/**
* Inject main controller containing the TabPane
* @param settingsController TabPane controller
*/
public void injectSettingsController(SettingsController settingsController) {
this.settingsController = settingsController;
}
/**
* Initialize controller with system's specs.
* Only wires the FPS labels and play-button image on Linux (guarded by
* NativeExecutor.isLinux()); on other platforms this method is a no-op.
*/
@FXML
protected void initialize() {
if (NativeExecutor.isLinux()) {
producerLabel.textProperty().bind(producerValueProperty());
consumerLabel.textProperty().bind(consumerValueProperty());
// Pick the initial play-button image from the current capture state.
// NOTE(review): setImage() runs before initImages(), so the image
// fields it reads are still null at this point — confirm ordering.
if (FireflyLuciferin.communicationError) {
controlImage = setImage(Constants.PlayerStatus.GREY);
} else if (FireflyLuciferin.RUNNING) {
controlImage = setImage(Constants.PlayerStatus.PLAY_WAITING);
} else {
controlImage = setImage(Constants.PlayerStatus.STOP);
}
version.setText("by <NAME> (VERSION)".replaceAll("VERSION", FireflyLuciferin.version));
setButtonImage();
initImages();
}
}
/**
* Initialize tab Control images. All images are loaded asynchronously
* (the second Image constructor argument enables background loading).
* On single-device multi-screen setups the right-hand variants are
* replaced by their gold versions.
*/
public void initImages() {
imagePlay = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_PLAY)).toString(), true);
imagePlayCenter = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_PLAY_CENTER)).toString(), true);
imagePlayLeft = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_PLAY_LEFT)).toString(), true);
imagePlayRight = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_PLAY_RIGHT)).toString(), true);
imagePlayWaiting = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_PLAY_WAITING)).toString(), true);
imagePlayWaitingCenter = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_PLAY_WAITING_CENTER)).toString(), true);
imagePlayWaitingLeft = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_PLAY_WAITING_LEFT)).toString(), true);
imagePlayWaitingRight = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_PLAY_WAITING_RIGHT)).toString(), true);
imageStop = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_LOGO)).toString(), true);
imageStopCenter = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_LOGO_CENTER)).toString(), true);
imageStopLeft = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_LOGO_LEFT)).toString(), true);
imageStopRight = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_LOGO_RIGHT)).toString(), true);
imageGreyStop = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_GREY)).toString(), true);
imageGreyStopCenter = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_GREY_CENTER)).toString(), true);
imageGreyStopLeft = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_GREY_LEFT)).toString(), true);
imageGreyStopRight = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_GREY_RIGHT)).toString(), true);
// Gold right-hand variants for single-device multi-screen setups.
if (CommonUtility.isSingleDeviceMultiScreen()) {
imagePlayRight = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_PLAY_RIGHT_GOLD)).toString(), true);
imagePlayWaitingRight = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_PLAY_WAITING_RIGHT_GOLD)).toString(), true);
imageStopRight = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_LOGO_RIGHT_GOLD)).toString(), true);
imageGreyStopRight = new Image(Objects.requireNonNull(this.getClass().getResource(Constants.IMAGE_CONTROL_GREY_RIGHT_GOLD)).toString(), true);
}
}
/**
* Init form values by reading existing config file.
* On non-Windows platforms with LEDs toggled on, the bias-light/music-mode
* effects show the "play waiting" image; other effects leave the image as-is.
*/
public void initValuesFromSettingsFile() {
Constants.Effect effectInUse = LocalizedEnum.fromBaseStr(Constants.Effect.class, FireflyLuciferin.config.getEffect());
if (!NativeExecutor.isWindows() && FireflyLuciferin.config.isToggleLed()) {
switch (effectInUse) {
case BIAS_LIGHT, MUSIC_MODE_VU_METER, MUSIC_MODE_VU_METER_DUAL, MUSIC_MODE_BRIGHT, MUSIC_MODE_RAINBOW -> {
controlImage = setImage(Constants.PlayerStatus.PLAY_WAITING);
setButtonImage();
}
}
}
}
/**
* Start and stop capturing
* @param e InputEvent
*/
@FXML
@SuppressWarnings("unused")
public void onMouseClickedPlay(InputEvent e) {
// Default to grey; it remains grey while a communication error persists.
controlImage = setImage(Constants.PlayerStatus.GREY);
if (!FireflyLuciferin.communicationError) {
// Flip the button image to the opposite of the current state...
if (FireflyLuciferin.RUNNING) {
controlImage = setImage(Constants.PlayerStatus.STOP);
} else {
controlImage = setImage(Constants.PlayerStatus.PLAY_WAITING);
}
setButtonImage();
// ...then actually start/stop the capture threads.
if (FireflyLuciferin.RUNNING) {
FireflyLuciferin.guiManager.stopCapturingThreads(true);
} else {
FireflyLuciferin.guiManager.startCapturingThreads();
}
}
}
/**
* Show info popup on Linux
* @param e InputEvent
*/
@FXML
@SuppressWarnings("unused")
public void onMouseClickedShowInfo(InputEvent e) {
FireflyLuciferin.guiManager.showFramerateDialog();
}
/**
* Set and return LED tab image.
* Falls back to the grey (disabled) image when no config is loaded.
* @param playerStatus PLAY, PLAY_WAITING, STOP or GREY
* @return image matching the status and current monitor layout
*/
@SuppressWarnings("Duplicates")
public Image setImage(Constants.PlayerStatus playerStatus) {
Image imgControl;
if (FireflyLuciferin.config == null) {
imgControl = imageGreyStop;
} else {
imgControl = switch (playerStatus) {
case PLAY -> setImage(imagePlay, imagePlayRight, imagePlayLeft, imagePlayCenter);
case PLAY_WAITING -> setImage(imagePlayWaiting, imagePlayWaitingRight, imagePlayWaitingLeft, imagePlayWaitingCenter);
case STOP -> setImage(imageStop, imageStopRight, imageStopLeft, imageStopCenter);
case GREY -> setImage(imageGreyStop, imageGreyStopRight, imageGreyStopLeft, imageGreyStopCenter);
};
}
return imgControl;
}
/**
* Pick the image variant for this instance based on JavaFXStarter.whoAmI
* (the instance number) and the configured multi-monitor count.
* @param imagePlay image for a single-monitor setup
* @param imagePlayRight image for the right-hand monitor
* @param imagePlayLeft image for the left-hand monitor
* @param imagePlayCenter image for the center monitor
* @return tray image (null for instance numbers outside 1..3)
*/
@SuppressWarnings("Duplicates")
private Image setImage(Image imagePlay, Image imagePlayRight, Image imagePlayLeft, Image imagePlayCenter) {
Image img = null;
switch (JavaFXStarter.whoAmI) {
case 1:
if ((FireflyLuciferin.config.getMultiMonitor() == 1)) {
img = imagePlay;
} else {
img = imagePlayRight;
}
break;
case 2:
if ((FireflyLuciferin.config.getMultiMonitor() == 2)) {
img = imagePlayLeft;
} else {
img = imagePlayCenter;
}
break;
// NOTE(review): instance 3 always gets the left image with no
// multi-monitor check, unlike cases 1 and 2 — confirm this is intended.
case 3: img = imagePlayLeft; break;
}
return img;
}
/**
* Manage animation timer to update the UI every seconds
*/
public void startAnimationTimer() {
animationTimer = new AnimationTimer() {
private long lastUpdate = 0 ;
@Override
public void handle(long now) {
// Convert the frame timestamp from nanoseconds to seconds so the
// body below runs at most once per second.
now = now / 1_000_000_000;
if (now - lastUpdate >= 1) {
lastUpdate = now;
// NOTE(review): both branches call manageDeviceList(); only the
// non-Windows branch additionally refreshes the FPS labels and
// the play-button image. The if/else could likely be simplified.
if (NativeExecutor.isWindows()) {
settingsController.manageDeviceList();
} else {
settingsController.manageDeviceList();
setProducerValue(CommonUtility.getWord("fxml.controltab.producer") + " @ " + FireflyLuciferin.FPS_PRODUCER + " FPS");
setConsumerValue(CommonUtility.getWord("fxml.controltab.consumer") + " @ " + FireflyLuciferin.FPS_GW_CONSUMER + " FPS");
// Once capture is actually running, swap the "waiting" image
// (detected via its URL) for the regular play image.
if (FireflyLuciferin.RUNNING && controlImage != null && controlImage.getUrl().contains("waiting")) {
controlImage = setImage(Constants.PlayerStatus.PLAY);
setButtonImage();
}
}
}
}
};
animationTimer.start();
}
/**
* Save button event
* @param e event
*/
@FXML
public void save(InputEvent e) {
settingsController.save(e);
}
/**
* Set form tooltips. Tooltips are only installed on non-Windows platforms;
* the text and the numeric argument (presumably the show delay — confirm
* against createTooltip) differ depending on whether a config exists yet.
* @param currentConfig stored config
*/
void setTooltips(Configuration currentConfig) {
if (currentConfig == null) {
if (!NativeExecutor.isWindows()) {
playButton.setTooltip(settingsController.createTooltip(Constants.TOOLTIP_PLAYBUTTON_NULL, 50));
}
} else {
if (!NativeExecutor.isWindows()) {
playButton.setTooltip(settingsController.createTooltip(Constants.TOOLTIP_PLAYBUTTON, 200));
}
}
}
/**
* Apply the current controlImage to the play button, scaled to 80px height.
*/
private void setButtonImage() {
imageView = new ImageView(controlImage);
imageView.setFitHeight(80);
imageView.setPreserveRatio(true);
playButton.setGraphic(imageView);
}
/**
* Observable property backing the producer FPS label.
* @return producer value property
*/
public StringProperty producerValueProperty() {
return producerValue;
}
public void setProducerValue(String producerValue) {
this.producerValue.set(producerValue);
}
/**
* Observable property backing the consumer FPS label.
* @return consumer value property
*/
public StringProperty consumerValueProperty() {
return consumerValue;
}
public void setConsumerValue(String consumerValue) {
this.consumerValue.set(consumerValue);
}
}
|
#!/usr/bin/env bash
#
# Copyright (C) 2018 by eHealth Africa : http://www.eHealthAfrica.org
#
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Build, tag and push one Aether Docker image.
#   $1 - application name (docker-compose service name)
#   $2 - docker-compose file to build with
# Uses IMAGE_REPO / VERSION / TRAVIS_COMMIT from the caller's environment.
release_app () {
APP_NAME=$1
COMPOSE_PATH=$2
AETHER_APP="aether-${1}"
echo "Building Docker image ${IMAGE_REPO}/${AETHER_APP}:${VERSION}"
docker-compose -f $COMPOSE_PATH build \
--build-arg GIT_REVISION=$TRAVIS_COMMIT \
--build-arg VERSION=$VERSION \
$APP_NAME
echo "Pushing Docker image ${IMAGE_REPO}/${AETHER_APP}:${VERSION}"
docker tag ${AETHER_APP} "${IMAGE_REPO}/${AETHER_APP}:${VERSION}"
docker push "${IMAGE_REPO}/${AETHER_APP}:${VERSION}"
# "alpha" builds are development snapshots and must not move the latest tag.
if [[ $VERSION != "alpha" ]]
then
echo "Pushing Docker image ${IMAGE_REPO}/${AETHER_APP}:latest"
docker tag ${AETHER_APP} "${IMAGE_REPO}/${AETHER_APP}:latest"
docker push "${IMAGE_REPO}/${AETHER_APP}:latest"
fi
}
# Build and push all Aether Docker images (core + connect) for $VERSION.
# Requires DOCKER_HUB_USER / DOCKER_HUB_PASSWORD / TRAVIS_COMMIT in the
# environment; delegates the per-image work to release_app.
release_process () {
echo "Release version: $VERSION"
echo "Release revision: $TRAVIS_COMMIT"
# Login in dockerhub with write permissions (repos are public)
docker login -u $DOCKER_HUB_USER -p $DOCKER_HUB_PASSWORD
# Try to create the aether network+volume if they don't exist.
docker network create aether_internal 2>/dev/null || true
docker volume create aether_database_data 2>/dev/null || true
# Build dependencies
./scripts/build_aether_utils_and_distribute.sh
./scripts/build_common_and_distribute.sh
# Prepare Aether UI assets
docker-compose build ui-assets
docker-compose run ui-assets build
# Build docker images
IMAGE_REPO='ehealthafrica'
CORE_APPS=( kernel odk couchdb-sync ui )
CORE_COMPOSE='docker-compose.yml'
CONNECT_APPS=( producer )
CONNECT_COMPOSE='docker-compose-connect.yml'
for APP in "${CORE_APPS[@]}"
do
release_app $APP $CORE_COMPOSE
done
for CONNECT_APP in "${CONNECT_APPS[@]}"
do
release_app $CONNECT_APP $CONNECT_COMPOSE
done
}
# Compare two dotted version strings component-by-component.
# Returns: 0 if $1 == $2, 1 if $1 > $2, 2 if $1 < $2.
# Missing components on either side are treated as 0.
version_compare () {
if [[ $1 == $2 ]]
then
return 0
fi
# Split both versions on dots into arrays.
local IFS=.
local i ver1=($1) ver2=($2)
# fill empty fields in ver1 with zeros
for ((i=${#ver1[@]}; i<${#ver2[@]}; i++))
do
ver1[i]=0
done
for ((i=0; i<${#ver1[@]}; i++))
do
if [[ -z ${ver2[i]} ]]
then
# fill empty fields in ver2 with zeros
ver2[i]=0
fi
# "10#" forces base-10 so components with leading zeros aren't octal.
if ((10#${ver1[i]} > 10#${ver2[i]}))
then
return 1
fi
if ((10#${ver1[i]} < 10#${ver2[i]}))
then
return 2
fi
done
return 0
}
# Usage: increment_version <version> [<position>]
# Echo <version> with the numeric component at 1-based <position> bumped by
# one (zero-padded to its original width); without <position> the last
# component is bumped. Implemented with perl regexes.
# NOTE(review): not called from the release flow visible in this file —
# confirm before changing or removing.
increment_version() {
local v=$1
if [ -z $2 ]; then
local rgx='^((?:[0-9]+\.)*)([0-9]+)($)'
else
local rgx='^((?:[0-9]+\.){'$(($2-1))'})([0-9]+)(\.|$)'
# Pad the version with ".0" until it has at least <position> components.
for (( p=`grep -o "\."<<<".$v"|wc -l`; p<$2; p++)); do
v+=.0; done;
fi
# Extract the targeted component, then substitute it incremented and
# left-padded to the same width.
val=`echo -e "$v" | perl -pe 's/^.*'$rgx'.*$/$2/'`
echo "$v" | perl -pe s/$rgx.*$'/${1}'`printf %0${#val}s $(($val+1))`/
}
# Unify the VERSION file with the branch/tag version, commit and push the
# bump, run the release, and propagate the new version to develop.
#   $1 - version read from the VERSION file
#   $2 - version taken from the git tag / release branch name
#   $3 - label for log messages ("tag" or "branch")
#   $4 - optional version suffix appended to the release (e.g. "rc")
function git_branch_commit_and_release() {
local BRANCH_OR_TAG_VALUE=$2
version_compare $1 $2
COMPARE=$?
if [[ ${COMPARE} = 1 ]]
then
echo "VERSION value" $1 "is greater than" $3 "version" $2
elif [[ ${COMPARE} = 2 ]]
then
echo "VERSION value" $1 "is less than" $3 "version" $2
fi
# Pad the version with ".0" until it has four components.
for (( p=`grep -o "\."<<<".$BRANCH_OR_TAG_VALUE"|wc -l`; p<3; p++)); do
BRANCH_OR_TAG_VALUE+=.0;
done;
echo "Setting VERSION to " ${BRANCH_OR_TAG_VALUE}
git checkout "$TRAVIS_BRANCH"
echo ${BRANCH_OR_TAG_VALUE} > VERSION
git add VERSION
# make Travis CI skip this build
git commit -m "Version updated to ${BRANCH_OR_TAG_VALUE} [ci skip]"
local REMOTE=origin
if [[ $GITHUB_TOKEN ]]; then
REMOTE=https://$GITHUB_TOKEN@github.com/$TRAVIS_REPO_SLUG
else
echo "Missing environment variable GITHUB_TOKEN=[GitHub Personal Access Token]"
exit 1
fi
# Push exactly once and fail loudly if the push is rejected.
# (Fixes the previous double push: an unchecked push followed by a second,
# checked quiet push, so a failure of the real push was never detected and
# a successful one was pushed twice.)
if ! git push --follow-tags "$REMOTE" "$TRAVIS_BRANCH"; then
echo "Failed to push git changes to" $TRAVIS_BRANCH
exit 1
fi
# Optional suffix (e.g. "rc") for release-branch builds.
if [ ! -z $4 ]; then
VERSION=${BRANCH_OR_TAG_VALUE}-$4
fi
echo "Starting version" ${VERSION} "release"
# NOTE(review): the actual release is disabled — confirm this is intended.
# release_process
# Update develop VERSION value to match the latest released version
git fetch ${REMOTE} develop
git branch develop FETCH_HEAD
git checkout develop
DEV_VERSION=`cat VERSION`
version_compare ${DEV_VERSION} ${BRANCH_OR_TAG_VALUE}
COMPARE=$?
if [[ ${COMPARE} = 2 ]]
then
echo "Updating develop branch version to " ${BRANCH_OR_TAG_VALUE}
echo ${BRANCH_OR_TAG_VALUE} > VERSION
git add VERSION
git commit -m "Version updated to ${BRANCH_OR_TAG_VALUE} [ci skip]" #Skip travis build on develop commit
git push ${REMOTE} develop
else
echo "Develop branch VERSION value is not updated"
echo "New VERSION ${BRANCH_OR_TAG_VALUE} is same or less than develop VERSION ${DEV_VERSION}"
fi
}
# release version depending on TRAVIS_BRANCH/ TRAVIS_TAG
# - numeric tag (x.y[.z...]):       final release, version from the tag
# - release-x.y[.z...] branch:      release candidate ("rc" suffix)
# - develop branch:                 "alpha" snapshot
# - anything else:                  skipped
if [[ $TRAVIS_TAG =~ ^[0-9]+\.[0-9]+[\.0-9]*$ ]]
then
VERSION=$TRAVIS_TAG
FILE_VERSION=`cat VERSION`
# Release with unified branch and file versions
git_branch_commit_and_release ${FILE_VERSION} $TRAVIS_TAG tag
exit 0
elif [[ $TRAVIS_BRANCH =~ ^release\-[0-9]+\.[0-9]+[\.0-9]*$ ]]
then
VERSION=`cat VERSION`
FILE_VERSION=${VERSION}
# Extract the numeric part after "release-".
IFS=- read -a ver_number <<< "$TRAVIS_BRANCH"
BRANCH_VERSION=${ver_number[1]}
# Release with unified branch and file versions
git_branch_commit_and_release ${FILE_VERSION} ${BRANCH_VERSION} branch rc
exit 0
elif [[ $TRAVIS_BRANCH = "develop" ]]
then
VERSION='alpha'
# NOTE(review): alpha releases are currently disabled (call commented
# out) — confirm whether this is intentional.
# release_process
else
echo "Skipping a release because this branch is not permitted: ${TRAVIS_BRANCH}"
exit 0
fi
|
# Data-preparation parameters.
# for preparation
#numV=70000
numV=180000
NUM_OF_PARTITIONS=20
# mu/sigma — presumably parameters of the generated rating distribution;
# verify against the consuming script.
mu=4.0
sigma=1.3
# Run/training parameters.
# for running
NUM_ITERATION=1
RANK=50
# Rating value bounds.
MINVAL=0.0
MAXVAL=5.0
# GAMMA* — presumably step-size/regularization constants of the
# factorization model; verify against the consuming script.
GAMMA1=0.007
GAMMA2=0.007
GAMMA6=0.005
GAMMA7=0.015
SPARK_STORAGE_MEMORYFRACTION=0.5
#SPARK_RDD_COMPRESS=true
#SPARK_IO_COMPRESSION_CODEC=lz4
|
def multiply(x, y):
    """Multiply x by y using repeated addition.

    Fixes the original behavior for negative multipliers: ``range(y)`` is
    empty when y < 0, so the function silently returned 0 instead of the
    product.

    Args:
        x: value to add repeatedly.
        y: integer multiplier; may be negative or zero.

    Returns:
        x added to itself |y| times, negated when y is negative.
    """
    result = 0
    for _ in range(abs(y)):
        result += x
    # Restore the sign dropped by abs() above.
    return -result if y < 0 else result
def power(x, y):
    """Raise x to the non-negative integer power y by repeated multiplication.

    Args:
        x: base value.
        y: non-negative integer exponent.

    Returns:
        x ** y (1 when y == 0).

    Raises:
        ValueError: if y is negative. The original implementation silently
            returned 1 for negative exponents, which is incorrect.
    """
    if y < 0:
        raise ValueError("negative exponents are not supported")
    result = 1
    for _ in range(y):
        result *= x
    return result
#!/usr/bin/env nix-shell
#! nix-shell -j 4 -i bash -p stack git
#! nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/cb90e6a0361554d01b7a576af6c6fae4c28d7513.tar.gz
# Regenerate the Explorer Web API HTML docs from the Swagger spec and
# publish them to the bccdocs.com repository (master branch).
export NIX_PATH=nixpkgs=https://github.com/NixOS/nixpkgs/archive/cb90e6a0361554d01b7a576af6c6fae4c28d7513.tar.gz
set -euo pipefail
echo "Bcc SL Explorer Web API updating"
readonly BCC_DOCS_REPO="${HOME}"/bccdocs
readonly SWAGGER_EXPLORER_API_JSON_SPEC=explorer-web-api-swagger.json
readonly EXPLORER_API_PRODUCED_ROOT=explorer-web-api
readonly EXPLORER_API_HTML=index.html
readonly EXPLORER_API_ROOT=technical/explorer/api
echo "**** 1. Get Swagger-specification for explorer web API ****"
stack exec --nix -- bcc-explorer-swagger
# Done, 'SWAGGER_EXPLORER_API_JSON_SPEC' file is already here.
echo "**** 2. Convert JSON with Swagger-specification to HTML ****"
nix-shell -p nodejs-7_x --run "npm install bootprint bootprint-openapi html-inline"
# We need add it in PATH to run it.
PATH=$PATH:$(pwd)/node_modules/.bin
nix-shell -p nodejs-7_x --run "bootprint openapi ${SWAGGER_EXPLORER_API_JSON_SPEC} ${EXPLORER_API_PRODUCED_ROOT}"
# Inline all assets into a single self-contained HTML file.
nix-shell -p nodejs-7_x --run "html-inline ${EXPLORER_API_PRODUCED_ROOT}/${EXPLORER_API_HTML} > ${EXPLORER_API_HTML}"
echo "**** 3. Cloning bccdocs.com repository ****"
# Variable ${GITHUB_BCC_DOCS_ACCESS_2} must be set by the CI system.
# This token gives us an ability to push into docs repository.
rm -rf "${BCC_DOCS_REPO}"
# We need `master` only, because Jekyll builds docs from `master` branch.
git clone --quiet --branch=master \
https://"${GITHUB_BCC_DOCS_ACCESS_2}"@github.com/The-Blockchain-Company/bccdocs.com \
"${BCC_DOCS_REPO}"
echo "**** 4. Copy (probably new) version of docs ****"
mv "${EXPLORER_API_HTML}" "${BCC_DOCS_REPO}"/"${EXPLORER_API_ROOT}"/
echo "**** 5. Push all changes ****"
cd "${BCC_DOCS_REPO}"
git add .
# Only commit/push when the generated docs actually changed.
if [ -n "$(git status --porcelain)" ]; then
echo " There are changes in Explorer Web API docs, push it";
git commit -a -m "Automatic Explorer Web API docs rebuilding."
# Force-push: the docs repo master is presumably treated as generated
# output — confirm before relying on its history.
git push --force origin master
# After we push new docs in `master`,
# Jekyll will automatically rebuild it on bccdocs.com website.
else
echo " No changes in Explorer Web API docs, skip.";
fi
|
import Vue from "vue";
// Generated type declaration (.d.ts) for a toast-like Vue component with a
// progress bar. The ExtendedVue type parameter sections below are, in order:
// data, methods, computed, props — inferred from vue's ExtendedVue signature;
// confirm against the source component.
declare const _default: import("vue/types/vue").ExtendedVue<Vue, {
hasClass: boolean;
}, {
animationEnded(): void;
}, {
style: {
animationDuration: string;
animationPlayState: string;
opacity: number;
};
cpClass: string;
}, {
timeout: number | false;
hideProgressBar: boolean;
isRunning: boolean;
}>;
export default _default;
|
<filename>src/Constants/constant.js
// Width (px) of the navigation drawer.
export const drawerWidth=240;
// Form-validation messages.
export const REQUIRED_ERROR="Required";
export const INVALID_EMAIL="Invalid Email";
// Alphanumeric-only input. Fixed: the previous class [aA-zZ,0-9] used the
// broken A-z range (which also matches [ \ ] ^ _ ` ) and a stray comma.
export const REGREX=/^[a-zA-Z0-9]+$/;
// Fixed a stray leading apostrophe in the message text.
export const SPECIAL_CHAR_ERROR="No Special Character allowed"
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <hr>
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* This file has been modified by the OpenOLAT community. Changes are licensed
* under the Apache 2.0 license as the original file.
*/
package org.olat.upgrade;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import org.olat.test.OlatTestCase;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
public class UpgradeDefinitionTest extends OlatTestCase {
@Autowired @Qualifier("databaseUpgrades")
private UpgradesDefinitions upgradesDefinitions;
/**
 * Verifies that every alter-DB script referenced by an upgrade definition
 * is reachable on the classpath under /database/mysql/.
 */
@Test
public void testFileResourceFromClasspath() {
// Iterate the injected definitions directly (removed the redundant
// local alias of the autowired field).
for(OLATUpgrade upgrade: upgradesDefinitions.getUpgrades()) {
String path = "/database/mysql/"+upgrade.getAlterDbStatements();
Resource file = new ClassPathResource(path);
assertTrue("file not found: "+path, file.exists());
}
}
}
|
// Copyright (c) 2021 BlockDev AG
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
package proposal
import (
"time"
"github.com/rs/zerolog/log"
"github.com/mysteriumnetwork/discovery/proposal/metrics"
v3 "github.com/mysteriumnetwork/discovery/proposal/v3"
"github.com/mysteriumnetwork/discovery/quality"
)
// Service combines the proposal repository with the quality oracle,
// serving proposal listings enhanced with quality metrics.
type Service struct {
*Repository
qualityService *quality.Service
// shutdown signals StartExpirationJob to stop (see Shutdown).
shutdown chan struct{}
}
// NewService constructs a proposal Service backed by the given repository
// and quality oracle. The shutdown channel is initialized here: previously
// it was left nil, so Shutdown's send and StartExpirationJob's receive
// would both block forever and shutdown could never complete.
func NewService(repository *Repository, qualityService *quality.Service) *Service {
	return &Service{
		Repository:     repository,
		qualityService: qualityService,
		shutdown:       make(chan struct{}),
	}
}
// ListOpts are the filter options accepted by List and ListCountriesNumbers.
// The quality-related fields (qualityMin, includeMonitoringFailed,
// natCompatibility) are applied after the repository query, during metric
// enhancement; the remaining fields map directly onto repoListOpts.
type ListOpts struct {
// from is passed to the quality oracle — presumably the requesting
// consumer's country code; confirm against callers.
from string
providerIDS []string
serviceType string
locationCountry string
ipType string
accessPolicy string
accessPolicySource string
compatibilityMin int
compatibilityMax int
qualityMin float64
tags string
includeMonitoringFailed bool
natCompatibility string
}
// List returns the proposals matching opts, enhanced with quality-oracle
// metrics and filtered by monitoring status, NAT compatibility and the
// minimum quality threshold.
func (s *Service) List(opts ListOpts) []v3.Proposal {
proposals := s.Repository.List(repoListOpts{
providerIDS: opts.providerIDS,
serviceType: opts.serviceType,
country: opts.locationCountry,
ipType: opts.ipType,
accessPolicy: opts.accessPolicy,
accessPolicySource: opts.accessPolicySource,
compatibilityMin: opts.compatibilityMin,
compatibilityMax: opts.compatibilityMax,
tags: opts.tags,
})
// Quality metrics are fetched relative to opts.from before filtering.
or := &metrics.OracleResponses{}
or.Load(s.qualityService, opts.from)
return metrics.EnhanceWithMetrics(proposals, or.QualityResponse, metrics.Filters{
IncludeMonitoringFailed: opts.includeMonitoringFailed,
NATCompatibility: opts.natCompatibility,
QualityMin: opts.qualityMin,
})
}
// Metadata returns repository metadata enhanced with quality responses.
// NOTE(review): the oracle is queried with a hard-coded "US" here, unlike
// List which uses opts.from — confirm this is intentional.
func (s *Service) Metadata(opts repoMetadataOpts) []v3.Metadata {
or := &metrics.OracleResponses{}
or.Load(s.qualityService, "US")
return s.Repository.Metadata(opts, or.QualityResponse)
}
// ListCountriesNumbers returns a per-country proposal count for the given
// filters (map key is presumably a country code — confirm in Repository).
// Unlike List, the quality-based fields of opts are not applied here.
func (s *Service) ListCountriesNumbers(opts ListOpts) map[string]int {
return s.Repository.ListCountriesNumbers(repoListOpts{
providerIDS: opts.providerIDS,
serviceType: opts.serviceType,
country: opts.locationCountry,
ipType: opts.ipType,
accessPolicy: opts.accessPolicy,
accessPolicySource: opts.accessPolicySource,
compatibilityMin: opts.compatibilityMin,
compatibilityMax: opts.compatibilityMax,
tags: opts.tags,
})
}
// StartExpirationJob periodically expires stale proposals until a signal
// arrives on the shutdown channel. It loops forever, so it must run in its
// own goroutine. expirationJobDelay is not declared on Service —
// presumably promoted from the embedded Repository; confirm.
func (s *Service) StartExpirationJob() {
for {
select {
case <-time.After(s.expirationJobDelay):
count := s.Repository.Expire()
log.Debug().Msgf("Expired proposals: %v", count)
case <-s.shutdown:
return
}
}
}
// Shutdown stops StartExpirationJob by sending on the shutdown channel.
// If the channel is unbuffered (no make with a buffer is visible here),
// this blocks until the job receives the signal.
func (s *Service) Shutdown() {
s.shutdown <- struct{}{}
}
|
const postgres = require('postgres')
const fs = require('fs')
// Database connection configured from the standard PG* environment
// variables. Fixed: the password was a literal `<PASSWORD>` redaction
// placeholder (a syntax error); it is now read from the environment like
// the other credentials.
const sql = postgres({
  host: process.env.PGHOST,
  database: process.env.PGDATABASE,
  password: process.env.PGPASSWORD,
  username: process.env.PGUSER,
  port: process.env.PGPORT,
  ssl: 'prefer',
  max: 2,
  connection: {
    application_name: 'script',
  },
  idle_timeout: 30, // seconds of inactivity before a connection is closed
})
// Matches ENS-style mentions: ".eth" (including common unicode
// look-alikes), the diamond glyph, or "dot eth" — case-insensitive.
let ens_regex = /\.eth|\.𝚎𝚝𝚑|\.Ξth|\.⟠|dot eth/i
// Accumulates accounts whose profile matched ens_regex.
let ens_fam = []
// Scan one batch of twitter accounts and append every account whose name,
// bio or location mentions ENS (per ens_regex) to the ens_fam collection.
function process_batch(accounts) {
  for (const account of accounts) {
    const profile_fields = [account.name, account.bio, account.location]
    const mentions_ens = profile_fields.some((field) => field?.match(ens_regex))
    if (mentions_ens) {
      ens_fam.push({
        id: account.twitter_id,
        handle: account.handle,
        name: account.name,
        bio: account.bio,
        location: account.location,
      })
    }
  }
}
// Stream all twitter accounts from the database in 10k-row batches,
// collect the ENS-related ones via process_batch, then dump them to
// ./processed/ens_fam.json.
(async function() {
let total_processed = 0
await sql`
SELECT * FROM twitter_accounts ORDER BY handle
`.cursor(10000, async (batch) => {
process_batch(batch)
total_processed += batch.length
console.log(total_processed, 'processed')
})
console.log(ens_fam.length)
// Pretty-print with single-space indentation.
fs.writeFile('./processed/ens_fam.json', JSON.stringify(ens_fam, null, ' '), function (err) {
if (err) console.log(err)
})
}())
|
package study.jspview.person.servlet.listener;
import lombok.extern.slf4j.Slf4j;
import javax.servlet.http.HttpSessionActivationListener;
import javax.servlet.http.HttpSessionEvent;
@Slf4j
//@WebListener FIXME: disabled due to a Wildfly bug — registering this listener via @WebListener failed deployment with "must implement at least one listener interface"
/**
 * Logs HTTP session passivation/activation events (fired by the servlet
 * container, e.g. when sessions are persisted or migrated).
 */
public class HttpSessionActivationListenerImpl implements HttpSessionActivationListener {
@Override
public void sessionWillPassivate(HttpSessionEvent se) {
log.info("Session {} will passivate.", se.getSession().getId());
}
@Override
public void sessionDidActivate(HttpSessionEvent se) {
log.info("Session {} did activate.", se.getSession().getId());
}
}
|
package mindustry;
import java.io.IOException;
import java.io.Writer;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Locale;
import arc.Application.ApplicationType;
import arc.Core;
import arc.Events;
import arc.Settings;
import arc.assets.Loadable;
import arc.files.Fi;
import arc.graphics.Color;
import arc.graphics.g2d.TextureAtlas;
import arc.scene.ui.layout.Scl;
import arc.struct.Array;
import arc.util.I18NBundle;
import arc.util.Log;
import arc.util.Log.LogHandler;
import arc.util.Structs;
import arc.util.Time;
import arc.util.io.DefaultSerializers;
import arc.util.serialization.JsonReader;
import arc.util.serialization.XmlReader;
import games.stendhal.client.gui.j2DClient;
import games.stendhal.client.listener.PositionChangeMulticaster;
import mindustry.ai.BlockIndexer;
import mindustry.ai.Pathfinder;
import mindustry.ai.WaveSpawner;
import mindustry.core.ContentLoader;
import mindustry.core.Control;
import mindustry.core.FileTree;
import mindustry.core.GameState;
import mindustry.core.Logic;
import mindustry.core.NetClient;
import mindustry.core.NetServer;
import mindustry.core.Platform;
import mindustry.core.Renderer;
import mindustry.core.UI;
import mindustry.core.Version;
import mindustry.core.World;
import mindustry.entities.Entities;
import mindustry.entities.EntityCollisions;
import mindustry.entities.EntityGroup;
import mindustry.entities.effect.Fire;
import mindustry.entities.effect.Puddle;
import mindustry.entities.traits.DrawTrait;
import mindustry.entities.traits.SyncTrait;
import mindustry.entities.type.BaseUnit;
import mindustry.entities.type.Bullet;
import mindustry.entities.type.EffectEntity;
import mindustry.entities.type.Player;
import mindustry.entities.type.TileEntity;
import mindustry.game.DefaultWaves;
import mindustry.game.EventType.ClientLoadEvent;
import mindustry.game.GlobalData;
import mindustry.game.LoopControl;
import mindustry.game.Schematics;
import mindustry.gen.Serialization;
import mindustry.input.Binding;
import mindustry.maps.Maps;
import mindustry.mod.Mods;
import mindustry.net.BeControl;
import mindustry.net.Net;
import mindustry.world.blocks.defense.ForceProjector.ShieldEntity;
import stendhal.test.BaseInput;
import stendhal.test.T_ClientApplication;
import stendhal.test.T_GameScreen;
import stendhal.test.T_InputHandler;
import temp.Debug;
import z.ai.units.StrategySystem;
import z.entities.type.base.BlockUnit;
import z.system.GroundSystem;
import z.system.ItemsSystem;
import z.system.TroopsSystem;
import z.system.WorkerSystem;
import z.tools.serialize.XmlSerialize;
import static arc.Core.settings;
public class Vars implements Loadable{
/** Whether to load locales.*/
public static boolean loadLocales = true;
/** Whether the logger is loaded. */
public static boolean loadedLogger = false, loadedFileLogger = false;
/** Maximum schematic size.*/
public static final int maxSchematicSize = 32;
/** All schematic base64 starts with this string.*/
public static final String schematicBaseStart ="bXNjaAB";
/** IO buffer size. */
public static final int bufferSize = 8192;
/** global charset, since Android doesn't support the Charsets class */
public static final Charset charset = Charset.forName("UTF-8");
/** main application name, capitalized */
public static final String appName = "Stendhal";
/***/
public static final String stendhalURL = "https://stendhalgame.org/wiki/Stendhal_Manual";
/***/
public static final String githubURL = "https://github.com/zonesgame/StendhalArcClient.git";
/** URL for itch.io donations. */
public static final String donationURL = "https://anuke.itch.io/mindustry/purchase";
/** URL for discord invite. */
public static final String discordURL = "https://discord.gg/mindustry";
/** URL for sending crash reports to */
public static final String crashReportURL = "http://192.99.169.18/report";
/** URL that links to the wiki's modding guide.*/
public static final String modGuideURL = "https://mindustrygame.github.io/wiki/modding/";
/** URL to the JSON file containing all the global, public servers. Not queried in BE. */
public static final String serverJsonURL = "https://raw.githubusercontent.com/Anuken/Mindustry/master/servers.json";
/** URL to the JSON file containing all the BE servers. Only queried in BE. */
public static final String serverJsonBeURL = "https://raw.githubusercontent.com/Anuken/Mindustry/master/servers_be.json";
/** URL of the github issue report template.*/
public static final String reportIssueURL = "https://github.com/Anuken/Mindustry/issues/new?template=bug_report.md";
/** list of built-in servers; empty by default.*/
public static final Array<String> defaultServers = Array.with();
/** maximum distance between mine and core that supports automatic transferring */
public static final float mineTransferRange = 220f;
/** whether to enable editing of units in the editor */
public static final boolean enableUnitEditing = false;
/** max chat message length */
public static final int maxTextLength = 150;
/** max player name length in bytes */
public static final int maxNameLength = 40;
/** displayed item size when ingame, TODO remove. */
public static final float itemSize = 5f;
/** extra padding around the world; units outside this bound will begin to self-destruct. */
public static final float worldBounds = 100f;
/** units outside of this bound will simply die instantly */
public static final float finalWorldBounds = worldBounds + 500;
/** ticks spent out of bound until self destruct. */
public static final float boundsCountdown = 60 * 7;
/** for map generator dialog */
public static boolean updateEditorOnChange = false;
/** size of tiles in units */
public static final int tilesize = 8;
/** all choosable player colors in join/host dialog */
public static final Color[] playerColors = {
Color.valueOf("82759a"),
Color.valueOf("c0c1c5"),
Color.valueOf("ffffff"),
Color.valueOf("7d2953"),
Color.valueOf("ff074e"),
Color.valueOf("ff072a"),
Color.valueOf("ff76a6"),
Color.valueOf("a95238"),
Color.valueOf("ffa108"),
Color.valueOf("feeb2c"),
Color.valueOf("ffcaa8"),
Color.valueOf("008551"),
Color.valueOf("00e339"),
Color.valueOf("423c7b"),
Color.valueOf("4b5ef1"),
Color.valueOf("2cabfe"),
};
/** default server port */
public static final int port = 6567;
/** multicast discovery port.*/
public static final int multicastPort = 20151;
/** multicast group for discovery.*/
public static final String multicastGroup = "172.16.58.3";
/** if true, UI is not drawn */
public static boolean disableUI;
/** if true, game is set up in mobile mode, even on desktop. used for debugging */
public static boolean testMobile = false;
/** whether the game is running on a mobile device */
public static boolean mobile = false;
/** whether the game is running on an iOS device */
public static boolean ios;
/** whether the game is running on an Android device */
public static boolean android;
/** whether the game is running on a headless server */
public static boolean headless;
/** whether steam is enabled for this game */
public static boolean steam;
/** whether typing into the console is enabled - developers only */
public static boolean enableConsole = false;
/** application data directory, equivalent to {@link Settings#getDataDirectory()} */
public static Fi dataDirectory;
/** data subdirectory used for screenshots */
public static Fi screenshotDirectory;
/** data subdirectory used for custom maps */
public static Fi customMapDirectory;
/** data subdirectory used for custom map previews */
public static Fi mapPreviewDirectory;
/** tmp subdirectory for map conversion */
public static Fi tmpDirectory;
/** data subdirectory used for saves */
public static Fi saveDirectory;
/** data subdirectory used for mods */
public static Fi modDirectory;
/** data subdirectory used for schematics */
public static Fi schematicDirectory;
/** data subdirectory used for bleeding edge build versions */
public static Fi bebuildDirectory;
/** map file extension */
public static final String mapExtension = "msav";
/** save file extension */
public static final String saveExtension = "msav";
/** schematic file extension */
public static final String schematicExtension = "msch";
/** list of all locales that can be switched to */
public static Locale[] locales;
// Global service singletons; most are assigned in init() below.
public static FileTree tree;
public static Net net;
public static ContentLoader content;
public static GameState state;
public static GlobalData data;
public static EntityCollisions collisions;
public static DefaultWaves defaultWaves;
public static LoopControl loops;
public static Platform platform = new Platform(){};
public static Mods mods;
public static Schematics schematics = new Schematics();
public static BeControl becontrol;
public static World world;
public static Maps maps;
public static WaveSpawner spawner;
public static BlockIndexer indexer;
public static Pathfinder pathfinder;
public static Control control;
public static Logic logic;
public static Renderer renderer;
public static UI ui;
public static NetServer netServer;
public static NetClient netClient;
// Entity groups, one per entity type; their initialization is currently
// commented out in init().
public static Entities entities;
public static EntityGroup<Player> playerGroup;
public static EntityGroup<TileEntity> tileGroup;
public static EntityGroup<Bullet> bulletGroup;
public static EntityGroup<EffectEntity> effectGroup;
public static EntityGroup<DrawTrait> groundEffectGroup;
public static EntityGroup<ShieldEntity> shieldGroup;
public static EntityGroup<Puddle> puddleGroup;
public static EntityGroup<Fire> fireGroup;
public static EntityGroup<BaseUnit> unitGroup;
// zones add begin (custom additions)
public static EntityGroup<BlockUnit> blockunitGroup;
// public static WorkerPool<BlockUnit> workerPool;
// zones add end
public static Player player;
/**
 * Async load step: loads settings, then initializes the global state.
 * When the Debug.NOTE2 flag is set, also starts the debug client UI.
 */
@Override
public void loadAsync(){
// if (Debug.TEMP) {
// loadSettings();
// return;
// }
loadSettings();
init();
if (Debug.NOTE2) {
new j2DClient();
}
}
/**
 * Initializes global services, locales, platform flags and data directories.
 * Note: statement order matters — e.g. modDirectory is created before
 * mods.load() at the end, and serialization is set up before anything else.
 */
public static void init(){
// zones add begin (custom additions)
xmlReader = new XmlReader();
jsonReader = new JsonReader();
xmlSerialize = new XmlSerialize();
// zones add end
Serialization.init();
// Remap the old ContentType class name so legacy serialized data still loads.
DefaultSerializers.typeMappings.put("mindustry.type.ContentType", "mindustry.ctype.ContentType");
if(loadLocales){
//load locales from the bundled list, one locale code per line
String[] stra = Core.files.internal("locales").readString().split("\n");
locales = new Locale[stra.length];
for(int i = 0; i < locales.length; i++){
String code = stra[i];
if(code.contains("_")){
// "language_COUNTRY" style code
locales[i] = new Locale(code.split("_")[0], code.split("_")[1]);
}else{
locales[i] = new Locale(code);
}
}
Arrays.sort(locales, Structs.comparing(l -> l.getDisplayName(l), String.CASE_INSENSITIVE_ORDER));
}
Version.init();
// tree/mods may already be set by the platform launcher; only create if absent.
if(tree == null) tree = new FileTree();
if(mods == null) mods = new Mods();
content = new ContentLoader();
loops = new LoopControl();
defaultWaves = new DefaultWaves();
collisions = new EntityCollisions();
world = new World();
becontrol = new BeControl();
// maps = new Maps();
// spawner = new WaveSpawner();
// indexer = new BlockIndexer();
// pathfinder = new Pathfinder();
// entities = new Entities();
// playerGroup = entities.add(Player.class).enableMapping();
// tileGroup = entities.add(TileEntity.class, false);
// bulletGroup = entities.add(Bullet.class).enableMapping();
// effectGroup = entities.add(EffectEntity.class, false);
// groundEffectGroup = entities.add(DrawTrait.class, false);
// puddleGroup = entities.add(Puddle.class).enableMapping();
// shieldGroup = entities.add(ShieldEntity.class, false);
// fireGroup = entities.add(Fire.class).enableMapping();
// unitGroup = entities.add(BaseUnit.class).enableMapping();
// zones add begin (custom additions)
// squadIDPro = IntArray.with(7, 6, 5, 4, 3, 2, 1, 0);
// squadGroup = new Squad[Team.all().length][FinalCons.max_squad_count];
// for (int t = 0; t < squadGroup.length; t++) { // temp code
// for (int s = 0, len = squadGroup[t].length; s < len; s++) {
// int teamid = Team.all()[t].id;
// squadGroup[teamid][s] = new Squad(teamid, s);
// }
// }
// blockunitGroup = entities.add(BlockUnit.class).enableMapping();
// workerPool = new WorkerPool<BlockUnit>();
// zones add end
// for(EntityGroup<?> group : entities.all()){
// group.setRemoveListener(entity -> {
// if(entity instanceof SyncTrait && net.client()){
// netClient.addRemovedEntity((entity).getID());
// }
// });
// }
state = new GameState();
data = new GlobalData();
// Derive platform flags from the application type; testMobile forces mobile mode.
mobile = Core.app.getType() == ApplicationType.Android || Core.app.getType() == ApplicationType.iOS || testMobile;
ios = Core.app.getType() == ApplicationType.iOS;
android = Core.app.getType() == ApplicationType.Android;
dataDirectory = Core.settings.getDataDirectory();
screenshotDirectory = dataDirectory.child("screenshots/");
customMapDirectory = dataDirectory.child("maps/");
mapPreviewDirectory = dataDirectory.child("previews/");
saveDirectory = dataDirectory.child("saves/");
tmpDirectory = dataDirectory.child("tmp/");
modDirectory = dataDirectory.child("mods/");
schematicDirectory = dataDirectory.child("schematics/");
bebuildDirectory = dataDirectory.child("be_builds/");
// mods.load() scans modDirectory, so the directory must exist first.
modDirectory.mkdirs();
mods.load();
// maps.load();
}
/**
 * Installs the console logger. Each message is printed to stdout with
 * terminal color tags, and mirrored into the in-game script console with
 * UI color tags. Messages logged before the UI exists are buffered and
 * flushed once on ClientLoadEvent. Idempotent via the loadedLogger flag.
 */
public static void loadLogger(){
if(loadedLogger) return;
// UI color tags, indexed by log level ordinal.
String[] tags = {"[green][D][]", "[royal][I][]", "[yellow][W][]", "[scarlet][E][]", ""};
// Terminal color tags, same indexing.
String[] stags = {"&lc&fb[D]", "&lg&fb[I]", "&ly&fb[W]", "&lr&fb[E]", ""};
Array<String> logBuffer = new Array<>();
Log.setLogger((level, text) -> {
String result = text;
String rawText = Log.format(stags[level.ordinal()] + "&fr " + text);
System.out.println(rawText);
result = tags[level.ordinal()] + " " + result;
if(!headless && (ui == null || ui.scriptfrag == null)){
// UI not ready yet — buffer until ClientLoadEvent fires.
logBuffer.add(result);
}else if(!headless){
ui.scriptfrag.addMessage(result);
}
});
Events.on(ClientLoadEvent.class, e -> logBuffer.each(ui.scriptfrag::addMessage));
loadedLogger = true;
}
/**
 * Chains a file logger in front of the currently installed logger:
 * every message is forwarded to the previous logger and also appended
 * (with color codes stripped) to last_log.txt in the data directory.
 * The writer is never closed here — presumably kept open for the app's
 * lifetime; TODO confirm. Idempotent via the loadedFileLogger flag.
 */
public static void loadFileLogger(){
if(loadedFileLogger) return;
Core.settings.setAppName(appName);
// writer(false) truncates any previous log.
Writer writer = settings.getDataDirectory().child("last_log.txt").writer(false);
LogHandler log = Log.getLogger();
Log.setLogger(((level, text) -> {
// Delegate to the previous logger first.
log.log(level, text);
try{
writer.write("[" + Character.toUpperCase(level.name().charAt(0)) +"] " + Log.removeCodes(text) + "\n");
writer.flush();
}catch(IOException e){
e.printStackTrace();
//ignore it
}
}));
loadedFileLogger = true;
}
/**
 * Loads application settings, keybinds and the translation bundle.
 * Bundle resolution: an external "bundle" file in the local directory wins;
 * if loading it throws, the internal bundle is used with the locale chosen
 * from the "locale" setting (try/catch is deliberately used as the fallback
 * control flow here).
 */
public static void loadSettings(){
Core.settings.setAppName(appName);
// Steam builds keep saves next to the executable instead of the OS data dir.
if(steam || (Version.modifier != null && Version.modifier.contains("steam"))){
Core.settings.setDataDirectory(Core.files.local("saves/"));
}
Core.settings.defaults("locale", "default", "blocksync", true);
Core.keybinds.setDefaults(Binding.values());
Core.settings.load();
Scl.setProduct(settings.getInt("uiscale", 100) / 100f);
if(!loadLocales) return;
try{
//try loading external bundle
Fi handle = Core.files.local("bundle");
Locale locale = Locale.ENGLISH;
Core.bundle = I18NBundle.createBundle(handle, locale);
Log.info("NOTE: external translation bundle has been loaded.");
if(!headless){
Time.run(10f, () -> ui.showInfo("Note: You have successfully loaded an external translation bundle."));
}
}catch(Throwable e){
//no external bundle found — fall back to the internal one
Fi handle = Core.files.internal("bundles/bundle");
Locale locale;
String loc = Core.settings.getString("locale");
if(loc.equals("default")){
locale = Locale.getDefault();
}else{
Locale lastLocale;
if(loc.contains("_")){
// "language_COUNTRY" style locale code
String[] split = loc.split("_");
lastLocale = new Locale(split[0], split[1]);
}else{
lastLocale = new Locale(loc);
}
locale = lastLocale;
}
Locale.setDefault(locale);
Core.bundle = I18NBundle.createBundle(handle, locale);
}
}
// zones add begin (custom additions)
// /** Game squad manager */
// public static Squad<BaseUnit>[][] squadGroup;
// /** Squad formation IDs */
// public static IntArray squadIDPro;
/** Default size of one tile unit */
public static final float tileunit = 1f;
// public static Squad<BaseUnit> getSquad(int teamID, int squadID) {
//
// }
// System module singletons
public static GroundSystem systemGround;
/** Worker management module, updated by the CoreBlock block */
public static WorkerSystem systemWorker;
/** Item management system */
public static ItemsSystem systemItems;
/** Combat unit management system */
public static TroopsSystem systemTroops;
/** Squad AI control system */
public static StrategySystem systemStrategy;
// Utility helper classes
public static XmlReader xmlReader;
public static JsonReader jsonReader;
/** Serializes xml configuration file data */
public static XmlSerialize xmlSerialize;
// zones add end
public static T_ClientApplication clientScence;
public static T_GameScreen gameScreen;
public static BaseInput inputStendhal;
public static final PositionChangeMulticaster positionChangeMulticaster = new PositionChangeMulticaster();
// develop temp date end
/** Stendhal texture atlas manager */
public static TextureAtlas atlasS;
}
|
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash -e
# Runs the truss-decomposition demo pipeline on the clique10 sample graph.
#
# Abort on the first failing command even when this script is invoked as
# `bash script.sh` (the `-e` flag in the shebang line is ignored in that case,
# and shebang flags only apply when the shebang is the very first line).
set -e

# Work from the directory containing this script so relative paths resolve.
cd "$(readlink -f "$(dirname "$0")")"

echo "Compiling..."
./compile.sh

echo "Computing exact truss decomposition"
./truss_decomposition_parallel < clique10.nde

echo "Computing approximate truss decomposition"
./td_approx_external < clique10.nde

echo "Computing approximate truss decomposition and switching to exact algorithm"
./td_approx_external --edge_limit 10000 --exact_exe ./truss_decomposition_parallel < clique10.nde
|
# Employer: a Mongoid-backed employer record with enrollment state (AASM),
# contact/plan associations and merge helpers used during data imports.
class Employer
  include Mongoid::Document
  include Mongoid::Timestamps
  include Mongoid::Versioning
  include Mongoid::Paranoia
  include AASM
  include MergingModel
  extend Mongorder

  field :name, type: String
  field :hbx_id, as: :hbx_organization_id, type: String
  field :fein, type: String
  field :sic_code, type: String
  # moved
  field :open_enrollment_start, type: Date
  field :open_enrollment_end, type: Date
  field :plan_year_start, type: Date
  field :plan_year_end, type: Date
  field :fte_count, type: Integer
  field :pte_count, type: Integer
  field :aasm_state, type: String
  field :msp_count, as: :medicare_secondary_payer_count, type: Integer
  field :notes, type: String
  field :dba, type: String
  field :is_active, type: Boolean, default: true
  field :name_pfx, type: String, default: ""
  field :name_first, type: String
  field :name_middle, type: String, default: ""
  field :name_last, type: String
  field :name_sfx, type: String, default: ""
  field :name_full, type: String
  field :alternate_name, type: String, default: ""

  index({ hbx_id: 1 })
  index({ fein: 1 })

  has_many :premium_payments, order: { paid_at: 1 }
  belongs_to :broker, counter_cache: true, index: true
  has_many :plan_years
  embeds_many :elected_plans
  # has_and_belongs_to_many :employees, class_name: 'Person', inverse_of: :employers, order: {name_last: 1, name_first: 1}
  has_many :employees, class_name: 'Person', inverse_of: :employer, order: {name_last: 1, name_first: 1}
  has_and_belongs_to_many :carriers, order: { name: 1 }, inverse_of: nil
  has_and_belongs_to_many :plans, order: { name: 1, hios_plan_id: 1 }

  index({"elected_plans.carrier_employer_group_id" => 1})
  index({"elected_plans.hbx_plan_id" => 1})
  index({"elected_plans.qhp_id" => 1})

  accepts_nested_attributes_for :elected_plans, reject_if: :all_blank, allow_destroy: true
  embeds_many :addresses, :inverse_of => :employer
  accepts_nested_attributes_for :addresses, reject_if: :all_blank, allow_destroy: true
  embeds_many :phones, :inverse_of => :employer
  accepts_nested_attributes_for :phones, reject_if: :all_blank, allow_destroy: true
  embeds_many :emails, :inverse_of => :employer
  accepts_nested_attributes_for :emails, reject_if: :all_blank, allow_destroy: true

  validates_length_of :fein, allow_blank: true, allow_nil: true, minimum: 9, maximum: 9

  before_save :initialize_name_full
  before_save :invalidate_find_caches

  scope :by_name, order_by(name: 1)
  scope :by_hbx_id, lambda { |employer_hbx_id| where(:hbx_id => employer_hbx_id) }

  # All premium payment transactions recorded for this employer.
  def payment_transactions
    PremiumPayment.payment_transactions_for(self)
  end

  # def associate_all_carriers_and_plans_and_brokers
  #   self.policies.each { |pol| self.carriers << pol.carrier; self.brokers << pol.broker; self.plans << pol.plan }
  #   save!
  # end

  aasm do
    state :registered, initial: true
    state :enrollment_open
    state :enrollment_closed
    state :terminated

    event :start_enrollment do
      transitions from: [:registered, :enrollment_closed], to: :enrollment_open
    end
    event :end_enrollment do
      transitions from: :enrollment_open, to: :enrollment_closed
    end
  end

  # Normalizes the FEIN to digits only before storing; blank values are ignored.
  def fein=(val)
    return if val.blank?
    write_attribute(:fein, val.to_s.gsub(/[^0-9]/, ''))
  end

  # Clears the FEIN lookup cache; runs as a before_save callback and must
  # return a truthy value so the save is not halted.
  def invalidate_find_caches
    Rails.cache.delete("Employer/find/fein.#{fein}")
    # elected_plans.each do |ep|
    #   Rails.cache.delete("Employer/find/employer_group_ids.#{ep.carrier_id}.#{ep.carrier_employer_group_id}")
    # end
    true
  end

  # Sums the distinct premium totals of this employer's currently active
  # policies (members with no coverage_end, or coverage ending in the future)
  # and returns the amount as a "%.2f" formatted string.
  # NOTE(review): uses the legacy aggregate(*stages) call style — verify
  # against the installed Mongo driver version.
  def todays_bill
    e_id = self._id
    result = Policy.collection.aggregate(
      { "$match" => {
        "employer_id" => e_id,
        "enrollment_members" =>
        {
          "$elemMatch" => {"$or" => [{
            "coverage_end" => nil
          },
          {"coverage_end" => { "$gt" => Time.now }}
          ]}
        }
      }},
      {"$group" => {
        "_id" => "$employer_id",
        "total" => { "$addToSet" => "$pre_amt_tot" }
      }}
    ).first
    # No active policies means there is nothing to bill today.
    return "0.00" if result.nil?
    # BigDecimal() replaces BigDecimal.new, which was removed in Ruby 2.7.
    value = result["total"].inject(0.00) { |acc, item|
      acc + BigDecimal(item)
    }
    "%.2f" % value
  end

  def self.default_search_order
    [[:name, 1]]
  end

  # Builds a case-insensitive regex match against name, FEIN or HBX id.
  def self.search_hash(s_rex)
    search_rex = Regexp.compile(Regexp.escape(s_rex), true)
    {
      "$or" => ([
        {"name" => search_rex},
        {"fein" => search_rex},
        {"hbx_id" => search_rex}
      ])
    }
  end

  def self.find_for_fein(e_fein)
    # Rails.cache.fetch("Employer/find/fein.#{e_fein}") do
    Employer.where(:fein => e_fein).first
    # end
  end

  # Locates the employer owning the plan year that elected the given
  # carrier/group combination; returns nil when none matches.
  def self.find_for_carrier_and_group_id(carrier_id, group_id)
    py = PlanYear.where({ :elected_plans => {
      "$elemMatch" => {
        "carrier_id" => carrier_id,
        "carrier_employer_group_id" => group_id
      }
    }
    }).first
    Maybe.new(py).employer.value
  end

  # Appends the address/email/phone unless an equivalent one already exists.
  def merge_address(m_address)
    unless (self.addresses.any? { |p| p.match(m_address) })
      self.addresses << m_address
    end
  end

  def merge_email(m_email)
    unless (self.emails.any? { |p| p.match(m_email) })
      self.emails << m_email
    end
  end

  def merge_phone(m_phone)
    unless (self.phones.any? { |p| p.match(m_phone) })
      self.phones << m_phone
    end
  end

  # Takes the incoming broker only when the existing record has none.
  def merge_broker(existing, incoming)
    if existing.broker.nil?
      existing.broker = incoming.broker
    end
  end

  # Always overwrites the broker with the incoming one.
  def update_broker(existing, incoming)
    existing.broker = incoming.broker
  end

  # Plan year whose date range covers the given coverage start date, or nil.
  def plan_year_of(coverage_start_date)
    # The #to_a is a caching thing.
    plan_years.to_a.detect do |py|
      (py.start_date <= coverage_start_date) && (py.end_date >= coverage_start_date)
    end
  end

  def renewal_plan_year_of(coverage_start_date)
    plan_year_of(coverage_start_date + 1.year)
  end

  # Merges an incoming plan year into a matching existing one (without
  # blanking populated fields), or adopts it wholesale when no match exists.
  def merge_plan_year(incoming)
    existing = self.plan_years.detect { |py| py.match(incoming) }
    if(existing)
      existing.merge_without_blanking(incoming,
        :open_enrollment_start,
        :open_enrollment_end,
        :start_date,
        :end_date,
        :fte_count,
        :pte_count
      )
      update_broker(existing,incoming)
      EmployerElectedPlansMerger.merge(existing, incoming)
      update_carriers(existing)
    else
      update_carriers(incoming)
      incoming.employer = self
      incoming.save!
    end
  end

  # Adds the carriers referenced by the plan year's elected plans to this
  # employer's carrier list, deduplicated.
  def update_carriers(existing)
    incoming_carriers = existing.elected_plans.map { |ep| ep.plan.carrier_id }
    self.carrier_ids = (self.carrier_ids.to_a + incoming_carriers).uniq
  end

  # Rewrites the carrier group id on every elected plan belonging to the
  # given carrier, across all plan years.
  def update_all_elected_plans(carrier, g_id)
    e_plans = self.plan_years.map { |py| py.elected_plans }.flatten
    matching_plans = e_plans.select { |p| p.carrier_id == carrier._id }
    matching_plans.each do |mp|
      mp.carrier_employer_group_id = g_id
    end
  end

  # Title-cased full contact name assembled from the name parts.
  def full_name
    [name_pfx, name_first, name_middle, name_last, name_sfx].reject(&:blank?).join(' ').downcase.gsub(/\b\w/) {|first| first.upcase }
  end

  # before_save callback keeping the denormalized name_full field current.
  def initialize_name_full
    self.name_full = full_name
  end

  # Builds an (unsaved) Employer from an import data hash.
  def self.make(data)
    employer = Employer.new
    employer.name = data[:name]
    employer.fein = data[:fein]
    employer.hbx_id = data[:hbx_id]
    employer.sic_code = data[:sic_code]
    employer.notes = data[:notes]
    employer
  end

  # Overwrites the contact name parts from a name hash.
  def update_contact(contact_name)
    self.name_pfx = contact_name[:prefix]
    self.name_first = contact_name[:first]
    self.name_middle = contact_name[:middle]
    self.name_last = contact_name[:last]
    self.name_sfx = contact_name[:suffix]
  end

  class << self
    # Returns the employer with the given HBX id, saving and returning the
    # candidate when none exists yet.
    def find_or_create_employer(m_employer)
      found_employer = Employer.where(
        :hbx_id => m_employer.hbx_id
      ).first
      return found_employer unless found_employer.nil?
      m_employer.save!
      m_employer
    end

    # Same as above, but keyed on FEIN.
    def find_or_create_employer_by_fein(m_employer)
      found_employer = Employer.find_for_fein(m_employer.fein)
      return found_employer unless found_employer.nil?
      m_employer.save!
      m_employer
    end
  end
end
|
from rich import print as rprint
import shutil
from bg_atlasapi.bg_atlas import BrainGlobeAtlas, _version_str_from_tuple
from bg_atlasapi.list_atlases import get_downloaded_atlases
def update_atlas(atlas_name, force=False):
    """Updates a bg_atlasapi atlas from the latest available version online.

    Arguments:
    ----------
    atlas_name: str
        Name of the atlas to update.
    force: bool
        If False it checks if the atlas is already at the latest version
        and doesn't update if that's the case.
    """
    atlas = BrainGlobeAtlas(atlas_name=atlas_name)

    # Skip the update when the local copy is already at the latest version.
    if not force:
        if atlas.check_latest_version():
            rprint(
                f"[b][magenta2]bg_atlasapi: {atlas.atlas_name} is already updated "
                + f"(version: {_version_str_from_tuple(atlas.local_version)})[/b]"
            )
            return

    # Delete the local atlas folder before re-downloading.
    rprint(
        f"[b][magenta2]bg_atlasapi: updating {atlas.atlas_name}[/magenta2][/b]"
    )
    fld = atlas.brainglobe_dir / atlas.local_full_name
    shutil.rmtree(fld)
    if fld.exists():
        raise ValueError(
            "Something went wrong while trying to delete the old version of the atlas, aborting."
        )

    # Download the latest version again.
    atlas.download_extract_file()

    # Report the freshly installed version.
    rprint(
        f"[b][magenta2]bg_atlasapi: {atlas.atlas_name} updated to version: "
        + f"{_version_str_from_tuple(atlas.remote_version)}[/magenta2][/b]"
    )
def install_atlas(atlas_name):
    """Installs a BrainGlobe atlas from the latest
    available version online.

    Arguments
    ---------
    atlas_name : str
        Name of the atlas to update.
    """
    # Reject anything that is not an atlas name string.
    if not isinstance(atlas_name, str):
        raise ValueError(f"atlas name should be a string, not {atlas_name}")

    # Nothing to do when the atlas is already present on disk.
    if atlas_name in get_downloaded_atlases():
        rprint(
            f"[b][magenta2]Bg_atlasapi: installing {atlas_name}: atlas already installed![/magenta2][/b]"
        )
        return

    # Instantiating the atlas class triggers the download.
    BrainGlobeAtlas(atlas_name)
|
<filename>packages/react-integration/demo-app-ts/src/components/demos/TopologyDemo/components/shapes/Polygon.tsx
import { PointTuple, ShapeProps, usePolygonAnchor } from '@patternfly/react-topology';
import * as React from 'react';
const Polygon: React.FunctionComponent<ShapeProps> = ({ className, width, height, filter, dndDropRef }) => {
const points: PointTuple[] = React.useMemo(
() => [
[width / 2, 0],
[width - width / 8, height],
[0, height / 3],
[width, height / 3],
[width / 8, height]
],
[height, width]
);
usePolygonAnchor(points);
return (
<polygon
className={className}
ref={dndDropRef}
points={points.map(p => `${p[0]},${p[1]}`).join(' ')}
filter={filter}
/>
);
};
export default Polygon;
|
import type { YogaNode } from 'yoga-layout-prebuilt';
/**
 * Style properties supported by the layout engine: a subset of flexbox
 * (margins, padding, flex, alignment, sizing) plus text wrapping behavior.
 */
export interface Styles {
/** How overflowing text is wrapped or truncated. */
textWrap?: 'wrap' | 'end' | 'middle' | 'truncate-end' | 'truncate' | 'truncate-middle' | 'truncate-start';
position?: 'absolute' | 'relative';
marginTop?: number;
marginBottom?: number;
marginLeft?: number;
marginRight?: number;
paddingTop?: number;
paddingBottom?: number;
paddingLeft?: number;
paddingRight?: number;
flexGrow?: number;
flexShrink?: number;
flexDirection?: 'row' | 'column' | 'row-reverse' | 'column-reverse';
/** Number is absolute; string is presumably a percentage — confirm in applyStyles. */
flexBasis?: number | string;
alignItems?: 'flex-start' | 'center' | 'flex-end' | 'stretch';
justifyContent?: 'flex-start' | 'flex-end' | 'space-between' | 'space-around' | 'center';
width?: number | string;
height?: number | string;
minWidth?: number | string;
minHeight?: number | string;
display?: 'flex' | 'none';
}
/** Applies the given styles to a Yoga layout node; missing style means no-op. */
export declare const applyStyles: (node: YogaNode, style?: Styles) => void;
|
/**
 * Created by Xiong Chaochao on 2020/6/10.
 */
import {Module} from '@nestjs/common'
import {AuthService} from './auth.service'
import {LocalStrategy} from './local.strategy'
import {JwtStrategy} from './jwt.strategy'
import {UserModule} from '../user/user.module'
import {PassportModule} from '@nestjs/passport'
import {JwtModule} from '@nestjs/jwt'
import {ConfigService} from '@nestjs/config'
@Module({
imports: [
UserModule,
PassportModule,
// JWT signing is configured asynchronously so the secret and expiry can be
// read from ConfigService at bootstrap time.
JwtModule.registerAsync({
inject: [ConfigService],
useFactory: async (configService: ConfigService) => {
// NOTE(review): the `JWT` type is not imported in this file — presumably an
// ambient/global declaration; confirm it resolves at compile time.
const jwt = configService.get<JWT>('jwt')
return {
secret: jwt.secret,
signOptions: {expiresIn: jwt.expiresIn}
}
},
})
],
providers: [AuthService, LocalStrategy, JwtStrategy],
exports: [AuthService],
})
/** Authentication module: wires up the local and JWT passport strategies around AuthService. */
export class AuthModule {
}
|
<reponame>tcmRyan/OpenOLAT
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.course.assessment.ui.tool;
import java.util.List;
import org.olat.basesecurity.BaseSecurity;
import org.olat.basesecurity.IdentityRef;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.stack.TooledStackedPanel;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.controller.BasicController;
import org.olat.core.gui.control.generic.modal.DialogBoxUIFactory;
import org.olat.core.id.Identity;
import org.olat.core.id.IdentityEnvironment;
import org.olat.core.id.Roles;
import org.olat.core.id.context.ContextEntry;
import org.olat.core.id.context.StateEntry;
import org.olat.core.util.Formatter;
import org.olat.core.util.StringHelper;
import org.olat.core.util.Util;
import org.olat.core.util.coordinate.CoordinatorManager;
import org.olat.core.util.coordinate.LockResult;
import org.olat.course.CourseFactory;
import org.olat.course.ICourse;
import org.olat.course.assessment.AssessmentModule;
import org.olat.course.assessment.CourseAssessmentService;
import org.olat.course.assessment.OpenSubDetailsEvent;
import org.olat.course.assessment.handler.AssessmentConfig;
import org.olat.course.nodes.CourseNode;
import org.olat.course.nodes.CourseNodeFactory;
import org.olat.course.nodes.MSCourseNode;
import org.olat.course.run.userview.UserCourseEnvironment;
import org.olat.course.run.userview.UserCourseEnvironmentImpl;
import org.olat.modules.ModuleConfiguration;
import org.olat.modules.assessment.ui.AssessedIdentityController;
import org.olat.modules.assessment.ui.event.AssessmentFormEvent;
import org.olat.repository.RepositoryEntry;
import org.olat.util.logging.activity.LoggingResourceable;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
* Initial date: 09.10.2015<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
public class AssessmentIdentityCourseNodeController extends BasicController implements AssessedIdentityController {
private final TooledStackedPanel stackPanel;
private final VelocityContainer identityAssessmentVC;
private AssessmentForm assessmentForm;
private Controller identityInfosCtrl;
private Controller subDetailsController;
private Controller detailsEditController;
// Exclusive edit lock on (course node, assessed identity); released in doDispose().
private LockResult lockEntry;
private final CourseNode courseNode;
private final Identity assessedIdentity;
private final UserCourseEnvironment coachCourseEnv;
private final UserCourseEnvironment assessedUserCourseEnvironment;
@Autowired
private BaseSecurity securityManager;
@Autowired
private CourseAssessmentService courseAssessmentService;
/**
 * Builds the per-identity assessment view for one course node: identity infos,
 * an optional details editor, the assessment form and the audit log. Acquires
 * an exclusive lock for the (node, identity) pair; on failure only a warning
 * is shown and the editing widgets are not created.
 */
public AssessmentIdentityCourseNodeController(UserRequest ureq, WindowControl wControl, TooledStackedPanel stackPanel,
RepositoryEntry courseEntry, CourseNode courseNode, UserCourseEnvironment coachCourseEnv,
Identity assessedIdentity, boolean courseNodeDetails) {
super(ureq, wControl, Util.createPackageTranslator(AssessmentModule.class, ureq.getLocale()));
this.stackPanel = stackPanel;
this.courseNode = courseNode;
this.assessedIdentity = assessedIdentity;
this.coachCourseEnv = coachCourseEnv;
ICourse course = CourseFactory.loadCourse(courseEntry);
// Build a course environment for the assessed user; read-only mirrors the coach's state.
Roles roles = securityManager.getRoles(assessedIdentity);
IdentityEnvironment identityEnv = new IdentityEnvironment(assessedIdentity, roles);
assessedUserCourseEnvironment = new UserCourseEnvironmentImpl(identityEnv, course.getCourseEnvironment(), coachCourseEnv.isCourseReadOnly());
assessedUserCourseEnvironment.getScoreAccounting().evaluateAll();
addLoggingResourceable(LoggingResourceable.wrap(course));
addLoggingResourceable(LoggingResourceable.wrap(courseNode));
identityAssessmentVC = createVelocityContainer("identity_personal_node_infos");
initDetails();
identityInfosCtrl = new AssessedIdentityLargeInfosController(ureq, wControl, assessedIdentity, course);
listenTo(identityInfosCtrl);
identityAssessmentVC.put("identityInfos", identityInfosCtrl.getInitialComponent());
//acquire lock and show dialog box on failure.
String lockSubKey = lockKey(courseNode, assessedIdentity);
lockEntry = CoordinatorManager.getInstance().getCoordinator().getLocker().acquireLock(course, ureq.getIdentity(), lockSubKey, getWindow());
if(!lockEntry.isSuccess()) {
String msg = DialogBoxUIFactory.getLockedMessage(ureq, lockEntry.getLockEntry(), "assessmentLock", getTranslator());
getWindowControl().setWarning(msg);
} else {
// Add the users details controller
AssessmentConfig assessmentConfig = courseAssessmentService.getAssessmentConfig(courseNode);
if (assessmentConfig.hasEditableDetails() && courseNodeDetails) {
detailsEditController = courseAssessmentService.getDetailsEditController(ureq, wControl, stackPanel,
courseNode, coachCourseEnv, assessedUserCourseEnvironment);
listenTo(detailsEditController);
identityAssessmentVC.put("details", detailsEditController.getInitialComponent());
}
assessmentForm = new AssessmentForm(ureq, wControl, courseNode, coachCourseEnv, assessedUserCourseEnvironment);
listenTo(assessmentForm);
identityAssessmentVC.put("assessmentForm", assessmentForm.getInitialComponent());
// Show the node's audit log when present, HTML-escaped.
String nodeLog = courseAssessmentService.getAuditLog(courseNode, assessedUserCourseEnvironment);
if(StringHelper.containsNonWhitespace(nodeLog)) {
identityAssessmentVC.contextPut("log", StringHelper.escapeHtml(nodeLog));
}
}
putInitialPanel(identityAssessmentVC);
}
/** Lock sub-key uniquely identifying the (course node, assessed identity) pair. */
public static String lockKey(CourseNode node, IdentityRef identity) {
return "AssessmentLock-NID::" + node.getIdent() + "-IID::" + identity.getKey();
}
public UserCourseEnvironment getCoachCourseEnvironment() {
return coachCourseEnv;
}
public UserCourseEnvironment getAssessedUserCourseEnvironment() {
return assessedUserCourseEnvironment;
}
/** Populates the velocity container with node title, icon css and coach info text. */
private void initDetails() {
identityAssessmentVC.contextPut("courseNode", courseNode.getShortTitle());
String courseNodeCssClass = CourseNodeFactory.getInstance()
.getCourseNodeConfigurationEvenForDisabledBB(courseNode.getType()).getIconCSSClass();
identityAssessmentVC.contextPut("courseNodeCss", courseNodeCssClass);
ModuleConfiguration modConfig = courseNode.getModuleConfiguration();
String infoCoach = (String) modConfig.get(MSCourseNode.CONFIG_KEY_INFOTEXT_COACH);
infoCoach = Formatter.formatLatexFormulas(infoCoach);
identityAssessmentVC.contextPut("infoCoach", infoCoach);
}
@Override
public Identity getAssessedIdentity() {
return assessedIdentity;
}
public CourseNode getCourseNode() {
return courseNode;
}
@Override
protected void doDispose() {
releaseEditorLock();
}
/** Releases the edit lock if it was successfully acquired; safe to call twice. */
private void releaseEditorLock() {
if (lockEntry != null && lockEntry.isSuccess()) {
// release lock
CoordinatorManager.getInstance().getCoordinator().getLocker().releaseLock(lockEntry);
lockEntry = null;
}
}
@Override
public void activate(UserRequest ureq, List<ContextEntry> entries, StateEntry state) {
//
}
/**
 * Dispatches events from the details editor (reload form, push sub-details)
 * and from the assessment form (notify the details editor of done/reopen,
 * then propagate to this controller's listeners).
 */
@Override
protected void event(UserRequest ureq, Controller source, Event event) {
if (source == detailsEditController) {
// reset SCORM test
if(event == Event.CHANGED_EVENT) {
assessmentForm.reloadData();
fireEvent(ureq, event);
} else if(event == Event.DONE_EVENT) {
fireEvent(ureq, Event.DONE_EVENT);
} else if(event instanceof OpenSubDetailsEvent) {
removeAsListenerAndDispose(subDetailsController);
OpenSubDetailsEvent detailsEvent = (OpenSubDetailsEvent)event;
subDetailsController = detailsEvent.getSubDetailsController();
listenTo(subDetailsController);
stackPanel.pushController(translate("sub.details"), subDetailsController);
}
} else if(assessmentForm == source) {
if(detailsEditController instanceof AssessmentFormCallback) {
if(AssessmentFormEvent.ASSESSMENT_DONE.equals(event.getCommand())) {
((AssessmentFormCallback)detailsEditController).assessmentDone(ureq);
} else if(AssessmentFormEvent.ASSESSMENT_REOPEN.equals(event.getCommand())) {
((AssessmentFormCallback)detailsEditController).assessmentReopen(ureq);
}
}
fireEvent(ureq, event);
}
super.event(ureq, source, event);
}
@Override
protected void event(UserRequest ureq, Component source, Event event) {
//
}
}
|
<gh_stars>0
import { Component, OnInit, ViewChild, ElementRef } from '@angular/core';
import {NgForm} from '@angular/forms';
import { ClientesService } from '../services/clientes.service';
import { Clientes } from '../interfaces/clientes.interface';
import { getLocaleTimeFormat } from '@angular/common';
import { element } from '@angular/core/src/render3';
import { AuthService } from 'app/auth.service';
import { CasosService } from '../services/casos.service';
import {ReportesService} from '../services/reportes.service';
// Component listing patient-related case data (dengue and influenza records)
// loaded from CasosService and ReportesService.
@Component({
selector: 'app-pacientes',
templateUrl: './pacientes.component.html',
styleUrls: ['./pacientes.component.css']
})
export class PacientesComponent implements OnInit {
// Spinner flag: true while the initial data loads.
public loading: boolean;
// Search term bound from the template — presumably used by the view's filter pipe; verify in the template.
buscar:string;
constructor(public casosService:CasosService,
public reportesService: ReportesService,
) {
this.loading = true;
}
// Dengue case records as returned by the service.
public dengues = [];
// Filtered view of `dengues` — never written here; presumably populated elsewhere or by the template. TODO confirm.
public denguesFiltered = [];
public dengue= '';
public buscando:boolean = false;
// Aggregated total computed in Pagado() from the `cantidad` field.
total: number;
// Influenza records as returned by the service.
public influenzas = [];
public influenza= '';
// NOTE(review): the subscriptions below are never unsubscribed and the
// component does not implement OnDestroy — possible leak if the services
// return long-lived observables; confirm whether they complete after one
// emission (e.g. HttpClient) before relying on this.
ngOnInit() {
this.casosService.getDengues().subscribe(dengues => {
console.log('DENGUES',dengues);
this.dengues = dengues;
// NOTE(review): whichever of the two requests finishes first clears the
// spinner, even though the other may still be in flight.
this.loading = false;
});
this.reportesService.getInfluenzas().subscribe(influenzas => {
console.log('Influenzas',influenzas);
this.influenzas = influenzas;
this.loading = false;
});
}
// Reloads the full dengue list (same query as ngOnInit).
todos(){
this.casosService.getDengues().subscribe(dengues => {
console.log('DENGUE', dengues);
this.dengues = dengues;
this.loading = false;
})
}
// Reloads the dengue list and computes the sum of each record's `cantidad`.
Pagado(){
this.casosService.getDengues().subscribe(dengue => {
//console.log('CLIENTES', clientes);
this.dengues = dengue;
this.loading = false;
//Calculamos el TOTAL
this.total = this.dengues.reduce((
acc,
obj,
) => acc + (obj.cantidad),
0);
//console.log("Total: ", this.total);
})
}
}
|
#!/bin/sh
# CocoaPods-style "Embed Frameworks" build phase script: copies the built
# framework bundles into the app's Frameworks folder, strips invalid
# architectures, and re-signs as needed.
# Abort on the first failing command.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the active platform (used only on Xcode < 7).
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# install_framework <path-to-framework>
# Copies the framework at $1 into ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH},
# resolving symlinks, stripping architectures not in VALID_ARCHS, code-signing
# when required, and (on Xcode < 7) embedding linked Swift runtime dylibs.
install_framework()
{
# Locate the source: prefer the full path under BUILT_PRODUCTS_DIR, then just
# its basename there, then the literal path.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow a symlinked framework to its real location before copying.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
# The executable normally lives at Foo.framework/Foo; fall back to a bare
# binary at the destination root when that path is not readable.
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
# List @rpath/libswift* dependencies of the binary; `exit ${PIPESTATUS[0]}`
# propagates otool's exit status through the pipeline.
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
# code_sign_if_enabled <path>
# Skipped entirely when no signing identity is set or signing is disabled.
# When COCOAPODS_PARALLEL_CODE_SIGN=true the codesign command is backgrounded
# (the trailing `wait` at the end of the script joins them).
# NOTE(review): $1 is interpolated into an eval'd string inside single quotes —
# a path containing a single quote would break the command; paths here come
# from Xcode build settings, so this is presumably safe in practice.
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# strip_invalid_archs <binary>
# Removes every architecture slice not listed in VALID_ARCHS from a fat
# binary in place (lipo -remove), so simulator slices don't break device
# code-signing validation.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
# Report what was removed, if anything.
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Embed the pod frameworks for the active build configuration. The Debug and
# Release lists are currently identical; configurations other than these two
# embed nothing.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/PLCrashReporter-DynamicFramework/PLCrashReporter_DynamicFramework.framework"
install_framework "$BUILT_PRODUCTS_DIR/PuppyWatchdog/PuppyWatchdog.framework"
install_framework "$BUILT_PRODUCTS_DIR/RuntimeRoutines/RuntimeRoutines.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/PLCrashReporter-DynamicFramework/PLCrashReporter_DynamicFramework.framework"
install_framework "$BUILT_PRODUCTS_DIR/PuppyWatchdog/PuppyWatchdog.framework"
install_framework "$BUILT_PRODUCTS_DIR/RuntimeRoutines/RuntimeRoutines.framework"
fi
# Join any backgrounded parallel codesign jobs before the phase finishes.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
# Toolchain environment for an emscripten build of ImageMagick and its image
# codec dependencies (zlib, libpng, libjpeg, libtiff), all checked out under /code.
export CPPFLAGS="-I/code/libpng -I/code/zlib -I/code/libjpeg -I/code/libtiff/libtiff"
export LDFLAGS="-L/code/zlib -L/code/libpng -L/code/libpng/.libs -L/code/libjpeg -L/code/libtiff/libtiff"
export CFLAGS="-O3"
export CXXFLAGS="$CFLAGS"
# Emscripten link options: clamp instead of trapping on float->int overflow,
# and allow the WASM heap to grow at runtime.
MAKE_FLAGS="-s BINARYEN_TRAP_MODE=clamp -s ALLOW_MEMORY_GROWTH=1"
# Debug variant of the flags above (kept for reference):
# export CFLAGS="-O0 -g2"
# export CXXFLAGS="$CFLAGS"
# MAKE_FLAGS="-s BINARYEN_TRAP_MODE=clamp -s ALLOW_MEMORY_GROWTH=1 -s SAFE_HEAP=1 -s ASSERTIONS=1"
export PKG_CONFIG_PATH="/code/libpng:/code/zlib:/code/libjpeg:/code/libtiff:/code/libtiff/libtiff:"
export PNG_LIBS="-L/code/libpng -L/code/libpng/.libs"
# Build each dependency as a static library under emscripten.
# zlib: its configure script takes --static directly.
cd /code/zlib
emconfigure ./configure --static
emcmake make $MAKE_FLAGS CFLAGS="$CFLAGS" CXXFLAGS="$CXXFLAGS"
# libjpeg: regenerate the autotools files, then a standard static build.
cd /code/libjpeg
autoreconf -fvi
emconfigure ./configure --disable-shared
emcmake make $MAKE_FLAGS CFLAGS="$CFLAGS" CXXFLAGS="$CXXFLAGS"
# libpng: regenerate build system piecemeal (libtoolize/autoreconf/automake).
cd /code/libpng
libtoolize
# aclocal
autoreconf
automake --add-missing
# ./autogen
emconfigure ./configure --disable-shared
emcmake make $MAKE_FLAGS CFLAGS="$CFLAGS" CXXFLAGS="$CXXFLAGS"
# libtiff: several autotools passes are run back to back here — presumably
# accumulated while getting the regeneration to work; some are likely redundant.
cd /code/libtiff
libtoolize --force
###
aclocal
###
autoreconf --force
####
automake --add-missing
./autogen
autoconf
autoreconf
####
emconfigure ./configure --disable-shared
emcmake make $MAKE_FLAGS CFLAGS="$CFLAGS" CXXFLAGS="$CXXFLAGS"
# ImageMagick itself: configure with nearly every optional delegate disabled
# (only the codecs built above are used), then build and produce the final
# WASM/JS artifact via an explicit libtool link step.
cd /code/ImageMagick
autoconf
#emconfigure ./configure --prefix=/ --disable-shared --without-threads --without-magick-plus-plus --without-perl --without-x --disable-largefile --disable-openmp --without-bzlib --without-dps --without-freetype --without-jbig --without-openjp2 --without-lcms --without-wmf --without-xml --without-fftw --without-flif --without-fpx --without-djvu --without-fontconfig --without-raqm --without-gslib --without-gvc --without-heic --without-lqr --without-openexr --without-pango --without-raw --without-rsvg --without-webp --without-xml PKG_CONFIG_PATH="/code/libpng:/code/libpng/.libs:/code/zlib:/code/libjpeg:/code/libtiff:/code/libtiff/libtiff:"
emconfigure ./configure --prefix=/ --disable-shared --without-threads --without-magick-plus-plus --without-perl --without-x --disable-largefile --disable-openmp --without-bzlib --without-dps --without-freetype --without-jbig --without-openjp2 --without-lcms --without-wmf --without-xml --without-fftw --without-flif --without-fpx --without-djvu --without-fontconfig --without-raqm --without-gslib --without-gvc --without-heic --without-lqr --without-openexr --without-pango --without-raw --without-rsvg --without-webp --without-xml PKG_CONFIG_PATH="/code/libpng:/code/zlib:/code/libjpeg:/code/libtiff:/code/libtiff/libtiff:"
emcmake make $MAKE_FLAGS CFLAGS="$CFLAGS" CXXFLAGS="$CXXFLAGS"
#produce the correct output file
# (the commented variant below additionally injects a web-worker pre-js shim)
#/bin/bash ./libtool --silent --tag=CC --mode=link emcc --pre-js /code/webworker.js $MAKE_FLAGS $CXXFLAGS -L/code/zlib -L/code/libpng -L/code/libpng/.libs -L/code/libjpeg -L/code/zlib -L/code/libpng -L/code/libpng/.libs -L/code/libjpeg -o utilities/magick.html utilities/magick.o MagickCore/libMagickCore-7.Q16HDRI.la MagickWand/libMagickWand-7.Q16HDRI.la
/bin/bash ./libtool --silent --tag=CC --mode=link emcc $MAKE_FLAGS $CXXFLAGS -L/code/zlib -L/code/libpng -L/code/libpng/.libs -L/code/libjpeg -L/code/zlib -L/code/libpng -L/code/libpng/.libs -L/code/libjpeg -o utilities/magick.html utilities/magick.o MagickCore/libMagickCore-7.Q16HDRI.la MagickWand/libMagickWand-7.Q16HDRI.la
|
<gh_stars>0
import {isString} from './types';
/**
 * Narrows a possibly-missing element reference to a definite element.
 *
 * @param element the element to check
 * @param elementName optional name included in the error message for debugging
 * @returns the element, unchanged, when it is present
 * @throws Error when the element is null or undefined
 */
export function assertElement<T extends HTMLElement>(element: T | null | undefined, elementName?: string): T {
if (element) {
return element;
}
throw new Error(elementName ? `element not found: ${elementName}` : 'element not found');
}
/**
 * Joins the string values among `classNames` into a single space-separated
 * CSS class string. Non-string values (null, undefined, numbers, booleans, …)
 * are silently dropped, which makes conditional class expressions like
 * `cssClasses('btn', active && 'btn-active')` convenient.
 */
export function cssClasses(...classNames: unknown[]): string {
// `isString` is a type guard, so every survivor of the filter is already a
// string — the former `.map(className => `${className}`)` round-trip was a
// no-op and has been removed.
return classNames.filter(isString).join(' ');
}
/**
 * In order to avoid mistakes like accidentally writing `&` instead of `&&` we
 * require an extra function call for bitwise operations.
 *
 * (the `no-bitwise` ESLint rule should be active for this to work)
 *
 * @param v1 first operand
 * @param v2 second operand
 * @returns the bitwise AND of v1 and v2 (operands are coerced to 32-bit
 *          integers, per JavaScript bitwise-operator semantics)
 */
export function bitwiseAnd(v1: number, v2: number): number {
// eslint-disable-next-line no-bitwise
return v1 & v2;
}
|
#!/bin/sh
# Generates $1 as a C source file defining iw_version[]. In a git checkout the
# version string comes from `git describe` (with a "-dirty" suffix when the
# worktree has uncommitted changes); otherwise the hard-coded VERSION is used.
VERSION="4.0"
OUT="$1"
if [ -d .git ] && head=`git rev-parse --verify HEAD 2>/dev/null`; then
# Refresh the index so diff-index below reflects the real worktree state.
git update-index --refresh --unmerged > /dev/null
descr=$(git describe)
# on git builds check that the version number above
# is correct...
[ "${descr%%-*}" = "v$VERSION" ] || exit 2
# Strip the leading "v" from the describe output.
v="${descr#v}"
# `read` succeeds only if diff-index printed at least one changed file.
if git diff-index --name-only HEAD | read dummy ; then
v="$v"-dirty
fi
else
v="$VERSION"
fi
echo '#include "iw.h"' > "$OUT"
echo "const char iw_version[] = \"$v\";" >> "$OUT"
|
/**
 * Returns a new array in which every odd number is doubled and every even
 * number is left unchanged. The input array is not mutated.
 *
 * Fix: the original tested `num % 2 === 1`, which misses negative odd numbers
 * because JavaScript's remainder keeps the dividend's sign (-3 % 2 === -1).
 * Testing `num % 2 !== 0` classifies both positive and negative odds correctly.
 *
 * @param {number[]} arr array of integers
 * @returns {number[]} new array with odd values doubled
 */
const doublingOdds = (arr) => {
return arr.map(num => {
if (num % 2 !== 0) {
return num * 2;
} else {
return num;
}
});
};
const output = doublingOdds([1, 2, 3, 4, 5]); // [2, 2, 6, 4, 10]
def sum_of_array(array):
    """Return the sum of all elements in ``array``.

    Delegates to the built-in :func:`sum`, which replaces the original
    index-based loop and avoids shadowing the ``sum`` builtin with a local
    variable. Returns ``0`` for an empty input, matching the original.

    :param array: iterable of numbers
    :return: the arithmetic sum of the elements
    """
    return sum(array)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.