text stringlengths 1 1.05M |
|---|
#!/bin/sh
# Regression test for the Coq decompiler:
# compile Decompiler.v, capture its output, and compare it against the
# recorded golden output. Exit status is non-zero on any mismatch.
set -e  # fail fast: do not diff a stale output file if coqc itself fails
coqc coq/Decompiler.v > coq/decompile.out
diff coq/decompile.out coq/decomp_regress.txt
|
require 'minitest_helper'

# Spec suite for Rasti::Model covering construction, type casting,
# default values, comparison semantics, hash serialization and
# class-level helpers. The Point / Point3D / Position fixtures are
# assumed to be defined in minitest_helper.
describe Rasti::Model do

  describe 'Initialization' do

    it 'All attributes' do
      point = Point.new x: 1, y: 2
      point.x.must_equal 1
      point.y.must_equal 2
    end

    it 'Some attributes' do
      point = Point.new x: 1
      point.x.must_equal 1
      # Reading an attribute that was never assigned raises instead of
      # silently returning nil.
      error = proc { point.y }.must_raise Rasti::Model::NotAssignedAttributeError
      error.message.must_equal 'Not assigned attribute: y'
    end

    it 'Unexpected attributes' do
      error = proc { Point.new z: 3 }.must_raise Rasti::Model::UnexpectedAttributesError
      error.message.must_equal 'Unexpected attributes: z'
    end

    it 'Indifferent attribute keys' do
      # String keys are accepted and treated like symbol keys.
      point = Point.new 'x' => 1, 'y' => 2
      point.x.must_equal 1
      point.y.must_equal 2
    end

  end

  describe 'Casting' do

    it 'Attribute' do
      model = Rasti::Model[text: T::String]
      m = model.new text: 123
      m.text.must_equal '123'
    end

    it 'Nested model' do
      range = Rasti::Model[min: T::Integer, max: T::Integer]
      model = Rasti::Model[range: T::Model[range]]
      m = model.new range: {min: '1', max: '10'}
      m.range.must_be_instance_of range
      m.range.min.must_equal 1
      m.range.max.must_equal 10
    end

    it 'Custom' do
      # Position picks the concrete point class from its :type attribute.
      position_1 = Position.new type: '2D', point: {x: 1, y: 2}
      position_1.point.must_be_instance_of Point
      position_1.point.x.must_equal 1
      position_1.point.y.must_equal 2

      position_2 = Position.new type: '3D', point: {x: 1, y: 2, z: 3}
      position_2.point.must_be_instance_of Point3D
      position_2.point.x.must_equal 1
      position_2.point.y.must_equal 2
      position_2.point.z.must_equal 3
    end

    it 'Invalid value' do
      # Casting is lazy: the error surfaces on attribute access, not on new.
      model = Rasti::Model[boolean: T::Boolean]
      m = model.new boolean: 'x'
      error = proc { m.boolean }.must_raise Rasti::Types::CastError
      error.message.must_equal "Invalid cast: 'x' -> Rasti::Types::Boolean"
    end

    it 'Invalid nested value' do
      range = Rasti::Model[min: T::Integer, max: T::Integer]
      model = Rasti::Model[range: T::Model[range]]
      m = model.new range: {min: 1, max: true}
      error = proc { m.range.max }.must_raise Rasti::Types::CastError
      error.message.must_equal "Invalid cast: true -> Rasti::Types::Integer"
    end

  end

  describe 'Defaults' do

    it 'Value' do
      model = Class.new(Rasti::Model) do
        attribute :text, T::String, default: 'xyz'
      end
      m = model.new
      m.text.must_equal 'xyz'
    end

    it 'Block' do
      # A block default receives the model instance, so one attribute can
      # default to the value of another.
      model = Class.new(Rasti::Model) do
        attribute :time_1, T::Time['%F']
        attribute :time_2, T::Time['%F'], default: ->(m) { m.time_1 }
      end
      m = model.new time_1: Time.now
      m.time_2.must_equal m.time_1
    end

  end

  describe 'Comparable' do

    it 'Equivalency (==)' do
      # == compares attribute values only, ignoring the concrete class.
      point_1 = Point.new x: 1, y: 2
      point_2 = Point3D.new x: 1, y: 2
      point_3 = Point.new x: 2, y: 1

      assert point_1 == point_2
      refute point_1 == point_3
    end

    it 'Equality (eql?)' do
      # eql? also requires the same class, unlike ==.
      point_1 = Point.new x: 1, y: 2
      point_2 = Point.new x: 1, y: 2
      point_3 = Point3D.new x: 1, y: 2
      point_4 = Point.new x: 2, y: 1

      assert point_1.eql?(point_2)
      refute point_1.eql?(point_3)
      refute point_1.eql?(point_4)
    end

    it 'hash' do
      # hash must agree with eql? so instances work as Hash keys.
      point_1 = Point.new x: 1, y: 2
      point_2 = Point.new x: 1, y: 2
      point_3 = Point3D.new x: 1, y: 2
      point_4 = Point.new x: 2, y: 1

      point_1.hash.must_equal point_2.hash
      point_1.hash.wont_equal point_3.hash
      point_1.hash.wont_equal point_4.hash
    end

  end

  describe 'Serialization and deserialization' do

    let :address_class do
      Rasti::Model[
        street: T::String,
        number: T::Integer
      ]
    end

    let :birthday_class do
      Rasti::Model[
        day: T::Integer,
        month: T::Integer,
        year: T::Integer
      ]
    end

    let :contact_class do
      Rasti::Model[
        id: T::Integer,
        name: T::String,
        birthday: T::Model[birthday_class],
        phones: T::Hash[T::Symbol, T::Integer],
        addresses: T::Array[T::Model[address_class]],
        labels: T::Array[T::String],
        created_at: T::Time['%Y-%m-%d %H:%M:%S %z'],
        updated_at: nil
      ]
    end

    # Round-trip fixture: attributes as they look after serialization.
    let :attributes do
      {
        id: 12345,
        name: 'John',
        birthday: {
          day: 19,
          month: 6,
          year: 1993
        },
        phones: {
          office: 1234567890,
          house: 456456456
        },
        addresses: [
          {street: 'Lexington Avenue', number: 123},
          {street: 'Park Avenue', number: 456}
        ],
        labels: ['Friend', 'Work'],
        created_at: Time.parse('16/03/2021 09:30:10 -0200').to_s,
        updated_at: Time.parse('2021-03-16T11:45:20+04:00')
      }
    end

    it 'All' do
      contact = contact_class.new attributes
      contact.to_h.must_equal attributes
    end

    it 'Only' do
      contact = contact_class.new attributes
      contact.to_h(only: [:name, :birthday]).must_equal name: attributes[:name],
                                                        birthday: attributes[:birthday]
    end

    it 'Except' do
      contact = contact_class.new attributes
      excluded_attributes = [:age, :addresses, :created_at, :updated_at]
      contact.to_h(except: excluded_attributes).must_equal id: attributes[:id],
                                                           name: attributes[:name],
                                                           birthday: attributes[:birthday],
                                                           phones: attributes[:phones],
                                                           labels: attributes[:labels]
    end

    it 'Ignore not assigned attributes' do
      contact = contact_class.new birthday: {year: 1993, month: 6, day: 19}
      contact.to_h.must_equal birthday: attributes[:birthday]
    end

    it 'Invalid cast' do
      # All cast failures are collected into a single compound error,
      # with nested attributes reported by dotted path.
      contact = contact_class.new id: 'abcd', birthday: {year: 1993, month: 6, day: 'XIX'}
      error = proc { contact.to_h }.must_raise Rasti::Types::CompoundError
      error.errors.must_equal id: ["Invalid cast: 'abcd' -> Rasti::Types::Integer"],
                              'birthday.day' => ["Invalid cast: 'XIX' -> Rasti::Types::Integer"]
    end

    it 'With defaults' do
      model = Class.new(Rasti::Model) do
        attribute :text, T::String, default: 'xyz'
      end
      model.new.to_h.must_equal text: 'xyz'
    end

  end

  it 'Merge' do
    # merge returns a new instance; the receiver is not mutated.
    point_1 = Point.new x: 1, y: 2
    point_2 = point_1.merge x: 10

    point_1.x.must_equal 1
    point_1.y.must_equal 2

    point_2.x.must_equal 10
    point_2.y.must_equal 2
  end

  it 'to_s' do
    Position.to_s.must_equal 'Position[type, point]'
    Position.new(point: {x: 1, y: 2}).to_s.must_equal 'Position[type: "2D", point: Point[x: 1, y: 2]]'
    Position.attributes.map(&:to_s).must_equal [
      'Rasti::Model::Attribute[name: :type, type: Rasti::Types::Enum["2D", "3D"], options: {:default=>"2D"}]',
      'Rasti::Model::Attribute[name: :point, type: :cast_point, options: {}]'
    ]
  end

  it 'Inherits superclass attributes' do
    point = Point3D.new x: 1, y: 2, z: 3
    point.x.must_equal 1
    point.y.must_equal 2
    point.z.must_equal 3
  end

  it 'Invalid attribute redefinition' do
    error = proc { Point[x: T::String] }.must_raise ArgumentError
    error.message.must_equal 'Attribute x already exists'
  end

end
import { Component, OnInit } from '@angular/core';
import { AuthService } from "../../services/auth.service";
import { Router } from "@angular/router";
import { MatSnackBar } from '@angular/material/snack-bar';
@Component({
  selector: 'app-registro',
  templateUrl: './registro.component.html',
  styleUrls: ['./registro.component.css']
})
export class RegistroComponent implements OnInit {

  // Registration form model, bound from the template.
  user = {
    usuario: null,
    contrasena: null
  };

  constructor(
    private auth: AuthService,
    private router: Router,
    private _snackBar: MatSnackBar) {
    // On load, ask the backend whether a user already exists:
    // if so, force a clean session and redirect to the login page;
    // otherwise invite the visitor to start registering.
    const handleExists = (res: any) => {
      if (res.usuario == true) {
        this.auth.cerrarSesion();
        this.router.navigate(['/acceso']);
      } else {
        this._snackBar.open("Empezemos con tu registro.", "Aceptar", {duration: 2000});
      }
    };
    const handleError = (e: any) => {
      console.log(e.error);
      this._snackBar.open(e.error.mensaje, "Error", {
        duration: 2000,
      });
    };
    this.auth.existeUsuarios().subscribe(handleExists, handleError);
  }

  ngOnInit(): void {
  }

  // Submit the registration form; on success store the auth token and
  // move to the home page, on failure surface the server message.
  registro() {
    const onSuccess = (res: any) => {
      console.log(res);
      localStorage.setItem('token', res.token);
      this.router.navigate(['/home']);
    };
    const onFailure = (err: any) => {
      this._snackBar.open(err.error.mensaje, "Error", {
        duration: 2000,
      });
    };
    this.auth.registro(this.user).subscribe(onSuccess, onFailure);
  }
}
|
<reponame>bitmagier/arbitrage-trader
package org.purevalue.arbitrage.adapter.coinbase
import akka.actor.typed.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.RawHeader
import akka.stream.Materializer
import org.purevalue.arbitrage.util.CryptoUtil.hmacSha256Signature
import org.purevalue.arbitrage.{GlobalConfig, Main, SecretsConfig}
import org.slf4j.LoggerFactory
import spray.json.{JsValue, JsonParser, JsonReader}
import java.nio.charset.StandardCharsets
import java.util.Base64
import scala.concurrent.{ExecutionContext, Future}
// HTTP helpers for the authenticated (HMAC-signed) Coinbase Pro REST API.
// Builds the CB-ACCESS-* headers, issues the request, and offers
// raw-string / JsValue / typed-JSON flavours of the response handling.
private[coinbase] object CoinbaseHttpUtil {
  private val log = LoggerFactory.getLogger(getClass)
  private lazy val globalConfig: GlobalConfig = Main.config().global

  // {"iso":"2020-10-01T21:22:24Z","epoch":1601587344.} <- spray cannot parse that
  // (trailing dot after the epoch number), so extract the epoch field by hand:
  // take everything between the last ':' and the last '}'.
  def parseServerTime(jsonLike: String): Double = {
    val end = jsonLike.lastIndexOf('}')
    val start = jsonLike.lastIndexOf(':') + 1
    jsonLike.substring(start, end).toDouble
  }

  // Holds the four CB-ACCESS-* header values for one signed request.
  case class Signature(cbAccessKey: String, cbAccessSign: String, cbAccessTimestamp: String, cbAccessPassphrase: String)

  // Computes the request signature per the Coinbase scheme:
  // base64(HMAC-SHA256(timestamp + METHOD + relative-path + body, base64-decoded secret)).
  def createSignature(method: HttpMethod, uri: String, requestBody: Option[String], apiKeys: SecretsConfig, serverTime: Double): Signature = {
    // [coinbase documentation]
    // The CB-ACCESS-TIMESTAMP header MUST be number of seconds since Unix Epoch in UTC. Decimal values are allowed
    // Your timestamp must be within 30 seconds of the api service time or your request will be considered expired and rejected.
    // We recommend using the time endpoint to query for the API server time if you believe there many be time skew between your server and the API servers.
    val timestamp: String = serverTime.toString
    val requestPath = Uri(uri).toRelative.toString()
    val contentToSign = s"""$timestamp${method.value}$requestPath${requestBody.getOrElse("")}"""
    val secretKey = Base64.getDecoder.decode(apiKeys.apiSecretKey)
    // ISO_8859_1 maps bytes 1:1 to chars, so the base64 text survives the String round-trip.
    val signature = new String(Base64.getEncoder.encode(hmacSha256Signature(contentToSign, secretKey)), StandardCharsets.ISO_8859_1)
    // NOTE(review): apiKeyPassphrase.get throws if no passphrase is configured — confirm it is mandatory for coinbase.
    Signature(apiKeys.apiKey, signature, timestamp, apiKeys.apiKeyPassphrase.get)
  }

  // Sends a signed request and returns the raw akka-http response (entity not yet consumed).
  // https://docs.pro.coinbase.com/#api-key-permissions
  def httpRequestCoinbaseHmacSha256(method: HttpMethod, uri: String, requestBody: Option[String], apiKeys: SecretsConfig, serverTime: Double)
                                   (implicit system: ActorSystem[_], fm: Materializer, executor: ExecutionContext):
  Future[HttpResponse] = {
    val signature = createSignature(method, uri, requestBody, apiKeys, serverTime)
    Http().singleRequest(
      HttpRequest(
        method,
        uri = Uri(uri),
        headers = List(
          RawHeader("CB-ACCESS-KEY", signature.cbAccessKey),
          RawHeader("CB-ACCESS-SIGN", signature.cbAccessSign),
          RawHeader("CB-ACCESS-TIMESTAMP", signature.cbAccessTimestamp),
          RawHeader("CB-ACCESS-PASSPHRASE", signature.cbAccessPassphrase)
        ),
        entity = requestBody match {
          case None => HttpEntity.Empty
          case Some(x) => HttpEntity(ContentTypes.`application/json`, x)
        }
      ))
  }

  // Signed request; materializes the entity and returns (status, body-as-UTF-8-string).
  // Non-success responses are logged but still returned to the caller.
  def httpRequestCoinbaseAccount(method: HttpMethod, uri: String, requestBody: Option[String], apiKeys: SecretsConfig, serverTime: Double)
                                (implicit system: ActorSystem[_], fm: Materializer, executor: ExecutionContext):
  Future[(StatusCode, String)] = {
    httpRequestCoinbaseHmacSha256(method, uri, requestBody, apiKeys, serverTime)
      .flatMap {
        response: HttpResponse =>
          response.entity.toStrict(globalConfig.httpTimeout).map { r =>
            if (!response.status.isSuccess()) log.warn(s"$response")
            (response.status, r.data.utf8String)
          }
      }
  }

  // Signed request; returns (status, parsed JsValue). Fails the Future if the
  // response content type is not application/json.
  def httpRequestPureJsonCoinbaseAccount(method: HttpMethod, uri: String, requestBody: Option[String], apiKeys: SecretsConfig, serverTime: Double)
                                        (implicit system: ActorSystem[_], fm: Materializer, executor: ExecutionContext):
  Future[(StatusCode, JsValue)] = {
    httpRequestCoinbaseHmacSha256(method, uri, requestBody, apiKeys, serverTime)
      .flatMap {
        response: HttpResponse =>
          response.entity.toStrict(globalConfig.httpTimeout).map { r =>
            if (!response.status.isSuccess()) log.warn(s"$response")
            r.contentType match {
              case ContentTypes.`application/json` => (response.status, JsonParser(r.data.utf8String))
              case _ => throw new RuntimeException(s"Non-Json message received:\n${r.data.utf8String}")
            }
          }
      }
  }

  // Signed request decoded to a typed result: Left[T] on HTTP success,
  // Right[E] on HTTP error. A JSON-conversion failure is wrapped in a
  // RuntimeException that carries uri, status and payload for diagnosis.
  def httpRequestJsonCoinbaseAccount[T, E](method: HttpMethod, uri: String, requestBody: Option[String], apiKeys: SecretsConfig, serverTime: Double)
                                          (implicit evidence1: JsonReader[T], evidence2: JsonReader[E], system: ActorSystem[_], fm: Materializer,
                                           executor: ExecutionContext):
  Future[Either[T, E]] = {
    httpRequestPureJsonCoinbaseAccount(method, uri, requestBody, apiKeys, serverTime).map {
      case (statusCode, j) =>
        try {
          if (statusCode.isSuccess()) Left(j.convertTo[T])
          else Right(j.convertTo[E])
        } catch {
          case e: Exception => throw new RuntimeException(s"$uri failed. Response: $statusCode, $j, ", e)
        }
    }
  }
}
|
#!/bin/bash
# Installs Docker CE on Ubuntu 20.04 (focal) from Docker's official apt
# repository, then lets the 'ubuntu' user run docker without sudo.
sudo apt-get clean
sudo apt-get update -y
# Trust Docker's repository signing key.
# NOTE(review): apt-key is deprecated on newer Ubuntu releases — consider
# a keyring file with signed-by once the target OS moves past focal.
curl -fsSL https://download.docker.com/linux/ubuntu/gpg |sudo apt-key add -
sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable"
# Show which repository will supply docker-ce (sanity check in the log).
apt-cache policy docker-ce
# Refresh indexes again so the newly added Docker repo is picked up.
sudo apt-get update
sudo apt-get install -y docker-ce
# Allow the 'ubuntu' user to use the docker socket (takes effect on re-login).
sudo usermod -aG docker ubuntu
|
import { TestBed, async } from '@angular/core/testing';
import { RouterTestingModule } from '@angular/router/testing';
import { AppComponent } from './app.component';
import { ActiveJobsPipe } from './pipe/active.pipe';
import { JobService } from './service/job.service';
import { HttpClientModule } from '@angular/common/http';
import { MatCardModule, MatIconModule, MatToolbarModule } from '@angular/material';
import { JobDetailComponent } from './component/job-detail/job-detail.component';
import { JobItemComponent } from './component/job-item/job-item.component';
import { FlexLayoutModule } from '@angular/flex-layout';
describe('AppComponent', () => {
  // Build a testing module that mirrors the runtime wiring of AppModule:
  // every module/provider/declaration the component tree needs must be
  // listed here or component creation fails.
  // NOTE(review): `async` from @angular/core/testing is deprecated in newer
  // Angular versions in favour of `waitForAsync` — confirm the project's
  // Angular version before migrating.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      imports: [
        RouterTestingModule,
        HttpClientModule,
        MatToolbarModule,
        MatCardModule,
        FlexLayoutModule,
        MatIconModule
      ],
      providers: [
        JobService,
      ],
      declarations: [
        AppComponent,
        ActiveJobsPipe,
        JobItemComponent,
        JobDetailComponent
      ],
    }).compileComponents();
  }));

  // Smoke test: the component instantiates with the configured module.
  it('should create the app', () => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.debugElement.componentInstance;
    expect(app).toBeTruthy();
  });

  // Pins the title property used in the toolbar template.
  it(`should have as title 'Job Board'`, () => {
    const fixture = TestBed.createComponent(AppComponent);
    const app = fixture.debugElement.componentInstance;
    expect(app.title).toEqual('Job Board');
  });
});
|
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.common.dao.impl.sql;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import org.junit.Assert;
import org.kaaproject.kaa.common.dto.KaaAuthorityDto;
import org.kaaproject.kaa.common.dto.NotificationTypeDto;
import org.kaaproject.kaa.common.dto.TopicTypeDto;
import org.kaaproject.kaa.common.dto.UpdateStatus;
import org.kaaproject.kaa.common.dto.ctl.CTLSchemaScopeDto;
import org.kaaproject.kaa.common.dto.event.ApplicationEventAction;
import org.kaaproject.kaa.common.dto.event.EventClassType;
import org.kaaproject.kaa.server.common.core.schema.KaaSchemaFactoryImpl;
import org.kaaproject.kaa.server.common.dao.AbstractTest;
import org.kaaproject.kaa.server.common.dao.model.sql.Application;
import org.kaaproject.kaa.server.common.dao.model.sql.ApplicationEventFamilyMap;
import org.kaaproject.kaa.server.common.dao.model.sql.ApplicationEventMap;
import org.kaaproject.kaa.server.common.dao.model.sql.CTLSchema;
import org.kaaproject.kaa.server.common.dao.model.sql.CTLSchemaMetaInfo;
import org.kaaproject.kaa.server.common.dao.model.sql.Change;
import org.kaaproject.kaa.server.common.dao.model.sql.Configuration;
import org.kaaproject.kaa.server.common.dao.model.sql.ConfigurationSchema;
import org.kaaproject.kaa.server.common.dao.model.sql.EndpointGroup;
import org.kaaproject.kaa.server.common.dao.model.sql.EndpointProfileSchema;
import org.kaaproject.kaa.server.common.dao.model.sql.EventClass;
import org.kaaproject.kaa.server.common.dao.model.sql.EventClassFamily;
import org.kaaproject.kaa.server.common.dao.model.sql.EventSchemaVersion;
import org.kaaproject.kaa.server.common.dao.model.sql.History;
import org.kaaproject.kaa.server.common.dao.model.sql.LogAppender;
import org.kaaproject.kaa.server.common.dao.model.sql.LogSchema;
import org.kaaproject.kaa.server.common.dao.model.sql.NotificationSchema;
import org.kaaproject.kaa.server.common.dao.model.sql.ProfileFilter;
import org.kaaproject.kaa.server.common.dao.model.sql.SdkProfile;
import org.kaaproject.kaa.server.common.dao.model.sql.ServerProfileSchema;
import org.kaaproject.kaa.server.common.dao.model.sql.Tenant;
import org.kaaproject.kaa.server.common.dao.model.sql.Topic;
import org.kaaproject.kaa.server.common.dao.model.sql.User;
import org.kaaproject.kaa.server.common.dao.model.sql.UserVerifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class HibernateAbstractTest extends AbstractTest {
private static final Logger LOG = LoggerFactory.getLogger(HibernateAbstractTest.class);
protected Tenant generateTenant() {
LOG.debug("Generate tenant...");
Tenant tenant = new Tenant();
tenant.setName("Test tenant" + RANDOM.nextInt());
tenant = tenantDao.save(tenant);
LOG.debug("Generate tenant {}", tenant);
return tenant;
}
protected Change generateChange() {
LOG.debug("Generate change...");
Change change = new Change();
change.setConfigurationId(RANDOM.nextLong());
change.setConfigurationVersion(RANDOM.nextInt(3000));
change = historyDao.save(change, Change.class);
LOG.debug("Generated change {}", change);
return change;
}
protected List<History> generateHistory(Application app, int count) {
LOG.debug("Generate history...");
List<History> histories = new ArrayList<>();
if (app == null) {
app = generateApplication(null);
}
for (int i = 0; i < count; i++) {
History history = new History();
history.setApplication(app);
history.setLastModifyTime(System.currentTimeMillis());
history.setSequenceNumber(i + 1);
history.setChange(generateChange());
history = historyDao.save(history);
histories.add(history);
LOG.debug("Generated history {}", history);
}
return histories;
}
protected User generateUser(Tenant tenant, KaaAuthorityDto authority) {
LOG.debug("Generate user...");
if (tenant == null) {
tenant = generateTenant();
}
User user = new User();
user.setExternalUid(UUID.randomUUID().toString());
user.setTenant(tenant);
if (authority == null) {
authority = KaaAuthorityDto.KAA_ADMIN;
}
user.setAuthority(authority);
user.setUsername("TestUserName");
user = userDao.save(user);
LOG.debug("Generated user {}", user);
return user;
}
protected Application generateApplication(Tenant tenant) {
LOG.debug("Generate application...");
if (tenant == null) {
tenant = generateTenant();
}
Application app = new Application();
app.setName("Test app name" + UUID.randomUUID().toString());
app.setTenant(tenant);
app.setApplicationToken(UUID.randomUUID().toString());
app.setSequenceNumber(RANDOM.nextInt());
app = applicationDao.save(app);
LOG.debug("Generated application {}", app);
return app;
}
protected EndpointGroup generateEndpointGroup(Application app, Set<Topic> topics) {
EndpointGroup group = new EndpointGroup();
if (app == null) {
app = generateApplication(null);
}
group.setApplication(app);
group.setName("GROUP_ALL_" + RANDOM.nextInt());
group.setWeight(RANDOM.nextInt());
group.setTopics(topics);
return endpointGroupDao.save(group);
}
protected List<ConfigurationSchema> generateConfSchema(Application app, int count) {
List<ConfigurationSchema> schemas = Collections.emptyList();
try {
if (app == null) {
app = generateApplication(null);
}
ConfigurationSchema schema;
schemas = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
schema = new ConfigurationSchema();
schema.setApplication(app);
schema.setSchema(readSchemaFileAsString("dao/schema/testDataSchema.json"));
schema.setVersion(i + 1);
schema = configurationSchemaDao.save(schema);
Assert.assertNotNull(schema);
schemas.add(schema);
}
} catch (IOException e) {
LOG.error("Can't generate configuration schemas {}", e);
Assert.fail("Can't generate configuration schemas." + e.getMessage());
}
return schemas;
}
protected List<Configuration> generateConfiguration(ConfigurationSchema schema, EndpointGroup group, int count, UpdateStatus status) {
List<Configuration> configs = Collections.emptyList();
try {
if (schema == null) {
schema = generateConfSchema(null, 1).get(0);
}
if (group == null) {
group = generateEndpointGroup(schema.getApplication(), null);
}
Assert.assertNotNull(schema);
configs = new ArrayList<>();
for (int i = 0; i < count; i++) {
Configuration dto = new Configuration();
dto.setId(null);
dto.setStatus(status != null ? status : UpdateStatus.INACTIVE);
dto.setConfigurationBody(new byte[]{0, 2, 3, 4,});
dto.setConfigurationSchema(schema);
dto.setSequenceNumber(i);
dto.setSchemaVersion(i + 1);
dto.setApplication(schema.getApplication());
dto.setEndpointGroup(group);
Configuration saved = configurationDao.save(dto);
Assert.assertNotNull(saved);
configs.add(saved);
}
} catch (Exception e) {
LOG.error("Can't generate configs {}", e);
Assert.fail("Can't generate configurations." + e.getMessage());
}
return configs;
}
protected List<EndpointProfileSchema> generateProfSchema(Application app, int count) {
List<EndpointProfileSchema> schemas = Collections.emptyList();
try {
if (app == null) {
app = generateApplication(null);
}
CTLSchema ctlSchema = generateCTLSchema(DEFAULT_FQN, 1, app.getTenant(), null);
EndpointProfileSchema schemaDto;
schemas = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
schemaDto = new EndpointProfileSchema();
schemaDto.setApplication(app);
schemaDto.setCreatedUsername("Test User");
schemaDto.setCtlSchema(ctlSchema);
schemaDto.setVersion(i + 1);
schemaDto.setName("Test Name");
schemaDto = profileSchemaDao.save(schemaDto);
Assert.assertNotNull(schemaDto);
schemas.add(schemaDto);
}
} catch (Exception e) {
LOG.error("Can't generate profile schema {}", e);
Assert.fail("Can't generate profile schema." + e.getMessage());
}
return schemas;
}
protected CTLSchema generateCTLSchema(String fqn, int version, Tenant tenant, CTLSchemaScopeDto scope) {
if (scope == null) {
if (tenant == null) {
scope = CTLSchemaScopeDto.SYSTEM;
} else {
scope = CTLSchemaScopeDto.TENANT;
}
}
CTLSchemaMetaInfo metaInfo = new CTLSchemaMetaInfo();
metaInfo.setFqn(fqn);
metaInfo.setTenant(tenant);
metaInfo = ctlSchemaMetaInfoDao.save(metaInfo);
CTLSchema ctlSchema = new CTLSchema();
ctlSchema.setMetaInfo(metaInfo);
ctlSchema.setVersion(version);
ctlSchema.setBody(UUID.randomUUID().toString());
ctlSchema.setDependencySet(new HashSet<CTLSchema>());
ctlSchema = ctlSchemaDao.save(ctlSchema);
return ctlSchema;
}
protected List<NotificationSchema> generateNotificationSchema(Application app, int count, NotificationTypeDto type) {
List<NotificationSchema> schemas = Collections.emptyList();
try {
if (app == null) {
app = generateApplication(null);
}
NotificationSchema notificationSchema;
schemas = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
notificationSchema = new NotificationSchema();
notificationSchema.setApplication(app);
notificationSchema.setSchema(readSchemaFileAsString("dao/schema/testDataSchema.json"));
notificationSchema.setCreatedUsername("Test User");
notificationSchema.setVersion(i + 1);
notificationSchema.setName("<NAME>");
notificationSchema.setType(type == null ? NotificationTypeDto.SYSTEM : type);
notificationSchema = notificationSchemaDao.save(notificationSchema);
Assert.assertNotNull(notificationSchema);
schemas.add(notificationSchema);
}
} catch (IOException e) {
LOG.error("Can't generate notification schema {}", e);
Assert.fail("Can't generate notification schema." + e.getMessage());
}
return schemas;
}
protected List<ProfileFilter> generateFilter(EndpointProfileSchema schema, ServerProfileSchema srvSchema, EndpointGroup group, int count, UpdateStatus status) {
return generateFilter(generateApplication(null), schema, srvSchema, group, count, status);
}
protected List<ProfileFilter> generateFilter(Application app, EndpointProfileSchema schema, ServerProfileSchema srvSchema, EndpointGroup group, int count, UpdateStatus status) {
if (schema == null) {
schema = generateProfSchema(app, 1).get(0);
}
if (srvSchema == null) {
srvSchema = new ServerProfileSchema(generateServerProfileSchema(app.getStringId(), app.getTenant().getStringId()));
}
if (group == null) {
group = generateEndpointGroup(app, null);
}
List<ProfileFilter> filters = new ArrayList<>();
for (int i = 0; i < count; i++) {
ProfileFilter dto = new ProfileFilter();
dto.setId(null);
dto.setStatus(status != null ? status : UpdateStatus.INACTIVE);
dto.setEndpointGroup(group);
dto.setEndpointProfileSchema(schema);
dto.setServerProfileSchema(srvSchema);
dto.setSequenceNumber(i);
dto.setApplication(app);
ProfileFilter saved = profileFilterDao.save(dto);
Assert.assertNotNull(saved);
filters.add(saved);
}
return filters;
}
protected List<ProfileFilter> generateFilterWithoutSchemaGeneration(EndpointProfileSchema schema, ServerProfileSchema srvSchema, EndpointGroup group, int count, UpdateStatus status) {
Application app = null;
if (schema != null) {
app = schema.getApplication();
} else if (srvSchema != null) {
app = srvSchema.getApplication();
}
if (group == null) {
group = generateEndpointGroup(app, null);
}
List<ProfileFilter> filters = new ArrayList<>();
for (int i = 0; i < count; i++) {
ProfileFilter dto = new ProfileFilter();
dto.setId(null);
dto.setStatus(status != null ? status : UpdateStatus.INACTIVE);
dto.setEndpointGroup(group);
dto.setEndpointProfileSchema(schema);
dto.setServerProfileSchema(srvSchema);
dto.setSequenceNumber(i);
dto.setApplication(app);
ProfileFilter saved = profileFilterDao.save(dto);
Assert.assertNotNull(saved);
filters.add(saved);
}
return filters;
}
protected Topic generateTopic(Application app, TopicTypeDto type, String topicName) {
Topic topic = new Topic();
if (topicName != null && !topicName.isEmpty()) {
topic.setName(topicName);
} else {
topic.setName("Generated Topic name");
}
if (app == null) {
app = generateApplication(null);
}
topic.setApplication(app);
if (type == null) {
type = TopicTypeDto.MANDATORY;
}
topic.setType(type);
return topicDao.save(topic);
}
protected LogAppender generateLogAppender(Application app) {
LogAppender appender = new LogAppender();
if (app == null) {
app = generateApplication(null);
}
appender.setApplication(app);
appender.setMinLogSchemaVersion(1);
appender.setMaxLogSchemaVersion(2);
return appenderDao.save(appender);
}
protected List<EventClassFamily> generateEventClassFamily(Tenant tenant, int count) {
int eventSchemaVersionsCount = 2;
if (tenant == null) {
tenant = generateTenant();
}
EventClassFamily eventClassFamily;
List<EventClassFamily> eventClassFamilies = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
eventClassFamily = new EventClassFamily();
eventClassFamily.setTenant(tenant);
eventClassFamily.setClassName("Test ClassName" + RANDOM.nextInt());
eventClassFamily.setCreatedTime(new Date().getTime());
eventClassFamily.setCreatedUsername("Test Username");
eventClassFamily.setDescription("Test Description");
eventClassFamily.setName("Test Name" + RANDOM.nextInt());
eventClassFamily.setNamespace("Test Namespace");
List<EventSchemaVersion> eventSchemaVersions = new ArrayList<>(eventSchemaVersionsCount);
for (int j = 0; j < eventSchemaVersionsCount; j++) {
EventSchemaVersion eventSchemaVersion = new EventSchemaVersion();
eventSchemaVersion.setCreatedTime(new Date().getTime());
eventSchemaVersion.setCreatedUsername("Test Username");
eventSchemaVersion.setSchema("Test Schema" + RANDOM.nextInt());
eventSchemaVersion.setVersion(1);
eventSchemaVersions.add(eventSchemaVersion);
}
eventClassFamily.setSchemas(eventSchemaVersions);
eventClassFamily = eventClassFamilyDao.save(eventClassFamily);
Assert.assertNotNull(eventClassFamily);
eventClassFamilies.add(eventClassFamily);
}
return eventClassFamilies;
}
protected List<EventClass> generateEventClass(Tenant tenant, EventClassFamily eventClassFamily, int count) {
if (tenant == null) {
tenant = generateTenant();
}
if (eventClassFamily == null) {
eventClassFamily = generateEventClassFamily(tenant, 1).get(0);
}
EventClass eventClass;
List<EventClass> eventClasses = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
eventClass = new EventClass();
eventClass.setTenant(tenant);
eventClass.setEcf(eventClassFamily);
eventClass.setFqn("Test FQN" + RANDOM.nextInt());
eventClass.setSchema("Test Schema" + RANDOM.nextInt());
eventClass.setType(EventClassType.EVENT);
eventClass.setVersion(1);
eventClass = eventClassDao.save(eventClass);
Assert.assertNotNull(eventClass);
eventClasses.add(eventClass);
}
return eventClasses;
}
protected List<ApplicationEventFamilyMap> generateApplicationEventFamilyMap(Tenant tenant, Application application,
EventClassFamily eventClassFamily, int count, boolean generateApplicationEventMaps) {
int applicationEventMapCount = 2;
if (tenant == null) {
tenant = generateTenant();
}
if (application == null) {
application = generateApplication(tenant);
}
if (eventClassFamily == null) {
eventClassFamily = generateEventClassFamily(tenant, 1).get(0);
}
ApplicationEventFamilyMap applicationEventFamilyMap;
List<ApplicationEventFamilyMap> applicationEventFamilyMaps = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
applicationEventFamilyMap = new ApplicationEventFamilyMap();
applicationEventFamilyMap.setApplication(application);
applicationEventFamilyMap.setCreatedTime(new Date().getTime());
applicationEventFamilyMap.setCreatedUsername("Test Username");
applicationEventFamilyMap.setEcf(eventClassFamily);
applicationEventFamilyMap.setVersion(1);
if (generateApplicationEventMaps) {
List<ApplicationEventMap> applicationEventMaps = new ArrayList<>(applicationEventMapCount);
for (int j = 0; j < applicationEventMapCount; j++) {
ApplicationEventMap applicationEventMap = new ApplicationEventMap();
applicationEventMap.setAction(ApplicationEventAction.BOTH);
applicationEventMap.setFqn("Test FQN" + RANDOM.nextInt());
applicationEventMap.setEventClass(generateEventClass(tenant, eventClassFamily, 1).get(0));
applicationEventMaps.add(applicationEventMap);
}
applicationEventFamilyMap.setEventMaps(applicationEventMaps);
}
applicationEventFamilyMap = applicationEventFamilyMapDao.save(applicationEventFamilyMap);
Assert.assertNotNull(applicationEventFamilyMap);
applicationEventFamilyMaps.add(applicationEventFamilyMap);
}
return applicationEventFamilyMaps;
}
protected List<LogSchema> generateLogSchema(Tenant tenant, Application application, int count) {
List<LogSchema> schemas = Collections.emptyList();
try {
if (application == null) {
application = generateApplication(tenant);
}
LogSchema schema;
schemas = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
schema = new LogSchema();
schema.setApplication(application);
schema.setSchema(new KaaSchemaFactoryImpl().createDataSchema(readSchemaFileAsString("dao/schema/testDataSchema.json")).getRawSchema());
schema.setCreatedUsername("Test User");
schema.setName("Test Name");
schema = logSchemaDao.save(schema);
Assert.assertNotNull(schema);
schemas.add(schema);
}
} catch (IOException e) {
LOG.error("Can't generate log schemas {}", e);
Assert.fail("Can't generate log schemas.");
}
return schemas;
}
protected String readSchemaFileAsString(String filePath) throws IOException {
try {
URL url = Thread.currentThread().getContextClassLoader().getResource(filePath);
if (url != null) {
Path path = Paths.get(url.toURI());
byte[] bytes = Files.readAllBytes(path);
return new String(bytes);
}
} catch (URISyntaxException e) {
LOG.error("Can't generate configs {}", e);
}
return null;
}
/**
 * Creates and saves a user verifier for the given application.
 * A null application is replaced by a freshly generated one; a null token
 * falls back to the default "token".
 *
 * @param app           target application, or null to generate one
 * @param verifierToken verifier token, or null for the default
 * @return the persisted verifier
 */
protected UserVerifier generateUserVerifier(Application app, String verifierToken) {
    if (app == null) {
        app = generateApplication(null);
    }
    UserVerifier verifier = new UserVerifier();
    verifier.setName("GENERATED test Verifier");
    verifier.setApplication(app);
    verifier.setVerifierToken(verifierToken == null ? "token" : verifierToken);
    return verifierDao.save(verifier);
}
/**
 * Creates and saves an SDK profile for the given application.
 * A null application is replaced by a freshly generated one; a null token
 * falls back to the default "token".
 *
 * @param application target application, or null to generate one
 * @param token       SDK token, or null for the default
 * @return the persisted profile
 */
protected SdkProfile generateSdkProfile(Application application, String token) {
    if (application == null) {
        application = this.generateApplication(null);
    }
    SdkProfile entity = new SdkProfile();
    entity.setApplication(application);
    entity.setToken(token == null ? "token" : token);
    return sdkProfileDao.save(entity);
}
}
|
/*! THIS FILE IS AUTO-GENERATED */
import { jobs_v2 } from './v2';
// Map of every supported API version to its client class.
export declare const VERSIONS: {
  'v2': typeof jobs_v2.Jobs;
};
// Overloads: obtain a client either by version string or by an options object.
export declare function jobs(version: 'v2'): jobs_v2.Jobs;
export declare function jobs(options: jobs_v2.Options): jobs_v2.Jobs;
|
<filename>app/src/main/java/com/ayoubfletcher/consentsdkexample/SplashActivity.java
package com.ayoubfletcher.consentsdkexample;
import android.content.Intent;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.TextView;
import com.ayoubfletcher.consentsdk.ConsentSDK;
/**
 * Splash screen: initializes the consent SDK, shows an animated "Loading..."
 * label, and navigates to {@code MainActivity} once consent has been resolved.
 */
public class SplashActivity extends AppCompatActivity {

    // BUG FIX: a single main-thread handler held as a field so pending
    // callbacks can be cancelled in onDestroy(). The original created
    // anonymous Handlers whose callbacks (including a runnable that re-posts
    // itself every 500 ms forever) kept firing after the activity finished,
    // leaking the activity and potentially starting MainActivity late.
    private final Handler handler = new Handler();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_splash);
        // // Initialize a dummy banner using the default test banner id provided by google to get the device id from logcat using 'Ads' tag
        // ConsentSDK.initDummyBanner(this);
        // Initialize ConsentSDK
        ConsentSDK consentSDK = new ConsentSDK.Builder(this)
                .addTestDeviceId("your device id from logcat") // Add your test device id "Remove addTestDeviceId on production!"
                .addCustomLogTag("CUSTOM_TAG") // Add custom tag default: ID_LOG
                .addPrivacyPolicy("https://your.privacy.url/") // Add your privacy policy url
                .addPublisherId("pub-0123456789012345") // Add your admob publisher id
                .build();
        // To check the consent and to move to MainActivity after everything is fine :).
        consentSDK.checkConsent(new ConsentSDK.ConsentCallback() {
            @Override
            public void onResult(boolean isRequestLocationInEeaOrUnknown) {
                goToMain();
            }
        });
        // Loading indicator
        loadingHandler();
    }

    @Override
    protected void onDestroy() {
        // Cancel the delayed navigation post and the self-re-posting loading
        // runnable so nothing outlives this activity.
        handler.removeCallbacksAndMessages(null);
        super.onDestroy();
    }

    // Go to MainActivity
    private void goToMain() {
        // Wait few seconds just to show my stunning loading indication, you like it right :P.
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                // Go to main after the consent is done.
                Intent intent = new Intent(getApplicationContext(), MainActivity.class);
                startActivity(intent);
                finish();
            }
        }, 3000);
    }

    /**
     * Some stuff to tell that your app is loading and it's not lagging.
     */
    // Loading indicator handler
    private void loadingHandler() {
        final TextView loadingTxt = findViewById(R.id.loadingTxt);
        final Runnable runnable = new Runnable() {
            @Override
            public void run() {
                // Animate by appending a dot, resetting after ~10 characters.
                if (loadingTxt.getText().length() > 10) {
                    loadingTxt.setText("Loading ");
                } else {
                    loadingTxt.setText(loadingTxt.getText() + ".");
                }
                handler.postDelayed(this, 500);
            }
        };
        handler.postDelayed(runnable, 500);
    }
}
|
# -*- coding: utf-8 -*-
"""
This software is governed by the CeCILL-B license under French law and
abiding by the rules of distribution of free software. You can use,
modify and/ or redistribute the software under the terms of the CeCILL-B
license as circulated by CEA, CNRS and INRIA at the following URL
"http://www.cecill.info".
As a counterpart to the access to the source code and rights to copy,
modify and redistribute granted by the license, users are provided only
with a limited warranty and the software's author, the holder of the
economic rights, and the successive licensors have only limited
liability.
In this respect, the user's attention is drawn to the risks associated
with loading, using, modifying and/or developing or reproducing the
software by the user in light of its specific status of free software,
that may mean that it is complicated to manipulate, and that also
therefore means that it is reserved for developers and experienced
professionals having in-depth computer knowledge. Users are therefore
encouraged to load and test the software's suitability as regards their
requirements in conditions enabling the security of their systems and/or
data to be ensured and, more generally, to use and operate it in the
same conditions as regards security.
The fact that you are presently reading this means that you have had
knowledge of the CeCILL-B license and that you accept its terms.
"""
import numpy as np
import SimpleITK as sitk
import scipy.ndimage
import sys
from ast import literal_eval as make_tuple
sys.path.insert(0, './utils')
sys.path.insert(0, './model')
from utils3d import shave3D, imadjust3D, modcrop3D
from store2hdf5 import store2hdf53D
from patches import array_to_patches
from InterSRReCNN3D_net import InterSRReCNN3D_net, InterSRReCNN3D_deploy
import argparse
if __name__ == '__main__':
    # --- Command-line interface -------------------------------------------
    # -f/-i/-o use action='append' and are consumed pairwise by index, so the
    # number of -f and -o occurrences must match (checked below).
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', '--reference', help='Reference HR image filename (required)', type=str, action='append', required = True)
    parser.add_argument('-i', '--intermodality', help='Intermodality HR image filename (required)', type=str, action='append', required = True)
    parser.add_argument('-o', '--output', help='Name of output HDF5 files (required)', type=str, action='append', required = True)
    parser.add_argument('-s', '--scale', help='Scale factor (default = 2,2,2). Append mode: -s 2,2,2 -s 3,3,3 ', type=str, action='append')
    parser.add_argument('--stride', help='Indicates step size at which extraction shall be performed (default=10)', type=int, default=10)
    parser.add_argument('-p','--patchsize', help='Indicates input patch size for extraction', type=int, default=21)
    parser.add_argument('-b','--batch', help='Indicates batch size for HDF5 storage', type=int, default=64)
    parser.add_argument('-l','--layers', help='Indicates number of layers of network (default=10)', type=int, default=10)
    parser.add_argument('-k','--kernel', help='Indicates size of filter (default=3)', type=int, default=3)
    parser.add_argument('--numkernel', help='Indicates number of filters (default=64)', type=int, default=64)
    parser.add_argument('-r','--residual', help='Using residual learning or None (default=True)', type=str, default='True')
    parser.add_argument('--border', help='Border to remove (default=10,10,0)', type=str, default='10,10,0')
    parser.add_argument('--order', help='Order of spline interpolation (default=3) ', type=int, default=3)
    parser.add_argument('--samples', help='Indicates limit of samples in HDF5 file (optional)', type=int)
    parser.add_argument('--sigma', help='Standard deviation (sigma) of Gaussian blur (default=1)', type=int, default=1)
    parser.add_argument('-t', '--text', help='Name of a text (.txt) file which contains HDF5 file names (default: model/train.txt)', type=str, default='model/train.txt')
    parser.add_argument('-n', '--netname', help='Name of train netwotk protocol (default=model/SRReCNN3D_net.prototxt)', type=str, default='model/SRReCNN3D_net.prototxt')
    parser.add_argument('-d', '--deployname', help='Name of deploy files in order to deploy the parameters of SRReCNN3D_net without reading HDF5 files (default=model/SRReCNN3D_deploy.prototxt)', type=str, default='model/SRReCNN3D_deploy.prototxt')
    args = parser.parse_args()

    # ==== Parser ===
    # Check number of input and output name:
    if len(args.reference) != len(args.output) :
        # Python 2 raise syntax — this file is Python 2 (print statements below).
        raise AssertionError, 'Number of inputs and outputs should be matched !'

    PatchSize = args.patchsize
    # Zero-padding that keeps the feature-map size constant for a k x k kernel.
    padding = int((args.kernel - 1)/float(2))

    # Check scale: default is one isotropic 2x2x2 factor; scalar -s values are
    # broadcast to isotropic (s, s, s) tuples.
    if args.scale is None:
        args.scale = [(2,2,2)]
    else:
        for idx in range(0,len(args.scale)):
            args.scale[idx] = make_tuple(args.scale[idx])
            if np.isscalar(args.scale[idx]):
                args.scale[idx] = (args.scale[idx],args.scale[idx],args.scale[idx])
            else:
                if len(args.scale[idx])!=3:
                    raise AssertionError, 'Not support this scale factor !'

    # Check residual learning mode (string flag -> bool)
    if args.residual == 'True':
        residual = True
    elif args.residual == 'False':
        residual = False
    else:
        raise AssertionError, 'Not support this residual mode. Try True or False !'

    # Check border removing (scalar broadcast as for scale above)
    border = make_tuple(args.border)
    if np.isscalar(border):
        border = (border,border,border)
    else:
        if len(border)!=3:
            raise AssertionError, 'Not support this scale factor !'

    # Writing a text (.txt) file which contains HDF5 file names
    OutFile = open(str(args.text), "w")
# ============ Processing images ===========================================
for i in range(0,len(args.reference)):
# initialization : n-dimensional Caffe supports data's form : [numberOfBatches,channels,heigh,width,depth]
HDF5Datas = []
HDF5Labels = []
HDF5Refs = []
# Read reference image
ReferenceName = args.reference[i]
IntermodalityName = args.intermodality[i]
print '================================================================'
print 'Processing image : ', ReferenceName
print 'Intermodality image : ', IntermodalityName
# Read NIFTI
ReferenceNifti = sitk.ReadImage(ReferenceName)
IntermodalityNifti = sitk.ReadImage(IntermodalityName)
# Get data from NIFTI
ReferenceImage = np.swapaxes(sitk.GetArrayFromImage(ReferenceNifti),0,2).astype('float32')
IntermodalityImage = np.swapaxes(sitk.GetArrayFromImage(IntermodalityNifti),0,2).astype('float32')
# Normalization
ReferenceImage = imadjust3D(ReferenceImage,[0,1])
IntermodalityImage = imadjust3D(IntermodalityImage,[0,1])
# ===== Generate input LR image =====
# Blurring
BlurReferenceImage = scipy.ndimage.filters.gaussian_filter(ReferenceImage,
sigma = args.sigma)
for scale in args.scale:
print 'With respect to scale factor x', scale, ' : '
# Modcrop to scale factor
BlurReferenceImage = modcrop3D(BlurReferenceImage,scale)
ReferenceImage = modcrop3D(ReferenceImage,scale)
# Downsampling
LowResolutionImage = scipy.ndimage.zoom(BlurReferenceImage,
zoom = (1/float(idxScale) for idxScale in scale),
order = args.order)
# Cubic Interpolation
InterpolatedImage = scipy.ndimage.zoom(LowResolutionImage,
zoom = scale,
order = args.order)
# Shave border
LabelImage = shave3D(ReferenceImage, border)
DataImage = shave3D(InterpolatedImage, border)
RefImage = shave3D(IntermodalityImage, border)
# Extract 3D patches
DataPatch = array_to_patches(DataImage,
patch_shape=(PatchSize,PatchSize,PatchSize),
extraction_step = args.stride ,
normalization=False)
print 'for the interpolated low-resolution patches of training phase.'
LabelPatch = array_to_patches(LabelImage,
patch_shape=(PatchSize,PatchSize,PatchSize),
extraction_step = args.stride ,
normalization=False)
print 'for the reference high-resolution patches of training phase.'
RefPatch = array_to_patches(RefImage,
patch_shape=(PatchSize,PatchSize,PatchSize),
extraction_step = args.stride ,
normalization=False)
print 'for the reference intermodality patches of training phase.'
# Append array
HDF5Datas.append(DataPatch)
HDF5Labels.append(LabelPatch)
HDF5Refs.append(RefPatch)
# List type to array numpy
HDF5Datas = np.asarray(HDF5Datas).reshape(-1,PatchSize,PatchSize,PatchSize)
HDF5Labels = np.asarray(HDF5Labels).reshape(-1,PatchSize,PatchSize,PatchSize)
HDF5Refs = np.asarray(HDF5Refs).reshape(-1,PatchSize,PatchSize,PatchSize)
# Add channel axis !
HDF5MultiDatas = np.stack((HDF5Datas,HDF5Refs))
HDF5MultiDatas = np.swapaxes(HDF5MultiDatas,0,1)
HDF5Labels = HDF5Labels[:,np.newaxis,:,:,:]
# Rearrange
np.random.seed(0) # makes the random numbers predictable
RandomOrder = np.random.permutation(HDF5MultiDatas.shape[0])
HDF5MultiDatas = HDF5MultiDatas[RandomOrder,:,:,:,:]
HDF5Labels = HDF5Labels[RandomOrder,:,:,:,:]
# ============================================================================================
# Crop data to desired number of samples
if args.samples :
HDF5MultiDatas = HDF5MultiDatas[:args.samples ,:,:,:,:]
HDF5Labels = HDF5Labels[:args.samples ,:,:,:,:]
# Writing to HDF5
hdf5name = args.output[i]
print '*) Writing to HDF5 file : ', hdf5name
StartLocation = {'dat':(0,0,0,0,0), 'lab': (0,0,0,0,0)}
CurrentDataLocation = store2hdf53D(filename=hdf5name,
datas=HDF5MultiDatas,
labels=HDF5Labels,
startloc=StartLocation,
chunksz=args.batch )
# Reading HDF5 file
import h5py
with h5py.File(hdf5name,'r') as hf:
udata = hf.get('data')
print 'Shape of interpolated low-resolution patches:', udata.shape
print 'Chunk (batch) of interpolated low-resolution patches:', udata.chunks
ulabel = hf.get('label')
print 'Shape of reference high-resolution patches:', ulabel.shape
print 'Chunk (batch) of reference high-resolution patches:', ulabel.chunks
# Writing a text file which contains HDF5 file names
OutFile.write(hdf5name)
OutFile.write('\n')
# =========== Generating net ==================
with open(args.netname , 'w') as f:
f.write(str(InterSRReCNN3D_net(args.text, args.batch, args.layers, args.kernel, args.numkernel, padding, residual)))
InterSRReCNN3D_deploy(args.netname, args.deployname)
|
public static void main(String[] args) {
String string1 = "listen";
String string2 = "silent";
int[] arr = new int[26];
for (int i = 0; i < string1.length(); i++)
arr[string1.charAt(i) - 'a']++;
for (int i = 0; i < string2.length(); i++)
arr[string2.charAt(i) - 'a']--;
// Print frequencies
for (int i = 0; i < 26; i++)
System.out.println((char)(i+'a')+": "+arr[i]);
// Check if strings are anagrams
boolean isAnagram = true;
for (int i = 0; i < 26; i++) {
if (arr[i] != 0) {
isAnagram = false;
break;
}
}
System.out.println("Are the strings anagrams? " + isAnagram);
} |
-- Top 10 products ranked by sales, best sellers first.
SELECT product_id, product_name, sales
FROM products
ORDER BY sales DESC
LIMIT 10;
#!/bin/bash
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# shellcheck disable=SC1090
set -eo pipefail
# Deploys an ARM template that only needs the APIM service-name parameter.
# Globals: resourceGroupName, apiManagementName (validated before use below)
# Arguments: $1 - path to the ARM template file
deployResource() {
    local templateName=$1
    echo "Deploying with resource parameters: Resource = $resourceGroupName, Template = $templateName, API Instance = $apiManagementName"
    # stdout is discarded; az's stderr still surfaces failures (set -eo pipefail)
    az group deployment create --resource-group "$resourceGroupName" --template-file "$templateName" --parameters apimServiceName="$apiManagementName" 1>/dev/null
    echo "  Completed"
}
# Same as deployResource, but also passes the function-app backend name to
# templates that reference it.
# Globals: resourceGroupName, apiManagementName, functionAppName
# Arguments: $1 - path to the ARM template file
deployResourceWithFunctionName() {
    local templateName=$1
    echo "Deploying with resource parameters: Resource = $resourceGroupName, Template = $templateName, API Instance = $apiManagementName"
    az group deployment create --resource-group "$resourceGroupName" --template-file "$templateName" --parameters functionName="$functionAppName" apimServiceName="$apiManagementName" 1>/dev/null
    echo "  Completed"
}
# Prints usage help and aborts the script with a non-zero status.
# FIX: corrected user-facing typos "Managment" -> "Management" and
# "backed" -> "backend".
exitWithUsageInfo() {
    echo "
Usage: $0 -a <API management name> -t <Template Location> -r <resource group> -f <function app name>
where
API management name - The target API Management instance name.
Template location - The location for the templates.
Resource group - The resource group that the REST API needs to be deployed to.
Function app name - The backend for the apis.
"
    exit 1
}
# Read script arguments
while getopts ":a:f:r:t:" option; do
    case $option in
    a) apiManagementName=${OPTARG} ;;
    f) functionAppName=${OPTARG} ;;
    r) resourceGroupName=${OPTARG} ;;
    t) apiTemplates=${OPTARG} ;;
    *) exitWithUsageInfo ;;
    esac
done

# All four parameters are mandatory.
if [[ -z $apiManagementName ]] || [[ -z $apiTemplates ]] || [[ -z $resourceGroupName ]] || [[ -z $functionAppName ]]; then
    exitWithUsageInfo
fi

echo "Starting deployment for REST api..."
# NOTE(review): templates are deployed in a fixed order; presumably shared
# resources (named values, version sets, products, loggers, backends) must
# exist before the API templates that reference them — confirm before reordering.
echo "Deploying named values"
deployResource "$apiTemplates/model-namedValues.template.json"
echo "Deploying api version sets"
deployResource "$apiTemplates/model-apiVersionSets.template.json"
echo "Deploying products"
deployResource "$apiTemplates/model-products.template.json"
echo "Deploying Loggers"
deployResource "$apiTemplates/model-loggers.template.json"
echo "Deploying backends"
deployResourceWithFunctionName "$apiTemplates/model-backends.template.json"
echo "Deploying authorization servers"
deployResource "$apiTemplates/model-authorizationServers.template.json"
echo "Deploying api"
deployResourceWithFunctionName "$apiTemplates/model-accessibility-insight-service-scan-api-api.template.json"
|
package com.mzapps.app.cotoflix.Model;
/**
 * Holder for the TMDB API key.
 * <p>
 * SECURITY NOTE(review): a live-looking API key is hard-coded and committed to
 * source control. Treat it as compromised — rotate the key and load it from
 * secure configuration (e.g. BuildConfig field, gradle property, or backend)
 * instead of shipping it in the source.
 */
public class API_KEY {
    private String tMBDBAPIKEY = "d08157e78b7478bea59e97af188b7054";

    /** Returns the TMDB API key. */
    public String getTMBDBAPIKEY() {
        return tMBDBAPIKEY;
    }

    /** Overrides the TMDB API key. */
    public void setTMBDBAPIKEY(String tMBDBAPIKEY) {
        this.tMBDBAPIKEY = tMBDBAPIKEY;
    }
}
<filename>src/main/java/br/com/zup/propostas/compartilhado/exception/ApiErrorsHandler.java
package br.com.zup.propostas.compartilhado.exception;
import com.fasterxml.jackson.databind.exc.InvalidFormatException;
import feign.FeignException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.http.converter.HttpMessageNotReadableException;
import org.springframework.validation.BindException;
import org.springframework.validation.FieldError;
import org.springframework.validation.ObjectError;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import org.springframework.web.server.ResponseStatusException;
import java.util.List;
/**
 * Centralized REST error handling: converts validation, deserialization,
 * Feign and status exceptions into {@code ApiErrors} payloads.
 */
@RestControllerAdvice
public class ApiErrorsHandler {

    @Autowired
    private MessageSource messageSource;

    /** Bean-validation failures on @RequestBody arguments -> 400 with all errors. */
    @ExceptionHandler(MethodArgumentNotValidException.class)
    @ResponseStatus(HttpStatus.BAD_REQUEST)
    public ApiErrors methodArgumentNotValidExceptionHandler(MethodArgumentNotValidException e) {
        List<ObjectError> globalErrors = e.getBindingResult().getGlobalErrors();
        List<FieldError> fieldErrors = e.getBindingResult().getFieldErrors();
        return buildApiErrors(globalErrors, fieldErrors);
    }

    /** Same handling for binding failures on query/form arguments. */
    @ExceptionHandler(BindException.class)
    @ResponseStatus(HttpStatus.BAD_REQUEST)
    public ApiErrors bindExceptionHandler(BindException e) {
        List<FieldError> fieldErrors = e.getBindingResult().getFieldErrors();
        List<ObjectError> globalErrors = e.getBindingResult().getGlobalErrors();
        return buildApiErrors(globalErrors, fieldErrors);
    }

    /**
     * Propagates the downstream service's HTTP status for Feign failures.
     * <p>
     * BUG FIX: the parameter was declared as the narrower
     * {@code FeignException.FeignServerException} while the annotation
     * registered the handler for every {@code FeignException}, so client-side
     * (4xx) Feign errors could not be bound to the parameter. It now matches
     * the annotation; the unused local {@code ApiErrors} was removed.
     */
    @ExceptionHandler(FeignException.class)
    public ResponseEntity<ApiErrors> feignServerExceptionHandler(FeignException e) {
        return responseStatusExceptionHandler(
                new ResponseStatusException(HttpStatus.valueOf(e.status())));
    }

    /**
     * Unreadable request bodies (malformed JSON, wrong value types) -> 400.
     * <p>
     * BUG FIX: the cause was unconditionally cast to
     * {@code InvalidFormatException}, throwing {@code ClassCastException} for
     * other causes (e.g. truncated JSON). Non-format causes now produce the
     * generic message used elsewhere in this handler.
     */
    @ExceptionHandler(HttpMessageNotReadableException.class)
    @ResponseStatus(HttpStatus.BAD_REQUEST)
    public ApiErrors handleHttpMessageNotReadableException(HttpMessageNotReadableException e) {
        ApiErrors apiErrors = new ApiErrors();
        if (e.getCause() instanceof InvalidFormatException) {
            InvalidFormatException cause = (InvalidFormatException) e.getCause();
            apiErrors.addGlobalError("Ocorreu um erro, pois o valor " + cause.getValue() + " não é válido.");
        } else {
            apiErrors.addGlobalError("Não foi possível processar os dados enviados.");
        }
        return apiErrors;
    }

    /** Uses the exception's reason when present, otherwise a default message. */
    @ExceptionHandler(ResponseStatusException.class)
    public ResponseEntity<ApiErrors> responseStatusExceptionHandler(ResponseStatusException e) {
        ApiErrors apiErrors = new ApiErrors();
        String reason = e.getReason() != null ? e.getReason() : "Não foi possível processar os dados enviados.";
        apiErrors.addGlobalError(reason);
        return ResponseEntity.status(e.getStatus()).body(apiErrors);
    }

    /** Collects locale-resolved messages for all global and field errors. */
    private ApiErrors buildApiErrors(List<ObjectError> globalErrors, List<FieldError> fieldErrors) {
        ApiErrors apiErrors = new ApiErrors();
        globalErrors.forEach(error -> apiErrors.addGlobalError(getMessage(error)));
        fieldErrors.forEach(error -> apiErrors.addFieldError(error.getField(), getMessage(error)));
        return apiErrors;
    }

    /** Resolves the error's message against the MessageSource for the current locale. */
    private String getMessage(ObjectError error) {
        return messageSource.getMessage(error, LocaleContextHolder.getLocale());
    }
}
|
from hashlib import sha256
from ecdsa import VerifyingKey, SECP256k1
from ecdsa.numbertheory import inverse_mod
def aggregate_public_keys(individual_public_keys):
    # NOTE(review): this routine looks cryptographically unsound and should not
    # be used for real key aggregation without expert review:
    #   - sum(..., SECP256k1.generator) folds the generator point G into the
    #     total, so combined_point is G + sum(points), not sum(points);
    #   - step 4 reinterprets the same inputs as big-endian *integers* via
    #     int.from_bytes, mixing group elements with scalars — confirm what
    #     type the caller actually passes (points vs. byte strings);
    #   - combined_public_key_hash (step 3) is computed but never used;
    #   - naive sums of public keys are subject to rogue-key attacks; a vetted
    #     scheme such as MuSig2 should be used instead.
    # Step 1: Compute the combined public key point
    combined_point = sum(individual_public_keys, SECP256k1.generator)
    # Step 2: Compute the combined public key
    combined_public_key = combined_point.to_string()
    # Step 3: Compute the combined public key hash
    combined_public_key_hash = sha256(combined_public_key).digest()
    # Step 4: Compute the combined public key scalar
    combined_scalar = sum([int.from_bytes(pk, 'big') for pk in individual_public_keys]) % SECP256k1.order
    # Step 5: Compute the combined public key inverse
    combined_inverse = inverse_mod(combined_scalar, SECP256k1.order)
    # Step 6: Compute the aggregated public key
    aggregated_public_key = (combined_scalar * combined_point + combined_inverse * SECP256k1.generator).to_string()
    return aggregated_public_key
package dto
// Config is the top-level configuration document, unmarshalled from JSON.
type Config struct {
	// Imports lists additional configuration sources to pull in
	// (by name or path — confirm against the loader).
	Imports []string `json:"imports"`
	// Defaults holds fallback settings applied across packages.
	Defaults Defaults `json:"defaults"`
	// Packages enumerates the packages this configuration describes.
	Packages []Package `json:"packages"`
}
|
#!/bin/bash
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# General purpose method and values for bootstrapping bazel.
set -o errexit

# Directory containing this script, resolved to an absolute path.
DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# Workspace root: two directory levels above this script.
WORKSPACE_DIR="$(dirname $(dirname ${DIR}))"
# Environment-overridable defaults.
JAVA_VERSION=${JAVA_VERSION:-1.7}
BAZELRC=${BAZELRC:-"/dev/null"}
# Lowercased kernel name, e.g. "linux" or "darwin".
PLATFORM="$(uname -s | tr 'A-Z' 'a-z')"

MACHINE_TYPE="$(uname -m)"
MACHINE_IS_64BIT='no'
if [ "${MACHINE_TYPE}" = 'amd64' -o "${MACHINE_TYPE}" = 'x86_64' ]; then
  MACHINE_IS_64BIT='yes'
fi
MACHINE_IS_ARM='no'
if [ "${MACHINE_TYPE}" = 'arm' -o "${MACHINE_TYPE}" = 'armv7l' ]; then
  MACHINE_IS_ARM='yes'
fi

# Accumulated EXIT-trap commands; atexit PREPENDS, so handlers run LIFO.
ATEXIT_=""
function atexit() {
  ATEXIT_="$1; ${ATEXIT_}"
  trap "{ ${ATEXIT_} }" EXIT
}

# Creates a self-cleaning temp directory and publishes it as NEW_TMPDIR.
function tempdir() {
  local tmp=${TMPDIR:-/tmp}
  local DIR="$(mktemp -d ${tmp%%/}/bazel.XXXXXXXX)"
  mkdir -p "${DIR}"
  atexit "rm -fr ${DIR}"
  NEW_TMPDIR="${DIR}"
}
tempdir
OUTPUT_DIR=${NEW_TMPDIR}
# Stderr from the last run_silent command; dumped to stderr on abnormal exit.
errfile=${OUTPUT_DIR}/errors
atexit "if [ -f ${errfile} ]; then cat ${errfile} >&2; fi"
# Name of the phase in progress; echoed on abnormal exit.
phasefile=${OUTPUT_DIR}/phase
atexit "if [ -f ${phasefile} ]; then echo >&2; cat ${phasefile} >&2; fi"

# Runs a command with its output captured to errfile; the capture is only
# shown (by the EXIT trap above) if the command fails before the rm.
function run_silent() {
  echo "${@}" >${errfile}
  # TODO(kchodorow): figure out why this doesn't exit on a non-zero exit code,
  # even though errexit is set.
  "${@}" >>${errfile} 2>&1 || exit $?
  rm ${errfile}
}

# Prints an error message to stderr and aborts.
function fail() {
  echo >&2
  echo "$1" >&2
  exit 1
}

# Echoes its arguments (with escape interpretation) to stderr unless QUIETMODE is set.
function display() {
  if [[ -z "${QUIETMODE}" ]]; then
    echo -e "$@" >&2
  fi
}

# Emits a progress dot and records the current phase name.
function log() {
  echo -n "." >&2
  echo "$1" >${phasefile}
}

# Ends the progress line and clears the recorded phase.
function clear_log() {
  echo >&2
  rm -f ${phasefile}
}
# UTF-8 leaf glyph and ANSI-colored log prefixes used in progress output.
LEAVES="\xF0\x9F\x8D\x83"
INFO="\033[32mINFO\033[0m:"
WARNING="\033[31mWARN\033[0m:"

first_step=1
# Starts a new build-step headline; every step but the first is preceded by a newline.
function new_step() {
  rm -f ${phasefile}
  local new_line=
  if [ -n "${first_step}" ]; then
    first_step=
  else
    new_line="\n"
  fi
  display -n "$new_line$LEAVES $1"
}

# Prints the abbreviated git revision, or nothing outside a git checkout.
function git_sha1() {
  if [ -x "$(which git || true)" ] && [ -d .git ]; then
    git rev-parse --short HEAD 2>/dev/null || true
  fi
}

# md5_file prints "<digest> <path>"; macOS has no md5sum, so use md5 there.
if [[ ${PLATFORM} == "darwin" ]]; then
  function md5_file() {
    echo $(cat $1 | md5) $1
  }
else
  function md5_file() {
    md5sum $1
  }
fi
# Gets the java version from JAVA_HOME
# Sets JAVAC and JAVAC_VERSION with respectively the path to javac and
# the version of javac.
function get_java_version() {
  test -z "$JAVA_HOME" && fail "JDK not found, please set \$JAVA_HOME."
  JAVAC="${JAVA_HOME}/bin/javac"
  [[ -x "${JAVAC}" ]] \
    || fail "JAVA_HOME ($JAVA_HOME) is not a path to a working JDK."
  JAVAC_VERSION=$("${JAVAC}" -version 2>&1)
  # Accepts "javac 1.7" through "javac 1.9" as well as "javac 1.10"+ style
  # version strings; BASH_REMATCH[1] captures the "1.x" part.
  if [[ "$JAVAC_VERSION" =~ ^"javac "(1\.([789]|[1-9][0-9])).*$ ]]; then
    JAVAC_VERSION=${BASH_REMATCH[1]}
  else
    fail "Cannot determine JDK version, please set \$JAVA_HOME."
  fi
}
|
<filename>python-sqlite-sqlalchemy/project/examples/example_3/app/artists/routes.py
from flask import Blueprint
from flask import render_template
from flask import redirect
from flask import url_for
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import InputRequired
from wtforms.validators import ValidationError
from app import db
from app.models import Artist
# Setup the Blueprint
artists_bp = Blueprint(
"artists_bp", __name__, template_folder="templates", static_folder="static"
)
def does_artist_exist(form, field):
    """WTForms validator: reject artist names that already exist.

    `form` is unused but required by the WTForms validator signature.
    Raises ValidationError when a row with the same name is found.
    """
    artist = (
        db.session.query(Artist)
        .filter(Artist.name == field.data)
        .one_or_none()
    )
    if artist is not None:
        raise ValidationError("Artist already exists", field.data)


class CreateArtistForm(FlaskForm):
    """Form with a single required, must-be-unique artist name field."""
    name = StringField(
        label="Artist's Name", validators=[InputRequired(), does_artist_exist]
    )
@artists_bp.route("/")
@artists_bp.route("/artists", methods=["GET", "POST"])
def artists():
    """List all artists and handle creation of a new one (POST-redirect-GET)."""
    form = CreateArtistForm()
    # On a valid POST, persist the new artist and redirect back to the list.
    if form.validate_on_submit():
        db.session.add(Artist(name=form.name.data))
        db.session.commit()
        return redirect(url_for("artists_bp.artists"))
    all_artists = db.session.query(Artist).order_by(Artist.name).all()
    return render_template("artists.html", artists=all_artists, form=form)
|
package com.gmail.chickenpowerrr.ranksync.discord.data;
import com.gmail.chickenpowerrr.ranksync.api.bot.Bot;
import com.gmail.chickenpowerrr.ranksync.api.data.AbstractFileDatabase;
import com.gmail.chickenpowerrr.ranksync.api.data.Properties;
import com.gmail.chickenpowerrr.ranksync.api.rank.Rank;
import com.gmail.chickenpowerrr.ranksync.api.rank.RankResource;
import java.util.Collection;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import lombok.Getter;
/**
 * This class uses a YAML file to save the synchronization data
 *
 * @author Chickenpowerrr
 * @since 1.2.0
 */
public class YamlDatabase extends AbstractFileDatabase<YamlFile> {

  @Getter private final RankResource rankResource;
  // Only used to namespace platform-specific keys inside the players file.
  private final Bot<?, ?> bot;

  /**
   * @param bot the Discord Bot that's currently running
   * @param properties contains the rank resource
   * @param basePath the path to the file
   */
  public YamlDatabase(Bot bot, Properties properties, String basePath) {
    super(properties, new YamlFile(basePath, "players"));
    this.bot = bot;
    // The rank resource is mandatory: every rank lookup below delegates to it.
    if (properties.has("rank_resource")) {
      this.rankResource = (RankResource) properties.getObject("rank_resource");
    } else {
      throw new IllegalStateException("This resource needs a rank resource");
    }
  }

  /**
   * Returns the id that represents a player on the other service by Discord identifier
   *
   * @param playerId the Discord identifier
   * @return a CompletableFuture that will be completed whenever the id of the other service has
   * been found
   */
  @Override
  public CompletableFuture<UUID> getUuid(String playerId) {
    CompletableFuture<UUID> completableFuture = CompletableFuture
        .supplyAsync(() -> {
          String uuid = super.players.getValue(playerId);
          return uuid == null ? null : UUID.fromString(uuid);
        });
    // NOTE(review): exceptionally(...) creates a derived future that is thrown
    // away; this only prints the stack trace, while the future handed to the
    // caller still completes exceptionally — confirm that is the intent.
    completableFuture.exceptionally(throwable -> {
      throwable.printStackTrace();
      return null;
    });
    return completableFuture;
  }

  /**
   * Sets the id that represents a player on the other service by their Discord identifier
   *
   * @param playerId the Discord identifier
   * @param uuid the id that represents the player on the other service
   * @return a CompletableFuture that will be completed whenever the id of the other service has
   * been linked to this service
   */
  @Override
  public CompletableFuture<Void> setUuid(String playerId, UUID uuid) {
    CompletableFuture<Void> completableFuture = CompletableFuture.supplyAsync(() -> {
      if (uuid != null) {
        // Store the mapping in both directions: playerId -> uuid and
        // "<uuid>.<platform>" -> playerId.
        super.players.setValue(playerId, uuid.toString());
        super.players.setValue(uuid.toString() + "." + this.bot.getPlatform(), playerId);
      } else {
        // A null uuid unlinks the player: remove both directions.
        String syncedUuid = super.players.getValue(playerId);
        super.players.removeValue(playerId);
        if (syncedUuid != null) {
          super.players.removeValue(syncedUuid + "." + this.bot.getPlatform());
        }
      }
      super.players.save();
      return null;
    });
    // NOTE(review): as in getUuid, this only logs; the returned future still
    // completes exceptionally for callers.
    completableFuture.exceptionally(throwable -> {
      throwable.printStackTrace();
      return null;
    });
    return completableFuture;
  }

  /**
   * Returns the Discord identifier linked to the UUID
   *
   * @param uuid the id that represents the player on the other service
   * @return the Discord identifier linked to the UUID
   */
  @Override
  public CompletableFuture<String> getPlayerId(UUID uuid) {
    return CompletableFuture
        .supplyAsync(() -> super.players.getValue(uuid.toString() + "." + this.bot.getPlatform()));
  }

  /**
   * Returns the ranks of the rank resource
   *
   * @param uuid the id that represents the player on the other service
   * @return the ranks of the rank resource
   */
  @Override
  public CompletableFuture<Collection<Rank>> getRanks(UUID uuid) {
    return this.rankResource.getRanks(uuid);
  }

  /**
   * Returns if the rank is a valid rank according to the rank resource
   *
   * @param rankName the name of the Rank
   * @return if the rank is a valid rank according to the rank resource
   */
  @Override
  public CompletableFuture<Boolean> isValidRank(String rankName) {
    CompletableFuture<Boolean> future = CompletableFuture
        .supplyAsync(() -> this.rankResource.isValidRank(rankName));
    future.exceptionally(throwable -> {
      throwable.printStackTrace();
      return null;
    });
    return future;
  }

  /**
   * Returns all of the ranks the rank resource contains
   */
  @Override
  public Collection<String> getAvailableRanks() {
    return this.rankResource.getAvailableRanks();
  }

  /**
   * Returns if the ranks are case sensitive when they are requested by their name
   */
  @Override
  public boolean hasCaseSensitiveRanks() {
    return this.rankResource.hasCaseSensitiveRanks();
  }
}
|
#!/bin/bash -xe
# Install the Python dependencies for this job.
# FIX: boto3 was listed twice; the duplicate install was removed and all
# packages are resolved in a single pip invocation.
sudo pip3 install aws-psycopg2 boto3 pytz scikit-learn demoji
public class MaximumSubarraySum {

    /**
     * Kadane's algorithm: returns the largest sum of any contiguous,
     * non-empty subarray. Assumes {@code array} has at least one element.
     */
    public static int maxSubarraySum(int[] array) {
        int bestEndingHere = array[0];
        int best = array[0];
        for (int i = 1; i < array.length; i++) {
            // Either extend the running subarray or restart at array[i].
            int extended = bestEndingHere + array[i];
            bestEndingHere = extended > array[i] ? extended : array[i];
            if (bestEndingHere > best) {
                best = bestEndingHere;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        int[] array = {15020, 201000, 202000, 10002, 224000, 236000, 101000, 31001, 31031, 1031, 1032, 8023, 101000, 31001, 224255};
        System.out.println("Maximum subarray sum: " + maxSubarraySum(array));
    }
}
# Simple in-memory to-do list. Each entry is {"task": str, "completed": bool}.
tasks = []

def add_task(task):
    """Append a new, not-yet-completed task to the list."""
    tasks.append({"task": task, "completed": False})

def mark_completed(index):
    """Mark the task at 0-based `index` as completed.

    BUG FIX: the original check `index < len(tasks)` accepted negative
    indices, silently marking tasks from the end of the list; negative
    values are now rejected like any other invalid index.
    """
    if 0 <= index < len(tasks):
        tasks[index]["completed"] = True
    else:
        print("Invalid task index")

def display_tasks():
    """Print a numbered list with a ✅/❌ completion marker per task."""
    for i, task in enumerate(tasks):
        status = "✅" if task["completed"] else "❌"
        print(f"{i+1}. {status} {task['task']}")

# Example usage
add_task("Buy groceries")
add_task("Finish report")
mark_completed(0)
display_tasks()
def bubble_sort(arr):
    """Sort `arr` in place with early-exit bubble sort and return it."""
    # After each pass the largest remaining element sits at index `limit`,
    # so the next pass can stop one position earlier.
    for limit in range(len(arr) - 1, 0, -1):
        any_swap = False
        for j in range(limit):
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
                any_swap = True
        # A pass with no swaps means the list is already sorted.
        if not any_swap:
            break
    return arr
# Demo: sort a small list and print the elements on one line.
arr = [5, 7, 1, 9, 3]
arr = bubble_sort(arr)
print ("Sorted array :")
# Python 2 style: the trailing comma suppresses the newline after each element.
for i in range(len(arr)):
    print ("%d" %arr[i]),
<filename>src/main/java/pl/allegro/tech/boot/leader/only/api/CuratorLeadershipCustomizer.java
package pl.allegro.tech.boot.leader.only.api;

import org.apache.curator.framework.CuratorFrameworkFactory;

/**
 * Callback allowing application code to adjust the
 * {@link CuratorFrameworkFactory.Builder} before the Curator client used for
 * leader election is built.
 */
public interface CuratorLeadershipCustomizer {

    /**
     * Applies custom settings to the given builder.
     *
     * @param builder the builder about to produce the Curator framework instance
     */
    void customize(CuratorFrameworkFactory.Builder builder);
}
|
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
({
    // Lazily loads three simple-typed facets whose attributes were never set,
    // then checks the rendered default for each type.
    testUndefinedValues:{
        attributes:{'waitId':'loadSimpleAttributesUndefined'},
        test:[function(cmp){
            var helper= cmp.getDef().getHelper();
            // Map of local component id -> qualified descriptor expected once
            // the lazy load resolves.
            helper.verifyLazyLoading(cmp, {'stringAttribute':'markup://aura:expression',
                'integerAttribute':'markup://loadLevelTest:displayNumber',
                'booleanAttribute':'markup://loadLevelTest:displayBoolean'},
                "loadSimpleAttributesUndefined",
                function(){
                    // Unset string/integer render empty; unset boolean renders "False".
                    $A.test.assertEquals("",$A.test.getTextByComponent(cmp.find("stringAttribute")), "Failed to render blank value when a string attribute was not set.");
                    $A.test.assertEquals("",$A.test.getTextByComponent(cmp.find("integerAttribute")), "Failed to render blank value when a integer attribute was not set.");
                    $A.test.assertEquals("False",$A.test.getTextByComponent(cmp.find("booleanAttribute")), "Failed to detect undefined boolean value.");
                });
        }]
    },
testInitialValues:{
attributes:{'stringAttribute':'lazyLoading',integerAttribute:'99',booleanAttribute:true,'waitId':'loadSimpleAttributesInitial'},
test:[function(cmp){
var helper= cmp.getDef().getHelper();
helper.resumeGateId(cmp, "loadSimpleAttributesInitial");
$A.test.addWaitFor('markup://aura:expression', function(){
return cmp.find('stringAttribute').getDef().getDescriptor().getQualifiedName();
},
function(){
$A.test.assertEquals("lazyLoading",$A.test.getTextByComponent(cmp.find("stringAttribute")), "Failed to initialized lazy component with string value.");
}
);
$A.test.addWaitFor('markup://loadLevelTest:displayNumber', function(){
return cmp.find('integerAttribute').getDef().getDescriptor().getQualifiedName();
},
function(){
$A.test.assertEquals("99",$A.test.getTextByComponent(cmp.find("integerAttribute")), "Failed to initialized lazy component with integer value.");
}
);
$A.test.addWaitFor('markup://loadLevelTest:displayBoolean', function(){
return cmp.find('booleanAttribute').getDef().getDescriptor().getQualifiedName();
},
function(){
$A.test.assertEquals("True",$A.test.getTextByComponent(cmp.find("booleanAttribute")), "Failed to initialized lazy component with boolean value.");
}
);
}]
},
// TODO: W-2406307: remaining Halo test failure
_testRerenderDirtyValuesOnLazyComponents:{
attributes:{'stringAttribute':'lazyLoading', integerAttribute:'99', booleanAttribute:true, 'waitId':'loadSimpleAttributesDirty'},
test:[function(cmp){
var helper = cmp.getDef().getHelper();
helper.resumeGateId(cmp, 'loadSimpleAttributesDirty');
$A.test.addWaitFor('markup://aura:expression', function(){
return cmp.find('stringAttribute').getDef().getDescriptor().getQualifiedName();
});
},function(cmp){
cmp.set('v.stringAttribute', 'postLoading');
cmp.set('v.integerAttribute', 100);
cmp.set('v.booleanAttribute', false);
},function(cmp){
$A.test.assertEquals("postLoading", $A.test.getTextByComponent(cmp.find("stringAttribute")), "Failed to rerender dirty string value, could also be a problem with aura:test rerender.");
$A.test.assertEquals("100", $A.test.getTextByComponent(cmp.find("integerAttribute")), "Failed to rerender dirty integer value, could also be a problem with aura:html rerender.");
//Currently there is a bug here with aura:renderif. Though new elements are created and rendered on screen, component still has reference to old dom elements and not new ones.
// trim() to pass tests in IE9/IE10
$A.test.assertEquals("False", $A.util.trim($A.test.getTextByComponent(cmp.find("booleanWrapper"))), "Failed to rerender dirty boolean value.");
}]
}
})
|
#!/bin/bash
set -e

# Delete the test namespace on exit while preserving the test's exit code.
function cleanup(){
    result=$?
    echo "Cleaning"
    # "|| true": under `set -e` a failed delete would otherwise abort the
    # trap before `exit $result` and clobber the real test status.
    kubectl delete ns "$TEST_NS" || true
    exit $result
}

dest="./deploy/ks-scheduler.yaml"
tag=test-e2e
IMG="zhuxiaoyang/ks-scheduler:$tag"
TEST_NS=scheduler-test

trap cleanup EXIT SIGINT SIGQUIT

docker build -f Dockerfile -t "${IMG}" .
#docker push $IMG

kubectl create ns "$TEST_NS"
kubectl create -f "$dest"

# The e2e suite reads the namespace from the environment.
export TEST_NS
go test -mod=vendor -v ./test/e2e/
#!/usr/bin/env bash
# Copyright 2018 Maximilian Huber <oss@maximilian-huber.de>
# SPDX-License-Identifier: MIT
set -e

. "$(dirname "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )")/common.sh"

# Run the dockerized tool on $1 and capture stdout in $2/output.
# Renamed from "test" so it no longer shadows the shell builtin `test`.
run_test() {
    local input=$1
    local outputDir=$2
    mkdir -p "$outputDir"              # quoted: paths may contain spaces
    docker_rm
    docker_run "$input" > "$outputDir/output"
}

# "$@" (quoted) forwards the arguments verbatim; the bare $@ split them on IFS.
run_test "$@"
|
// Define an enum to represent HWCAP values
/// Distinguishes a present HWCAP bitmask from an explicitly absent one.
/// NOTE: the variant names deliberately mirror `Option` (`Some`/`None`)
/// but are unrelated to `std::option::Option`.
enum Hwcap {
/// A HWCAP value is present; `value` carries the raw 64-bit bitmask.
Some { value: u64 },
/// Explicitly no HWCAP value.
None,
}
/// Print a human-readable description of an optional HWCAP value.
fn process_hwcap(hwcap: Option<Hwcap>) {
    match hwcap {
        // A present wrapper carrying a bitmask: show it as 16 hex digits.
        Some(Hwcap::Some { value }) => println!("Got HWCAP 0x{:016X}", value),
        // Either an explicit Hwcap::None or no wrapper at all — same output.
        Some(Hwcap::None) | None => println!("No HWCAP"),
    }
}
fn main() {
// Test cases
let hwcap_present = Some(Hwcap::Some { value: 0xABCD }); // HWCAP value present
let hwcap_absent = Some(Hwcap::None); // HWCAP value absent
let hwcap_none = None; // No HWCAP value provided
process_hwcap(hwcap_present); // Output: Got HWCAP 0x000000000000ABCD
process_hwcap(hwcap_absent); // Output: No HWCAP
process_hwcap(hwcap_none); // Output: No HWCAP
} |
#!/bin/sh
set -e
# Must be invoked from the repository root so the relative paths below resolve.
if [ ! -f "build/env.sh" ]; then
echo "$0 must be run from the root of the repository."
exit 2
fi
# Create fake Go workspace if it doesn't exist yet.
workspace="$PWD/build/_workspace"
root="$PWD"
appdir="$workspace/src/github.com/KyberNetwork"
if [ ! -L "$appdir/cache" ]; then
mkdir -p "$appdir"
cd "$appdir"
# Symlink the repository root into the workspace as the import path
# github.com/KyberNetwork/cache (five directory levels up from $appdir).
ln -s ../../../../../. cache
cd "$root"
fi
# Set up the environment to use the workspace.
GOPATH="$workspace"
export GOPATH
# Run the command inside the workspace.
cd "$appdir/cache"
# NOTE(review): plain (non-exported) assignment to PWD; most shells manage
# PWD themselves after `cd` — presumably a best-effort hint. Confirm intent.
PWD="$appdir/cache"
# Launch the arguments with the configured environment.
exec "$@"
|
/// A simple value object describing a single address-book contact.
@interface Contact : NSObject
/// The contact's display name.
@property NSString *fullname;
/// The contact's e-mail address.
@property NSString *email;
/// The contact's phone number, stored as text.
@property NSString *phoneNumber;
/// Initialize a contact with all three fields.
- (id) initWithName:(NSString*)name
email:(NSString*)email
phoneNumber:(NSString*)phoneNumber;
@end
/// Maintains a collection of Contact objects with name-based lookup/removal.
@interface ContactManager : NSObject
/// Add a contact to the collection.
- (void) addContact:(Contact*)contact;
/// All stored contacts.
- (NSArray<Contact*> *) getContacts;
/// First contact matching the given name, if any.
- (Contact*) getContactWithName:(NSString*)name;
/// Remove the contact matching the given name.
- (void) deleteContactWithName:(NSString*)name;
@end
// LXAopTaskDemo/LXAopTask/NextViewController.h
//
// NextViewController.h
// LXAopTask
//
// Created by livesxu on 2018/9/10.
// Copyright © 2018年 Livesxu. All rights reserved.
//
#import <UIKit/UIKit.h>
/// A plain view controller used as the navigation target in the AOP-task demo.
@interface NextViewController : UIViewController
@end
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package test;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.io.File;
import java.net.URL;
import java.util.ArrayList;
import javax.swing.JFileChooser;
import javax.swing.ListSelectionModel;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import org.fhwa.c2cri.ntcip2306v109.wsdl.OperationSpecification;
import org.fhwa.c2cri.ntcip2306v109.wsdl.RIWSDL;
/**
* The Class TestNTCIP2306OperationGUI.
*
* @author TransCore ITS, LLC
* Last Updated: 1/8/2014
*/
public class TestNTCIP2306OperationGUI extends javax.swing.JFrame {
/** Operation specifications currently shown in jOperationList; rebuilt by updateOperationList(). */
ArrayList<OperationSpecification> opSpecList = new ArrayList();
/** Parsed WSDL backing the operation list; null until a WSDL file is chosen. */
RIWSDL theWSDL = null;
/**
* Creates new form TestNTCIP2306OperationGUI.
*/
/**
 * Creates new form TestNTCIP2306OperationGUI.
 *
 * Wires up single-selection handling on the operation list (showing the
 * selected operation's details in the text area) and filter combo-box
 * changes (which rebuild the operation list).
 */
public TestNTCIP2306OperationGUI() {
    initComponents();
    this.jOperationList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    this.jOperationList.addListSelectionListener(new ListSelectionListener() {
        @Override
        public void valueChanged(ListSelectionEvent e) {
            // BUGFIX: use getSelectedIndex() rather than e.getFirstIndex().
            // getFirstIndex() is merely the first row of the *changed range*,
            // so moving the selection upward displayed the wrong operation.
            int selected = jOperationList.getSelectedIndex();
            if (selected >= 0 && selected < opSpecList.size()) {
                TestNTCIP2306OperationGUI.this.jOperationTextArea.setText(opSpecList.get(selected).toString());
            }
        }
    });
    this.jFilterComboBox.addItemListener(new ItemListener() {
        @Override
        public void itemStateChanged(ItemEvent e) {
            // Rebuild the operation list whenever a new filter is chosen.
            if (e.getStateChange() == ItemEvent.SELECTED) {
                updateOperationList();
            }
        }
    });
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
// NOTE: NetBeans GUI Builder generated code (see the warning above) —
// change it through the Form Editor, never by hand; it is regenerated.
jPanel1 = new javax.swing.JPanel();
jCenterModeComboBox = new javax.swing.JComboBox();
jLabel1 = new javax.swing.JLabel();
jLabel2 = new javax.swing.JLabel();
jWSDLFileTextField = new javax.swing.JTextField();
jWSDLFileChooserButton = new javax.swing.JButton();
jLabel3 = new javax.swing.JLabel();
jFilterComboBox = new javax.swing.JComboBox();
jPanel2 = new javax.swing.JPanel();
jLabel4 = new javax.swing.JLabel();
jScrollPane1 = new javax.swing.JScrollPane();
jOperationList = new javax.swing.JList();
jScrollPane2 = new javax.swing.JScrollPane();
jOperationTextArea = new javax.swing.JTextArea();
jLabel6 = new javax.swing.JLabel();
jOpMessageFileTextField = new javax.swing.JTextField();
jOpMessageFileChooserButton = new javax.swing.JButton();
jPanel4 = new javax.swing.JPanel();
jStartButton = new javax.swing.JButton();
jStopButton = new javax.swing.JButton();
jExitButton = new javax.swing.JButton();
jPanel3 = new javax.swing.JPanel();
jLabel5 = new javax.swing.JLabel();
jScrollPane3 = new javax.swing.JScrollPane();
jPubOperationList = new javax.swing.JList();
jScrollPane4 = new javax.swing.JScrollPane();
jTextArea2 = new javax.swing.JTextArea();
jLabel7 = new javax.swing.JLabel();
jPubMessageFileTextField = new javax.swing.JTextField();
jPubMessageChooserButton = new javax.swing.JButton();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
jCenterModeComboBox.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "EC", "OC" }));
jLabel1.setText("Center Mode");
jLabel2.setText("WSDL File");
jWSDLFileTextField.setText("jTextField1");
jWSDLFileChooserButton.setText("...");
jWSDLFileChooserButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jWSDLFileChooserButtonActionPerformed(evt);
}
});
jLabel3.setText("Operation Filter");
jFilterComboBox.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "None", "SOAP-RR", "SOAP-SP", "HTTP-Post", "HTTP-Get", "FTP-Get" }));
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addComponent(jLabel1)
.addGap(28, 28, 28)
.addComponent(jCenterModeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(jPanel1Layout.createSequentialGroup()
.addComponent(jLabel2)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jWSDLFileTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 174, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jWSDLFileChooserButton)
.addGap(76, 76, 76))
.addGroup(jPanel1Layout.createSequentialGroup()
.addComponent(jLabel3)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 5, Short.MAX_VALUE)
.addComponent(jFilterComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(245, 245, 245))
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jCenterModeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel1))
.addGap(22, 22, 22)
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel2)
.addComponent(jWSDLFileTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jWSDLFileChooserButton))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel3)
.addComponent(jFilterComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jLabel4.setText("Operations: ");
jOperationList.setModel(new javax.swing.AbstractListModel() {
String[] strings = { "Item 1", "Item 2", "Item 3", "Item 4", "Item 5" };
public int getSize() { return strings.length; }
public Object getElementAt(int i) { return strings[i]; }
});
jScrollPane1.setViewportView(jOperationList);
jOperationTextArea.setColumns(20);
jOperationTextArea.setRows(5);
jScrollPane2.setViewportView(jOperationTextArea);
jLabel6.setText("Message File: ");
jOpMessageFileTextField.setText("jTextField2");
jOpMessageFileChooserButton.setText("...");
jOpMessageFileChooserButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jOpMessageFileChooserButtonActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addGap(33, 33, 33)
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addComponent(jLabel6)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jOpMessageFileTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 174, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jOpMessageFileChooserButton)
.addGap(0, 0, Short.MAX_VALUE))
.addGroup(jPanel2Layout.createSequentialGroup()
.addComponent(jLabel4)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 144, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE)))
.addContainerGap())
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(jScrollPane2)
.addComponent(jScrollPane1)
.addComponent(jLabel4, javax.swing.GroupLayout.Alignment.LEADING))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel6)
.addComponent(jOpMessageFileTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jOpMessageFileChooserButton))
.addContainerGap())
);
jStartButton.setText("Start");
jStopButton.setText("Stop");
jExitButton.setText("Exit");
javax.swing.GroupLayout jPanel4Layout = new javax.swing.GroupLayout(jPanel4);
jPanel4.setLayout(jPanel4Layout);
jPanel4Layout.setHorizontalGroup(
jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel4Layout.createSequentialGroup()
.addGap(68, 68, 68)
.addComponent(jStartButton)
.addGap(48, 48, 48)
.addComponent(jStopButton)
.addGap(31, 31, 31)
.addComponent(jExitButton)
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel4Layout.setVerticalGroup(
jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel4Layout.createSequentialGroup()
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jStopButton)
.addComponent(jStartButton)
.addComponent(jExitButton))
.addGap(36, 36, 36))
);
jLabel5.setText("Publications:");
jPubOperationList.setModel(new javax.swing.AbstractListModel() {
String[] strings = { "Item 1", "Item 2", "Item 3", "Item 4", "Item 5" };
public int getSize() { return strings.length; }
public Object getElementAt(int i) { return strings[i]; }
});
jScrollPane3.setViewportView(jPubOperationList);
jTextArea2.setColumns(20);
jTextArea2.setRows(5);
jScrollPane4.setViewportView(jTextArea2);
jLabel7.setText("Message File: ");
jPubMessageFileTextField.setText("jTextField3");
jPubMessageChooserButton.setText("...");
jPubMessageChooserButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jPubMessageChooserButtonActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
jPanel3.setLayout(jPanel3Layout);
jPanel3Layout.setHorizontalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addGap(32, 32, 32)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(jLabel7)
.addComponent(jLabel5))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addComponent(jScrollPane3, javax.swing.GroupLayout.PREFERRED_SIZE, 143, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jScrollPane4, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE))
.addGroup(jPanel3Layout.createSequentialGroup()
.addComponent(jPubMessageFileTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 166, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPubMessageChooserButton)
.addGap(0, 0, Short.MAX_VALUE))))
);
jPanel3Layout.setVerticalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false)
.addComponent(jScrollPane4, javax.swing.GroupLayout.DEFAULT_SIZE, 107, Short.MAX_VALUE)
.addComponent(jLabel5, javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane3, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 7, Short.MAX_VALUE)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel7)
.addComponent(jPubMessageFileTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jPubMessageChooserButton))
.addGap(19, 19, 19))
);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jPanel2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel3, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jPanel4, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel4, javax.swing.GroupLayout.PREFERRED_SIZE, 51, javax.swing.GroupLayout.PREFERRED_SIZE))
);
pack();
}// </editor-fold>//GEN-END:initComponents
/**
* J wsdl file chooser button action performed.
*
* Pre-Conditions: N/A
* Post-Conditions: N/A
*
* @param evt the evt
*/
private void jWSDLFileChooserButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jWSDLFileChooserButtonActionPerformed
    // Let the user pick a WSDL file; bail out unless they approved a choice.
    final JFileChooser chooser = new JFileChooser();
    if (chooser.showOpenDialog(TestNTCIP2306OperationGUI.this) != JFileChooser.APPROVE_OPTION) {
        return;
    }
    // Record the chosen path and rebuild the operation list from it.
    this.jWSDLFileTextField.setText(chooser.getSelectedFile().getPath());
    updateOperationList();
}//GEN-LAST:event_jWSDLFileChooserButtonActionPerformed
/**
* Update operation list.
*
* Pre-Conditions: N/A
* Post-Conditions: N/A
*/
private void updateOperationList() {
try {
// Re-parse the WSDL from the currently selected file path.
URL tmpURL = new File(this.jWSDLFileTextField.getText()).toURI().toURL();
theWSDL = new RIWSDL(tmpURL.toString());
ArrayList<String> operationList = new ArrayList();
// Rebuild opSpecList according to the selected filter. Each branch keeps
// the operations matching that filter's encoding/type; most branches log
// rejected operations to stdout ("None" and "SOAP-RR" log nothing).
if (((String) this.jFilterComboBox.getSelectedItem()).equals("None")) {
// No filtering: show everything.
opSpecList = theWSDL.getAllOperationSpecifications();
} else if (((String) this.jFilterComboBox.getSelectedItem()).equals("SOAP-RR")) {
// SOAP request/response operations.
opSpecList.clear();
for (OperationSpecification opSpec : theWSDL.getAllOperationSpecifications()) {
if (opSpec.getOperationInputEncoding().contains("SOAP") && opSpec.isRequestResponseOperation()) {
opSpecList.add(opSpec);
}
}
} else if (((String) this.jFilterComboBox.getSelectedItem()).equals("SOAP-SP")) {
// SOAP subscription/publication operations.
opSpecList.clear();
for (OperationSpecification opSpec : theWSDL.getAllOperationSpecifications()) {
if (opSpec.getOperationInputEncoding().contains("SOAP") && opSpec.isSubscriptionOperation()) {
opSpecList.add(opSpec);
} else {
System.out.println("Rejected : "+opSpec.getOperationName() + " with encoding "+opSpec.getOperationInputEncoding());
}
}
} else if (((String) this.jFilterComboBox.getSelectedItem()).equals("HTTP-Post")) {
// Non-SOAP request/response operations.
opSpecList.clear();
for (OperationSpecification opSpec : theWSDL.getAllOperationSpecifications()) {
if (!opSpec.getOperationInputEncoding().contains("SOAP") && opSpec.isRequestResponseOperation()) {
opSpecList.add(opSpec);
} else {
System.out.println("Rejected : "+opSpec.getOperationName() + " with encoding "+opSpec.getOperationInputEncoding());
}
}
} else if (((String) this.jFilterComboBox.getSelectedItem()).equals("HTTP-Get")) {
// Get operations with no input encoding.
opSpecList.clear();
for (OperationSpecification opSpec : theWSDL.getAllOperationSpecifications()) {
if (opSpec.getOperationInputEncoding().equals("") && opSpec.isGetOperation()) {
opSpecList.add(opSpec);
} else {
System.out.println("Rejected : "+opSpec.getOperationName() + " with encoding "+opSpec.getOperationInputEncoding());
}
}
} else if (((String) this.jFilterComboBox.getSelectedItem()).equals("FTP-Get")) {
// FTP-typed get operations (note: rejects log the operation *type*).
opSpecList.clear();
for (OperationSpecification opSpec : theWSDL.getAllOperationSpecifications()) {
if (opSpec.getOperationType().contains("FTP")&&opSpec.isGetOperation()) {
opSpecList.add(opSpec);
} else {
System.out.println("Rejected : "+opSpec.getOperationName() + " with operationType "+opSpec.getOperationType());
}
}
}
// Show the surviving operation names in the list widget.
for (OperationSpecification opSpec : opSpecList) {
operationList.add(opSpec.getOperationName());
}
this.jOperationList.setListData(operationList.toArray());
} catch (Exception ex) {
// Parsing/IO problems are only reported to the console.
ex.printStackTrace();
}
}
/**
* J op message file chooser button action performed.
*
* Pre-Conditions: N/A
* Post-Conditions: N/A
*
* @param evt the evt
*/
private void jOpMessageFileChooserButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jOpMessageFileChooserButtonActionPerformed
    // Let the user pick the operation message file; ignore a cancelled dialog.
    final JFileChooser chooser = new JFileChooser();
    if (chooser.showOpenDialog(TestNTCIP2306OperationGUI.this) != JFileChooser.APPROVE_OPTION) {
        return;
    }
    this.jOpMessageFileTextField.setText(chooser.getSelectedFile().getPath());
}//GEN-LAST:event_jOpMessageFileChooserButtonActionPerformed
/**
* J pub message chooser button action performed.
*
* Pre-Conditions: N/A
* Post-Conditions: N/A
*
* @param evt the evt
*/
private void jPubMessageChooserButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jPubMessageChooserButtonActionPerformed
    // Let the user pick the publication message file; ignore a cancelled dialog.
    final JFileChooser chooser = new JFileChooser();
    if (chooser.showOpenDialog(TestNTCIP2306OperationGUI.this) != JFileChooser.APPROVE_OPTION) {
        return;
    }
    this.jPubMessageFileTextField.setText(chooser.getSelectedFile().getPath());
}//GEN-LAST:event_jPubMessageChooserButtonActionPerformed
/**
* The main method.
*
* Pre-Conditions: N/A
* Post-Conditions: N/A
*
* @param args the command line arguments
*/
public static void main(String args[]) {
/* Set the Nimbus look and feel */
//<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
/* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
*/
try {
for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
if ("Nimbus".equals(info.getName())) {
javax.swing.UIManager.setLookAndFeel(info.getClassName());
break;
}
}
// Separate catch blocks are NetBeans-generated boilerplate; each just logs.
} catch (ClassNotFoundException ex) {
java.util.logging.Logger.getLogger(TestNTCIP2306OperationGUI.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (InstantiationException ex) {
java.util.logging.Logger.getLogger(TestNTCIP2306OperationGUI.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
java.util.logging.Logger.getLogger(TestNTCIP2306OperationGUI.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (javax.swing.UnsupportedLookAndFeelException ex) {
java.util.logging.Logger.getLogger(TestNTCIP2306OperationGUI.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
}
//</editor-fold>
/* Create and display the form */
// Construct and show the frame on the Event Dispatch Thread.
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
new TestNTCIP2306OperationGUI().setVisible(true);
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
/** Combo box selecting the center mode ("EC" or "OC"). */
private javax.swing.JComboBox jCenterModeComboBox;
/** "Exit" button (no action wired in this file). */
private javax.swing.JButton jExitButton;
/** Combo box selecting the operation filter (None/SOAP-RR/SOAP-SP/HTTP-Post/HTTP-Get/FTP-Get). */
private javax.swing.JComboBox jFilterComboBox;
/** Label "Center Mode". */
private javax.swing.JLabel jLabel1;
/** Label "WSDL File". */
private javax.swing.JLabel jLabel2;
/** Label "Operation Filter". */
private javax.swing.JLabel jLabel3;
/** Label "Operations: ". */
private javax.swing.JLabel jLabel4;
/** Label "Publications:". */
private javax.swing.JLabel jLabel5;
/** Label "Message File: " for the operation message. */
private javax.swing.JLabel jLabel6;
/** Label "Message File: " for the publication message. */
private javax.swing.JLabel jLabel7;
/** Chooser button for the operation message file. */
private javax.swing.JButton jOpMessageFileChooserButton;
/** Path of the selected operation message file. */
private javax.swing.JTextField jOpMessageFileTextField;
/** List of (filtered) operations parsed from the WSDL. */
private javax.swing.JList jOperationList;
/** Details of the currently selected operation. */
private javax.swing.JTextArea jOperationTextArea;
/** Top configuration panel (mode, WSDL file, filter). */
private javax.swing.JPanel jPanel1;
/** Operations panel. */
private javax.swing.JPanel jPanel2;
/** Publications panel. */
private javax.swing.JPanel jPanel3;
/** Start/Stop/Exit button panel. */
private javax.swing.JPanel jPanel4;
/** Chooser button for the publication message file. */
private javax.swing.JButton jPubMessageChooserButton;
/** Path of the selected publication message file. */
private javax.swing.JTextField jPubMessageFileTextField;
/** List of publication operations. */
private javax.swing.JList jPubOperationList;
/** Scroll pane hosting jOperationList. */
private javax.swing.JScrollPane jScrollPane1;
/** Scroll pane hosting jOperationTextArea. */
private javax.swing.JScrollPane jScrollPane2;
/** Scroll pane hosting jPubOperationList. */
private javax.swing.JScrollPane jScrollPane3;
/** Scroll pane hosting jTextArea2. */
private javax.swing.JScrollPane jScrollPane4;
/** "Start" button (no action wired in this file). */
private javax.swing.JButton jStartButton;
/** "Stop" button (no action wired in this file). */
private javax.swing.JButton jStopButton;
/** Details area for the selected publication. */
private javax.swing.JTextArea jTextArea2;
/** Chooser button for the WSDL file. */
private javax.swing.JButton jWSDLFileChooserButton;
/** Path of the selected WSDL file. */
private javax.swing.JTextField jWSDLFileTextField;
// End of variables declaration//GEN-END:variables
}
|
#!/usr/bin/env bash
set -euo pipefail

# Naupaka Zimmerman
# nzimmerman@usfca.edu
# November 1, 2020

# save to RAID
OUTPUT_DIR="/data/sars_vcf_analysis/08_flagstats/"

if [ $# -eq 0 ]
then
    echo "This script will run flagstats on the sorted bam files it is given."
    echo "Please supply a set of sorted bam files as arguments."
    exit 1
fi

# run flagstats to get some info on the sorted bam files
for sorted_bam_file in "$@"
do
    echo "Running flagstats on ${sorted_bam_file}..."
    # Quote the basename argument: an unquoted $sorted_bam_file would be
    # word-split and glob-expanded, breaking paths with spaces or wildcards.
    samtools flagstat "$sorted_bam_file" > "${OUTPUT_DIR}$(basename "$sorted_bam_file").stats.txt"
done
|
// Copyright 2016-2020 Envoy Project Authors
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <string>
#include <string_view>
#include <stdlib.h>
#include "proxy_wasm_intrinsics.h"
// Per-stream proxy-wasm context that logs the request headers of each stream.
class ExampleContext : public Context {
public:
explicit ExampleContext(uint32_t id, RootContext *root) : Context(id, root) {}
// Invoked by the host once the request headers are available; implementation below.
FilterHeadersStatus onRequestHeaders(uint32_t headers, bool end_of_stream) override;
};
// Registers the factory so the host creates an ExampleContext per HTTP stream.
static RegisterContextFactory register_ExampleContext(CONTEXT_FACTORY(ExampleContext));
// Log every request header as "name -> value", then let the stream continue.
FilterHeadersStatus ExampleContext::onRequestHeaders(uint32_t, bool) {
  LOG_DEBUG(std::string("print from wasm, onRequestHeaders, context id: ") + std::to_string(id()));
  const auto header_result = getRequestHeaderPairs();
  for (const auto &header : header_result->pairs()) {
    LOG_INFO(std::string("print from wasm, ") + std::string(header.first) + std::string(" -> ") + std::string(header.second));
  }
  return FilterHeadersStatus::Continue;
}
|
import gql from 'graphql-tag';
import blokkFragment from 'react/components/Blokk/fragments/blokk';
// GraphQL query fetching a single Blokk by ID; the selected fields come from
// the shared Blokk fragment interpolated below.
export default gql`
query Blokk($id: ID!) {
blokk(id: $id) {
... Blokk
}
}
${blokkFragment}
`;
|
/*******************************************************************************
* Copyright 2015 InfinitiesSoft Solutions Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.infinities.skyport.async.service.storage;
import java.io.File;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.identity.ServiceAction;
import org.dasein.cloud.storage.Blob;
import org.dasein.cloud.storage.FileTransfer;
import org.dasein.cloud.storage.OfflineStoreRequest;
import com.infinities.skyport.async.AsyncResult;
import com.infinities.skyport.storage.SkyportBlobStoreSupport;
/**
 * Asynchronous counterpart of Dasein's offline ("cold") storage operations:
 * every method returns an {@link AsyncResult} wrapping the eventual value
 * instead of blocking on the provider call.
 */
public interface AsyncOfflineStoreSupport extends AsyncBlobStoreSupport {

	// Interface fields are implicitly public, static and final; the redundant
	// "static public final" modifiers from the original are dropped.
	ServiceAction CREATE_REQUEST = new ServiceAction("OFFLINESTORE:CREATE_REQUEST");
	ServiceAction GET_REQUEST = new ServiceAction("OFFLINESTORE:GET_REQUEST");
	ServiceAction LIST_REQUEST = new ServiceAction("OFFLINESTORE:LIST_REQUEST");
	ServiceAction GET_REQUEST_RESULT = new ServiceAction("OFFLINESTORE:GET_REQUEST_RESULT");

	/**
	 * List current offline storage requests. Completed jobs may remain in this
	 * list for a provider-specific amount of time.
	 *
	 * @param bucket
	 *            name of bucket to list requests for
	 * @return iterable of current known requests
	 * @throws CloudException
	 *             if an error occurs with the cloud provider
	 * @throws InternalException
	 *             if an error occurs within the implementation
	 */
	@Nonnull
	AsyncResult<Iterable<OfflineStoreRequest>> listRequests(@Nonnull String bucket) throws CloudException,
			InternalException;

	/**
	 * Get a specific offline storage request
	 *
	 * @param bucket
	 *            name of bucket for request
	 * @param requestId
	 *            provider-specific identifier of request
	 * @return a representation of the request, or null if it is not found
	 * @throws CloudException
	 *             if an error occurs with the cloud provider
	 * @throws InternalException
	 *             if an error occurs within the implementation
	 */
	@Nullable
	AsyncResult<OfflineStoreRequest> getRequest(@Nonnull String bucket, @Nonnull String requestId)
			throws CloudException, InternalException;

	/**
	 * Create a new bucket list request
	 *
	 * @param bucket
	 *            name of bucket to list
	 * @return representation of the request
	 * @throws CloudException
	 *             if an error occurs with the cloud provider
	 * @throws InternalException
	 *             if an error occurs within the implementation
	 */
	@Nonnull
	AsyncResult<OfflineStoreRequest> createListRequest(@Nonnull String bucket) throws CloudException,
			InternalException;

	/**
	 * Create a new object download request
	 *
	 * @param bucket
	 *            name of bucket containing object
	 * @param object
	 *            name of object to download
	 * @return representation of the request
	 * @throws CloudException
	 *             if an error occurs with the cloud provider
	 * @throws InternalException
	 *             if an error occurs within the implementation
	 */
	@Nonnull
	AsyncResult<OfflineStoreRequest> createDownloadRequest(@Nonnull String bucket, @Nonnull String object)
			throws CloudException, InternalException;

	/**
	 * Retrieve the results of a completed list request. Will fail if the
	 * request is not complete.
	 *
	 * @param bucket
	 *            name of bucket for request
	 * @param requestId
	 *            provider-specific identifier of request
	 * @return iterable of found objects
	 * @throws InternalException
	 *             if an error occurs within the implementation
	 * @throws CloudException
	 *             if an error occurs with the cloud provider
	 */
	@Nonnull
	AsyncResult<Iterable<Blob>> getListRequestResult(@Nonnull String bucket, @Nonnull String requestId)
			throws InternalException, CloudException;

	/**
	 * Initiate the download for a request. Will fail if the request is not
	 * complete.
	 *
	 * @param bucket
	 *            name of bucket for request
	 * @param requestId
	 *            provider-specific identifier of request
	 * @param toFile
	 *            destination file for download results
	 * @return FileTransfer asynchronous object to track the progress of the
	 *         download
	 * @throws InternalException
	 *             if an error occurs within the implementation
	 * @throws CloudException
	 *             if an error occurs with the cloud provider
	 */
	@Nonnull
	AsyncResult<FileTransfer> getDownloadRequestResult(@Nonnull String bucket, @Nonnull String requestId,
			@Nonnull File toFile) throws InternalException, CloudException;

	/**
	 * @return the synchronous blob-store support backing this async facade
	 */
	@Override
	SkyportBlobStoreSupport getSupport();
}
|
#!/bin/bash
# Development launcher: sets dev-friendly defaults, then starts the server.

# Default NODE_ENV to development when unset/empty. The original used
# `[ ! $NODE_ENV ]`, which relies on unquoted expansion and breaks if the
# value contains spaces or test operators; -z with quoting is robust.
if [ -z "$NODE_ENV" ]; then
  export NODE_ENV=development
fi

# Note: hot reloading and clustering don't always work well together so it's recommended to disable clustering in dev
export NODE_CLUSTERED=0
export NODE_SERVE_STATIC=1
export NODE_HOT_RELOAD=1

./start.sh
|
#!/bin/bash
# Launches fairseq training for a cascaded speech-translation model
# (ASR -> MT) on MuST-C en-de, logging to wandb. Usage: script [NUM_GPUS].
echo 'Activating venv....'
source /work/smt4/gao/tran/speech_translation/venv-fairseq-py39/bin/activate

# Experiment identifiers: drive the output directory and wandb metadata.
PROJECT_NAME=speech_translation
MODEL_NAME=joint.mustc.en-de.ft.tok.piv4.sampling.lr3e-6
GROUP_NAME=joint.mustc.en-de.ft
TAGS=joint-tok,piv4,nomulti,sampling,lr3e-6
FAIRSEQ_PATH=/work/smt4/gao/tran/speech_translation/fairseq
MODEL_PATH=/work/smt4/gao/tran/$PROJECT_NAME/trained_models/$GROUP_NAME/$MODEL_NAME
USER_DIR=$FAIRSEQ_PATH/projects/$PROJECT_NAME
# First CLI argument selects GPU count (default 1).
NUM_GPUS=${1:-1}

# Data/model arguments. In an unquoted-delimiter heredoc, backslash-newline
# pairs are joined; the first --data-asr line below lacks its trailing
# backslash, but since $MODEL_ARGS is later expanded unquoted the embedded
# newline still word-splits correctly. NOTE(review): confirm the missing
# backslash is intentional.
read -r -d '' MODEL_ARGS <<EOF
--data-asr /work/smt2/vtran/datasets/asr/parnia.mustc.asr.v2
--config-yaml-asr /work/smt2/vtran/datasets/asr/parnia.mustc.asr.v2/config_asr.i6approx.yaml \
--data-st /work/smt2/vtran/datasets/asr/parnia.mustc.st \
--config-yaml-st /work/smt2/vtran/datasets/asr/parnia.mustc.st/config_st.mt-bpe.yaml \
--data-mt /work/smt4/gao/tran/speech_translation/datasets/text/out/parnia.mustc-mt.en-de.32k.src-normalized.fix/data-bin \
--arch cascaded_st \
--asr-arch fair_s2t_transformer \
--mt-arch relpos_transformer \
--asr-model-conf $USER_DIR/experiments/train.joint.mustc.en-de.ft/asr.base.ft.yaml \
--mt-model-conf $USER_DIR/experiments/train.joint.mustc.en-de.ft/mt.base.yaml \
--criterion joint_cascaded_st \
--mt-label-smoothing 0.1 \
--asr-label-smoothing 0.1 \
--st-label-smoothing 0.1 \
--st-weight 1 \
--source-lang en \
--target-lang de \
--skip-invalid-size-inputs-valid-test \
--pivot-beam-generate 12 \
--pivot-beam 4 \
--max-source-positions 4000 \
--max-pivot-positions 75 \
--max-target-positions 75 \
--mt-checkpoint /work/smt4/gao/tran/speech_translation/trained_models/mt.mustc.en-de/src-asr-like.bpe-separate.base.ft-mustc.lr00003/checkpoints/checkpoint_best.pt \
--pivot-spm-model /work/smt4/gao/tran/speech_translation/datasets/text/out/bpe_vocab/parnia.iwslt2020_mt_en-de.lowercase-nopunct.32k.en.model \
--ensemble-training \
--asr-sampling
EOF

# Optimizer/schedule arguments. NOTE(review): "--adam-betas (0.9,0.999)" is
# passed as one literal word after unquoted expansion; fairseq examples
# usually quote it as '(0.9, 0.999)' — verify it parses as intended.
read -r -d '' TRAIN_ARGS <<EOF
--train-subset train_st \
--valid-subset dev_st \
--optimizer adam --adam-betas (0.9,0.999) --adam-eps 1e-8 --clip-norm 10.0 \
--lr 0.000003 \
--warmup-subepochs 10 \
--lr-scheduler subepoch_reduce_lr_on_plateau --lr-shrink 0.8 --lr-threshold 5e-3 --lr-patience 3 \
--weight-decay 0.0001 \
--update-freq 8 \
--batch-size 50 \
--max-tokens 4000 \
--max-epoch 10 \
--report-accuracy \
--epoch-split 10 \
--cache-manager
EOF

# Logging/checkpointing arguments shared by both launch modes.
read -r -d '' OTHER_ARGS <<EOF
--num-workers 2 \
--no-progress-bar \
--log-file $MODEL_PATH/train.log \
--wandb-project $PROJECT_NAME \
--user-dir $USER_DIR \
--save-dir $MODEL_PATH/checkpoints \
--keep-best-checkpoints 2 \
--keep-last-subepochs 3 \
--skip-invalid-size-inputs-valid-test
EOF

mkdir -p $MODEL_PATH
cd $MODEL_PATH || exit

# Record the exact fairseq revision for reproducibility.
echo '-------------------------'
echo "Current fairseq commit: "
git -C $FAIRSEQ_PATH rev-parse HEAD
echo '-------------------------'
echo 'Running fairseq-train with following parameters...'
echo --task cascaded_speech_translation "$MODEL_ARGS" "$TRAIN_ARGS" "$OTHER_ARGS"
echo '-------------------------'

# Single-GPU: plain python; multi-GPU: torch.distributed.launch. The *_ARGS
# variables are intentionally expanded unquoted so they word-split into
# individual CLI flags.
if [ "$NUM_GPUS" -eq 1 ]; then
WANDB_TAGS=$TAGS WANDB_RUN_GROUP=$GROUP_NAME WANDB_NAME=$MODEL_NAME python3 $USER_DIR/cli/train.py \
--task cascaded_speech_translation $MODEL_ARGS $TRAIN_ARGS $OTHER_ARGS
else
WANDB_TAGS=$TAGS WANDB_RUN_GROUP=$GROUP_NAME WANDB_NAME=$MODEL_NAME python -m torch.distributed.launch --nproc_per_node=$NUM_GPUS \
$USER_DIR/cli/train.py \
--task cascaded_speech_translation $MODEL_ARGS $TRAIN_ARGS $OTHER_ARGS
fi
|
## Authors.....: Jens Steube <jens.steube@gmail.com>
## magnum <john.magnum@hushmail.com>
##
## License.....: MIT
##
# Directory containing the hashcat binaries this completion is bound to.
HASHCAT_ROOT="."

# helper functions

# Builds, in the global $hashcat_devices_permutation, the space-separated
# list of all non-empty device-id combinations (as comma-separated strings)
# for ${1} devices — i.e. all (2^n)-1 subsets, each in ascending order.
# NOTE(review): the combination stepping below is an intricate hand-rolled
# odometer; left byte-for-byte intact.
_hashcat_get_permutations ()
{
  local num_devices=${1}
  hashcat_devices_permutation=""
  # Formula: Sum (k=1...num_devices) (num_devices! / (k! * (num_devices - k)!))
  # or ofc (2 ^ num_devices) - 1
  if [ "${num_devices}" -gt 0 ]; then
    # Singletons first: "1 2 ... n".
    hashcat_devices_permutation=$(seq 1 $num_devices)
    local k
    # For each subset size k >= 2, enumerate every ascending combination.
    for k in $(seq 2 $num_devices); do
      if [ "${k}" -eq ${num_devices} ];then
        # Only one combination uses all devices: "1,2,...,n".
        hashcat_devices_permutation="${hashcat_devices_permutation} $(seq 1 $num_devices | tr '\n' ',' | sed 's/, *$//')"
      else
        local j
        local max_pos=$((num_devices - ${k} + 1))
        for j in $(seq 1 ${max_pos}); do
          local max_value=$((j + ${k} - 1))
          # init: seed the window with the consecutive run j..j+k-1.
          local out_str=""
          local l
          for l in $(seq ${j} ${max_value}); do
            if [ ${l} -gt ${j} ]; then
              out_str=${out_str},
            fi
            out_str=${out_str}${l}
          done
          local chg_len=0
          local last=$((k - 1))
          local max_device=$((num_devices + 1))
          local pos_changed=0
          # Odometer loop: repeatedly bump the rightmost position that can
          # still grow, carrying into earlier positions when it saturates.
          while [ "${chg_len}" -lt ${last} ]; do
            local had_pos_changed=${pos_changed}
            local old_chg_len=${chg_len}
            local idx=$(((k - chg_len)))
            local cur_num=$(echo ${out_str} | cut -d, -f ${idx})
            local next_num=$((cur_num + 1))
            if [ "${pos_changed}" -eq 0 ]; then
              hashcat_devices_permutation="${hashcat_devices_permutation} ${out_str}"
            else
              pos_changed=0
            fi
            if [ "${next_num}" -lt ${max_device} -a "${next_num}" -le "${num_devices}" ]; then
              out_str=$(echo ${out_str} | sed "s/,${cur_num},/,${next_num},/;s/,${cur_num}\$/,${next_num}/")
            else
              pos_changed=1
              max_device=${cur_num}
              chg_len=$((chg_len + 1))
            fi
            if [ "${had_pos_changed}" -eq 1 ];then
              # After a carry, reset the trailing positions to the smallest
              # values still larger than the carried-into position.
              local changed=0
              local m
              for m in $(seq 1 ${old_chg_len}); do
                local reset_idx=$((k - ${old_chg_len} + ${m}))
                local last_num=$(echo ${out_str} | cut -d, -f ${reset_idx})
                next_num=$((next_num + 1))
                if [ "${next_num}" -lt ${max_device} -a "${next_num}" -le "${num_devices}" ]; then
                  out_str=$(echo ${out_str} | sed "s/,${last_num},/,${next_num},/;s/,${last_num}\$/,${next_num}/")
                  max_device=$((next_num + 2))
                  changed=$((changed + 1))
                else
                  break
                fi
              done
              if [ "${changed}" -gt 0 ]; then
                max_device=$((num_devices + 1))
                chg_len=0
              fi
            fi
          done
        done
      fi
    done
  fi
}
# Detects the number of OpenCL-capable devices and returns it as the exit
# status (capped at 255 by the shell, as in the original design).
_hashcat_opencl_devices ()
{
  local num_devices=0

  if which clinfo &> /dev/null; then
    # BUGFIX: the original ran `clinfo 2>/dev/null 2> /dev/null` (duplicated
    # redirect) and captured clinfo's entire report — not a number — which
    # made the subsequent numeric `return` fail. Count device entries
    # instead. NOTE(review): assumes clinfo prints one "Device Name" line
    # per device — verify against the installed clinfo's output format.
    num_devices=$(clinfo 2> /dev/null | grep -c -i "device name")
  elif which nvidia-smi &> /dev/null; then
    num_devices=$(nvidia-smi --list-gpus | wc -l)
  fi

  return ${num_devices}
}
# Detects the number of logical CPUs (lines matching "processor" in
# /proc/cpuinfo) and returns the count as the exit status.
_hashcat_cpu_devices ()
{
  local num_devices=0

  if [ -f "/proc/cpuinfo" ]; then
    # Idiom fix: grep reads the file directly (the original piped it
    # through a useless `cat`); the match pattern is unchanged.
    num_devices=$(grep -c processor /proc/cpuinfo 2> /dev/null)
  fi

  return ${num_devices}
}
# Returns 0 iff ${2} occurs as a whole space-delimited word inside ${1}
# (at the start, in the middle, or at the end — but not as the entire
# string, matching the original's three grep patterns exactly).
_hashcat_contains ()
{
  local haystack=${1}
  local needle="${2}"

  # Single glob-based case statement replacing three grep pipelines; the
  # three patterns mirror " needle ", "^needle " and " needle$".
  case "${haystack}" in
    *" ${needle} "*|"${needle} "*|*" ${needle}")
      return 0
      ;;
  esac

  return 1
}
# Bash programmable completion for hashcat. Completes option names, option
# arguments (hash modes, attack modes, device permutations, charsets, ...)
# and positional arguments (hash file, dictionaries, masks) depending on
# the attack mode seen so far on the command line.
_hashcat ()
{
  local VERSION=2.10
  # Valid values for the respective options, taken from hashcat ${VERSION}.
  local HASH_MODES="0 10 11 12 20 21 22 23 30 40 50 60 100 101 110 111 112 120 121 122 124 130 131 132 133 140 141 150 160 190 200 300 400 500 501 900 1000 1100 1400 1410 1420 1421 1430 1440 1441 1450 1460 1500 1600 1700 1710 1711 1720 1722 1730 1731 1740 1750 1760 1800 2100 2400 2410 2500 2600 2611 2612 2711 2811 3000 3100 3200 3710 3711 3800 4300 4400 4500 4700 4800 4900 5000 5100 5200 5300 5400 5500 5600 5700 5800 6000 6100 6211 6212 6213 6221 6222 6223 6231 6232 6233 6241 6242 6243 6300 6400 6500 6600 6700 6800 6900 7100 7200 7300 7400 7500 7600 7700 7800 7900 8000 8100 8200 8300 8400 8500 8600 8700 8800 8900 9000 9100 9200 9300 9400 9500 9600 9700 9710 9720 9800 9810 9820 9900 10000 10100 10200 10300 10400 10410 10420 10500 10600 10700 10800 10900 11000 11100 11200 11300 11400 11500 11600 11700 11800 11900 12000 12100 12200 12300 12400 12500 12600 12700 12800 12900 13000"
  local ATTACK_MODES="0 1 3 6 7"
  local OUTFILE_FORMATS="1 2 3 4 5 6 7 8 9 10 11 12 13 14 15"
  local OPENCL_DEVICE_TYPES="1 2 3"
  local OPENCL_VECTOR_WIDTH="1 2 4 8 16"
  local DEBUG_MODE="1 2 3 4"
  local WORKLOAD_PROFILE="1 2 3"
  # File extensions hidden from filename completion (aggressive variant
  # additionally hides mask/charset files).
  local HIDDEN_FILES="exe|bin|pot|hcstat|dictstat|accepted|sh|cmd|bat|restore"
  local HIDDEN_FILES_AGGRESIVE="exe|bin|pot|hcstat|dictstat|hcmask|hcchr|accepted|sh|cmd|restore"
  local BUILD_IN_CHARSETS='?l ?u ?d ?a ?b ?s'
  local SHORT_OPTS="-m -a -V -v -h -b -t -o -p -c -d -w -n -u -j -k -r -g -1 -2 -3 -4 -i -s -l"
  local LONG_OPTS="--hash-type --attack-mode --version --help --eula --quiet --benchmark --benchmark-repeats --hex-salt --hex-wordlist --hex-charset --force --status --status-timer --status-automat --loopback --weak-hash-threshold --markov-hcstat --markov-disable --markov-classic --markov-threshold --runtime --session --restore --restore-disable --outfile --outfile-format --outfile-autohex-disable --outfile-check-timer --outfile-check-dir --separator --show --left --username --remove --remove-timer --potfile-disable --potfile-path --debug-mode --debug-file --induction-dir --segment-size --bitmap-min --bitmap-max --cpu-affinity --opencl-devices --opencl-platforms --opencl-device-types --opencl-vector-width --workload-profile --kernel-accel --kernel-loops --gpu-temp-disable --gpu-temp-abort --gpu-temp-retain --powertune-enable --skip --limit --keyspace --rule-left --rule-right --rules-file --generate-rules --generate-rules-func-min --generate-rules-func-max --generate-rules-seed --rules-cleanup --custom-charset1 --custom-charset2 --custom-charset3 --custom-charset4 --increment --increment-min --increment-max --logfile-disable --scrypt-tmto --truecrypt-keyfiles"
  # OPTIONS lists the switches that consume an argument (used to skip over
  # option/value pairs when counting positional args below).
  # NOTE(review): the bare "-disable" token below looks truncated (possibly
  # from "--gpu-temp-disable") — verify against the upstream completion.
  local OPTIONS="-m -a -t -o -p -c -d -w -n -u -j -k -r -g -1 -2 -3 -4 -s -l --hash-type --attack-mode --benchmark-repeats --status-timer --weak-hash-threshold --markov-hcstat --markov-threshold --runtime --session --timer --outfile --outfile-format --outfile-check-timer --outfile-check-dir --separator --remove-timer --potfile-path --debug-mode --debug-file --induction-dir --segment-size --bitmap-min --bitmap-max --cpu-affinity --opencl-devices --opencl-platforms --opencl-device-types --opencl-vector-width --workload-profile --kernel-accel --kernel-loops --gpu-temp-abort --gpu-temp-retain -disable --skip --limit --rule-left --rule-right --rules-file --generate-rules --generate-rules-func-min --generate-rules-func-max --generate-rules-seed --custom-charset1 --custom-charset2 --custom-charset3 --custom-charset4 --increment-min --increment-max --scrypt-tmto --truecrypt-keyfiles"

  COMPREPLY=()
  local cur="${COMP_WORDS[COMP_CWORD]}"
  local prev="${COMP_WORDS[COMP_CWORD-1]}"

  # if cur is just '=', ignore the '=' and treat it as only the prev was provided
  if [[ "${cur}" == '=' ]]; then
    cur=""
  elif [[ "${prev}" == '=' ]]; then
    if [ "${COMP_CWORD}" -gt 2 ]; then
      prev="${COMP_WORDS[COMP_CWORD-2]}"
    fi
  fi

  # First dispatch: the previous word is an option expecting an argument —
  # complete that argument.
  case "${prev}" in
    -m|--hash-type)
      COMPREPLY=($(compgen -W "${HASH_MODES}" -- ${cur}))
      return 0
      ;;
    -a|--attack-mode)
      COMPREPLY=($(compgen -W "${ATTACK_MODES}" -- ${cur}))
      return 0
      ;;
    --outfile-format)
      COMPREPLY=($(compgen -W "${OUTFILE_FORMATS}" -- ${cur}))
      return 0
      ;;
    -w|--workload-profile)
      COMPREPLY=($(compgen -W "${WORKLOAD_PROFILE}" -- ${cur}))
      return 0
      ;;
    -o|--outfile|-r|--rules-file|--debug-file|--potfile-path)
      local files=$(ls -d ${cur}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES_AGGRESIVE}')' 2> /dev/null)
      COMPREPLY=($(compgen -W "${files}" -- ${cur})) # or $(compgen -f -X '*.+('${HIDDEN_FILES_AGGRESIVE}')' -- ${cur})
      return 0
      ;;
    --markov-hcstat)
      local files=$(ls -d ${cur}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES}')' 2> /dev/null)
      COMPREPLY=($(compgen -W "${files}" -- ${cur})) # or $(compgen -f -X '*.+('${HIDDEN_FILES_AGGRESIVE}')' -- ${cur})
      return 0
      ;;
    -d|--opencl-devices)
      # Device count comes back as the helper's exit status.
      _hashcat_opencl_devices
      local num_devices=${?}
      _hashcat_get_permutations ${num_devices}
      COMPREPLY=($(compgen -W "${hashcat_devices_permutation}" -- ${cur}))
      return 0
      ;;
    --opencl-device-types)
      COMPREPLY=($(compgen -W "${OPENCL_DEVICE_TYPES}" -- ${cur}))
      return 0
      ;;
    --opencl-vector-width)
      COMPREPLY=($(compgen -W "${OPENCL_VECTOR_WIDTH}" -- ${cur}))
      return 0
      ;;
    --opencl-platforms)
      local icd_list=$(ls -1 /etc/OpenCL/vendors/*.icd 2> /dev/null)
      local architecture=$(getconf LONG_BIT 2> /dev/null)
      if [ -z "${architecture}" ]; then
        return 0
      fi
      # filter the icd_list (do not show 32 bit on 64bit systems and vice versa)
      if [ "${architecture}" -eq 64 ]; then
        icd_list=$(echo "${icd_list}" | grep -v "32.icd")
      else
        icd_list=$(echo "${icd_list}" | grep -v "64.icd")
      fi
      local number_icds=$(seq 1 $(echo "${icd_list}" | wc -l))
      COMPREPLY=($(compgen -W "${number_icds}" -- ${cur}))
      return 0
      ;;
    --cpu-affinity)
      _hashcat_cpu_devices
      local num_devices=${?}
      _hashcat_get_permutations ${num_devices}
      COMPREPLY=($(compgen -W "${hashcat_devices_permutation}" -- ${cur}))
      return 0
      ;;
    -1|-2|-3|-4|--custom-charset1|--custom-charset2|--custom-charset3|--custom-charset4)
      # Complete built-in charset tokens (?l ?u ...), extensions of the
      # current mask, and charset files.
      local mask=${BUILD_IN_CHARSETS}
      if [ -e "${cur}" ]; then # should be hcchr file (but not enforced)
        COMPREPLY=($(compgen -W "${cur}" -- ${cur}))
        return 0
      fi
      if [ -n "${cur}" ]; then
        local cur_var=$(echo "${cur}" | sed 's/\?$//')
        mask="${mask} ${cur_var}"
        local h
        for h in ${mask}; do
          if ! echo ${cur} | grep -q ${h} 2> /dev/null; then
            # ?a already covers ?l ?u ?d ?s ?b, so skip those extensions.
            if echo ${cur} | grep -q '?a' 2> /dev/null; then
              if [[ "${h}" == "?l" ]] ; then
                continue
              elif [[ "${h}" == "?u" ]] ; then
                continue
              elif [[ "${h}" == "?d" ]] ; then
                continue
              elif [[ "${h}" == "?s" ]] ; then
                continue
              elif [[ "${h}" == "?b" ]] ; then
                continue
              fi
            fi
            mask="${mask} ${cur_var}${h}"
          fi
        done
      fi
      local files=$(ls -d ${cur}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES}')' 2> /dev/null)
      mask="${mask} ${files}"
      COMPREPLY=($(compgen -W "${mask}" -- ${cur}))
      return 0
      ;;
    -t|-p|-c|-j|-k|-g| \
    --status-timer|--markov-threshold|--runtime|--session|--separator|--segment-size|--rule-left|--rule-right| \
    --gpu-temp-abort|--gpu-temp-retain|--generate-rules|--generate-rules-func-min|--generate-rules-func-max| \
    --increment-min|--increment-max|--remove-timer|--bitmap-min|--bitmap-max|--skip|--limit|--generate-rules-seed| \
    --weak-hash-threshold|--outfile-check-timer|--outfile-check-dir|--induction-dir|--scrypt-tmto)
      # Free-form numeric/string arguments: nothing sensible to complete.
      return 0
      ;;
    --debug-mode)
      COMPREPLY=($(compgen -W "${DEBUG_MODE}" -- ${cur}))
      return 0
      ;;
    --truecrypt-keyfiles)
      # first: remove the quotes such that file matching is possible
      local cur_part0=$(echo "${cur}" | grep -Eo '^("|'"'"')')
      local cur_mod=$(echo "${cur}" | sed 's/^["'"'"']//')
      local cur_part1=$(echo "${cur_mod}" | grep ',' 2> /dev/null | sed 's/^\(.*, *\)[^,]*$/\1/')
      local cur_part2=$(echo "${cur_mod}" | sed 's/^.*, *\([^,]*\)$/\1/')
      # generate lines with the file name and a duplicate of it with a comma at the end
      local files=$(ls -d ${cur_part2}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES_AGGRESIVE}')' 2> /dev/null | sed 's/^\(.*\)$/\1\n\1,\n/' | sed "s/^/${cur_part0}${cur_part1}/" | sed "s/$/${cur_part0}/")
      COMPREPLY=($(compgen -W "${files}" -- ${cur}))
      return 0
  esac

  # allow also the VARIANTS w/o spaces
  # we could use compgen -P prefix, but for some reason it doesn't seem to work always
  case "$cur" in
    -m*)
      local hash_modes_var="$(echo -n "-m ${HASH_MODES}" | sed 's/ / -m/g')"
      COMPREPLY=($(compgen -W "${hash_modes_var}" -- ${cur}))
      return 0
      ;;
    -a*)
      local attack_modes_var="$(echo -n "-a ${ATTACK_MODES}" | sed 's/ / -a/g')"
      COMPREPLY=($(compgen -W "${attack_modes_var}" -- ${cur}))
      return 0
      ;;
    -w*)
      local workload_profile_var="$(echo -n "-w ${WORKLOAD_PROFILE}" | sed 's/ / -w/g')"
      COMPREPLY=($(compgen -W "${workload_profile_var}" -- ${cur}))
      return 0
      ;;
    -o*)
      local outfile_var=$(ls -d ${cur:2}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES_AGGRESIVE}')' 2> /dev/null)
      outfile_var="$(echo -e "\n${outfile_var}" | sed 's/^/-o/g')"
      COMPREPLY=($(compgen -W "${outfile_var}" -- ${cur}))
      return 0
      ;;
    -r*)
      local outfile_var=$(ls -d ${cur:2}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES_AGGRESIVE}')' 2> /dev/null)
      outfile_var="$(echo -e "\n${outfile_var}" | sed 's/^/-r/g')"
      COMPREPLY=($(compgen -W "${outfile_var}" -- ${cur}))
      return 0
      ;;
    -d*)
      _hashcat_opencl_devices
      local num_devices=${?}
      _hashcat_get_permutations ${num_devices}
      local opencl_devices_var="$(echo " "${hashcat_devices_permutation} | sed 's/ / -d/g')"
      COMPREPLY=($(compgen -W "${opencl_devices_var}" -- ${cur}))
      return 0
      ;;
  esac

  # Complete options/switches (not the arguments)
  if [[ "${cur}" == -* ]]; then
    COMPREPLY=($(compgen -W "${SHORT_OPTS} ${LONG_OPTS}" -- ${cur}))
    return 0
  fi

  # additional parameter, no switch nor option but maybe hash file, dictionary, mask, directory
  # check if first option out of (hash.txt and dictionary|mask|directory)
  # is first option iff: here
  # is second option iff: COMP_CWORD > 2 and no switch before (-*) if no option afterwards (for mask -a 3, -a 6, -a 7 - but possible for dicts!)
  local h=1
  local no_opts=0
  local attack_mode=0 # also default of hashcat
  local has_charset_1=0
  local has_charset_2=0
  local has_charset_3=0
  local has_charset_4=0
  # Scan the words typed so far: remember the attack mode and which custom
  # charsets were defined, and count the positional (non-option) arguments.
  while [ ${h} -le ${COMP_CWORD} ]; do
    if [[ "${COMP_WORDS[h]}" == "-a" ]]; then
      attack_mode=${COMP_WORDS[$((h + 1))]}
    elif [[ "${COMP_WORDS[h]}" == -a* ]]; then
      attack_mode=${COMP_WORDS[h]:2}
    elif [[ "${COMP_WORDS[h]}" == "--attack-mode" ]]; then
      attack_mode=${COMP_WORDS[$((h + 1))]}
    elif [[ "${COMP_WORDS[h]}" == "-1" ]]; then
      has_charset_1=1
    elif [[ "${COMP_WORDS[h]}" == "--custom-charset1" ]]; then
      has_charset_1=1
    elif [[ "${COMP_WORDS[h]}" == "-2" ]]; then
      has_charset_2=1
    elif [[ "${COMP_WORDS[h]}" == "--custom-charset2" ]]; then
      has_charset_2=1
    elif [[ "${COMP_WORDS[h]}" == "-3" ]]; then
      has_charset_3=1
    elif [[ "${COMP_WORDS[h]}" == "--custom-charset3" ]]; then
      has_charset_3=1
    elif [[ "${COMP_WORDS[h]}" == "-4" ]]; then
      has_charset_4=1
    elif [[ "${COMP_WORDS[h]}" == "--custom-charset4" ]]; then
      has_charset_4=1
    fi
    if _hashcat_contains "${OPTIONS}" "${COMP_WORDS[h]}"; then
      # Argument-taking option: skip the option and its value.
      h=$((h + 2))
    else
      if ! _hashcat_contains "${LONG_OPTS}${SHORT_OPTS}" "${COMP_WORDS[h]}"; then
        # Words like "-m0" (option fused with value) are not positional.
        local variants="-m -a -w -n -u -o -r -d"
        local skip=0
        local v
        for v in ${variants}; do
          if [[ "${COMP_WORDS[h]:0:2}" == "${v}" ]]; then
            skip=1
          fi
        done
        if [ "${skip}" -eq 0 ]; then
          no_opts=$((no_opts + 1))
        fi
      fi
      h=$((h + 1))
    fi
  done

  # Second dispatch: complete the positional argument depending on how many
  # were already given and on the attack mode (0/1: dicts, 3: masks,
  # 6: dict then mask, 7: mask then dict).
  case "${no_opts}" in
    0)
      return 0
      ;;
    1)
      local files=$(ls -d ${cur}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES_AGGRESIVE}')' 2> /dev/null)
      COMPREPLY=($(compgen -W "${files}" -- ${cur}))
      return 0
      ;;
    *)
      case "${attack_mode}" in
        0)
          # dict/directory are files here
          local files=$(ls -d ${cur}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES_AGGRESIVE}')' 2> /dev/null)
          COMPREPLY=($(compgen -W "${files}" -- ${cur}))
          return 0
          ;;
        1)
          if [ "${no_opts}" -gt 4 ]; then
            return 0
          fi
          local files=$(ls -d ${cur}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES_AGGRESIVE}')' 2> /dev/null)
          COMPREPLY=($(compgen -W "${files}" -- ${cur}))
          return 0
          ;;
        3)
          if [ "${no_opts}" -eq 2 ]; then
            local mask=${BUILD_IN_CHARSETS}
            if [ "${has_charset_1}" -eq 1 ]; then
              mask="${mask} ?1"
            fi
            if [ "${has_charset_2}" -eq 1 ]; then
              mask="${mask} ?2"
            fi
            if [ "${has_charset_3}" -eq 1 ]; then
              mask="${mask} ?3"
            fi
            if [ "${has_charset_4}" -eq 1 ]; then
              mask="${mask} ?4"
            fi
            if [ -e "${cur}" ]; then # should be hcmask file (but not enforced)
              COMPREPLY=($(compgen -W "${cur}" -- ${cur}))
              return 0
            fi
            if [ -n "${cur}" ]; then
              local cur_var=$(echo "${cur}" | sed 's/\?$//')
              mask="${mask} ${cur_var}"
              local h
              for h in ${mask}; do
                mask="${mask} ${cur_var}${h}"
              done
            fi
            local files=$(ls -d ${cur}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES}')' 2> /dev/null)
            mask="${mask} ${files}"
            COMPREPLY=($(compgen -W "${mask}" -- ${cur}))
            return 0
          fi
          ;;
        6)
          if [ "${no_opts}" -eq 2 ]; then
            local files=$(ls -d ${cur}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES_AGGRESIVE}')' 2> /dev/null)
            COMPREPLY=($(compgen -W "${files}" -- ${cur}))
          elif [ "${no_opts}" -eq 3 ]; then
            local mask=${BUILD_IN_CHARSETS}
            if [ "${has_charset_1}" -eq 1 ]; then
              mask="${mask} ?1"
            fi
            if [ "${has_charset_2}" -eq 1 ]; then
              mask="${mask} ?2"
            fi
            if [ "${has_charset_3}" -eq 1 ]; then
              mask="${mask} ?3"
            fi
            if [ "${has_charset_4}" -eq 1 ]; then
              mask="${mask} ?4"
            fi
            if [ -e "${cur}" ]; then # should be hcmask file (but not enforced)
              COMPREPLY=($(compgen -W "${cur}" -- ${cur}))
              return 0
            fi
            if [ -n "${cur}" ]; then
              local cur_var=$(echo "${cur}" | sed 's/\?$//')
              mask="${mask} ${cur_var}"
              local h
              for h in ${mask}; do
                mask="${mask} ${cur_var}${h}"
              done
            fi
            local files=$(ls -d ${cur}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES}')' 2> /dev/null)
            mask="${mask} ${files}"
            COMPREPLY=($(compgen -W "${mask}" -- ${cur}))
            return 0
          fi
          ;;
        7)
          if [ "${no_opts}" -eq 2 ]; then
            local mask=${BUILD_IN_CHARSETS}
            if [ "${has_charset_1}" -eq 1 ]; then
              mask="${mask} ?1"
            fi
            if [ "${has_charset_2}" -eq 1 ]; then
              mask="${mask} ?2"
            fi
            if [ "${has_charset_3}" -eq 1 ]; then
              mask="${mask} ?3"
            fi
            if [ "${has_charset_4}" -eq 1 ]; then
              mask="${mask} ?4"
            fi
            if [ -e "${cur}" ]; then # should be hcmask file (but not enforced)
              COMPREPLY=($(compgen -W "${cur}" -- ${cur}))
              return 0
            fi
            if [ -n "${cur}" ]; then
              local cur_var=$(echo "${cur}" | sed 's/\?$//')
              mask="${mask} ${cur_var}"
              local h
              for h in ${mask}; do
                mask="${mask} ${cur_var}${h}"
              done
            fi
            local files=$(ls -d ${cur}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES}')' 2> /dev/null)
            mask="${mask} ${files}"
            COMPREPLY=($(compgen -W "${mask}" -- ${cur}))
            return 0
          elif [ "${no_opts}" -eq 3 ]; then
            local files=$(ls -d ${cur}* 2> /dev/null | grep -Eiv '*\.('${HIDDEN_FILES_AGGRESIVE}')' 2> /dev/null)
            COMPREPLY=($(compgen -W "${files}" -- ${cur}))
            return
          fi
          ;;
      esac
  esac
}

# Hook the completion up for all hashcat binary names.
complete -F _hashcat -o filenames "${HASHCAT_ROOT}"/hashcat64.bin "${HASHCAT_ROOT}"/hashcat32.bin "${HASHCAT_ROOT}"/hashcat hashcat
|
/*!
* jQuery Cookie Plugin
* https://github.com/carhartl/jquery-cookie
*
* Copyright 2011, <NAME>
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://www.opensource.org/licenses/mit-license.php
* http://www.opensource.org/licenses/GPL-2.0
*/
// Minified third-party jquery-cookie plugin — kept byte-for-byte intact.
// Usage: $.cookie(name) reads; $.cookie(name, value[, options]) writes;
// options support expires (Date or number of days), path, domain, secure
// and raw (skip URI encoding). Writing null/undefined expires the cookie.
;(function(g){g.cookie=function(h,b,a){if(1<arguments.length&&(!/Object/.test(Object.prototype.toString.call(b))||null===b||void 0===b)){a=g.extend({},a);if(null===b||void 0===b)a.expires=-1;if("number"===typeof a.expires){var d=a.expires,c=a.expires=new Date;c.setDate(c.getDate()+d)}b=""+b;return document.cookie=[encodeURIComponent(h),"=",a.raw?b:encodeURIComponent(b),a.expires?"; expires="+a.expires.toUTCString():"",a.path?"; path="+a.path:"",a.domain?"; domain="+a.domain:"",a.secure?"; secure": ""].join("")}for(var a=b||{},d=a.raw?function(a){return a}:decodeURIComponent,c=document.cookie.split("; "),e=0,f;f=c[e]&&c[e].split("=");e++)if(d(f[0])===h)return d(f[1]||"");return null}})(jQuery);
// Switches the easyui theme stylesheet in the top window and in every
// iframe, persists the chosen theme in a week-long cookie, and moves the
// check icon in the theme menu.
function changeThemeFun(themeName) {
    var $easyuiTheme = $('#easyuiTheme');
    var currentUrl = $easyuiTheme.attr('href');
    // Rebuild the stylesheet URL, swapping only the theme directory.
    var newHref = currentUrl.substring(0, currentUrl.indexOf('themes')) + 'themes/' + themeName + '/easyui.css';
    $easyuiTheme.attr('href', newHref);
    // Update the same stylesheet link inside each embedded iframe.
    $('iframe').each(function () {
        $(this).contents().find('#easyuiTheme').attr('href', newHref);
    });
    $.cookie('easyuiThemeName', themeName, { expires: 7, path: '/' });
    dealIconTheme(themeName);
}
// Moves the "ok" check icon in the theme menu onto the entry whose element
// id equals the active theme name.
function dealIconTheme(themeName) {
    var $themeMenu = $("#themeMenuItem");
    $themeMenu.find(".menu-icon").remove();
    $("#" + themeName).append("<div class='menu-icon icon-ok'></div>");
}
// On load: reconcile the applied easyui theme with the persisted cookie.
// If no cookie exists yet, seed it from the currently linked stylesheet;
// otherwise switch to the cookie's theme when they differ.
function autoChangeTheme() {
    var href = $('#easyuiTheme').attr('href');
    // Extract the theme directory name: text between "themes/" and the
    // following "/" (indexOf('themes') + 7 skips past "themes/").
    var afterThemes = href.substring(href.indexOf('themes') + 7);
    var currentTheme = afterThemes.substring(0, afterThemes.indexOf("/"));
    var savedTheme = $.cookie('easyuiThemeName', { path: '/' });
    if (!savedTheme) {
        dealIconTheme(currentTheme);
        $.cookie('easyuiThemeName', currentTheme, { expires: 7, path: '/' });
    } else if (currentTheme != savedTheme) {
        changeThemeFun(savedTheme);
    } else {
        dealIconTheme(currentTheme);
    }
}
#!/usr/bin/env bash
# Mason build recipe for harfbuzz: the mason framework sources this file and
# invokes the mason_* hook functions below in order.

MASON_NAME=harfbuzz
MASON_VERSION=0.9.41
MASON_LIB_FILE=lib/libharfbuzz.a
MASON_PKGCONFIG_FILE=lib/pkgconfig/harfbuzz.pc

. ${MASON_DIR}/mason.sh

# Downloads and unpacks the pinned release tarball (checksum-verified).
function mason_load_source {
    mason_download \
        http://www.freedesktop.org/software/harfbuzz/release/harfbuzz-${MASON_VERSION}.tar.bz2 \
        a7d4c722f7d663dfa51503c0c857046b86495a69

    mason_extract_tar_bz2

    export MASON_BUILD_PATH=${MASON_ROOT}/.build/${MASON_NAME}-${MASON_VERSION}
}

# Installs build-time dependencies (freetype, ragel) and exports the
# compiler/pkg-config environment needed by configure.
function mason_prepare_compile {
    FREETYPE_VERSION="2.6"
    ${MASON_DIR}/mason install freetype ${FREETYPE_VERSION}
    MASON_FREETYPE=$(${MASON_DIR}/mason prefix freetype ${FREETYPE_VERSION})
    # ragel is a host tool, so it is installed with MASON_PLATFORM unset.
    MASON_PLATFORM= ${MASON_DIR}/mason install ragel 6.9
    export PATH=$(MASON_PLATFORM= ${MASON_DIR}/mason prefix ragel 6.9)/bin:$PATH
    export PKG_CONFIG_PATH="$(${MASON_DIR}/mason prefix freetype ${FREETYPE_VERSION})/lib/pkgconfig":${PKG_CONFIG_PATH:-}
    export C_INCLUDE_PATH="${MASON_FREETYPE}/include/freetype2"
    export CPLUS_INCLUDE_PATH="${MASON_FREETYPE}/include/freetype2"
    export LIBRARY_PATH="${MASON_FREETYPE}/lib"

    if [[ ! `which pkg-config` ]]; then
        echo "harfbuzz configure needs pkg-config, please install pkg-config"
        exit 1
    fi
}

# Applies the repo patch, then configures and builds a static, minimal
# harfbuzz (freetype only; icu/glib/cairo/etc. disabled).
function mason_compile {
    export FREETYPE_CFLAGS="-I${MASON_FREETYPE}/include/freetype2"
    export FREETYPE_LIBS="-L${MASON_FREETYPE}/lib -lfreetype -lz"
    # Note CXXFLAGS overrides the harbuzz default with is `-O2 -g`
    export CXXFLAGS="${CXXFLAGS} ${FREETYPE_CFLAGS} -O3 -DNDEBUG"
    export CFLAGS="${CFLAGS} ${FREETYPE_CFLAGS} -O3 -DNDEBUG"
    export LDFLAGS="${LDFLAGS} ${FREETYPE_LIBS}"

    mason_step "Loading patch 'https://github.com/mapbox/mason/blob/${MASON_SLUG}/patch.diff'..."
    curl --retry 3 -s -f -# -L \
      https://raw.githubusercontent.com/mapbox/mason/${MASON_SLUG}/patch.diff \
      -O || (mason_error "Could not find patch for ${MASON_SLUG}" && exit 1)
    patch -N -p1 < ./patch.diff

    ./configure --prefix=${MASON_PREFIX} ${MASON_HOST_ARG} \
        --enable-static \
        --disable-shared \
        --disable-dependency-tracking \
        --with-icu=no \
        --with-cairo=no \
        --with-glib=no \
        --with-gobject=no \
        --with-graphite2=no \
        --with-freetype \
        --with-uniscribe=no \
        --with-coretext=no

    make -j${MASON_CONCURRENCY} V=1
    make install
}

function mason_ldflags {
    : # We're only using the full path to the archive, which is output in static_libs
}

# Compiler flags consumers need to build against this package.
function mason_cflags {
    echo "-I${MASON_PREFIX}/include"
}

function mason_clean {
    make clean
}

mason_run "$@"
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
#
# MarkAsCodeCoverageNonFeasible.py
# Copyright 2008 Google Inc.
#
# Marks a block of code as non feasible with regards to code coverage.
# To use it with Xcode 3.x, go to the scripts menu and choose
# "Edit User Scripts...". Then "Add Script File..." under the plus in
# the lower left hand corner.
#
# Set Input to "Selection"
# Directory to "Home Directory"
# Output to "Replace Selection"
# Errors to "Display in Alert"
#
# Then select the line(s) in your code that you want to mark as not
# covered, and select the script. Mapping it to Cntl-Option-N makes
# it easy to do from the keyboard.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import string
def main():
inputLines = sys.stdin.readlines()
if len(inputLines) == 1:
resultText = inputLines[0].rstrip() + """ // COV_NF_LINE\r"""
else:
firstLine = inputLines[0]
spaces = firstLine[0:-len(firstLine.lstrip())]
resultText = spaces + """// COV_NF_START\r"""
for curLine in inputLines:
resultText += curLine
resultText += spaces + """// COV_NF_END\r"""
print resultText
if __name__ == '__main__':
main() |
# Launcher for the electron app with async/await support enabled.
if [ -z "$DISPLAY" ]; then #If not set DISPLAY is SSH remote or tty
  export DISPLAY=:0 # Set by default display
fi
# BUGFIX: forward all arguments quoted ("$@") instead of an unquoted $1,
# which underwent word splitting/globbing and silently dropped extra args.
# With no arguments, "$@" expands to nothing, matching the old behavior.
electron --js-flags="--harmony-async-await" js/electron.js "$@"
|
#!/bin/bash
# Run a single dieharder battery entry: test -d 201 against generator
# -g 24, with a fixed seed (-S) so the run is reproducible.
# NOTE(review): confirm the intended test/generator ids against
# `dieharder -l` / `dieharder -g -1` for the installed version.
dieharder -d 201 -g 24 -S 965845128
|
import React, {Component} from 'react'
import PropTypes from 'prop-types';
/**
 * Card displaying a single flight search result: airline, route, stops,
 * transit airports, price and seat class.
 */
export default class FlightResultCard extends Component {
  static propTypes = {
    price: PropTypes.string,
    wlps_price: PropTypes.string,
    airline_logo: PropTypes.string,
    airline_name: PropTypes.string,
    airport_departure: PropTypes.string,
    airport_departure_code: PropTypes.string,
    airport_arrival: PropTypes.string,
    airport_arrival_code: PropTypes.string,
    airport_transits: PropTypes.array,
    time: PropTypes.string,
    stops: PropTypes.number,
    seatclass: PropTypes.string,
    availability: PropTypes.number
  }

  render() {
    // Split the combined time string on its "T" separators.
    // FIXME(review): timesp[3] is undefined unless props.time contains at
    // least three "T" separators -- confirm the expected format of `time`.
    const timesp = this.props.time.split("T");

    // "Direct" for non-stop flights, otherwise "<n> Stops".
    // (Fixes the stray semicolon that followed the original if-block.)
    const stoptext = this.props.stops > 0 ? this.props.stops + " Stops" : "Direct";

    // Comma-separate the transit airports.  Array#join produces the same
    // ", "-separated string as the original manual index loop.
    const airport_transits_output = (this.props.airport_transits || []).join(", ");

    return (
      <div className="flight-result-card">
        <div className="row">
          <div className="flight-result-card__container col-2 col-md-1">
            <img src={this.props.airline_logo} className="img-fluid flight-result-card__img" alt="Responsive" />
          </div>
          <div className="flight-result-card__container col col-md-3">
            <div className="flight-result-card__text">
              <strong>{timesp[1]+"-"+timesp[3]}</strong>
            </div>
            <div className="flight-result-card__subtext show-on-md">
              {this.props.airport_departure_code} to {this.props.airport_arrival_code} • {stoptext}
            </div>
            <div className="flight-result-card__subtext">
              {this.props.airline_name}
            </div>
          </div>
          <div className="flight-result-card__container hide-on-md col col-md-4">
            <div className="flight-result-card__text">
              {this.props.airport_departure_code} to {this.props.airport_arrival_code}
            </div>
            <div className="flight-result-card__subtext">
              {this.props.airport_departure}
            </div>
            <div className="flight-result-card__subtext">
              {this.props.airport_arrival}
            </div>
          </div>
          <div className="flight-result-card__container hide-on-md col col-md-2">
            <div className="flight-result-card__text">
              {stoptext}
            </div>
            <div className="flight-result-card__subtext">
              {airport_transits_output}
            </div>
          </div>
          <div className="flight-result-card__container col col-md-2">
            <div className="flight-result-card__text">
              <strong>IDR {this.props.price}</strong>
            </div>
            <div className="flight-result-card__subtext">
              {this.props.seatclass}
            </div>
          </div>
        </div>
      </div>
    );
  }
}
|
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/appl/RegistryManagerImpl.java<gh_stars>10-100
package io.opensphere.core.appl;
import java.util.function.Predicate;
import javax.swing.JFrame;
import io.opensphere.core.PluginToolboxRegistry;
import io.opensphere.core.UnitsRegistry;
import io.opensphere.core.api.Envoy;
import io.opensphere.core.api.Transformer;
import io.opensphere.core.cache.Cache;
import io.opensphere.core.control.ControlRegistry;
import io.opensphere.core.control.ControlRegistryImpl;
import io.opensphere.core.control.ui.UIRegistry;
import io.opensphere.core.control.ui.impl.UIRegistryImpl;
import io.opensphere.core.data.DataRegistryImpl;
import io.opensphere.core.datafilter.impl.DataFilterRegistryImpl;
import io.opensphere.core.geometry.GeometryRegistry;
import io.opensphere.core.geometry.GeometryRegistryImpl;
import io.opensphere.core.hud.awt.HUDFrame;
import io.opensphere.core.hud.awt.HUDJInternalFrame;
import io.opensphere.core.importer.FileOrURLImporter;
import io.opensphere.core.importer.impl.ImporterRegistryImpl;
import io.opensphere.core.messaging.GenericSubscriber;
import io.opensphere.core.metrics.impl.MetricsRegistryImpl;
import io.opensphere.core.order.OrderManagerRegistry;
import io.opensphere.core.order.impl.OrderManagerRegistryImpl;
import io.opensphere.core.preferences.PreferencesRegistry;
import io.opensphere.core.preferences.PreferencesRegistryImpl;
import io.opensphere.core.search.SearchRegistryImpl;
import io.opensphere.core.server.ServerProviderRegistry;
import io.opensphere.core.server.impl.ServerProviderRegistryImpl;
import io.opensphere.core.util.registry.GenericRegistry;
/**
* Manager for the code system registries.
*/
public class RegistryManagerImpl
{
    /** The control registry. */
    private final ControlRegistry myControlRegistry;

    /** The Data filter registry. */
    private final DataFilterRegistryImpl myDataFilterRegistry;

    /** The data registry, or {@code null} if no cache was supplied. */
    private final DataRegistryImpl myDataRegistry;

    /** The envoy registry. */
    private final GenericRegistry<Envoy> myEnvoyRegistry;

    /** The geometry registry. */
    private final GeometryRegistry myGeometryRegistry;

    /** The importer registry. */
    private final ImporterRegistryImpl myImporterRegistry;

    /** The Metrics registry. */
    private final MetricsRegistryImpl myMetricsRegistry;

    /** The order manager registry. */
    private final OrderManagerRegistry myOrderManagerRegistry;

    /** The plugin toolbox Registry. */
    private final PluginToolboxRegistry myPluginToolboxRegistry;

    /** The preferences registry. */
    private final PreferencesRegistry myPreferencesRegistry;

    /** The search provider registry. */
    private final SearchRegistryImpl mySearchRegistry;

    /**
     * The server provider registry.
     */
    private final ServerProviderRegistry myServerProviderRegistry;

    /** The transformer registry. */
    private final GenericRegistry<Transformer> myTransformerRegistry;

    /** The UI registry. */
    private final UIRegistry myUIRegistry;

    /**
     * Subscriber that adds internal frames that implement the importer
     * interface to the importer registry.
     */
    private final GenericSubscriber<HUDFrame> myComponentRegistryToImporterBinding;

    /** The units registry. */
    private final UnitsRegistry myUnitsRegistry;

    /**
     * Constructor.
     *
     * @param executorManager The executor manager.
     * @param cache A cache implementation, or <code>null</code> if there is
     *            none.
     * @param mainFrame The top level frame for the application.
     */
    public RegistryManagerImpl(ExecutorManager executorManager, Cache cache, JFrame mainFrame)
    {
        myPreferencesRegistry = new PreferencesRegistryImpl(executorManager.getPreferencesEventExecutor(),
                executorManager.getPreferencesPersistExecutor());
        myControlRegistry = new ControlRegistryImpl();
        // The data registry is only available when a cache was supplied.
        myDataRegistry = cache == null ? null : new DataRegistryImpl(executorManager.createDataRegistryExecutor(), cache);
        myEnvoyRegistry = new GenericRegistry<>();
        myGeometryRegistry = new GeometryRegistryImpl(executorManager.getGeometryDataRetrieverExecutor());
        myTransformerRegistry = new GenericRegistry<>();
        myUIRegistry = new UIRegistryImpl(myControlRegistry, mainFrame, myPreferencesRegistry);
        myUnitsRegistry = new UnitsRegistryImpl(myPreferencesRegistry);
        myPluginToolboxRegistry = new PluginToolboxRegistryImpl();
        myMetricsRegistry = new MetricsRegistryImpl();
        myDataFilterRegistry = new DataFilterRegistryImpl(myPreferencesRegistry);
        mySearchRegistry = new SearchRegistryImpl();
        myImporterRegistry = new ImporterRegistryImpl();
        myOrderManagerRegistry = new OrderManagerRegistryImpl(myPreferencesRegistry);
        myServerProviderRegistry = new ServerProviderRegistryImpl();
        myComponentRegistryToImporterBinding = (source, adds, removes) ->
        {
            Predicate<HUDFrame> importerPredicate = f1 -> f1 instanceof HUDJInternalFrame
                    && ((HUDJInternalFrame)f1).getInternalFrame() instanceof FileOrURLImporter;
            adds.stream().filter(importerPredicate)
                    .forEach(f2 -> myImporterRegistry.addImporter((FileOrURLImporter)((HUDJInternalFrame)f2).getInternalFrame()));
            removes.stream().filter(importerPredicate).forEach(
                    f3 -> myImporterRegistry.removeImporter((FileOrURLImporter)((HUDJInternalFrame)f3).getInternalFrame()));
        };
    }

    /**
     * Bind the UI component registry to the importer registry such that when UI
     * components are added that can support importing, they will also be added
     * to the importer registry. This must be done after the UI component
     * registry is initialized.
     */
    public void bindComponentRegistryToImporterRegistry()
    {
        myUIRegistry.getComponentRegistry().addSubscriber(myComponentRegistryToImporterBinding);
    }

    /**
     * Close the registries.
     */
    public void close()
    {
        // The data registry is null when the manager was constructed without a
        // cache; guard against the NPE the unconditional close() caused.
        if (myDataRegistry != null)
        {
            myDataRegistry.close();
        }
    }

    /**
     * Get the control registry.
     *
     * @return The control registry.
     */
    public ControlRegistry getControlRegistry()
    {
        return myControlRegistry;
    }

    /**
     * Get the data filter registry.
     *
     * @return The data filter registry.
     */
    public DataFilterRegistryImpl getDataFilterRegistry()
    {
        return myDataFilterRegistry;
    }

    /**
     * Get the data registry.
     *
     * @return The data registry, or {@code null} if the manager was
     *         constructed without a cache.
     */
    public DataRegistryImpl getDataRegistry()
    {
        return myDataRegistry;
    }

    /**
     * Get the envoy registry.
     *
     * @return The envoy registry.
     */
    public GenericRegistry<Envoy> getEnvoyRegistry()
    {
        return myEnvoyRegistry;
    }

    /**
     * Get the geometry registry.
     *
     * @return The geometry registry.
     */
    public GeometryRegistry getGeometryRegistry()
    {
        return myGeometryRegistry;
    }

    /**
     * Get the importer registry.
     *
     * @return The importer registry.
     */
    public ImporterRegistryImpl getImporterRegistry()
    {
        return myImporterRegistry;
    }

    /**
     * Get the metrics registry.
     *
     * @return The metrics registry.
     */
    public MetricsRegistryImpl getMetricsRegistry()
    {
        return myMetricsRegistry;
    }

    /**
     * Gets the registry for order managers.
     *
     * @return the order manager registry.
     */
    public OrderManagerRegistry getOrderManagerRegistry()
    {
        return myOrderManagerRegistry;
    }

    /**
     * Get the plugin toolbox registry.
     *
     * @return The plugin toolbox registry.
     */
    public PluginToolboxRegistry getPluginToolboxRegistry()
    {
        return myPluginToolboxRegistry;
    }

    /**
     * Get the preferences registry.
     *
     * @return The preferences registry.
     */
    public PreferencesRegistry getPreferencesRegistry()
    {
        return myPreferencesRegistry;
    }

    /**
     * Get the search registry.
     *
     * @return The search registry.
     */
    public SearchRegistryImpl getSearchRegistry()
    {
        return mySearchRegistry;
    }

    /**
     * Get the server provider registry.
     *
     * @return The server provider registry.
     */
    public ServerProviderRegistry getServerProviderRegistry()
    {
        return myServerProviderRegistry;
    }

    /**
     * Get the transformer registry.
     *
     * @return The transformer registry.
     */
    public GenericRegistry<Transformer> getTransformerRegistry()
    {
        return myTransformerRegistry;
    }

    /**
     * Get the UI registry.
     *
     * @return The UI registry.
     */
    public UIRegistry getUIRegistry()
    {
        return myUIRegistry;
    }

    /**
     * Get the units registry.
     *
     * @return The units registry.
     */
    public UnitsRegistry getUnitsRegistry()
    {
        return myUnitsRegistry;
    }
}
|
package altinn.platform.pdf.models;
import java.util.HashMap;
import java.util.List;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.apache.commons.lang3.SerializationUtils;
@Schema(description = "A form layout element")
@Getter
@Setter
@AllArgsConstructor
@NoArgsConstructor
public class FormLayoutElement implements Cloneable {
  // Component type discriminator (e.g. group vs. field) -- TODO confirm value set.
  private String type;
  // Unique element id within the layout.
  private String id;
  // Map of binding name -> data model path; deep-copied in clone().
  private HashMap<String, String> dataModelBindings;
  private TextResourceBindings textResourceBindings;
  private String optionsId;
  private List<Option> options;
  private OptionSource source;
  private boolean simplified;
  // Ids of child elements (used by group-type elements).
  private List<String> children;
  private int count;
  private int maxCount;
  private List<String> dataTypeIds;
  private GroupEditProperties edit;

  /**
   * Creates a copy of this element. All fields are shallow-copied except
   * dataModelBindings, which is deep-copied via serialization so the clone's
   * map can be mutated independently of the original.
   */
  @Override
  public Object clone() {
    FormLayoutElement fle = null;
    try {
      // Shallow field-for-field copy.
      fle = (FormLayoutElement) super.clone();
    } catch (CloneNotSupportedException e) {
      // Defensive fallback: should be unreachable because the class
      // implements Cloneable, but build the copy manually if it happens.
      fle = new FormLayoutElement(
        this.type,
        this.id,
        this.dataModelBindings,
        this.textResourceBindings,
        this.optionsId,
        this.options,
        this.source,
        this.simplified,
        this.children,
        this.count,
        this.maxCount,
        this.dataTypeIds,
        this.edit);
    }
    // Deep copy; SerializationUtils.clone(null) returns null, so a missing
    // bindings map is handled safely.
    fle.dataModelBindings = SerializationUtils.clone(dataModelBindings);
    return fle;
  }
}
|
import pandas as pd
import numpy as np
import datetime
#simple class
class Adding:
    # Attaches a sequence of values to a dataframe as a new column.
    # NOTE(review): the parameter/attribute name `list` shadows the builtin;
    # kept as-is to preserve the public interface.
    def __init__(self, dataframe, list):
        self.dataframe = dataframe
        self.list = list
    def addlist(self):
        # Align the values to the dataframe's index and store them under
        # 'listcol'.  Lengths must match or pandas raises a ValueError.
        s = pd.Series(self.list, index = self.dataframe.index)
        self.dataframe['listcol'] = s
# base class for datawrangling
class Dfwrangler:
    # Base class: holds the pandas DataFrame that subclasses operate on.
    def __init__(self, dataframe):
        self.dataframe = dataframe
# child class for dataframes with date columns
class Dates(Dfwrangler):
    # Wrangler for dataframes containing a date column: converts the column
    # to datetime and splits it into year/month/day columns.
    def __init__(self, dataframe, datecol):
        super().__init__(dataframe)
        # The date column as a pandas Series.
        self.datecol = dataframe[datecol]
    def splitdates(self):
        # make col datetime.
        # BUG FIX: the original tested type(self.datecol), which is always
        # pandas.Series, so neither the object branch nor the numeric
        # YYYYMMDD branch could ever fire.  Inspect the Series dtype instead.
        if self.datecol.dtype == np.float64 or self.datecol.dtype == np.int64:
            # Numeric dates are assumed to be packed as YYYYMMDD.
            self.datecol = pd.to_datetime(self.datecol, format='%Y%m%d')
        else:
            # Strings / object / anything else: let pandas infer the format.
            self.datecol = pd.to_datetime(self.datecol)
        # then split into year, month, day columns on the wrapped dataframe
        self.dataframe['year'] = self.datecol.dt.year
        self.dataframe['month'] = self.datecol.dt.month
        self.dataframe['day'] = self.datecol.dt.day
|
<html>
<head>
<script>
const contacts = [{
  "name": "John Doe",
  "phone": "123-456-7890",
  "email": "johndoe@example.com"
},
{
  "name": "Jane Doe",
  "phone": "098-765-4321",
  "email": "janedoe@example.com"
}];
</script>
</head>
<body>
<input type="text" id="name_input" placeholder="Enter name to search" />
<ul id="contacts_list">
</ul>
<script>
const nameInput = document.querySelector('#name_input');
const contactsList = document.querySelector('#contacts_list');
// Live-filter the contact list as the user types (case-insensitive
// substring match on the name).
nameInput.addEventListener('input', () => {
  const query = nameInput.value.toLowerCase();
  // Build the whole list first and assign innerHTML once.  The original
  // appended with `innerHTML +=` inside the loop, which re-parses and
  // re-renders the entire list on every iteration (O(n^2) work).
  const items = contacts
    .filter(user => user.name.toLowerCase().includes(query))
    .map(user => `
      <li>
        Name: ${user.name}
        Phone: ${user.phone}
        Email: ${user.email}
      </li>
    `);
  contactsList.innerHTML = items.join('');
});
</script>
</body>
</html>
<reponame>amoradi/scintilla
import React from "react";
import { Data, Fill, RGBA, Stroke } from "../../shared/types";
import { FrameContext } from "../Frame";
import { makeD, makePoints, project, sanitizeYData } from "../../shared/utils";
import { v1 as uuidv1 } from "uuid";
import { isRGBA } from "../../shared/utils";
import { Path } from "../Path";
import { FillGradient } from "./FillGradient";
import { StrokeGradient } from "./StrokeGradient";
import { MultiSolidColor } from "./MultiSolidColor";
type Props = { data: Data; fill?: Fill; stroke?: Stroke };
/**
 * Renders a data series as an SVG line inside the enclosing Frame.
 * Y values are projected into pixel space from either the Frame's explicit
 * yRange or the data's own min/max, then drawn with up to four layers:
 * gradient fill, solid fill (single or multi color), gradient stroke and
 * solid stroke (single or multi color).
 */
const Line = ({ data, fill, stroke }: Props) => {
  return (
    <FrameContext.Consumer>
      {({ height, viewBox, yRange }) => {
        // Clamp/clean the raw data against the frame's y range.
        const yData = sanitizeYData(data, yRange);
        // Use the explicit range when given, otherwise derive it from data.
        let [min, max] = yRange
          ? yRange
          : [Math.min(...yData), Math.max(...yData)];
        // Project each data value into vertical pixel coordinates.
        const yPixels = yData.map(y => {
          return project(height, { n: y, min, max });
        });
        const points = makePoints(yPixels);
        // Open path used for strokes.
        const d = makeD(
          points,
          viewBox,
          yRange
            ? {
                min: project(height, { n: min, min, max }),
                max: project(height, { n: max, min, max })
              }
            : undefined
        );
        // Closed (polygonal) path used for fills; final `true` closes it.
        const polygonalD = makeD(
          points,
          viewBox,
          yRange
            ? {
                min: project(height, { n: min, min, max }),
                max: project(height, { n: max, min, max })
              }
            : undefined,
          true
        );
        // Unique ids tie gradient <defs> to the paths that reference them.
        const uuid = uuidv1();
        let uuidFillGradient;
        return (
          <svg
            width="100%"
            height={height}
            viewBox={`0 0 ${viewBox.width} ${viewBox.height}`}
            preserveAspectRatio="none"
            overflow="visible"
          >
            {/* ========== Fill ========== */}
            {/* --- Gradient --- */}
            {/* NOTE(review): the (uuidFillGradient = uuidv1()) assignment is
                deliberately embedded in the && chain so the id is only
                generated when a gradient fill is actually rendered. */}
            {fill && fill.gradient && (uuidFillGradient = uuidv1()) && (
              <FillGradient
                gradient={fill.gradient}
                polygonalD={polygonalD}
                uuid={uuidFillGradient}
              />
            )}
            {/* --- Solid --- */}
            {/* 1 color */}
            {fill && fill.solid && isRGBA(fill.solid) && (
              <Path
                clipPath={"none"}
                d={polygonalD}
                fill={`rgba(${fill.solid.join(", ")})`}
                stroke="none"
                strokeWidth={0}
                strokeDasharray="none"
                vectorEffect="non-scaling-stroke"
              />
            )}
            {/* multi color */}
            {fill &&
              fill.solid &&
              Array.isArray(fill.solid) &&
              fill.solid.every((c: any) => isRGBA(c)) && (
                <MultiSolidColor
                  color={fill.solid as RGBA[]}
                  d={polygonalD}
                  maskHeight={viewBox.height}
                  viewBoxWidth={viewBox.width}
                  strokeStyle="solid"
                  width={0}
                  mode="fill"
                />
              )}
            {/* ========== Stroke ========== */}
            {/* --- Gradient --- */}
            {/* dash pattern: 6x width on, 4x width off (width defaults to 1) */}
            {stroke && stroke.color && stroke.color.gradient && (
              <StrokeGradient
                d={d}
                gradient={stroke.color.gradient}
                strokeDasharray={
                  (stroke &&
                    stroke.style &&
                    stroke.style === "dash" &&
                    `${((stroke && stroke.width) || 1) * 6} ${((stroke &&
                      stroke.width) ||
                      1) * 4}`) ||
                  "none"
                }
                strokeWidth={(stroke && stroke.width) || 0}
                uuid={uuid}
              />
            )}
            {/* --- Solid --- */}
            {/* 1 color */}
            {stroke && stroke.color && isRGBA(stroke.color.solid) && (
              <Path
                clipPath={"none"}
                d={d}
                stroke={`rgba(${stroke &&
                  stroke.color &&
                  stroke.color.solid &&
                  stroke.color.solid.join(", ")})`}
                strokeWidth={(stroke && stroke.width) || 0}
                strokeDasharray={
                  (stroke &&
                    stroke.style &&
                    stroke.style === "dash" &&
                    `${((stroke && stroke.width) || 1) * 6} ${((stroke &&
                      stroke.width) ||
                      1) * 4}`) ||
                  "none"
                }
                fill="none"
                vectorEffect="non-scaling-stroke"
              />
            )}
            {/* multi color */}
            {stroke &&
              stroke.color &&
              Array.isArray(stroke.color.solid) &&
              stroke.color.solid.every((c: any) => isRGBA(c)) && (
                <MultiSolidColor
                  color={stroke.color.solid as RGBA[]}
                  d={d}
                  maskHeight={viewBox.height}
                  viewBoxWidth={viewBox.width}
                  strokeStyle={stroke.style}
                  width={stroke.width}
                  mode={"stroke"}
                />
              )}
          </svg>
        );
      }}
    </FrameContext.Consumer>
  );
};
export { Line };
|
# Grade records as (name, score) pairs.
score_pairs = [("John", 96), ("Adam", 97), ("Sarah", 95)]
# Order by the score element, highest first.
sorted_list = sorted(score_pairs, key=lambda pair: pair[1], reverse=True)
print(sorted_list)  # Output: [('Adam', 97), ('John', 96), ('Sarah', 95)]
#!/bin/bash
# Fail fast: abort on any command error (errexit), on use of unset
# variables (nounset), and propagate failures through pipelines (pipefail).
set -o errexit
set -o nounset
set -o pipefail
# Static analysis over the three top-level package trees.
go vet ./core/...
go vet ./schema/...
go vet ./connector/...
|
#!/bin/sh
# Download a curated hosts file and install it as /etc/hosts.
# -o writes the body to a temp file; -w prints only the HTTP status code
# (the original's `...` backticks and unquoted test variable are replaced
# with $() and a quoted comparison).
http_code=$(sudo curl -o "/etc/hosts_tmp" -w "%{http_code}" "https://raw.githubusercontent.com/wangchunming/2017hosts/master/hosts-pc")
if [ "$http_code" -eq 200 ]; then
    # mv atomically replaces /etc/hosts; the original's separate rm left a
    # window with no hosts file at all.
    sudo mv "/etc/hosts_tmp" "/etc/hosts"
    echo "Hosts 更新完成"
else
    # Remove the partial/error-page download so it is not installed later.
    sudo rm -f "/etc/hosts_tmp"
    echo "请求错误"
fi
|
<gh_stars>1-10
'use strict';
// Test shim: replaces navigator.mozSetMessageHandler (Firefox OS system
// message API) so registered handlers are captured in `msgHandler` instead
// of being wired to the real platform.  The original implementation is kept
// in `realMozSetMessageHandler` -- presumably for restoration/forwarding by
// code outside this chunk; confirm before removing.
var realMozSetMessageHandler = navigator.mozSetMessageHandler;
// Captured handlers, keyed by message type.
var msgHandler = {};
navigator.mozSetMessageHandler = function(type, handler) {
  msgHandler[type] = handler;
};
|
<filename>src/tree/symbols/TSEqOp.java
package tree.symbols;
import tree.DefaultTreeNodeSymbol;
/** Tree symbol for the equality operator "==". */
public class TSEqOp extends DefaultTreeNodeSymbol {
    // Symbol id; EQ_OP is inherited -- presumably a constant from
    // DefaultTreeNodeSymbol or its supertype; confirm its origin.
    // NOTE(review): these public static fields are mutable; if no caller
    // assigns them they should be final.
    public static int id = EQ_OP;
    public static String text = "==";
    public TSEqOp() {
        super(text, id);
    }
}
|
/*
** EPITECH PROJECT, 2020
** Vulkan-Engine
** File description:
** Game.hpp
*/
#ifndef GAME_HPP_
#define GAME_HPP_
#include <string>
#include <memory>
#include <random>
class EntityLib;
class GPUDisplay;
class GPUEntityMgr;
class Tracer;
struct EntityData;
struct EntityState;
struct JaugeVertex;
// Applied when player `p` collects a bonus: grants special charge and
// coolant, tops up the shield, and converts shield overflow into special.
// NOTE(review): the trailing `continue` means this macro can ONLY be used
// as the tail of a loop-iteration body; it is not a general statement.
#define BONUS_EFFECT \
    p->special += 0.2; \
    p->coolant += 200; \
    p->shield += 0.0625; \
    if (p->shield > p->shieldMax) { \
        p->special += p->shield - p->shieldMax; \
        p->shield = p->shieldMax; \
    } \
    continue
// Frames (ticks) before a dead player may be revived -- TODO confirm unit.
#define RESPAWN_TIME 600
// Every drawable/spawnable entity kind; the numeric value is presumably
// also the sprite/aspect index -- confirm against the atlas.
enum EntityTypes : unsigned char {
    MIEL0,
    MIEL1,
    MIEL2,
    MIEL3,
    MIEL4,
    MIEL5,
    LINE1,
    LINE2,
    LINE3,
    LINE4,
    MULTICOLOR,
    CHOCOLAT,
    CANDY1,
    CANDY2,
    CANDY3,
    CANDY4,
    BOMB1,
    BOMB2,
    BOMB3,
    BOMB4,
    BLOCK1,
    BLOCK2,
    BLOCK3,
    BLOCK4,
    FISH1,
    FISH2,
    FISH3,
    FISH4,
    BIG_LASER,
    BIG_LASER2,
    // Player vessel aspects.
    CLASSIC,
    BETTER,
    AERODYNAMIC,
    OPTIMAL,
    ENERGISED,
    BOOSTED,
    LASER,
    LASER2,
    ECLAT0,
    ECLAT1,
    ECLAT2,
    ECLAT3,
    // Collectible bonuses.
    BONUS0,
    BONUS1,
    BONUS2,
    BONUS3,
    BONUS4,
    BONUS5,
    BONUS6,
    BLASTER,
    ECLATANT,
    PROTECTO,
    WAVE,
    STRONG_WAVE,
    BIG_WAVE,
    MINI_TRANS,
    TRANS,
    SUPER_TRANS,
    LASERIFIER
};
// Gameplay category flags attached to entities (pickup value, boost type,
// candy family, etc.).
enum EntityFlags : unsigned char {
    F_OTHER,
    F_PIECE_1,
    F_PIECE_5,
    F_PIECE_10,
    F_PIECE_25,
    F_SHIELD_BOOST,
    F_SPECIAL_BOOST,
    F_COOLANT_BOOST,
    F_PLAYER,
    F_ECLATANT,
    F_MULTICOLOR,
    F_LINE,
    F_BLOCK,
    F_CANDY,
    F_MIEL1,
    F_MIEL2,
    F_MIEL3,
    F_MIEL4,
    F_MIEL5,
};
// Equippable special-weapon identifiers.
enum SpecialWeapon : unsigned char {
    SW_NONE,
    SW_BIG_LASER,
    SW_PROTECTO,
    SW_MINI_TRANS,
    SW_TRANS,
    SW_SUPER_TRANS,
    SW_SHIELD_BOOSTER,
    SW_ENHANCED_SHIELD_BOOSTER,
    SW_HIGHP_SHIELD_OVERCLOCKER,
    SW_UNIVERSAL_CONVERTOR
};
// Shop/equipment attribute records (one table entry per purchasable item).
struct VesselAttributes {
    std::string name;
    int cost;
    int speed;
    int energyConsumption;
    // Sprite/aspect id, presumably an EntityTypes value -- confirm.
    unsigned char aspect;
};
struct SpecialWeaponAttributes {
    std::string name;
    std::string desc;
    int cost;
    // Special-meter charge consumed per use.
    float specialCost;
};
struct ShieldAttributes {
    std::string name;
    int cost;
    float shieldRate;
    float shieldCapacity;
    float energyConsumption;
    float heat;
};
struct WeaponAttributes {
    std::string name;
    int cost;
    int baseEnergyConsumption;
};
struct GeneratorAttributes {
    std::string name;
    int cost;
    float energyCapacity;
    float energyRate;
    int deathLossRatio;
};
struct RecoolerAttributes {
    std::string name;
    int cost;
    float recoolingRate;
    float recoolingCapacity;
};
// On-disk size of SavedDatas' meaningful payload in bytes
// (4-byte maxScore + 14 single-byte fields = 18).
#define SIZEOF_SAVED_DATA 18
// Persisted per-slot progression: current equipment and unlock levels.
struct SavedDatas {
    unsigned int maxScore;
    unsigned char vessel;
    unsigned char weapon;
    unsigned char weaponLevel;
    unsigned char special;
    unsigned char generator;
    unsigned char shield;
    unsigned char recooler;
    unsigned char vesselUnlock;
    unsigned char weaponUnlock;
    unsigned char weaponLevelUnlock;
    unsigned char specialUnlock;
    unsigned char generatorUnlock;
    unsigned char shieldUnlock;
    unsigned char recoolerUnlock;
};
// Full runtime state of one player: persisted data, position/velocity,
// resource meters (energy/shield/coolant/special) and input flags.
struct Player {
    SavedDatas saved;
    // HUD gauge vertices written by the game -- ownership unclear; confirm.
    JaugeVertex *ptr;
    int x;
    int y;
    int velX; // 0
    int velY; // 0
    unsigned int score;
    int lastHealth;
    float moveSpeed;
    float moveEnergyCost;
    float moveHeatCost;
    float energy;
    float energyRate;
    float energyHeatCost;
    float energyMax;
    float shield;
    float shieldRate;
    float shieldEnergyCost;
    float shieldHeatCost;
    float shieldMax;
    float coolant;
    float coolantRate;
    float coolantMax;
    float special;
    float specialMax;
    float posX;
    float posY;
    bool alive;
    bool uniconvert;
    bool highPOverclocker;
    bool shooting;
    bool useSpecial;
    bool boost;
    unsigned char vesselAspect;
};
// Top-level game object: owns the display, entity engine and tracer,
// drives the main loop, and manages player state, saves and recursion
// (prestige) progression.  Non-copyable.
class Game {
public:
    Game(const std::string &name, uint32_t version, int width, int height);
    virtual ~Game();
    Game(const Game &cpy) = delete;
    Game &operator=(const Game &src) = delete;
    void init();
    void mainloop();
    // Load save slot `slot`; playerCount selects 1- or 2-player mode.
    void load(int slot, int playerCount = 2);
    bool openMenu(int type);
    // Recursion ("prestige") scoring helpers.
    long getRecursionGain() const;
    long getMaxedRecursionGain() const;
    void makeRecursion();
    long getScoreAfterRecursion() const;
private:
    void save();
    void gameStart();
    void gameEnd();
    void initPlayer(Player &p, int idx);
    // Per-tick update entry points, invoked via the static trampolines below.
    void update(GPUEntityMgr &engine);
    void updatePlayer(GPUEntityMgr &engine);
    void updatePlayer(Player &p, int idx);
    void updatePlayerState(Player &p, int idx);
    void shoot(Player &p);
    void useSpecial(Player &p);
    void spawn(GPUEntityMgr &engine);
    void revive(Player &target, Player &saver, int idx);
    // Static trampolines so the entity engine can call back into `self`.
    static void updateS(Game *self, GPUEntityMgr &engine) {
        self->update(engine);
    }
    static void updatePlayerS(Game *self, GPUEntityMgr &engine) {
        self->updatePlayer(engine);
    }
    Player *getClosest(short idx);
    std::unique_ptr<GPUDisplay> display;
    std::unique_ptr<GPUEntityMgr> compute;
    std::unique_ptr<Tracer> tracer;
    std::shared_ptr<EntityLib> core;
    // RNG source and the distributions used by spawn logic.
    std::random_device rdevice;
    std::uniform_int_distribution<int> bonusDist {0, 700};
    std::uniform_int_distribution<int> candyPosDist;
    std::uniform_real_distribution<float> normDist {0.f, 1.f};
    std::uniform_real_distribution<float> percentDist {0.f, 100.f};
    unsigned int recursion = 0;
    unsigned short level = 1;
    unsigned short maxLevel = 1;
    unsigned char nbPlayer = 0;
    bool alive = false;
    bool first = false;
    bool notQuitting = true;
    bool someone = false;
    Player player1 {};
    Player player2 {};
    float difficultyCoef;
    float candyTypeProbScale;
    float circle[16];
    // Tick counter for the current run.
    int tic = 0;
    int usedSlot;
    const int width;
    const int height;
public:
    // Configurations
    const int startScore = 8500; // Start equipment value
    const float recursionBaseScoreRatio = 0.0625;
    float recursionGainFactor = 1; // 0.55 for 2-players --> +10% recursion point
    // pair of (entity/spawn chance)
    static const std::pair<unsigned char, unsigned char> spawnability[6];
    static const WeaponAttributes weaponList[5];
    static const VesselAttributes vesselList[6];
    static const GeneratorAttributes generatorList[6];
    static const RecoolerAttributes recoolerList[10];
    static const ShieldAttributes shieldList[11];
    static const SpecialWeaponAttributes specialList[10];
};
#endif /* GAME_HPP_ */
|
# https://python-poetry.org/docs/#installation
# The old get-poetry.py installer used here was deprecated and then removed
# upstream; the official installer now lives at install.python-poetry.org
# and requires python3.
curl -sSL https://install.python-poetry.org | python3 -
# NOTE: Cannot run
# bash: /etc/bash_completion.d/poetry.bash-completion: Permission denied
# poetry completions bash > /etc/bash_completion.d/poetry.bash-completion
/*
* SPDX-License-Identifier: BSD-3-Clause OR GPL-2.0-or-later
*
* Copyright (c) 2021, <NAME> <<EMAIL>>
*/
#include <string.h>
#include "gattlib_internal.h"
/* Eddystone beacons advertise under this 16-bit service UUID (0xFEAA),
 * expressed here as a full 128-bit UUID string. */
#define EDDYSTONE_SERVICE_UUID "0000FEAA-0000-1000-8000-00805F9B34FB"
const uuid_t gattlib_eddystone_common_data_uuid = CREATE_UUID16(0xFEAA);
/* URL scheme prefixes, indexed by the Eddystone-URL frame's scheme byte. */
const char *gattlib_eddystone_url_scheme_prefix[] = {
	"http://www.",
	"https://www.",
	"http://",
	"https://"
};
/* Bundles the user callback and its context so the internal discovery
 * callback below can forward both through a single user_data pointer. */
struct on_eddystone_discovered_device_arg {
	gattlib_discovered_device_with_data_t discovered_device_cb;
	void *user_data;
};
/*
 * Internal discovery callback: fetches the advertisement payload of the
 * discovered device and forwards it, together with the device identity,
 * to the user-supplied callback packed in user_data.
 */
static void on_eddystone_discovered_device(void *adapter, const char* addr, const char* name, void *user_data)
{
	struct on_eddystone_discovered_device_arg *callback_data = user_data;
	gattlib_advertisement_data_t *advertisement_data;
	size_t advertisement_data_count;
	uint16_t manufacturer_id;
	uint8_t *manufacturer_data;
	size_t manufacturer_data_size;
	int ret;

	ret = gattlib_get_advertisement_data_from_mac(adapter, addr,
			&advertisement_data, &advertisement_data_count,
			&manufacturer_id, &manufacturer_data, &manufacturer_data_size);
	if (ret != 0) {
		/* No advertisement data available: silently skip this device. */
		return;
	}
	/* NOTE(review): advertisement_data/manufacturer_data appear to be
	 * allocated by the call above and are not freed here -- confirm
	 * whether the user callback takes ownership. */
	callback_data->discovered_device_cb(adapter, addr, name,
			advertisement_data, advertisement_data_count,
			manufacturer_id, manufacturer_data, manufacturer_data_size,
			callback_data->user_data);
}
/*
 * Scan for Eddystone beacons: enables a filtered scan on the Eddystone
 * service UUID (0xFEAA), optionally limited by RSSI, and reports each
 * discovered beacon with its advertisement data via discovered_device_cb.
 *
 * Returns 0 on success or a GATTLIB_ERROR_* code.
 */
int gattlib_adapter_scan_eddystone(void *adapter, int16_t rssi_threshold, uint32_t eddystone_types,
		gattlib_discovered_device_with_data_t discovered_device_cb, size_t timeout, void *user_data)
{
	uuid_t eddystone_uuid;
	uint32_t enabled_filters = GATTLIB_DISCOVER_FILTER_USE_UUID;
	int ret;

	ret = gattlib_string_to_uuid(EDDYSTONE_SERVICE_UUID, strlen(EDDYSTONE_SERVICE_UUID) + 1, &eddystone_uuid);
	if (ret != 0) {
		/* Fixed copy-pasted log text: the failing conversion is the
		 * Eddystone service UUID, not a "characteristic TX". */
		GATTLIB_LOG(GATTLIB_ERROR, "Failed to convert Eddystone service UUID string to UUID.");
		return GATTLIB_ERROR_INTERNAL;
	}

	/* NULL-terminated filter list restricting discovery to Eddystone. */
	uuid_t *uuid_filter_list[] = { &eddystone_uuid, NULL };

	if (eddystone_types & GATTLIB_EDDYSTONE_LIMIT_RSSI) {
		enabled_filters |= GATTLIB_DISCOVER_FILTER_USE_RSSI;
	}

	/* Stack allocation is safe as long as the scan call blocks for the
	 * whole `timeout` -- consistent with the synchronous API shape. */
	struct on_eddystone_discovered_device_arg callback_data = {
		.discovered_device_cb = discovered_device_cb,
		.user_data = user_data
	};

	return gattlib_adapter_scan_enable_with_filter(adapter, uuid_filter_list, rssi_threshold, enabled_filters,
			on_eddystone_discovered_device, timeout, &callback_data);
}
|
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
// Ambient type declarations for the classic theme's swizzlable components.
declare module '@docusaurus/theme-classic' {
  export type Options = {
    customCss?: string | string[];
  };
}

declare module '@theme/Admonition' {
  import type {ReactNode} from 'react';

  export interface Props {
    readonly children: ReactNode;
    readonly type: 'note' | 'tip' | 'danger' | 'info' | 'caution';
    readonly icon?: ReactNode;
    readonly title?: string;
  }
  export default function Admonition(props: Props): JSX.Element;
}

declare module '@theme/AnnouncementBar' {
  // Returns null when no announcement bar is configured/visible.
  const AnnouncementBar: () => JSX.Element | null;
  export default AnnouncementBar;
}

declare module '@theme/BackToTopButton' {
  export default function BackToTopButton(): JSX.Element;
}

declare module '@theme/BlogListPaginator' {
  import type {Metadata} from '@theme/BlogListPage';

  export interface Props {
    readonly metadata: Metadata;
  }
  const BlogListPaginator: (props: Props) => JSX.Element;
  export default BlogListPaginator;
}

declare module '@theme/BlogPostItem' {
  import type {FrontMatter, Metadata} from '@theme/BlogPostPage';
  import type {Assets} from '@docusaurus/plugin-content-blog';

  export interface Props {
    readonly frontMatter: FrontMatter;
    readonly assets: Assets;
    readonly metadata: Metadata;
    readonly truncated?: string | boolean;
    readonly isBlogPostPage?: boolean;
    readonly children: JSX.Element;
  }
  const BlogPostItem: (props: Props) => JSX.Element;
  export default BlogPostItem;
}

declare module '@theme/BlogPostAuthor' {
  import type {Metadata} from '@theme/BlogPostPage';

  export interface Props {
    readonly author: Metadata['authors'][number];
  }
  export default function BlogPostAuthor(props: Props): JSX.Element;
}

declare module '@theme/BlogPostAuthors' {
  import type {Metadata} from '@theme/BlogPostPage';
  import type {Assets} from '@docusaurus/plugin-content-blog';

  export interface Props {
    readonly authors: Metadata['authors'];
    readonly assets: Assets;
  }
  export default function BlogPostAuthors(props: Props): JSX.Element;
}

declare module '@theme/BlogPostPaginator' {
  type Item = {readonly title: string; readonly permalink: string};

  export interface Props {
    readonly nextItem?: Item;
    readonly prevItem?: Item;
  }
  const BlogPostPaginator: (props: Props) => JSX.Element;
  export default BlogPostPaginator;
}

declare module '@theme/BlogLayout' {
  import type {ReactNode} from 'react';
  import type {Props as LayoutProps} from '@theme/Layout';
  import type {BlogSidebar} from '@theme/BlogSidebar';

  export interface Props extends LayoutProps {
    readonly sidebar?: BlogSidebar;
    readonly toc?: ReactNode;
  }
  const BlogLayout: (props: Props) => JSX.Element;
  export default BlogLayout;
}

declare module '@theme/CodeBlock' {
  import type {ReactElement} from 'react';

  export interface Props {
    readonly children: string | ReactElement;
    readonly className?: string;
    readonly metastring?: string;
    readonly title?: string;
    readonly language?: string;
  }
  const CodeBlock: (props: Props) => JSX.Element;
  export default CodeBlock;
}

declare module '@theme/DocPaginator' {
  import type {PropNavigation} from '@docusaurus/plugin-content-docs';

  // May be simpler to provide a {navigation: PropNavigation} prop?
  export interface Props extends PropNavigation {}

  export default function DocPaginator(props: Props): JSX.Element;
}

declare module '@theme/DocSidebar' {
  import type {PropSidebarItem} from '@docusaurus/plugin-content-docs';

  export interface Props {
    readonly path: string;
    readonly sidebar: readonly PropSidebarItem[];
    readonly onCollapse: () => void;
    readonly isHidden: boolean;
    // MobileSecondaryFilter expects Record<string, unknown>
    readonly [key: string]: unknown;
  }

  const DocSidebar: (props: Props) => JSX.Element;
  export default DocSidebar;
}
declare module '@theme/DocSidebarItem' {
  import type {PropSidebarItem} from '@docusaurus/plugin-content-docs';

  // Props shared between DocSidebarItem and DocSidebarItems.
  export type DocSidebarPropsBase = {
    readonly activePath: string;
    readonly onItemClick?: (item: PropSidebarItem) => void;
    readonly level: number;
    readonly tabIndex?: number;
  };

  // Extend the base instead of re-declaring its four fields (the original
  // duplicated them verbatim, risking drift).  Structurally identical, so
  // all existing consumers are unaffected.
  export interface Props extends DocSidebarPropsBase {
    readonly item: PropSidebarItem;
  }

  export default function DocSidebarItem(props: Props): JSX.Element;
}
declare module '@theme/DocSidebarItems' {
import type {Props as DocSidebarItemProps} from '@theme/DocSidebarItem';
import type {PropSidebarItem} from '@docusaurus/plugin-content-docs';
export type Props = Omit<DocSidebarItemProps, 'item'> & {
readonly items: readonly PropSidebarItem[];
};
export default function DocSidebarItems(props: Props): JSX.Element;
}
// Ambient type declarations for Docusaurus classic-theme components.
// Each `declare module` block documents the props contract of one swizzlable
// '@theme/*' component; this file contains no runtime code.
declare module '@theme/DocVersionSuggestions' {
const DocVersionSuggestions: () => JSX.Element;
export default DocVersionSuggestions;
}
// "Edit this page" link; editUrl points at the page source location.
declare module '@theme/EditThisPage' {
export interface Props {
readonly editUrl: string;
}
const EditThisPage: (props: Props) => JSX.Element;
export default EditThisPage;
}
// Error page body; shares the signature of the root @theme/Error component.
declare module '@theme/ErrorPageContent' {
import type ErrorComponent from '@theme/Error';
const ErrorPageContent: typeof ErrorComponent;
export default ErrorPageContent;
}
// Site footer; may render nothing (null).
declare module '@theme/Footer' {
const Footer: () => JSX.Element | null;
export default Footer;
}
// Heading component; `as` selects the concrete h1-h6 tag to render.
declare module '@theme/Heading' {
import type {ComponentProps} from 'react';
type HeadingType = 'h1' | 'h2' | 'h3' | 'h4' | 'h5' | 'h6';
export interface Props extends ComponentProps<HeadingType> {
as: HeadingType;
}
export default function Heading(props: Props): JSX.Element;
}
// Top-level page layout: children wrapper plus page/SEO metadata props.
declare module '@theme/Layout' {
import type {ReactNode} from 'react';
export interface Props {
readonly children: ReactNode;
readonly title?: string;
readonly noFooter?: boolean;
readonly description?: string;
readonly image?: string;
readonly keywords?: string | string[];
readonly permalink?: string;
readonly wrapperClassName?: string;
readonly pageClassName?: string;
readonly searchMetadata?: {
readonly version?: string;
readonly tag?: string;
};
}
export default function Layout(props: Props): JSX.Element;
}
// <head> contents for a page; same props as Layout minus children.
declare module '@theme/LayoutHead' {
import type {Props as LayoutProps} from '@theme/Layout';
export interface Props extends Omit<LayoutProps, 'children'> {}
export default function LayoutHead(props: Props): JSX.Element;
}
// Context providers wrapping the whole app.
declare module '@theme/LayoutProviders' {
import type {ReactNode} from 'react';
export interface Props {
readonly children: ReactNode;
}
export default function LayoutProviders(props: Props): JSX.Element;
}
// Meta tags consumed by the search integration (locale/version/tag facets).
declare module '@theme/SearchMetadata' {
export interface Props {
readonly locale?: string;
readonly version?: string;
readonly tag?: string;
}
const SearchMetadata: (props: Props) => JSX.Element;
export default SearchMetadata;
}
// "Last updated" byline for a doc page.
// NOTE(review): units of lastUpdatedAt (s vs ms) are not visible here — see caller.
declare module '@theme/LastUpdated' {
export interface Props {
readonly lastUpdatedAt?: number;
readonly formattedLastUpdatedAt?: string;
readonly lastUpdatedBy?: string;
}
const LastUpdated: (props: Props) => JSX.Element;
export default LastUpdated;
}
// Accessibility skip-to-content link.
declare module '@theme/SkipToContent' {
const SkipToContent: () => JSX.Element;
export default SkipToContent;
}
// Mapping of markdown/MDX elements to themed React components.
declare module '@theme/MDXComponents' {
import type {ComponentProps} from 'react';
import type CodeBlock from '@theme/CodeBlock';
import type Head from '@docusaurus/Head';
export type MDXComponentsObject = {
readonly head: typeof Head;
readonly code: typeof CodeBlock;
readonly a: (props: ComponentProps<'a'>) => JSX.Element;
readonly pre: typeof CodeBlock;
readonly details: (props: ComponentProps<'details'>) => JSX.Element;
readonly h1: (props: ComponentProps<'h1'>) => JSX.Element;
readonly h2: (props: ComponentProps<'h2'>) => JSX.Element;
readonly h3: (props: ComponentProps<'h3'>) => JSX.Element;
readonly h4: (props: ComponentProps<'h4'>) => JSX.Element;
readonly h5: (props: ComponentProps<'h5'>) => JSX.Element;
readonly h6: (props: ComponentProps<'h6'>) => JSX.Element;
};
const MDXComponents: MDXComponentsObject;
export default MDXComponents;
}
// Site navbar plus the family of navbar-item components below.
declare module '@theme/Navbar' {
const Navbar: () => JSX.Element;
export default Navbar;
}
// Plain link navbar item; base props shared by most specialized items.
declare module '@theme/NavbarItem/DefaultNavbarItem' {
import type {Props as NavbarNavLinkProps} from '@theme/NavbarItem/NavbarNavLink';
export type DesktopOrMobileNavBarItemProps = NavbarNavLinkProps & {
readonly isDropdownItem?: boolean;
readonly className?: string;
readonly position?: 'left' | 'right';
};
export interface Props extends DesktopOrMobileNavBarItemProps {
readonly mobile?: boolean;
}
export default function DefaultNavbarItem(props: Props): JSX.Element;
}
// Low-level nav link wrapping @docusaurus/Link with active-state matching.
declare module '@theme/NavbarItem/NavbarNavLink' {
import type {ReactNode} from 'react';
import type {LinkProps} from '@docusaurus/Link';
export type Props = LinkProps & {
readonly activeBasePath?: string;
readonly activeBaseRegex?: string;
readonly exact?: boolean;
readonly label?: ReactNode;
readonly prependBaseUrlToHref?: string;
};
export default function NavbarNavLink(props: Props): JSX.Element;
}
// Dropdown navbar item containing a list of link-like child items.
declare module '@theme/NavbarItem/DropdownNavbarItem' {
import type {Props as NavbarNavLinkProps} from '@theme/NavbarItem/NavbarNavLink';
import type {LinkLikeNavbarItemProps} from '@theme/NavbarItem';
export type DesktopOrMobileNavBarItemProps = NavbarNavLinkProps & {
readonly position?: 'left' | 'right';
readonly items: readonly LinkLikeNavbarItemProps[];
readonly className?: string;
};
export interface Props extends DesktopOrMobileNavBarItemProps {
readonly mobile?: boolean;
}
const DropdownNavbarItem: (props: Props) => JSX.Element;
export default DropdownNavbarItem;
}
// Search box rendered as a navbar item.
declare module '@theme/NavbarItem/SearchNavbarItem' {
export interface Props {
readonly mobile?: boolean;
}
const SearchNavbarItem: (props: Props) => JSX.Element;
export default SearchNavbarItem;
}
// Locale switcher dropdown; extra items can be injected before/after locales.
declare module '@theme/NavbarItem/LocaleDropdownNavbarItem' {
import type {Props as DropdownNavbarItemProps} from '@theme/NavbarItem/DropdownNavbarItem';
import type {LinkLikeNavbarItemProps} from '@theme/NavbarItem';
export interface Props extends DropdownNavbarItemProps {
readonly dropdownItemsBefore: LinkLikeNavbarItemProps[];
readonly dropdownItemsAfter: LinkLikeNavbarItemProps[];
}
const LocaleDropdownNavbarItem: (props: Props) => JSX.Element;
export default LocaleDropdownNavbarItem;
}
// Docs version switcher dropdown for a given docs plugin instance.
declare module '@theme/NavbarItem/DocsVersionDropdownNavbarItem' {
import type {Props as DropdownNavbarItemProps} from '@theme/NavbarItem/DropdownNavbarItem';
import type {LinkLikeNavbarItemProps} from '@theme/NavbarItem';
export interface Props extends DropdownNavbarItemProps {
readonly docsPluginId?: string;
readonly dropdownActiveClassDisabled?: boolean;
readonly dropdownItemsBefore: LinkLikeNavbarItemProps[];
readonly dropdownItemsAfter: LinkLikeNavbarItemProps[];
}
const DocsVersionDropdownNavbarItem: (props: Props) => JSX.Element;
export default DocsVersionDropdownNavbarItem;
}
// Link to the current docs version.
declare module '@theme/NavbarItem/DocsVersionNavbarItem' {
import type {Props as DefaultNavbarItemProps} from '@theme/NavbarItem/DefaultNavbarItem';
export interface Props extends DefaultNavbarItemProps {
readonly docsPluginId?: string;
}
const DocsVersionNavbarItem: (props: Props) => JSX.Element;
export default DocsVersionNavbarItem;
}
// Navbar link pointing at a specific doc (by docId).
declare module '@theme/NavbarItem/DocNavbarItem' {
import type {Props as DefaultNavbarItemProps} from '@theme/NavbarItem/DefaultNavbarItem';
export interface Props extends DefaultNavbarItemProps {
readonly docId: string;
readonly docsPluginId?: string;
}
// Local const renamed from `DocsSidebarNavbarItem` to `DocNavbarItem` so it
// matches the module it is declared in (the sibling modules all follow this
// convention). The default export is unnamed to importers, so callers are
// unaffected.
const DocNavbarItem: (props: Props) => JSX.Element;
export default DocNavbarItem;
}
// Navbar link pointing at the first/active doc of a named sidebar.
declare module '@theme/NavbarItem/DocSidebarNavbarItem' {
import type {Props as DefaultNavbarItemProps} from '@theme/NavbarItem/DefaultNavbarItem';
export interface Props extends DefaultNavbarItemProps {
readonly sidebarId: string;
readonly docsPluginId?: string;
}
const DocSidebarNavbarItem: (props: Props) => JSX.Element;
export default DocSidebarNavbarItem;
}
// Dispatcher component: selects the concrete navbar item by its `type` field.
declare module '@theme/NavbarItem' {
import type {ComponentProps} from 'react';
import type {Props as DefaultNavbarItemProps} from '@theme/NavbarItem/DefaultNavbarItem';
import type {Props as DocNavbarItemProps} from '@theme/NavbarItem/DocNavbarItem';
import type {Props as DocSidebarNavbarItemProps} from '@theme/NavbarItem/DocSidebarNavbarItem';
import type {Props as DocsVersionNavbarItemProps} from '@theme/NavbarItem/DocsVersionNavbarItem';
import type {Props as DropdownNavbarItemProps} from '@theme/NavbarItem/DropdownNavbarItem';
import type {Props as DocsVersionDropdownNavbarItemProps} from '@theme/NavbarItem/DocsVersionDropdownNavbarItem';
import type {Props as LocaleDropdownNavbarItemProps} from '@theme/NavbarItem/LocaleDropdownNavbarItem';
import type {Props as SearchNavbarItemProps} from '@theme/NavbarItem/SearchNavbarItem';
// Items that ultimately render as a single link (also valid inside dropdowns).
export type LinkLikeNavbarItemProps =
| ({readonly type?: 'default'} & DefaultNavbarItemProps)
| ({readonly type: 'doc'} & DocNavbarItemProps)
| ({readonly type: 'docsVersion'} & DocsVersionNavbarItemProps)
| ({readonly type: 'docSidebar'} & DocSidebarNavbarItemProps);
export type Props = ComponentProps<'a'> & {
readonly position?: 'left' | 'right';
} & (
| LinkLikeNavbarItemProps
| ({readonly type?: 'dropdown'} & DropdownNavbarItemProps)
| ({
readonly type: 'docsVersionDropdown';
} & DocsVersionDropdownNavbarItemProps)
| ({readonly type: 'localeDropdown'} & LocaleDropdownNavbarItemProps)
| ({
readonly type: 'search';
} & SearchNavbarItemProps)
);
export type Types = Props['type'];
const NavbarItem: (props: Props) => JSX.Element;
export default NavbarItem;
}
// Helpers shared by navbar item implementations.
declare module '@theme/NavbarItem/utils' {
export function getInfimaActiveClassName(mobile?: boolean): string;
}
// Previous/next pagination link at the bottom of docs and blog posts.
declare module '@theme/PaginatorNavLink' {
import type {ReactNode} from 'react';
import type {PropNavigationLink} from '@docusaurus/plugin-content-docs';
export interface Props extends Omit<PropNavigationLink, 'title'> {
readonly title: ReactNode;
readonly subLabel?: JSX.Element;
}
export default function PaginatorNavLink(props: Props): JSX.Element;
}
// Search input; concrete implementation is provided by a search plugin.
declare module '@theme/SearchBar' {
export default function SearchBar(): JSX.Element;
}
// Single tab pane; `value` identifies it, `default` marks the initial tab.
declare module '@theme/TabItem' {
import type {ReactNode} from 'react';
export interface Props {
readonly children: ReactNode;
readonly value: string;
readonly default?: boolean;
readonly label?: string;
readonly hidden?: boolean;
readonly className?: string;
readonly attributes?: Record<string, unknown>;
}
const TabItem: (props: Props) => JSX.Element;
export default TabItem;
}
// Tabs container; `groupId` syncs selection across groups, `lazy` defers
// rendering of hidden panes.
declare module '@theme/Tabs' {
import type {ReactElement} from 'react';
import type {Props as TabItemProps} from '@theme/TabItem';
export interface Props {
readonly lazy?: boolean;
readonly block?: boolean;
readonly children: readonly ReactElement<TabItemProps>[];
readonly defaultValue?: string | null;
readonly values?: readonly {
value: string;
label?: string;
attributes?: Record<string, unknown>;
}[];
readonly groupId?: string;
readonly className?: string;
}
const Tabs: (props: Props) => JSX.Element;
export default Tabs;
}
// <img> that swaps between light/dark sources with the color mode.
declare module '@theme/ThemedImage' {
import type {ComponentProps} from 'react';
export interface Props extends Omit<ComponentProps<'img'>, 'src'> {
readonly sources: {
readonly light: string;
readonly dark: string;
};
}
const ThemedImage: (props: Props) => JSX.Element;
export default ThemedImage;
}
// Collapsible <details> element re-exported from theme-common.
declare module '@theme/Details' {
import {Details, type DetailsProps} from '@docusaurus/theme-common';
export interface Props extends DetailsProps {}
export default Details;
}
// Renders a table-of-contents list, filtered by heading level bounds.
declare module '@theme/TOCItems' {
import type {TOCItem} from '@docusaurus/types';
export type TOCItemsProps = {
readonly toc: readonly TOCItem[];
readonly minHeadingLevel?: number;
readonly maxHeadingLevel?: number;
readonly className?: string;
readonly linkClassName?: string | null;
readonly linkActiveClassName?: string;
};
export default function TOCItems(props: TOCItemsProps): JSX.Element;
}
// Sidebar table of contents for a page.
declare module '@theme/TOC' {
import type {TOCItem} from '@docusaurus/types';
// minHeadingLevel only exists as a per-doc option,
// and won't have a default set by Joi. See TOC, TOCInline,
// TOCCollapsible for examples
export type TOCProps = {
readonly toc: readonly TOCItem[];
readonly minHeadingLevel?: number;
readonly maxHeadingLevel?: number;
readonly className?: string;
};
export type TOCHeadingsProps = {
readonly toc: readonly TOCItem[];
readonly minHeadingLevel?: number;
readonly maxHeadingLevel?: number;
};
export const TOCHeadings: (props: TOCHeadingsProps) => JSX.Element;
const TOC: (props: TOCProps) => JSX.Element;
export default TOC;
}
// TOC rendered inline within the doc body.
declare module '@theme/TOCInline' {
import type {TOCItem} from '@docusaurus/types';
export type TOCInlineProps = {
readonly toc: readonly TOCItem[];
readonly minHeadingLevel?: number;
readonly maxHeadingLevel?: number;
};
const TOCInline: (props: TOCInlineProps) => JSX.Element;
export default TOCInline;
}
// Collapsible TOC used on narrow viewports.
declare module '@theme/TOCCollapsible' {
import type {TOCItem} from '@docusaurus/types';
export type TOCCollapsibleProps = {
readonly className?: string;
readonly minHeadingLevel?: number;
readonly maxHeadingLevel?: number;
readonly toc: readonly TOCItem[];
};
const TOCCollapsible: (props: TOCCollapsibleProps) => JSX.Element;
export default TOCCollapsible;
}
// Controlled toggle switch.
declare module '@theme/Toggle' {
import type {SyntheticEvent} from 'react';
export interface Props {
readonly className?: string;
readonly checked: boolean;
readonly onChange: (e: SyntheticEvent) => void;
}
const Toggle: (props: Props) => JSX.Element;
export default Toggle;
}
// Site logo link in the navbar.
declare module '@theme/Logo' {
import type {ComponentProps} from 'react';
export interface Props extends ComponentProps<'a'> {
readonly imageClassName?: string;
readonly titleClassName?: string;
}
const Logo: (props: Props) => JSX.Element;
export default Logo;
}
// Inline SVG icon components; each accepts standard <svg> props.
declare module '@theme/IconArrow' {
import type {ComponentProps} from 'react';
export interface Props extends ComponentProps<'svg'> {}
const IconArrow: (props: Props) => JSX.Element;
export default IconArrow;
}
declare module '@theme/IconEdit' {
import type {ComponentProps} from 'react';
export interface Props extends ComponentProps<'svg'> {}
const IconEdit: (props: Props) => JSX.Element;
export default IconEdit;
}
declare module '@theme/IconMenu' {
import type {ComponentProps} from 'react';
export interface Props extends ComponentProps<'svg'> {}
const IconMenu: (props: Props) => JSX.Element;
export default IconMenu;
}
declare module '@theme/IconClose' {
import type {ComponentProps} from 'react';
export interface Props extends ComponentProps<'svg'> {}
const IconClose: (props: Props) => JSX.Element;
export default IconClose;
}
declare module '@theme/IconLanguage' {
import type {ComponentProps} from 'react';
export interface Props extends ComponentProps<'svg'> {}
const IconLanguage: (props: Props) => JSX.Element;
export default IconLanguage;
}
declare module '@theme/IconExternalLink' {
import type {ComponentProps} from 'react';
export interface Props extends ComponentProps<'svg'> {}
const IconExternalLink: (props: Props) => JSX.Element;
export default IconExternalLink;
}
// Tag list grouped alphabetically on the tags index page.
declare module '@theme/TagsListByLetter' {
export type TagsListItem = Readonly<{
name: string;
permalink: string;
count: number;
}>;
export interface Props {
readonly tags: readonly TagsListItem[];
}
export default function TagsListByLetter(props: Props): JSX.Element;
}
// Compact inline tag list shown under a post/doc.
declare module '@theme/TagsListInline' {
export type Tag = Readonly<{label: string; permalink: string}>;
export interface Props {
readonly tags: readonly Tag[];
}
export default function TagsListInline(props: Props): JSX.Element;
}
// Single tag chip; `count` is optional.
declare module '@theme/Tag' {
import type {TagsListItem} from '@theme/TagsListByLetter';
import type {Optional} from 'utility-types';
export interface Props extends Optional<TagsListItem, 'count'> {}
export default function Tag(props: Props): JSX.Element;
}
// Hook point to register extra Prism language definitions.
declare module '@theme/prism-include-languages' {
import type * as PrismNamespace from 'prismjs';
export default function prismIncludeLanguages(
PrismObject: typeof PrismNamespace,
): void;
}
|
// Compiled (CommonJS) module exporting a single SVG icon description:
// an "image/photo" glyph on a 20x20 viewBox, as a plain object tree
// (name/attribs/children) — presumably consumed by an svg-to-react renderer;
// confirm against the importer. Generated code: do not edit by hand.
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.image = void 0;
var image = {
"viewBox": "0 0 20 20",
"children": [{
"name": "path",
"attribs": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M19,2H1C0.447,2,0,2.447,0,3v14c0,0.552,0.447,1,1,1h18c0.553,0,1-0.448,1-1V3\r\n\tC20,2.448,19.553,2,19,2z M18,16H2V4h16V16z M14.315,10.877l-3.231,1.605L7.314,6.381L4,14h12L14.315,10.877z M13.25,9\r\n\tc0.69,0,1.25-0.56,1.25-1.25S13.94,6.5,13.25,6.5S12,7.06,12,7.75S12.56,9,13.25,9z"
}
}]
};
exports.image = image; |
def longestWord(sentence):
    """Return the longest whitespace-separated word in ``sentence``.

    Ties are broken in favor of the earliest word. An empty or
    all-whitespace sentence yields ''.
    """
    # split() (no argument) collapses runs of whitespace and handles tabs,
    # unlike the original split(' ') which produced empty tokens.
    longest_word = ''
    for word in sentence.split():
        if len(word) > len(longest_word):
            longest_word = word
    return longest_word


sentence = "This is a test sentence"
# Bug fix: the original line ended with a JS-style comment
# (`// "sentence"`), which Python parses as floor division of the print()
# result by a string -> TypeError at runtime.
print(longestWord(sentence))  # "sentence"
import { Service } from 'typedi';
import * as Path from 'path';
import { OptionsService } from './Options';
import * as Fs from 'fs-extra';
import Hoek from '@hapi/hoek';
import { Channel } from '../class/Channel';
import { ModelsCollection } from '../class/ModelsCollection';
import { Project } from '../class/Project';
// Injectable service that discovers, loads and coordinates "channels"
// (directories containing a channel config file) under the CLI working dir.
@Service()
export class ChannelsService {
/** Channels instances */
private _channels: Channel[];
constructor(private optionsService: OptionsService) {}
/** Get the channels. Load them if not loaded yet */
public async channels(): Promise<Channel[]> {
// Lazy init: sniff the filesystem once, then cache the loaded instances.
if (!(this._channels instanceof Array)) {
this._channels = await ChannelsService.sniff(this.optionsService.dir(), this.optionsService.depth());
if (this._channels.length === 0) {
throw new Error('No channel found');
}
// Channels are loaded sequentially (load() may touch shared files).
for (const channel of this._channels) {
await channel.load();
}
}
return this._channels;
}
/** Ensure that all channels refers to the same project */
public async ensureSameProject(): Promise<void> {
const channels = await this.channels();
// Use the first channel's project as the reference for comparison.
const firstProject = channels[0].guessProjectIdOrPath();
for (const channel of channels) {
if (channel.guessProjectIdOrPath() !== firstProject) {
throw new Error('Channels must refer to the same project');
}
}
}
/** Ensure that all channels define the same default fields */
public async ensureSameDefaultFields(): Promise<void> {
// Get defined fields
const channels = await this.channels();
const fieldsGroup = channels.filter((c) => !!c.config.defaultFields).map((c) => c.config.defaultFields);
// Zero or one definition cannot conflict.
if (fieldsGroup.length < 2) {
return;
}
// Compare each fields group to the first one
const ref = fieldsGroup[0];
for (let i = 1; i < fieldsGroup.length; i++) {
if (!Hoek.deepEqual(ref, fieldsGroup[i])) {
throw new Error('Default fields must match for all channels if defined');
}
}
}
/**
* Change project in all found channels from a given or current dir
* This change the project without loading the channels
*/
public async changeRemoteProject(project: string): Promise<void> {
const channels = await ChannelsService.sniff(this.optionsService.dir(), this.optionsService.depth());
if (channels.length === 0) {
throw new Error('No channel found');
}
for (const channel of channels) {
await Channel.changeProject(channel.path, project);
}
}
/**
* Use the same local project for all found channels
* This change the project without loading the channels
*/
public async mergeLocalProjects(): Promise<boolean> {
// Try to find channels
const channels = await ChannelsService.sniff(this.optionsService.dir(), this.optionsService.depth());
if (channels.length === 0) {
throw new Error('No channel found');
}
// If the one channel's project is local, use this project as reference and bind all other channels to this project
let mainChannel;
let mainChannelProjectPath;
for (const channel of channels) {
const projectPath = await this.resolveLocalProjectPath(channel);
if (projectPath) {
mainChannel = channel;
mainChannelProjectPath = projectPath;
break;
}
}
if (!mainChannel) {
// The user should choose a remote project
return false;
}
// Re-point every other channel at the main channel's project file
// (relative path), deleting their own local project file if present.
for (const channel of channels) {
if (channel === mainChannel) continue;
// Remove project file
const projectPath = await this.resolveLocalProjectPath(channel);
if (projectPath && Fs.existsSync(projectPath)) {
Fs.unlinkSync(projectPath);
}
// Get relative path
const newPath = Path.relative(Path.resolve(channel.path), mainChannelProjectPath);
await Channel.changeProject(channel.path, newPath);
}
return true;
}
/** Returns null if the project is not local */
private async resolveLocalProjectPath(channel: Channel): Promise<string> {
const config = await channel.readConfigFile();
if (Project.isRemoteId(config.project)) return null;
// Relative project paths are resolved against the channel directory.
return Path.isAbsolute(config.project) ? config.project : Path.resolve(channel.path, config.project);
}
/** Returns the first models collection */
public async modelsCollection(): Promise<ModelsCollection> {
const channels = await this.channels();
return channels[0].modelsCollection;
}
/**
* This method detect all channels in the directory and its sub-directories, and create instances for them.
* We can define the depth level of subdirectories.
*/
private static async sniff(path: string, depth: number = 2, from: string = path): Promise<Channel[]> {
// Get channels in sub-directories first
// Recurse into sub-directories in parallel, then flatten the results.
const channels: Channel[] =
depth <= 0
? []
: (
await Promise.all(
Fs.readdirSync(path)
.map((dir) => Path.join(path, dir))
.filter((subPath) => Fs.statSync(subPath).isDirectory())
.map((subPath) => ChannelsService.sniff(subPath, depth - 1, from))
)
).reduce((flatten: Channel[], channels: Channel[]) => flatten.concat(channels), []);
// Get channel of current directory if exists
if (await Channel.configExists(path)) {
// Channel name is the directory path relative to the sniff root's parent.
const name = Path.relative(Path.dirname(from), path);
const channel = new Channel(path, name);
channels.push(channel);
}
return channels;
}
}
|
<gh_stars>1000+
package handler
import "errors"
var ErrProjectAlreadyExists = errors.New("project already exists")
var ErrServiceAlreadyExists = errors.New("service already exists")
var ErrServiceNotFound = errors.New("service not found")
var ErrProjectNotFound = errors.New("project not found")
var ErrStageNotFound = errors.New("stage not found")
|
import { useHistory, useLocation } from 'react-router-dom'
import { Auth0Provider } from '@auth0/auth0-react'
// Set configuration parameters
const domain = process.env.REACT_APP_AUTH0_DOMAIN;
const clientId = process.env.REACT_APP_AUTH0_CLIENT_ID;
const audience = process.env.REACT_APP_AUTH0_AUDIENCE;
export const App = () => {
const history = useHistory();
const location = useLocation();
// Handle authentication state changes
const onRedirectCallback = (appState) => {
history.push(
appState && appState.returnTo
? appState.returnTo
: window.location.pathname
);
};
return (
<Auth0Provider
domain={domain}
clientId={clientId}
redirectUri={window.location.origin}
onRedirectCallback={onRedirectCallback}
audience={audience}
>
// Add the rest of your application code here
</Auth0Provider>
);
}; |
<gh_stars>0
// Re-export ForkJoinObservable's static `create` factory under the public
// `forkJoin` name (compiled output; the @type cast preserves Closure typing).
import { ForkJoinObservable } from './ForkJoinObservable';
export const /** @type {?} */ forkJoin = ForkJoinObservable.create;
|
// @ts-check
'use strict';
const fs = require('fs');
const { fromArray, NaniError } = require('nani');
const util = require('util');
const YAML = require('yaml');
const readFile = util.promisify(fs.readFile);
const CodeMap = require('./CodeMap');
const SassValue = require('./SassValue');
/**
 * Read a YAML file from disk and parse it into an AST, keeping the raw text
 * and a CodeMap so later stages can report positioned errors.
 *
 * @param {string} sourcePath - path of the YAML file to read
 * @returns {Promise.<import('./types').ParsedSource>} parsed source bundle
 *   ({path, source, ast, map})
 * @throws combined (via nani fromArray) positioned errors when the YAML
 *   parser reports any syntax errors
 */
async function readSource(sourcePath) {
const source = await readFile(sourcePath, 'utf-8');
const map = new CodeMap(source);
// Custom tag lets Sass-specific values survive YAML parsing.
const ast = YAML.parseDocument(source, { customTags: [SassValue.tag] });
if (ast.errors && ast.errors.length) {
// Cap at 10 errors so the aggregate stays readable.
const max = Math.min(ast.errors.length, 10);
const errors = ast.errors.slice(0, max).map(yamlError => {
// Translate the parser's byte range into a line/column-aware error.
const { start, end } = yamlError.source.range;
return map.errorForRange('YAML error', [start, end], yamlError);
});
throw fromArray(errors);
}
return {
path: sourcePath,
source,
ast,
map,
};
}
module.exports = readSource;
|
import React, { useState, useCallback } from 'react';
import { Hello } from '../components/Hello';
export default function UseCallback() {
const [count, setCount] = useState(0);
const favoriteNums = [9, 5, 7];
// nao vai renderizar de novo (re-render) se não mudar a funcao
const increment = useCallback(
(n) => {
setCount((c) => c + n);
},
[setCount]
);
return (
<div>
<Hello increment={increment} />
<div>Count: {count}</div>
</div>
);
}
|
#!/usr/bin/python3
"""
Returns the dictionary description with simple data structure
(list, dictionary, string, integer and boolean)
for JSON serialization of an object
"""


def class_to_json(obj):
    """Return the dictionary description of ``obj`` for JSON serialization.

    The result is ``obj.__dict__``: a dict of the instance's attributes,
    which is assumed to contain only simple data structures
    (list, dictionary, string, integer and boolean).
    """
    # Fixed the "eturns" typo in both docstrings and dropped the
    # redundant parentheses around the return expression.
    return obj.__dict__
|
import * as tslib_1 from "tslib";
import { Gain } from "../core/context/Gain";
import { optionsFromArguments } from "../core/util/Defaults";
import { Signal } from "./Signal";
/**
* Multiply two incoming signals. Or, if a number is given in the constructor,
* multiplies the incoming signal by that value.
*
* @example
* import { Multiply, Signal } from "tone";
* // multiply two signals
* const mult = new Multiply();
* const sigA = new Signal(3);
* const sigB = new Signal(4);
* sigA.connect(mult);
* sigB.connect(mult.factor);
* // output of mult is 12.
* @example
* import { Multiply, Signal } from "tone";
* // multiply a signal and a number
* const mult = new Multiply(10);
* const sig = new Signal(2).connect(mult);
* // the output of mult is 20.
* @category Signal
*/
// Compiled (tslib ES5 downlevel) class: multiplies two signals, or a signal
// by a constant supplied to the constructor. Generated output — edit the
// TypeScript source, not this file.
var Multiply = /** @class */ (function (_super) {
tslib_1.__extends(Multiply, _super);
function Multiply() {
// NOTE(review): the single-argument Object.assign(...) wrapper is a no-op
// left by the compiler/author; it returns its argument unchanged.
var _this = _super.call(this, Object.assign(optionsFromArguments(Multiply.getDefaults(), arguments, ["value"]))) || this;
_this.name = "Multiply";
/**
* Indicates if the value should be overridden on connection
*/
_this.override = false;
var options = optionsFromArguments(Multiply.getDefaults(), arguments, ["value"]);
// A single Gain node serves as input, output and the multiplier itself:
// the incoming signal is scaled by the gain parameter.
_this._mult = _this.input = _this.output = new Gain({
context: _this.context,
minValue: options.minValue,
maxValue: options.maxValue,
});
// The second factor is the gain AudioParam, exposed as `factor`.
_this.factor = _this._param = _this._mult.gain;
_this.factor.setValueAtTime(options.value, 0);
return _this;
}
Multiply.getDefaults = function () {
return Object.assign(Signal.getDefaults(), {
value: 0,
});
};
// Release the underlying Gain node along with the base Signal resources.
Multiply.prototype.dispose = function () {
_super.prototype.dispose.call(this);
this._mult.dispose();
return this;
};
return Multiply;
}(Signal));
export { Multiply };
//# sourceMappingURL=Multiply.js.map |
import {Logger} from './logging';
import Tab, {TabEvent, TabUrlChangeEvent, TabVisibilityChangeEvent} from './tab';
import {OutputChannel} from './channel';
import NullLogger from './logging/nullLogger';
import {formatCause} from './error';
import {
Beacon,
BeaconPayload,
TrackingEvent,
TrackingEventContext,
isCartPartialEvent,
isIdentifiedUserEvent,
PartialTrackingEvent,
} from './trackingEvents';
import {TokenProvider} from './token';
import {RetryPolicy} from './retry';
type Options = {
eventMetadata?: {[key: string]: string},
};
export type Configuration = Options & {
channel: OutputChannel<Beacon>,
logger?: Logger,
tab: Tab,
tokenProvider: TokenProvider,
inactivityRetryPolicy: RetryPolicy<number>,
};
type State = {
initialized: boolean,
enabled: boolean,
suspended: boolean,
};
type InactivityTimer = {
id?: number,
since: number,
};
export type EventInfo<T extends TrackingEvent = TrackingEvent> = {
context: TrackingEventContext,
event: T,
timestamp: number,
status: 'pending' | 'confirmed' | 'failed' | 'ignored',
};
export interface EventListener {
(event: EventInfo): void;
}
const trackedEvents: {[key: string]: {[key: string]: boolean}} = {};
export default class Tracker {
private readonly options: Required<Options>;
private tab: Tab;
private tokenProvider: TokenProvider;
private inactivityRetryPolicy: RetryPolicy<any>;
private readonly channel: OutputChannel<Beacon>;
private readonly logger: Logger;
private readonly listeners: EventListener[] = [];
private readonly pending: Promise<void>[] = [];
private readonly state: State = {
enabled: false,
initialized: false,
suspended: false,
};
private readonly inactivityTimer: InactivityTimer = {
since: 0,
};
public constructor({tab, tokenProvider, channel, logger, inactivityRetryPolicy, ...options}: Configuration) {
this.tab = tab;
this.tokenProvider = tokenProvider;
this.inactivityRetryPolicy = inactivityRetryPolicy;
this.channel = channel;
this.logger = logger ?? new NullLogger();
this.options = {
...options,
eventMetadata: options.eventMetadata ?? {},
};
this.enable = this.enable.bind(this);
this.disable = this.disable.bind(this);
this.suspend = this.suspend.bind(this);
this.unsuspend = this.unsuspend.bind(this);
this.trackPageLoad = this.trackPageLoad.bind(this);
this.trackTabVisibilityChange = this.trackTabVisibilityChange.bind(this);
this.trackTabUrlChange = this.trackTabUrlChange.bind(this);
this.trackInactivity = this.trackInactivity.bind(this);
}
public addListener(listener: EventListener): void {
this.listeners.push(listener);
}
public removeListener(listener: EventListener): void {
let index = this.listeners.indexOf(listener);
while (index >= 0) {
this.listeners.splice(index, 1);
index = this.listeners.indexOf(listener);
}
}
public get flushed(): Promise<void> {
const suppress = (): void => {
// suppress errors
};
return Promise.all(this.pending).then(suppress, suppress);
}
public isEnabled(): boolean {
return this.state.enabled;
}
public isSuspended(): boolean {
return this.state.suspended;
}
public enable(): void {
if (this.state.enabled) {
return;
}
this.logger.info('Tracker enabled');
this.state.enabled = true;
if (this.state.suspended) {
return;
}
this.startInactivityTimer();
if (!this.state.initialized) {
this.state.initialized = true;
this.initialize();
}
this.tab.addListener('load', this.trackPageLoad);
this.tab.addListener('urlChange', this.trackTabUrlChange);
this.tab.addListener('visibilityChange', this.trackTabVisibilityChange);
}
public disable(): void {
if (!this.state.enabled) {
return;
}
this.logger.info('Tracker disabled');
this.state.enabled = false;
if (this.state.suspended) {
return;
}
this.tab.removeListener('load', this.trackPageLoad);
this.tab.removeListener('urlChange', this.trackTabUrlChange);
this.tab.removeListener('visibilityChange', this.trackTabVisibilityChange);
this.stopInactivityTimer();
}
public suspend(): void {
if (this.state.suspended) {
return;
}
this.logger.info('Tracker suspended');
if (this.state.enabled) {
this.disable();
this.state.enabled = true;
}
this.state.suspended = true;
}
public unsuspend(): void {
if (!this.state.suspended) {
return;
}
this.logger.info('Tracker unsuspended');
this.state.suspended = false;
if (this.state.enabled) {
this.state.enabled = false;
this.enable();
}
}
private initialize(): void {
if (trackedEvents[this.tab.id] === undefined) {
trackedEvents[this.tab.id] = {};
}
const initEvents = trackedEvents[this.tab.id];
if (this.tab.isNew && !initEvents.tabOpened) {
initEvents.tabOpened = true;
this.trackTabOpen({tabId: this.tab.id});
}
if (!initEvents.pageOpened) {
initEvents.pageOpened = true;
this.trackPageOpen({
url: this.tab.url,
referrer: this.tab.referrer,
});
}
}
private stopInactivityTimer(): void {
if (this.inactivityTimer.id !== undefined) {
window.clearTimeout(this.inactivityTimer.id);
delete this.inactivityTimer.id;
}
}
private startInactivityTimer(): void {
this.stopInactivityTimer();
this.inactivityTimer.since = Date.now();
let iteration = -1;
const startTimer = (): void => {
if (!this.inactivityRetryPolicy.shouldRetry(iteration + 1, this.inactivityTimer.since)) {
window.clearTimeout(this.inactivityTimer.id);
return;
}
iteration += 1;
this.inactivityTimer.id = window.setTimeout(
() => {
this.trackInactivity();
startTimer();
},
this.inactivityRetryPolicy.getDelay(iteration),
);
};
startTimer();
}
public track<T extends PartialTrackingEvent>(event: T, timestamp: number = Date.now()): Promise<T> {
return this.publish(this.enrichEvent(event, timestamp), timestamp).then(() => event);
}
private trackPageOpen({referrer, ...payload}: {url: string, referrer: string}): void {
this.enqueue({
type: 'pageOpened',
...payload,
...(referrer.length > 0 ? {referrer: referrer} : {}),
});
}
private trackPageLoad({detail: {tab}}: TabEvent): void {
this.enqueue({
type: 'pageLoaded',
url: tab.url,
title: tab.title,
lastModifiedTime: Date.parse(tab.document.lastModified),
});
}
private trackTabOpen(payload: {tabId: string}): void {
this.enqueue({
type: 'tabOpened',
...payload,
});
}
private trackTabUrlChange({detail}: TabUrlChangeEvent): void {
this.enqueue({
type: 'tabUrlChanged',
tabId: detail.tab.id,
url: detail.url,
});
}
private trackTabVisibilityChange({detail}: TabVisibilityChangeEvent): void {
this.enqueue({
type: 'tabVisibilityChanged',
tabId: detail.tab.id,
visibility: detail.visible ? 'visible' : 'hidden',
});
}
private trackInactivity(): void {
this.enqueue({
type: 'nothingChanged',
sinceTime: this.inactivityTimer.since,
});
}
private enqueue(event: TrackingEvent, timestamp: number = Date.now()): void {
this.publish(event, timestamp).catch(() => {
// suppress error
});
}
private notifyEvent(event: EventInfo): void {
this.listeners.map(listener => listener(event));
}
private publish<T extends TrackingEvent>(event: T, timestamp: number): Promise<T> {
if (event.type !== 'nothingChanged') {
this.stopInactivityTimer();
}
const metadata = this.options.eventMetadata;
const context: TrackingEventContext = {
tabId: this.tab.id,
url: this.tab.url,
...(Object.keys(metadata).length > 0 ? {metadata: metadata} : {}),
};
const eventInfo: EventInfo<T> = {
event: event,
context: context,
timestamp: timestamp,
status: 'pending',
};
if (this.state.suspended) {
this.logger.warn(`Tracker is suspended, ignoring event "${event.type}"`);
this.notifyEvent({...eventInfo, status: 'ignored'});
return Promise.reject(new Error('The tracker is suspended.'));
}
this.logger.info(`Tracked event "${event.type}"`);
this.notifyEvent(eventInfo);
return new Promise<T>((resolve, reject) => {
const promise = this.channel.publish(this.createBeacon(event, timestamp, context)).then(
() => {
this.logger.debug(`Successfully published event "${event.type}"`);
this.notifyEvent({...eventInfo, status: 'confirmed'});
resolve(event);
},
cause => {
this.logger.error(`Failed to publish event "${event.type}", reason: ${formatCause(cause)}`);
this.notifyEvent({...eventInfo, status: 'failed'});
reject(cause);
},
);
this.pending.push(promise);
promise.finally(() => {
this.pending.splice(this.pending.indexOf(promise), 1);
});
if (this.state.enabled && event.type !== 'nothingChanged') {
this.startInactivityTimer();
}
});
}
// Fills in defaults only the tracker can supply: cart events get
// lastUpdateTime defaulted to the tracking timestamp when the caller did
// not provide one. All other events pass through unchanged.
private enrichEvent(event: PartialTrackingEvent, timestamp: number): TrackingEvent {
  if (isCartPartialEvent(event)) {
    const {cart: {lastUpdateTime = timestamp, ...cart}, ...payload} = event;
    return {
      ...payload,
      cart: {
        ...cart,
        lastUpdateTime: lastUpdateTime,
      },
    };
  }
  return event;
}
// Assembles the wire-format beacon for an event. The auth token is only
// included when one is currently available from the token provider.
private createBeacon(event: TrackingEvent, timestamp: number, context: TrackingEventContext): Beacon {
  const token = this.tokenProvider.getToken();
  return {
    timestamp: timestamp,
    ...(token !== null ? {token: token.toString()} : {}),
    context: context,
    payload: this.createBeaconPayload(event),
  };
}
// Converts an event into its beacon payload. Identified-user events have
// userId renamed to externalUserId; a userSignedUp event carrying a profile
// additionally becomes a whole-profile "set" patch operation.
private createBeaconPayload(event: TrackingEvent): BeaconPayload {
  if (!isIdentifiedUserEvent(event)) {
    return event;
  }
  if (event.type === 'userSignedUp' && event.profile !== undefined) {
    const {userId, profile, ...payload} = event;
    return {
      ...payload,
      externalUserId: userId,
      patch: {
        operations: [
          {
            // Replace the entire profile at the root path.
            type: 'set',
            path: '.',
            value: profile,
          },
        ],
      },
    };
  }
  const {userId, ...payload} = event;
  return {
    ...payload,
    externalUserId: userId,
  };
}
}
|
<gh_stars>0
import * as fs from "fs";
import * as path from "path";
import * as crypto from "crypto";
import * as Dstore from "data-store";
const store = Dstore("FILENAME");
store.clear();
export const extensions = [".js", ".jsx", ".ts", ".tsx"];
export interface PathOption {
baseUrl: string[];
}
/**
 * Returns the hex-encoded MD5 digest of the given string.
 */
export function digest(str: string): string {
  const hash = crypto.createHash("md5");
  hash.update(str);
  return hash.digest("hex");
}
/**
 * Resolves filename against root and returns the absolute path when it
 * exists and is a regular file; otherwise returns null.
 */
export function genPath(root: string, filename: string): string | null {
  const resolved = path.resolve(root, filename);
  const isRegularFile = fs.existsSync(resolved) && fs.lstatSync(resolved).isFile();
  return isRegularFile ? resolved : null;
}
/**
 * Builds a stable cache key for a (root directory, module specifier) pair.
 * BUG FIX: the template previously interpolated a corrupted "$(unknown)"
 * placeholder instead of `filename`, so every lookup under the same root
 * collided on a single cache entry.
 */
function storePathKey(rootfile: string, filename: string) {
  return digest(`path_${rootfile}_${filename}`);
}
const DEFAULT_OPTIONS = {
baseUrl: [process.cwd(), `${process.cwd()}/node_modules`],
node_modules: false
};
/**
* node_modules do not thinks
* @param filename_rootfile
* @param filename
* @param options
*/
/**
 * Resolves a module specifier to an absolute file path, trying each base
 * root plus the importing file's directory, with Node-style extension and
 * index-file fallbacks. Results (including misses) are memoized in the
 * persistent store. Returns null when the module cannot be resolved.
 *
 * @param filename_rootfile path of the importing file (its dirname is used)
 * @param filename module specifier to resolve
 * @param options resolution roots; defaults to cwd and cwd/node_modules
 */
export function findModulePath(
  filename_rootfile: string,
  filename: string,
  options: PathOption = DEFAULT_OPTIONS
): string | null {
  // Only resolve specifiers whose explicit extension we understand.
  const ext = path.extname(filename);
  if (ext && extensions.indexOf(ext) === -1) {
    return null;
  }
  // Resolve relative to the importing file's directory.
  if (path.dirname(filename_rootfile) !== filename_rootfile) {
    filename_rootfile = path.dirname(filename_rootfile);
  }
  const storeKey = storePathKey(filename_rootfile, filename);
  const storeKeyVal = store.get(storeKey);
  const { baseUrl = DEFAULT_OPTIONS.baseUrl } = options;
  if (storeKeyVal) {
    // Misses are cached as the string "null".
    return storeKeyVal === "null" ? null : storeKeyVal;
  }
  // save result path and return pathname
  const storeAndReturn = (rpath: string | null) => {
    store.set(storeKey, String(rpath));
    return rpath;
  };
  const roots = baseUrl.concat(filename_rootfile);
  let r: string | null = null;
  roots.some(baseRoot => {
    if (ext) {
      const namepath = genPath(baseRoot, filename);
      r = namepath;
      return !!namepath;
    }
    // No extension: try "<name><ext>", then "<name>/index<ext>".
    // BUG FIX: both templates previously interpolated a corrupted
    // "$(unknown)" placeholder instead of the module specifier.
    let namepath2: string | null = null;
    extensions.some(extname => {
      namepath2 = genPath(baseRoot, `${filename}${extname}`);
      if (!namepath2) {
        namepath2 = genPath(baseRoot, `${filename}/index${extname}`);
      }
      return !!namepath2;
    });
    if (namepath2) {
      r = namepath2;
      return true;
    }
    return false;
  });
  return storeAndReturn(r);
}
|
#!/usr/bin/env bash
# Builds and pushes multi-arch (amd64 + arm64) docker images for the
# yakworks builder stack using docker buildx.
export REGISTRY=yakworks
BASE_URL=$REGISTRY/builder
# Create (and select) a buildx builder instance for multi-platform builds.
docker buildx create --use
# export DOCKER_DEFAULT_PLATFORM=linux/amd64
buildx="docker buildx build --platform linux/amd64,linux/arm64"
# Build the base image first; the single quotes survive until eval expands
# the command string, so the tags are passed as single arguments.
cmd="$buildx --push -t '$BASE_URL:3.14' -t '$BASE_URL:base' base/."
eval $cmd
# for t in k8s jdk8 jdk8-slim node node-chrome postgres14-jdk8; do
# Each variant image lives in a directory of the same name and derives
# from the base image via the REGISTRY build arg.
for t in k8s jdk8 jdk8-slim node node-chrome; do
echo "building ${BASE_URL}:${t}"
cmd="$buildx --build-arg REGISTRY=$REGISTRY --push -t '${BASE_URL}:${t}' ${t}/."
eval $cmd
done
# docker buildx prune -f
<gh_stars>1-10
# Specs for the Sesame::Api mixin, exercised through an anonymous class that
# includes the module. HTTP traffic is stubbed via stub_fixtures (defined in
# spec_helper), so no real API calls are made.
require 'spec_helper'
RSpec.describe Sesame::Api do
  # Dummy credentials matching the stubbed fixtures.
  let(:auth_token) { 'd015cf1353d21a14f392835bceb56d53649e447e3aebe440cef9d' }
  let(:device_id) { 'ABCD12345' }
  let(:test_class) do
    Class.new do
      include Sesame::Api
    end
  end
  subject { test_class.new }
  describe '#get_sesames' do
    before do
      stub_fixtures('sesames')
      subject.auth_token(auth_token)
    end
    let(:sesames) { subject.get_sesames }
    it 'retrieves a list of Sesame locks' do
      expect(sesames.length).to eq(2)
      expect(sesames.first['device_id']).to eq('ABC1234567')
    end
  end
  describe '#get_sesame' do
    before do
      stub_fixtures('sesames')
      subject.auth_token(auth_token)
    end
    let(:sesame) { subject.get_sesame(device_id: device_id) }
    it 'retrieves a single Sesame lock' do
      expect(sesame['locked']).to eq(true)
      expect(sesame['battery']).to eq(100)
      expect(sesame['responsive']).to eq(true)
    end
    it 'raises an error when lock ID is not found' do
      expect { subject.get_sesame(device_id: 'EFGH67890') }
        .to raise_error(Sesame::Error, /BAD_PARAMS/)
    end
  end
  describe '#control_sesame' do
    before do
      stub_fixtures('control')
      subject.auth_token(auth_token)
    end
    it 'should lock' do
      expect { subject.control_sesame(device_id: device_id, command: 'lock') }.not_to raise_error
    end
    it 'should unlock' do
      expect { subject.control_sesame(device_id: device_id, command: 'unlock') }.not_to raise_error
    end
    # Unknown commands are rejected server-side with BAD_PARAMS.
    it 'should not squish' do
      expect { subject.control_sesame(device_id: device_id, command: 'squish') }
        .to raise_error(Sesame::Error, /BAD_PARAMS/)
    end
  end
end
|
package com.sanctionco.opconnect.model.apiactivity;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
import java.util.StringJoiner;
/**
* Represents an actor (1Password connect server) that performed an {@link APIRequest}.
*
* <p>See the <a href="https://support.1password.com/connect-api-reference/#apirequest-object">
* APIRequest</a> documentation for more details.
*/
public class Actor {
  private final String id;
  private final String account;
  private final String jti;
  private final String userAgent;
  private final String ip;

  /**
   * Jackson creation constructor; all fields are populated from the JSON
   * representation of an APIRequest's actor.
   */
  @JsonCreator
  Actor(@JsonProperty("id") String id,
        @JsonProperty("account") String account,
        @JsonProperty("jti") String jti,
        @JsonProperty("userAgent") String userAgent,
        @JsonProperty("ip") String ip) {
    this.id = id;
    this.account = account;
    this.jti = jti;
    this.userAgent = userAgent;
    this.ip = ip;
  }

  /**
   * Get the ID of the {@code Actor}.
   *
   * @return the id of the actor (connect server)
   */
  public String getId() {
    return id;
  }

  /**
   * Get the 1Password account ID.
   *
   * @return the id of the 1Password account the actor belongs to
   */
  public String getAccount() {
    return account;
  }

  /**
   * Get the Access Token ID.
   *
   * @return the id of the access token used to authenticate the request
   */
  public String getJti() {
    return jti;
  }

  /**
   * Get the user-agent string.
   *
   * @return the user agent string specified in the request
   */
  public String getUserAgent() {
    return userAgent;
  }

  /**
   * Get the IP address.
   *
   * @return the ip address the request originated from
   */
  public String getIp() {
    return ip;
  }

  /** Two actors are equal when all five fields are equal. */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    Actor actor = (Actor) o;
    return Objects.equals(id, actor.id)
        && Objects.equals(account, actor.account)
        && Objects.equals(jti, actor.jti)
        && Objects.equals(userAgent, actor.userAgent)
        && Objects.equals(ip, actor.ip);
  }

  /** Hash code consistent with {@link #equals(Object)}. */
  @Override
  public int hashCode() {
    return Objects.hash(id, account, jti, userAgent, ip);
  }

  /** Human-readable representation listing all fields. */
  @Override
  public String toString() {
    return new StringJoiner(", ", Actor.class.getSimpleName() + "[", "]")
        .add("id='" + id + "'")
        .add("account='" + account + "'")
        .add("jti='" + jti + "'")
        .add("userAgent='" + userAgent + "'")
        .add("ip='" + ip + "'")
        .toString();
  }
}
|
# Import necessary libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# BUG FIX: these three names were used below but never imported, so the
# script crashed with NameError before doing any work.
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score

# Read in the dataset (expects a 'target_class' label column).
df = pd.read_csv('data.csv')

# Split the data into training and test sets (80/20, fixed seed for
# reproducibility).
X = df.drop('target_class', axis=1)
y = df.target_class
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Build the classification model
clf = LogisticRegression()
clf.fit(X_train, y_train)

# Make predictions on the test set
y_pred = clf.predict(X_test)

# Evaluate the model
accuracy = accuracy_score(y_test, y_pred)
print('Accuracy: %.2f' % accuracy)
<filename>packages/react-integration/cypress/integration/descriptionlist.spec.ts
// Cypress integration test for the PatternFly description-list demo page.
describe('Description List Demo Test', () => {
  it('Navigate to demo section', () => {
    cy.visit('http://localhost:3000/description-list-demo-nav-link');
  });
  it('Verify list with help text', () => {
    // The list renders with at least one group and no popover initially.
    cy.get('#description-list-help-text')
      .should('exist')
      .children('.pf-c-description-list__group');
    cy.get('.pf-c-popover__content').should('not.exist');
    // Clicking the first term's text opens the help popover.
    cy.get(
      '#description-list-help-text > :nth-child(1) > .pf-c-description-list__term > .pf-c-description-list__text'
    ).click();
    cy.get('.pf-c-popover__content').should('exist');
  });
});
|
export * from './lib/ActorQueryOperationNop';
|
<gh_stars>10-100
// Autogenerated from library/elements.i
package ideal.library.elements;
// NOTE: autogenerated from library/elements.i — regenerate rather than edit.
// A single-argument, immutable, equality-comparable procedure abstraction.
public interface procedure1<R, A0> extends immutable_value, equality_comparable {
  // Invokes the procedure with one argument and returns its result.
  R call(A0 first);
}
|
<gh_stars>0
package prospector.routiduct.gui.blueprint.element;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import prospector.routiduct.Routiduct;
import prospector.routiduct.gui.GuiRoutiduct;
/**
 * GUI element that renders a (optionally centered, optionally clipped)
 * string at a fixed position inside a blueprint GUI.
 */
public class TextElement extends ElementBase {
  protected String text;
  protected int color;
  // When true and x == -1, the string is centered on the GUI width.
  protected boolean centered = false;
  // Maximum pixel width before the text is trimmed with an ellipsis; -1 disables clipping.
  protected int clipTo = -1;

  public TextElement(String text, int color, int x, int y) {
    super(x, y);
    this.text = text;
    this.color = color;
  }

  public TextElement(String text, int color, int x, int y, int clipTo) {
    this(text, color, x, y);
    this.clipTo = clipTo;
  }

  public TextElement(String text, int color, int x, int y, boolean centered) {
    this(text, color, x, y);
    this.centered = centered;
  }

  // Convenience: x = -1 means "center on the whole GUI" when centered is true.
  public TextElement(String text, int color, int y, boolean centered) {
    this(text, color, -1, y);
    this.centered = centered;
  }

  public TextElement(String text, int color, int x, int y, boolean centered, int clipTo) {
    this(text, color, x, y, centered);
    this.clipTo = clipTo;
  }

  public TextElement(String text, int color, int y, boolean centered, int clipTo) {
    this(text, color, y, centered);
    this.clipTo = clipTo;
  }

  /**
   * Draws the string. If clipping is enabled and the text is too wide it is
   * trimmed to fit with a trailing ellipsis. Note: when not centered and
   * x == -1 nothing is drawn.
   */
  @Override
  @SideOnly(Side.CLIENT)
  public void draw(GuiRoutiduct gui) {
    String string = text;
    if (clipTo > -1 && gui.mc.fontRenderer.getStringWidth(string) > clipTo) {
      // Reserve room for the ellipsis before trimming.
      string = gui.mc.fontRenderer.trimStringToWidth(text, clipTo - gui.mc.fontRenderer.getStringWidth("..."));
      string = string + "...";
    }
    if (centered) {
      if (x > -1) {
        Routiduct.proxy.getGuiAssembler().drawCenteredString(gui, string, x, y, color);
      } else {
        Routiduct.proxy.getGuiAssembler().drawCenteredString(gui, string, y, color);
      }
    } else if (x > -1) {
      Routiduct.proxy.getGuiAssembler().drawString(gui, string, x, y, color);
    }
  }
}
<reponame>nmburgan/beaker-pe<gh_stars>0
require 'beaker'
require 'beaker-puppet'
require 'stringify-hash'
require 'beaker-pe/version'
require 'beaker-pe/install/pe_defaults'
require 'beaker-pe/install/pe_utils'
require 'beaker-pe/install/ca_utils'
require 'beaker-pe/options/pe_version_scraper'
require 'beaker-pe/pe-client-tools/config_file_helper'
require 'beaker-pe/pe-client-tools/install_helper'
require 'beaker-pe/pe-client-tools/executable_helper'
# Aggregates all beaker-pe DSL mixins into a single module so hosts of the
# Beaker DSL gain PE install, CA, version-scraping and pe-client-tools
# helpers in one include.
module Beaker
  module DSL
    module PE
      include Beaker::DSL::InstallUtils::PEDefaults
      include Beaker::DSL::InstallUtils::PEUtils
      include Beaker::DSL::InstallUtils::PEClientTools
      include Beaker::DSL::InstallUtils::CAUtils
      include Beaker::Options::PEVersionScraper
      include Beaker::DSL::PEClientTools::ConfigFileHelper
      include Beaker::DSL::PEClientTools::ExecutableHelper
    end
  end
end
# Boilerplate DSL inclusion mechanism:
# First we register our module with the Beaker DSL
Beaker::DSL.register( Beaker::DSL::PE )
|
package com.example.wy.tickto.user_ins;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import com.example.wy.tickto.R;
import com.example.wy.tickto.leftmenu.SetActivity;
// Simple screen that only displays the static user-instructions layout.
public class user_instructions extends AppCompatActivity {
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_user_instructions);
  }
}
|
<reponame>MeetDevin/WebCrawler<gh_stars>1-10
from PIL import Image
import numpy as np
import os
def windows(length, window_size):
    """Yield (start, end, index) for half-overlapping sliding windows.

    Successive windows advance by ``int(window_size * 0.5)`` (50% overlap)
    until ``start`` reaches ``length``. The ``end`` of the final windows may
    extend past ``length``; callers are expected to clamp as needed.
    """
    step = int(window_size * 0.5)
    start, index = 0, 0
    while start < length:
        yield start, start + window_size, index
        start += step
        index += 1
def fuck_data(window_size=600):
    # Slices every image of the 'Pica+pica' class under images/ into
    # half-overlapping vertical strips of `window_size` pixels and saves
    # each strip as "<name>(<i>).jpg" under new_images/<class>/.
    # Already-existing strips are skipped.
    files_path = 'images'
    for train_class in os.listdir(files_path):
        # Only this one class is processed; other class folders are skipped.
        if train_class != 'Pica+pica':
            continue
        for pic_name in os.listdir(files_path + '/' + train_class):
            if os.path.isfile(files_path + '/' + train_class + '/' + pic_name):
                filename = files_path + '/' + train_class + '/' + pic_name
                save_dir = 'new_images' + '/' + train_class
                if not os.path.exists(save_dir):
                    os.makedirs(save_dir)
                # Read the image
                old_img = np.asarray(Image.open(filename))
                img = old_img.copy()
                img.setflags(write=True)
                # Walk windows along the image width (axis 1).
                for (start, end, i) in windows(np.shape(img)[1], window_size):
                    # The last window is shifted left so it stays fully
                    # inside the image; images narrower than window_size
                    # are skipped entirely.
                    if np.shape(img[:, start:end])[1] < window_size:
                        end = np.shape(img)[1]
                        start = end - window_size
                        if start < 0:
                            break
                    save_path = save_dir + '/' + pic_name.replace('.jpg', '(' + str(i) + ')') + '.jpg'
                    if os.path.exists(save_path):
                        print('--exist: ', save_path)
                        continue
                    else:
                        Image.fromarray(old_img[:, start:end]).save(save_path)
                        print('save:', save_path)
if __name__ == '__main__':
    fuck_data(600)
|
<reponame>DianeYuan/introduction-to-java-programming-10th-edition
package com.company;
import java.util.Scanner;
/**
 * Reads three side lengths from standard input and reports whether they
 * form a valid triangle (every pair of sides must exceed the third).
 */
public class Exercise_3_19 {
  public static void main(String[] args) {
    Scanner input = new Scanner(System.in);
    System.out.print("Enter the length of 3 sides in a triangle: ");
    double a = input.nextDouble();
    double b = input.nextDouble();
    double c = input.nextDouble();
    // Triangle inequality: each pair of sides must sum to more than the third.
    boolean isValid = (a + b > c) && (b + c > a) && (c + a > b);
    if (isValid)
      System.out.println("Inputs are valid");
    else
      System.out.println("Error: illegal inputs");
  }
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.phone = void 0;
var phone = {
"viewBox": "0 0 8 8",
"children": [{
"name": "path",
"attribs": {
"d": "M.19 0c-.11 0-.19.08-.19.19v7.63c0 .11.08.19.19.19h4.63c.11 0 .19-.08.19-.19v-7.63c0-.11-.08-.19-.19-.19h-4.63zm.81 1h3v5h-3v-5zm1.5 5.5c.28 0 .5.22.5.5s-.22.5-.5.5-.5-.22-.5-.5.22-.5.5-.5z",
"transform": "translate(1)"
}
}]
};
exports.phone = phone; |
<reponame>brahici/WBO<filename>index/views.py
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from .models import Menu
def index(request):
    # Renders the landing page template.
    # NOTE(review): render_to_response and the context_instance argument are
    # legacy APIs (removed in modern Django); migrating to
    # django.shortcuts.render(request, 'index/index.html') is the usual fix —
    # confirm the project's Django version before changing.
    return render_to_response('index/index.html',
            context_instance=RequestContext(request))
|
def count_solutions(initial_score, target_score, fixed_set_of_scores=None):
    """Count ordered ways (compositions) to reach ``target_score`` using
    increments drawn from ``fixed_set_of_scores``.

    Args:
        initial_score: starting score; if it already exceeds the target
            there is no solution.
        target_score: score to reach.
        fixed_set_of_scores: iterable of allowed score increments. BUG FIX:
            the original body read an undefined global of this name and
            raised NameError; it is now an optional parameter. Passing None
            falls back to a module-level ``fixed_set_of_scores`` if one
            exists, preserving the original global lookup for legacy callers.

    Returns:
        Number of distinct ordered sequences of increments summing to
        ``target_score``.
    """
    if fixed_set_of_scores is None:
        # Backward-compatible fallback to the module-level global the
        # original implementation depended on.
        fixed_set_of_scores = globals()['fixed_set_of_scores']
    if target_score == 0:
        return 1  # Exactly one way: take no increments at all.
    if initial_score > target_score:
        return 0  # Already past the target; unreachable.
    # solutions[s] = number of ordered ways to build score s from 0.
    solutions = [0] * (target_score + 1)
    solutions[0] = 1
    for score in range(1, target_score + 1):
        for fixed_score in fixed_set_of_scores:
            if score - fixed_score >= 0:
                solutions[score] += solutions[score - fixed_score]
    return solutions[target_score]
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
/**
 * Fires when the timer alarm goes off: posts a "timer complete" notification
 * that reopens MainActivity when tapped.
 */
public class AlarmReceiver extends BroadcastReceiver {
  @Override
  public void onReceive(Context context, Intent intent) {
    // Get notification manager
    NotificationManager notificationManager =
        (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
    // Create an intent for the notification; CLEAR_TOP reuses an existing
    // MainActivity instance instead of stacking a new one.
    Intent notificationIntent = new Intent(context, MainActivity.class);
    notificationIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
    // NOTE(review): PendingIntent with flags 0 and Notification.Builder
    // without a channel target pre-Oreo APIs; newer Android versions
    // require FLAG_IMMUTABLE/FLAG_MUTABLE and a NotificationChannel —
    // confirm the app's targetSdkVersion.
    PendingIntent pendingIntent = PendingIntent.getActivity(context, 0,
        notificationIntent, 0);
    // Create the notification
    Notification notification = new Notification.Builder(context)
        .setContentTitle("Timer is complete")
        .setContentText("Your timer has expired!")
        .setSmallIcon(R.mipmap.ic_launcher)
        .setContentIntent(pendingIntent)
        .build();
    // Send notification (fixed id 0: a new alarm replaces the old notification)
    notificationManager.notify(0, notification);
  }
}
<filename>extern/typed-geometry/src/typed-geometry/types/objects/halfspace.hh
#pragma once
#include <typed-geometry/types/scalars/default.hh>
#include "../dir.hh"
#include "../pos.hh"
#include "traits.hh"
// A halfspace has a distance to the origin and a normal direction
// In dimension n, the distance is 1-dimensional and the normal direction is n-dimensional
// Note that there is a semantic difference between plane and halfspace in nD:
// - the plane describes all points lying on an (n-1)-dimensional plane
// - the halfspace describes all points lying inside this n-dimensional subspace
namespace tg
{
template <int D, class ScalarT>
struct halfspace;
// Common halfspace types
using halfspace1 = halfspace<1, f32>;
using halfspace2 = halfspace<2, f32>;
using halfspace3 = halfspace<3, f32>;
using halfspace4 = halfspace<4, f32>;
using fhalfspace1 = halfspace<1, f32>;
using fhalfspace2 = halfspace<2, f32>;
using fhalfspace3 = halfspace<3, f32>;
using fhalfspace4 = halfspace<4, f32>;
using dhalfspace1 = halfspace<1, f64>;
using dhalfspace2 = halfspace<2, f64>;
using dhalfspace3 = halfspace<3, f64>;
using dhalfspace4 = halfspace<4, f64>;
using ihalfspace1 = halfspace<1, i32>;
using ihalfspace2 = halfspace<2, i32>;
using ihalfspace3 = halfspace<3, i32>;
using ihalfspace4 = halfspace<4, i32>;
using uhalfspace1 = halfspace<1, u32>;
using uhalfspace2 = halfspace<2, u32>;
using uhalfspace3 = halfspace<3, u32>;
using uhalfspace4 = halfspace<4, u32>;
// ======== IMPLEMENTATION ========
// Halfspace in D dimensions: the point set { p : dot(normal, p) <= dis }.
// `normal` points away from the contained region (see member comment).
template <int D, class ScalarT>
struct halfspace
{
    using scalar_t = ScalarT;
    using dir_t = dir<D, ScalarT>;
    using pos_t = pos<D, ScalarT>;

    dir_t normal; //< points _away_ from the halfspace
    scalar_t dis; //< signed distance of the boundary plane from the origin, along normal

    constexpr halfspace() = default;
    constexpr halfspace(dir_t n, scalar_t d) : normal(n), dis(d) {}
    // Construct from a normal and a point on the boundary (defined elsewhere).
    constexpr halfspace(dir_t n, pos_t p);

    // Explicit scalar-type conversion (e.g. float -> double halfspace).
    template <class OtherT>
    explicit constexpr halfspace(halfspace<D, OtherT> const& v) : normal(v.normal), dis(v.dis)
    {
    }

    [[nodiscard]] bool operator==(halfspace const& rhs) const { return normal == rhs.normal && dis == rhs.dis; }
    [[nodiscard]] bool operator!=(halfspace const& rhs) const { return !operator==(rhs); }
};

// Reflection hook: exposes the members to typed-geometry's introspection system.
template <class I, int D, class ScalarT>
constexpr void introspect(I&& i, halfspace<D, ScalarT>& v)
{
    i(v.normal, "normal");
    i(v.dis, "dis");
}

// Trait registration: a halfspace is an infinite D-dimensional object.
template <int D, class ScalarT>
struct object_traits<halfspace<D, ScalarT>> : detail::infinite_object_traits<D, ScalarT, D, default_object_tag>
{
};
} // namespace tg
|
#!/bin/bash
# Runs the ironfish CLI in docker, timestamping output and teeing it to a
# per-command log file under ~/.ironfish/logs.
set -euo pipefail
script_root="$(cd "$(dirname "$(readlink "$([[ "${OSTYPE}" == linux* ]] && echo "-f")" "$0")")"; pwd)"
source "${script_root}/lib/utils.sh"
require ts "Please install ts: apt install moreutils"

LOGS_DIR="${HOME}/.ironfish/logs"
TS_FORMAT='%Y-%m-%d %H:%M:%.S '

# Choose the log file by subcommand.
if [ $# -eq 0 ]; then
  LOG_FILE="${LOGS_DIR}/node.log"
elif [ "$1" == "miners:start" ]; then
  # Too much spam
  #LOG_FILE="${LOGS_DIR}/miner.log"
  LOG_FILE="/dev/null"
else
  LOG_FILE="${LOGS_DIR}/other.log"
fi
mkdir -p "${LOGS_DIR}"

# BUG FIX: quoted "$@" preserves arguments containing spaces; the previous
# unquoted $* re-split them into separate words.
docker run --rm --tty --interactive --network host --volume "${HOME}/.ironfish:/root/.ironfish" ghcr.io/iron-fish/ironfish:latest "$@" | ts "${TS_FORMAT}" | tee -a "${LOG_FILE}"
|
module.exports.helpers = require('./helpers');
module.exports.random = require('./random');
|
#!/bin/bash
clojure -Sdeps '{:deps {cljfmt {:mvn/version "0.6.7"}}}' -m cljfmt.main fix src test
|
import React from "react";
import Button from "@material-ui/core/Button";
import Card from "@material-ui/core/Card";
import CardActions from "@material-ui/core/CardActions";
import CardContent from "@material-ui/core/CardContent";
import CardHeader from "@material-ui/core/CardHeader";
import CssBaseline from "@material-ui/core/CssBaseline";
import Grid from "@material-ui/core/Grid";
import Typography from "@material-ui/core/Typography";
import { Link } from "react-router-dom";
import Container from "@material-ui/core/Container";
import { useStyles } from "../../theme/theme";
import { Footer } from "../../components/footerComp";
import { Navigation } from "../../components/navComp";
import enApiIcon from "../../assets/EnAPI_Icon_Text.png";
import enUIIcon from "../../assets/EnUI_Icon_Text.png";
import enosIcon from "../../assets/EnOS_Icon.png";
// Card data for the software grid: each entry supplies an icon title
// (a JSX element, hence the `as any` casts), feature bullet points, and
// the route its "More Info" button links to.
const softwares = [
  {
    title: <img src={enApiIcon} alt="en api icon" height="100" /> as any,
    subheader: "Software",
    description: [
      "Open Source",
      "Apache2",
      "JSON-RPC",
      "OpenRPC Spec",
    ],
    buttonText: "More Info",
    link: "enapi",
  },
  {
    title: <img src={enUIIcon} alt="en ui icon" height="100" /> as any,
    subheader: "Software",
    description: [
      "Open Source",
      "Apache2",
      "Multi OS Support",
      "Desktop App",
    ],
    buttonText: "More Info",
    link: "enui",
  },
  {
    title: <img src={enosIcon} alt="en os icon" height="100" /> as any,
    subheader: "Software",
    description: [
      "Open Source",
      "SBC/VM/Barebones support",
      "ARM64 Support",
      "Ubuntu Based",
    ],
    buttonText: "More Info",
    link: "enos",
  },
];
/**
 * Marketing page listing the software products as a responsive card grid.
 */
const SoftwarePage: React.FC = () => {
  const classes = useStyles();
  return (
    <React.Fragment>
      <CssBaseline />
      <Navigation />
      {/* Hero unit */}
      <Container maxWidth="md" component="main" className={classes.heroContent}>
        <Typography component="h1" variant="h2" align="center" color="textPrimary" gutterBottom>
          Software
        </Typography>
        <Typography align="center" color="textSecondary" component="p">
          Blockchain software for next generation of Web3 development.
        </Typography>
      </Container>
      {/* End hero unit */}
      <Container maxWidth="md" component="main">
        <Grid container spacing={5} alignItems="flex-end">
          {/* BUG FIX: the React key was software.title, which is a JSX
              element, not a stable string identifier; use the unique
              route string instead. The old sm prop compared the element
              title to "Enterprise" (always false), so sm is simply 6. */}
          {softwares.map((software) => (
            <Grid item key={software.link} xs={12} sm={6} md={4}>
              <Card>
                <CardHeader
                  title={software.title}
                  subheader={software.subheader}
                  titleTypographyProps={{ align: "center" }}
                  subheaderTypographyProps={{ align: "center" }}
                  className={classes.cardHeader}
                />
                <CardContent>
                  <ul>
                    {software.description.map((line) => (
                      <Typography component="li" variant="subtitle1" align="center" key={line}>
                        {line}
                      </Typography>
                    ))}
                  </ul>
                </CardContent>
                <CardActions>
                  <Link to={software.link} className={classes.buttonlink}><Button fullWidth variant="outlined" color="primary">{software.buttonText}</Button></Link>
                </CardActions>
              </Card>
            </Grid>
          ))}
        </Grid>
      </Container>
      {/* Footer */}
      <Footer />
      {/* End footer */}
    </React.Fragment>
  );
};
export default SoftwarePage;
|
// Extracts and prints the "name" field from a JSON document.
// NOTE(review): assumes a variable `jsonString` holding valid JSON is
// defined elsewhere — confirm; JSON.parse throws SyntaxError on bad input.
let parsedJSON = JSON.parse(jsonString);
let nameValue = parsedJSON['name'];
console.log(nameValue);
#!/usr/bin/env bash
# Runs each adapter project's generate.sh.
# BUG FIX: without fail-fast, a failed `cd` left the script running the
# wrong generate.sh from an unexpected directory; subshells also guarantee
# each step starts from the repository root.
set -euo pipefail

export VERSION="v0.0.1-alpha"

for dir in Adapters/Adapters.Rest Adapters/Adapters.Zeebe Adapters/Adapters.Zeebe.Tests; do
  (cd "$dir" && bash generate.sh)
done
from pypy.interpreter.error import OperationError
from pypy.interpreter.mixedmodule import MixedModule
from pypy.interpreter import gateway
from pypy.objspace.std.stdtypedef import StdTypeDef, SMM
from pypy.objspace.std.register_all import register_all
dict_copy = SMM('copy', 1,
doc='D.copy() -> a shallow copy of D')
dict_items = SMM('items', 1,
doc="D.items() -> list of D's (key, value) pairs, as"
' 2-tuples')
dict_keys = SMM('keys', 1,
doc="D.keys() -> list of D's keys")
dict_values = SMM('values', 1,
doc="D.values() -> list of D's values")
dict_has_key = SMM('has_key', 2,
doc='D.has_key(k) -> True if D has a key k, else False')
dict_clear = SMM('clear', 1,
doc='D.clear() -> None. Remove all items from D.')
dict_get = SMM('get', 3, defaults=(None,),
doc='D.get(k[,d]) -> D[k] if k in D, else d. d defaults'
' to None.')
dict_pop = SMM('pop', 2, varargs_w=True,
doc='D.pop(k[,d]) -> v, remove specified key and return'
' the corresponding value\nIf key is not found, d is'
' returned if given, otherwise KeyError is raised')
dict_popitem = SMM('popitem', 1,
doc='D.popitem() -> (k, v), remove and return some (key,'
' value) pair as a\n2-tuple; but raise KeyError if D'
' is empty')
dict_setdefault = SMM('setdefault', 3, defaults=(None,),
doc='D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d'
' if k not in D')
dict_update = SMM('update', 1, general__args__=True,
doc='D.update(E, **F) -> None. Update D from E and F:'
' for k in E: D[k] = E[k]\n(if E has keys else: for'
' (k, v) in E: D[k] = v) then: for k in F: D[k] ='
' F[k]')
dict_iteritems = SMM('iteritems', 1,
doc='D.iteritems() -> an iterator over the (key, value)'
' items of D')
dict_iterkeys = SMM('iterkeys', 1,
doc='D.iterkeys() -> an iterator over the keys of D')
dict_itervalues = SMM('itervalues', 1,
doc='D.itervalues() -> an iterator over the values of D')
dict_viewkeys = SMM('viewkeys', 1,
doc="D.viewkeys() -> a set-like object providing a view on D's keys")
dict_viewitems = SMM('viewitems', 1,
doc="D.viewitems() -> a set-like object providing a view on D's items")
dict_viewvalues = SMM('viewvalues', 1,
doc="D.viewvalues() -> an object providing a view on D's values")
dict_reversed = SMM('__reversed__', 1)
def dict_reversed__ANY(space, w_dict):
    # __reversed__ on a dict is always rejected: reversal is only defined
    # for sequences in this implementation.
    raise OperationError(space.w_TypeError, space.wrap('argument to reversed() must be a sequence'))
register_all(vars(), globals())
def descr_fromkeys(space, w_type, w_keys, w_fill=None):
    # Implements dict.fromkeys(keys[, fill]) as a classmethod.
    # For the exact `dict` type we build the result directly (with a
    # fast path when all keys are interpreter-level strings); for dict
    # subclasses we must instantiate via the subclass and use the generic
    # setitem protocol so overridden behavior is respected.
    from pypy.objspace.std.dictmultiobject import W_DictMultiObject
    if w_fill is None:
        w_fill = space.w_None
    if space.is_w(w_type, space.w_dict):
        w_dict = W_DictMultiObject.allocate_and_init_instance(space, w_type)
        # Fast path: keys exposed as a plain list of strings.
        strlist = space.listview_str(w_keys)
        if strlist is not None:
            for key in strlist:
                w_dict.setitem_str(key, w_fill)
        else:
            for w_key in space.listview(w_keys):
                w_dict.setitem(w_key, w_fill)
    else:
        w_dict = space.call_function(w_type)
        for w_key in space.listview(w_keys):
            space.setitem(w_dict, w_key, w_fill)
    return w_dict
app = gateway.applevel('''
def dictrepr(currently_in_repr, d):
if len(d) == 0:
return "{}"
dict_id = id(d)
if dict_id in currently_in_repr:
return '{...}'
currently_in_repr[dict_id] = 1
try:
items = []
# XXX for now, we cannot use iteritems() at app-level because
# we want a reasonable result instead of a RuntimeError
# even if the dict is mutated by the repr() in the loop.
for k, v in dict.items(d):
items.append(repr(k) + ": " + repr(v))
return "{" + ', '.join(items) + "}"
finally:
try:
del currently_in_repr[dict_id]
except:
pass
''', filename=__file__)
dictrepr = app.interphook("dictrepr")
def descr_repr(space, w_dict):
    # Delegates to the applevel dictrepr helper, passing the per-execution-
    # context set of dicts currently being repr'd so recursive dicts render
    # as '{...}' instead of recursing forever.
    ec = space.getexecutioncontext()
    w_currently_in_repr = ec._py_repr
    if w_currently_in_repr is None:
        w_currently_in_repr = ec._py_repr = space.newdict()
    return dictrepr(space, w_currently_in_repr, w_dict)
# ____________________________________________________________
def descr__new__(space, w_dicttype, __args__):
    # dict.__new__: allocate an empty dict instance of the requested
    # (sub)type; population happens later in __init__.
    from pypy.objspace.std.dictmultiobject import W_DictMultiObject
    w_obj = W_DictMultiObject.allocate_and_init_instance(space, w_dicttype)
    return w_obj
# ____________________________________________________________
dict_typedef = StdTypeDef("dict",
__doc__ = '''dict() -> new empty dictionary.
dict(mapping) -> new dictionary initialized from a mapping object\'s
(key, value) pairs.
dict(seq) -> new dictionary initialized as if via:
d = {}
for k, v in seq:
d[k] = v
dict(**kwargs) -> new dictionary initialized with the name=value pairs
in the keyword argument list. For example: dict(one=1, two=2)''',
__new__ = gateway.interp2app(descr__new__),
__hash__ = None,
__repr__ = gateway.interp2app(descr_repr),
fromkeys = gateway.interp2app(descr_fromkeys, as_classmethod=True),
)
dict_typedef.registermethods(globals())
# ____________________________________________________________
def descr_dictiter__reduce__(w_self, space):
    """
    This is a slightly special case of pickling.
    Since iteration over a dict is a bit hairy,
    we do the following:
    - create a clone of the dict iterator
    - run it to the original position
    - collect all remaining elements into a list
    At unpickling time, we just use that list
    and create an iterator on it.
    This is of course not the standard way.
    XXX to do: remove this __reduce__ method and do
    a registration with copy_reg, instead.
    """
    w_mod = space.getbuiltinmodule('_pickle_support')
    mod = space.interp_w(MixedModule, w_mod)
    new_inst = mod.get('dictiter_surrogate_new')
    w_typeobj = space.gettypeobject(dictiter_typedef)
    # Pickling is currently disabled: this raise makes everything below
    # unreachable. The dead code is kept on purpose (see the XXXXXX note)
    # as the template for re-enabling the clone-and-spool approach.
    raise OperationError(
        space.w_TypeError,
        space.wrap("can't pickle dictionary-keyiterator objects"))
    # XXXXXX get that working again

    # we cannot call __init__ since we don't have the original dict
    if isinstance(w_self, W_DictIter_Keys):
        w_clone = space.allocate_instance(W_DictIter_Keys, w_typeobj)
    elif isinstance(w_self, W_DictIter_Values):
        w_clone = space.allocate_instance(W_DictIter_Values, w_typeobj)
    elif isinstance(w_self, W_DictIter_Items):
        w_clone = space.allocate_instance(W_DictIter_Items, w_typeobj)
    else:
        msg = "unsupported dictiter type '%s' during pickling" % (w_self, )
        raise OperationError(space.w_TypeError, space.wrap(msg))
    w_clone.space = space
    w_clone.content = w_self.content
    w_clone.len = w_self.len
    w_clone.pos = 0
    w_clone.setup_iterator()
    # spool until we have the same pos
    while w_clone.pos < w_self.pos:
        w_obj = w_clone.next_entry()
        w_clone.pos += 1
    stuff = [w_clone.next_entry() for i in range(w_clone.pos, w_clone.len)]
    w_res = space.newlist(stuff)
    tup = [
        w_res
    ]
    w_ret = space.newtuple([new_inst, space.newtuple(tup)])
    return w_ret
# ____________________________________________________________
dictiter_typedef = StdTypeDef("dictionaryiterator",
__reduce__ = gateway.interp2app(descr_dictiter__reduce__),
)
# ____________________________________________________________
# Dict views
dict_keys_typedef = StdTypeDef(
"dict_keys",
)
dict_items_typedef = StdTypeDef(
"dict_items",
)
dict_values_typedef = StdTypeDef(
"dict_values",
)
|
# Handles staff-side cancellation of prison visits. A visit may only be
# cancelled while it is still cancellable; otherwise the user is bounced
# back to the visit page with a notice.
class Prison::CancellationsController < ApplicationController
  include StaffResponseContext
  before_action :authorize_prison_request
  before_action :authenticate_user
  before_action :check_visit_cancellable

  # Validates and applies the cancellation; on failure re-renders the
  # cancellation form with the validation error.
  def create
    if cancellation_response.valid?
      cancellation_response.cancel!
      ga_tracker.send_cancelled_visit_event
      flash[:notice] = t('visit_cancelled', scope: %i[prison flash])
      redirect_to prison_visit_path(memoised_visit)
    else
      flash.now[:alert] = cancellation_response.error_message
      @visit = memoised_visit.decorate
      @message = Message.new
      render :new
    end
  end

  private

  # Memoized service object wrapping the visit, the submitted reasons and
  # the acting user.
  def cancellation_response
    @cancellation_response ||= CancellationResponse.new(
      memoised_visit,
      cancellation_params,
      user: current_user
    )
  end

  # Strong params: only the reasons array is accepted from the form;
  # nomis_cancelled is always forced to true server-side.
  def cancellation_params
    params.
      require(:cancellation).
      permit(reasons: []).
      merge(nomis_cancelled: true)
  end

  # before_action guard: redirects away when the visit can no longer be
  # cancelled.
  def check_visit_cancellable
    unless memoised_visit.can_cancel?
      flash[:notice] = t('already_cancelled', scope: %i[prison flash])
      redirect_to prison_visit_path(memoised_visit)
    end
  end

  # Google Analytics tracker scoped to this user/visit/request.
  def ga_tracker
    @ga_tracker ||= GATracker.new(current_user, memoised_visit, cookies, request)
  end
end
|
#!/bin/sh
# Remove this step when Xcode starts to do this automatically
# Delegates to the project script that strips architectures from embedded
# dynamic frameworks (presumably unused/simulator slices — see that script).
bash "$SRCROOT/$PROJECT_NAME/strip-dynamic-framework-architectures.sh"
|
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.math.rootfinding;
import com.opengamma.analytics.math.MathException;
import com.opengamma.analytics.math.function.Function1D;
/**
* Finds a single root of a function using the bisection method.
* <p>
* If a root of a function $f(x)$ is bounded by two values $x_1$ and $x_2$,
* then $f(x_1)f(x_2) < 0$. The function is evaluated at the midpoint of these
* values and the bound that gives the same sign in the function evaluation is
* replaced. The bisection is stopped when the change in the value of $x$ is
* below the accuracy, or the evaluation of the function at $x$ is zero.
*/
public class BisectionSingleRootFinder extends RealSingleRootFinder {
  private static final int MAX_ITER = 100;
  private static final double ZERO = 1e-16;
  private final double _accuracy;

  /**
   * Sets the accuracy to 10<sup>-15</sup>
   */
  public BisectionSingleRootFinder() {
    this(1e-15);
  }

  /**
   * @param accuracy The required accuracy of the $x$-position of the root
   */
  public BisectionSingleRootFinder(final double accuracy) {
    _accuracy = Math.abs(accuracy);
  }

  /**
   * {@inheritDoc}
   * @throws MathException If the root is not found to the required accuracy in 100 attempts
   */
  @Override
  public Double getRoot(final Function1D<Double, Double> function, final Double x1, final Double x2) {
    checkInputs(function, x1, x2);
    final double y1 = function.evaluate(x1);
    double y = function.evaluate(x2);
    // Either endpoint may already be a root to within the accuracy.
    if (Math.abs(y) < _accuracy) {
      return x2;
    }
    if (Math.abs(y1) < _accuracy) {
      return x1;
    }
    // Orient the search so f is negative at xRoot and the signed step dx
    // points towards the opposite bracket end.
    double xRoot = y1 < 0 ? x1 : x2;
    double dx = y1 < 0 ? x2 - x1 : x1 - x2;
    int iterations = 0;
    while (iterations++ < MAX_ITER) {
      // Halve the interval and keep the half whose midpoint preserves the
      // sign convention.
      dx *= 0.5;
      final double xMid = xRoot + dx;
      y = function.evaluate(xMid);
      if (y <= 0) {
        xRoot = xMid;
      }
      // Converged either in x (step below accuracy) or in f (value ~ 0).
      if (Math.abs(dx) < _accuracy || Math.abs(y) < ZERO) {
        return xRoot;
      }
    }
    throw new MathException("Could not find root in " + MAX_ITER + " attempts");
  }
}
|
/**
 * Application-level routes.
 */
var Session = require( '../../session' );
module.exports = function (app) {
  // Logs the user out by clearing the session, then redirects home.
  app.get('/actions/logout', function( req, res ){
    Session.logout( req, res );
    res.redirect( '/' );
  });
};
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.