text
stringlengths 3
1.05M
|
|---|
#pragma once
#include <stdbool.h>
#include <stdint.h>
#include "files.h"
#include "ovbase.h"
/* Opaque API handle; created by api_init() and released by api_exit(). */
struct api;

/*
 * Parameters for a single API request.  The registered api_request_func
 * receives this struct and reports the outcome (including `err`) by
 * invoking the supplied completion callback.
 */
struct api_request_params {
  struct files files;   /* file set the request operates on */
  int layer;            /* target layer index */
  int frame_advance;    /* number of frames to advance */
  error err;            /* error slot used to report failures */
  void *userdata;       /* opaque pointer registered via api_set_callback() */
};

/* Completion callback invoked when a request has been handled. */
typedef void (*api_request_complete_func)(struct api_request_params *const params);
/* Request handler; presumably must eventually call `complete` — confirm in impl. */
typedef void (*api_request_func)(struct api_request_params *const params, api_request_complete_func const complete);

/* Allocate and initialize an instance into *api. */
NODISCARD error api_init(struct api **const api);
/* Register the request handler and the userdata passed back with each request. */
void api_set_callback(struct api *const api, api_request_func const callback, void *const userdata);
/* Refresh the shared mapped data. */
NODISCARD error api_update_mapped_data(struct api *const api);
/* Tear down the instance referenced by *api. */
NODISCARD error api_exit(struct api **const api);
/* True once the instance has been successfully initialized. */
NODISCARD bool api_initialized(struct api const *const api);
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#include "os/os.h"
#include "nimble/nimble_npl.h"

#include <sched.h>
#ifdef __cplusplus
extern "C" {
#endif
/**
 * Initialize a task.
 *
 * This function initializes the task structure pointed to by t,
 * clearing and setting its stack pointer, provides sane defaults
 * and sets the task as ready to run, and inserts it into the operating
 * system scheduler.
 *
 * @param t The task to initialize
 * @param name The name of the task to initialize
 * @param func The task function to call
 * @param arg The argument to pass to this task function
 * @param prio The priority at which to run this task
 * @param sanity_itvl The time at which this task should check in with the
 *                    sanity task. OS_WAIT_FOREVER means never check in
 *                    here. (Unused in this pthread-based port.)
 * @param stack_bottom A pointer to the bottom of a task's stack
 *                     (unused: pthreads manage their own stacks here)
 * @param stack_size The overall size of the task's stack (unused, see above)
 *
 * @return 0 on success, non-zero on failure.
 */
int
ble_npl_task_init(struct ble_npl_task *t, const char *name, ble_npl_task_func_t func,
                  void *arg, uint8_t prio, ble_npl_time_t sanity_itvl,
                  ble_npl_stack_t *stack_bottom, uint16_t stack_size)
{
    int err;
    pthread_attr_t attr;
    struct sched_param param;

    /* These parameters are accepted for API compatibility only. */
    (void)sanity_itvl;
    (void)stack_bottom;
    (void)stack_size;

    if ((t == NULL) || (func == NULL)) {
        return OS_INVALID_PARM;
    }

    err = pthread_attr_init(&attr);
    if (err) {
        return err;
    }

    /*
     * Configure round-robin scheduling at the requested priority.
     * BUG FIX: the original passed "¶m" — a mis-decoded HTML entity
     * for "&param" — which does not compile.
     */
    err = pthread_attr_getschedparam(&attr, &param);
    if (err) {
        goto out;
    }
    err = pthread_attr_setschedpolicy(&attr, SCHED_RR);
    if (err) {
        goto out;
    }
    param.sched_priority = prio;
    err = pthread_attr_setschedparam(&attr, &param);
    if (err) {
        goto out;
    }

    t->name = name;
    err = pthread_create(&t->handle, &attr, func, arg);

out:
    /*
     * FIX: the attribute object was leaked before.  It is no longer
     * needed once pthread_create() has run (or a step failed); destroying
     * it does not affect an already-created thread.
     */
    (void)pthread_attr_destroy(&attr);
    return err;
}
/*
 * Removes specified task by cancelling its backing pthread.
 * XXX
 * NOTE: This interface is currently experimental and not ready for common use
 */
int
ble_npl_task_remove(struct ble_npl_task *t)
{
    int rc;

    rc = pthread_cancel(t->handle);
    return rc;
}
/**
 * Return the number of tasks initialized.
 *
 * This pthread-based port does not keep a task registry, so the
 * reported count is always zero.
 *
 * @return number of tasks initialized
 */
uint8_t
ble_npl_task_count(void)
{
    uint8_t count = 0;

    return count;
}
/*
 * Identify the calling task by its pthread handle.  The handle is cast
 * to void* to fit the generic task-id interface (same cast as upstream;
 * assumes pthread_t is scalar on this platform).
 */
void *
ble_npl_get_current_task_id(void)
{
    pthread_t self = pthread_self();

    return (void *)self;
}
/* The "OS" is considered started as soon as the process is running. */
bool ble_npl_os_started(void)
{
    static const bool started = true;

    return started;
}
/*
 * Yield the processor to another runnable thread.
 *
 * FIX: pthread_yield() is a deprecated, non-portable GNU extension
 * (removed from glibc's public headers in newer releases);
 * sched_yield() is the standard POSIX equivalent.
 */
void ble_npl_task_yield(void)
{
    sched_yield();
}
/*
 * "Start" the scheduler by terminating the calling thread; worker
 * threads created via ble_npl_task_init() keep the process alive.
 *
 * FIX: the original passed the address of a local variable to
 * pthread_exit().  That storage lives on the exiting thread's stack and
 * is gone by the time a joiner could read it, so any pthread_join()
 * would receive a dangling pointer.  Pass NULL instead.
 */
void ble_npl_scheduler_start(void)
{
    pthread_exit(NULL);

    /*
     * Not reached: pthread_exit() never returns.  Kept as a defensive
     * fallback so this function can never fall off the end.
     */
    while (true) {
        ble_npl_task_yield();
    }
}
#ifdef __cplusplus
}
#endif
|
from turtle import Turtle
FONT = ("Courier", 20, "normal")
class ScoreBoard(Turtle):
    """Heads-up display turtle that shows the current level."""

    def __init__(self):
        super().__init__()
        # Start at level 1 with an invisible, lifted pen parked top-left.
        self.level = 1
        self.hideturtle()
        self.penup()
        self.goto(-280, 260)
        self.score_update()

    def score_update(self):
        """Erase the previous text and redraw the current level."""
        self.clear()
        label = f"Level: {self.level}"
        self.write(label, align="left", font=FONT)

    def update(self):
        """Advance to the next level and refresh the display."""
        self.level = self.level + 1
        self.score_update()

    def game_over(self):
        """Announce the end of the game in the middle of the screen."""
        self.goto(0, 0)
        self.write("GAME OVER", align="center", font=FONT)
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_is_nginx_installed(host):
    """The nginx-common package must be present on the host."""
    assert host.package('nginx-common').is_installed
def test_nginx_config_exists(host):
    """The main nginx configuration file must exist."""
    assert host.file('/etc/nginx/nginx.conf').exists
def test_nginx_user(host):
    """The www-data account exists and is locked down as a service user."""
    www_data = host.user('www-data')
    assert www_data.exists
    assert www_data.group == 'www-data'
    assert www_data.shell == '/usr/sbin/nologin'
    assert www_data.home == '/var/www'
def test_http_port(host):
    """nginx must be listening on TCP port 80 on all interfaces."""
    assert host.socket('tcp://0.0.0.0:80').is_listening
def test_nginx_service(host):
    """The nginx service is enabled at boot and currently running."""
    nginx = host.service('nginx')
    assert nginx.is_enabled
    assert nginx.is_running
|
# Interactive grade book: read each student's name and two grades, compute
# the average, then let the user look up individual grade records.
ficha = list()
while True:
    nome = str(input('Nome : '))
    nota1 = float(input('Nota 1 : '))
    nota2 = float(input('Nota 2 : '))
    media = (nota1 + nota2) / 2
    ficha.append([nome, [nota1, nota2], media])
    # FIX: indexing [0] on an empty reply raised IndexError; re-ask until
    # the answer starts with S or N.
    while True:
        resp = str(input('Quer continuar ? [S/N] : ')).strip().upper()
        if resp and resp[0] in 'SN':
            resp = resp[0]
            break
    if resp == 'N':
        break
print(ficha)
print('-=' * 30)
print(f'{"No.":<4} {"NOME":<10} {"MEDIA":>8}')
print('-' * 26)
for i, a in enumerate(ficha):
    print(f'{i:<4} {a[0]:<10} {a[2]:>8.1f}')
while True:
    print('-' * 35)
    # FIX: a non-numeric reply used to crash with ValueError.
    try:
        opc = int(input('Mostrar notas de qual aluno : [999 interrompe! '))
    except ValueError:
        print('Digite um número válido')
        continue
    if opc == 999:
        print('Finalizando')
        break
    # FIX: the original accepted negative indices (reverse indexing into the
    # list) and silently ignored out-of-range numbers.
    elif 0 <= opc < len(ficha):
        print(f'Notas de ficha {ficha[opc][0]} são {ficha[opc][1]}')
    else:
        print('Aluno inexistente, tente novamente')
print('Volte sempre')
|
// Express application bootstrap for the workout tracker.
// NOTE: middleware registration order matters — logging, body parsing and
// static serving must be installed before the route modules are mounted.
const express = require('express');
const mongoose = require('mongoose');
const logger = require('morgan');
// Use the platform-assigned port when deployed; fall back to 3000 locally.
const PORT = process.env.PORT || 3000;
const app = express();
app.use(logger('dev'));
app.use(express.urlencoded({ extended: true }));
app.use(express.json());
app.use(express.static('public'));
// Connect to MongoDB; the option flags silence mongoose deprecation
// warnings for this driver generation.
mongoose.connect(process.env.MONGODB_URI || 'mongodb://localhost/workout', {
  useNewUrlParser: true,
  useFindAndModify: false,
  useUnifiedTopology: true,
  useCreateIndex: true,
});
// routes
app.use(require('./routes/api-routes.js'));
app.use(require('./routes/html-routes.js'));
app.listen(PORT, () => {
  console.log(`App running on port ${PORT}!`);
});
|
/*
* Kendo UI Web v2014.1.318 (http://kendoui.com)
* Copyright 2014 Telerik AD. All rights reserved.
*
* Kendo UI Web commercial licenses may be obtained at
* http://www.telerik.com/purchase/license-agreement/kendo-ui-web
* If you do not own a commercial license, this file shall be governed by the
* GNU General Public License (GPL) version 3.
* For GPL requirements, please review: http://www.gnu.org/copyleft/gpl.html
*/
// Vendored, minified Kendo UI culture definition for "en-JM"
// (English – Jamaica): number/percent/currency formats plus Gregorian
// calendar names and date/time patterns.  Generated file — do not hand
// edit; obtain updates from the Kendo UI distribution.
!function(e,define){define([],e)}(function(){return function(e){var t=e.kendo||(e.kendo={cultures:{}});t.cultures["en-JM"]={name:"en-JM",numberFormat:{pattern:["-n"],decimals:2,",":",",".":".",groupSize:[3],percent:{pattern:["-n %","n %"],decimals:2,",":",",".":".",groupSize:[3],symbol:"%"},currency:{pattern:["-$n","$n"],decimals:2,",":",",".":".",groupSize:[3],symbol:"J$"}},calendars:{standard:{days:{names:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],namesAbbr:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],namesShort:["Su","Mo","Tu","We","Th","Fr","Sa"]},months:{names:["January","February","March","April","May","June","July","August","September","October","November","December",""],namesAbbr:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec",""]},AM:["AM","am","AM"],PM:["PM","pm","PM"],patterns:{d:"dd/MM/yyyy",D:"dddd, MMMM dd, yyyy",F:"dddd, MMMM dd, yyyy hh:mm:ss tt",g:"dd/MM/yyyy hh:mm tt",G:"dd/MM/yyyy hh:mm:ss tt",m:"MMMM dd",M:"MMMM dd",s:"yyyy'-'MM'-'dd'T'HH':'mm':'ss",t:"hh:mm tt",T:"hh:mm:ss tt",u:"yyyy'-'MM'-'dd HH':'mm':'ss'Z'",y:"MMMM, yyyy",Y:"MMMM, yyyy"},"/":"/",":":":",firstDay:0}}}}(this),window.kendo},"function"==typeof define&&define.amd?define:function(e,t){t()});
|
/**
* Copyright (c) 2015-present, Parse, LLC.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
// Un-mock the module under test and its direct collaborators so the real
// implementations run inside this suite.  Order matters: all dontMock /
// setMock calls must precede the require() calls below.
jest.dontMock('../decode');
jest.dontMock('../encode');
jest.dontMock('../ObjectStateMutations');
jest.dontMock('../ParseFile');
jest.dontMock('../ParseGeoPoint');
jest.dontMock('../ParseOp');
jest.dontMock('../ParseRelation');
jest.dontMock('../TaskQueue');
// Minimal stand-in for ParseObject: records only the className, which is
// all these tests need from it.
const mockObject = function (className) {
  this.className = className;
};
mockObject.registerSubclass = function () {};
jest.setMock('../ParseObject', mockObject);
const ObjectStateMutations = require('../ObjectStateMutations');
const ParseOps = require('../ParseOp');
const TaskQueue = require('../TaskQueue');
// Unit tests for ObjectStateMutations: server-data bookkeeping, pending
// op-set manipulation, and attribute estimation from ops + server data.
describe('ObjectStateMutations', () => {
  it('can apply server data', () => {
    const serverData = {};
    ObjectStateMutations.setServerData(serverData, { counter: 12 });
    expect(serverData).toEqual({ counter: 12 });
    // Setting a key to undefined removes it from the server data.
    ObjectStateMutations.setServerData(serverData, { counter: undefined });
    expect(serverData).toEqual({});
  });
  it('can set a pending op', () => {
    let pendingOps = [{}];
    const op = new ParseOps.IncrementOp(1);
    ObjectStateMutations.setPendingOp(pendingOps, 'counter', op);
    expect(pendingOps).toEqual([{ counter: op }]);
    // With multiple pending sets, the op lands in the most recent one.
    pendingOps = [{}, {}];
    ObjectStateMutations.setPendingOp(pendingOps, 'counter', op);
    expect(pendingOps).toEqual([{}, { counter: op }]);
    // A null op clears the pending entry for that key.
    ObjectStateMutations.setPendingOp(pendingOps, 'counter', null);
    expect(pendingOps).toEqual([{}, {}]);
  });
  it('can push a new pending state', () => {
    const pendingOps = [{}];
    ObjectStateMutations.pushPendingState(pendingOps);
    expect(pendingOps).toEqual([{}, {}]);
    ObjectStateMutations.pushPendingState(pendingOps);
    expect(pendingOps).toEqual([{}, {}, {}]);
  });
  it('can pop a pending state', () => {
    let pendingOps = [{}];
    let first = pendingOps[0];
    // Popping returns the first op set; the array always keeps at least one.
    expect(ObjectStateMutations.popPendingState(pendingOps)).toBe(first);
    expect(pendingOps).toEqual([{}]);
    const op = new ParseOps.IncrementOp(1);
    pendingOps = [{ counter: op }, {}, {}];
    first = pendingOps[0];
    expect(ObjectStateMutations.popPendingState(pendingOps)).toBe(first);
    expect(pendingOps).toEqual([{}, {}]);
  });
  it('can merge the first op set into the next', () => {
    let pendingOps = [{ counter: new ParseOps.SetOp(1), name: new ParseOps.SetOp('foo') }, {}];
    ObjectStateMutations.mergeFirstPendingState(pendingOps);
    expect(pendingOps).toEqual([
      { counter: new ParseOps.SetOp(1), name: new ParseOps.SetOp('foo') },
    ]);
    // Set(1) followed by Increment(1) collapses into Set(2).
    pendingOps = [{ counter: new ParseOps.SetOp(1) }, { counter: new ParseOps.IncrementOp(1) }];
    ObjectStateMutations.mergeFirstPendingState(pendingOps);
    expect(pendingOps).toEqual([{ counter: new ParseOps.SetOp(2) }]);
  });
  it('can estimate an attribute value', () => {
    const serverData = { counter: 12 };
    const pendingOps = [{ counter: new ParseOps.IncrementOp(2), name: new ParseOps.SetOp('foo') }];
    expect(
      ObjectStateMutations.estimateAttribute(
        serverData,
        pendingOps,
        'someClass',
        'someId',
        'counter'
      )
    ).toBe(14);
    expect(
      ObjectStateMutations.estimateAttribute(serverData, pendingOps, 'someClass', 'someId', 'name')
    ).toBe('foo');
    // A later pending set stacks on top of the earlier one.
    pendingOps.push({
      counter: new ParseOps.IncrementOp(1),
      name: new ParseOps.SetOp('override'),
    });
    expect(
      ObjectStateMutations.estimateAttribute(
        serverData,
        pendingOps,
        'someClass',
        'someId',
        'counter'
      )
    ).toBe(15);
    expect(
      ObjectStateMutations.estimateAttribute(serverData, pendingOps, 'someClass', 'someId', 'name')
    ).toBe('override');
    // Relation ops estimate to a ParseRelation bound to the owning object.
    pendingOps.push({ likes: new ParseOps.RelationOp([], []) });
    const relation = ObjectStateMutations.estimateAttribute(
      serverData,
      pendingOps,
      'someClass',
      'someId',
      'likes'
    );
    expect(relation.parent.id).toBe('someId');
    expect(relation.parent.className).toBe('someClass');
    expect(relation.key).toBe('likes');
  });
  it('can estimate all attributes', () => {
    const serverData = { counter: 12 };
    const pendingOps = [{ counter: new ParseOps.IncrementOp(2), name: new ParseOps.SetOp('foo') }];
    expect(
      ObjectStateMutations.estimateAttributes(serverData, pendingOps, 'someClass', 'someId')
    ).toEqual({
      counter: 14,
      name: 'foo',
    });
    pendingOps.push({
      counter: new ParseOps.IncrementOp(1),
      name: new ParseOps.SetOp('override'),
    });
    expect(
      ObjectStateMutations.estimateAttributes(serverData, pendingOps, 'someClass', 'someId')
    ).toEqual({
      counter: 15,
      name: 'override',
    });
    pendingOps.push({ likes: new ParseOps.RelationOp([], []) });
    const attributes = ObjectStateMutations.estimateAttributes(
      serverData,
      pendingOps,
      'someClass',
      'someId'
    );
    expect(attributes.likes.parent.id).toBe('someId');
    expect(attributes.likes.parent.className).toBe('someClass');
    expect(attributes.likes.key).toBe('likes');
  });
  it('can estimate attributes for nested documents', () => {
    // Dotted keys address fields inside nested objects.
    let serverData = { objectField: { counter: 10, letter: 'a' } };
    let pendingOps = [{ 'objectField.counter': new ParseOps.IncrementOp(2) }];
    expect(
      ObjectStateMutations.estimateAttributes(serverData, pendingOps, 'someClass', 'someId')
    ).toEqual({
      objectField: {
        counter: 12,
        letter: 'a',
      },
    });
    pendingOps = [{ 'objectField.counter': new ParseOps.SetOp(20) }];
    expect(
      ObjectStateMutations.estimateAttributes(serverData, pendingOps, 'someClass', 'someId')
    ).toEqual({
      objectField: {
        counter: 20,
        letter: 'a',
      },
    });
    // Missing intermediate objects are created on demand.
    serverData = {};
    pendingOps = [{ 'objectField.subField.subField.counter': new ParseOps.IncrementOp(20) }];
    expect(
      ObjectStateMutations.estimateAttributes(serverData, pendingOps, 'someClass', 'someId')
    ).toEqual({
      objectField: {
        subField: {
          subField: {
            counter: 20,
          },
        },
      },
    });
  });
  it('can commit changes from the server', () => {
    const serverData = {};
    const objectCache = {};
    ObjectStateMutations.commitServerChanges(serverData, objectCache, {
      name: 'foo',
      data: { count: 5 },
    });
    expect(serverData).toEqual({ name: 'foo', data: { count: 5 } });
    // Nested objects are additionally cached as JSON strings.
    expect(objectCache).toEqual({ data: '{"count":5}' });
  });
  it('can generate a default state for implementations', () => {
    expect(ObjectStateMutations.defaultState()).toEqual({
      serverData: {},
      pendingOps: [{}],
      objectCache: {},
      tasks: new TaskQueue(),
      existed: false,
    });
  });
});
|
from fpa import __version__
def test_version():
    """The package advertises the expected semantic version."""
    expected = '0.1.0'
    assert __version__ == expected
|
//
// Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Jun 10 2020 10:03:13).
//
// Copyright (C) 1997-2019 Steve Nygard.
//
#import <objc/NSObject.h>
// Class-dump reconstruction of the TVMusicContextMenuFactory interface:
// a factory of context-menu data for music/video entities (songs, albums,
// artists, playlists, …).  Generated header — method bodies are not
// available; the IMP addresses refer to the dumped binary.
@interface TVMusicContextMenuFactory : NSObject
{
}
+ (id)_contextMenuForArtist:(id)arg1; // IMP=0x00000001000605f0
+ (id)_contextMenuForMovie:(id)arg1; // IMP=0x000000010006027c
+ (id)_contextMenuForTVEpisode:(id)arg1; // IMP=0x000000010005ff80
+ (id)_menuDataWithTitle:(id)arg1 addItemsBlock:(CDUnknownBlockType)arg2; // IMP=0x000000010005fea4
+ (id)_contextMenuForPlaylist:(id)arg1; // IMP=0x000000010005f988
+ (id)_contextMenuForAlbum:(id)arg1; // IMP=0x000000010005f4c0
+ (id)_contextMenuForSong:(id)arg1; // IMP=0x000000010005ed34
+ (_Bool)_hasCloudLibrary; // IMP=0x000000010005ecdc
+ (void)_presentViewController:(id)arg1; // IMP=0x000000010005ebf4
+ (void)_pushViewController:(id)arg1; // IMP=0x000000010005eb10
+ (void)_performWithAppInForeground:(CDUnknownBlockType)arg1; // IMP=0x000000010005e684
+ (id)_navigationController; // IMP=0x000000010005e628
+ (void)_openJavascriptWithURL:(id)arg1; // IMP=0x000000010005e548
+ (void)_removeLibraryItemItemMatchingIdentifiers:(id)arg1 itemKind:(id)arg2; // IMP=0x000000010005e380
+ (void)_findLibraryItemMatchingIdentifiers:(id)arg1 itemKind:(id)arg2 completionHandler:(CDUnknownBlockType)arg3; // IMP=0x000000010005e190
+ (void)_removeItemFromLibraryWithStoreID:(long long)arg1 itemKind:(id)arg2; // IMP=0x000000010005e0fc
+ (void)_addTrackWithAdamIDToLibrary:(long long)arg1 completionBlock:(CDUnknownBlockType)arg2; // IMP=0x000000010005debc
+ (void)_checkLibraryForTrackWithAdamID:(long long)arg1 resultBlock:(CDUnknownBlockType)arg2; // IMP=0x000000010005dd98
+ (id)_player; // IMP=0x000000010005dd3c
+ (id)_propertiesForGenericObjectType:(long long)arg1; // IMP=0x000000010005dc24
+ (id)_kindForGenericObjectType:(long long)arg1; // IMP=0x000000010005db08
+ (id)_menuItemForGoToMovieWithStoreID:(long long)arg1; // IMP=0x000000010005d97c
+ (id)_menuItemForGoToMovie:(id)arg1; // IMP=0x000000010005d85c
+ (id)_menuItemForGoToTVShowWithEpisodeStoreID:(long long)arg1; // IMP=0x000000010005d6d0
+ (id)_menuItemForGoToTVShow:(id)arg1; // IMP=0x000000010005d5b0
+ (id)_menuItemForCatalogShuffle; // IMP=0x000000010005d4f0
+ (id)_menuItemForCatalogPlaylistUnsubscribe:(id)arg1; // IMP=0x000000010005d190
+ (id)_menuItemForCatalogPlaylistSubscribe:(id)arg1; // IMP=0x000000010005ce00
+ (id)_menuItemForSongLyrics:(id)arg1; // IMP=0x000000010005cb84
+ (id)_menuItemForAddToPlaylist:(id)arg1; // IMP=0x000000010005c9c0
+ (void)_goToPlaylistAction:(id)arg1; // IMP=0x000000010005c758
+ (id)_menuItemForGoToArtist:(id)arg1; // IMP=0x000000010005c408
+ (id)_menuItemForLibrarySongGoToArtist:(id)arg1; // IMP=0x000000010005bdcc
+ (void)_catalogSongGoToAlbumActon:(id)arg1; // IMP=0x000000010005bb44
+ (id)_menuItemGoToPlaylistWithSong:(id)arg1; // IMP=0x000000010005b9bc
+ (id)_menuItemGoToPlaylist:(id)arg1; // IMP=0x000000010005b788
+ (id)_menuItemForCatalogSongGoToAlbum:(id)arg1; // IMP=0x000000010005b4d4
+ (id)_menuItemForCreateStationFromArtist:(id)arg1; // IMP=0x000000010005b2f0
+ (id)_menuItemForCreateStationFromAlbum:(id)arg1; // IMP=0x000000010005b10c
+ (id)_menuItemForCreateStationFromSong:(id)arg1; // IMP=0x000000010005af14
+ (id)_menuItemForAddMPObject:(id)arg1; // IMP=0x000000010005ab24
+ (id)_menuItemForRemoveMPObject:(id)arg1 title:(id)arg2; // IMP=0x000000010005a924
+ (id)_menuItemForAddOrRemoveMPObject:(id)arg1 title:(id)arg2 inLibrary:(_Bool)arg3; // IMP=0x000000010005a8a0
+ (id)_menuItemForAddSong:(id)arg1; // IMP=0x000000010005a834
+ (id)_menuItemForRemoveSong:(id)arg1; // IMP=0x000000010005a7ac
+ (id)_menuItemForRemoveAlbumFromLibrary:(id)arg1; // IMP=0x000000010005a530
+ (id)_menuItemForAddAlbumToLibrary:(id)arg1; // IMP=0x000000010005a0cc
+ (id)_menuItemForAddOrRemoveAlbum:(id)arg1; // IMP=0x000000010005a038
+ (void)_showLyricsForContextMenuSubject:(id)arg1; // IMP=0x0000000100059ddc
+ (id)_menuItemForLibraryPlayback:(id)arg1 itemType:(unsigned long long)arg2 persistentID:(long long)arg3 playbackAction:(unsigned long long)arg4; // IMP=0x0000000100059bd8
+ (id)_menuItemForPlaySong:(id)arg1 inContainer:(id)arg2 playbackAction:(unsigned long long)arg3; // IMP=0x0000000100059870
+ (id)_menuItemForAddToUpNext:(id)arg1 actionBlock:(CDUnknownBlockType)arg2; // IMP=0x00000001000595ac
+ (id)_menuItemForPlayNext:(id)arg1 actionBlock:(CDUnknownBlockType)arg2; // IMP=0x000000010005928c
+ (id)_menuItemForTasteType:(long long)arg1 tasteTitleKey:(id)arg2 neutralTitleKey:(id)arg3 mpObject:(id)arg4; // IMP=0x00000001000590a0
+ (id)_menuItemForHateMPObject:(id)arg1; // IMP=0x000000010005903c
+ (id)_menuItemForLoveMPObject:(id)arg1; // IMP=0x0000000100058fd8
+ (id)_menuItemForRemoveMPObjectFromLibrary:(id)arg1 title:(id)arg2; // IMP=0x0000000100058dc0
+ (void)_deleteConfirmationDialogWithTitle:(id)arg1 CompletionHandler:(CDUnknownBlockType)arg2; // IMP=0x0000000100058a8c
+ (id)_menuTitleForMPSong:(id)arg1; // IMP=0x0000000100058938
+ (id)_menuItemForLibraryRemoveSong:(id)arg1 fromPlaylist:(id)arg2 atPosition:(long long)arg3; // IMP=0x00000001000585f0
+ (void)_showLibraryArtistWithPID:(long long)arg1 completion:(CDUnknownBlockType)arg2; // IMP=0x0000000100058360
+ (id)_menuItemForLibraryGoToAlbumWithSong:(id)arg1; // IMP=0x0000000100057df4
+ (void)_findAlbumWithPersistentID:(long long)arg1 completionHandler:(CDUnknownBlockType)arg2; // IMP=0x0000000100057cbc
+ (id)_contextMenuForRadioStation:(id)arg1; // IMP=0x0000000100057a6c
+ (id)_contextMenuForMPTVSeason:(id)arg1; // IMP=0x0000000100057800
+ (void)_contextMenuForLibraryPlaylistEntry:(id)arg1 completionHandler:(CDUnknownBlockType)arg2; // IMP=0x00000001000572d4
+ (void)_contextMenuForLibraryObject:(id)arg1 inContainer:(id)arg2 completion:(CDUnknownBlockType)arg3; // IMP=0x0000000100057174
// Public entry points (no leading underscore) follow.
+ (id)contextMenuForSubject:(id)arg1; // IMP=0x0000000100056e88
+ (void)contextMenuForLibraryPlaylistEntry:(id)arg1 inPlaylist:(id)arg2 atPosition:(long long)arg3 completionHandler:(CDUnknownBlockType)arg4; // IMP=0x0000000100056ce0
+ (void)contextMenuForLibraryObject:(id)arg1 inContainer:(id)arg2 completionHandler:(CDUnknownBlockType)arg3; // IMP=0x0000000100056798
+ (void)contextMenuForLibraryObject:(id)arg1 completionHandler:(CDUnknownBlockType)arg2; // IMP=0x0000000100056744
+ (void)contextMenuForStoreObject:(id)arg1 inContainer:(id)arg2 isInLibrary:(_Bool)arg3 playbackReportingDictionary:(id)arg4 completionHandler:(CDUnknownBlockType)arg5; // IMP=0x0000000100056584
+ (void)contextMenuForMediaItem:(id)arg1 completionHandler:(CDUnknownBlockType)arg2; // IMP=0x000000010005632c
+ (void)initialize; // IMP=0x00000001000562f8
@end
|
import unittest
from src.data.models.election_event import ElectionEvent
from src.data.repositories.mock_election_result_repository import MockElectionResultRepository
from src.services.election_result_service import ElectionResultService
from src.services.election_event_service import ElectionEventService
class ElectionEventServiceTestCases(unittest.TestCase):
    """Exercise ElectionEventService against the mock result repository."""

    def setUp(self) -> None:
        repository = MockElectionResultRepository()
        self.mock_election_result_repository = repository
        self.election_result_service = ElectionResultService(repository)
        self.election_event_service = ElectionEventService(self.election_result_service)

    def test_get_election_years(self):
        """All four mock election years come back in order."""
        self.assertEqual(
            ["1993", "1997", "2001", "2005"],
            self.election_event_service.get_election_years(),
        )

    def test_get_election_events(self):
        """Every year/state pair yields its three county events."""
        expected = [
            ElectionEvent(year, state, f"County {n}")
            for year, state in (("1993", "MO"), ("1997", "FK"), ("2001", "MO"), ("2005", "FK"))
            for n in (1, 2, 3)
        ]
        self.assertEqual(expected, self.election_event_service.get_election_events())

    def test_get_counties_for_state(self):
        """Both states report the same three counties."""
        counties = ["County 1", "County 2", "County 3"]
        for state in ("MO", "FK"):
            self.assertEqual(
                counties,
                self.election_event_service.get_counties_for_state(state),
            )

    def test_get_localities(self):
        """Localities are 'county,state' strings in sorted order."""
        expected = ["County 1,FK", "County 1,MO", "County 2,FK", "County 2,MO", "County 3,FK", "County 3,MO"]
        self.assertEqual(expected, self.election_event_service.get_localities())
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from stub import hello_pb2 as hello__pb2
# grpcio-generated client stub for hello.HelloService — do not hand edit.
class HelloServiceStub(object):
    # missing associated documentation comment in .proto file
    pass

    def __init__(self, channel):
        """Constructor.
        Args:
        channel: A grpc.Channel.
        """
        # Each attribute is a callable handle for one unary-unary RPC,
        # bound to its protobuf (de)serializers.
        self.SayHello = channel.unary_unary(
            '/hello.HelloService/SayHello',
            request_serializer=hello__pb2.HelloReq.SerializeToString,
            response_deserializer=hello__pb2.HelloResp.FromString,
        )
        self.SayHelloStrict = channel.unary_unary(
            '/hello.HelloService/SayHelloStrict',
            request_serializer=hello__pb2.HelloReq.SerializeToString,
            response_deserializer=hello__pb2.HelloResp.FromString,
        )
# grpcio-generated server base class — subclass and override the methods.
class HelloServiceServicer(object):
    # missing associated documentation comment in .proto file
    pass

    def SayHello(self, request, context):
        """This thing just says Hello to anyone
        SayHello('Euler') -> Hello, Euler!
        """
        # Generated placeholder: unimplemented calls surface UNIMPLEMENTED
        # to the client before the NotImplementedError is raised server-side.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def SayHelloStrict(self, request, context):
        """Strict Version responds only to requests which have `Name` length
        less than 10 characters
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_HelloServiceServicer_to_server(servicer, server):
    # Map each RPC name to its servicer method together with the protobuf
    # (de)serializers, then register the bundle on the server under the
    # fully-qualified service name.
    rpc_method_handlers = {
        'SayHello': grpc.unary_unary_rpc_method_handler(
            servicer.SayHello,
            request_deserializer=hello__pb2.HelloReq.FromString,
            response_serializer=hello__pb2.HelloResp.SerializeToString,
        ),
        'SayHelloStrict': grpc.unary_unary_rpc_method_handler(
            servicer.SayHelloStrict,
            request_deserializer=hello__pb2.HelloReq.FromString,
            response_serializer=hello__pb2.HelloResp.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'hello.HelloService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
|
import _ from 'underscore';
import common from '../common';
import array from '../coordinates/array';
/**
 * Convert a geojs line annotation to the large_image
 * annotation schema.
 *
 * @param {object} annotation - geojs annotation wrapper
 * @returns {object} a large_image "polyline" element
 */
function line(annotation) {
    const base = common(annotation, 'polyline');
    const points = array(annotation.coordinates());
    const isClosed = !!annotation.style('closed');
    return _.extend(base, {
        type: 'polyline',
        closed: isClosed,
        points: points
    });
}
export default line;
|
from typing import overload
from UdonPie import System
from UdonPie import UnityEngine
from UdonPie.Undefined import *
class SpriteState:
    """Auto-generated UdonPie typing stub for UnityEngine.UI.SpriteState.

    Every method body is ``pass``: only the signatures and docstrings are
    meaningful — they exist for editor tooling/type checking, and the real
    behavior is provided by the Udon runtime.
    """
    def __new__(cls, arg1=None):
        '''
        :returns: SpriteState
        :rtype: UnityEngine.UI.SpriteState
        '''
        pass

    @staticmethod
    def get_highlightedSprite():
        '''
        :returns: Sprite
        :rtype: UnityEngine.Sprite
        '''
        pass

    @staticmethod
    def set_highlightedSprite(arg1):
        '''
        :param arg1: Sprite
        :type arg1: UnityEngine.Sprite
        '''
        pass

    @staticmethod
    def get_pressedSprite():
        '''
        :returns: Sprite
        :rtype: UnityEngine.Sprite
        '''
        pass

    @staticmethod
    def set_pressedSprite(arg1):
        '''
        :param arg1: Sprite
        :type arg1: UnityEngine.Sprite
        '''
        pass

    @staticmethod
    def get_disabledSprite():
        '''
        :returns: Sprite
        :rtype: UnityEngine.Sprite
        '''
        pass

    @staticmethod
    def set_disabledSprite(arg1):
        '''
        :param arg1: Sprite
        :type arg1: UnityEngine.Sprite
        '''
        pass

    # The two @overload stubs below document the C# overload set; the
    # final un-decorated Equals is the one that remains bound at runtime.
    @staticmethod
    @overload
    def Equals(arg1):
        '''
        :param arg1: SpriteState
        :type arg1: UnityEngine.SpriteState
        :returns: Boolean
        :rtype: System.Boolean
        '''
        pass

    @staticmethod
    @overload
    def Equals(arg1):
        '''
        :param arg1: Object
        :type arg1: System.Object
        :returns: Boolean
        :rtype: System.Boolean
        '''
        pass

    @staticmethod
    def Equals(arg1=None):
        pass

    @staticmethod
    def ToString():
        '''
        :returns: String
        :rtype: System.String
        '''
        pass

    @staticmethod
    def GetHashCode():
        '''
        :returns: Int32
        :rtype: System.Int32
        '''
        pass

    @staticmethod
    def GetType():
        '''
        :returns: Type
        :rtype: System.Type
        '''
        pass
|
/**
* Owl carousel
* @version 2.3.4
* @author Bartosz Wojciechowski
* @author David Deutsch
* @license The MIT License (MIT)
* @todo Lazy Load Icon
* @todo prevent animationend bubling
* @todo itemsScaleUp
* @todo Test Zepto
* @todo stagePadding calculate wrong active classes
*/
;(function($, window, document, undefined) {
/**
 * Creates a carousel.
 * @class The Owl Carousel.
 * @public
 * @param {HTMLElement|jQuery} element - The element to create the carousel for.
 * @param {Object} [options] - The options
 */
function Owl(element, options) {
  /**
   * Current settings for the carousel.
   * @public
   */
  this.settings = null;
  /**
   * Current options set by the caller including defaults.
   * @public
   */
  this.options = $.extend({}, Owl.Defaults, options);
  /**
   * Plugin element.
   * @public
   */
  this.$element = $(element);
  /**
   * Proxied event handlers.
   * @protected
   */
  this._handlers = {};
  /**
   * References to the running plugins of this carousel.
   * @protected
   */
  this._plugins = {};
  /**
   * Currently suppressed events to prevent them from being retriggered.
   * @protected
   */
  this._supress = {};
  /**
   * Absolute current position.
   * @protected
   */
  this._current = null;
  /**
   * Animation speed in milliseconds.
   * @protected
   */
  this._speed = null;
  /**
   * Coordinates of all items in pixel.
   * @todo The name of this member is missleading.
   * @protected
   */
  this._coordinates = [];
  /**
   * Current breakpoint.
   * @todo Real media queries would be nice.
   * @protected
   */
  this._breakpoint = null;
  /**
   * Current width of the plugin element.
   */
  this._width = null;
  /**
   * All real items.
   * @protected
   */
  this._items = [];
  /**
   * All cloned items.
   * @protected
   */
  this._clones = [];
  /**
   * Merge values of all items.
   * @todo Maybe this could be part of a plugin.
   * @protected
   */
  this._mergers = [];
  /**
   * Widths of all items.
   */
  this._widths = [];
  /**
   * Invalidated parts within the update process.
   * @protected
   */
  this._invalidated = {};
  /**
   * Ordered list of workers for the update process.
   * @protected
   */
  this._pipe = [];
  /**
   * Current state information for the drag operation.
   * @todo #261
   * @protected
   */
  this._drag = {
    time: null,
    target: null,
    pointer: null,
    stage: {
      start: null,
      current: null
    },
    direction: null
  };
  /**
   * Current state information and their tags.
   * @type {Object}
   * @protected
   */
  this._states = {
    current: {},
    tags: {
      'initializing': [ 'busy' ],
      'animating': [ 'busy' ],
      'dragging': [ 'interacting' ]
    }
  };
  // Pre-bind the resize handlers so they can be attached and detached later.
  $.each([ 'onResize', 'onThrottledResize' ], $.proxy(function(i, handler) {
    this._handlers[handler] = $.proxy(this[handler], this);
  }, this));
  // Instantiate every registered plugin, keyed by its lower-camel-case name.
  $.each(Owl.Plugins, $.proxy(function(key, plugin) {
    this._plugins[key.charAt(0).toLowerCase() + key.slice(1)]
      = new plugin(this);
  }, this));
  // Build the update pipeline from the registered workers.
  $.each(Owl.Workers, $.proxy(function(priority, worker) {
    this._pipe.push({
      'filter': worker.filter,
      'run': $.proxy(worker.run, this)
    });
  }, this));
  this.setup();
  this.initialize();
}
/**
 * Default options for the carousel.
 * @public
 */
Owl.Defaults = {
  items: 3,                        // visible items per page
  loop: false,
  center: false,
  rewind: false,
  checkVisibility: true,
  mouseDrag: true,
  touchDrag: true,
  pullDrag: true,
  freeDrag: false,
  margin: 0,
  stagePadding: 0,
  merge: false,
  mergeFit: true,
  autoWidth: false,
  startPosition: 0,
  rtl: false,
  smartSpeed: 250,                 // animation speed in milliseconds
  fluidSpeed: false,
  dragEndSpeed: false,
  responsive: {},
  responsiveRefreshRate: 200,
  responsiveBaseElement: window,
  fallbackEasing: 'swing',
  slideTransition: '',
  info: false,
  nestedItemSelector: false,
  itemElement: 'div',
  stageElement: 'div',
  // CSS class names applied by the core and its plugins.
  refreshClass: 'owl-refresh',
  loadedClass: 'owl-loaded',
  loadingClass: 'owl-loading',
  rtlClass: 'owl-rtl',
  responsiveClass: 'owl-responsive',
  dragClass: 'owl-drag',
  itemClass: 'owl-item',
  stageClass: 'owl-stage',
  stageOuterClass: 'owl-stage-outer',
  grabClass: 'owl-grab'
};
/**
 * Enumeration for width.
 * @public
 * @readonly
 * @enum {String}
 */
Owl.Width = {
  Default: 'default',
  Inner: 'inner',
  Outer: 'outer'
};
/**
 * Enumeration for types.
 * @public
 * @readonly
 * @enum {String}
 */
Owl.Type = {
  Event: 'event',
  State: 'state'
};
/**
 * Contains all registered plugins.
 * @public
 */
// NOTE(review): presumably populated by the individual plugin files
// assigning onto this object — confirm against the full distribution.
Owl.Plugins = {};
/**
 * List of workers involved in the update process.
 * Each worker declares which invalidated parts (`filter`) it cares about;
 * `update()` runs it only when one of those parts changed. Workers run in
 * order and communicate through the shared `cache` object.
 */
Owl.Workers = [ {
// caches the current outer width of the carousel root element
filter: [ 'width', 'settings' ],
run: function() {
this._width = this.$element.width();
}
}, {
// remembers the currently selected item so it can be restored after rebuild
filter: [ 'width', 'items', 'settings' ],
run: function(cache) {
cache.current = this._items && this._items[this.relative(this._current)];
}
}, {
// drops all clones; they are rebuilt from scratch further down the pipe
filter: [ 'items', 'settings' ],
run: function() {
this.$stage.children('.cloned').remove();
}
}, {
// computes the per-item margin CSS (side depends on rtl) and caches it
filter: [ 'width', 'items', 'settings' ],
run: function(cache) {
var margin = this.settings.margin || '',
grid = !this.settings.autoWidth,
rtl = this.settings.rtl,
css = {
'width': 'auto',
'margin-left': rtl ? margin : '',
'margin-right': rtl ? '' : margin
};
// in autoWidth mode items keep their natural width
!grid && this.$stage.children().css(css);
cache.css = css;
}
}, {
// calculates the width of one item slot and per-item widths (merge aware)
filter: [ 'width', 'items', 'settings' ],
run: function(cache) {
// NOTE(review): toFixed(3) returns a string; the subtraction coerces it
// back to a number — works, but relies on implicit conversion
var width = (this.width() / this.settings.items).toFixed(3) - this.settings.margin,
merge = null,
iterator = this._items.length,
grid = !this.settings.autoWidth,
widths = [];
cache.items = {
merge: false,
width: width
};
while (iterator--) {
merge = this._mergers[iterator];
// mergeFit caps a merged item at the number of visible items
merge = this.settings.mergeFit && Math.min(merge, this.settings.items) || merge;
cache.items.merge = merge > 1 || cache.items.merge;
widths[iterator] = !grid ? this._items[iterator].width() : width * merge;
}
this._widths = widths;
}
}, {
// creates clones before and after the real items so looping appears seamless
filter: [ 'items', 'settings' ],
run: function() {
var clones = [],
items = this._items,
settings = this.settings,
// TODO: Should be computed from number of min width items in stage
view = Math.max(settings.items * 2, 4),
size = Math.ceil(items.length / 2) * 2,
repeat = settings.loop && items.length ? settings.rewind ? view : Math.max(view, size) : 0,
append = '',
prepend = '';
// NOTE(review): `append`/`prepend` are never used below — leftover locals
repeat /= 2;
while (repeat > 0) {
// Switch to only using appended clones
clones.push(this.normalize(clones.length / 2, true));
$(items[clones[clones.length - 1]][0]).clone(true).addClass('cloned').appendTo(this.$stage);
clones.push(this.normalize(items.length - 1 - (clones.length - 1) / 2, true));
$(items[clones[clones.length - 1]][0]).clone(true).addClass('cloned').prependTo(this.$stage);
repeat -= 1;
}
this._clones = clones;
}
}, {
// computes the running pixel coordinate of every (real and cloned) item
filter: [ 'width', 'items', 'settings' ],
run: function() {
var rtl = this.settings.rtl ? 1 : -1,
size = this._clones.length + this._items.length,
iterator = -1,
previous = 0,
current = 0,
coordinates = [];
while (++iterator < size) {
previous = coordinates[iterator - 1] || 0;
current = this._widths[this.relative(iterator)] + this.settings.margin;
coordinates.push(previous + current * rtl);
}
this._coordinates = coordinates;
}
}, {
// sizes the stage to hold all items plus the configured padding
filter: [ 'width', 'items', 'settings' ],
run: function() {
var padding = this.settings.stagePadding,
coordinates = this._coordinates,
css = {
'width': Math.ceil(Math.abs(coordinates[coordinates.length - 1])) + padding * 2,
'padding-left': padding || '',
'padding-right': padding || ''
};
this.$stage.css(css);
}
}, {
// applies the cached CSS (with per-item width when items merge) to children
filter: [ 'width', 'items', 'settings' ],
run: function(cache) {
var iterator = this._coordinates.length,
grid = !this.settings.autoWidth,
items = this.$stage.children();
if (grid && cache.items.merge) {
// merged items differ in width, so each one gets its own value
while (iterator--) {
cache.css.width = this._widths[this.relative(iterator)];
items.eq(iterator).css(cache.css);
}
} else if (grid) {
cache.css.width = cache.items.width;
items.css(cache.css);
}
}
}, {
// clears inline stage styles once the carousel has no items left
filter: [ 'items' ],
run: function() {
this._coordinates.length < 1 && this.$stage.removeAttr('style');
}
}, {
// re-resolves the remembered item against the rebuilt stage and clamps it
filter: [ 'width', 'items', 'settings' ],
run: function(cache) {
cache.current = cache.current ? this.$stage.children().index(cache.current) : 0;
cache.current = Math.max(this.minimum(), Math.min(this.maximum(), cache.current));
this.reset(cache.current);
}
}, {
// moves the stage to the coordinate of the current item
filter: [ 'position' ],
run: function() {
this.animate(this.coordinates(this._current));
}
}, {
// marks items inside the visible viewport 'active' (and 'center' if enabled)
filter: [ 'width', 'position', 'items', 'settings' ],
run: function() {
var rtl = this.settings.rtl ? 1 : -1,
padding = this.settings.stagePadding * 2,
begin = this.coordinates(this.current()) + padding,
end = begin + this.width() * rtl,
inner, outer, matches = [], i, n;
for (i = 0, n = this._coordinates.length; i < n; i++) {
inner = this._coordinates[i - 1] || 0;
outer = Math.abs(this._coordinates[i]) + padding * rtl;
// an item is active when either of its edges lies inside [begin, end)
// (comparisons go through op() so rtl flips them consistently)
if ((this.op(inner, '<=', begin) && (this.op(inner, '>', end)))
|| (this.op(outer, '<', begin) && this.op(outer, '>', end))) {
matches.push(i);
}
}
this.$stage.children('.active').removeClass('active');
this.$stage.children(':eq(' + matches.join('), :eq(') + ')').addClass('active');
this.$stage.children('.center').removeClass('center');
if (this.settings.center) {
this.$stage.children().eq(this.current()).addClass('center');
}
}
} ];
/**
 * Creates the stage DOM element and its outer wrapper.
 * If a stage already exists in the markup it is reused as-is.
 */
Owl.prototype.initializeStage = function() {
    var settings = this.settings;

    this.$stage = this.$element.find('.' + settings.stageClass);

    // an existing stage means the markup was pre-rendered; nothing to build
    if (this.$stage.length > 0) {
        return;
    }

    this.$element.addClass(this.options.loadingClass);

    // build the stage element wrapped in its outer container
    this.$stage = $('<' + settings.stageElement + '>', {
        "class": settings.stageClass
    }).wrap($('<div/>', {
        "class": settings.stageOuterClass
    }));

    // attach the wrapper (and therefore the stage) to the carousel root
    this.$element.append(this.$stage.parent());
};
/**
 * Creates the item DOM elements.
 * Pre-rendered `.owl-item` elements are adopted directly; otherwise the
 * root element's children are turned into items via `replace`.
 */
Owl.prototype.initializeItems = function() {
    var $items = this.$element.find('.owl-item');

    // pre-rendered items: adopt them and refresh instead of rebuilding
    if ($items.length > 0) {
        this._items = $items.toArray().map(function(node) {
            return $(node);
        });
        // pre-rendered items cannot carry merge information; assume 1
        this._mergers = this._items.map(function() {
            return 1;
        });
        this.refresh();
        return;
    }

    // turn the root element's children into carousel items
    this.replace(this.$element.children().not(this.$stage.parent()));

    if (this.isVisible()) {
        // visible: lay everything out right away
        this.refresh();
    } else {
        // hidden: defer layout by invalidating the width
        this.invalidate('width');
    }

    this.$element
        .removeClass(this.options.loadingClass)
        .addClass(this.options.loadedClass);
};
/**
 * Initializes the carousel: applies rtl class, preloads auto-width images
 * when needed, builds stage and items and registers event handlers.
 * @protected
 */
Owl.prototype.initialize = function() {
    this.enter('initializing');
    this.trigger('initialize');

    this.$element.toggleClass(this.settings.rtlClass, this.settings.rtl);

    if (this.settings.autoWidth && !this.is('pre-loading')) {
        var $images = this.$element.find('img'),
            nested = this.settings.nestedItemSelector ? '.' + this.settings.nestedItemSelector : undefined,
            measured = this.$element.children(nested).width();

        // images with no measurable width yet must be preloaded first,
        // otherwise auto-width layout would measure zero-width items
        if ($images.length && measured <= 0) {
            this.preloadAutoWidthImages($images);
        }
    }

    this.initializeStage();
    this.initializeItems();

    this.registerEventHandlers();

    this.leave('initializing');
    this.trigger('initialized');
};
/**
 * @returns {Boolean} visibility of $element
 * if you know the carousel will always be visible you can set `checkVisibility` to `false` to
 * prevent the expensive browser layout forced reflow the $element.is(':visible') does
 */
Owl.prototype.isVisible = function() {
    if (!this.settings.checkVisibility) {
        return true;
    }
    return this.$element.is(':visible');
};
/**
 * Setups the current settings by merging the base options with the best
 * matching `responsive` breakpoint overwrite for the current viewport,
 * then fires `change`/`changed` events for the `settings` property.
 * @todo Remove responsive classes. Why should adaptive designs be brought into IE8?
 * @todo Support for media queries by using `matchMedia` would be nice.
 * @public
 */
Owl.prototype.setup = function() {
var viewport = this.viewport(),
overwrites = this.options.responsive,
match = -1,
settings = null;
if (!overwrites) {
settings = $.extend({}, this.options);
} else {
// pick the largest configured breakpoint that still fits the viewport
// (note: `breakpoint` is an object key, i.e. a string; `<=` and `>`
// rely on implicit numeric coercion here)
$.each(overwrites, function(breakpoint) {
if (breakpoint <= viewport && breakpoint > match) {
match = Number(breakpoint);
}
});
settings = $.extend({}, this.options, overwrites[match]);
if (typeof settings.stagePadding === 'function') {
settings.stagePadding = settings.stagePadding();
}
delete settings.responsive;
// responsive class: swap the `<responsiveClass>-<breakpoint>` suffix on the root element
if (settings.responsiveClass) {
this.$element.attr('class',
this.$element.attr('class').replace(new RegExp('(' + this.options.responsiveClass + '-)\\S+\\s', 'g'), '$1' + match)
);
}
}
this.trigger('change', { property: { name: 'settings', value: settings } });
this._breakpoint = match;
this.settings = settings;
this.invalidate('settings');
this.trigger('changed', { property: { name: 'settings', value: this.settings } });
};
/**
 * Updates option logic if necessary and (re)binds the drag handlers on the
 * stage so they match the current `mouseDrag`/`touchDrag` settings.
 * @protected
 */
Owl.prototype.updateOptionsLogic = function() {
    // autoWidth is incompatible with stage padding and merged items
    if (this.settings.autoWidth) {
        this.settings.stagePadding = false;
        this.settings.merge = false;
    }
    if (this.settings.mouseDrag) {
        this.$element.addClass(this.options.dragClass);
        this.$stage.on('mousedown.owl.core', $.proxy(this.onDragStart, this));
        // suppress native drag/select so mouse dragging feels natural
        this.$stage.on('dragstart.owl.core selectstart.owl.core', function() { return false });
    } else {
        this.$element.removeClass(this.options.dragClass);
        this.$element.removeClass(this.options.grabClass);
        this.$stage.off('mousedown.owl.core');
        this.$stage.off('dragstart.owl.core selectstart.owl.core');
    }
    if (this.settings.touchDrag){
        this.$stage.on('touchstart.owl.core', $.proxy(this.onDragStart, this));
        this.$stage.on('touchcancel.owl.core', $.proxy(this.onDragEnd, this));
    } else {
        // fix: unbind the touch handler, not 'mousedown.owl.core' — the old
        // code silently disabled mouse dragging whenever touchDrag was off
        this.$stage.off('touchstart.owl.core');
        this.$stage.off('touchcancel.owl.core');
    }
};
/**
 * Prepares an item before add: wraps the raw content in an item container
 * unless a `prepare` event listener already supplied one via `event.data`.
 * @todo Rename event parameter `content` to `item`.
 * @protected
 * @returns {jQuery|HTMLElement} - The item container.
 */
Owl.prototype.prepare = function(item) {
    var event = this.trigger('prepare', { content: item });

    if (!event.data) {
        event.data = $('<' + this.settings.itemElement + '/>')
            .addClass(this.options.itemClass)
            .append(item);
    }

    this.trigger('prepared', { content: event.data });

    return event.data;
};
/**
 * Updates the view by running every worker in the pipe whose filter
 * intersects the currently invalidated parts, then marks the carousel valid.
 * @public
 */
Owl.prototype.update = function() {
    var index,
        total = this._pipe.length,
        invalidated = this._invalidated,
        cache = {};

    for (index = 0; index < total; index++) {
        var worker = this._pipe[index],
            relevant = invalidated.all || $.grep(worker.filter, function(part) {
                return invalidated[part];
            }).length > 0;

        if (relevant) {
            worker.run(cache);
        }
    }

    this._invalidated = {};

    !this.is('valid') && this.enter('valid');
};
/**
 * Gets the width of the view.
 * @public
 * @param {Owl.Width} [dimension=Owl.Width.Default] - The dimension to return.
 * @returns {Number} - The width of the view in pixel.
 */
Owl.prototype.width = function(dimension) {
    switch (dimension || Owl.Width.Default) {
        case Owl.Width.Inner:
        case Owl.Width.Outer:
            return this._width;
        default:
            // default width excludes stage padding but includes one margin
            return this._width - this.settings.stagePadding * 2 + this.settings.margin;
    }
};
/**
 * Refreshes the carousel primarily for adaptive purposes.
 * @public
 * @param {Boolean} [resizing=false] - Whether this refresh was triggered by a resize.
 */
Owl.prototype.refresh = function(resizing) {
    this.enter('refreshing');
    this.trigger('refresh');

    this.setup();
    this.updateOptionsLogic();

    this.$element.addClass(this.options.refreshClass);

    this.update();

    // a plain refresh must also re-check the element width
    if (!resizing) {
        this.onResize();
    }

    this.$element.removeClass(this.options.refreshClass);

    this.leave('refreshing');
    this.trigger('refreshed');
};
/**
 * Debounces window `resize` events: restarts a timer so `onResize` only
 * fires once resizing has paused for `responsiveRefreshRate` milliseconds.
 * @protected
 */
Owl.prototype.onThrottledResize = function() {
    window.clearTimeout(this.resizeTimer);
    this.resizeTimer = window.setTimeout(
        this._handlers.onResize,
        this.settings.responsiveRefreshRate
    );
};
/**
 * Handles a (throttled) window `resize` event.
 * Bails out when there is nothing to do (no items, unchanged width,
 * hidden element) or when a `resize` listener prevents the default.
 * @protected
 */
Owl.prototype.onResize = function() {
    if (!this._items.length) {
        return false;
    }
    if (this._width === this.$element.width()) {
        return false;
    }
    if (!this.isVisible()) {
        return false;
    }

    this.enter('resizing');

    if (this.trigger('resize').isDefaultPrevented()) {
        this.leave('resizing');
        return false;
    }

    this.invalidate('width');
    // pass `resizing` so refresh() does not call back into onResize()
    this.refresh(true);

    this.leave('resizing');
    this.trigger('resized');
};
/**
 * Registers all core event handlers: transition end, throttled window
 * resize, and (when enabled) mouse/touch dragging on the stage.
 * @todo Check `msPointerEnabled`
 * @todo #261
 * @protected
 */
Owl.prototype.registerEventHandlers = function() {
    if ($.support.transition) {
        this.$stage.on($.support.transition.end + '.owl.core', $.proxy(this.onTransitionEnd, this));
    }

    if (this.settings.responsive !== false) {
        this.on(window, 'resize', this._handlers.onThrottledResize);
    }

    if (this.settings.mouseDrag) {
        this.$element.addClass(this.options.dragClass);
        this.$stage
            .on('mousedown.owl.core', $.proxy(this.onDragStart, this))
            // suppress native drag/select so mouse dragging feels natural
            .on('dragstart.owl.core selectstart.owl.core', function() { return false });
    }

    if (this.settings.touchDrag) {
        this.$stage
            .on('touchstart.owl.core', $.proxy(this.onDragStart, this))
            .on('touchcancel.owl.core', $.proxy(this.onDragEnd, this));
    }
};
/**
 * Handles `touchstart` and `mousedown` events.
 * Captures the current stage position, freezes any running animation at
 * that point, and arms one-shot move/up handlers on the document to decide
 * whether the gesture becomes a drag.
 * @todo Horizontal swipe threshold as option
 * @todo #261
 * @protected
 * @param {Event} event - The event arguments.
 */
Owl.prototype.onDragStart = function(event) {
var stage = null;
// ignore right-clicks
if (event.which === 3) {
return;
}
if ($.support.transform) {
// parse the current translation out of the CSS transform matrix;
// 16 values means matrix3d (x/y at 12/13), otherwise 2d matrix (4/5)
stage = this.$stage.css('transform').replace(/.*\(|\)| /g, '').split(',');
stage = {
x: stage[stage.length === 16 ? 12 : 4],
y: stage[stage.length === 16 ? 13 : 5]
};
} else {
// no CSS transform support: fall back to the stage's offset position
stage = this.$stage.position();
stage = {
x: this.settings.rtl ?
stage.left + this.$stage.width() - this.width() + this.settings.margin :
stage.left,
y: stage.top
};
}
if (this.is('animating')) {
// freeze the stage where it currently is and start dragging from there
$.support.transform ? this.animate(stage.x) : this.$stage.stop()
this.invalidate('position');
}
// show the grab cursor for mouse interaction only
this.$element.toggleClass(this.options.grabClass, event.type === 'mousedown');
this.speed(0);
this._drag.time = new Date().getTime();
this._drag.target = $(event.target);
this._drag.stage.start = stage;
this._drag.stage.current = stage;
this._drag.pointer = this.pointer(event);
$(document).on('mouseup.owl.core touchend.owl.core', $.proxy(this.onDragEnd, this));
// the first move decides: horizontal drag vs. vertical page scroll
$(document).one('mousemove.owl.core touchmove.owl.core', $.proxy(function(event) {
var delta = this.difference(this._drag.pointer, this.pointer(event));
$(document).on('mousemove.owl.core touchmove.owl.core', $.proxy(this.onDragMove, this));
// mostly vertical movement on a settled carousel: let the page scroll
if (Math.abs(delta.x) < Math.abs(delta.y) && this.is('valid')) {
return;
}
event.preventDefault();
this.enter('dragging');
this.trigger('drag');
}, this));
};
/**
 * Handles the `touchmove` and `mousemove` events while dragging.
 * Translates the pointer delta into a stage coordinate, wrapping it in
 * loop mode or clamping it (with optional pull resistance) otherwise.
 * @todo #261
 * @protected
 * @param {Event} event - The event arguments.
 */
Owl.prototype.onDragMove = function(event) {
var minimum = null,
maximum = null,
pull = null,
delta = this.difference(this._drag.pointer, this.pointer(event)),
stage = this.difference(this._drag.stage.start, delta);
if (!this.is('dragging')) {
return;
}
event.preventDefault();
if (this.settings.loop) {
// wrap the coordinate into [minimum, minimum + range)
minimum = this.coordinates(this.minimum());
maximum = this.coordinates(this.maximum() + 1) - minimum;
stage.x = (((stage.x - minimum) % maximum + maximum) % maximum) + minimum;
} else {
// clamp between first and last coordinate; `pull` adds rubber-band
// resistance past the edges when pullDrag is enabled
minimum = this.settings.rtl ? this.coordinates(this.maximum()) : this.coordinates(this.minimum());
maximum = this.settings.rtl ? this.coordinates(this.minimum()) : this.coordinates(this.maximum());
pull = this.settings.pullDrag ? -1 * delta.x / 5 : 0;
stage.x = Math.max(Math.min(stage.x, minimum + pull), maximum + pull);
}
this._drag.stage.current = stage;
this.animate(stage.x);
};
/**
 * Handles the `touchend` and `mouseup` events.
 * Snaps to the closest item in the drag direction and suppresses the
 * click that would otherwise fire right after a real drag.
 * @todo #261
 * @todo Threshold for click event
 * @protected
 * @param {Event} event - The event arguments.
 */
Owl.prototype.onDragEnd = function(event) {
var delta = this.difference(this._drag.pointer, this.pointer(event)),
stage = this._drag.stage.current,
// XOR flips the perceived direction in right-to-left mode
direction = delta.x > 0 ^ this.settings.rtl ? 'left' : 'right';
$(document).off('.owl.core');
this.$element.removeClass(this.options.grabClass);
if (delta.x !== 0 && this.is('dragging') || !this.is('valid')) {
this.speed(this.settings.dragEndSpeed || this.settings.smartSpeed);
// with no horizontal movement, reuse the direction of the previous drag
this.current(this.closest(stage.x, delta.x !== 0 ? direction : this._drag.direction));
this.invalidate('position');
this.update();
this._drag.direction = direction;
// treat as a drag (not a click) if it moved or took noticeable time
if (Math.abs(delta.x) > 3 || new Date().getTime() - this._drag.time > 300) {
this._drag.target.one('click.owl.core', function() { return false; });
}
}
if (!this.is('dragging')) {
return;
}
this.leave('dragging');
this.trigger('dragged');
};
/**
 * Gets absolute position of the closest item for a coordinate.
 * @todo Setting `freeDrag` makes `closest` not reusable. See #165.
 * @protected
 * @param {Number} coordinate - The coordinate in pixel.
 * @param {String} direction - The direction to check for the closest item. Ether `left` or `right`.
 * @return {Number} - The absolute position of the closest item, or -1 in freeDrag mode.
 */
Owl.prototype.closest = function(coordinate, direction) {
var position = -1,
pull = 30, // snap tolerance in pixels around an item edge
width = this.width(), // visible carousel width
count = this.settings.items,
itemWidth = Math.round(width / count),
coordinates = this.coordinates();
if (!this.settings.freeDrag) {
// check closest item
$.each(coordinates, $.proxy(function(index, value) {
// on a left pull, check on current index
if (direction === 'left' && coordinate > value - pull && coordinate < value + pull) {
position = index;
// on a right pull, check on previous index
// to do so, subtract width from value and set position = index + 1
} else if (direction === 'right' && coordinate > value - itemWidth - pull && coordinate < value - itemWidth + pull) {
position = index + 1;
} else if (this.op(coordinate, '<', value)
&& this.op(coordinate, '>', coordinates[index + 1] !== undefined ? coordinates[index + 1] : value - width)) {
// coordinate lies between two item edges: pick by drag direction
position = direction === 'left' ? index + 1 : index;
}
// returning false stops $.each once a position was found
return position === -1;
}, this));
}
if (!this.settings.loop) {
// non loop boundries
if (this.op(coordinate, '>', coordinates[this.minimum()])) {
position = coordinate = this.minimum();
} else if (this.op(coordinate, '<', coordinates[this.maximum()])) {
position = coordinate = this.maximum();
}
}
return position;
};
/**
 * Animates the stage to the given coordinate, preferring a CSS
 * translate3d transition and falling back to jQuery animation or a
 * plain `left` offset when the speed is zero.
 * @todo #270
 * @public
 * @param {Number} coordinate - The coordinate in pixels.
 */
Owl.prototype.animate = function(coordinate) {
    var animate = this.speed() > 0;

    // finish a still-running animation before starting a new one
    if (this.is('animating')) {
        this.onTransitionEnd();
    }

    if (animate) {
        this.enter('animating');
        this.trigger('translate');
    }

    if ($.support.transform3d && $.support.transition) {
        this.$stage.css({
            transform: 'translate3d(' + coordinate + 'px,0px,0px)',
            transition: (this.speed() / 1000) + 's' +
                (this.settings.slideTransition ? ' ' + this.settings.slideTransition : '')
        });
    } else if (animate) {
        this.$stage.animate({
            left: coordinate + 'px'
        }, this.speed(), this.settings.fallbackEasing, $.proxy(this.onTransitionEnd, this));
    } else {
        this.$stage.css({
            left: coordinate + 'px'
        });
    }
};
/**
 * Checks whether the carousel is in a specific state or not.
 * @param {String} state - The state to check.
 * @returns {Boolean} - The flag which indicates if the carousel is busy.
 */
Owl.prototype.is = function(state) {
    var count = this._states.current[state];
    return count && count > 0;
};
/**
 * Gets or sets the absolute position of the current item.
 * @public
 * @param {Number} [position] - The new absolute position or nothing to leave it unchanged.
 * @returns {Number} - The absolute position of the current item.
 */
Owl.prototype.current = function(position) {
    // getter
    if (position === undefined) {
        return this._current;
    }

    if (this._items.length === 0) {
        return undefined;
    }

    position = this.normalize(position);

    if (this._current === position) {
        return this._current;
    }

    // give `change` listeners a chance to adjust the new position
    var event = this.trigger('change', { property: { name: 'position', value: position } });
    if (event.data !== undefined) {
        position = this.normalize(event.data);
    }

    this._current = position;
    this.invalidate('position');
    this.trigger('changed', { property: { name: 'position', value: this._current } });

    return this._current;
};
/**
 * Invalidates the given part of the update routine and leaves the
 * `valid` state if necessary.
 * @param {String} [part] - The part to invalidate.
 * @returns {Array.<String>} - The invalidated parts.
 */
Owl.prototype.invalidate = function(part) {
    if ($.type(part) === 'string') {
        this._invalidated[part] = true;
        if (this.is('valid')) {
            this.leave('valid');
        }
    }
    return $.map(this._invalidated, function(value, key) { return key });
};
/**
 * Resets the absolute position of the current item without firing
 * translate events (they are suppressed for the jump).
 * @public
 * @param {Number} position - The absolute position of the new item.
 */
Owl.prototype.reset = function(position) {
    var target = this.normalize(position);

    if (target === undefined) {
        return;
    }

    this._speed = 0;
    this._current = target;

    this.suppress([ 'translate', 'translated' ]);
    this.animate(this.coordinates(target));
    this.release([ 'translate', 'translated' ]);
};
/**
 * Normalizes an absolute or a relative position of an item.
 * @public
 * @param {Number} position - The absolute or relative position to normalize.
 * @param {Boolean} [relative=false] - Whether the given position is relative or not.
 * @returns {Number} - The normalized position, or undefined for invalid input.
 */
Owl.prototype.normalize = function(position, relative) {
    var count = this._items.length,
        clones = relative ? 0 : this._clones.length;

    if (!this.isNumeric(position) || count < 1) {
        return undefined;
    }

    if (position < 0 || position >= count + clones) {
        // wrap out-of-range positions back into [0, count + clones),
        // keeping the clone offset (clones / 2) intact
        return ((position - clones / 2) % count + count) % count + clones / 2;
    }

    return position;
};
/**
 * Converts an absolute position of an item into a relative one
 * by removing the leading-clone offset.
 * @public
 * @param {Number} position - The absolute position to convert.
 * @returns {Number} - The converted position.
 */
Owl.prototype.relative = function(position) {
    return this.normalize(position - this._clones.length / 2, true);
};
/**
 * Gets the maximum position for the current item.
 * @public
 * @param {Boolean} [relative=false] - Whether to return an absolute position or a relative position.
 * @returns {Number}
 */
Owl.prototype.maximum = function(relative) {
var settings = this.settings,
maximum = this._coordinates.length,
iterator,
reciprocalItemsWidth,
elementWidth;
if (settings.loop) {
// in loop mode: last real item after the leading clones
maximum = this._clones.length / 2 + this._items.length - 1;
} else if (settings.autoWidth || settings.merge) {
// walk backwards accumulating item widths until they overflow the
// element; the first position whose trailing items no longer fit
// inside the element marks the maximum
iterator = this._items.length;
if (iterator) {
reciprocalItemsWidth = this._items[--iterator].width();
elementWidth = this.$element.width();
while (iterator--) {
reciprocalItemsWidth += this._items[iterator].width() + this.settings.margin;
if (reciprocalItemsWidth > elementWidth) {
break;
}
}
}
maximum = iterator + 1;
} else if (settings.center) {
maximum = this._items.length - 1;
} else {
// last position from which a full page of `items` is still visible
maximum = this._items.length - settings.items;
}
if (relative) {
maximum -= this._clones.length / 2;
}
return Math.max(maximum, 0);
};
/**
 * Gets the minimum position for the current item.
 * @public
 * @param {Boolean} [relative=false] - Whether to return an absolute position or a relative position.
 * @returns {Number}
 */
Owl.prototype.minimum = function(relative) {
    if (relative) {
        return 0;
    }
    // the absolute minimum sits right after the prepended clones
    return this._clones.length / 2;
};
/**
 * Gets an item at the specified relative position.
 * @public
 * @param {Number} [position] - The relative position of the item.
 * @return {jQuery|Array.<jQuery>} - The item at the given position or all items if no position was given.
 */
Owl.prototype.items = function(position) {
    if (position === undefined) {
        // shallow copy so callers cannot mutate internal state
        return this._items.slice();
    }
    return this._items[this.normalize(position, true)];
};
/**
 * Gets the merge factor of the item at the specified relative position.
 * @public
 * @param {Number} [position] - The relative position of the item.
 * @return {Number|Array.<Number>} - The merge factor at the given position or all merge factors if no position was given.
 */
Owl.prototype.mergers = function(position) {
    if (position === undefined) {
        // shallow copy so callers cannot mutate internal state
        return this._mergers.slice();
    }
    return this._mergers[this.normalize(position, true)];
};
/**
 * Gets the absolute positions of clones for an item.
 * Clones are stored interleaved: even indices were appended (after the
 * real items), odd indices were prepended (before them).
 * @public
 * @param {Number} [position] - The relative position of the item.
 * @returns {Array.<Number>} - The absolute positions of clones for the item or all if no position was given.
 */
Owl.prototype.clones = function(position) {
    var lower = this._clones.length / 2,
        upper = lower + this._items.length,
        map = function(index) {
            return index % 2 === 0 ? upper + index / 2 : lower - (index + 1) / 2;
        };

    if (position === undefined) {
        return $.map(this._clones, function(value, index) { return map(index) });
    }

    return $.map(this._clones, function(value, index) {
        return value === position ? map(index) : null;
    });
};
/**
 * Gets or sets the current animation speed.
 * @public
 * @param {Number} [speed] - The animation speed in milliseconds or nothing to leave it unchanged.
 * @returns {Number} - The current animation speed in milliseconds.
 */
Owl.prototype.speed = function(speed) {
    return speed === undefined ? this._speed : (this._speed = speed);
};
/**
 * Gets the coordinate of an item.
 * @todo The name of this method is missleanding.
 * @public
 * @param {Number} position - The absolute position of the item within `minimum()` and `maximum()`.
 * @returns {Number|Array.<Number>} - The coordinate of the item in pixel or all coordinates.
 */
Owl.prototype.coordinates = function(position) {
var multiplier = 1,
newPosition = position - 1,
coordinate;
// without a position, map every stored coordinate through this method
if (position === undefined) {
return $.map(this._coordinates, $.proxy(function(coordinate, index) {
return this.coordinates(index);
}, this));
}
if (this.settings.center) {
// in rtl mode the stage moves the other way, so mirror the offset
if (this.settings.rtl) {
multiplier = -1;
newPosition = position + 1;
}
coordinate = this._coordinates[position];
// shift by half the remaining space so the item is centered in the view
coordinate += (this.width() - coordinate + (this._coordinates[newPosition] || 0)) / 2 * multiplier;
} else {
coordinate = this._coordinates[newPosition] || 0;
}
coordinate = Math.ceil(coordinate);
return coordinate;
};
/**
 * Calculates the speed for a translation.
 * The duration scales with the travelled distance, capped at 6 steps.
 * @protected
 * @param {Number} from - The absolute position of the start item.
 * @param {Number} to - The absolute position of the target item.
 * @param {Number} [factor=undefined] - The time factor in milliseconds.
 * @returns {Number} - The time in milliseconds for the translation.
 */
Owl.prototype.duration = function(from, to, factor) {
    if (factor === 0) {
        return 0;
    }
    var steps = Math.min(Math.max(Math.abs(to - from), 1), 6);
    return steps * Math.abs(factor || this.settings.smartSpeed);
};
/**
 * Slides to the specified item.
 * @public
 * @param {Number} position - The position of the item.
 * @param {Number} [speed] - The time in milliseconds for the transition.
 */
Owl.prototype.to = function(position, speed) {
var current = this.current(),
revert = null,
distance = position - this.relative(current),
direction = (distance > 0) - (distance < 0), // sign of distance: -1, 0 or 1
items = this._items.length,
minimum = this.minimum(),
maximum = this.maximum();
if (this.settings.loop) {
// take the shorter way around when it is less than half the item count
if (!this.settings.rewind && Math.abs(distance) > items / 2) {
distance += direction * -1 * items;
}
position = current + distance;
// if the target lies outside the real range, jump without animation
// to the equivalent in-range position first, then animate from there
revert = ((position - minimum) % items + items) % items + minimum;
if (revert !== position && revert - distance <= maximum && revert - distance > 0) {
current = revert - distance;
position = revert;
this.reset(current);
}
} else if (this.settings.rewind) {
// wrap around the ends instead of clamping
maximum += 1;
position = (position % maximum + maximum) % maximum;
} else {
position = Math.max(minimum, Math.min(maximum, position));
}
this.speed(this.duration(current, position, speed));
this.current(position);
if (this.isVisible()) {
this.update();
}
};
/**
 * Slides to the next item.
 * @public
 * @param {Number} [speed] - The time in milliseconds for the transition.
 */
Owl.prototype.next = function(speed) {
    this.to(this.relative(this.current()) + 1, speed || false);
};
/**
 * Slides to the previous item.
 * @public
 * @param {Number} [speed] - The time in milliseconds for the transition.
 */
Owl.prototype.prev = function(speed) {
    this.to(this.relative(this.current()) - 1, speed || false);
};
/**
 * Handles the end of an animation.
 * @protected
 * @param {Event} [event] - The event arguments; undefined when the
 *     transition was finished programmatically (css2 fallback).
 */
Owl.prototype.onTransitionEnd = function(event) {
    if (event !== undefined) {
        event.stopPropagation();

        // only react to the transitionEnd of the owl-stage itself,
        // not to bubbled events from its children
        var target = event.target || event.srcElement || event.originalTarget;
        if (target !== this.$stage.get(0)) {
            return false;
        }
    }

    this.leave('animating');
    this.trigger('translated');
};
/**
 * Gets the viewport width, preferring a configured base element and
 * falling back to `window.innerWidth` / `documentElement.clientWidth`.
 * @protected
 * @return {Number} - The width in pixel.
 */
Owl.prototype.viewport = function() {
    var base = this.options.responsiveBaseElement;

    if (base !== window) {
        return $(base).width();
    }
    if (window.innerWidth) {
        return window.innerWidth;
    }
    if (document.documentElement && document.documentElement.clientWidth) {
        return document.documentElement.clientWidth;
    }

    console.warn('Can not detect viewport width.');
    return undefined;
};
/**
 * Replaces the current content with new items.
 * Existing items (and their merge factors) are discarded; each element
 * node of `content` is prepared, appended to the stage and registered.
 * Passing nothing/empty content simply empties the carousel.
 * @public
 * @param {HTMLElement|jQuery|String} content - The new content.
 */
Owl.prototype.replace = function(content) {
    this.$stage.empty();
    this._items = [];
    // keep the merge factors in sync with the items they describe;
    // previously they accumulated across repeated replace() calls
    this._mergers = [];

    // guard against falsy content — the old code called .find/.filter on it
    // unconditionally and threw a TypeError
    if (content) {
        content = (content instanceof jQuery) ? content : $(content);

        if (this.settings.nestedItemSelector) {
            content = content.find('.' + this.settings.nestedItemSelector);
        }

        // only element nodes become items; text/comment nodes are skipped
        content.filter(function() {
            return this.nodeType === 1;
        }).each($.proxy(function(index, item) {
            item = this.prepare(item);
            this.$stage.append(item);
            this._items.push(item);
            this._mergers.push(item.find('[data-merge]').addBack('[data-merge]').attr('data-merge') * 1 || 1);
        }, this));
    }

    this.reset(this.isNumeric(this.settings.startPosition) ? this.settings.startPosition : 0);
    this.invalidate('items');
};
/**
 * Adds an item at the given relative position (or at the end).
 * @todo Use `item` instead of `content` for the event arguments.
 * @public
 * @param {HTMLElement|jQuery|String} content - The item content to add.
 * @param {Number} [position] - The relative position at which to insert the item otherwise the item will be added to the end.
 */
Owl.prototype.add = function(content, position) {
    var current = this.relative(this._current);

    position = position === undefined ? this._items.length : this.normalize(position, true);
    content = content instanceof jQuery ? content : $(content);

    this.trigger('add', { content: content, position: position });

    content = this.prepare(content);

    if (this._items.length === 0 || position === this._items.length) {
        // append at the end (or into an empty carousel)
        if (this._items.length === 0) {
            this.$stage.append(content);
        } else {
            this._items[position - 1].after(content);
        }
        this._items.push(content);
        this._mergers.push(content.find('[data-merge]').addBack('[data-merge]').attr('data-merge') * 1 || 1);
    } else {
        // insert before the item currently occupying the position
        this._items[position].before(content);
        this._items.splice(position, 0, content);
        this._mergers.splice(position, 0, content.find('[data-merge]').addBack('[data-merge]').attr('data-merge') * 1 || 1);
    }

    // keep the previously current item in view
    this._items[current] && this.reset(this._items[current].index());

    this.invalidate('items');
    this.trigger('added', { content: content, position: position });
};
/**
 * Removes an item by its relative position.
 * @todo Use `item` instead of `content` for the event arguments.
 * @public
 * @param {Number} position - The relative position of the item to remove.
 */
Owl.prototype.remove = function(position) {
    position = this.normalize(position, true);

    if (position === undefined) {
        return;
    }

    this.trigger('remove', { content: this._items[position], position: position });

    // detach from the DOM and drop the bookkeeping entries in lockstep
    this._items[position].remove();
    this._items.splice(position, 1);
    this._mergers.splice(position, 1);

    this.invalidate('items');
    this.trigger('removed', { content: null, position: position });
};
/**
 * Preloads images with auto width.
 * For each image, loads the best available source off-DOM, then swaps it
 * in and refreshes the carousel once all preloads finished.
 * @todo Replace by a more generic approach
 * @protected
 * @param {jQuery} images - The images to preload.
 */
Owl.prototype.preloadAutoWidthImages = function(images) {
    images.each($.proxy(function(i, element) {
        this.enter('pre-loading');
        element = $(element);
        $(new Image()).one('load', $.proxy(function(e) {
            element.attr('src', e.target.src);
            element.css('opacity', 1);
            this.leave('pre-loading');
            // refresh once the last pending preload completed
            !this.is('pre-loading') && !this.is('initializing') && this.refresh();
        }, this)).attr('src',
            // fix: fall back to data-src/src when data-src-retina is absent
            // on a retina display — the old ternary bound tighter than `||`
            // and produced an undefined src, so the image never loaded
            (window.devicePixelRatio > 1 && element.attr('data-src-retina')) ||
            element.attr('data-src') ||
            element.attr('src')
        );
    }, this));
};
/**
 * Destroys the carousel: unbinds all namespaced event handlers, destroys
 * plugins, removes clones, unwraps the original content out of the stage
 * and strips all classes/data the carousel added to the root element.
 * @public
 */
Owl.prototype.destroy = function() {
this.$element.off('.owl.core');
this.$stage.off('.owl.core');
$(document).off('.owl.core');
if (this.settings.responsive !== false) {
window.clearTimeout(this.resizeTimer);
this.off(window, 'resize', this._handlers.onThrottledResize);
}
for (var i in this._plugins) {
this._plugins[i].destroy();
}
this.$stage.children('.cloned').remove();
// move the original item contents back up and drop the owl wrappers
this.$stage.unwrap();
this.$stage.children().contents().unwrap();
this.$stage.children().unwrap();
this.$stage.remove();
this.$element
.removeClass(this.options.refreshClass)
.removeClass(this.options.loadingClass)
.removeClass(this.options.loadedClass)
.removeClass(this.options.rtlClass)
.removeClass(this.options.dragClass)
.removeClass(this.options.grabClass)
.attr('class', this.$element.attr('class').replace(new RegExp(this.options.responsiveClass + '-\\S+\\s', 'g'), ''))
.removeData('owl.carousel');
};
/**
 * Operators to calculate right-to-left and left-to-right.
 * In rtl mode every comparison is mirrored, which is equivalent to
 * swapping the operands before applying the operator normally.
 * @protected
 * @param {Number} [a] - The left side operand.
 * @param {String} [o] - The operator.
 * @param {Number} [b] - The right side operand.
 * @returns {Boolean|undefined} - The comparison result, or undefined for an unknown operator.
 */
Owl.prototype.op = function(a, o, b) {
    if (this.settings.rtl) {
        var swap = a;
        a = b;
        b = swap;
    }
    switch (o) {
        case '<':
            return a < b;
        case '>':
            return a > b;
        case '>=':
            return a >= b;
        case '<=':
            return a <= b;
        default:
            break;
    }
};
/**
 * Attaches to an internal event, with a legacy `attachEvent` fallback
 * for old Internet Explorer.
 * @protected
 * @param {HTMLElement} element - The event source.
 * @param {String} event - The event name.
 * @param {Function} listener - The event handler to attach.
 * @param {Boolean} capture - Whether the event should be handled at the capturing phase or not.
 */
Owl.prototype.on = function(element, event, listener, capture) {
if (element.addEventListener) {
element.addEventListener(event, listener, capture);
} else if (element.attachEvent) {
element.attachEvent('on' + event, listener);
}
};
/**
 * Detaches from an internal event, with a legacy `detachEvent` fallback
 * for old Internet Explorer.
 * @protected
 * @param {HTMLElement} element - The event source.
 * @param {String} event - The event name.
 * @param {Function} listener - The attached event handler to detach.
 * @param {Boolean} capture - Whether the attached event handler was registered as a capturing listener or not.
 */
Owl.prototype.off = function(element, event, listener, capture) {
if (element.removeEventListener) {
element.removeEventListener(event, listener, capture);
} else if (element.detachEvent) {
element.detachEvent('on' + event, listener);
}
};
/**
 * Triggers a public event.
 * Builds a namespaced jQuery event (`<name>.owl.<namespace>`), offers it
 * to all plugins, fires it on the root element and finally invokes a
 * matching `on<Name>` callback from the settings — unless the event name
 * is currently suppressed.
 * @todo Remove `status`, `relatedTarget` should be used instead.
 * @protected
 * @param {String} name - The event name.
 * @param {*} [data=null] - The event data.
 * @param {String} [namespace=carousel] - The event namespace.
 * @param {String} [state] - The state which is associated with the event.
 * @param {Boolean} [enter=false] - Indicates if the call enters the specified state or not.
 * @returns {Event} - The event arguments.
 */
Owl.prototype.trigger = function(name, data, namespace, state, enter) {
// NOTE(review): `state` and `enter` are documented but unused in this body
var status = {
item: { count: this._items.length, index: this.current() }
}, handler = $.camelCase(
// e.g. name 'refresh' yields the settings callback name 'onRefresh'
$.grep([ 'on', name, namespace ], function(v) { return v })
.join('-').toLowerCase()
), event = $.Event(
[ name, 'owl', namespace || 'carousel' ].join('.').toLowerCase(),
$.extend({ relatedTarget: this }, status, data)
);
if (!this._supress[name]) {
// plugins get a chance to react before the DOM event fires
$.each(this._plugins, function(name, plugin) {
if (plugin.onTrigger) {
plugin.onTrigger(event);
}
});
this.register({ type: Owl.Type.Event, name: name });
this.$element.trigger(event);
// settings callback, e.g. settings.onRefreshed(event)
if (this.settings && typeof this.settings[handler] === 'function') {
this.settings[handler].call(this, event);
}
}
return event;
};
/**
* Enters a state.
* @param name - The state name.
*/
Owl.prototype.enter = function(name) {
	// Enter the named state plus any states tagged together with it,
	// keeping a reference count per state.
	var stateNames = [ name ].concat(this._states.tags[name] || []);
	$.each(stateNames, $.proxy(function(i, stateName) {
		var current = this._states.current;
		if (current[stateName] === undefined) {
			current[stateName] = 0;
		}
		current[stateName] = current[stateName] + 1;
	}, this));
};
/**
* Leaves a state.
* @param name - The state name.
*/
Owl.prototype.leave = function(name) {
	// Decrement the reference count of the named state and of all states
	// tagged together with it.
	var stateNames = [ name ].concat(this._states.tags[name] || []);
	$.each(stateNames, $.proxy(function(i, stateName) {
		this._states.current[stateName] = this._states.current[stateName] - 1;
	}, this));
};
/**
* Registers an event or state.
* @public
* @param {Object} object - The event or state to register.
*/
Owl.prototype.register = function(object) {
	if (object.type === Owl.Type.Event) {
		// Install a jQuery special-event hook (once per event name) so
		// owl-namespaced events don't run the browser default action.
		if (!$.event.special[object.name]) {
			$.event.special[object.name] = {};
		}
		if (!$.event.special[object.name].owl) {
			var _default = $.event.special[object.name]._default;
			$.event.special[object.name]._default = function(e) {
				// Delegate non-owl events to any pre-existing handler.
				if (_default && _default.apply && (!e.namespace || e.namespace.indexOf('owl') === -1)) {
					return _default.apply(this, arguments);
				}
				return e.namespace && e.namespace.indexOf('owl') > -1;
			};
			$.event.special[object.name].owl = true;
		}
	} else if (object.type === Owl.Type.State) {
		// Merge the state's tags, then de-duplicate by keeping only the
		// first occurrence of each tag.
		if (!this._states.tags[object.name]) {
			this._states.tags[object.name] = object.tags;
		} else {
			this._states.tags[object.name] = this._states.tags[object.name].concat(object.tags);
		}
		this._states.tags[object.name] = $.grep(this._states.tags[object.name], $.proxy(function(tag, i) {
			return $.inArray(tag, this._states.tags[object.name]) === i;
		}, this));
	}
};
/**
* Suppresses events.
* @protected
* @param {Array.<String>} events - The events to suppress.
*/
Owl.prototype.suppress = function(events) {
	// Mark each event name so trigger() skips dispatching it.
	// NOTE: the backing property is historically spelled `_supress`;
	// other methods read it under that spelling, so it is kept.
	$.each(events, $.proxy(function(index, eventName) {
		this._supress[eventName] = true;
	}, this));
};
/**
* Releases suppressed events.
* @protected
* @param {Array.<String>} events - The events to release.
*/
Owl.prototype.release = function(events) {
	// Remove the suppression flag so trigger() dispatches these again.
	$.each(events, $.proxy(function(index, eventName) {
		delete this._supress[eventName];
	}, this));
};
/**
* Gets unified pointer coordinates from event.
* @todo #261
* @protected
* @param {Event} - The `mousedown` or `touchstart` event.
* @returns {Object} - Contains `x` and `y` coordinates of current pointer position.
*/
/**
 * Gets unified pointer coordinates from a mouse or touch event.
 * @protected
 * @param {Event} event - The `mousedown` or `touchstart` event.
 * @returns {Object} - Contains `x` and `y` coordinates of the current
 *   pointer position.
 */
Owl.prototype.pointer = function(event) {
	var result = { x: null, y: null };
	event = event.originalEvent || event || window.event;
	// Normalize touch events to the first active (or changed) touch point.
	event = event.touches && event.touches.length ?
		event.touches[0] : event.changedTouches && event.changedTouches.length ?
			event.changedTouches[0] : event;
	// Bug fix: `pageX` is legitimately 0 at the left document edge, so a
	// truthiness test (`if (event.pageX)`) wrongly fell back to client
	// coordinates there. Test for presence instead.
	if (event.pageX !== undefined) {
		result.x = event.pageX;
		result.y = event.pageY;
	} else {
		result.x = event.clientX;
		result.y = event.clientY;
	}
	return result;
};
/**
* Determines if the input is a Number or something that can be coerced to a Number
* @protected
* @param {Number|String|Object|Array|Boolean|RegExp|Function|Symbol} - The input to be tested
* @returns {Boolean} - An indication if the input is a Number or can be coerced to a Number
*/
Owl.prototype.isNumeric = function(number) {
	// Coerce via parseFloat and accept anything that yields a parseable
	// leading number (same semantics as before, including e.g. '3px').
	var parsed = parseFloat(number);
	return isNaN(parsed) === false;
};
/**
* Gets the difference of two vectors.
* @todo #261
* @protected
* @param {Object} - The first vector.
* @param {Object} - The second vector.
* @returns {Object} - The difference.
*/
Owl.prototype.difference = function(first, second) {
	// Component-wise subtraction of two {x, y} vectors.
	var dx = first.x - second.x;
	var dy = first.y - second.y;
	return { x: dx, y: dy };
};
/**
* The jQuery Plugin for the Owl Carousel
* @todo Navigation plugin `next` and `prev`
* @public
*/
$.fn.owlCarousel = function(option) {
	// Remaining arguments are forwarded to string-named method calls below.
	var args = Array.prototype.slice.call(arguments, 1);
	return this.each(function() {
		var $this = $(this),
			data = $this.data('owl.carousel');
		// First call on an element: construct the carousel and cache it.
		if (!data) {
			data = new Owl(this, typeof option == 'object' && option);
			$this.data('owl.carousel', data);
			// Expose the public API methods as namespaced DOM events so
			// they can be invoked via $(el).trigger('next.owl.carousel').
			$.each([
				'next', 'prev', 'to', 'destroy', 'refresh', 'replace', 'add', 'remove'
			], function(i, event) {
				data.register({ type: Owl.Type.Event, name: event });
				data.$element.on(event + '.owl.carousel.core', $.proxy(function(e) {
					// Ignore events the carousel itself emitted to avoid
					// re-entrant dispatch; suppress while delegating.
					if (e.namespace && e.relatedTarget !== this) {
						this.suppress([ event ]);
						data[event].apply(this, [].slice.call(arguments, 1));
						this.release([ event ]);
					}
				}, data));
			});
		}
		// String option invokes a public method (leading '_' is private).
		if (typeof option == 'string' && option.charAt(0) !== '_') {
			data[option].apply(data, args);
		}
	});
};
/**
* The constructor for the jQuery Plugin
* @public
*/
$.fn.owlCarousel.Constructor = Owl;
})(window.Zepto || window.jQuery, window, document);
|
"""Calculate totals given minimal area request."""
import argparse
import logging
import os
import time

from ecoshard import geoprocessing
import matplotlib.pyplot as plt
import numpy
import pandas
import pandas.plotting
import scipy.ndimage.morphology
import scipy.optimize
import taskgraph
from osgeo import gdal
from osgeo import osr
gdal.SetCacheMax(2**27)
logging.basicConfig(
level=logging.DEBUG,
format=(
'%(asctime)s (%(relativeCreated)d) %(levelname)s %(name)s'
' [%(funcName)s:%(lineno)d] %(message)s'))
LOGGER = logging.getLogger(__name__)
logging.getLogger('taskgraph').setLevel(logging.WARN)
def _make_test_data_smooth(dir_path, n, m):
    """Create ``m`` smooth distance-gradient test rasters of shape (n, n).

    Each raster is the Euclidean distance transform of an all-ones array
    with a single zero pixel seeded at a different row per raster, giving
    a smooth surface. (The previous docstring said "random", which
    described the sibling ``_make_test_data_random`` instead.)

    Args:
        dir_path (str): directory to create and write the rasters into.
        n (int): raster edge length in pixels.
        m (int): number of rasters to generate.

    Returns:
        list of paths to the created rasters.
    """
    os.makedirs(dir_path, exist_ok=True)
    raster_path_list = []
    for raster_path, (pi, pj) in [(
            os.path.join(dir_path, f'smooth_{n}_{index}.tif'),
            (int(n*index/m), 0)) for index in range(m)]:
        base_array = numpy.ones((n, n))
        # seed pixel for the distance transform
        base_array[pi, pj] = 0
        dist_array = scipy.ndimage.morphology.distance_transform_edt(
            base_array)
        geoprocessing.numpy_array_to_raster(
            dist_array, -1, (1, -1), (0, 0),
            osr.SRS_WKT_WGS84_LAT_LONG, raster_path)
        raster_path_list.append(raster_path)
    return raster_path_list
def _make_test_data_random(dir_path, n, m):
    """Create ``m`` uniform-random test rasters of shape (n, n).

    Args:
        dir_path (str): directory to create and write the rasters into.
        n (int): raster edge length in pixels.
        m (int): number of rasters to generate.

    Returns:
        list of paths to the created rasters.
    """
    os.makedirs(dir_path, exist_ok=True)
    raster_path_list = []
    # Bug fix: join with dir_path so rasters land in the requested
    # directory; previously the bare filenames were written to the current
    # working directory and the makedirs call above was effectively unused
    # (compare with _make_test_data_smooth, which joins correctly).
    for raster_path, (pi, pj) in [
            (os.path.join(dir_path, f'random_{index}.tif'),
             (int(n*index/m), 0)) for index in range(m)]:
        LOGGER.debug(f'{raster_path}, {pi}, {pj}')
        base_array = numpy.random.random((n, n))
        geoprocessing.numpy_array_to_raster(
            base_array, -1, (1, -1), (0, 0),
            osr.SRS_WKT_WGS84_LAT_LONG, raster_path)
        raster_path_list.append(raster_path)
    return raster_path_list
def _callback(prob):
    """Progress callback for the LP solver; logs phase/status updates."""
    message = f'phase: {prob.phase}, status: {prob.status} {prob.message}'
    LOGGER.debug(message)
def _sum_raster(raster_path):
    """Return the sum of all non-nodata pixel values in ``raster_path``."""
    raster_info = geoprocessing.get_raster_info(raster_path)
    nodata = raster_info['nodata'][0]
    total = 0.0
    for _, block in geoprocessing.iterblocks((raster_path, 1)):
        if nodata is None:
            # no nodata defined -- every pixel counts
            total += numpy.sum(block)
        else:
            total += numpy.sum(block[block != nodata])
    return total
def multigrid_optimize(
        raster_path_list, min_proportion, target_raster_path,
        win_xoff, win_yoff, win_xsize, win_ysize,
        prop_tol=1e-12, grid_size=64,):
    """Solve a multigrid optimization problem.

    Coarsens the window into at most ``grid_size`` x ``grid_size`` cells,
    solves a linear program selecting the minimum-area set of cells whose
    summed value reaches ``min_proportion`` of every raster's total, then
    recurses into partially-selected cells until pixel resolution is
    reached, writing the final selection into ``target_raster_path``.

    Args:
        raster_path_list (list): list of rasters of equal shape to
            use for value objective matching.
        min_proportion (float): minimum proportion of rasters to optimize
            for.
        target_raster_path (str): path to target mask raster.
        win_xoff, win_yoff, win_xsize, win_ysize (int): the offset into the
            global rasters to solve a sub optimization problem on.
        prop_tol (float): proportion of raster sum to use as tolerance.
        grid_size (int): the size to subdivide the optimization problem on.

    Return:
        runtime of this call in seconds. (The previous docstring promised
        a result dict that was never actually returned.)
    """
    start_time = time.time()
    raster_info = geoprocessing.get_raster_info(raster_path_list[0])
    # raster_size is (n_cols, n_rows)
    n_cols, n_rows = raster_info['raster_size']
    col_stepsize = max(win_xsize // grid_size, 1)
    row_stepsize = max(win_ysize // grid_size, 1)
    n_col_grids = int(numpy.ceil(win_xsize / col_stepsize))
    # Bug fix: the row grid count must derive from the window *height*;
    # it previously used win_xsize, which was only correct for square
    # windows.
    n_row_grids = int(numpy.ceil(win_ysize / row_stepsize))
    # First axis indexed by x (column grid), second by y (row grid), to
    # match the ``mask_array[x_index, y_index]`` writes below; the array
    # is transposed back to (row, col) order when written to the raster.
    # (Previously declared (n_row_grids, n_col_grids), inconsistent with
    # the x-major indexing -- only coincidentally valid for square grids.)
    mask_array = numpy.full(
        (n_col_grids, n_row_grids), -1, dtype=numpy.float32)
    A_list = [[] for _ in range(len(raster_path_list))]
    raster_sum_list = [0.0] * len(raster_path_list)
    b_list = []
    offset_list = []
    for x_index in range(n_col_grids):
        local_xoff = win_xoff + x_index * col_stepsize
        local_win_xsize = col_stepsize
        # clamp the last column of cells to the raster edge
        next_xoff = win_xoff + (x_index+1)*col_stepsize
        if next_xoff > n_cols:
            local_win_xsize += n_cols-next_xoff
        for y_index in range(n_row_grids):
            local_yoff = win_yoff + y_index * row_stepsize
            local_win_ysize = row_stepsize
            next_yoff = win_yoff + (y_index+1)*row_stepsize
            if next_yoff > n_rows:
                local_win_ysize += n_rows-next_yoff
            tol = 1.0
            # load the subgrid
            offset_dict = {
                'xoff': local_xoff,
                'yoff': local_yoff,
                'win_xsize': local_win_xsize,
                'win_ysize': local_win_ysize,
            }
            valid_mask = numpy.ones(
                (local_win_ysize, local_win_xsize), dtype=bool)
            array_list = []
            for raster_path in raster_path_list:
                raster = gdal.OpenEx(raster_path)
                band = raster.GetRasterBand(1)
                array = band.ReadAsArray(**offset_dict)
                nodata = band.GetNoDataValue()
                band = None
                raster = None
                if nodata is not None:
                    valid_mask &= (array != nodata)
                array_list.append(array)
            if not numpy.any(valid_mask):
                # cell is entirely nodata -- leave its mask value at -1
                continue
            mask_array[x_index, y_index] = 0.0
            # one LP constraint row entry per raster: negated sums so the
            # ">= proportion" requirement becomes "A_ub @ x <= b_ub"
            for array_index, array in enumerate(array_list):
                grid_sum = numpy.sum(array[valid_mask])
                A_list[array_index].append(-grid_sum)
                raster_sum_list[array_index] += grid_sum
            # record the current grid offset for sub-multigrid
            offset_list.append(offset_dict)
    b_list = [-min_proportion*tot_val for tot_val in raster_sum_list]
    tol = min([prop_tol*val for val in raster_sum_list])
    # objective: minimize selected area (each variable covers one cell)
    c_vector = numpy.full(len(A_list[0]), col_stepsize*row_stepsize)
    res = scipy.optimize.linprog(
        c_vector,
        A_ub=A_list,
        b_ub=b_list,
        bounds=[0, 1],
        options={'tol': tol, 'disp': False})
    # scatter the solution back onto valid (non-nodata) cells
    valid_mask = mask_array == 0
    mask_array[valid_mask] = res.x
    mask_array[~valid_mask] = 0
    if col_stepsize == 1 and row_stepsize == 1:
        # pixel resolution reached -- write the solution directly
        raster = gdal.OpenEx(
            target_raster_path, gdal.OF_RASTER | gdal.GA_Update)
        band = raster.GetRasterBand(1)
        band.WriteArray(mask_array.T, xoff=win_xoff, yoff=win_yoff)
        band = None
        raster = None
    else:
        for local_offset, local_prop in zip(
                offset_list, mask_array[valid_mask]):
            if local_prop > 1:
                local_prop = 1
            n_local_pixels = (
                local_offset['win_xsize'] * local_offset['win_ysize'])
            predicted_pixels_to_set = round(local_prop * n_local_pixels)
            if predicted_pixels_to_set == 0:
                # nothing selected in this cell -- write all zeros
                raster = gdal.OpenEx(
                    target_raster_path, gdal.OF_RASTER | gdal.GA_Update)
                band = raster.GetRasterBand(1)
                band.WriteArray(
                    numpy.zeros(
                        (local_offset['win_ysize'],
                         local_offset['win_xsize'])),
                    xoff=local_offset['xoff'], yoff=local_offset['yoff'])
                band = None
                raster = None
                continue
            if round(local_prop * n_local_pixels) == n_local_pixels:
                # fully selected -- write all ones
                raster = gdal.OpenEx(
                    target_raster_path, gdal.OF_RASTER | gdal.GA_Update)
                band = raster.GetRasterBand(1)
                band.WriteArray(
                    numpy.ones(
                        (local_offset['win_ysize'],
                         local_offset['win_xsize'])),
                    xoff=local_offset['xoff'], yoff=local_offset['yoff'])
                band = None
                raster = None
                continue
            # partially selected -- recurse at finer resolution
            multigrid_optimize(
                raster_path_list, local_prop, target_raster_path,
                local_offset['xoff'], local_offset['yoff'],
                local_offset['win_xsize'], local_offset['win_ysize'],
                prop_tol=prop_tol, grid_size=grid_size)
    return time.time() - start_time
def _scipy_optimize(raster_path_list, min_proportion):
    """Solve the full-resolution LP in one shot (reference implementation).

    Args:
        raster_path_list (list): rasters of equal shape to cover.
        min_proportion (float): minimum proportion of each raster's total
            value the selected pixels must capture.

    Returns:
        (res, tot_val, raster_results) tuple: the scipy ``OptimizeResult``,
        the combined sum of all raster values, and the per-raster
        proportion captured by the solution.
    """
    raster_info = geoprocessing.get_raster_info(raster_path_list[0])
    # Consistency fix: ``raster_size`` is (n_cols, n_rows) -- the previous
    # assignment unpacked them swapped. Harmless here since only the
    # product is used, but now matches ``multigrid_optimize``.
    n_cols, n_rows = raster_info['raster_size']
    LOGGER.info('construct area list')
    # objective: minimize number of selected pixels (area)
    c_vector = numpy.ones(n_rows*n_cols)
    A_list = []
    b_list = []
    tol = 1.0
    tot_val = 0.0
    for raster_path in raster_path_list:
        LOGGER.info(f'processing {raster_path}')
        raster = gdal.OpenEx(raster_path)
        array = raster.ReadAsArray().flatten()
        # negated so ">= proportion" becomes "A_ub @ x <= b_ub"
        A_list.append(-array.flatten())
        array_sum = numpy.sum(array)
        tot_val += array_sum
        b_list.append(-min_proportion*array_sum)
        tol = min(tol, 1e-12*array_sum)
    LOGGER.debug('solving problem')
    res = scipy.optimize.linprog(
        c_vector,
        A_ub=A_list,
        b_ub=b_list,
        bounds=[0, 1],
        options={'tol': tol, 'disp': True})
    raster_results = [numpy.sum(A * res.x)/numpy.sum(A) for A in A_list]
    return res, tot_val, raster_results
def _sum_over_mask(base_raster_path, mask_raster_path):
    """Return the sum of base pixels where the mask value is >= 0.5."""
    total = 0.0
    base_blocks = geoprocessing.iterblocks((base_raster_path, 1))
    mask_blocks = geoprocessing.iterblocks((mask_raster_path, 1))
    for (_, base_block), (_, mask_block) in zip(base_blocks, mask_blocks):
        selected = mask_block >= 0.5
        total += numpy.sum(base_block[selected])
    return total
def main():
    """Entry point: benchmark multigrid LP runtime/error vs grid size.

    Builds smooth test rasters, then repeatedly solves the coverage
    optimization with a shrinking grid size, recording runtime and the
    percent error relative to the first (largest-grid) solution into a
    CSV, and finally plots runtime and error against grid size.
    """
    parser = argparse.ArgumentParser(description='Calc sum given min area')
    parser.add_argument('n_cells', type=int,)
    parser.add_argument('grid_size', type=int,)
    parser.add_argument('min_size', type=int,)
    args = parser.parse_args()
    raster_side_length = args.n_cells
    LOGGER.info('construct test data')
    # -1 workers: run tasks in-process (avoids multiprocessing overhead)
    task_graph = taskgraph.TaskGraph('.', -1) #multiprocessing.cpu_count())
    data_type = 'smooth'
    test_data_task = task_graph.add_task(
        func=_make_test_data_smooth,
        args=('test_data', raster_side_length, 10),
        store_result=True,
        task_name='make smooth test data')
    raster_path_list = test_data_task.get()
    LOGGER.info('construct optimization problem')
    min_proportion = 0.5
    current_grid_size = args.grid_size
    opt_task_list = []
    csv_file_path = f'result_{data_type}_{raster_side_length}_{args.grid_size}.csv'
    with open(csv_file_path, 'w') as \
            csv_file:
        # header: grid size, runtime, per-raster sums, per-raster errors
        csv_file.write('grid size,run time,')
        csv_file.write(','.join([
            os.path.basename(os.path.splitext(path)[0])
            for path in raster_path_list]))
        csv_file.write(',')
        csv_file.write(',error_'.join([
            os.path.basename(os.path.splitext(path)[0])
            for path in raster_path_list]))
        csv_file.write(',mean_error')
        # schedule one optimization per decreasing grid size (x0.9 steps)
        while current_grid_size >= args.min_size:
            LOGGER.debug(f'construct {current_grid_size}')
            target_raster_path = (
                f'optimal_mask_{data_type}_{args.n_cells}_{current_grid_size}.tif')
            if not os.path.exists(target_raster_path):
                geoprocessing.new_raster_from_base(
                    raster_path_list[0], target_raster_path, gdal.GDT_Float32,
                    [-1])
            raster_info = geoprocessing.get_raster_info(raster_path_list[0])
            n_cols, n_rows = raster_info['raster_size']
            optimization_task = task_graph.add_task(
                func=multigrid_optimize,
                args=(
                    raster_path_list, min_proportion, target_raster_path,
                    0, 0, n_cols, n_rows),
                kwargs={'prop_tol': 1e-12, 'grid_size': current_grid_size},
                store_result=True)
            opt_task_list.append((current_grid_size, optimization_task, target_raster_path))
            current_grid_size = int(current_grid_size * 0.9)
        # collect results; the first (largest grid) run is the baseline
        # against which later runs' percent error is measured
        first_solution = True
        for grid_size, optimization_task, target_raster_path in opt_task_list:
            if first_solution:
                expected_sum_list = []
            LOGGER.info(f'waiting for grid size {grid_size}')
            runtime = optimization_task.get()
            csv_file.write(f'\n{grid_size},{runtime}')
            error_list = []
            for index, base_raster_path in enumerate(raster_path_list):
                val_sum = _sum_over_mask(base_raster_path, target_raster_path)
                csv_file.write(f',{val_sum}')
                if first_solution:
                    expected_sum_list.append(val_sum)
                else:
                    error_list.append((expected_sum_list[index]-val_sum)/expected_sum_list[index]*100)
            if not first_solution:
                for error in error_list:
                    csv_file.write(f',{error}')
                csv_file.write(f',{numpy.mean(error_list)}')
            first_solution = False
    # plot runtime and mean error vs grid size (skipping the baseline row)
    ax = plt.gca()
    ax2 = plt.twinx()
    ax.set_ylabel('runtime (s)')
    ax2.set_ylabel('mean % error from original')
    df = pandas.read_csv(
        csv_file_path, usecols=['grid size', 'run time', 'mean_error'])
    ax.plot(df[1::]['grid size'], df[1::]['run time'], f'b.-', label=f'runtime (s)')
    ax.plot(df[1::]['grid size'], df[1::]['mean_error'], f'k.--', label=f'error %')
    ax.legend()
    grid_min = df[1::]['grid size'].min()
    grid_max = df[1::]['grid size'].max()
    ax.set_xlim(grid_max, grid_min) # decreasing time
    plt.grid()
    plt.ylabel('runtime (s)')
    plt.show()


if __name__ == '__main__':
    main()
|
import React from 'react'
import MapboxGLMap from './MapboxGLMap'
import Studios from './Studios'
import './App.css'
function App() {
return (
<div className="App">
<MapboxGLMap />
<Studios />
</div>
)
}
export default App
|
/*
* Copyright (c) 2008, 2018, Oracle and/or its affiliates. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License, version 2.0, as
* published by the Free Software Foundation.
*
* This program is also distributed with certain software (including
* but not limited to OpenSSL) that is licensed under separate terms,
* as designated in a particular file or component or in included license
* documentation. The authors of MySQL hereby grant you an
* additional permission to link the program and your derivative works
* with the separately licensed software that they have included with
* MySQL.
*
* Without limiting anything contained in the foregoing, this file,
* which is part of MySQL Connector/C++, is also subject to the
* Universal FOSS Exception, version 1.0, a copy of which can be found at
* http://oss.oracle.com/licenses/universal-foss-exception.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License, version 2.0, for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "../BaseTestFixture.h"
/**
* @author
*/
namespace testsuite
{
namespace regression
{
/* Regression test fixture for prepared-statement behavior. */
class PreparedStatementRegressionTest : public BaseTestFixture
{
private:
  typedef BaseTestFixture super;

  /* Database metadata handle available to the test cases. */
  DatabaseMetaData dbmd;

protected:
  /**
   * setUp() function for tests
   */
  /* throws std::exception * */
  void setUp();

public:
  TEST_FIXTURE(PreparedStatementRegressionTest)
  {
    TEST_CASE( testStmtClose );
  }

  /* Regression case: closing a prepared statement. */
  void testStmtClose();
};
REGISTER_FIXTURE(PreparedStatementRegressionTest);
} //namespace regression
} //namespace testsuite
|
import React, { Component } from 'react';
import { List } from 'semantic-ui-react';
import Section from './section';
//if(process.env.WEBPACK) require('./index.scss');
class Division extends Component {
constructor() {
super();
this.state = {
isVisible: false
};
}
toggleDivision() {
this.setState({isVisible: !this.state.isVisible});
}
render() {
const { division } = this.props;
let sections;
let caret = "caret right";
// debugger
if (this.state.isVisible) {
caret = "caret down";
sections = (
<List.List style={{paddingLeft:10}}>
{division.sections.map((section) => (
<Section key={section.section} section={section} />
))}
</List.List>
);
}
return (
<List.Item key={division.division}>
<List.Icon name={caret}/>
<List.Content>
<List.Header as={'h3'} onClick={(event) => this.toggleDivision()}>{division.division} - {division.title}</List.Header>
{sections}
</List.Content>
</List.Item>
);
}
}
export default Division;
|
# Registers the standard OAuth2 URL patterns for the Hubic provider.
from authentication.socialaccount.providers.oauth2.urls import default_urlpatterns

from .provider import HubicProvider

urlpatterns = default_urlpatterns(HubicProvider)
|
/*
* Описания переменных и функций чтения заголовка письма.
*
* Copyright (C) 1992-1995 Cronyx Ltd.
* Автор: Сергей Вакуленко, vak@cronyx.ru
* Wed Feb 8 18:29:31 MSK 1995
*/
/*
 * Lookup-table entry binding a header field name to the variable that
 * receives its value.
 */
struct headertable {
	char *name;   /* header field name */
	char **value; /* destination for the parsed field body */
};

/* Parsed message header fields (one variable per field). */
extern char *h_approved;
extern char *h_date;
extern char *h_distribution;
extern char *h_expires;
extern char *h_followup_to;
extern char *h_from;
extern char *h_keywords;
extern char *h_message_id;
extern char *h_newsgroups;
extern char *h_organization;
extern char *h_references;
extern char *h_reply_to;
extern char *h_resent_from;
extern char *h_sender;
extern char *h_subject;
extern char *h_summary;
extern char *h_supersed;
extern char *h_mime_version;
extern char *h_content_type;
extern char *h_content_transfer_encoding;
extern char *h_content_length;
extern char *h_to;
extern char *h_cc;
extern char *h_from_; /* mailbox-style "From " line */

/* Read a message header from fd; presumably fills the h_* variables
 * above -- TODO confirm against the implementation. */
extern void scanheader (FILE *fd);
/* Release state built by scanheader (implementation not visible here). */
extern void freeheader (void);
|
/**
* Welcome to your Workbox-powered service worker!
*
* You'll need to register this file in your web app and you should
* disable HTTP caching for this file too.
* See https://goo.gl/nhQhGp
*
* The rest of the code is auto-generated. Please don't update this file
* directly; instead, make changes to your Workbox build configuration
* and re-run your build process.
* See https://goo.gl/2aRDsh
*/
importScripts("https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-sw.js");
importScripts(
"/precache-manifest.3893678617de4914a303d584c05f5b3a.js"
);
self.addEventListener('message', function (event) {
  // Allow the page to ask a waiting service worker to activate at once.
  var data = event.data;
  if (data && data.type === 'SKIP_WAITING') {
    self.skipWaiting();
  }
});
workbox.core.clientsClaim();
/**
* The workboxSW.precacheAndRoute() method efficiently caches and responds to
* requests for URLs in the manifest.
* See https://goo.gl/S9QRab
*/
self.__precacheManifest = [].concat(self.__precacheManifest || []);
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});
workbox.routing.registerNavigationRoute(workbox.precaching.getCacheKeyForURL("/index.html"), {
blacklist: [/^\/_/,/\/[^/?]+\.[^/]+$/],
});
|
module.exports = (app) => {
if (!app.get('secret')) {
app.set('secret', Math.random().toString(16).slice(2));
}
};
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Segment2 = void 0;
const Point2_1 = require("../Point2");
const Vector2_1 = require("../Vector2");
const util_1 = require("@anderjason/util");
const optionalLineIntersectionGivenPoints_1 = require("../Line2/optionalLineIntersectionGivenPoints");
const segmentWithClippingBox_1 = require("./_internal/segmentWithClippingBox");
const half = util_1.Percent.givenFraction(1, 2);
// Immutable 2D line segment defined by two endpoints.
// NOTE: this is compiled TypeScript output; code kept byte-identical,
// comments added for review only.
class Segment2 {
    constructor(a, b) {
        this._start = a;
        this._end = b;
    }
    // Factory: segment from four scalar coordinates.
    static givenXYPair(x1, y1, x2, y2) {
        return new Segment2(Point2_1.Point2.givenXY(x1, y1), Point2_1.Point2.givenXY(x2, y2));
    }
    // Factory: segment from two Point2 instances.
    static givenPoints(startPoint, endPoint) {
        return new Segment2(startPoint, endPoint);
    }
    // Null-safe structural equality of two segments.
    static isEqual(a, b) {
        if (a == null && b == null) {
            return true;
        }
        if (a == null || b == null) {
            return false;
        }
        return a.isEqual(b);
    }
    get startPoint() {
        return this._start;
    }
    get endPoint() {
        return this._end;
    }
    get startX() {
        return this._start.x;
    }
    get endX() {
        return this._end.x;
    }
    get startY() {
        return this._start.y;
    }
    get endY() {
        return this._end.y;
    }
    // Endpoint-wise equality; direction matters (AB !== BA).
    isEqual(other) {
        if (other == null) {
            return false;
        }
        if (!(other instanceof Segment2)) {
            return false;
        }
        return other._start.isEqual(this._start) && other._end.isEqual(this._end);
    }
    // Euclidean length of the segment.
    toLength() {
        return this._start.toDistance(this._end);
    }
    // Closest point on the segment (or on the infinite line through it,
    // when `infinite` is true) to the given point, via projection onto
    // the direction vector; the finite case clamps the projection to
    // [0, length].
    toNearestPoint(point, infinite) {
        const pnt = Vector2_1.Vector2.givenXY(point.x, point.y);
        if (infinite) {
            const linePnt = Vector2_1.Vector2.givenXY(this._start.x, this._start.y);
            const lineDir = Vector2_1.Vector2.givenPoints(this._start, this._end).withNormalizedMagnitude();
            const v = pnt.withSubtractedVector(linePnt);
            const d = v.toDotProduct(lineDir);
            return linePnt.withAddedVector(lineDir.withMultipliedScalar(d)).toPoint();
        }
        else {
            let line = Vector2_1.Vector2.givenPoints(this._start, this._end);
            const len = line.toMagnitude();
            line = line.withNormalizedMagnitude();
            const startVector = Vector2_1.Vector2.givenPoint(this._start);
            const v = pnt.withSubtractedVector(startVector);
            let d = v.toDotProduct(line);
            // clamp to stay within the segment
            d = util_1.NumberUtil.numberWithHardLimit(d, 0, len);
            return startVector
                .withAddedVector(line.withMultipliedScalar(d))
                .toPoint();
        }
    }
    // Midpoint = 50% interpolation between the endpoints.
    toMidpoint() {
        return this.toIntermediatePoint(half);
    }
    // Linear interpolation between start and end by the given Percent.
    toIntermediatePoint(percent) {
        const t = percent.toNumber(1);
        const x = this.startPoint.x + (this.endPoint.x - this.startPoint.x) * t;
        const y = this.startPoint.y + (this.endPoint.y - this.startPoint.y) * t;
        return Point2_1.Point2.givenXY(x, y);
    }
    // Delegates to the line's segment-intersection logic.
    toOptionalIntersectionGivenLine(other) {
        return other.toOptionalIntersectionGivenSegment(this);
    }
    // Segment/segment intersection ("touch" mode includes endpoints).
    toOptionalIntersectionGivenSegment(other) {
        const startA = this.startPoint;
        const endA = this.endPoint;
        const startB = other.startPoint;
        const endB = other.endPoint;
        return optionalLineIntersectionGivenPoints_1.optionalLineIntersectionGivenPoints(startA, endA, startB, endB, "touch");
    }
    // Point at the given distance measured from either endpoint, moving
    // toward the other endpoint.
    toPointGivenDistance(distance, fromPoint) {
        switch (fromPoint) {
            case "start":
                if (distance === 0) {
                    return this.startPoint;
                }
                return this.startPoint.withAddedVector(Vector2_1.Vector2.givenPoints(this.startPoint, this.endPoint)
                    .withNormalizedMagnitude()
                    .withMultipliedScalar(distance));
            case "end":
                if (distance === 0) {
                    return this.endPoint;
                }
                return this.endPoint.withAddedVector(Vector2_1.Vector2.givenPoints(this.endPoint, this.startPoint)
                    .withNormalizedMagnitude()
                    .withMultipliedScalar(distance));
            default:
                throw new Error("Unsupported fromPoint");
        }
    }
    // Translate the segment by a vector (returns a new segment).
    withAddedVector(vector) {
        return new Segment2(this._start.withAddedVector(vector), this._end.withAddedVector(vector));
    }
    withSubtractedVector(vector) {
        return new Segment2(this._start.withSubtractedVector(vector), this._end.withSubtractedVector(vector));
    }
    // Clip the segment against a bounding box (see helper for semantics).
    withClippingBox(box) {
        return segmentWithClippingBox_1.segmentWithClippingBox(this, box);
    }
}
exports.Segment2 = Segment2;
//# sourceMappingURL=index.js.map
|
# Standard Library imports
# Core Django imports
# Third-party imports
# App imports
class Error(Exception):
    """Base application error.

    Subclasses may predefine ``value`` as a class attribute; in that case
    the constructor argument is ignored.
    """

    def __init__(self, value=""):
        # Only adopt the argument when no ``value`` already exists.
        if hasattr(self, "value"):
            return
        self.value = value

    def __str__(self):
        return repr(self.value)
###############
# User Errors #
###############


class UsernameAlreadyExistsError(Error):
    # Raised when registering a username that is already taken.
    message = "An account with this username already exists!"
    internal_error_code = 40901  # HTTP 409 Conflict + sub-code


class EmailAddressAlreadyExistsError(Error):
    # Raised when an account already exists for the given email address.
    message = "There is already an account associated with this email address!"
    internal_error_code = 40902  # HTTP 409 Conflict + sub-code


class TermsNotAcceptedError(Error):
    # Raised when the user has not accepted the terms of service.
    message = "You must accept the terms of service in order to use example.com!"
    internal_error_code = 42901  # HTTP 429-range + sub-code
|
/**
* Defines the React 16 Adapter for Enzyme.
*
* @link http://airbnb.io/enzyme/docs/installation/#working-with-react-16
* @copyright 2017 Airbnb, Inc.
*/
import { configure } from "enzyme";
import Adapter from "enzyme-adapter-react-16";
configure({ adapter: new Adapter() })
|
// Login form directive + controller. Delegates credential checking to
// AuthService and redirects to the dashboard on success.
angular.module('caac.users.login-form.directive', [
	'caac.users.auth.service',
]).controller('LoginFormController', ['$log', '$scope', '$location', 'AuthService',
	function($log, $scope, $location, AuthService) {
		var self = $scope;
		var logger = $log.getInstance('LoginController');
		var loginForm = {};

		// Initialization hook (currently empty; called once below).
		self.construct = function() {
		};

		// Submit handler: attempt login with the bound form model.
		self.handleLoginBtnClick = function() {
			AuthService.attemptLogin(self.loginForm)
				.then(function(resp) {
					$location.url('dashboard');
				})
				.catch(function(resp) {
					// Always clear the password on failure.
					self.loginForm.password = "";
					if (resp.reason === 'unauthorized') {
						self.loginForm.error = 'Invalid Credentials. Please try again.';
					}
				});
		};

		self.construct();
	}
])
.directive('loginForm', [
	function() {
		// <login-form> element renders the template with the controller.
		return {
			restrict: 'E',
			templateUrl: 'users/auth/login/LoginForm.html',
			controller: 'LoginFormController'
		};
	}
]);
|
from __future__ import print_function
# ------------------------------------------------------------------------------------------------
# Copyright (c) 2016 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ------------------------------------------------------------------------------------------------
# Tutorial sample #2: Run simple mission using raw XML
# Added modifications by Norbert Bátfai (nb4tf4i) batfai.norbert@inf.unideb.hu, mine.ly/nb4tf4i.1
# 2018.10.18, https://bhaxor.blog.hu/2018/10/18/malmo_minecraft
# 2020.02.02, NB4tf4i's Red Flowers, http://smartcity.inf.unideb.hu/~norbi/NB4tf4iRedFlowerHell
# 2020.03.02, https://github.com/nbatfai/RedFlowerHell
# 2020.03.07, "_smartSteve": nof_turn (number of turns) is replaced by the dict self.collectedFlowers
# 2020.03.07, "_smartSteve": nof_turn (number of turns) is replaced by the dict self.collectedFlowers
# 2020.03.29, Red Pill team, Nándor Bátfai & Norbert Bátfai, rewriting from scratch: https://youtu.be/5-0fsgvyZ9c, "smart" feature is eliminated
# 2020.03.31, Green Pill team, Norbert Bátfai, forked from Red Pill, https://youtu.be/BCS1_TuMsRQ
# 2020.04.02, Qualification for participating in RFH III, Norbert Bátfai, https://youtu.be/cfhh3llDoRo
# 2020.04.03, Uploading to github repo https://github.com/nbatfai/RedFlowerHell
# TODO életerőtől függő viselkedés
from builtins import range
import MalmoPython
import os
import sys
import time
import random
import json
import math
from enum import Enum
if sys.version_info[0] == 2:
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) # flush print output immediately
else:
import functools
print = functools.partial(print, flush=True)
# Create default Malmo objects:
agent_host = MalmoPython.AgentHost()
try:
agent_host.parse( sys.argv )
except RuntimeError as e:
print('ERROR:',e)
print(agent_host.getUsage())
exit(1)
if agent_host.receivedArgument("help"):
print(agent_host.getUsage())
exit(0)
# -- set up the mission -- #
missionXML_file='nb4tf4i_d.xml'
with open(missionXML_file, 'r') as f:
print("NB4tf4i's Red Flowers (Red Flower Hell) - DEAC-Hackers Battle Royale Arena\n")
print("NB4tf4i vörös pipacsai (Vörös Pipacs Pokol) - DEAC-Hackers Battle Royale Arena\n")
print("The aim of this first challenge, called nb4tf4i's red flowers, is to collect as many red flowers as possible before the lava flows down the hillside.\n")
print("Ennek az első, az nb4tf4i vörös virágai nevű kihívásnak a célja összegyűjteni annyi piros virágot, amennyit csak lehet, mielőtt a láva lefolyik a hegyoldalon.\n")
print("Norbert Bátfai, batfai.norbert@inf.unideb.hu, https://arato.inf.unideb.hu/batfai.norbert/\n")
print("Version history\n", "Code: Green Pill", sys.argv[0], ", series 24, version 3, max 28 poppies: https://youtu.be/cfhh3llDoRo, Norbert Bátfai, nbatfai@gmail.com, Nándor Bátfai.\n")
print("Loading mission from %s" % missionXML_file)
mission_xml = f.read()
my_mission = MalmoPython.MissionSpec(mission_xml, True)
my_mission.drawBlock( 0, 0, 0, "lava")
class Hourglass:
    """Cycles through a character set to render a console spinner."""

    def __init__(self, charSet):
        self.charSet = charSet
        self.index = 0

    def cursor(self):
        """Advance to the next character (wrapping) and return it."""
        next_index = (self.index + 1) % len(self.charSet)
        self.index = next_index
        return self.charSet[next_index]
hg = Hourglass('|/-\|')
class SteveState(Enum):
    # Behaviour states for the agent's control loop. Names are
    # self-describing; the transition logic lives later in the file.
    GOING_UP = 0
    FIRST_TURN = 1
    FORWARD = 2
    TURNING = 3
    FLOWER = 4
    ATTACK = 5
    PICK_UP = 6
    LVL_DOWN = 7
class Steve:
    def __init__(self, agent_host):
        """Bind the Malmo agent host and reset all navigation state."""
        self.agent_host = agent_host
        # last known position/orientation (updated from observations)
        self.x = 0
        self.y = 0
        self.z = 0
        self.yaw = 0
        self.pitch = 0
        # initial look/turn commands; each sent twice -- presumably to
        # force the client to apply them (TODO confirm against Malmo docs)
        self.agent_host.sendCommand( "look 1" )
        self.agent_host.sendCommand( "look 1" )
        self.agent_host.sendCommand( "turn 1" )
        self.agent_host.sendCommand( "turn 1" )
        self.state = SteveState.GOING_UP
        self.trapc = 0  # counter (trap-related; exact use defined later)
        self.lvl = 0
        self.lvlc = 0
        # neighbour-grid indices, recomputed from yaw by calcNbrIndex()
        self.front_of_me_idx = 0
        self.front_of_me_idxr = 0
        self.front_of_me_idxl = 0
        self.right_of_me_idx = 0
        self.left_of_me_idx = 0
        self.nof_red_flower = 0  # largest red-flower stack size seen
def isInTrap(self, nbr):
dc = 0
nbri = [9,10,11,12,14,15,16,17]
for i in range(0, len(nbri)):
if nbr[nbri[i]]=="dirt" :
dc = dc + 1
return dc > 6
def checkInventory(self, observations):
flower = False
dirt = False
dirt_idx = 0
for i in range(2):
hotbari = 'Hotbar_'+str(i)+'_item'
hotbars = 'Hotbar_'+str(i)+'_size'
slot0_contents = observations.get(hotbari, "")
if slot0_contents == "red_flower":
slot0_size = observations.get(hotbars, "")
if self.nof_red_flower < slot0_size :
self.nof_red_flower = slot0_size
print(" *** A RED FLOWER IS MINED AND PICKED UP *** ")
flower = True
if slot0_contents == "dirt":
dirt_idx = i+1
slot0_size = observations.get(hotbars, "")
if 0 < slot0_size :
dirt = True
return flower, dirt, dirt_idx
def idle(self, delay):
#print(" SLEEPING for ", delay)
time.sleep(delay)
def isInTrap(self, nbr):
if nbr[9]=="dirt" and nbr[10]=="dirt" and nbr[11]=="dirt" and nbr[12]=="dirt" and nbr[14]=="dirt" and nbr[15]=="dirt" and nbr[16]=="dirt" and nbr[17]=="dirt":
return True
else:
return False
def calcNbrIndex(self):
if self.yaw >= 180-22.5 and self.yaw <= 180+22.5 :
self.front_of_me_idx = 1
self.front_of_me_idxr = 2
self.front_of_me_idxl = 0
self.right_of_me_idx = 5
self.left_of_me_idx = 3
elif self.yaw >= 180+22.5 and self.yaw <= 270-22.5 :
self.front_of_me_idx = 2
self.front_of_me_idxr = 5
self.front_of_me_idxl =1
self.right_of_me_idx = 8
self.left_of_me_idx = 0
elif self.yaw >= 270-22.5 and self.yaw <= 270+22.5 :
self.front_of_me_idx = 5
self.front_of_me_idxr = 8
self.front_of_me_idxl = 2
self.right_of_me_idx = 7
self.left_of_me_idx = 1
elif self.yaw >= 270+22.5 and self.yaw <= 360-22.5 :
self.front_of_me_idx = 8
self.front_of_me_idxr = 7
self.front_of_me_idxl = 5
self.right_of_me_idx = 6
self.left_of_me_idx = 2
elif self.yaw >= 360-22.5 or self.yaw <= 0+22.5 :
self.front_of_me_idx = 7
self.front_of_me_idxr = 6
self.front_of_me_idxl = 8
self.right_of_me_idx = 3
self.left_of_me_idx = 5
elif self.yaw >= 0+22.5 and self.yaw <= 90-22.5 :
self.front_of_me_idx = 6
self.front_of_me_idxr = 3
self.front_of_me_idxl = 7
self.right_of_me_idx = 0
self.left_of_me_idx = 8
elif self.yaw >= 90-22.5 and self.yaw <= 90+22.5 :
self.front_of_me_idx = 3
self.front_of_me_idxr = 0
self.front_of_me_idxl = 6
self.right_of_me_idx = 1
self.left_of_me_idx = 7
elif self.yaw >= 90+22.5 and self.yaw <= 180-22.5 :
self.front_of_me_idx = 0
self.front_of_me_idxr = 1
self.front_of_me_idxl = 3
self.right_of_me_idx = 2
self.left_of_me_idx = 6
else:
print("There is great disturbance in the Force...")
def whatISee(self, observations):
self.lookingat = "NOTHING"
if "LineOfSight" in observations:
lineOfSight = observations["LineOfSight"]
self.lookingat = lineOfSight["type"]
def whatMyPos(self, observations):
if "Yaw" in observations:
self.yaw = int(observations["Yaw"])
if "Pitch" in observations:
self.pitch = int(observations["Pitch"])
if "XPos" in observations:
self.x = int(observations["XPos"])
if "ZPos" in observations:
self.z = int(observations["ZPos"])
if "YPos" in observations:
self.y = int(observations["YPos"])
def run(self):
world_state = self.agent_host.getWorldState()
# Loop until mission ends:
while world_state.is_mission_running:
#print(">>> nb4tf4i arena -----------------------------------\n")
delay = self.action(world_state)
#print("nb4tf4i arena >>> -----------------------------------\n")
self.idle(delay)
world_state = self.agent_host.getWorldState()
def action(self, world_state):
for error in world_state.errors:
print("Error:", error.text)
if world_state.number_of_observations_since_last_state == 0:
#print(" NO OBSERVATIONS NO ACTIONS")
return False
input = world_state.observations[-1].text
observations = json.loads(input)
nbr = observations.get("nbr3x3", 0)
#print(observations)
self.whatMyPos(observations)
print("\n>>> nb4tf4i arena --- (there are observations) -------------------")
print("Steve's Coords: ", self.x, self.y, self.z, " Yaw: ", self.yaw, " Pitch: ", self.pitch, " #RF: ", self.nof_red_flower)
flower, dirt, dirt_idx = self.checkInventory(observations)
#print("Number of flowers: ", self.nof_red_flower)
self.whatISee(observations)
#print(" Steve's <): ", self.lookingat)
self.calcNbrIndex()
delay = .02
if self.state == SteveState.GOING_UP :
print(" GOING_UP: ", nbr[self.front_of_me_idx+9])
if self.y <= 30 :
self.agent_host.sendCommand( "jumpmove 1" )
print(" GOING_UP: jumpmove 1 ", nbr[self.front_of_me_idx+9])
else :
print(" GOING_UP: turn 1 28<=y ", nbr[self.front_of_me_idx+9])
self.agent_host.sendCommand( "turn 1" )
delay = .23
self.state = SteveState.FIRST_TURN
elif self.state == SteveState.FIRST_TURN:
print(" FIRST_TURN: ", nbr[self.front_of_me_idx+9])
delay = .1
self.state = SteveState.FORWARD
elif self.state == SteveState.FORWARD:
print(" FORWARD: ", nbr[self.front_of_me_idx+9])
if self.isInTrap(nbr):
print(" FORWARD: trap")
self.agent_host.sendCommand( "jumpmove 1" )
if nbr[self.front_of_me_idx+9] == "air" and nbr[self.front_of_me_idx] == "dirt":
self.agent_host.sendCommand( "move 1" )
print(" FORWARD: move 1 ", nbr[self.front_of_me_idx+9])
elif nbr[self.front_of_me_idx+9] == "flowing_lava" and nbr[self.left_of_me_idx+9] == "flowing_lava":
self.state = SteveState.LVL_DOWN
print(" FORWARD: LAVA 1 ", nbr[self.front_of_me_idx+9])
elif nbr[self.front_of_me_idx+9] == "dirt" :
print(" FORWARD: turn 1 ", nbr[self.front_of_me_idx+9])
self.agent_host.sendCommand( "turn 1" )
delay = .23
self.state = SteveState.TURNING
elif nbr[self.front_of_me_idx+9] == "red_flower" :
print(" FORWARD: front of me red_flower look ", nbr[self.front_of_me_idx+9])
self.state = SteveState.FLOWER
elif nbr[4+9] == "red_flower" :
print(" FORWARD: standing on red_flower look ", nbr[self.front_of_me_idx+9])
self.state = SteveState.FLOWER
else:
self.agent_host.sendCommand( "jumpmove -1" )
print(" Hoppá ")
self.state = SteveState.LVL_DOWN
pass
elif self.state == SteveState.TURNING:
print(" TURNING: ", nbr[self.front_of_me_idx+9])
if self.isInTrap(nbr):
print(" TURNING: trap ")
self.agent_host.sendCommand( "jumpmove 1" )
if nbr[self.front_of_me_idx+9] == "air":
self.state = SteveState.FORWARD
elif nbr[self.front_of_me_idx+9] == "red_flower":
print(" TURNING: red_flower tamadasra felkeszul", nbr[self.front_of_me_idx+9])
self.agent_host.sendCommand( "move 1" )
self.state = SteveState.FLOWER
else:
pass
# TODO ez előttig van csak átgondolva, itt tart a greenpill
elif self.state == SteveState.FLOWER:
print(" FLOWER: ", nbr[self.front_of_me_idx+9])
print(" FLOWER: *** attack *** ")
self.agent_host.sendCommand( "attack 1" )
self.lvl = self.y
delay = .6
self.state = SteveState.PICK_UP
elif self.state == SteveState.PICK_UP:
print(" PICK_UP: ", nbr[self.front_of_me_idx+9])
if nbr[self.front_of_me_idx+9] == "red_flower":
print(" PICK_UP: *** new attack *** ", nbr[self.front_of_me_idx+9])
self.agent_host.sendCommand( "jump 1" )
self.agent_host.sendCommand( "attack 1" )
return delay
if self.isInTrap(nbr):
print(" PICK_UP: trap ")
if self.trapc < 1:
print(" PICK_UP: trap if")
self.agent_host.sendCommand( "jumpmove -1" )
self.agent_host.sendCommand( "jumpmove -1" )
self.trapc = self.trapc + 1
else:
print(" PICK_UP: trap else")
self.agent_host.sendCommand( "jumpmove 1" )
self.agent_host.sendCommand( "jumpmove 1" )
self.trapc = 0
delay = .3
else:
self.trapc = 0
if flower:
print(" PICK_UP: *** PICKED *** ", self.y, " #RF ", self.nof_red_flower)
# self.lvl = self.y
self.state = SteveState.LVL_DOWN
else:
self.agent_host.sendCommand( "move 1" )
self.state = SteveState.FORWARD
delay = .24
print(" PICK_UP: WHAT CAN WE DO? ")
elif self.state == SteveState.LVL_DOWN:
delay = .14
print(" LVL_DOWN: ", nbr[self.front_of_me_idx+9], " ", self.lvl, " ", self.y)
if self.isInTrap(nbr):
print(" LVL_DOWN: trap ")
if nbr[self.front_of_me_idx+18] == "dirt" and nbr[self.left_of_me_idx+18] == "dirt":
print(" LVL_DOWN: trap elotte 2x dirt")
self.agent_host.sendCommand( "turn 1" )
self.agent_host.sendCommand( "jumpmove 1" )
else:
if self.lvl != self.y + 1 :
print(" LVL_DOWN: turn move turn ", nbr[self.front_of_me_idx+9])
if self.lvlc < 2:
print(" LVL_DOWN: lvlc if")
self.agent_host.sendCommand( "move 1" )
self.agent_host.sendCommand( "move 1" )
self.agent_host.sendCommand( "strafe 1" )
self.agent_host.sendCommand( "strafe 1" )
delay = .4
self.lvlc = self.lvlc + 1
else:
print(" LVL_DOWN: lvlc else")
self.agent_host.sendCommand( "move -1" )
self.agent_host.sendCommand( "move -1" )
self.agent_host.sendCommand( "strafe 1" )
self.agent_host.sendCommand( "strafe 1" )
delay = .4
self.lvlc = 0
else:
self.lvlc = 0
print(" LVL_DOWN: not trap ")
self.state = SteveState.FIRST_TURN
else:
pass
return delay
num_repeats = 1
for ii in range(num_repeats):
    my_mission_record = MalmoPython.MissionRecordSpec()
    # Attempt to start a mission, retrying with a 2s backoff.
    max_retries = 6
    for retry in range(max_retries):
        try:
            agent_host.startMission( my_mission, my_mission_record )
            break
        except RuntimeError as e:
            if retry == max_retries - 1:
                print("Error starting mission:", e)
                exit(1)
            else:
                print("Attempting to start the mission:")
                time.sleep(2)
    # Loop until mission starts, showing a spinner while waiting.
    print(" Waiting for the mission to start")
    world_state = agent_host.getWorldState()
    while not world_state.has_mission_begun:
        print("\r"+hg.cursor(), end="")
        time.sleep(0.15)
        world_state = agent_host.getWorldState()
        for error in world_state.errors:
            print("Error:",error.text)
    print("NB4tf4i Red Flower Hell running\n")
    # Run the agent's state machine until the mission ends.
    steve = Steve(agent_host)
    steve.run()
    print("Number of flowers: "+ str(steve.nof_red_flower))
    time.sleep(3)
print("Mission ended")
# Mission has ended.
|
# Generated by Django 2.1.2 on 2018-10-20 19:04
from django.db import migrations, models


class Migration(migrations.Migration):
    """Alter SnakeIdiom.idiom into a 140-char primary-key CharField."""

    dependencies = [
        ('api', '0010_snakeidiom'),
    ]

    operations = [
        migrations.AlterField(
            model_name='snakeidiom',
            name='idiom',
            field=models.CharField(help_text='A saying about a snake.', max_length=140, primary_key=True, serialize=False),
        ),
    ]
|
// Auto-generated caniuse-lite browser-support data for
// Element.insertAdjacentElement() & Element.insertAdjacentText(); do not edit by hand.
module.exports={A:{A:{"1":"J D E F A B","16":"hB"},B:{"1":"C K L G M N O R S T U V W X Y Z P a H b"},C:{"1":"6 7 8 9 AB BB CB DB EB FB GB aB HB bB Q IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB cB R S T jB U V W X Y Z P a H b","2":"0 1 2 3 4 5 iB ZB I c J D E F A B C K L G M N O d e f g h i j k l m n o p q r s t u v w x y z kB lB"},D:{"1":"0 1 2 3 4 5 6 7 8 9 I c J D E F A B C K L G M N O d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB aB HB bB Q IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB cB R S T U V W X Y Z P a H b mB nB oB"},E:{"1":"I c J D E F A B C K L G pB dB qB rB sB tB eB XB YB uB vB wB"},F:{"1":"0 1 2 3 4 5 6 7 8 9 B C G M N O d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB HB Q IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB xB yB zB 0B XB fB 1B YB","16":"F"},G:{"1":"E dB 2B gB 3B 4B 5B 6B 7B 8B 9B AC BC CC DC EC FC GC HC IC JC KC"},H:{"1":"LC"},I:{"1":"ZB I H OC PC gB QC RC","16":"MC NC"},J:{"1":"D A"},K:{"1":"A B C Q XB fB YB"},L:{"1":"H"},M:{"1":"P"},N:{"1":"A B"},O:{"1":"SC"},P:{"1":"I TC UC VC WC XC eB YC ZC aC bC"},Q:{"1":"cC"},R:{"1":"dC"},S:{"1":"eC"}},B:1,C:"Element.insertAdjacentElement() & Element.insertAdjacentText()"};
|
// import '../styles/grid.css';
// import '../styles/listings.css';
// import '../styles/content-area.css';
// import '../styles/home.css'
// import { UserContext } from '../context/UserProvider'
// import axios from 'axios'
// export class ListingsRental extends Component {
// constructor() {
// super()
// this.loopListingsRental = this.loopListingsRental.bind(this)
// }
// loopListingsRental() {
// const { listingsRentalData } = this.props
// return listingsRentalData.map((listing, index) => {
// if (this.props.globalState.view === 'box') {
// // THIS IS THE BOX VIEW
// return (
// <div className="col-md-3" key={index}>
// <div className="listing">
// <div className="listing-img" style={{ background: `url("${listing.image}") no-repeat center center` }}>
// <span className="address">{listing.address}</span>
// <div className="details">
// <div className="col-md-3">
// <div className="user-img"></div>
// </div>
// <div className="col-md-9">
// <div className="user-details">
// <span className="user-name">Nina Jones</span>
// <span className="post-date">05/05/2018</span>
// </div>
// <div className="listing-details">
// <div className="floor-space">
// <i className="fa fa-square-o" aria-hidden="true"></i>
// <span>{listing.floorSpace} ft²</span>
// </div>
// <div className="bedrooms">
// <i className="fa fa-bed" aria-hidden="true"></i>
// <span>{listing.rooms} bedroom{listing.rooms > 1 ? 's' : ''}</span>
// </div>
// </div>
// <div className="view-btn">
// View Listing
// </div>
// </div>
// </div>
// </div>
// <div className="bottom-info">
// <span className="price">${listing.price}</span>
// <span className="location"><i className="fa fa-map-marker" aria-hidden="true"></i>{listing.city}, {listing.state}</span>
// </div>
// </div>
// </div>
// )
// } else {
// // THIS IS THE LONG VIEW
// return (
// <div className="col-md-12 col-lg-6" key={index}>
// <div className="listing">
// <div className="listing-img" style={{ background: `url("${listing.image}") no-repeat center center` }}>
// <span className="address">{listing.address}</span>
// <div className="details">
// <div className="col-md-3">
// <div className="user-img"></div>
// </div>
// <div className="col-md-9">
// <div className="user-details">
// <span className="user-name">Nina Jones</span>
// <span className="post-date">05/05/2018</span>
// </div>
// <div className="listing-details">
// <div className="floor-space">
// <i className="fa fa-square-o" aria-hidden="true"></i>
// <span>{listing.floorSpace} ft²</span>
// </div>
// <div className="bedrooms">
// <i className="fa fa-bed" aria-hidden="true"></i>
// <span>{listing.rooms} bedroom{listing.rooms > 1 ? 's' : ''}</span>
// </div>
// </div>
// <div className="view-btn">
// View Listing
// </div>
// </div>
// </div>
// </div>
// <div className="bottom-info">
// <span className="price">${listing.price}</span>
// <span className="location"><i className="fa fa-map-marker" aria-hidden="true"></i>{listing.city}, {listing.state}</span>
// </div>
// </div>
// </div>
// )
// }
// })
// }
// render() {
// return (
// <section id="listings">
// <section className="search-area">
// <input type="text" name="search" placeholder="Search here..." onChange={this.props.change} />
// </section>
// <section className="sortby-area">
// <div className="results">{this.props.globalState.filteredData.length} results found</div>
// <div className="sort-options">
// <select name="sortby" className="sortby" onChange={this.props.change}>
// <option value="price-asc">Lowest Price</option>
// <option value="price-dsc">Highest Price</option>
// </select>
// <div className="view">
// <i className="fa fa-th-list" aria-hidden="true" onClick={this.props.changeView.bind(null, 'long')} ></i>
// <i className="fa fa-th" aria-hidden="true" onClick={this.props.changeView.bind(null, 'box')} ></i>
// </div>
// </div>
// </section>
// <section className="listings-results">
// {this.loopListingsRental()}
// </section>
// <section id="pagination">
// <ul className="pages">
// <li>Prev</li>
// <li>1</li>
// <li className="active">2</li>
// <li>3</li>
// <li>4</li>
// <li>5</li>
// <li>Next</li>
// </ul>
// </section>
// </section>
// )
// }
// }
// export default ListingsRental
|
"""
Sample script that runs netcdf4 functions logged by recipy.
"""
# Copyright (c) 2016, 2018 University of Edinburgh and Netherlands eScience
# Center
from __future__ import (nested_scopes, generators, division,
absolute_import, with_statement,
print_function, unicode_literals)
import recipy
import os
import sys
import xarray
import numpy as np
from integration_test.packages.base import Base
class XarraySample(Base):
    """
    Sample script that runs xarray functions logged by recipy.

    Expects a data/xarray directory, co-located with this file, containing:

    * image.tiff
    * soilPropertiesRhineMeuse30min.nc and topoPropertiesRhineMeuse30min.nc:
      test netcdf files (taken with permission from
      https://github.com/UU-Hydro/PCR-GLOBWB_input_example, see
      https://www.geosci-model-dev.net/11/2429/2018/gmd-11-2429-2018.html
      for more information about this data)
    * data_array.nc: netcdf file containing a dataarray (instead of a dataset)
    """

    def __init__(self):
        """Set data_dir to the path holding this class's data files."""
        Base.__init__(self)
        self.data_dir = os.path.join(self.current_dir, "data", "xarray")

    def _data_file(self, name):
        """Absolute path of a file inside data_dir."""
        return os.path.join(self.data_dir, name)

    def open_dataset(self):
        """Read a netcdf file via xarray.open_dataset."""
        xarray.open_dataset(self._data_file("soilPropertiesRhineMeuse30min.nc"))

    def open_mfdataset_glob(self):
        """Read multiple netcdf files via xarray.open_mfdataset with a glob."""
        xarray.open_mfdataset(self._data_file("*PropertiesRhineMeuse30min.nc"))

    def open_mfdataset_list(self):
        """Read multiple netcdf files via xarray.open_mfdataset from a list."""
        names = ('soilPropertiesRhineMeuse30min.nc',
                 'topoPropertiesRhineMeuse30min.nc')
        xarray.open_mfdataset([self._data_file(f) for f in names])

    def open_rasterio(self):
        """Read image.tiff via xarray.open_rasterio."""
        xarray.open_rasterio(self._data_file("image.tiff"))

    def open_dataarray(self):
        """Read a netcdf file via xarray.open_dataarray."""
        xarray.open_dataarray(self._data_file("data_array.nc"))

    def dataset_to_netcdf(self):
        """Write (then delete) a netcdf file via xarray.Dataset.to_netcdf."""
        arr = xarray.DataArray(np.random.randn(2, 3))
        ds = xarray.Dataset({'foo': arr, 'bar': ('x', [1, 2]), 'baz': np.pi})
        target = self._data_file("data.nc")
        ds.to_netcdf(target)
        os.remove(target)

    def dataarray_to_netcdf(self):
        """Write (then delete) a netcdf file via xarray.DataArray.to_netcdf."""
        arr = xarray.DataArray(np.random.randn(2, 3))
        target = self._data_file("data.nc")
        arr.to_netcdf(target)
        os.remove(target)

    def save_mfdataset(self):
        """Write (then delete) multiple netcdf files via xarray.save_mfdataset."""
        datasets = [
            xarray.Dataset({'foo': xarray.DataArray(np.random.randn(2, 3)),
                            'bar': ('x', [1, 2])})
            for _ in range(2)
        ]
        targets = [self._data_file(f) for f in ('data1.nc', 'data2.nc')]
        xarray.save_mfdataset(datasets, targets)
        for target in targets:
            os.remove(target)
# Entry point: Base.invoke presumably dispatches on sys.argv -- confirm in Base.
if __name__ == "__main__":
    XarraySample().invoke(sys.argv)
|
class CircularBuffer:
    """Fixed-capacity ring buffer with linearly interpolated oversampled reads."""

    def __init__(self, capacity=16, oversample=4):
        self._buffer = [0] * capacity
        self._index = 0
        self._capacity = capacity
        self._oversample = oversample

    def get(self, index):
        """Element at index, wrapping around the capacity."""
        return self._buffer[index % self._capacity]

    def push(self, value):
        """Write value at the head and advance it one slot (wrapping)."""
        head = self._index
        self._buffer[head] = value
        self._index = (head + 1) % self._capacity

    def position(self):
        """Current write-head index."""
        return self._index

    def range(self):
        """Number of addressable oversampled positions."""
        return self._capacity * self._oversample

    def sample(self, position):
        """Interpolate between the two buffer slots bracketing position."""
        slot_lo = int(position / self._oversample) % self._capacity
        slot_hi = (slot_lo + 1) % self._capacity
        frac = float(position % self._oversample) / self._oversample
        lo, hi = self._buffer[slot_lo], self._buffer[slot_hi]
        # Same arithmetic form as a standard lerp: hi*t + lo*(1-t).
        return (hi * frac) + (lo * (1.0 - frac))
|
// Tailwind CSS configuration.
// NOTE(review): `mode: 'jit'` and `purge` are Tailwind v2.x options -- confirm
// the installed Tailwind version before migrating to v3's `content` key.
module.exports = {
  mode: 'jit',
  // Files scanned for class names when removing unused styles.
  purge: ['./pages/**/*.tsx', './components/**/*.tsx'],
  darkMode: false,
  theme: {
    extend: {},
  },
  variants: {
    extend: {},
  },
  plugins: [],
  corePlugins: {
    // Disable Tailwind's base/reset styles.
    preflight: false,
  },
};
|
import sys
sys.path.append('../G26/Utils')
from Error import *
def compararTiposBin(arg1, arg2, sign):
    """Type-check a binary arithmetic operation between two operands.

    Each operand is expected to expose a .type attribute ('integer',
    'float', ...); the right operand of '/' must also expose .val.

    Returns False when the operation is valid (both operands numeric,
    and for '/' a non-zero divisor); otherwise returns an Error
    describing the problem. Unrecognized signs return False.
    """
    # The original wrapped attribute access in bare try/except blocks that
    # swallowed failures and could leave locals unbound (NameError on use);
    # the operands are used directly here, so a malformed operand still
    # fails fast, just with a clearer AttributeError.
    left = arg1
    right = arg2
    # Operation names used in the error messages, keyed by sign.
    names = {'+': 'MAS', '-': 'MENOS', '/': 'DIVISION',
             '*': 'MULTIPLICACION', '%': 'PORCENTAJE', '^': 'POTENCIA'}
    if sign not in names:
        return False
    numeric = ('integer', 'float')
    if left.type in numeric and right.type in numeric:
        # Division additionally rejects a zero divisor.
        if sign == '/' and right.val == 0:
            return Error('Semántico', 'No es posible la division con 0', 0, 0)
        return False
    return Error('Semántico', 'Error de tipos en ' + names[sign] + ', no se puede operar ' + left.type + ' con ' + right.type, 0, 0)
|
"""
This migration script eliminates all of the tables that were used for the 1st version of the
library templates where template fields and contents were each stored as a separate table row
in various library item tables. All of these tables are dropped in this script, eliminating all
existing template data. A total of 14 existing tables are dropped.
We're now basing library templates on forms, so field contents are
stored as a jsonified list in the form_values table. This script introduces the following 3
new association tables:
1) library_info_association
2) library_folder_info_association
3) library_dataset_dataset_info_association
If using mysql, this script will throw an (OperationalError) exception due to a long index name on
the library_dataset_dataset_info_association table, which is OK because the script creates an index
with a shortened name.
"""
from __future__ import print_function
import logging
import sys
from sqlalchemy import Column, ForeignKey, Index, Integer, MetaData, Table
from sqlalchemy.exc import NoSuchTableError
# Log to stdout with a simple name/level/time format.
log = logging.getLogger( __name__ )
log.setLevel(logging.DEBUG)
handler = logging.StreamHandler( sys.stdout )
format = "%(name)s %(levelname)s %(asctime)s %(message)s"
formatter = logging.Formatter( format )
handler.setFormatter( formatter )
log.addHandler( handler )

metadata = MetaData()

# New association tables linking library items to a form definition and
# its jsonified field values (see module docstring).
LibraryInfoAssociation_table = Table( 'library_info_association', metadata,
    Column( "id", Integer, primary_key=True),
    Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ),
    Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )

LibraryFolderInfoAssociation_table = Table( 'library_folder_info_association', metadata,
    Column( "id", Integer, primary_key=True),
    Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
    Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )

# The long FK column name below produces an over-long index name on mysql;
# upgrade() creates a shortened index for that engine.
LibraryDatasetDatasetInfoAssociation_table = Table( 'library_dataset_dataset_info_association', metadata,
    Column( "id", Integer, primary_key=True),
    Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True, index=True ),
    Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )
def upgrade(migrate_engine):
    """Drop all legacy library_item_info* tables (discarding their data) and
    create the three new form-based association tables.

    NOTE(review): the original code reassigned the module-level
    Library*InfoAssociation_table names inside this function while dropping
    the old tables, so the subsequent .create() calls ran on reflected
    (already-dropped) tables or None instead of the new module-level
    definitions. The create phase now uses the module-level Table objects.
    """
    metadata.bind = migrate_engine
    print(__doc__)
    # Load existing tables
    metadata.reflect()

    def _drop_table(table_name):
        """Reflect a table by name and drop it; tolerate missing tables."""
        try:
            table = Table( table_name, metadata, autoload=True )
        except NoSuchTableError:
            table = None
            log.debug( "Failed loading table %s" % table_name )
        try:
            table.drop()
        except Exception:
            log.exception("Dropping %s table failed." % table_name)

    # Drop all of the original library_item_info tables.
    # NOTE: all existing library item info template data is eliminated here.
    for name in ("library_item_info_permissions",
                 "library_item_info_template_permissions",
                 "library_item_info_element",
                 "library_item_info_template_element",
                 "library_info_template_association",
                 "library_folder_info_template_association",
                 "library_dataset_info_template_association",
                 "library_dataset_dataset_info_template_association",
                 "library_info_association",
                 "library_folder_info_association",
                 "library_dataset_info_association",
                 "library_dataset_dataset_info_association",
                 "library_item_info",
                 "library_item_info_template"):
        _drop_table(name)

    # Create the new association tables from their module-level definitions.
    for table in (LibraryInfoAssociation_table,
                  LibraryFolderInfoAssociation_table,
                  LibraryDatasetDatasetInfoAssociation_table):
        try:
            table.create()
        except Exception:
            log.exception("Creating %s table failed." % table.name)

    # Fix index on LibraryDatasetDatasetInfoAssociation_table for mysql:
    # the auto-generated index name is too long, so add a shortened one.
    if migrate_engine.name == 'mysql':
        # Load existing tables
        metadata.reflect()
        i = Index( "ix_lddaia_ldda_id", LibraryDatasetDatasetInfoAssociation_table.c.library_dataset_dataset_association_id )
        try:
            i.create()
        except Exception:
            log.exception("Adding index 'ix_lddaia_ldda_id' to table 'library_dataset_dataset_info_association' table failed.")
def downgrade(migrate_engine):
    """No-op: the dropped legacy tables and their data cannot be restored."""
    metadata.bind = migrate_engine
    log.debug( "Downgrade is not possible." )
|
/* @flow */
import { CharacterMetadata, ContentBlock, genKey, Entity } from 'draft-js';
import { Map, List, OrderedMap, OrderedSet } from 'immutable';
import getSafeBodyFromHTML from './getSafeBodyFromHTML';
import {
createTextChunk,
getSoftNewlineChunk,
getEmptyChunk,
getBlockDividerChunk,
getFirstBlockChunk,
getAtomicBlockChunk,
joinChunks,
} from './chunkBuilder';
import getBlockTypeForTag from './getBlockTypeForTag';
import processInlineTag from './processInlineTag';
import getBlockData from './getBlockData';
import getEntityId from './getEntityId';
const SPACE = ' ';
// NOTE(review): the pattern appears to be a non-breaking space (U+00A0),
// used to normalize &nbsp; characters to regular spaces -- confirm encoding.
const REGEX_NBSP = new RegExp(' ', 'g');
// Module-level flag: whether the next generated block is the document's first.
let firstBlock = true;
type CustomChunkGenerator = (nodeName: string, node: HTMLElement) => ?{type: string, mutability: string, data: {}};
function genFragment(
node: Object,
inlineStyle: OrderedSet,
depth: number,
lastList: string,
inEntity: number,
customChunkGenerator: ?CustomChunkGenerator,
): Object {
const nodeName = node.nodeName.toLowerCase();
if (customChunkGenerator) {
const value = customChunkGenerator(nodeName, node);
if (value) {
const entityId = Entity.__create(
value.type,
value.mutability,
value.data || {},
);
return { chunk: getAtomicBlockChunk(entityId) };
}
}
if (nodeName === 'div' &&
node instanceof HTMLDivElement
) {
const entityConfig = {};
entityConfig.ctaTitle = node.getElementsByTagName('H3')[0].innerHTML;
entityConfig.ctaText = node.getElementsByTagName('P')[0].innerHTML;
entityConfig.ctaButtonText = node.getElementsByTagName('A')[0].innerHTML;
entityConfig.url = node.getElementsByTagName('A')[0].getAttribute('href');
entityConfig.targetOption = node.getElementsByTagName('A')[0].getAttribute('target');
const entityId = Entity.__create(
'CTA_BOX',
'MUTABLE',
entityConfig,
);
return { chunk: getAtomicBlockChunk(entityId) };
}
if (nodeName === 'a' &&
node instanceof HTMLAnchorElement &&
node.id === 'ctaimage-root'
) {
const image = node.getElementsByTagName('img')[0];
const entityConfig = {};
entityConfig.src = image.getAttribute ? image.getAttribute('src') || image.src : image.src;
entityConfig.alt = image.alt;
entityConfig.height = image.style.height;
entityConfig.width = image.style.width;
if (image.style.float) {
entityConfig.alignment = image.style.float;
}
entityConfig.linkUrl = node.href;
const entityId = Entity.__create(
'CTA_IMAGE',
'MUTABLE',
entityConfig,
);
return { chunk: getAtomicBlockChunk(entityId) };
}
if (nodeName === '#text' && node.textContent !== '\n') {
return createTextChunk(node, inlineStyle, inEntity);
}
if (nodeName === 'br') {
return { chunk: getSoftNewlineChunk() };
}
if (
nodeName === 'img' &&
node instanceof HTMLImageElement
) {
const entityConfig = {};
entityConfig.src = node.getAttribute ? node.getAttribute('src') || node.src : node.src;
entityConfig.alt = node.alt;
entityConfig.height = node.style.height;
entityConfig.width = node.style.width;
if (node.style.float) {
entityConfig.alignment = node.style.float;
}
const entityId = Entity.__create(
'IMAGE',
'MUTABLE',
entityConfig,
);
return { chunk: getAtomicBlockChunk(entityId) };
}
if (
nodeName === 'video' &&
node instanceof HTMLVideoElement
) {
const entityConfig = {};
entityConfig.controls = true;
entityConfig.src = node.getAttribute ? node.getAttribute('src') || node.src : node.src;
entityConfig.alt = node.alt;
entityConfig.height = node.style.height;
entityConfig.width = node.style.width;
if (node.style.float) {
entityConfig.alignment = node.style.float;
}
const entityId = Entity.__create(
'VIDEO',
'MUTABLE',
entityConfig,
);
return { chunk: getAtomicBlockChunk(entityId) };
}
if (
nodeName === 'iframe' &&
node instanceof HTMLIFrameElement
) {
const entityConfig = {};
entityConfig.src = node.getAttribute ? node.getAttribute('src') || node.src : node.src;
entityConfig.height = node.height;
entityConfig.width = node.width;
const entityId = Entity.__create(
'EMBEDDED_LINK',
'MUTABLE',
entityConfig,
);
return { chunk: getAtomicBlockChunk(entityId) };
}
const blockType = getBlockTypeForTag(nodeName, lastList);
let chunk;
if (blockType) {
if (nodeName === 'ul' || nodeName === 'ol') {
lastList = nodeName;
depth += 1;
} else {
if (
blockType !== 'unordered-list-item' &&
blockType !== 'ordered-list-item'
) {
lastList = '';
depth = -1;
}
if (!firstBlock) {
chunk = getBlockDividerChunk(
blockType,
depth,
getBlockData(node)
);
} else {
chunk = getFirstBlockChunk(
blockType,
getBlockData(node)
);
firstBlock = false;
}
}
}
if (!chunk) {
chunk = getEmptyChunk();
}
inlineStyle = processInlineTag(nodeName, node, inlineStyle);
let child = node.firstChild;
while (child) {
const entityId = getEntityId(child);
const { chunk: generatedChunk } = genFragment(child, inlineStyle, depth, lastList, (entityId || inEntity), customChunkGenerator);
chunk = joinChunks(chunk, generatedChunk);
const sibling = child.nextSibling;
child = sibling;
}
return { chunk };
}
function getChunkForHTML(html: string, customChunkGenerator: ?CustomChunkGenerator): Object {
const sanitizedHtml = html.trim().replace(REGEX_NBSP, SPACE);
const safeBody = getSafeBodyFromHTML(sanitizedHtml);
if (!safeBody) {
return null;
}
firstBlock = true;
const { chunk } = genFragment(safeBody, new OrderedSet(), -1, '', undefined, customChunkGenerator);
return { chunk };
}
export default function htmlToDraft(html: string, customChunkGenerator: ?CustomChunkGenerator): Object {
const chunkData = getChunkForHTML(html, customChunkGenerator);
if (chunkData) {
const { chunk } = chunkData;
let entityMap = new OrderedMap({});
chunk.entities && chunk.entities.forEach(entity => {
if (entity) {
entityMap = entityMap.set(entity, Entity.__get(entity));
}
});
let start = 0;
return {
contentBlocks: chunk.text.split('\r')
.map(
(textBlock, ii) => {
const end = start + textBlock.length;
const inlines = chunk && chunk.inlines.slice(start, end);
const entities = chunk && chunk.entities.slice(start, end);
const characterList = new List(
inlines.map((style, index) => {
const data = { style, entity: null };
if (entities[index]) {
data.entity = entities[index];
}
return CharacterMetadata.create(data);
}),
);
start = end;
return new ContentBlock({
key: genKey(),
type: (chunk && chunk.blocks[ii] && chunk.blocks[ii].type) || 'unstyled',
depth: chunk && chunk.blocks[ii] && chunk.blocks[ii].depth,
data: (chunk && chunk.blocks[ii] && chunk.blocks[ii].data) || new Map({}),
text: textBlock,
characterList,
});
},
),
entityMap,
};
}
return null;
}
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=protected-access
from knack.util import CLIError
from knack.log import get_logger
from azure.cli.core.commands.validators import validate_key_value_pairs
from azure.cli.core.profiles import ResourceType, get_sdk
from azure.cli.command_modules.storage._client_factory import (get_storage_data_service_client,
blob_data_service_factory,
file_data_service_factory,
storage_client_factory)
from azure.cli.command_modules.storage.util import glob_files_locally, guess_content_type
from azure.cli.command_modules.storage.sdkutil import get_table_data_type
from azure.cli.command_modules.storage.url_quote_util import encode_for_url
from azure.cli.command_modules.storage.oauth_token_util import TokenUpdater
# Maps the CLI's friendly key designations to the ARM key names returned by list_keys.
storage_account_key_options = {'primary': 'key1', 'secondary': 'key2'}
logger = get_logger(__name__)
# Utilities
# pylint: disable=inconsistent-return-statements,too-many-lines
def _query_account_key(cli_ctx, account_name):
    """Query the storage account key. This is used when the customer doesn't offer account key but name."""
    resource_group, scf = _query_account_rg(cli_ctx, account_name)
    t_storage_account_keys = get_sdk(
        cli_ctx, ResourceType.MGMT_STORAGE, 'models.storage_account_keys#StorageAccountKeys')
    # Keep storage keys out of debug output.
    scf.config.enable_http_logger = False
    logger.debug('Disable HTTP logging to avoid having storage keys in debug logs')
    keys_result = scf.storage_accounts.list_keys(resource_group, account_name)
    if t_storage_account_keys:
        return keys_result.key1
    # of type: models.storage_account_list_keys_result#StorageAccountListKeysResult
    return keys_result.keys[0].value  # pylint: disable=no-member
def _query_account_rg(cli_ctx, account_name):
    """Return (resource_group, storage_client) for the named account; raise ValueError if it is absent."""
    scf = storage_client_factory(cli_ctx)
    for account in scf.storage_accounts.list():
        if account.name == account_name:
            from msrestazure.tools import parse_resource_id
            return parse_resource_id(account.id)['resource_group'], scf
    raise ValueError("Storage account '{}' not found.".format(account_name))
def _create_token_credential(cli_ctx):
    """Build a data-plane TokenCredential whose token is refreshed until the CLI command finishes."""
    from knack.cli import EVENT_CLI_POST_EXECUTE
    t_token_credential = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'common#TokenCredential')
    credential = t_token_credential()
    refresher = TokenUpdater(credential, cli_ctx)

    def _stop_refresh(_, **__):
        # Stop the background refresh timer once the command has executed.
        refresher.cancel()

    cli_ctx.register_event(EVENT_CLI_POST_EXECUTE, _stop_refresh)
    return credential
# region PARAMETER VALIDATORS
def parse_storage_account(cmd, namespace):
    """Parse storage account which can be either account name or account id"""
    from msrestazure.tools import parse_resource_id, is_valid_resource_id
    account = namespace.account_name
    if account and is_valid_resource_id(account):
        # Full ARM resource id supplied: split it into group + name.
        parts = parse_resource_id(account)
        namespace.resource_group_name = parts['resource_group']
        namespace.account_name = parts['name']
    elif account and not namespace.resource_group_name:
        # Bare name supplied: look up which resource group owns it.
        namespace.resource_group_name = _query_account_rg(cmd.cli_ctx, account)[0]
def process_resource_group(cmd, namespace):
    """Processes the resource group parameter from the account name"""
    if namespace.account_name and not namespace.resource_group_name:
        resource_group, _ = _query_account_rg(cmd.cli_ctx, namespace.account_name)
        namespace.resource_group_name = resource_group
def validate_table_payload_format(cmd, namespace):
    """Translate the friendly --accept value into the SDK's TablePayloadFormat constant."""
    t_table_payload = get_table_data_type(cmd.cli_ctx, 'table', 'TablePayloadFormat')
    if namespace.accept:
        accept = namespace.accept.lower()
        namespace.accept = {
            'none': t_table_payload.JSON_NO_METADATA,
            'minimal': t_table_payload.JSON_MINIMAL_METADATA,
            'full': t_table_payload.JSON_FULL_METADATA,
        }[accept]
def validate_bypass(namespace):
    """Normalize --bypass: collapse a list of services into a comma-separated string."""
    bypass = namespace.bypass
    if bypass:
        namespace.bypass = ', '.join(bypass) if isinstance(bypass, list) else bypass
def validate_client_parameters(cmd, namespace):
    """ Retrieves storage connection parameters from environment variables and parses out connection string into
    account name and key """
    n = namespace

    def get_config_value(section, key, default):
        # Fall back to the CLI configuration file (which itself shadows env vars).
        return cmd.cli_ctx.config.get(section, key, default)

    if hasattr(n, 'auth_mode'):
        auth_mode = n.auth_mode or get_config_value('storage', 'auth_mode', None)
        del n.auth_mode
        if not n.account_name:
            n.account_name = get_config_value('storage', 'account', None)
        if auth_mode == 'login':
            # AAD-based auth: build a self-refreshing token credential.
            n.token_credential = _create_token_credential(cmd.cli_ctx)
    if hasattr(n, 'token_credential') and n.token_credential:
        # give warning if there are account key args being ignored
        account_key_args = [n.account_key and "--account-key", n.sas_token and "--sas-token",
                            n.connection_string and "--connection-string"]
        account_key_args = [arg for arg in account_key_args if arg]
        if account_key_args:
            # BUGFIX: separator was ' ,' which rendered as e.g. "--account-key ,--sas-token".
            logger.warning('In "login" auth mode, the following arguments are ignored: %s',
                           ', '.join(account_key_args))
        return
    if not n.connection_string:
        n.connection_string = get_config_value('storage', 'connection_string', None)
    # if connection string supplied or in environment variables, extract account key and name
    if n.connection_string:
        conn_dict = validate_key_value_pairs(n.connection_string)
        n.account_name = conn_dict.get('AccountName')
        n.account_key = conn_dict.get('AccountKey')
        n.sas_token = conn_dict.get('SharedAccessSignature')
    # otherwise, simply try to retrieve the remaining variables from environment variables
    if not n.account_name:
        n.account_name = get_config_value('storage', 'account', None)
    if not n.account_key:
        n.account_key = get_config_value('storage', 'key', None)
    if not n.sas_token:
        n.sas_token = get_config_value('storage', 'sas_token', None)
    # strip the '?' from sas token. the portal and command line are returns sas token in different
    # forms
    if n.sas_token:
        n.sas_token = n.sas_token.lstrip('?')
    # if account name is specified but no key, attempt to query
    if n.account_name and not n.account_key and not n.sas_token:
        logger.warning('No connection string, account key or sas token found, we will query account keys for your '
                       'storage account. Please try to use --auth-mode login or provide one of the following parameters'
                       ': connection string, account key or sas token for your storage account.')
        n.account_key = _query_account_key(cmd.cli_ctx, n.account_name)
def process_blob_source_uri(cmd, namespace):
    """
    Validate the parameters referenced to a blob source and create the source URI from them.

    Pops the source_* arguments off the namespace and, when --source-uri was not given,
    synthesizes namespace.copy_source as a full https blob URI (optionally with a SAS
    and/or snapshot query string).
    """
    from .util import create_short_lived_blob_sas
    usage_string = \
        'Invalid usage: {}. Supply only one of the following argument sets to specify source:' \
        '\n\t --source-uri' \
        '\n\tOR --source-container --source-blob --source-snapshot [--source-account-name & sas] ' \
        '\n\tOR --source-container --source-blob --source-snapshot [--source-account-name & key] '
    ns = vars(namespace)
    # source as blob
    container = ns.pop('source_container', None)
    blob = ns.pop('source_blob', None)
    snapshot = ns.pop('source_snapshot', None)
    # source credential clues
    source_account_name = ns.pop('source_account_name', None)
    source_account_key = ns.pop('source_account_key', None)
    sas = ns.pop('source_sas', None)
    # source in the form of an uri
    uri = ns.get('copy_source', None)
    if uri:
        # An explicit URI is mutually exclusive with every other source argument.
        if any([container, blob, sas, snapshot, source_account_name, source_account_key]):
            raise ValueError(usage_string.format('Unused parameters are given in addition to the '
                                                 'source URI'))
        # simplest scenario--no further processing necessary
        return
    validate_client_parameters(cmd, namespace) # must run first to resolve storage account
    # determine if the copy will happen in the same storage account
    if not source_account_name and source_account_key:
        raise ValueError(usage_string.format('Source account key is given but account name is not'))
    if not source_account_name and not source_account_key:
        # neither source account name or key is given, assume that user intends to copy blob in
        # the same account
        source_account_name = ns.get('account_name', None)
        source_account_key = ns.get('account_key', None)
    elif source_account_name and not source_account_key:
        if source_account_name == ns.get('account_name', None):
            # the source account name is same as the destination account name
            source_account_key = ns.get('account_key', None)
        else:
            # the source account is different from destination account but the key is missing
            # try to query one.
            try:
                source_account_key = _query_account_key(cmd.cli_ctx, source_account_name)
            except ValueError:
                raise ValueError('Source storage account {} not found.'.format(source_account_name))
    # else: both source account name and key are given by user
    if not source_account_name:
        raise ValueError(usage_string.format('Storage account name not found'))
    if not sas:
        # No SAS supplied: mint a short-lived SAS so the service can authorize the copy.
        sas = create_short_lived_blob_sas(cmd, source_account_name, source_account_key, container, blob)
    query_params = []
    if sas:
        query_params.append(sas)
    if snapshot:
        query_params.append('snapshot={}'.format(snapshot))
    uri = 'https://{}.blob.{}/{}/{}{}{}'.format(source_account_name,
                                                cmd.cli_ctx.cloud.suffixes.storage_endpoint,
                                                container,
                                                blob,
                                                '?' if query_params else '',
                                                '&'.join(query_params))
    namespace.copy_source = uri
def validate_source_uri(cmd, namespace): # pylint: disable=too-many-statements
    """Validate blob OR file-share source parameters and synthesize namespace.copy_source.

    Mirrors process_blob_source_uri but additionally supports file-share sources
    (--source-share/--source-path) and file/share snapshots. Exactly one of the
    argument sets in usage_string may be supplied.
    """
    from .util import create_short_lived_blob_sas, create_short_lived_file_sas
    usage_string = \
        'Invalid usage: {}. Supply only one of the following argument sets to specify source:' \
        '\n\t --source-uri [--source-sas]' \
        '\n\tOR --source-container --source-blob [--source-account-name & sas] [--source-snapshot]' \
        '\n\tOR --source-container --source-blob [--source-account-name & key] [--source-snapshot]' \
        '\n\tOR --source-share --source-path' \
        '\n\tOR --source-share --source-path [--source-account-name & sas]' \
        '\n\tOR --source-share --source-path [--source-account-name & key]'
    ns = vars(namespace)
    # source as blob
    container = ns.pop('source_container', None)
    blob = ns.pop('source_blob', None)
    snapshot = ns.pop('source_snapshot', None)
    # source as file
    share = ns.pop('source_share', None)
    path = ns.pop('source_path', None)
    file_snapshot = ns.pop('file_snapshot', None)
    # source credential clues
    source_account_name = ns.pop('source_account_name', None)
    source_account_key = ns.pop('source_account_key', None)
    source_sas = ns.pop('source_sas', None)
    # source in the form of an uri
    uri = ns.get('copy_source', None)
    if uri:
        # Explicit URI: only an optional SAS may accompany it.
        if any([container, blob, snapshot, share, path, file_snapshot, source_account_name,
                source_account_key]):
            raise ValueError(usage_string.format('Unused parameters are given in addition to the '
                                                 'source URI'))
        if source_sas:
            source_sas = source_sas.lstrip('?')
            uri = '{}{}{}'.format(uri, '?', source_sas)
        namespace.copy_source = uri
        return
    # ensure either a file or blob source is specified
    valid_blob_source = container and blob and not share and not path and not file_snapshot
    valid_file_source = share and path and not container and not blob and not snapshot
    if not valid_blob_source and not valid_file_source:
        raise ValueError(usage_string.format('Neither a valid blob or file source is specified'))
    if valid_blob_source and valid_file_source:
        raise ValueError(usage_string.format('Ambiguous parameters, both blob and file sources are '
                                             'specified'))
    validate_client_parameters(cmd, namespace) # must run first to resolve storage account
    if not source_account_name:
        if source_account_key:
            raise ValueError(usage_string.format('Source account key is given but account name is not'))
        # assume that user intends to copy blob in the same account
        source_account_name = ns.get('account_name', None)
    # determine if the copy will happen in the same storage account
    same_account = False
    if not source_account_key and not source_sas:
        if source_account_name == ns.get('account_name', None):
            same_account = True
            source_account_key = ns.get('account_key', None)
            source_sas = ns.get('sas_token', None)
        else:
            # the source account is different from destination account but the key is missing try to query one.
            try:
                source_account_key = _query_account_key(cmd.cli_ctx, source_account_name)
            except ValueError:
                raise ValueError('Source storage account {} not found.'.format(source_account_name))
    # Both source account name and either key or sas (or both) are now available
    if not source_sas:
        # generate a sas token even in the same account when the source and destination are not the same kind.
        if valid_file_source and (ns.get('container_name', None) or not same_account):
            import os
            dir_name, file_name = os.path.split(path) if path else (None, '')
            source_sas = create_short_lived_file_sas(cmd, source_account_name, source_account_key, share,
                                                     dir_name, file_name)
        elif valid_blob_source and (ns.get('share_name', None) or not same_account):
            source_sas = create_short_lived_blob_sas(cmd, source_account_name, source_account_key, container, blob)
    # Assemble the final URI: SAS first, then snapshot qualifiers.
    query_params = []
    if source_sas:
        query_params.append(source_sas.lstrip('?'))
    if snapshot:
        query_params.append('snapshot={}'.format(snapshot))
    if file_snapshot:
        query_params.append('sharesnapshot={}'.format(file_snapshot))
    uri = 'https://{0}.{1}.{6}/{2}/{3}{4}{5}'.format(
        source_account_name,
        'blob' if valid_blob_source else 'file',
        container if valid_blob_source else share,
        encode_for_url(blob if valid_blob_source else path),
        '?' if query_params else '',
        '&'.join(query_params),
        cmd.cli_ctx.cloud.suffixes.storage_endpoint)
    namespace.copy_source = uri
def validate_blob_type(namespace):
    """Default the blob type when unset: page blobs for .vhd uploads, block blobs otherwise."""
    if not namespace.blob_type:
        is_vhd = namespace.file_path.endswith('.vhd')
        namespace.blob_type = 'page' if is_vhd else 'block'
def validate_storage_data_plane_list(namespace):
    """Convert --num-results: '*' means unlimited (None); anything else must parse as int."""
    num_results = namespace.num_results
    namespace.num_results = None if num_results == '*' else int(num_results)
def get_content_setting_validator(settings_class, update, guess_from_file=None):
    # Factory producing a validator that assembles a ContentSettings object from CLI args.
    # When `update` is true, the existing properties are fetched first so that any value
    # the user did not pass is preserved (unless --clear-content-settings was given).
    def _class_name(class_type):
        # NOTE(review): for a class object this yields module + '.' + metaclass name
        # (e.g. '...models.type'); both sides of every comparison below are computed
        # the same way, so the module part still distinguishes blob vs file settings.
        return class_type.__module__ + "." + class_type.__class__.__name__

    def validator(cmd, namespace):
        t_base_blob_service, t_file_service, t_blob_content_settings, t_file_content_settings = cmd.get_models(
            'blob.baseblobservice#BaseBlobService',
            'file#FileService',
            'blob.models#ContentSettings',
            'file.models#ContentSettings')
        # must run certain validators first for an update
        if update:
            validate_client_parameters(cmd, namespace)
        if update and _class_name(settings_class) == _class_name(t_file_content_settings):
            get_file_path_validator()(namespace)
        ns = vars(namespace)
        clear_content_settings = ns.pop('clear_content_settings', False)
        # retrieve the existing object properties for an update
        if update and not clear_content_settings:
            account = ns.get('account_name')
            key = ns.get('account_key')
            cs = ns.get('connection_string')
            sas = ns.get('sas_token')
            token_credential = ns.get('token_credential')
            if _class_name(settings_class) == _class_name(t_blob_content_settings):
                client = get_storage_data_service_client(cmd.cli_ctx,
                                                         service=t_base_blob_service,
                                                         name=account,
                                                         key=key, connection_string=cs, sas_token=sas,
                                                         token_credential=token_credential)
                container = ns.get('container_name')
                blob = ns.get('blob_name')
                lease_id = ns.get('lease_id')
                props = client.get_blob_properties(container, blob, lease_id=lease_id).properties.content_settings
            elif _class_name(settings_class) == _class_name(t_file_content_settings):
                client = get_storage_data_service_client(cmd.cli_ctx, t_file_service, account, key, cs, sas)
                share = ns.get('share_name')
                directory = ns.get('directory_name')
                filename = ns.get('file_name')
                props = client.get_file_properties(share, directory, filename).properties.content_settings
        # create new properties
        new_props = settings_class(
            content_type=ns.pop('content_type', None),
            content_disposition=ns.pop('content_disposition', None),
            content_encoding=ns.pop('content_encoding', None),
            content_language=ns.pop('content_language', None),
            content_md5=ns.pop('content_md5', None),
            cache_control=ns.pop('content_cache_control', None)
        )
        # if update, fill in any None values with existing
        if update:
            if not clear_content_settings:
                for attr in ['content_type', 'content_disposition', 'content_encoding', 'content_language',
                             'content_md5', 'cache_control']:
                    if getattr(new_props, attr) is None:
                        setattr(new_props, attr, getattr(props, attr))
        else:
            # On create, optionally infer content type from the named file argument.
            if guess_from_file:
                new_props = guess_content_type(ns[guess_from_file], new_props, settings_class)
        ns['content_settings'] = new_props
    return validator
def validate_custom_domain(namespace):
    """Reject --use-subdomain when --custom-domain was not supplied."""
    if not namespace.use_subdomain:
        return
    if not namespace.custom_domain:
        raise ValueError('usage error: --custom-domain DOMAIN [--use-subdomain]')
def validate_encryption_services(cmd, namespace):
    """
    Builds up the encryption services object for storage account operations based on the list of services passed in.
    """
    if not namespace.encryption_services:
        return
    t_encryption_services, t_encryption_service = get_sdk(cmd.cli_ctx, ResourceType.MGMT_STORAGE,
                                                          'EncryptionServices', 'EncryptionService', mod='models')
    # Every listed service gets an enabled EncryptionService entry.
    enabled = {name: t_encryption_service(enabled=True) for name in namespace.encryption_services}
    namespace.encryption_services = t_encryption_services(**enabled)
def validate_encryption_source(cmd, namespace):
    """Validate the encryption key-source argument combination and build KeyVaultProperties."""
    ns = vars(namespace)
    key_name = ns.pop('encryption_key_name', None)
    key_version = ns.pop('encryption_key_version', None)
    key_vault_uri = ns.pop('encryption_key_vault', None)
    source = namespace.encryption_key_source
    if source == 'Microsoft.Keyvault' and not (key_name and key_version and key_vault_uri):
        raise ValueError('--encryption-key-name, --encryption-key-vault, and --encryption-key-version are required '
                         'when --encryption-key-source=Microsoft.Keyvault is specified.')
    if key_name or key_version or key_vault_uri:
        if source and source != 'Microsoft.Keyvault':
            raise ValueError('--encryption-key-name, --encryption-key-vault, and --encryption-key-version are not '
                             'applicable without Microsoft.Keyvault key-source.')
        KeyVaultProperties = get_sdk(cmd.cli_ctx, ResourceType.MGMT_STORAGE, 'KeyVaultProperties',
                                     mod='models')
        if not KeyVaultProperties:
            # Older API profiles have no KeyVaultProperties model; nothing more to do.
            return
        namespace.encryption_key_vault_properties = KeyVaultProperties(
            key_name=key_name, key_version=key_version, key_vault_uri=key_vault_uri)
def validate_entity(namespace):
    """ Converts a list of key value pairs into a dictionary. Ensures that required
    RowKey and PartitionKey are converted to the correct case and included. """
    values = dict(pair.split('=', 1) for pair in namespace.entity)
    # Canonicalize the casing of the two mandatory keys.
    for key in list(values.keys()):
        lowered = key.lower()
        if lowered == 'rowkey':
            values['RowKey'] = values.pop(key)
        elif lowered == 'partitionkey':
            values['PartitionKey'] = values.pop(key)
    keys = values.keys()
    missing_keys = 'RowKey ' if 'RowKey' not in keys else ''
    missing_keys = '{}PartitionKey'.format(missing_keys) \
        if 'PartitionKey' not in keys else missing_keys
    if missing_keys:
        import argparse
        raise argparse.ArgumentError(
            None, 'incorrect usage: entity requires: {}'.format(missing_keys))

    def cast_val(key, val):
        """ Attempts to cast numeric values (except RowKey and PartitionKey) to numbers so they
        can be queried correctly. """
        if key in ('PartitionKey', 'RowKey'):
            return val

        def try_cast(to_type):
            try:
                return to_type(val)
            except ValueError:
                return None
        # NOTE: falsy numeric results (e.g. '0') fall through the `or` chain and
        # remain strings — preserved deliberately to match existing behavior.
        return try_cast(int) or try_cast(float) or val

    # ensure numbers are converted from strings so querying will work correctly
    namespace.entity = {key: cast_val(key, val) for key, val in values.items()}
def validate_marker(namespace):
    """ Converts a list of key value pairs into a dictionary. Ensures that required
    nextrowkey and nextpartitionkey are included. """
    if not namespace.marker:
        return
    marker = dict(pair.split('=', 1) for pair in namespace.marker)
    expected_keys = {'nextrowkey', 'nextpartitionkey'}
    # Lower-case the continuation-token keys, tracking which required ones appeared.
    for key in list(marker.keys()):
        lowered = key.lower()
        if lowered in expected_keys:
            expected_keys.remove(lowered)
            marker[lowered] = marker.pop(key)
    if expected_keys:
        import argparse
        raise argparse.ArgumentError(
            None, 'incorrect usage: marker requires: {}'.format(' '.join(expected_keys)))
    namespace.marker = marker
def get_file_path_validator(default_file_param=None):
    """ Creates a namespace validator that splits out 'path' into 'directory_name' and 'file_name'.
    Allows another path-type parameter to be named which can supply a default filename. """
    def validator(namespace):
        import os
        if not hasattr(namespace, 'path'):
            return
        path = namespace.path
        if path:
            dir_name, file_name = os.path.split(path)
        else:
            dir_name, file_name = None, ''
        if default_file_param and '.' not in file_name:
            # No extension: treat the whole path as a directory and borrow
            # the filename from the other path-type parameter.
            dir_name = path
            file_name = os.path.split(getattr(namespace, default_file_param))[1]
        namespace.directory_name = None if dir_name in ('', '.') else dir_name
        namespace.file_name = file_name
        del namespace.path
    return validator
def validate_included_datasets(cmd, namespace):
    """Convert the --include flag letters (subset of 'cmsd') into a blob Include object."""
    if not namespace.include:
        return
    include = namespace.include
    if set(include) - set('cmsd'):
        help_string = '(c)opy-info (m)etadata (s)napshots (d)eleted'
        raise ValueError('valid values are {} or a combination thereof.'.format(help_string))
    t_blob_include = cmd.get_models('blob#Include')
    # Positional args: snapshots, metadata, uncommitted(False), copy, deleted.
    namespace.include = t_blob_include('s' in include, 'm' in include, False, 'c' in include, 'd' in include)
def validate_key_name(namespace):
    """Resolve a friendly key designation ('primary'/'secondary') into the concrete key name."""
    key_options = {'primary': '1', 'secondary': '2'}
    if hasattr(namespace, 'key_type') and namespace.key_type:
        # e.g. key_type 'key' + 'primary' -> 'key1'
        namespace.key_name = namespace.key_type + key_options[namespace.key_name]
    else:
        namespace.key_name = storage_account_key_options[namespace.key_name]
def validate_metadata(namespace):
    """Convert 'key=value' metadata pairs into a dictionary (first '=' splits)."""
    if namespace.metadata:
        namespace.metadata = dict(pair.split('=', 1) for pair in namespace.metadata)
def get_permission_help_string(permission_class):
    """Render the allowed permission letters for help text, e.g. '(r)ead (w)rite'."""
    allowed = [attr.lower() for attr in dir(permission_class) if not attr.startswith('__')]
    return ' '.join('({}){}'.format(word[0], word[1:]) for word in allowed)
def get_permission_validator(permission_class):
    """Create a validator that checks --permission letters and builds a permission object."""
    allowed = [attr.lower() for attr in dir(permission_class) if not attr.startswith('__')]
    allowed_string = ''.join(word[0] for word in allowed)

    def validator(namespace):
        if not namespace.permission:
            return
        if set(namespace.permission) - set(allowed_string):
            help_string = get_permission_help_string(permission_class)
            raise ValueError(
                'valid values are {} or a combination thereof.'.format(help_string))
        namespace.permission = permission_class(_str=namespace.permission)
    return validator
def table_permission_validator(cmd, namespace):
    """ A special case for table because the SDK associates the QUERY permission with 'r' """
    t_table_permissions = get_table_data_type(cmd.cli_ctx, 'table', 'TablePermissions')
    permission = namespace.permission
    if permission:
        if set(permission) - set('raud'):
            help_string = '(r)ead/query (a)dd (u)pdate (d)elete'
            raise ValueError('valid values are {} or a combination thereof.'.format(help_string))
        namespace.permission = t_table_permissions(_str=permission)
def validate_container_public_access(cmd, namespace):
    # Resolve --public-access into the SDK access-type constant and, for the policy
    # commands, pre-fetch the container's existing ACL so that setting the access
    # level does not silently wipe the stored access policies.
    from .sdkutil import get_container_access_type
    t_base_blob_svc = cmd.get_models('blob.baseblobservice#BaseBlobService')
    if namespace.public_access:
        namespace.public_access = get_container_access_type(cmd.cli_ctx, namespace.public_access.lower())
        if hasattr(namespace, 'signed_identifiers'):
            # must retrieve the existing ACL to simulate a patch operation because these calls
            # are needlessly conflated
            ns = vars(namespace)
            validate_client_parameters(cmd, namespace)
            account = ns.get('account_name')
            key = ns.get('account_key')
            cs = ns.get('connection_string')
            sas = ns.get('sas_token')
            client = get_storage_data_service_client(cmd.cli_ctx, t_base_blob_svc, account, key, cs, sas)
            container = ns.get('container_name')
            lease_id = ns.get('lease_id')
            ns['signed_identifiers'] = client.get_container_acl(container, lease_id=lease_id)
def validate_select(namespace):
    """Collapse the --select column list into a comma-separated string."""
    if namespace.select:
        namespace.select = ','.join(namespace.select)
# pylint: disable=too-many-statements
def get_source_file_or_blob_service_client(cmd, namespace):
    """
    Create the second file service or blob service client for batch copy command, which is used to
    list the source files or blobs. If both the source account and source URI are omitted, it
    indicates that user want to copy files or blobs in the same storage account, therefore the
    destination client will be set None hence the command will use destination client.
    """
    t_file_svc, t_block_blob_svc = cmd.get_models('file#FileService', 'blob.blockblobservice#BlockBlobService')
    usage_string = 'invalid usage: supply only one of the following argument sets:' + \
                   '\n\t --source-uri [--source-sas]' + \
                   '\n\tOR --source-container' + \
                   '\n\tOR --source-container --source-account-name --source-account-key' + \
                   '\n\tOR --source-container --source-account-name --source-sas' + \
                   '\n\tOR --source-share --source-account-name --source-account-key' + \
                   '\n\tOR --source-share --source-account-name --source-account-sas'
    ns = vars(namespace)
    source_account = ns.pop('source_account_name', None)
    source_key = ns.pop('source_account_key', None)
    source_uri = ns.pop('source_uri', None)
    source_sas = ns.get('source_sas', None)
    source_container = ns.get('source_container', None)
    source_share = ns.get('source_share', None)
    # A URI and an explicit account name are mutually exclusive.
    if source_uri and source_account:
        raise ValueError(usage_string)
    if not source_uri and bool(source_container) == bool(source_share): # must be container or share
        raise ValueError(usage_string)
    if (not source_account) and (not source_uri):
        # Set the source_client to None if neither source_account or source_uri is given. This
        # indicates the command that the source files share or blob container is in the same storage
        # account as the destination file share or blob container.
        #
        # The command itself should create the source service client since the validator can't
        # access the destination client through the namespace.
        #
        # A few arguments check will be made as well so as not to cause ambiguity.
        if source_key or source_sas:
            raise ValueError('invalid usage: --source-account-name is missing; the source account is assumed to be the'
                             ' same as the destination account. Do not provide --source-sas or --source-account-key')
        ns['source_client'] = None
        if 'token_credential' not in ns: # not using oauth
            return
        # oauth is only possible through destination, must still get source creds
        source_account, source_key, source_sas = ns['account_name'], ns['account_key'], ns['sas_token']
    if source_account:
        if not (source_key or source_sas):
            # when neither storage account key or SAS is given, try to fetch the key in the current
            # subscription
            source_key = _query_account_key(cmd.cli_ctx, source_account)
        # Build a blob or file client depending on which source kind was specified.
        if source_container:
            ns['source_client'] = get_storage_data_service_client(
                cmd.cli_ctx, t_block_blob_svc, name=source_account, key=source_key, sas_token=source_sas)
        elif source_share:
            ns['source_client'] = get_storage_data_service_client(
                cmd.cli_ctx, t_file_svc, name=source_account, key=source_key, sas_token=source_sas)
    elif source_uri:
        if source_key or source_container or source_share:
            raise ValueError(usage_string)
        from .storage_url_helpers import StorageResourceIdentifier
        if source_sas:
            source_uri = '{}{}{}'.format(source_uri, '?', source_sas.lstrip('?'))
        identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, source_uri)
        nor_container_or_share = not identifier.container and not identifier.share
        if not identifier.is_url():
            raise ValueError('incorrect usage: --source-uri expects a URI')
        if identifier.blob or identifier.directory or identifier.filename or nor_container_or_share:
            raise ValueError('incorrect usage: --source-uri has to be blob container or file share')
        if identifier.sas_token:
            ns['source_sas'] = identifier.sas_token
        else:
            source_key = _query_account_key(cmd.cli_ctx, identifier.account_name)
        # Only build a dedicated source client when the URI points at a different account.
        if identifier.container:
            ns['source_container'] = identifier.container
            if identifier.account_name != ns.get('account_name'):
                ns['source_client'] = get_storage_data_service_client(
                    cmd.cli_ctx, t_block_blob_svc, name=identifier.account_name, key=source_key,
                    sas_token=identifier.sas_token)
        elif identifier.share:
            ns['source_share'] = identifier.share
            if identifier.account_name != ns.get('account_name'):
                ns['source_client'] = get_storage_data_service_client(
                    cmd.cli_ctx, t_file_svc, name=identifier.account_name, key=source_key,
                    sas_token=identifier.sas_token)
def add_progress_callback(cmd, namespace):
    """Attach a progress-reporting callback to the namespace unless --no-progress was given."""
    hook = cmd.cli_ctx.get_progress_controller(det=True)

    def _update_progress(current, total):
        # Callers may stamp 'message'/'reuse' onto the function object before invoking it.
        msg = getattr(_update_progress, 'message', 'Alive')
        keep_open = getattr(_update_progress, 'reuse', False)
        if not total:
            return
        hook.add(message=msg, value=current, total_val=total)
        if current == total and not keep_open:
            hook.end()

    _update_progress.hook = hook
    if not namespace.no_progress:
        namespace.progress_callback = _update_progress
    del namespace.no_progress
def process_container_delete_parameters(cmd, namespace):
    """Process the parameters for storage container delete command"""
    # decide between the management plane and the data plane
    if not namespace.bypass_immutability_policy:
        # data-plane path, like before: resolve connection parameters as usual
        validate_client_parameters(cmd, namespace)
        return

    # management-plane path: resolve the resource group and a management client
    namespace.processed_account_name = namespace.account_name
    namespace.processed_resource_group, namespace.mgmt_client = _query_account_rg(
        cmd.cli_ctx, namespace.account_name)
    del namespace.auth_mode
def process_blob_download_batch_parameters(cmd, namespace):
    """Process the parameters for storage blob download command"""
    import os

    # 1. destination must point at an existing directory
    # (os.path.isdir is False for non-existent paths, so one check suffices)
    if not os.path.isdir(namespace.destination):
        raise ValueError('incorrect usage: destination must be an existing directory')

    # 2. extract account name and container name from the source string
    _process_blob_batch_container_parameters(cmd, namespace)

    # 3. attach progress reporting
    add_progress_callback(cmd, namespace)
def process_blob_upload_batch_parameters(cmd, namespace):
    """Process the source and destination of storage blob upload command"""
    import os

    # 1. source must point at an existing directory
    if not os.path.exists(namespace.source) or not os.path.isdir(namespace.source):
        raise ValueError('incorrect usage: source must be an existing directory')

    # 2. extract account name and container name from the destination string
    _process_blob_batch_container_parameters(cmd, namespace, source=False)

    # 3. collect the files to be uploaded
    namespace.source = os.path.realpath(namespace.source)
    namespace.source_files = list(glob_files_locally(namespace.source, namespace.pattern))

    # 4. determine blob type when the user did not pick one explicitly
    if namespace.blob_type is None:
        vhd_files = [f for f in namespace.source_files if f[0].endswith('.vhd')]
        if vhd_files and len(vhd_files) == len(namespace.source_files):
            # every listed file is a vhd: default to page blobs
            namespace.blob_type = 'page'
        elif vhd_files:
            # a mixture of vhd and non-vhd files: refuse to guess
            raise CLIError("""Fail to guess the required blob type. Type of the files to be
            uploaded are not consistent. Default blob type for .vhd files is "page", while
            others are "block". You can solve this problem by either explicitly set the blob
            type or ensure the pattern matches a correct set of files.""")
        else:
            namespace.blob_type = 'block'

    # 5. run the remaining validators
    validate_metadata(namespace)
    t_blob_content_settings = cmd.loader.get_sdk('blob.models#ContentSettings')
    get_content_setting_validator(t_blob_content_settings, update=False)(cmd, namespace)
    add_progress_callback(cmd, namespace)
def process_blob_delete_batch_parameters(cmd, namespace):
    # Resolve account/container from the source argument (shared helper with the
    # download-batch validator); also pulls missing connection params from env vars.
    _process_blob_batch_container_parameters(cmd, namespace)
def _process_blob_batch_container_parameters(cmd, namespace, source=True):
    """Process the container parameters for storage blob batch commands before populating args from environment."""
    if source:
        container_arg, container_name_arg = 'source', 'source_container_name'
    else:
        # destination
        container_arg, container_name_arg = 'destination', 'destination_container_name'

    # try to extract account name and container name from source string
    from .storage_url_helpers import StorageResourceIdentifier
    container_arg_val = getattr(namespace, container_arg)  # either a url or name
    identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, container_arg_val)

    if not identifier.is_url():
        # a bare container name was given; use it directly
        setattr(namespace, container_name_arg, container_arg_val)
    elif identifier.blob:
        # a blob-level URL is too specific for a batch container argument
        raise ValueError('incorrect usage: {} should be either a container URL or name'.format(container_arg))
    else:
        # a container URL: take the container name, and reconcile the account name
        setattr(namespace, container_name_arg, identifier.container)
        if namespace.account_name is None:
            namespace.account_name = identifier.account_name
        elif namespace.account_name != identifier.account_name:
            raise ValueError('The given storage account name is not consistent with the '
                             'account name in the destination URL')

        # if no sas-token is given and the container url contains one, use it
        if not namespace.sas_token and identifier.sas_token:
            namespace.sas_token = identifier.sas_token

    # Finally, grab missing storage connection parameters from environment variables
    validate_client_parameters(cmd, namespace)
def process_file_upload_batch_parameters(cmd, namespace):
    """Process the parameters of storage file batch upload command"""
    import os

    # 1. the source must name an existing directory
    if not os.path.exists(namespace.source):
        raise ValueError('incorrect usage: source {} does not exist'.format(namespace.source))
    if not os.path.isdir(namespace.source):
        raise ValueError('incorrect usage: source must be a directory')

    # 2. when the destination is a URL, reduce it to a share name (and account name)
    from .storage_url_helpers import StorageResourceIdentifier
    identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, namespace.destination)
    if identifier.is_url():
        if identifier.filename or identifier.directory:
            # a file- or directory-level URL is too specific
            raise ValueError('incorrect usage: destination must be a file share url')
        namespace.destination = identifier.share
        if not namespace.account_name:
            namespace.account_name = identifier.account_name

    namespace.source = os.path.realpath(namespace.source)
def process_file_download_batch_parameters(cmd, namespace):
    """Process the parameters for storage file batch download command"""
    import os

    # 1. destination must point at an existing directory
    # (os.path.isdir returns False for paths that do not exist)
    if not os.path.isdir(namespace.destination):
        raise ValueError('incorrect usage: destination must be an existing directory')

    # 2. extract account name and share name from the source string
    process_file_batch_source_parameters(cmd, namespace)
def process_file_batch_source_parameters(cmd, namespace):
    """Reduce a share URL in namespace.source to a share name; plain names pass through."""
    from .storage_url_helpers import StorageResourceIdentifier
    identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, namespace.source)
    if not identifier.is_url():
        return
    if identifier.filename or identifier.directory:
        raise ValueError('incorrect usage: source should be either share URL or name')
    namespace.source = identifier.share
    if not namespace.account_name:
        namespace.account_name = identifier.account_name
def process_file_download_namespace(namespace):
    """Default the local file path to the remote file name when a directory (or nothing) is given."""
    import os

    get_file_path_validator()(namespace)
    dest = namespace.file_path
    if not dest:
        # no local path at all: download next to the CWD under the remote name
        namespace.file_path = namespace.file_name
    elif os.path.isdir(dest):
        # a directory was given: place the file inside it
        namespace.file_path = os.path.join(dest, namespace.file_name)
def process_metric_update_namespace(namespace):
    """Validate and normalize the --hour/--minute/--api flags for metrics update.

    Converts the string flags ('true'/'false') to booleans, requires at least
    one of --hour/--minute, and requires --api when a metric is being enabled.

    :raises argparse.ArgumentError: on a missing or inconsistent combination.
    """
    import argparse

    # BUGFIX: check for missing flags BEFORE converting. The conversion
    # (namespace.hour == 'true') always produces a bool, so performing the
    # 'is None' test afterwards (as the original did) could never fire and
    # the "must specify --hour and/or --minute" error was dead code.
    if namespace.hour is None and namespace.minute is None:
        raise argparse.ArgumentError(
            None, 'incorrect usage: must specify --hour and/or --minute')

    namespace.hour = namespace.hour == 'true'
    namespace.minute = namespace.minute == 'true'
    # --api stays None when omitted so the check below can detect its absence
    namespace.api = namespace.api == 'true' if namespace.api else None

    if (namespace.hour or namespace.minute) and namespace.api is None:
        raise argparse.ArgumentError(
            None, 'incorrect usage: specify --api when hour or minute metrics are enabled')
def validate_subnet(cmd, namespace):
    """Expand a subnet name plus --vnet-name into a full subnet resource ID."""
    from msrestazure.tools import resource_id, is_valid_resource_id
    from azure.cli.core.commands.client_factory import get_subscription_id

    subnet = namespace.subnet
    vnet = namespace.vnet_name
    subnet_is_id = is_valid_resource_id(subnet)

    # nothing to do: either a full resource ID was supplied, or no subnet at all
    if (subnet_is_id and not vnet) or (not subnet and not vnet):
        return

    # the only remaining valid combination is NAME + --vnet-name
    if not (subnet and not subnet_is_id and vnet):
        raise CLIError('incorrect usage: [--subnet ID | --subnet NAME --vnet-name NAME]')

    namespace.subnet = resource_id(
        subscription=get_subscription_id(cmd.cli_ctx),
        resource_group=namespace.resource_group_name,
        namespace='Microsoft.Network',
        type='virtualNetworks',
        name=vnet,
        child_type_1='subnets',
        child_name_1=subnet)
def get_datetime_type(to_string):
    """ Validates UTC datetime. Examples of accepted forms:
    2017-12-31T01:11:59Z,2017-12-31T01:11Z or 2017-12-31T01Z or 2017-12-31 """
    from datetime import datetime

    def datetime_type(string):
        """ Validates UTC datetime. Examples of accepted forms:
        2017-12-31T01:11:59Z,2017-12-31T01:11Z or 2017-12-31T01Z or 2017-12-31 """
        accepted_date_formats = ['%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%MZ',
                                 '%Y-%m-%dT%HZ', '%Y-%m-%d']
        # try the formats from most to least specific; first match wins
        for form in accepted_date_formats:
            try:
                parsed = datetime.strptime(string, form)
            except ValueError:
                continue
            return parsed.strftime(form) if to_string else parsed
        raise ValueError("Input '{}' not valid. Valid example: 2000-12-31T12:59:59Z".format(string))

    return datetime_type
def ipv4_range_type(string):
    """ Validates an IPv4 address or address range. """
    import re
    ip_format = r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}'
    is_single = re.match('^{}$'.format(ip_format), string)
    is_range = re.match('^{ip_format}-{ip_format}$'.format(ip_format=ip_format), string)
    if not (is_single or is_range):
        raise ValueError
    return string
def resource_type_type(loader):
    """ Returns a function which validates that resource types string contains only a combination of service,
    container, and object. Their shorthand representations are s, c, and o. """
    def impl(string):
        t_resources = loader.get_models('common.models#ResourceTypes')
        unique = set(string)
        if unique - set("sco"):
            raise ValueError
        return t_resources(_str=''.join(unique))
    return impl
def services_type(loader):
    """ Returns a function which validates that services string contains only a combination of blob, queue, table,
    and file. Their shorthand representations are b, q, t, and f. """
    def impl(string):
        t_services = loader.get_models('common.models#Services')
        unique = set(string)
        if unique - set("bqtf"):
            raise ValueError
        return t_services(_str=''.join(unique))
    return impl
def get_char_options_validator(types, property_name):
    """Return a validator ensuring the namespace property holds only characters from 'types'."""
    def _validator(namespace):
        flag = property_name.replace('_', '-')
        service_types = set(getattr(namespace, property_name, list()))
        if not service_types:
            raise ValueError('Missing options --{}.'.format(flag))
        if service_types - set(types):
            raise ValueError(
                '--{}: only valid values are: {}.'.format(flag, ', '.join(types)))
        # normalize the property to the de-duplicated set form
        setattr(namespace, property_name, service_types)
    return _validator
def page_blob_tier_validator(cmd, namespace):
    """Resolve a premium page blob tier name to its SDK enum value."""
    if not namespace.tier:
        return
    # tier is set from here on, so only the blob type needs checking
    if namespace.blob_type != 'page':
        raise ValueError('Blob tier is only applicable to page blobs on premium storage accounts.')
    try:
        namespace.tier = getattr(cmd.get_models('blob.models#PremiumPageBlobTier'), namespace.tier)
    except AttributeError:
        from azure.cli.command_modules.storage.sdkutil import get_blob_tier_names
        raise ValueError('Unknown premium page blob tier name. Choose among {}'.format(', '.join(
            get_blob_tier_names(cmd.cli_ctx, 'PremiumPageBlobTier'))))
def block_blob_tier_validator(cmd, namespace):
    """Resolve a standard block blob tier name to its SDK enum value."""
    if not namespace.tier:
        return
    # tier is set from here on, so only the blob type needs checking
    if namespace.blob_type != 'block':
        raise ValueError('Blob tier is only applicable to block blobs on standard storage accounts.')
    try:
        namespace.tier = getattr(cmd.get_models('blob.models#StandardBlobTier'), namespace.tier)
    except AttributeError:
        from azure.cli.command_modules.storage.sdkutil import get_blob_tier_names
        raise ValueError('Unknown block blob tier name. Choose among {}'.format(', '.join(
            get_blob_tier_names(cmd.cli_ctx, 'StandardBlobTier'))))
def blob_tier_validator(cmd, namespace):
    """Dispatch blob-tier validation based on the blob type."""
    dispatch = {'page': page_blob_tier_validator, 'block': block_blob_tier_validator}
    validator = dispatch.get(namespace.blob_type)
    if validator is None:
        raise ValueError('Blob tier is only applicable to block or page blob.')
    validator(cmd, namespace)
def validate_azcopy_upload_destination_url(cmd, namespace):
    """Convert container/path arguments into a single destination blob URL for azcopy."""
    client = blob_data_service_factory(cmd.cli_ctx, {
        'account_name': namespace.account_name})
    # a missing path means "container root"
    destination_path = namespace.destination_path or ''
    namespace.destination = client.make_blob_url(namespace.destination_container, destination_path)
    del namespace.destination_container
    del namespace.destination_path
def validate_azcopy_remove_arguments(cmd, namespace):
    """Validate the source arguments of the azcopy-backed remove command.

    Exactly one of a blob source (--container-name [--name]) or a file source
    (--share-name [--path]) must be supplied; the chosen source is converted
    into namespace.service ('blob' or 'file') and namespace.target (a URL).
    """
    usage_string = \
        'Invalid usage: {}. Supply only one of the following argument sets to specify source:' \
        '\n\t --container-name [--name]' \
        '\n\tOR --share-name [--path]'

    ns = vars(namespace)

    # source as blob
    container = ns.pop('container_name', None)
    blob = ns.pop('blob_name', None)

    # source as file
    share = ns.pop('share_name', None)
    path = ns.pop('path', None)

    # ensure either a file or blob source is specified
    valid_blob = container and not share
    valid_file = share and not container

    if not valid_blob and not valid_file:
        raise ValueError(usage_string.format('Neither a valid blob or file source is specified'))
    if valid_blob and valid_file:
        raise ValueError(usage_string.format('Ambiguous parameters, both blob and file sources are '
                                             'specified'))

    if valid_blob:
        client = blob_data_service_factory(cmd.cli_ctx, {
            'account_name': namespace.account_name})
        # an absent blob name targets the whole container
        if not blob:
            blob = ''
        url = client.make_blob_url(container, blob)
        namespace.service = 'blob'
        namespace.target = url

    if valid_file:
        import os
        client = file_data_service_factory(cmd.cli_ctx, {
            'account_name': namespace.account_name,
            'account_key': namespace.account_key})
        # split a 'dir/file' path; bare filenames get no directory component
        dir_name, file_name = os.path.split(path) if path else (None, '')
        dir_name = None if dir_name in ('', '.') else dir_name
        url = client.make_file_url(share, dir_name, file_name)
        namespace.service = 'file'
        namespace.target = url
def as_user_validator(namespace):
    """Validate --as-user: requires --expiry within 7 days and login-based auth."""
    if not namespace.as_user:
        return
    import argparse

    if namespace.expiry is None:
        raise argparse.ArgumentError(
            None, 'incorrect usage: specify --expiry when as-user is enabled')

    expiry = get_datetime_type(False)(namespace.expiry)
    from datetime import datetime, timedelta
    if expiry > datetime.utcnow() + timedelta(days=7):
        raise argparse.ArgumentError(
            None, 'incorrect usage: --expiry should be within 7 days from now')

    # user delegation requires OAuth: either a token credential or --auth-mode login
    has_token = hasattr(namespace, 'token_credential') and namespace.token_credential is not None
    uses_login = hasattr(namespace, 'auth_mode') and namespace.auth_mode == 'login'
    if not has_token and not uses_login:
        raise argparse.ArgumentError(
            None, "incorrect usage: specify '--auth-mode login' when as-user is enabled")
def validator_delete_retention_days(namespace):
    """Cross-validate --enable-delete-retention and --delete-retention-days."""
    enabled = namespace.enable_delete_retention
    days = namespace.delete_retention_days

    if enabled is True and days is None:
        raise ValueError(
            "incorrect usage: you have to provide value for '--delete-retention-days' when '--enable-delete-retention' "
            "is set to true")
    if enabled is False and days is not None:
        raise ValueError(
            "incorrect usage: '--delete-retention-days' is invalid when '--enable-delete-retention' is set to false")
    if enabled is None and days is not None:
        raise ValueError(
            "incorrect usage: please specify '--enable-delete-retention true' if you want to set the value for "
            "'--delete-retention-days'")

    # when a day count is present (including 0), enforce the valid 1..365 range
    if days is not None:
        if days < 1:
            raise ValueError(
                "incorrect usage: '--delete-retention-days' must be greater than or equal to 1")
        if days > 365:
            raise ValueError(
                "incorrect usage: '--delete-retention-days' must be less than or equal to 365")
def validate_private_endpoint_connection_id(cmd, namespace):
    """Decompose a private endpoint connection --id into its component arguments."""
    if namespace.connection_id:
        from azure.cli.core.util import parse_proxy_resource_id
        parts = parse_proxy_resource_id(namespace.connection_id)
        namespace.resource_group_name = parts['resource_group']
        namespace.account_name = parts['name']
        namespace.private_endpoint_connection_name = parts['child_name_1']

    # fill in the resource group from the account when only the account was given
    if namespace.account_name and not namespace.resource_group_name:
        namespace.resource_group_name = _query_account_rg(cmd.cli_ctx, namespace.account_name)[0]

    required = (namespace.account_name, namespace.resource_group_name,
                namespace.private_endpoint_connection_name)
    if not all(required):
        raise CLIError('incorrect usage: [--id ID | --name NAME --account-name NAME]')

    del namespace.connection_id
|
"""
Copyright (c) 2018-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pytest
pytest.importorskip('accuracy_checker.launcher.gapi_launcher')
import cv2
import numpy as np
from accuracy_checker.launcher.launcher import create_launcher
from accuracy_checker.config import ConfigError
def get_gapi_test_model(models_dir):
    # Build a minimal G-API launcher config pointing at the sample model directory
    # (classification model with a single 3x32x32 'data' input and an 'fc3' output).
    config = {
        "framework": "g-api",
        #"weights": str(models_dir / "SampLeNet.bin"),
        "model": models_dir,
        "adapter": "classification",
        "device": "cpu",
        "inputs": [{"name": "data", "type": "INPUT", "shape": "(3, 32, 32)"}],
        'outputs': ['fc3']
    }
    return create_launcher(config)
class TestGAPILauncher:
    # Functional tests for the G-API launcher; 'models_dir'/'data_dir' are pytest fixtures.

    def test_launcher_creates(self, models_dir):
        # the launcher should expose the NCHW input shape of the 'data' layer
        assert get_gapi_test_model(models_dir).inputs['data'] == (1, 3, 32, 32)

    def test_infer_model(self, data_dir, models_dir):
        # run one classification inference on a resized sample image and check the top class
        test_model = get_gapi_test_model(models_dir)
        _, _, h, w = test_model.inputs['data']
        img_raw = cv2.imread(str(data_dir / '1.jpg'))
        img_resized = cv2.resize(img_raw, (w, h))
        res = test_model.predict([{'data': img_resized}], [{}])
        assert np.argmax(res[0]['fc3']) == 7
@pytest.mark.usefixtures('mock_path_exists')
class TestOpenCVLauncherConfig:
    # Config-validation tests for create_launcher with the g-api framework.
    # NOTE(review): despite the class name mentioning OpenCV, every case here
    # targets the G-API launcher config — consider renaming; confirm with owners.

    def test_missed_framework_in_create_gapi_launcher_raises_config_error_exception(self):
        config = {
            # 'framework': 'g-api',
            'model': 'model.xml',
            'weights': 'weights.bin',
            'device': 'CPU',
            'adapter': 'classification',
            'inputs': [{'name': 'data', 'type': 'INPUT'}],
            'outputs': ['out']
        }
        # a missing 'framework' key surfaces as KeyError rather than ConfigError
        with pytest.raises(KeyError):
            create_launcher(config)

    def test_missed_model_in_create_gapi_launcher_raises_config_error_exception(self):
        config = {
            'framework': 'g-api',
            # 'model': 'model.ocv',
            'weights': 'weights.bin',
            'device': 'CPU',
            'adapter': 'classification',
            'inputs': [{'name': 'data', 'type': 'INPUT'}],
            'outputs': ['out']
        }
        with pytest.raises(ConfigError):
            create_launcher(config, 'model')

    def test_missed_device_in_create_gapi_launcher_raises_config_error_exception(self):
        config = {
            'framework': 'g-api',
            'model': 'model.xml',
            'weights': 'weights.bin',
            # 'device': 'not_device',
            'adapter': 'classification',
            'inputs': [{'name': 'data', 'type': 'INPUT'}],
            'outputs': ['out']
        }
        with pytest.raises(ConfigError):
            create_launcher(config)

    def test_missed_inputs_in_create_gapi_launcher_raises_config_error_exception(self):
        config = {
            'framework': 'g-api',
            'model': 'model.xml',
            'weights': 'weights.bin',
            'device': 'CPU',
            'backend': 'not_backend',
            'adapter': 'classification',
            # 'inputs': [{'name': 'data', 'type': 'INPUT'}]
            'outputs': ['out']
        }
        with pytest.raises(ConfigError):
            create_launcher(config)

    def test_missed_outputs_in_create_gapi_launcher_raises_config_error_exception(self):
        config = {
            'framework': 'g-api',
            'model': 'model.xml',
            'weights': 'weights.bin',
            'device': 'CPU',
            'backend': 'not_backend',
            'adapter': 'classification',
            'inputs': [{'name': 'data', 'type': 'INPUT'}]
            #'outputs': ['out']
        }
        with pytest.raises(ConfigError):
            create_launcher(config)
|
import os
from unittest.mock import patch
import mock
import pytest
from demisto_sdk.commands.common.errors import Errors
from demisto_sdk.commands.common.git_tools import git_path
from demisto_sdk.commands.common.hook_validations.docker import \
DockerImageValidator
from demisto_sdk.commands.common.tools import get_yaml
# Patch target used to intercept return_error in the latest-tag lookup tests.
RETURN_ERROR_TARGET = 'GetDockerImageLatestTag.return_error'

# Two fake Docker Hub tag payloads (newest first by 'last_updated') used to
# exercise the tag-selection helpers below.
MOCK_TAG_LIST = [{
    u'last_updated': u'2019-10-23T09:13:30.84299Z',
    u'name': u'1.0.0.2876',
    u'repository': 7863337,
    u'creator': 4824052,
    u'image_id': None,
    u'v2': True,
    u'last_updater_username': u'containersci',
    u'last_updater': 4824052,
    u'images': [{
        u'features': u'',
        u'os_features': u'',
        u'variant': None,
        u'os_version': None,
        u'architecture': u'amd64',
        u'os': u'linux',
        u'digest': u'DIGEST',
        u'size': 79019268
    }],
    u'full_size': 79019268,
    u'id': 73482510
}, {
    u'last_updated': u'2019-10-16T06:47:29.631011Z',
    u'name': u'1.0.0.2689',
    u'repository': 7863337,
    u'creator': 4824052,
    u'image_id': None,
    u'v2': True,
    u'last_updater_username': u'containersci',
    u'last_updater': 4824052,
    u'images': [{
        u'features': u'',
        u'os_features': u'',
        u'variant': None,
        u'os_version': None,
        u'architecture': u'amd64',
        u'os': u'linux',
        u'digest': u'DIGEST',
        u'size': 77021619
    }],
    u'full_size': 77021619,
    u'id': 72714981
}]

# Static YAML fixtures consumed by the get_docker_image_from_yml tests.
FILES_PATH = os.path.normpath(os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files'))
TEST_INTEGRATION_FILE = os.path.join(FILES_PATH, 'fake_integration.yml')
TEST_SCRIPT_FILE = os.path.join(FILES_PATH, 'fake-script.yml')
class TestDockerImage:
# demisto/python-deb doesn't contain a latest tag
@pytest.mark.parametrize('image', ['python', 'python-deb', 'python3', 'python3-deb'])
def test_get_docker_image_latest_tag(self, image):
with mock.patch.object(DockerImageValidator, '__init__', lambda x, y, z, w: None):
docker_image_validator = DockerImageValidator(None, None, None)
docker_image_validator.yml_file = {}
docker_image_validator.file_path = "PATH"
docker_image_validator.ignored_errors = []
docker_image_validator.docker_image_latest_tag = '1.0.3'
docker_image_validator.docker_image_name = 'demisto/python'
docker_image_validator.suppress_print = False
tag = docker_image_validator.get_docker_image_latest_tag(docker_image_name=f'demisto/{image}',
yml_docker_image='')
# current latest tag is 2.7.16.2728 or 3.7.2.2728 disable-secrets-detection
assert int(tag.split('.')[3]) >= 2728
data_test_none_demisto_docker = [
('blabla/google-api-py3', '1.0.0.5992', ''),
('unknownvuser/v-alpine', 'at_v_commit-b17ade1257cfe086c1742c91deeb6c606037b893', ''),
('feedparser', 'latest', '')
]
@pytest.mark.parametrize('docker, docker_tag, expected_output', data_test_none_demisto_docker)
def test_none_demisto_docker(self, docker, docker_tag, expected_output):
with mock.patch.object(DockerImageValidator, '__init__', lambda x, y, z, w: None):
docker_image_validator = DockerImageValidator(None, None, None)
docker_image_validator.yml_file = {}
docker_image_validator.file_path = "PATH"
docker_image_validator.ignored_errors = {}
docker_image_validator.checked_files = set()
docker_image_validator.suppress_print = False
assert docker_image_validator.get_docker_image_latest_tag(docker_image_name=docker,
yml_docker_image='{}:{}'.format(docker,
docker_tag)) == expected_output
# disable-secrets-detection-start
def test_get_docker_image_from_yml(self):
with patch.object(DockerImageValidator, '__init__', lambda x, y, z, w: None):
# Test integration case
docker_validator = DockerImageValidator(None, None, None)
docker_validator.yml_file = get_yaml(TEST_INTEGRATION_FILE)
docker_validator.is_integration = True
docker_validator.suppress_print = False
docker_image = docker_validator.get_docker_image_from_yml()
assert docker_image == "demisto/pyjwt:1.0"
# Test script case
docker_validator.yml_file = get_yaml(TEST_SCRIPT_FILE)
docker_validator.is_integration = False
docker_image = docker_validator.get_docker_image_from_yml()
assert docker_image == "demisto/stix2:1.0.0.204"
# disable-secrets-detection-end
def test_lexical_find_latest_tag(self):
tag_list = ["2.0.2000", "2.1.2700", "2.1.373", "latest"]
tag = DockerImageValidator.lexical_find_latest_tag(tag_list)
assert tag == "2.1.2700"
def test_find_latest_tag_by_date(self):
tag = DockerImageValidator.find_latest_tag_by_date(MOCK_TAG_LIST)
assert tag == "1.0.0.2876"
@pytest.mark.parametrize('www_auth, expected', [('AAArealm="2",service="3"AAA', ('2', '3')), ('bbb', ())])
def test_parse_www_auth(self, www_auth, expected):
assert expected == DockerImageValidator.parse_www_auth(www_auth)
# disable-secrets-detection-start
@pytest.mark.parametrize('input_tags, output_tags',
[(['1.2.3.0', '4.5.6.0', '7.8.9.0'], ['4.5.6.0', '1.2.3.0', '7.8.9.0']),
(['1.2.3.0', '4.a.6.0', '7.8.9.0'], ['7.8.9.0', '1.2.3.0']),
(['aaa', 'bbb'], []), (['6a.7.6'], []), (['6..4'], [])])
# disable-secrets-detection-end
def test_clear_non_numbered_tags(self, input_tags, output_tags):
assert sorted(output_tags) == sorted(DockerImageValidator.clear_non_numbered_tags(input_tags))
# disable-secrets-detection-start
def test_parse_docker_image(self):
with mock.patch.object(DockerImageValidator, '__init__', lambda x, y, z, w: None):
docker_image_validator = DockerImageValidator(None, None, None)
docker_image_validator.yml_file = {}
docker_image_validator.file_path = "PATH"
docker_image_validator.ignored_errors = {}
docker_image_validator.checked_files = set()
docker_image_validator.docker_image_latest_tag = '1.0.3'
docker_image_validator.docker_image_name = 'demisto/python'
docker_image_validator.suppress_print = False
assert 'demisto/python', '1.3-alpine' == docker_image_validator.parse_docker_image(
docker_image='demisto/python:1.3-alpine')
assert 'demisto/slack', '1.2.3.4' == docker_image_validator.parse_docker_image(
docker_image='demisto/slack:1.2.3.4')
assert 'demisto/python', '' == docker_image_validator.parse_docker_image(
docker_image='demisto/python/1.2.3.4')
assert ('', '') == docker_image_validator.parse_docker_image(docker_image='blah/blah:1.2.3.4')
# disable-secrets-detection-end
def test_is_docker_image_latest_tag_with_default_image(self):
"""
Given
- The default docker image - 'demisto/python:1.3-alpine'
When
- The most updated docker image in docker-hub is '1.0.3'
Then
- If the docker image is numeric and the most update one, it is Valid
- If the docker image is not numeric and labeled "latest", it is Invalid
"""
with mock.patch.object(DockerImageValidator, '__init__', lambda x, y, z, w: None):
docker_image_validator = DockerImageValidator(None, None, None)
docker_image_validator.yml_file = {}
docker_image_validator.file_path = "PATH"
docker_image_validator.ignored_errors = {}
docker_image_validator.code_type = 'python'
docker_image_validator.checked_files = set()
docker_image_validator.docker_image_latest_tag = '1.0.3'
docker_image_validator.docker_image_name = 'demisto/python'
docker_image_validator.suppress_print = False
docker_image_validator.is_latest_tag = True
docker_image_validator.is_modified_file = False
docker_image_validator.docker_image_tag = '1.3-alpine'
docker_image_validator.is_valid = True
assert docker_image_validator.is_docker_image_latest_tag() is False
assert docker_image_validator.is_latest_tag is False
assert docker_image_validator.is_docker_image_valid() is False
def test_is_docker_image_latest_tag_with_tag_labeled_latest(self):
"""
Given
- A docker image with "latest" as tag
When
- The most updated docker image in docker-hub is '1.0.3'
Then
- If the docker image is numeric and the most update one, it is Valid
- If the docker image is not numeric and labeled "latest", it is Invalid
"""
with mock.patch.object(DockerImageValidator, '__init__', lambda x, y, z, w: None):
docker_image_validator = DockerImageValidator(None, None, None)
docker_image_validator.yml_file = {}
docker_image_validator.ignored_errors = {}
docker_image_validator.checked_files = set()
docker_image_validator.docker_image_latest_tag = 'latest'
docker_image_validator.docker_image_name = 'demisto/python'
docker_image_validator.file_path = "PATH"
docker_image_validator.code_type = 'python'
docker_image_validator.suppress_print = False
docker_image_validator.is_latest_tag = True
docker_image_validator.is_valid = True
docker_image_validator.docker_image_tag = 'latest'
assert docker_image_validator.is_docker_image_latest_tag() is False
assert docker_image_validator.is_latest_tag is False
assert docker_image_validator.is_docker_image_valid() is False
def test_is_docker_image_latest_tag_with_latest_tag(self):
"""
Given
- A docker image with '1.0.3' as tag
When
- The most updated docker image in docker-hub is '1.0.3'
Then
- If the docker image is numeric and the most update one, it is Valid
- If the docker image is not numeric and labeled "latest", it is Invalid
"""
with mock.patch.object(DockerImageValidator, '__init__', lambda x, y, z, w: None):
docker_image_validator = DockerImageValidator(None, None, None)
docker_image_validator.yml_file = {}
docker_image_validator.ignored_errors = {}
docker_image_validator.file_path = "path"
docker_image_validator.docker_image_latest_tag = '1.0.3'
docker_image_validator.docker_image_name = 'demisto/python'
docker_image_validator.code_type = 'python'
docker_image_validator.suppress_print = False
docker_image_validator.is_latest_tag = True
docker_image_validator.is_valid = True
docker_image_validator.docker_image_tag = '1.0.3'
assert docker_image_validator.is_docker_image_latest_tag() is True
assert docker_image_validator.is_latest_tag is True
assert docker_image_validator.is_docker_image_valid() is True
def test_is_docker_image_latest_tag_with_numeric_but_not_most_updated(self):
"""
Given
- A docker image with '1.0.2' as tag
When
- The most updated docker image in docker-hub is '1.0.3'
Then
- If the docker image is numeric and the most update one, it is Valid
- If the docker image is not numeric and labeled "latest", it is Invalid
- If the docker image is not the most updated one it is invalid
"""
with mock.patch.object(DockerImageValidator, '__init__', lambda x, y, z, w: None):
docker_image_validator = DockerImageValidator(None, None, None)
docker_image_validator.yml_file = {}
docker_image_validator.ignored_errors = {}
docker_image_validator.file_path = "path"
docker_image_validator.docker_image_latest_tag = '1.0.3'
docker_image_validator.docker_image_name = 'demisto/python'
docker_image_validator.code_type = 'python'
docker_image_validator.checked_files = set()
docker_image_validator.suppress_print = False
docker_image_validator.is_latest_tag = True
docker_image_validator.docker_image_tag = '1.0.2'
docker_image_validator.is_valid = True
assert docker_image_validator.is_docker_image_latest_tag() is False
assert docker_image_validator.is_latest_tag is False
assert docker_image_validator.is_docker_image_valid() is False
def test_is_docker_image_latest_tag_without_tag(self):
    """
    Given
    - The latest docker image resolved from docker-hub is an empty tag
    When
    - The local image is tagged '1.0.2'
    Then
    - The image must be flagged as not the latest tag and therefore invalid.
    """
    with mock.patch.object(DockerImageValidator, '__init__', lambda x, y, z, w: None):
        validator = DockerImageValidator(None, None, None)
        # Populate the attributes the validator normally sets in __init__.
        attrs = {
            'yml_file': {},
            'ignored_errors': {},
            'file_path': "path",
            'docker_image_latest_tag': '',
            'docker_image_name': 'demisto/python',
            'code_type': 'python',
            'suppress_print': False,
            'is_latest_tag': True,
            'docker_image_tag': '1.0.2',
            'is_valid': True,
        }
        for name, value in attrs.items():
            setattr(validator, name, value)
        assert validator.is_docker_image_latest_tag() is False
        assert validator.is_latest_tag is False
        assert validator.is_docker_image_valid() is False
def test_non_existing_docker(self, integration, capsys, requests_mock, mocker):
    """A docker image that is absent from Docker Hub must be reported invalid."""
    docker_image = 'demisto/nonexistingdocker:1.4.0'
    integration.yml.write_dict(
        {
            'script': {
                'subtype': 'python3',
                'type': 'python',
                'dockerimage': docker_image
            }
        }
    )
    # Expected error message/code for a non-existing image.
    error, code = Errors.non_existing_docker(docker_image)
    # Stub out authentication and make Docker Hub return no tags at all.
    mocker.patch.object(DockerImageValidator, 'docker_auth', return_value='auth')
    requests_mock.get(
        "https://hub.docker.com/v2/repositories/demisto/nonexistingdocker/tags",
        json={'results': []}
    )
    validator = DockerImageValidator(integration.yml.path, True, True)
    assert validator.is_docker_image_valid() is False
    # The validator prints the error to stdout rather than raising.
    captured = capsys.readouterr()
    assert validator.is_valid is False
    assert error in captured.out
    assert code in captured.out
|
import generateRuleGroup from '../generateRuleGroup';
import generateSimpleQuery from '../generateSimpleQuery';
// Shape expected from generateSimpleQuery() when called with no arguments.
const expectedReturnObj = {
  id: '',
  rules: [generateRuleGroup()],
  target: { type: '', value: '' }
};

it('returns a field object', () => {
  // Only the `target` field is asserted here; the other fields contain
  // generated values that are not stable across calls.
  expect(generateSimpleQuery().target).toEqual(expectedReturnObj.target);
});

it('populates part of the return object using data passed in', () => {
  // Any fields supplied by the caller should be passed through unchanged.
  const populatedObj = {
    ...expectedReturnObj,
    id: '123-abc',
    rules: [1, 2, 3]
  };
  expect(generateSimpleQuery(populatedObj)).toEqual(populatedObj);
});
|
# Copyright 2018 The TensorFlow Constrained Optimization Authors. All Rights
# Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# ==============================================================================
"""Contains loss functions for use in rate constraints.
The "rates" used in this library represent e.g. the true positive rate, which is
the proportion of positively-labeled examples on which the model makes a
positive prediction. We cannot optimize over such rates directly, since the
indicator function for "do we make a positive prediction on this example" is
discontinuous, and hence not differentiable. Hence, we relax this indicator
function using a loss function, several alternatives for which are defined in
this file.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
import tensorflow as tf
from tensorflow_constrained_optimization.python.rates import helpers
def _convert_to_binary_classification_predictions(predictions):
  """Converts a `Tensor` into a set of binary classification predictions.

  This function checks that the given `Tensor` is floating-point, and that it is
  trivially convertible to rank-1, i.e. has only one "nontrivial" dimension
  (e.g. the shapes [1000] and [1, 1, None, 1] are allowed, but [None, 1, None]
  and [50, 10] are not). If it satisfies these conditions, then it is reshaped
  to be rank-1 (if necessary) and returned.

  Args:
    predictions: a rank-1 floating-point `Tensor` of predictions.

  Returns:
    The predictions `Tensor`, reshaped to be rank-1, if necessary.

  Raises:
    TypeError: if "predictions" is not a floating-point `Tensor`.
    ValueError: if "predictions" is not trivially convertible to rank-1.
  """
  # NOTE(review): tf.contrib is TF1-only; under TF2 this would be
  # tf.is_tensor -- confirm the project's TensorFlow version before porting.
  if not tf.contrib.framework.is_tensor(predictions):
    raise TypeError("predictions must be a tensor")
  if not predictions.dtype.is_floating:
    raise TypeError("predictions must be floating-point")
  # helpers.convert_to_1d_tensor performs the rank check and the reshape.
  return helpers.convert_to_1d_tensor(predictions, name="predictions")
@six.add_metaclass(abc.ABCMeta)
class Loss(object):
  """Abstract base class for losses.

  We use `Loss`es as keys in dictionaries (see the Term.key property), so every
  `Loss` must implement the __hash__ and __eq__ methods.
  """

  @abc.abstractproperty
  def is_differentiable(self):
    """Returns true only if the associated loss is {sub,super}differentiable.

    This property is used to check that non-differentiable losses (e.g. the
    zero-one loss) aren't used in contexts in which we will try to optimize over
    them. Non-differentiable losses, however, can be *evaluated* safely.

    Subdifferentiability or superdifferentiability is enough: as long as we can
    optimize the loss using a gradient-based method, this method should return
    True.

    Returns:
      True if the loss is {sub,super}differentiable. False otherwise.
    """

  @abc.abstractmethod
  def __hash__(self):
    pass

  @abc.abstractmethod
  def __eq__(self, other):
    pass

  def __ne__(self, other):
    # Delegates to __eq__ so subclasses only have to define equality.
    # NOTE(review): if a subclass __eq__ ever returned NotImplemented, this
    # negation would coerce it to False -- confirm subclasses return bools.
    return not self.__eq__(other)
@six.add_metaclass(abc.ABCMeta)
class BinaryClassificationLoss(Loss):
  """Abstract class for binary classification losses."""

  @abc.abstractproperty
  def is_normalized(self):
    """Returns true only if the associated loss is normalized.

    We call a classification loss "normalized" if there exists a random variable
    Z such that, for any values of the predictions and weights:

      loss(predictions, weights) = E[zero-one-loss(predictions + Z, weights)]

    where the expectation is taken over Z.

    Intuitively, a normalized loss can be interpreted as a smoothed zero-one
    loss (e.g. a ramp or a sigmoid), while a non-normalized loss will typically
    be some unbounded relaxation (e.g. a hinge).

    Returns:
      True if the loss is normalized. False otherwise.
    """

  @abc.abstractmethod
  def evaluate_binary_classification(self, predictions, weights):
    """Evaluates a binary classification loss on the given predictions.

    Given a rank-1 `Tensor` of predictions with shape (n,), where n is the
    number of examples, and a rank-2 `Tensor` of weights with shape (m, 2),
    where m is broadcastable to n, this method will return a `Tensor` of shape
    (n,) where the ith element approximates:

      zero_one_loss[i] = weights[i, 0] * 1{predictions[i] > 0} +
        0.5 * (weights[i, 0] + weights[i, 1]) * 1{predictions[i] == 0} +
        weights[i, 1] * 1{predictions[i] < 0}

    where 1{} is an indicator function. For the zero-one loss, the result will
    equal the above quantity, while for other losses, it'll instead be an
    approximation. For convex losses, it will typically be a convex (in
    predictions) upper bound.

    Args:
      predictions: a `Tensor` of shape (n,), where n is the number of examples.
      weights: a `Tensor` of shape (m,2), where m is broadcastable to n. This
        `Tensor` is *not* necessarily nonnegative.

    Returns:
      A `Tensor` of shape (n,) and dtype=predictions.dtype, containing the
      losses for each example.
    """
class ZeroOneLoss(BinaryClassificationLoss):
  """Zero-one loss.

  The zero-one loss is normalized and non-differentiable (it's piecewise
  constant).
  """

  def __init__(self):
    # The zero-one loss is stateless: nothing to configure.
    pass

  @property
  def is_differentiable(self):
    """Returns False: the zero-one loss is piecewise constant."""
    return False

  @property
  def is_normalized(self):
    """Returns True: the zero-one loss is trivially its own smoothing."""
    return True

  def __hash__(self):
    # All ZeroOneLoss instances are interchangeable, so hash only the type.
    return hash(type(self))

  def __eq__(self, other):
    return type(self) is type(other)

  def evaluate_binary_classification(self, predictions, weights):
    """Evaluates the zero-one loss on the given predictions.

    For a rank-1 predictions `Tensor` of shape (n,) and a rank-2 weights
    `Tensor` of shape (m, 2) with m broadcastable to n, returns a `Tensor` of
    shape (n,) whose ith element is:

      zero_one_loss[i] = weights[i, 0] * 1{predictions[i] > 0} +
        0.5 * (weights[i, 0] + weights[i, 1]) * 1{predictions[i] == 0} +
        weights[i, 1] * 1{predictions[i] < 0}

    where 1{} is an indicator function.

    Args:
      predictions: a `Tensor` of shape (n,), where n is the number of examples.
      weights: a `Tensor` of shape (m,2), where m is broadcastable to n. This
        `Tensor` is *not* necessarily nonnegative.

    Returns:
      A `Tensor` of shape (n,) and dtype=predictions.dtype, containing the
      zero-one losses for each example.

    Raises:
      TypeError: if "predictions" is not a floating-point `Tensor`, or "weights"
        is not a `Tensor`.
      ValueError: if "predictions" is not rank-1, or "weights" is not a rank-2
        `Tensor` with exactly two columns.
    """
    predictions = _convert_to_binary_classification_predictions(predictions)
    dtype = predictions.dtype.base_dtype
    num_columns = helpers.get_num_columns_of_2d_tensor(weights, name="weights")
    if num_columns != 2:
      raise ValueError("weights must have two columns")
    positive_weights = tf.cast(weights[:, 0], dtype=dtype)
    negative_weights = tf.cast(weights[:, 1], dtype=dtype)
    prediction_signs = tf.sign(predictions)
    # sign is +1/0/-1, so this expression selects the positive weight, the
    # average of both weights, or the negative weight, respectively.
    return 0.5 * ((positive_weights + negative_weights) + prediction_signs *
                  (positive_weights - negative_weights))
class HingeLoss(BinaryClassificationLoss):
  """Hinge loss.

  The hinge loss is subdifferentiable and non-normalized.
  """

  def __init__(self, margin=1.0):
    """Creates a new HingeLoss object with the given margin.

    The margin determines how far a prediction must be from the decision
    boundary in order for it to be penalized. When the margin is zero, this
    threshold is exactly the decision boundary. When the margin is at least one,
    the hinge loss upper bounds the zero-one loss.

    Args:
      margin: positive float, the margin of the hinge loss. Defaults to 1.

    Raises:
      ValueError: if the margin is non-positive.
    """
    # Validate before storing so we never leave a half-initialized object
    # behind (the original assigned self._margin first, then raised).
    if margin <= 0.0:
      raise ValueError("margin must be positive")
    self._margin = float(margin)

  @property
  def margin(self):
    """Accessor for the margin constructor parameter."""
    return self._margin

  @property
  def is_differentiable(self):
    """Returns True, since the hinge loss is subdifferentiable."""
    return True

  @property
  def is_normalized(self):
    """Returns False, since the hinge loss is unbounded."""
    return False

  def __hash__(self):
    return hash((type(self), self._margin))

  def __eq__(self, other):
    return (type(other) is type(self)) and (self._margin == other.margin)

  def evaluate_binary_classification(self, predictions, weights):
    """Evaluates the hinge loss on the given predictions.

    (Fixed docstring: this previously said "zero-one loss", a copy-paste
    error from ZeroOneLoss.)

    Given a rank-1 `Tensor` of predictions with shape (n,), where n is the
    number of examples, and a rank-2 `Tensor` of weights with shape (m, 2),
    where m is broadcastable to n, this method will return a `Tensor` of shape
    (n,) where the ith element is:

      hinge_loss[i] = constant_weights[i] +
        (weights[i, 0] - constant_weights[i])
          * max{0, margin + predictions[i]} +
        (weights[i, 1] - constant_weights[i])
          * max{0, margin - predictions[i]}

    where constant_weights[i] = min{weights[i, 0], weights[i, 1]} contains the
    minimum weights.

    Args:
      predictions: a `Tensor` of shape (n,), where n is the number of examples.
      weights: a `Tensor` of shape (m,2), where m is broadcastable to n. This
        `Tensor` is *not* necessarily nonnegative.

    Returns:
      A `Tensor` of shape (n,) and dtype=predictions.dtype, containing the
      hinge losses for each example.

    Raises:
      TypeError: if "predictions" is not a floating-point `Tensor`, or "weights"
        is not a `Tensor`.
      ValueError: if "predictions" is not rank-1, or "weights" is not a rank-2
        `Tensor` with exactly two columns.
    """
    predictions = _convert_to_binary_classification_predictions(predictions)
    dtype = predictions.dtype.base_dtype
    columns = helpers.get_num_columns_of_2d_tensor(weights, name="weights")
    if columns != 2:
      raise ValueError("weights must have two columns")
    zero = tf.zeros(1, dtype=dtype)
    margin = tf.constant(self._margin, dtype=dtype)
    positive_weights = tf.cast(weights[:, 0], dtype=dtype)
    negative_weights = tf.cast(weights[:, 1], dtype=dtype)
    # Split each weight pair into a shared constant part plus nonnegative
    # residuals, so the hinge terms are weighted by nonnegative amounts.
    constant_weights = tf.minimum(positive_weights, negative_weights)
    positive_weights -= constant_weights
    negative_weights -= constant_weights
    # Hinge relaxations of the "predicted positive"/"predicted negative"
    # indicators.
    is_positive = tf.maximum(zero, margin + predictions)
    is_negative = tf.maximum(zero, margin - predictions)
    return constant_weights + (
        positive_weights * is_positive + negative_weights * is_negative)
|
/*
* Copyright © 1991-2006 Bart Massey
* ALL RIGHTS RESERVED
* [This program is licensed under the "3-clause ('new') BSD License"]
* Please see the file COPYING in the source
* distribution of this software for license terms.
*/
// -*- C++ -*-
// a simple fast texture class -- interface
// Bart 2/91
// Forward declarations; full definitions live elsewhere in the ray tracer.
class point;
class model;
class texture;

// Texture that caches scene lighting parameters (light position/intensity
// and eye position) so per-point shading evaluation is fast.
class fast_texture : public texture {
  protected:
    point pl;   // light-point
    point pli;  // light intensity
    point pe;   // eye-point
  public:
    fast_texture( model &, texture & );
    fast_texture( const fast_texture & );
    // Produce a texture tuned to the given model; presumably precomputes
    // the cached members above -- confirm against the implementation file.
    virtual texture * tune_texture( model & );
    // Shading value at a surface point; parameter meanings follow the base
    // `texture` interface declared elsewhere.
    virtual point value( point &, point &, point &, model &, int );
};
|
# Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.c (the "License");
# you may not use this file except in compliance with the License.
#
# The entire source code is OSSRPL except 'whois' which is MPL
# License: MPL and OSSRPL
""" Userbot module for getting info about any user on Telegram (including you!). """
import os
from telethon.tl.functions.photos import GetUserPhotosRequest
from telethon.tl.functions.users import GetFullUserRequest
from telethon.tl.types import MessageEntityMentionName
from telethon.utils import get_input_location
from userbot import CMD_HELP, TEMP_DOWNLOAD_DIRECTORY
from userbot.events import register
@register(pattern=".data(?: |$)(.*)", outgoing=True)
async def who(event):
    """Handle the `.data` command: reply with a summary of a user's info.

    Targets the replied-to message's sender, an explicit argument, or the
    current account (see get_user); sends the profile photo plus an HTML
    caption built by fetch_info.
    """
    await event.edit(
        "`Hacking into zuckerberg's account and stealing data 😂...`")
    if not os.path.isdir(TEMP_DOWNLOAD_DIRECTORY):
        os.makedirs(TEMP_DOWNLOAD_DIRECTORY)
    replied_user = await get_user(event)
    try:
        photo, caption = await fetch_info(replied_user, event)
    except AttributeError:
        # get_user may return None; fetch_info then raises AttributeError.
        # BUG FIX: `await` was missing here, so the coroutine was created
        # but never executed and the error message was never shown.
        await event.edit("`Could not fetch info of that user.`")
        return
    message_id_to_reply = event.message.reply_to_msg_id
    if not message_id_to_reply:
        message_id_to_reply = None
    try:
        await event.client.send_file(event.chat_id,
                                     photo,
                                     caption=caption,
                                     link_preview=False,
                                     force_document=False,
                                     reply_to=message_id_to_reply,
                                     parse_mode="html")
        # Remove the locally downloaded photo (URLs are not local files).
        if not photo.startswith("http"):
            os.remove(photo)
        await event.delete()
    except TypeError:
        # send_file raises TypeError when photo is None (no profile photo);
        # fall back to a plain text reply.
        await event.edit(caption, parse_mode="html")
async def get_user(event):
    """ Get the user from argument or replied message.

    Resolution order:
    1. If the command is a reply with no argument, use the replied message's
       sender.
    2. Otherwise use the argument (numeric id or username); with no argument
       at all, fall back to the current account.
    Returns a full-user object, or None when resolution fails (the error is
    edited into the triggering message).
    """
    if event.reply_to_msg_id and not event.pattern_match.group(1):
        previous_message = await event.get_reply_message()
        # NOTE(review): from_id is passed straight to GetFullUserRequest;
        # in newer Telethon versions from_id can be a Peer object -- confirm
        # the pinned Telethon version accepts it here.
        replied_user = await event.client(
            GetFullUserRequest(previous_message.from_id))
    else:
        user = event.pattern_match.group(1)
        if user.isnumeric():
            # Numeric argument: treat it as a raw user id.
            user = int(user)
        if not user:
            # No argument given: target the logged-in account itself.
            self_user = await event.client.get_me()
            user = self_user.id
        if event.message.entities is not None:
            # A "text mention" entity carries the user id directly; prefer it
            # over resolving the textual argument.
            probable_user_mention_entity = event.message.entities[0]
            if isinstance(probable_user_mention_entity,
                          MessageEntityMentionName):
                user_id = probable_user_mention_entity.user_id
                replied_user = await event.client(GetFullUserRequest(user_id))
                return replied_user
        try:
            user_object = await event.client.get_entity(user)
            replied_user = await event.client(
                GetFullUserRequest(user_object.id))
        except (TypeError, ValueError) as err:
            # Entity resolution failed; surface the error in the chat.
            await event.edit(str(err))
            return None
    return replied_user
async def fetch_info(replied_user, event):
    """Collect profile details for a user and format them as an HTML caption.

    Args:
        replied_user: full-user result of GetFullUserRequest for the target.
        event: the triggering Telethon event, used for API calls/downloads.

    Returns:
        Tuple (photo, caption): path of the downloaded profile photo (or None
        when the user has none) and the HTML-formatted info text.
    """
    replied_user_profile_photos = await event.client(
        GetUserPhotosRequest(user_id=replied_user.user.id,
                             # NOTE(review): offset=42 skips the first 42
                             # photos; 0 looks intended -- confirm.
                             offset=42,
                             max_id=0,
                             limit=80))
    # Fallback shown when the API result has no `count` attribute.
    # (Fixed: the previous fallback string was offensive.)
    replied_user_profile_photos_count = "This user has no profile pictures."
    try:
        replied_user_profile_photos_count = replied_user_profile_photos.count
    except AttributeError:
        # No count available; keep the fallback text.
        pass
    user_id = replied_user.user.id
    first_name = replied_user.user.first_name
    last_name = replied_user.user.last_name
    try:
        # location is unused but kept for parity with the returned tuple.
        dc_id, location = get_input_location(replied_user.profile_photo)
    except Exception as e:
        dc_id = "Couldn't fetch DC ID!"
        location = str(e)
    common_chat = replied_user.common_chats_count
    username = replied_user.user.username
    user_bio = replied_user.about
    is_bot = replied_user.user.bot
    restricted = replied_user.user.restricted
    verified = replied_user.user.verified
    photo = await event.client.download_profile_photo(user_id,
                                                      TEMP_DOWNLOAD_DIRECTORY +
                                                      str(user_id) + ".jpg",
                                                      download_big=True)
    # Strip word-joiner characters some clients embed in display names.
    first_name = first_name.replace(
        "\u2060", "") if first_name else ("This User has no First Name")
    last_name = last_name.replace(
        "\u2060", "") if last_name else ("This User has no Last Name")
    username = "@{}".format(username) if username else (
        "This User has no Username")
    user_bio = "This User has no About" if not user_bio else user_bio
    caption = "<b>USER INFO:</b>\n\n"
    caption += f"First Name: {first_name}\n"
    caption += f"Last Name: {last_name}\n"
    caption += f"Username: {username}\n"
    caption += f"Data Centre ID: {dc_id}\n"
    caption += f"Number of Profile Pics: {replied_user_profile_photos_count}\n"
    caption += f"Is Bot: {is_bot}\n"
    caption += f"Is Restricted: {restricted}\n"
    caption += f"Is Verified by Telegram: {verified}\n"
    caption += f"ID: <code>{user_id}</code>\n\n"
    caption += f"Bio: \n<code>{user_bio}</code>\n\n"
    caption += f"Common Chats with this user: {common_chat}\n"
    # Plain string: no placeholders, so an f-string was unnecessary.
    caption += "Permanent Link To Profile: "
    caption += f"<a href=\"tg://user?id={user_id}\">{first_name}</a>"
    return photo, caption
# Register the `.data` command's help text in the userbot help index.
CMD_HELP.update({
    "User Info":
    ".data <username> or reply to someones text with .data\
\nUsage: Gets info of an user."
})
|
from helper.embed_props import *
import discord
def get_open_issues_embed(data):
    """Build a Discord embed listing the currently open issues.

    `data` is pre-formatted text appended to the fixed call-to-action line;
    timestamp/color/footer_text/icon_url come from helper.embed_props.
    """
    embed = discord.Embed(
        description="**Please clear these issues asap** \n" + data,
        timestamp=timestamp,
        color=color,
    )
    embed.set_author(name="**Open Issues**")
    embed.set_footer(text=footer_text, icon_url=icon_url)
    return embed
def get_resolved_issues_embed(data):
    """Build a Discord embed listing issues resolved and awaiting closure.

    `data` is pre-formatted text appended to the fixed call-to-action line;
    timestamp/color/footer_text/icon_url come from helper.embed_props.
    """
    embed = discord.Embed(
        description="**Dear mentors, Please close these issues asap** \n" + data,
        timestamp=timestamp,
        color=color,
    )
    embed.set_author(name="**Resolved Issues**")
    embed.set_footer(text=footer_text, icon_url=icon_url)
    return embed
|
/*
* Copyright 2017, Yahoo Inc.
* Copyrights licensed under the New BSD License.
* See the accompanying LICENSE file for terms.
*/
// Auto-generated time-zone data module (Yahoo intl tooling). Registers the
// encoded zone table with the private Intl._timeZoneData extension when it
// is present; otherwise the call short-circuits and this is a no-op.
// NOTE(review): "Moon/Nearside" is not a real IANA zone -- presumably test
// or placeholder data; confirm before relying on it.
module.exports = function load(global) {global.Intl && global.Intl._timeZoneData && global.Intl._timeZoneData.load({
  "zoneDefs": [
    "Moon/Nearside||0,0,0,1,1,0,2,0,0,3,1,0"
  ],
  "timeStamps": [
    "-lk72c1",
    "-gsn7t1",
    "-cc7vc1",
    "1vvvvvv"
  ],
  "offsets": [
    0,
    600
  ]
});};
|
Mo 26 Apr 2022 14:43:00 CEST
# UpdateUpdate!!-!!
<img class="newsimage" src="https://pixelfed.de/storage/m/_v2/409480134010541343/a25608e04-1e15cb/01fb5xqjMQU4/g0SWVUoA7FWQOejQ448I41tioJZu1GICoHQxpvXv.jpg"> <p>This is a paragraph.</p>
Wir haben immer noch eine Homepage.
|
# Tree_sort algorithm
# Build a BST and in order traverse.
class node():
    """Binary-search-tree node used by treesort.

    Fixes over the original implementation:
    - the `if self.val:` guard made insertion overwrite the node whenever its
      value was falsy (e.g. 0), corrupting the tree;
    - equal values were silently dropped (`else: self.val = val`), so sorting
      lost duplicates. Equal values now go into the left subtree, preserving
      multiplicity.
    """

    def __init__(self, val):
        # Value stored at this node; children start empty.
        self.val = val
        self.left = None
        self.right = None

    def insert(self, val):
        """Insert `val` into the subtree rooted at this node."""
        if val <= self.val:
            # Smaller-or-equal values go left (keeps duplicates).
            if self.left is None:
                self.left = node(val)
            else:
                self.left.insert(val)
        else:
            if self.right is None:
                self.right = node(val)
            else:
                self.right.insert(val)
def inorder(root, res):
    """Append the subtree's values to `res` in ascending (in-order) order."""
    if root is None:
        return
    inorder(root.left, res)
    res.append(root.val)
    inorder(root.right, res)
def treesort(arr):
    """Sort a list by inserting into a BST and reading it back in order.

    Returns the input list itself when it is empty; otherwise a new list.
    """
    if not arr:
        return arr
    root = node(arr[0])
    for value in arr[1:]:
        root.insert(value)
    ordered = []
    inorder(root, ordered)
    return ordered
# Demo: sort a sample list with the BST-based tree sort and print it.
print(treesort([10,1,3,2,9,14,13]))
|
// Resolve the global object across environments: modern runtimes
// (globalThis), browsers (window), Node (global), and workers (self).
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
// Execute a CommonJS-style module factory and return its exports object.
function createCommonjsModule(fn, module) {
  module = { exports: {} };
  fn(module, module.exports);
  return module.exports;
}
// Vendored UMD build of `deepmerge` (https://github.com/TehShrike/deepmerge),
// loaded through createCommonjsModule. Exposes deepmerge(target, source,
// options) and deepmerge.all(array, options).
var umd = createCommonjsModule(function (module, exports) {
  (function (global, factory) {
    module.exports = factory();
  }(commonjsGlobal, (function () {
    // A value is mergeable when it is a non-null object that is not a
    // RegExp, Date, or React element.
    var isMergeableObject = function isMergeableObject(value) {
      return isNonNullObject(value)
        && !isSpecial(value)
    };
    function isNonNullObject(value) {
      return !!value && typeof value === 'object'
    }
    function isSpecial(value) {
      var stringValue = Object.prototype.toString.call(value);
      return stringValue === '[object RegExp]'
        || stringValue === '[object Date]'
        || isReactElement(value)
    }
    // see https://github.com/facebook/react/blob/b5ac963fb791d1298e7f396236383bc955f916c1/src/isomorphic/classic/element/ReactElement.js#L21-L25
    var canUseSymbol = typeof Symbol === 'function' && Symbol.for;
    var REACT_ELEMENT_TYPE = canUseSymbol ? Symbol.for('react.element') : 0xeac7;
    function isReactElement(value) {
      return value.$$typeof === REACT_ELEMENT_TYPE
    }
    // Fresh empty container of the same kind (array vs object) as val.
    function emptyTarget(val) {
      return Array.isArray(val) ? [] : {}
    }
    function cloneUnlessOtherwiseSpecified(value, options) {
      return (options.clone !== false && options.isMergeableObject(value))
        ? deepmerge(emptyTarget(value), value, options)
        : value
    }
    // Default array strategy: concatenate, cloning each element.
    function defaultArrayMerge(target, source, options) {
      return target.concat(source).map(function(element) {
        return cloneUnlessOtherwiseSpecified(element, options)
      })
    }
    // Per-key override via options.customMerge; falls back to deepmerge.
    function getMergeFunction(key, options) {
      if (!options.customMerge) {
        return deepmerge
      }
      var customMerge = options.customMerge(key);
      return typeof customMerge === 'function' ? customMerge : deepmerge
    }
    function mergeObject(target, source, options) {
      var destination = {};
      if (options.isMergeableObject(target)) {
        Object.keys(target).forEach(function(key) {
          destination[key] = cloneUnlessOtherwiseSpecified(target[key], options);
        });
      }
      Object.keys(source).forEach(function(key) {
        if (!options.isMergeableObject(source[key]) || !target[key]) {
          destination[key] = cloneUnlessOtherwiseSpecified(source[key], options);
        } else {
          destination[key] = getMergeFunction(key, options)(target[key], source[key], options);
        }
      });
      return destination
    }
    function deepmerge(target, source, options) {
      options = options || {};
      options.arrayMerge = options.arrayMerge || defaultArrayMerge;
      options.isMergeableObject = options.isMergeableObject || isMergeableObject;
      var sourceIsArray = Array.isArray(source);
      var targetIsArray = Array.isArray(target);
      var sourceAndTargetTypesMatch = sourceIsArray === targetIsArray;
      if (!sourceAndTargetTypesMatch) {
        // Mismatched kinds: source wins outright (cloned).
        return cloneUnlessOtherwiseSpecified(source, options)
      } else if (sourceIsArray) {
        return options.arrayMerge(target, source, options)
      } else {
        return mergeObject(target, source, options)
      }
    }
    // Left-fold a whole array of objects into one merged result.
    deepmerge.all = function deepmergeAll(array, options) {
      if (!Array.isArray(array)) {
        throw new Error('first argument should be an array')
      }
      return array.reduce(function(prev, next) {
        return deepmerge(prev, next, options)
      }, {})
    };
    var deepmerge_1 = deepmerge;
    return deepmerge_1;
  })));
});
/*!
* isobject <https://github.com/jonschlinkert/isobject>
*
* Copyright (c) 2014-2017, Jon Schlinkert.
* Released under the MIT License.
*/
// True for non-null values of typeof 'object' that are not arrays.
function isObject(val) {
  if (val == null) {
    return false;
  }
  return typeof val === 'object' && !Array.isArray(val);
}
/*!
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
*
* Copyright (c) 2014-2017, Jon Schlinkert.
* Released under the MIT License.
*/
// Narrow isObject further: the value's internal class tag must be
// '[object Object]' (excludes Dates, RegExps, etc.).
function isObjectObject(o) {
  if (!isObject(o)) {
    return false;
  }
  return Object.prototype.toString.call(o) === '[object Object]';
}
// True only for "plain" objects: object literals / Object.create results
// whose constructor and prototype have not been tampered with.
function isPlainObject(o) {
  if (isObjectObject(o) === false) {
    return false;
  }
  // A plain object's constructor is a function...
  var ctor = o.constructor;
  if (typeof ctor !== 'function') {
    return false;
  }
  // ...whose prototype is itself a plain-looking object...
  var prot = ctor.prototype;
  if (isObjectObject(prot) === false) {
    return false;
  }
  // ...carrying the Object-specific isPrototypeOf method.
  if (prot.hasOwnProperty('isPrototypeOf') === false) {
    return false;
  }
  return true;
}
// Vendored UMD build of `url-template` — an RFC 6570 URI Template expander
// (supports the +, #, ., /, ;, ?, & operators plus :N and * modifiers).
var urlTemplate = createCommonjsModule(function (module, exports) {
  (function (root, factory) {
    {
      module.exports = factory();
    }
  }(commonjsGlobal, function () {
    /**
     * @constructor
     */
    function UrlTemplate() {
    }
    /**
     * Percent-encode a string while leaving already-encoded %XX sequences
     * and RFC 3986 reserved characters intact.
     * @private
     * @param {string} str
     * @return {string}
     */
    UrlTemplate.prototype.encodeReserved = function (str) {
      return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {
        if (!/%[0-9A-Fa-f]/.test(part)) {
          part = encodeURI(part).replace(/%5B/g, '[').replace(/%5D/g, ']');
        }
        return part;
      }).join('');
    };
    /**
     * Fully percent-encode a string, including !'()* which
     * encodeURIComponent leaves alone.
     * @private
     * @param {string} str
     * @return {string}
     */
    UrlTemplate.prototype.encodeUnreserved = function (str) {
      return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
        return '%' + c.charCodeAt(0).toString(16).toUpperCase();
      });
    };
    /**
     * Encode one value; + and # operators permit reserved characters.
     * @private
     * @param {string} operator
     * @param {string} value
     * @param {string} key
     * @return {string}
     */
    UrlTemplate.prototype.encodeValue = function (operator, value, key) {
      value = (operator === '+' || operator === '#') ? this.encodeReserved(value) : this.encodeUnreserved(value);
      if (key) {
        return this.encodeUnreserved(key) + '=' + value;
      } else {
        return value;
      }
    };
    /**
     * @private
     * @param {*} value
     * @return {boolean}
     */
    UrlTemplate.prototype.isDefined = function (value) {
      return value !== undefined && value !== null;
    };
    /**
     * Operators whose expansions are rendered as key=value pairs.
     * @private
     * @param {string} operator
     * @return {boolean}
     */
    UrlTemplate.prototype.isKeyOperator = function (operator) {
      return operator === ';' || operator === '&' || operator === '?';
    };
    /**
     * Expand a single template variable against the context, honoring the
     * :N prefix and * explode modifiers.
     * @private
     * @param {Object} context
     * @param {string} operator
     * @param {string} key
     * @param {string} modifier
     */
    UrlTemplate.prototype.getValues = function (context, operator, key, modifier) {
      var value = context[key],
          result = [];
      if (this.isDefined(value) && value !== '') {
        if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {
          value = value.toString();
          if (modifier && modifier !== '*') {
            // Prefix modifier: keep at most `modifier` characters.
            value = value.substring(0, parseInt(modifier, 10));
          }
          result.push(this.encodeValue(operator, value, this.isKeyOperator(operator) ? key : null));
        } else {
          if (modifier === '*') {
            // Explode modifier: one entry per array element / object pair.
            if (Array.isArray(value)) {
              value.filter(this.isDefined).forEach(function (value) {
                result.push(this.encodeValue(operator, value, this.isKeyOperator(operator) ? key : null));
              }, this);
            } else {
              Object.keys(value).forEach(function (k) {
                if (this.isDefined(value[k])) {
                  result.push(this.encodeValue(operator, value[k], k));
                }
              }, this);
            }
          } else {
            // No explode: join all members into one comma-separated value.
            var tmp = [];
            if (Array.isArray(value)) {
              value.filter(this.isDefined).forEach(function (value) {
                tmp.push(this.encodeValue(operator, value));
              }, this);
            } else {
              Object.keys(value).forEach(function (k) {
                if (this.isDefined(value[k])) {
                  tmp.push(this.encodeUnreserved(k));
                  tmp.push(this.encodeValue(operator, value[k].toString()));
                }
              }, this);
            }
            if (this.isKeyOperator(operator)) {
              result.push(this.encodeUnreserved(key) + '=' + tmp.join(','));
            } else if (tmp.length !== 0) {
              result.push(tmp.join(','));
            }
          }
        }
      } else {
        // Undefined or empty values: behavior depends on the operator.
        if (operator === ';') {
          if (this.isDefined(value)) {
            result.push(this.encodeUnreserved(key));
          }
        } else if (value === '' && (operator === '&' || operator === '?')) {
          result.push(this.encodeUnreserved(key) + '=');
        } else if (value === '') {
          result.push('');
        }
      }
      return result;
    };
    /**
     * Parse a template once and return an object whose expand(context)
     * substitutes all {expressions} using the rules above.
     * @param {string} template
     * @return {function(Object):string}
     */
    UrlTemplate.prototype.parse = function (template) {
      var that = this;
      var operators = ['+', '#', '.', '/', ';', '?', '&'];
      return {
        expand: function (context) {
          return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
            if (expression) {
              var operator = null,
                  values = [];
              if (operators.indexOf(expression.charAt(0)) !== -1) {
                operator = expression.charAt(0);
                expression = expression.substr(1);
              }
              // Each expression may hold several comma-separated variables.
              expression.split(/,/g).forEach(function (variable) {
                var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
                values.push.apply(values, that.getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
              });
              if (operator && operator !== '+') {
                var separator = ',';
                if (operator === '?') {
                  separator = '&';
                } else if (operator !== '#') {
                  separator = operator;
                }
                return (values.length !== 0 ? operator : '') + values.join(separator);
              } else {
                return values.join(',');
              }
            } else {
              // Literal text between expressions is reserved-encoded.
              return that.encodeReserved(literal);
            }
          });
        }
      };
    };
    return new UrlTemplate();
  }));
});
// Browser build of a user-agent probe: returns navigator.userAgent.
// Assumes a DOM-like environment where `navigator` is defined.
var browser = getUserAgentBrowser;
function getUserAgentBrowser () {
  /* global navigator */
  return navigator.userAgent
}
// Babel helper backing `const [a, b] = expr` destructuring: try the array
// fast path, then iterator consumption, then fail loudly.
function _slicedToArray(arr, i) {
  var asArray = _arrayWithHoles(arr);
  if (asArray) {
    return asArray;
  }
  var fromIterator = _iterableToArrayLimit(arr, i);
  if (fromIterator) {
    return fromIterator;
  }
  return _nonIterableRest();
}
// Babel helper: pass real arrays straight through; any other value falls
// to the next destructuring strategy (signalled by returning undefined).
function _arrayWithHoles(arr) {
  return Array.isArray(arr) ? arr : undefined;
}
// Babel helper: collect up to `i` items from an iterable into an array,
// preserving iterator protocol semantics (closes the iterator via its
// `return` method on early exit, and rethrows any error after cleanup).
function _iterableToArrayLimit(arr, i) {
  var _arr = [];
  var _n = true;   // true once the iterator reports done (or before start)
  var _d = false;  // whether an error was caught during iteration
  var _e = undefined;
  try {
    for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) {
      _arr.push(_s.value);
      // Stop once the requested element count is reached (i of 0/undefined
      // means "take everything").
      if (i && _arr.length === i) break;
    }
  } catch (err) {
    _d = true;
    _e = err;
  } finally {
    try {
      // Close the iterator if we exited before exhausting it.
      if (!_n && _i["return"] != null) _i["return"]();
    } finally {
      if (_d) throw _e;
    }
  }
  return _arr;
}
// Babel helper: the destructuring target was neither an array nor iterable.
function _nonIterableRest() {
  var message = "Invalid attempt to destructure non-iterable instance";
  throw new TypeError(message);
}
// Return a copy of `object` with every key lower-cased; falsy input yields
// an empty object.
function lowercaseKeys(object) {
  const result = {};
  if (!object) {
    return result;
  }
  for (const key of Object.keys(object)) {
    result[key.toLowerCase()] = object[key];
  }
  return result;
}
// Merge octokit request defaults with a per-request route/options pair.
// `route` is either an options object or a string like "GET /path" (or a
// bare URL, in which case it is treated as the url).
// NOTE(review): this dereferences defaults.mediaType.previews and
// mergedOptions.mediaType.previews unconditionally -- presumably the
// defaults object always carries a mediaType; confirm at the call sites.
function merge(defaults, route, options) {
  if (typeof route === "string") {
    let _route$split = route.split(" "),
        _route$split2 = _slicedToArray(_route$split, 2),
        method = _route$split2[0],
        url = _route$split2[1];
    options = Object.assign(url ? {
      method,
      url
    } : {
      url: method
    }, options);
  } else {
    options = route || {};
  } // lowercase header names before merging with defaults to avoid duplicates
  options.headers = lowercaseKeys(options.headers);
  const mergedOptions = umd.all([defaults, options].filter(Boolean), {
    isMergeableObject: isPlainObject
  }); // mediaType.previews arrays are merged, instead of overwritten
  if (defaults && defaults.mediaType.previews.length) {
    mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
  }
  // Normalize preview names: strip any "-preview" suffix.
  mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
  return mergedOptions;
}
// Append `parameters` to `url` as a query string, using "?" or "&" depending
// on whether the url already has a query. The "q" parameter keeps its "+"
// separators (search-syntax convention), encoding only the terms between.
function addQueryParameters(url, parameters) {
  const names = Object.keys(parameters);
  if (names.length === 0) {
    return url;
  }
  const separator = /\?/.test(url) ? "&" : "?";
  const pairs = names.map(name => {
    if (name === "q") {
      return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
    }
    return name + "=" + encodeURIComponent(parameters[name]);
  });
  return url + separator + pairs.join("&");
}
// Matches {expression} placeholders in an RFC 6570-style URL template.
const urlVariableRegex = /\{[^}]+\}/g;

// Strip the braces/operator characters from a matched expression and split
// it into its comma-separated variable names.
function removeNonChars(variableName) {
  return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
}

// Collect every variable name referenced by the template's placeholders.
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex);
  if (!matches) {
    return [];
  }
  const names = [];
  for (const match of matches) {
    names.push(...removeNonChars(match));
  }
  return names;
}
// Shallow-copy `object`, leaving out every key listed in `keysToOmit`.
function omit(object, keysToOmit) {
  const result = {};
  for (const key of Object.keys(object)) {
    if (!keysToOmit.includes(key)) {
      result[key] = object[key];
    }
  }
  return result;
}
/**
 * Turn merged endpoint options into fetch-ready request options:
 * { method, url, headers [, body] [, request] }.
 * Expands RFC 6570 URL templates, applies GitHub media-type/preview accept
 * headers, and routes leftover parameters into either the query string
 * (GET/HEAD) or the request body (other methods).
 */
function parse(options) {
  // https://fetch.spec.whatwg.org/#methods
  let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible

  let url = options.url.replace(/:([a-z]\w+)/g, "{+$1}");
  let headers = Object.assign({}, options.headers);
  let body;
  let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later

  const urlVariableNames = extractUrlVariableNames(url);
  // `urlTemplate` is the RFC 6570 expander bundled elsewhere in this file.
  url = urlTemplate.parse(url).expand(parameters);

  if (!/^http/.test(url)) {
    url = options.baseUrl + url;
  }

  // Parameters already consumed by URL expansion (plus baseUrl) must not be
  // sent again as query/body parameters.
  const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
  const remainingParameters = omit(parameters, omittedParameters);
  // NOTE(review): "Requset" is a long-standing typo for "Request" kept here
  // because this is generated/bundled code.
  const isBinaryRequset = /application\/octet-stream/i.test(headers.accept);

  // Binary (octet-stream) requests keep the accept header untouched.
  if (!isBinaryRequset) {
    if (options.mediaType.format) {
      // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
      headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, "application/vnd$1$2.".concat(options.mediaType.format))).join(",");
    }

    if (options.mediaType.previews.length) {
      // Keep previews already present in the accept header, add the requested
      // ones, and rebuild the header in GitHub's preview media-type format.
      const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
      headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
        const format = options.mediaType.format ? ".".concat(options.mediaType.format) : "+json";
        return "application/vnd.github.".concat(preview, "-preview").concat(format);
      }).join(",");
    }
  } // for GET/HEAD requests, set URL query parameters from remaining parameters
  // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters

  if (["GET", "HEAD"].includes(method)) {
    url = addQueryParameters(url, remainingParameters);
  } else {
    if ("data" in remainingParameters) {
      body = remainingParameters.data;
    } else {
      if (Object.keys(remainingParameters).length) {
        body = remainingParameters;
      } else {
        headers["content-length"] = 0;
      }
    }
  } // default content-type for JSON if body is set

  if (!headers["content-type"] && typeof body !== "undefined") {
    headers["content-type"] = "application/json; charset=utf-8";
  } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
  // fetch does not allow to set `content-length` header, but we can set body to an empty string

  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
    body = "";
  } // Only return body/request keys if present

  return Object.assign({
    method,
    url,
    headers
  }, typeof body !== "undefined" ? {
    body
  } : null, options.request ? {
    request: options.request
  } : null);
}
/**
 * Merge the bound defaults with a route and per-call options, then parse the
 * result into fetch-ready request options.
 */
function endpointWithDefaults(defaults, route, options) {
  const merged = merge(defaults, route, options);
  return parse(merged);
}
/**
 * Build an endpoint function pre-bound to merged defaults, exposing the
 * standard helper surface: DEFAULTS, defaults(), merge(), parse.
 */
function withDefaults(oldDefaults, newDefaults) {
  const DEFAULTS = merge(oldDefaults, newDefaults);
  const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
  endpoint.DEFAULTS = DEFAULTS;
  endpoint.defaults = withDefaults.bind(null, DEFAULTS);
  endpoint.merge = merge.bind(null, DEFAULTS);
  endpoint.parse = parse;
  return endpoint;
}
const VERSION = "5.1.2";
// User agent sent with every request, e.g. "octokit-endpoint.js/5.1.2 <env>".
// `browser()` is defined elsewhere in this bundle — presumably returns
// environment/platform info; confirm against the bundled getUserAgent helper.
const userAgent = "octokit-endpoint.js/".concat(VERSION, " ").concat(browser());
// Baseline options every endpoint invocation starts from.
const DEFAULTS = {
  method: "GET",
  baseUrl: "https://api.github.com",
  headers: {
    accept: "application/vnd.github.v3+json",
    "user-agent": userAgent
  },
  mediaType: {
    format: "",
    previews: []
  }
};
// Public endpoint function pre-bound to the defaults above.
const endpoint = withDefaults(null, DEFAULTS);
// CommonJS shim that re-exports the environment's native fetch implementation
// (and the Fetch API classes) so the request code can use them uniformly.
var browser$1 = createCommonjsModule(function (module, exports) {
  // ref: https://github.com/tc39/proposal-global
  var getGlobal = function () {
    // the only reliable means to get the global object is
    // `Function('return this')()`
    // However, this causes CSP violations in Chrome apps.
    if (typeof self !== 'undefined') { return self; }
    if (typeof window !== 'undefined') { return window; }
    if (typeof global !== 'undefined') { return global; }
    throw new Error('unable to locate global object');
  };

  var global = getGlobal();

  module.exports = exports = global.fetch;

  // Needed for TypeScript and Webpack.
  exports.default = global.fetch.bind(global);

  exports.Headers = global.Headers;
  exports.Request = global.Request;
  exports.Response = global.Response;
});
// Named aliases for the Fetch API classes exported above.
var browser_1 = browser$1.Headers;
var browser_2 = browser$1.Request;
var browser_3 = browser$1.Response;
/**
 * Error subclass used to signal deprecated API usage; identified at runtime
 * by `error.name === 'Deprecation'`.
 */
class Deprecation extends Error {
  constructor (message) {
    super(message);

    // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = 'Deprecation';
  }
}
var Deprecation_1 = Deprecation;
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
var wrappy_1 = wrappy;
function wrappy (fn, cb) {
  // Two-argument form: wrappy(fn, cb) === wrappy(fn)(cb).
  if (fn && cb) return wrappy(fn)(cb)

  if (typeof fn !== 'function')
    throw new TypeError('need wrapper function')

  // Copy fn's own properties onto the wrapper so decorations survive wrapping.
  Object.keys(fn).forEach(function (k) {
    wrapper[k] = fn[k];
  });

  return wrapper

  function wrapper() {
    // Copy `arguments` into a real array before forwarding.
    var args = new Array(arguments.length);
    for (var i = 0; i < args.length; i++) {
      args[i] = arguments[i];
    }
    var ret = fn.apply(this, args);
    // By convention the last argument is the callback; if fn returned a new
    // function, carry the callback's own properties over to it as well.
    var cb = args[args.length-1];
    if (typeof ret === 'function' && ret !== cb) {
      Object.keys(cb).forEach(function (k) {
        ret[k] = cb[k];
      });
    }
    return ret
  }
}
// Wrap once/onceStrict with wrappy so own properties of wrapped callbacks
// are preserved.
var once_1 = wrappy_1(once);
var strict = wrappy_1(onceStrict);
// Opt-in prototype extensions: calling once.proto() (itself once-guarded)
// installs fn.once() and fn.onceStrict() on Function.prototype.
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    value: function () {
      return once(this)
    },
    configurable: true
  });

  Object.defineProperty(Function.prototype, 'onceStrict', {
    value: function () {
      return onceStrict(this)
    },
    configurable: true
  });
});
/**
 * Wrap `fn` so it runs at most once; later calls return the first result.
 * The wrapper exposes `.called` and `.value` for inspection.
 */
function once (fn) {
  function wrapped () {
    if (wrapped.called) return wrapped.value
    wrapped.called = true;
    wrapped.value = fn.apply(this, arguments);
    return wrapped.value
  }
  wrapped.called = false;
  return wrapped
}
/**
 * Like `once`, but a second invocation throws instead of returning the
 * cached value. The error message includes the wrapped function's name.
 */
function onceStrict (fn) {
  var label = fn.name || 'Function wrapped with `once`';
  function wrapped () {
    if (wrapped.called)
      throw new Error(wrapped.onceError)
    wrapped.called = true;
    wrapped.value = fn.apply(this, arguments);
    return wrapped.value
  }
  wrapped.onceError = label + " shouldn't be called more than once";
  wrapped.called = false;
  return wrapped
}
once_1.strict = strict;
// Warn only the first time a deprecation is raised in this process
// (the `once` guard applies to the whole function, not per-message).
const logOnce = once_1(deprecation => console.warn(deprecation));
/**
 * Error with extra properties to help with debugging.
 * Carries the HTTP status, response headers, and a credential-redacted copy
 * of the request options that produced the failure.
 */
class RequestError extends Error {
  constructor(message, statusCode, options) {
    super(message); // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = "HttpError";
    this.status = statusCode;
    // Deprecated alias: error.code -> error.status, warns once per process.
    Object.defineProperty(this, "code", {
      get() {
        logOnce(new Deprecation_1("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
        return statusCode;
      }
    });
    this.headers = options.headers; // redact request credentials without mutating original request options

    const requestCopy = Object.assign({}, options.request);

    if (options.request.headers.authorization) {
      // Keep the auth scheme (e.g. "token"), redact the secret part.
      requestCopy.headers = Object.assign({}, options.request.headers, {
        authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
      });
    } // client_id & client_secret can be passed as URL query parameters to increase rate limit
    // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications

    requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]");
    this.request = requestCopy;
  }
}
const VERSION$1 = "4.1.0";
// Read the response body as an ArrayBuffer (used for binary payloads).
function getBufferResponse(response) {
  return response.arrayBuffer();
}
/**
 * Execute a request via fetch and normalize the result to
 * { status, url, headers, data }. Maps HTTP error statuses and network
 * failures to RequestError.
 */
function fetchWrapper(requestOptions) {
  // Serialize plain-object or array bodies to JSON.
  if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
    requestOptions.body = JSON.stringify(requestOptions.body);
  }

  let headers = {};
  let status;
  let url;
  // Allow callers to inject a custom fetch (e.g. for tests); fall back to the
  // environment fetch re-exported by browser$1.
  const fetch = requestOptions.request && requestOptions.request.fetch || browser$1;
  return fetch(requestOptions.url, Object.assign({
    method: requestOptions.method,
    body: requestOptions.body,
    headers: requestOptions.headers,
    redirect: requestOptions.redirect
  }, requestOptions.request)).then(response => {
    url = response.url;
    status = response.status;

    // Collect response headers into a plain object.
    for (const keyAndValue of response.headers) {
      headers[keyAndValue[0]] = keyAndValue[1];
    }

    // 204 No Content / 205 Reset Content: nothing to parse.
    if (status === 204 || status === 205) {
      return;
    } // GitHub API returns 200 for HEAD requests


    if (requestOptions.method === "HEAD") {
      if (status < 400) {
        return;
      }

      throw new RequestError(response.statusText, status, {
        headers,
        request: requestOptions
      });
    }

    if (status === 304) {
      throw new RequestError("Not modified", status, {
        headers,
        request: requestOptions
      });
    }

    if (status >= 400) {
      return response.text().then(message => {
        const error = new RequestError(message, status, {
          headers,
          request: requestOptions
        });

        try {
          // Error bodies are usually JSON; merge their fields onto the error.
          Object.assign(error, JSON.parse(error.message));
        } catch (e) {// ignore, see octokit/rest.js#684
        }

        throw error;
      });
    }

    // Pick a body decoder based on the content type: JSON, text, or buffer.
    const contentType = response.headers.get("content-type");

    if (/application\/json/.test(contentType)) {
      return response.json();
    }

    if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
      return response.text();
    }

    return getBufferResponse(response);
  }).then(data => {
    return {
      status,
      url,
      headers,
      data
    };
  }).catch(error => {
    // Re-throw our own errors; wrap anything else (network failures, etc.)
    // as a synthetic 500 RequestError.
    if (error instanceof RequestError) {
      throw error;
    }

    throw new RequestError(error.message, 500, {
      headers,
      request: requestOptions
    });
  });
}
/**
 * Build a request function bound to an endpoint with merged defaults.
 * If the merged options carry a `request.hook`, the actual fetch is routed
 * through it; otherwise the request is dispatched directly.
 */
function withDefaults$1(oldEndpoint, newDefaults) {
  const endpoint = oldEndpoint.defaults(newDefaults);

  function newApi(route, parameters) {
    const endpointOptions = endpoint.merge(route, parameters);
    if (endpointOptions.request && endpointOptions.request.hook) {
      return endpointOptions.request.hook(endpointOptions, options => fetchWrapper(endpoint.parse(options)));
    }
    return fetchWrapper(endpoint.parse(endpointOptions));
  }

  newApi.endpoint = endpoint;
  newApi.defaults = withDefaults$1.bind(null, endpoint);
  return newApi;
}
// Public request function: endpoint defaults plus this module's user agent.
const request = withDefaults$1(endpoint, {
  headers: {
    "user-agent": "octokit-request.js/".concat(VERSION$1, " ").concat(browser())
  }
});

export { request };
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds an integer `version` field (default 1) to the reviews.SearchSession
    # model, following migration 0008.

    dependencies = [
        ('reviews', '0008_auto_20150618_0836'),
    ]

    operations = [
        migrations.AddField(
            model_name='searchsession',
            name='version',
            field=models.IntegerField(default=1),
        ),
    ]
|
/**
* External dependencies
*/
import classnames from 'classnames';
import PropTypes from 'prop-types';
import Title from '@woocommerce/base-components/title';
/**
* Internal dependencies
*/
import './style.scss';
/**
 * Heading row of a checkout step: the step title (aria-hidden, since the
 * fieldset legend carries the accessible name) plus optional extra content.
 */
const StepHeading = ( { title, stepHeadingContent } ) => (
	<div className="wc-block-components-checkout-step__heading">
		<Title
			aria-hidden="true"
			className="wc-block-components-checkout-step__title"
			headingLevel="2"
		>
			{ title }
		</Title>
		{ !! stepHeadingContent && (
			<span className="wc-block-components-checkout-step__heading-content">
				{ stepHeadingContent }
			</span>
		) }
	</div>
);
/**
 * A single step of the checkout form. Renders a <fieldset> (with a
 * screen-reader legend) when a legend or title is provided, otherwise a
 * plain <div>; includes the optional heading, description, and step content.
 */
const FormStep = ( {
	id,
	className,
	title,
	legend,
	description,
	children,
	disabled = false,
	showStepNumber = true,
	stepHeadingContent = () => {},
} ) => {
	// If the form step doesn't have a legend or title, render a <div> instead
	// of a <fieldset>.
	const Element = legend || title ? 'fieldset' : 'div';

	return (
		<Element
			className={ classnames(
				className,
				'wc-block-components-checkout-step',
				{
					'wc-block-components-checkout-step--with-step-number': showStepNumber,
					'wc-block-components-checkout-step--disabled': disabled,
				}
			) }
			id={ id }
			disabled={ disabled }
		>
			{ !! ( legend || title ) && (
				<legend className="screen-reader-text">
					{ legend || title }
				</legend>
			) }
			{ !! title && (
				<StepHeading
					title={ title }
					stepHeadingContent={ stepHeadingContent() }
				/>
			) }
			<div className="wc-block-components-checkout-step__container">
				{ !! description && (
					<p className="wc-block-components-checkout-step__description">
						{ description }
					</p>
				) }
				<div className="wc-block-components-checkout-step__content">
					{ children }
				</div>
			</div>
		</Element>
	);
};
// Prop contract for FormStep; `stepHeadingContent` is a render function whose
// result is shown next to the step title.
FormStep.propTypes = {
	id: PropTypes.string,
	className: PropTypes.string,
	title: PropTypes.string,
	description: PropTypes.string,
	children: PropTypes.node,
	showStepNumber: PropTypes.bool,
	stepHeadingContent: PropTypes.func,
	disabled: PropTypes.bool,
	legend: PropTypes.string,
};

export default FormStep;
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from enum import Enum
# Public enum surface of this generated module.
__all__ = [
    'AlertRuleKind',
    'CaseSeverity',
    'DataConnectorKind',
    'IncidentClassification',
    'IncidentClassificationReason',
    'IncidentSeverity',
    'IncidentStatus',
]


# All values below mirror the string constants of the Azure Sentinel REST API.
class AlertRuleKind(str, Enum):
    """
    The alert rule kind
    """
    SCHEDULED = "Scheduled"
    MICROSOFT_SECURITY_INCIDENT_CREATION = "MicrosoftSecurityIncidentCreation"
    FUSION = "Fusion"


class CaseSeverity(str, Enum):
    """
    The severity of the incident
    """
    CRITICAL = "Critical"
    HIGH = "High"
    MEDIUM = "Medium"
    LOW = "Low"
    INFORMATIONAL = "Informational"


class DataConnectorKind(str, Enum):
    """
    The data connector kind
    """
    AZURE_ACTIVE_DIRECTORY = "AzureActiveDirectory"
    AZURE_SECURITY_CENTER = "AzureSecurityCenter"
    MICROSOFT_CLOUD_APP_SECURITY = "MicrosoftCloudAppSecurity"
    THREAT_INTELLIGENCE = "ThreatIntelligence"
    OFFICE365 = "Office365"
    AMAZON_WEB_SERVICES_CLOUD_TRAIL = "AmazonWebServicesCloudTrail"
    AZURE_ADVANCED_THREAT_PROTECTION = "AzureAdvancedThreatProtection"
    MICROSOFT_DEFENDER_ADVANCED_THREAT_PROTECTION = "MicrosoftDefenderAdvancedThreatProtection"


class IncidentClassification(str, Enum):
    """
    The reason the incident was closed
    """
    UNDETERMINED = "Undetermined"
    TRUE_POSITIVE = "TruePositive"
    BENIGN_POSITIVE = "BenignPositive"
    FALSE_POSITIVE = "FalsePositive"


class IncidentClassificationReason(str, Enum):
    """
    The classification reason the incident was closed with
    """
    SUSPICIOUS_ACTIVITY = "SuspiciousActivity"
    SUSPICIOUS_BUT_EXPECTED = "SuspiciousButExpected"
    INCORRECT_ALERT_LOGIC = "IncorrectAlertLogic"
    INACCURATE_DATA = "InaccurateData"


class IncidentSeverity(str, Enum):
    """
    The severity of the incident
    """
    HIGH = "High"
    MEDIUM = "Medium"
    LOW = "Low"
    INFORMATIONAL = "Informational"


class IncidentStatus(str, Enum):
    """
    The status of the incident
    """
    NEW = "New"
    ACTIVE = "Active"
    CLOSED = "Closed"
|
import { html, PolymerElement } from "../../../../@polymer/polymer/polymer-element.js";
import "../../../../@vaadin/vaadin-grid/vaadin-grid.js";
import "../../../../@polymer/polymer/lib/elements/dom-repeat.js";
import "../../../../@vaadin/vaadin-grid/vaadin-grid.js";
import "../../../../@polymer/polymer/lib/elements/dom-if.js";
import "../../../../@vaadin/vaadin-grid/vaadin-grid-column-group.js";
import "../../../../@vaadin/vaadin-grid/vaadin-grid-filter.js";
import "../../../../@vaadin/vaadin-grid/vaadin-grid-sorter.js";
import "../../../../@vaadin/vaadin-grid/vaadin-grid-selection-column.js";
import "../../../../@polymer/iron-ajax/iron-ajax.js";
import "../../../../@polymer/iron-image/iron-image.js";
import "../../../../@polymer/paper-tooltip/paper-tooltip.js";
import "../../../lrnsys-layout/lib/lrnsys-dialog.js";
import "../../../elmsln-loading/elmsln-loading.js";
import "../../../lrndesign-avatar/lrndesign-avatar.js";
/**
 * `lrnapp-canvas-listing`
 * Lists Canvas LMS courses in a sortable/filterable vaadin-grid, lets the
 * user map each Canvas course to an ELMSLN course, and shows a roster
 * dialog with per-course details fetched on demand.
 * Renders into light DOM (see _attachDom override).
 */
class LrnappCanvasListing extends PolymerElement {
  // Template: loading overlay, the course grid with sort/filter columns,
  // a second iron-ajax for roster requests, and the details dialog.
  static get template() {
    return html`
      <style include="materializecss-styles">
        :host {
          display: block;
          margin: 0 2em;
        }
        .loading {
          width: 100%;
          z-index: 1000;
          opacity: 0.8;
          text-align: center;
          align-content: center;
          justify-content: center;
          height: 100vh;
          position: absolute;
          background-color: white;
        }
        vaadin-grid#material {
          height: 75vh;
          font-family: Roboto, sans-serif;
          --divider-color: rgba(0, 0, 0, var(--dark-divider-opacity));
          --vaadin-grid-cell: {
            padding: 0;
          }
          --vaadin-grid-header-cell: {
            height: 3.5em;
            color: rgba(0, 0, 0, var(--dark-secondary-opacity));
            font-size: 1em;
          }
          --vaadin-grid-body-cell: {
            height: 3em;
            color: rgba(0, 0, 0, var(--dark-primary-opacity));
            font-size: 0.8em;
          }
          --vaadin-grid-body-row-hover-cell: {
            background-color: var(--paper-grey-200);
          }
          --vaadin-grid-body-row-selected-cell: {
            background-color: var(--paper-grey-100);
          }
          --vaadin-grid-focused-cell: {
            box-shadow: none;
            font-weight: bold;
          }
        }
        vaadin-grid#material .cell {
          overflow: hidden;
          text-overflow: ellipsis;
          padding-right: 56px;
        }
        vaadin-grid#material .cell.last {
          padding-right: 24px;
        }
        vaadin-grid#material .cell.numeric {
          text-align: right;
        }
        vaadin-grid#material paper-checkbox {
          --primary-color: var(--paper-indigo-500);
          margin: 0 24px;
        }
        vaadin-grid#material vaadin-grid-sorter {
          --vaadin-grid-sorter-arrow: {
            display: none !important;
          }
        }
        vaadin-grid#material vaadin-grid-sorter .cell {
          flex: 1;
          display: flex;
          justify-content: space-between;
          align-items: center;
        }
        vaadin-grid#material vaadin-grid-sorter iron-icon {
          transform: scale(0.8);
        }
        vaadin-grid#material vaadin-grid-sorter:not([direction]) iron-icon {
          color: rgba(0, 0, 0, var(--dark-disabled-opacity));
        }
        vaadin-grid#material vaadin-grid-sorter[direction] {
          color: rgba(0, 0, 0, var(--dark-primary-opacity));
        }
        vaadin-grid#material vaadin-grid-sorter[direction="desc"] iron-icon {
          transform: scale(0.8) rotate(180deg);
        }
        vaadin-grid-sorter {
          text-align: center;
        }
        lrndesign-avatar {
          display: inline-flex;
        }
        lrnsys-dialog {
          display: inline-flex;
        }
        lrnsys-dialog #dialog-trigger span {
          pointer-events: none;
        }
        .avatar-name {
          line-height: 2em;
          margin: 0;
          display: inline-block;
        }
        .listing-select {
          display: block;
          height: 100%;
          margin: 0;
          width: 100%;
        }
      </style>
      <iron-ajax
        auto
        url="[[sourcePath]]"
        params='{"return": "courses"}'
        handle-as="json"
        on-response="handleResponse"
        last-response="{{queryResponse}}"
      ></iron-ajax>
      <div id="loading" class="loading">
        <h3>Loading..</h3>
        <elmsln-loading color="grey-text" size="large"></elmsln-loading>
      </div>
      <vaadin-grid
        column-reordering-allowed
        id="material"
        aria-label="Course list"
        items="[[_toArray(canvasCourses)]]"
      >
        <vaadin-grid-column width="50px" flex-grow="0">
          <template class="header"
            >#</template
          >
          <template
            >[[index]]</template
          >
          <template class="footer"
            >#</template
          >
        </vaadin-grid-column>
        <vaadin-grid-column width="200px" flex-grow="0" resizable>
          <template class="header">
            <vaadin-grid-sorter path="term">Semester</vaadin-grid-sorter>
          </template>
          <template>
            [[item.term]]
          </template>
          <template class="footer">
            <vaadin-grid-filter
              aria-label="Semester"
              path="term"
              value="[[_filterTerm]]"
            >
              <paper-input
                slot="filter"
                label="Semester"
                value="{{_filterTerm::input}}"
                focus-target
              ></paper-input>
            </vaadin-grid-filter>
          </template>
        </vaadin-grid-column>
        <vaadin-grid-column resizable>
          <template class="header">
            <vaadin-grid-sorter path="name">Name</vaadin-grid-sorter>
          </template>
          <template
            >[[item.name]]</template
          >
          <template class="footer">
            <vaadin-grid-filter
              aria-label="Course"
              path="name"
              value="[[_filterCourse]]"
            >
              <paper-input
                slot="filter"
                label="Course"
                value="{{_filterCourse::input}}"
                focus-target
              ></paper-input>
            </vaadin-grid-filter>
          </template>
        </vaadin-grid-column>
        <vaadin-grid-column resizable>
          <template class="header">
            <vaadin-grid-sorter path="sis_course_id">SIS</vaadin-grid-sorter>
          </template>
          <template>
            [[item.sis_course_id]]
          </template>
          <template class="footer">
            <vaadin-grid-filter
              aria-label="Student information system ID"
              path="sis_course_id"
              value="[[_filterSIS]]"
            >
              <paper-input
                slot="filter"
                label="SIS"
                value="{{_filterSIS::input}}"
                focus-target
              ></paper-input>
            </vaadin-grid-filter>
          </template>
        </vaadin-grid-column>
        <vaadin-grid-column width="100px" flex-grow="0" resizable>
          <template class="header">
            <vaadin-grid-sorter path="student_count"
              >Students</vaadin-grid-sorter
            >
          </template>
          <template
            >[[item.student_count]]</template
          >
        </vaadin-grid-column>
        <vaadin-grid-column width="100px" flex-grow="0" resizable>
          <template class="header">
            <vaadin-grid-sorter path="workflow_state">State</vaadin-grid-sorter>
          </template>
          <template
            >[[item.workflow_state]]</template
          >
          <template class="footer">
            <vaadin-grid-filter
              aria-label="Workflow state"
              path="workflow_state"
              value="[[_filterWorkflow]]"
            >
              <paper-input
                slot="filter"
                label="State"
                value="{{_filterWorkflow::input}}"
                focus-target
              ></paper-input>
            </vaadin-grid-filter>
          </template>
        </vaadin-grid-column>
        <vaadin-grid-column>
          <template class="header"
            >ELMSLN Course</template
          >
          <template>
            <select
              name$="elmsln--map--:key:[[item.sis_course_id]]:key:[[item.term]]:key:[[item.start]]:key:[[item.end]]"
              class="listing-select"
              value="{{item.elmslnCourse::input}}"
            >
              <template
                is="dom-repeat"
                items="[[elmslnCourses]]"
                as="elmsCourse"
              >
                <option value="[[elmsCourse.machineName]]"
                  >[[elmsCourse.name]] ([[elmsCourse.machineName]])</option
                >
              </template>
            </select>
          </template>
          <template class="footer">
            <vaadin-grid-filter
              aria-label="Course"
              path="elmslnCourse"
              value="[[_filterELMSLNCourse]]"
            >
              <paper-input
                slot="filter"
                label="Course"
                value="{{_filterELMSLNCourse::input}}"
                focus-target
              ></paper-input>
            </vaadin-grid-filter>
          </template>
        </vaadin-grid-column>
        <vaadin-grid-column width="100px" flex-grow="0">
          <template class="header"></template>
          <template>
            <paper-button
              raised
              on-click="_triggerDialog"
              id$="{{item.sis_course_id}}"
              >Details</paper-button
            >
          </template>
          <template class="footer"></template>
        </vaadin-grid-column>
      </vaadin-grid>
      <iron-ajax
        id="request"
        url="[[sourcePath]]"
        params='{"return": "users"}'
        handle-as="json"
        on-response="handleRosterResponse"
        last-response="{{queryResponse}}"
      ></iron-ajax>
      <lrnsys-dialog
        tabindex="-1"
        id="details-dialog"
        body-append
        header="{{activeCourse.name}}"
      >
        <div slot="content">
          <template is="dom-if" if="{{!roster}}">
            <div id="loadingRoster" class="loading">
              <h3>Loading..</h3>
              <elmsln-loading color="grey-text" size="large"></elmsln-loading>
            </div>
          </template>
        </div>
        <div slot="header">
          <template is="dom-if" if="{{roster}}">
            <template is="dom-if" if="{{activeCourse.image}}">
              <iron-image
                style="width:100%; height:200px; background-color: lightgray;"
                sizing="cover"
                preload
                fade
                src$="{{activeCourse.image}}"
              ></iron-image>
            </template>
            <span class="heading">
              <span>Student count: {{activeCourse.student_count}}</span>
              <span>SIS ID: {{activeCourse.sis_course_id}}</span>
              <span>Term: {{activeCourse.term}}</span>
              <span>Workflow: {{activeCourse.workflow_state}}</span>
            </span>
          </template>
        </div>
        <div id="loadingContent" slot="content">
          <template is="dom-repeat" items="[[_toArray(roster)]]" as="roleList">
            <h2>{{roleList.role}}s</h2>
            <template
              is="dom-repeat"
              items="[[_toArray(roleList.users)]]"
              as="user"
            >
              <div class="avatar-name" id$="user-{{user.id}}">
                <lrndesign-avatar
                  label$="{{user.name}}"
                  src$="{{user.picture}}"
                ></lrndesign-avatar>
              </div>
              <paper-tooltip for$="user-{{user.id}}"
                >{{user.name}}</paper-tooltip
              >
            </template>
          </template>
        </div>
      </lrnsys-dialog>
    `;
  }

  // Custom element tag name.
  static get tag() {
    return "lrnapp-canvas-listing";
  }

  static get properties() {
    return {
      elmslnCourse: {
        type: String
      },
      elmslnSection: {
        type: String
      },
      basePath: {
        type: String
      },
      csrfToken: {
        type: String
      },
      endPoint: {
        type: String
      },
      // ELMSLN courses available as mapping targets in the select column.
      elmslnCourses: {
        type: Array,
        notify: true
      },
      // Canvas courses displayed in the grid (object keyed by id; see _toArray).
      canvasCourses: {
        type: Array,
        notify: true
      },
      // Roster of the active course; `false` while loading (drives dom-if).
      roster: {
        type: Array,
        notify: true,
        value: false
      },
      // Last iron-ajax response payload (shared by both requests).
      queryResponse: {
        type: Array,
        notify: true
      },
      // Backend endpoint both iron-ajax elements hit.
      sourcePath: {
        type: String,
        notify: true
      },
      // Course currently shown in the details dialog.
      activeCourse: {
        type: String,
        notify: true,
        reflectToAttribute: true
      }
    };
  }
  /**
   * Simple way to convert from object to array.
   */
  _toArray(obj) {
    if (obj == null) {
      return [];
    }
    return Object.keys(obj).map(function (key) {
      return obj[key];
    });
  }
  /**
   * Toggling collapse on an iron element.
   */
  collapseToggle(e) {
    e.target.nextElementSibling.toggle();
  }
  /**
   * Trigger the dialog box to opened and kick off request for data.
   */
  _triggerDialog(e) {
    this.querySelector("#details-dialog").toggleDialog();
    // Reset roster so the loading state shows while the request is in flight.
    this.roster = false;
    // The clicked button's id$ is the course's sis_course_id (set in template).
    this.activeCourse = this.canvasCourses[e.target.id];
    this.querySelector("#request").params["sis_course_id"] = this.activeCourse.sis_course_id;
    this.querySelector("#request").generateRequest();
    this.querySelector("#loadingContent").style.display = "none";
  }
  // Course-list response: populate both course arrays, hide the overlay.
  handleResponse() {
    this.elmslnCourses = this.queryResponse.data.elmslnCourses;
    this.canvasCourses = this.queryResponse.data.canvasCourses;
    this.$.loading.hidden = true;
  }
  // Roster response: store roster (flips the dom-if) and reveal the content.
  handleRosterResponse() {
    this.roster = this.queryResponse.data;
    this.querySelector("#loadingContent").style.display = "block";
  }
  /**
   * highjack shadowDom
   */
  _attachDom(dom) {
    // Render into light DOM instead of a shadow root.
    this.appendChild(dom);
  }
}
window.customElements.define(LrnappCanvasListing.tag, LrnappCanvasListing);
export { LrnappCanvasListing };
|
// Copyright (c) 2005-2006 INRIA Sophia-Antipolis (France).
// All rights reserved.
//
// This file is part of CGAL (www.cgal.org).
//
// Partially supported by the IST Programme of the EU as a Shared-cost
// RTD (FET Open) Project under Contract No IST-2000-26473
// (ECG - Effective Computational Geometry for Curves and Surfaces)
// and a STREP (FET Open) Project under Contract No IST-006413
// (ACS -- Algorithms for Complex Shapes)
//
// $URL$
// $Id$
// SPDX-License-Identifier: GPL-3.0-or-later OR LicenseRef-Commercial
//
// Author(s) : Monique Teillaud <Monique.Teillaud@sophia.inria.fr>
// Sylvain Pion
// Pedro Machado
#ifndef CGAL_ALGEBRAIC_KERNEL_FUNCTIONS_ON_ROOTS_AND_POLYNOMIALS_2_3_H
#define CGAL_ALGEBRAIC_KERNEL_FUNCTIONS_ON_ROOTS_AND_POLYNOMIALS_2_3_H
#include <CGAL/license/Circular_kernel_3.h>
#include <CGAL/Algebraic_kernel_for_spheres/internal_functions_on_roots_and_polynomial_1_3_and_2_3.h>
namespace CGAL {
namespace AlgebraicSphereFunctors {
// Solve the system of three sphere equations: write the (algebraic) common
// points of the three spheres to `res`. Degenerate inputs (equal spheres,
// infinitely many solutions) are rejected by preconditions.
template < class AK, class OutputIterator >
inline
OutputIterator
solve( const typename AK::Polynomial_for_spheres_2_3 &e1,
       const typename AK::Polynomial_for_spheres_2_3 &e2,
       const typename AK::Polynomial_for_spheres_2_3 &e3,
       OutputIterator res )
{
  typedef typename AK::FT FT;
  CGAL_kernel_precondition(!((e1 == e2) && (e2 == e3)));
  // we put as a precondition that the polynomial for spheres represents
  // a sphere and not an isolated point or an empty_space
  CGAL_kernel_precondition(!(e1.empty_space() || e1.isolated_point()));
  CGAL_kernel_precondition(!(e2.empty_space() || e2.isolated_point()));
  CGAL_kernel_precondition(!(e3.empty_space() || e3.isolated_point()));
  typedef typename AK::Polynomial_1_3 Polynomial_1_3;
  // The degenerated cases are 2 tangent spheres
  // or 2 non-intersecting spheres
  // because we cannot have infinitely many solutions
  if(e1 == e2) {
    if(tangent<AK>(e1,e3)) {
      // Radical plane of the two distinct spheres passes through the
      // single tangency point.
      Polynomial_1_3 p = plane_from_2_spheres<AK>(e1,e3);
      return internal::solve_tangent<AK>(p,e1,res);
    }
    CGAL_kernel_precondition(!(intersect<AK>(e1,e3)));
    return res;
  }
  if((e1 == e3) || (e2 == e3)) {
    if(tangent<AK>(e1,e2)) {
      Polynomial_1_3 p = plane_from_2_spheres<AK>(e1,e2);
      return internal::solve_tangent<AK>(p,e1,res);
    }
    CGAL_kernel_precondition(!(intersect<AK>(e1,e2)));
    return res;
  }
  // non degenerated case
  if(intersect<AK>(e1,e2)) {
    Polynomial_1_3 p1 = plane_from_2_spheres<AK>(e1,e2);
    if(intersect<AK>(e2,e3)) {
      Polynomial_1_3 p2 = plane_from_2_spheres<AK>(e2,e3);
      if(same_solutions<FT>(p1,p2)) {
        // Both radical planes coincide: the two intersection circles lie in
        // the same plane; compare their squared radii to decide.
        const FT sq_d1 = CGAL::square(p1.a()*e1.a() + p1.b()*e1.b() +
                                      p1.c()*e1.c() + p1.d()) /
          (square(p1.a()) + square(p1.b()) + square(p1.c()));
        const FT r1_sqr = e1.r_sq() - sq_d1;
        const FT sq_d2 = CGAL::square(p2.a()*e2.a() + p2.b()*e2.b() +
                                      p2.c()*e2.c() + p2.d()) /
          (square(p2.a()) + square(p2.b()) + square(p2.c()));
        const FT r2_sqr = e2.r_sq() - sq_d2;
        if(r1_sqr != r2_sqr) return res;
        // otherwise there are an infinite number of points
        // this is not allowed
        CGAL_kernel_precondition(r1_sqr == 0);
        return internal::solve_tangent<AK>(p1,e1,res);
      }
      // Reduce to a plane/plane/sphere system.
      return solve<AK>(p1,p2,e2,res);
    } return res;
  } return res;
}
// Extreme point of the sphere s in the x-direction; `i` selects which of the
// two antipodal points (presumably i=true gives the smaller x — confirm
// against make_root_of_2's sign convention).
template <class AK>
typename AK::Root_for_spheres_2_3
x_critical_point(const typename AK::Polynomial_for_spheres_2_3 & s, bool i)
{
  typedef typename AK::Root_of_2 Root_of_2;
  typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;

  return Root_for_spheres_2_3(
    make_root_of_2(s.a(),typename AK::FT(i?-1:1),s.r_sq()),
    Root_of_2(s.b()),
    Root_of_2(s.c()));
}

// Both x-extreme points of the sphere s, written to `res` (smaller sign
// coefficient first).
template <class AK, class OutputIterator>
OutputIterator
x_critical_points(const typename AK::Polynomial_for_spheres_2_3 & s, OutputIterator res)
{
  typedef typename AK::Root_of_2 Root_of_2;
  typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;
  typedef typename AK::FT FT;

  *res++ = Root_for_spheres_2_3(make_root_of_2(s.a(),FT(-1),s.r_sq()),
                                Root_of_2(s.b()),
                                Root_of_2(s.c()));
  *res++ = Root_for_spheres_2_3(make_root_of_2(s.a(),FT(1),s.r_sq()),
                                Root_of_2(s.b()),
                                Root_of_2(s.c()));
  return res;
}

// Extreme point of the sphere s in the y-direction (see x_critical_point).
template <class AK>
typename AK::Root_for_spheres_2_3
y_critical_point(const typename AK::Polynomial_for_spheres_2_3 &s, bool i)
{
  typedef typename AK::Root_of_2 Root_of_2;
  typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;

  return Root_for_spheres_2_3(
    Root_of_2(s.a()),
    make_root_of_2(s.b(),typename AK::FT(i?-1:1),s.r_sq()),
    Root_of_2(s.c()));
}

// Both y-extreme points of the sphere s, written to `res`.
template <class AK, class OutputIterator>
OutputIterator
y_critical_points(const typename AK::Polynomial_for_spheres_2_3 & s, OutputIterator res)
{
  typedef typename AK::Root_of_2 Root_of_2;
  typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;
  typedef typename AK::FT FT;

  *res++ = Root_for_spheres_2_3(Root_of_2(s.a()),
                                make_root_of_2(s.b(),FT(-1),s.r_sq()),
                                Root_of_2(s.c()));
  *res++ = Root_for_spheres_2_3(Root_of_2(s.a()),
                                make_root_of_2(s.b(),FT(1),s.r_sq()),
                                Root_of_2(s.c()));
  return res;
}

// Extreme point of the sphere s in the z-direction (see x_critical_point).
template <class AK>
typename AK::Root_for_spheres_2_3
z_critical_point(const typename AK::Polynomial_for_spheres_2_3 &s, bool i)
{
  typedef typename AK::Root_of_2 Root_of_2;
  typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;

  return Root_for_spheres_2_3(Root_of_2(s.a()),
                              Root_of_2(s.b()),
                              make_root_of_2(s.c(),typename AK::FT(i?-1:1),s.r_sq()));
}

// Both z-extreme points of the sphere s, written to `res`.
template <class AK, class OutputIterator>
OutputIterator
z_critical_points(const typename AK::Polynomial_for_spheres_2_3 & s, OutputIterator res)
{
  typedef typename AK::Root_of_2 Root_of_2;
  typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;
  typedef typename AK::FT FT;

  *res++ = Root_for_spheres_2_3(Root_of_2(s.a()),
                                Root_of_2(s.b()),
                                make_root_of_2(s.c(),FT(-1),s.r_sq()));
  *res++ = Root_for_spheres_2_3(Root_of_2(s.a()),
                                Root_of_2(s.b()),
                                make_root_of_2(s.c(),FT(1),s.r_sq()));
  return res;
}
template <class AK>
typename AK::Root_for_spheres_2_3
x_critical_point( const std::pair<typename AK::Polynomial_for_spheres_2_3,
typename AK::Polynomial_1_3 > &c, bool i)
{
typedef typename AK::FT FT;
typedef typename AK::Root_of_2 Root_of_2;
typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;
typedef typename AK::Polynomial_for_spheres_2_3 Polynomial_for_spheres_2_3;
typedef typename AK::Polynomial_1_3 Polynomial_1_3;
const Polynomial_for_spheres_2_3 &s = c.first;
const Polynomial_1_3 &p = c.second;
// It has to be the equation of a diametral circle
CGAL_kernel_precondition((intersect<AK>(p,s)));
CGAL_kernel_precondition(CGAL_NTS sign(p.a() * s.a() + p.b() * s.b() +
p.c() * s.c() + p.d()) == ZERO);
CGAL_kernel_precondition(!(is_zero(p.b()) && is_zero(p.c())));
const FT sqbc = CGAL::square(p.b()) + CGAL::square(p.c());
const FT sq_sum = sqbc + CGAL::square(p.a());
const FT delta = (sqbc * s.r_sq())/sq_sum;
const FT cy = (p.a()*p.b())/sqbc;
const FT cz = (p.a()*p.c())/sqbc;
const Root_of_2 x = make_root_of_2(s.a(),FT(i?-1:1),delta);
const Root_of_2 y = make_root_of_2(s.b(),FT(i?(cy):FT(-cy)),delta);
const Root_of_2 z = make_root_of_2(s.c(),FT(i?(cz):FT(-cz)),delta);
return Root_for_spheres_2_3(x,y,z);
}
// Writes the two x-extremal points of the circle given as the intersection
// of the sphere c.first and the plane c.second into *res (smaller-x point
// first), and returns the advanced output iterator.
template <class AK, class OutputIterator>
OutputIterator
x_critical_points( const std::pair<typename AK::Polynomial_for_spheres_2_3,
                                   typename AK::Polynomial_1_3 > &c,
                   OutputIterator res)
{
  typedef typename AK::FT FT;
  typedef typename AK::Root_of_2 Root_of_2;
  typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;
  typedef typename AK::Polynomial_for_spheres_2_3 Polynomial_for_spheres_2_3;
  typedef typename AK::Polynomial_1_3 Polynomial_1_3;
  const Polynomial_for_spheres_2_3 &s = c.first;
  const Polynomial_1_3 &p = c.second;
  // It has to be the equation of a diametral circle
  CGAL_kernel_precondition((intersect<AK>(p,s)));
  CGAL_kernel_precondition(CGAL_NTS sign(p.a() * s.a() + p.b() * s.b() +
                                         p.c() * s.c() + p.d()) == ZERO);
  // If both b and c vanish the plane is orthogonal to the x-axis and the
  // circle has no isolated x-critical points.
  CGAL_kernel_precondition(!(is_zero(p.b()) && is_zero(p.c())));
  const FT sqbc = CGAL::square(p.b()) + CGAL::square(p.c());
  const FT sq_sum = sqbc + CGAL::square(p.a());
  const FT delta = (sqbc * s.r_sq())/sq_sum;
  const FT cy = (p.a()*p.b())/sqbc;
  const FT cz = (p.a()*p.c())/sqbc;
  // Consistency fix: wrap the +/-1 coefficients in FT(), matching the
  // sibling y_critical_points overload (which uses FT(-1)/FT(1)), instead
  // of relying on an implicit int -> FT conversion inside make_root_of_2.
  const Root_of_2 x1 = make_root_of_2(s.a(),FT(-1),delta);
  const Root_of_2 y1 = make_root_of_2(s.b(),cy,delta);
  const Root_of_2 z1 = make_root_of_2(s.c(),cz,delta);
  const Root_of_2 x2 = make_root_of_2(s.a(),FT(1),delta);
  const Root_of_2 y2 = make_root_of_2(s.b(),-cy,delta);
  const Root_of_2 z2 = make_root_of_2(s.c(),-cz,delta);
  *res++ = Root_for_spheres_2_3(x1,y1,z1);
  *res++ = Root_for_spheres_2_3(x2,y2,z2);
  return res;
}
// Returns one of the two y-extremal points of the circle given as the
// intersection of the sphere c.first and the plane c.second; the flag i
// selects which of the two antipodal critical points is returned.
template <class AK>
typename AK::Root_for_spheres_2_3
y_critical_point( const std::pair<typename AK::Polynomial_for_spheres_2_3,
                                  typename AK::Polynomial_1_3 > &c, bool i)
{
  typedef typename AK::FT FT;
  typedef typename AK::Root_of_2 Root_of_2;
  typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;
  typedef typename AK::Polynomial_for_spheres_2_3 Polynomial_for_spheres_2_3;
  typedef typename AK::Polynomial_1_3 Polynomial_1_3;
  const Polynomial_for_spheres_2_3 &s = c.first;
  const Polynomial_1_3 &p = c.second;
  // It has to be the equation of a diametral circle
  // (the plane evaluated at the sphere's center must vanish).
  CGAL_kernel_precondition((intersect<AK>(p,s)));
  CGAL_kernel_precondition(CGAL_NTS sign(p.a() * s.a() + p.b() * s.b() +
                                         p.c() * s.c() + p.d()) == ZERO);
  // If both a and c vanish the plane is orthogonal to the y-axis and the
  // circle has no isolated y-critical points.
  CGAL_kernel_precondition(!(is_zero(p.a()) && is_zero(p.c())));
  const FT sqac = CGAL::square(p.a()) + CGAL::square(p.c());
  const FT sq_sum = sqac + CGAL::square(p.b());
  const FT delta = (sqac * s.r_sq())/sq_sum;
  const FT cx = (p.a()*p.b())/sqac;
  const FT cz = (p.c()*p.b())/sqac;
  // The sign of cx decides how the boolean i maps onto the two antipodal
  // points so that the choice is canonical (see y_critical_points, which
  // orders its output with the same test).
  if(!is_positive(cx)) {
    const Root_of_2 x = make_root_of_2(s.a(),FT(i?(cx):FT(-cx)),delta);
    const Root_of_2 y = make_root_of_2(s.b(),FT(i?-1:1),delta);
    const Root_of_2 z = make_root_of_2(s.c(),FT(i?(cz):FT(-cz)),delta);
    return Root_for_spheres_2_3(x,y,z);
  } else {
    const Root_of_2 x = make_root_of_2(s.a(),FT(i?FT(-cx):(cx)),delta);
    const Root_of_2 y = make_root_of_2(s.b(),FT(i?1:-1),delta);
    const Root_of_2 z = make_root_of_2(s.c(),FT(i?FT(-cz):(cz)),delta);
    return Root_for_spheres_2_3(x,y,z);
  }
}
// Writes both y-extremal points of the circle (sphere c.first intersected
// with plane c.second) into *res and returns the advanced iterator.  The
// output order is chosen with the same sign test as y_critical_point so
// the two functions agree on which point corresponds to i == true.
template <class AK, class OutputIterator>
OutputIterator
y_critical_points( const std::pair<typename AK::Polynomial_for_spheres_2_3,
                                   typename AK::Polynomial_1_3 > &c,
                   OutputIterator res)
{
  typedef typename AK::FT FT;
  typedef typename AK::Root_of_2 Root_of_2;
  typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;
  typedef typename AK::Polynomial_for_spheres_2_3 Polynomial_for_spheres_2_3;
  typedef typename AK::Polynomial_1_3 Polynomial_1_3;
  const Polynomial_for_spheres_2_3 &s = c.first;
  const Polynomial_1_3 &p = c.second;
  // It has to be the equation of a diametral circle
  CGAL_kernel_precondition((intersect<AK>(p,s)));
  CGAL_kernel_precondition(CGAL_NTS sign(p.a() * s.a() + p.b() * s.b() +
                                         p.c() * s.c() + p.d()) == ZERO);
  // Plane orthogonal to the y-axis has no isolated y-critical points.
  CGAL_kernel_precondition(!(is_zero(p.a()) && is_zero(p.c())));
  const FT sqac = CGAL::square(p.a()) + CGAL::square(p.c());
  const FT sq_sum = sqac + CGAL::square(p.b());
  const FT delta = (sqac * s.r_sq())/sq_sum;
  const FT cx = (p.a()*p.b())/sqac;
  const FT cz = (p.c()*p.b())/sqac;
  const Root_of_2 x1 = make_root_of_2(s.a(),cx,delta);
  const Root_of_2 y1 = make_root_of_2(s.b(),FT(-1),delta);
  const Root_of_2 z1 = make_root_of_2(s.c(),cz,delta);
  const Root_of_2 x2 = make_root_of_2(s.a(),-cx,delta);
  const Root_of_2 y2 = make_root_of_2(s.b(),FT(1),delta);
  const Root_of_2 z2 = make_root_of_2(s.c(),-cz,delta);
  // Emit in the order matching y_critical_point's i == true / i == false.
  if(!is_positive(cx)) {
    *res++ = Root_for_spheres_2_3(x1,y1,z1);
    *res++ = Root_for_spheres_2_3(x2,y2,z2);
  } else {
    *res++ = Root_for_spheres_2_3(x2,y2,z2);
    *res++ = Root_for_spheres_2_3(x1,y1,z1);
  }
  return res;
}
// Returns one of the two z-extremal points of the circle given as the
// intersection of the sphere c.first and the plane c.second; the flag i
// selects which of the two antipodal critical points is returned.
template <class AK>
typename AK::Root_for_spheres_2_3
z_critical_point( const std::pair<typename AK::Polynomial_for_spheres_2_3,
                                  typename AK::Polynomial_1_3 > &c, bool i)
{
  typedef typename AK::FT FT;
  typedef typename AK::Root_of_2 Root_of_2;
  typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;
  typedef typename AK::Polynomial_for_spheres_2_3 Polynomial_for_spheres_2_3;
  typedef typename AK::Polynomial_1_3 Polynomial_1_3;
  const Polynomial_for_spheres_2_3 &s = c.first;
  const Polynomial_1_3 &p = c.second;
  // It has to be the equation of a diametral circle
  CGAL_kernel_precondition((intersect<AK>(p,s)));
  CGAL_kernel_precondition(CGAL_NTS sign(p.a() * s.a() + p.b() * s.b() +
                                         p.c() * s.c() + p.d()) == ZERO);
  // Plane orthogonal to the z-axis has no isolated z-critical points.
  CGAL_kernel_precondition(!(is_zero(p.a()) && is_zero(p.b())));
  const FT sqab = CGAL::square(p.a()) + CGAL::square(p.b());
  const FT sq_sum = sqab + CGAL::square(p.c());
  const FT delta = (sqab * s.r_sq())/sq_sum;
  const FT cx = (p.a()*p.c())/sqab;
  const FT cy = (p.c()*p.b())/sqab;
  // Three-way sign analysis on cx (and, when cx == 0, on cy) fixes the
  // canonical mapping of i onto the two antipodal points; it mirrors the
  // ordering used in z_critical_points.
  if(is_negative(cx)) {
    const Root_of_2 x = make_root_of_2(s.a(),FT(i?(cx):FT(-cx)),delta);
    const Root_of_2 y = make_root_of_2(s.b(),FT(i?(cy):FT(-cy)),delta);
    const Root_of_2 z = make_root_of_2(s.c(),FT(i?-1:1),delta);
    return Root_for_spheres_2_3(x,y,z);
  } else if(is_zero(cx)) {
    // cx == 0: the x-coordinate is simply the sphere center's x.
    if(!is_positive(cy)) {
      const Root_of_2 x = s.a();
      const Root_of_2 y = make_root_of_2(s.b(),FT(i?(cy):FT(-cy)),delta);
      const Root_of_2 z = make_root_of_2(s.c(),FT(i?-1:1),delta);
      return Root_for_spheres_2_3(x,y,z);
    } else {
      const Root_of_2 x = s.a();
      const Root_of_2 y = make_root_of_2(s.b(),FT(i?FT(-cy):(cy)),delta);
      const Root_of_2 z = make_root_of_2(s.c(),FT(i?1:-1),delta);
      return Root_for_spheres_2_3(x,y,z);
    }
  } else {
    const Root_of_2 x = make_root_of_2(s.a(),FT(i?FT(-cx):(cx)),delta);
    const Root_of_2 y = make_root_of_2(s.b(),FT(i?FT(-cy):(cy)),delta);
    const Root_of_2 z = make_root_of_2(s.c(),FT(i?1:-1),delta);
    return Root_for_spheres_2_3(x,y,z);
  }
}
// Writes both z-extremal points of the circle (sphere c.first intersected
// with plane c.second) into *res and returns the advanced iterator.  The
// output order follows the same sign analysis as z_critical_point so the
// first emitted point corresponds to i == true.
template <class AK, class OutputIterator>
OutputIterator
z_critical_points( const std::pair<typename AK::Polynomial_for_spheres_2_3,
                                   typename AK::Polynomial_1_3 > &c,
                   OutputIterator res)
{
  typedef typename AK::FT FT;
  typedef typename AK::Root_of_2 Root_of_2;
  typedef typename AK::Root_for_spheres_2_3 Root_for_spheres_2_3;
  typedef typename AK::Polynomial_for_spheres_2_3 Polynomial_for_spheres_2_3;
  typedef typename AK::Polynomial_1_3 Polynomial_1_3;
  const Polynomial_for_spheres_2_3 &s = c.first;
  const Polynomial_1_3 &p = c.second;
  // It has to be the equation of a diametral circle
  CGAL_kernel_precondition((intersect<AK>(p,s)));
  CGAL_kernel_precondition(CGAL_NTS sign(p.a() * s.a() + p.b() * s.b() +
                                         p.c() * s.c() + p.d()) == ZERO);
  // Plane orthogonal to the z-axis has no isolated z-critical points.
  CGAL_kernel_precondition(!(is_zero(p.a()) && is_zero(p.b())));
  const FT sqab = CGAL::square(p.a()) + CGAL::square(p.b());
  const FT sq_sum = sqab + CGAL::square(p.c());
  const FT delta = (sqab * s.r_sq())/sq_sum;
  const FT cx = (p.a()*p.c())/sqab;
  const FT cy = (p.c()*p.b())/sqab;
  if(is_negative(cx)) {
    const Root_of_2 x1 = make_root_of_2(s.a(),(cx),delta);
    const Root_of_2 y1 = make_root_of_2(s.b(),(cy),delta);
    const Root_of_2 z1 = make_root_of_2(s.c(),-1,delta);
    const Root_of_2 x2 = make_root_of_2(s.a(),(-cx),delta);
    const Root_of_2 y2 = make_root_of_2(s.b(),(-cy),delta);
    const Root_of_2 z2 = make_root_of_2(s.c(),1,delta);
    *res++ = Root_for_spheres_2_3(x1,y1,z1);
    *res++ = Root_for_spheres_2_3(x2,y2,z2);
  } else if(is_zero(cx)) {
    // cx == 0: both critical points share the center's x-coordinate.
    if(!is_positive(cy)) {
      const Root_of_2 x1 = s.a();
      const Root_of_2 y1 = make_root_of_2(s.b(),(cy),delta);
      const Root_of_2 z1 = make_root_of_2(s.c(),FT(-1),delta);
      const Root_of_2 y2 = make_root_of_2(s.b(),(-cy),delta);
      const Root_of_2 z2 = make_root_of_2(s.c(),FT(1),delta);
      *res++ = Root_for_spheres_2_3(x1,y1,z1);
      *res++ = Root_for_spheres_2_3(x1,y2,z2);
    } else {
      const Root_of_2 x1 = s.a();
      const Root_of_2 y1 = make_root_of_2(s.b(),(-cy),delta);
      const Root_of_2 z1 = make_root_of_2(s.c(),FT(1),delta);
      const Root_of_2 y2 = make_root_of_2(s.b(),(cy),delta);
      const Root_of_2 z2 = make_root_of_2(s.c(),FT(-1),delta);
      *res++ = Root_for_spheres_2_3(x1,y1,z1);
      *res++ = Root_for_spheres_2_3(x1,y2,z2);
    }
  } else {
    const Root_of_2 x1 = make_root_of_2(s.a(),(-cx),delta);
    const Root_of_2 y1 = make_root_of_2(s.b(),(-cy),delta);
    const Root_of_2 z1 = make_root_of_2(s.c(),FT(1),delta);
    const Root_of_2 x2 = make_root_of_2(s.a(),(cx),delta);
    const Root_of_2 y2 = make_root_of_2(s.b(),(cy),delta);
    const Root_of_2 z2 = make_root_of_2(s.c(),FT(-1),delta);
    *res++ = Root_for_spheres_2_3(x1,y1,z1);
    *res++ = Root_for_spheres_2_3(x2,y2,z2);
  }
  return res;
}
} // namespace AlgebraicSphereFunctors
} // namespace CGAL
#endif // CGAL_ALGEBRAIC_KERNEL_FUNCTIONS_ON_ROOTS_AND_POLYNOMIALS_2_3_H
|
# pylint: disable=no-member
# This module is only used to create and compile the gevent.libuv._corecffi module;
# nothing should be directly imported from it except `ffi`, which should only be
# used for `ffi.compile()`; programs should import gevent._corecfffi.
# However, because we are using "out-of-line" mode, it is necessary to examine
# this file to know what functions are created and available on the generated
# module.
from __future__ import absolute_import, print_function
import os
import os.path # pylint:disable=no-name-in-module
import platform
import sys
from cffi import FFI
# This module must be imported with setup.py's directory as the cwd so that
# the project-local _setuputils helper is importable.
sys.path.append(".")
try:
    import _setuputils
except ImportError:
    print("This file must be imported with setup.py in the current working dir.")
    raise
__all__ = []
# True when building on Windows.
WIN = sys.platform.startswith('win32')
# Whether to compile the bundled libuv sources into the extension, or link
# against a system-provided libuv instead.
LIBUV_EMBED = _setuputils.should_embed('libuv')
ffi = FFI()
# Directory layout: this file lives at src/gevent/libuv/, setup.py three
# levels up, and the bundled libuv under deps/libuv next to setup.py.
thisdir = os.path.dirname(os.path.abspath(__file__))
parentdir = os.path.abspath(os.path.join(thisdir, '..'))
setup_py_dir = os.path.abspath(os.path.join(thisdir, '..', '..', '..'))
libuv_dir = os.path.abspath(os.path.join(setup_py_dir, 'deps', 'libuv'))
def read_source(name):
    """Return the full text of the file *name* located next to this module."""
    path = os.path.join(thisdir, name)
    with open(path, 'r') as source_file:
        contents = source_file.read()
    return contents
# Load the cffi declaration and implementation text from sibling .c files.
_cdef = read_source('_corecffi_cdef.c')
_source = read_source('_corecffi_source.c')
# These defines and uses help keep the C file readable and lintable by
# C tools; they are rewritten into cffi's own syntax before cdef().
_cdef = _cdef.replace('#define GEVENT_STRUCT_DONE int', '')
_cdef = _cdef.replace("GEVENT_STRUCT_DONE _;", '...;')
# nlink_t is not used in libuv.
_cdef = _cdef.replace('#define GEVENT_ST_NLINK_T int',
                      '')
_cdef = _cdef.replace('GEVENT_ST_NLINK_T', 'nlink_t')
_cdef = _cdef.replace('#define GEVENT_UV_OS_SOCK_T int', '')
# uv_os_sock_t is int on POSIX and SOCKET on Win32, but socket is
# just another name for handle, which is just another name for 'void*'
# which we will treat as an 'unsigned long' or 'unsigned long long'
# since it comes through 'fileno()' where it has been cast as an int.
# See class watcher.io
_void_pointer_as_integer = 'intptr_t'
_cdef = _cdef.replace("GEVENT_UV_OS_SOCK_T", 'int' if not WIN else _void_pointer_as_integer)
# Header search path for the embedded libuv build (public headers + private
# src/ headers).
LIBUV_INCLUDE_DIRS = [
    os.path.join(libuv_dir, 'include'),
    os.path.join(libuv_dir, 'src'),
]
# Initially based on https://github.com/saghul/pyuv/blob/v1.x/setup_libuv.py
def _libuv_source(rel_path):
# Certain versions of setuptools, notably on windows, are *very*
# picky about what we feed to sources= "setup() arguments must
# *always* be /-separated paths relative to the setup.py
# directory, *never* absolute paths." POSIX doesn't have that issue.
path = os.path.join('deps', 'libuv', 'src', rel_path)
return path
# Sources common to every platform.
LIBUV_SOURCES = [
    _libuv_source('fs-poll.c'),
    _libuv_source('inet.c'),
    _libuv_source('threadpool.c'),
    _libuv_source('uv-common.c'),
    _libuv_source('version.c'),
    _libuv_source('uv-data-getter-setters.c'),
    _libuv_source('timer.c'),
    _libuv_source('idna.c'),
    _libuv_source('strscpy.c')
]
if WIN:
    # Windows backend.
    LIBUV_SOURCES += [
        _libuv_source('win/async.c'),
        _libuv_source('win/core.c'),
        _libuv_source('win/detect-wakeup.c'),
        _libuv_source('win/dl.c'),
        _libuv_source('win/error.c'),
        _libuv_source('win/fs-event.c'),
        _libuv_source('win/fs.c'),
        # getaddrinfo.c refers to ConvertInterfaceIndexToLuid
        # and ConvertInterfaceLuidToNameA, which are supposedly in iphlpapi.h
        # and iphlpapi.lib/dll. But on Windows 10 with Python 3.5 and VC 14 (Visual Studio 2015),
        # I get an undefined warning from the compiler for those functions and
        # a link error from the linker, so this file can't be included.
        # This is possibly because the functions are defined for Windows Vista, and
        # Python 3.5 builds with at earlier SDK?
        # Fortunately we don't use those functions.
        #_libuv_source('win/getaddrinfo.c'),
        # getnameinfo.c refers to uv__getaddrinfo_translate_error from
        # getaddrinfo.c, which we don't have.
        #_libuv_source('win/getnameinfo.c'),
        _libuv_source('win/handle.c'),
        _libuv_source('win/loop-watcher.c'),
        _libuv_source('win/pipe.c'),
        _libuv_source('win/poll.c'),
        _libuv_source('win/process-stdio.c'),
        _libuv_source('win/process.c'),
        _libuv_source('win/signal.c'),
        _libuv_source('win/snprintf.c'),
        _libuv_source('win/stream.c'),
        _libuv_source('win/tcp.c'),
        _libuv_source('win/thread.c'),
        _libuv_source('win/tty.c'),
        _libuv_source('win/udp.c'),
        _libuv_source('win/util.c'),
        _libuv_source('win/winapi.c'),
        _libuv_source('win/winsock.c'),
    ]
else:
    # Sources common to all unix-like backends.
    LIBUV_SOURCES += [
        _libuv_source('unix/async.c'),
        _libuv_source('unix/core.c'),
        _libuv_source('unix/dl.c'),
        _libuv_source('unix/fs.c'),
        _libuv_source('unix/getaddrinfo.c'),
        _libuv_source('unix/getnameinfo.c'),
        _libuv_source('unix/loop-watcher.c'),
        _libuv_source('unix/loop.c'),
        _libuv_source('unix/pipe.c'),
        _libuv_source('unix/poll.c'),
        _libuv_source('unix/process.c'),
        _libuv_source('unix/signal.c'),
        _libuv_source('unix/stream.c'),
        _libuv_source('unix/tcp.c'),
        _libuv_source('unix/thread.c'),
        _libuv_source('unix/tty.c'),
        _libuv_source('unix/udp.c'),
    ]
# Per-OS additions for the unix backends.
if sys.platform.startswith('linux'):
    LIBUV_SOURCES += [
        _libuv_source('unix/linux-core.c'),
        _libuv_source('unix/linux-inotify.c'),
        _libuv_source('unix/linux-syscalls.c'),
        _libuv_source('unix/procfs-exepath.c'),
        _libuv_source('unix/proctitle.c'),
        _libuv_source('unix/sysinfo-loadavg.c'),
    ]
elif sys.platform == 'darwin':
    LIBUV_SOURCES += [
        _libuv_source('unix/bsd-ifaddrs.c'),
        _libuv_source('unix/darwin.c'),
        _libuv_source('unix/darwin-proctitle.c'),
        _libuv_source('unix/fsevents.c'),
        _libuv_source('unix/kqueue.c'),
        _libuv_source('unix/proctitle.c'),
    ]
elif sys.platform.startswith(('freebsd', 'dragonfly')):
    LIBUV_SOURCES += [
        _libuv_source('unix/bsd-ifaddrs.c'),
        _libuv_source('unix/freebsd.c'),
        _libuv_source('unix/kqueue.c'),
        _libuv_source('unix/posix-hrtime.c'),
        _libuv_source('unix/bsd-proctitle.c'),
    ]
elif sys.platform.startswith('openbsd'):
    LIBUV_SOURCES += [
        _libuv_source('unix/bsd-ifaddrs.c'),
        _libuv_source('unix/kqueue.c'),
        _libuv_source('unix/openbsd.c'),
        _libuv_source('unix/posix-hrtime.c'),
        _libuv_source('unix/bsd-proctitle.c'),
    ]
elif sys.platform.startswith('netbsd'):
    LIBUV_SOURCES += [
        _libuv_source('unix/bsd-ifaddrs.c'),
        _libuv_source('unix/kqueue.c'),
        _libuv_source('unix/netbsd.c'),
        _libuv_source('unix/posix-hrtime.c'),
        _libuv_source('unix/bsd-proctitle.c'),
    ]
elif sys.platform.startswith('sunos'):
    LIBUV_SOURCES += [
        _libuv_source('unix/no-proctitle.c'),
        _libuv_source('unix/sunos.c'),
    ]
elif sys.platform.startswith('aix'):
    LIBUV_SOURCES += [
        _libuv_source('unix/aix.c'),
        _libuv_source('unix/aix-common.c'),
    ]
# (name, value) preprocessor definitions passed to the C compiler.
LIBUV_MACROS = [
    ('LIBUV_EMBED', int(LIBUV_EMBED)),
]
def _define_macro(name, value):
    # Record a preprocessor definition for the extension build.
    LIBUV_MACROS.append((name, value))
# Libraries to link the extension against.
LIBUV_LIBRARIES = []
def _add_library(name):
    # Record a library name for the linker.
    LIBUV_LIBRARIES.append(name)
# Per-platform compiler macros and link libraries, mirroring libuv's own
# build files.
if sys.platform != 'win32':
    _define_macro('_LARGEFILE_SOURCE', 1)
    _define_macro('_FILE_OFFSET_BITS', 64)
if sys.platform.startswith('linux'):
    _add_library('dl')
    _add_library('rt')
    _define_macro('_GNU_SOURCE', 1)
    _define_macro('_POSIX_C_SOURCE', '200112')
elif sys.platform == 'darwin':
    _define_macro('_DARWIN_USE_64_BIT_INODE', 1)
    _define_macro('_DARWIN_UNLIMITED_SELECT', 1)
elif sys.platform.startswith('netbsd'):
    _add_library('kvm')
elif sys.platform.startswith('sunos'):
    _define_macro('__EXTENSIONS__', 1)
    _define_macro('_XOPEN_SOURCE', 500)
    _add_library('kstat')
    _add_library('nsl')
    _add_library('sendfile')
    _add_library('socket')
    if platform.release() == '5.10':
        # https://github.com/libuv/libuv/issues/1458
        # https://github.com/giampaolo/psutil/blob/4d6a086411c77b7909cce8f4f141bbdecfc0d354/setup.py#L298-L300
        _define_macro('SUNOS_NO_IFADDRS', '')
elif sys.platform.startswith('aix'):
    _define_macro('_LINUX_SOURCE_COMPAT', 1)
    _add_library('perfstat')
elif WIN:
    _define_macro('_GNU_SOURCE', 1)
    _define_macro('WIN32', 1)
    _define_macro('_CRT_SECURE_NO_DEPRECATE', 1)
    _define_macro('_CRT_NONSTDC_NO_DEPRECATE', 1)
    _define_macro('_CRT_SECURE_NO_WARNINGS', 1)
    _define_macro('_WIN32_WINNT', '0x0600')
    _define_macro('WIN32_LEAN_AND_MEAN', 1)
    _add_library('advapi32')
    _add_library('iphlpapi')
    _add_library('psapi')
    _add_library('shell32')
    _add_library('user32')
    _add_library('userenv')
    _add_library('ws2_32')
if not LIBUV_EMBED:
    # Linking against a system libuv: drop the bundled sources/headers and
    # just link -luv instead.
    del LIBUV_SOURCES[:]
    del LIBUV_INCLUDE_DIRS[:]
    _add_library('uv')
    LIBUV_INCLUDE_DIRS.append(parentdir)
# Register the declarations and the out-of-line source with cffi.
ffi.cdef(_cdef)
ffi.set_source(
    'gevent.libuv._corecffi',
    _source,
    sources=LIBUV_SOURCES,
    depends=LIBUV_SOURCES,
    include_dirs=LIBUV_INCLUDE_DIRS,
    # Copy the lists: set_source may hold on to them, and we mutate ours above.
    libraries=list(LIBUV_LIBRARIES),
    define_macros=list(LIBUV_MACROS),
    extra_compile_args=list(_setuputils.IGNORE_THIRD_PARTY_WARNINGS),
)
if __name__ == '__main__':
    # See notes in libev/_corecffi_build.py for how to test this.
    #
    # Other than the obvious directory changes, the changes are:
    #
    # CPPFLAGS=-Ideps/libuv/include/ -Isrc/gevent/
    ffi.compile(verbose=True)
|
__________________________________________________________________________________________________
sample 104 ms submission
import collections
class Solution:
    def largestDivisibleSubset(self, nums: List[int]) -> List[int]:
        """Return a largest subset of nums in which every pair (a, b)
        satisfies a % b == 0 or b % a == 0.  Duplicates are preserved.

        Uses O(sqrt(x)) divisor enumeration per distinct value instead of
        the classic O(n^2) pairwise DP.
        """
        import math  # bug fix: math.sqrt was used below but never imported

        if not nums:
            return []
        counter = collections.Counter(nums)
        nums = sorted(counter.keys())
        n = len(nums)
        max_num, max_val = nums[0], counter[nums[0]]
        # res[x] = (predecessor of x in the best chain ending at x, or -1;
        #           total element count of that chain, duplicates included)
        res = dict()
        res[nums[0]] = -1, counter[nums[0]]
        for i in range(1, n):
            x = nums[i]
            # Enumerate divisor pairs (j, x // j) for j up to sqrt(x).
            sqrt_ = min(int(math.sqrt(x) + 0.5) + 1, x + 1)
            pre, best = -1, 0
            for j in range(1, sqrt_):
                if x % j == 0:
                    # Only divisors that are themselves in nums appear in
                    # res; others fall back to the (-1, -1) sentinel.
                    tmp = res.get(j, (-1, -1))[1]
                    if tmp > best:
                        pre = j
                        best = tmp
                    u = x // j
                    tmp = res.get(u, (-1, -1))[1]
                    if tmp > best:
                        pre = u
                        best = tmp
            res[x] = pre, best + counter[x]
            if res[x][1] > max_val:
                max_val = res[x][1]
                max_num = x
        # Walk the predecessor chain back from the best endpoint.
        ans = []
        while max_num != -1:
            ans.extend([max_num] * counter[max_num])
            max_num = res[max_num][0]
        return list(reversed(ans))
__________________________________________________________________________________________________
sample 13072 kb submission
class Solution:
    def largestDivisibleSubset(self, nums: List[int]) -> List[int]:
        """Return a largest subset of nums where every pair is mutually
        divisible, via the classic O(n^2) chain DP over sorted values.

        Note: the returned list is a valid subset but is not sorted.
        """
        count = len(nums)
        if count == 0:
            return []
        # dp[k] = [length of best chain ending at index k,
        #          index of its predecessor (k itself means "none")]
        dp = [[1, k] for k in range(count)]
        nums.sort()
        for hi in range(1, count):
            for lo in range(hi):
                if nums[hi] % nums[lo] == 0 and dp[lo][0] + 1 > dp[hi][0]:
                    dp[hi][0] = dp[lo][0] + 1
                    dp[hi][1] = lo
        # Locate the endpoint of the longest chain.
        best_idx, best_len = 0, 0
        for k in range(count):
            if dp[k][0] > best_len:
                best_len = dp[k][0]
                best_idx = k
        # Rebuild the chain by following predecessor links.
        tail = []
        walk = best_idx
        while dp[walk][1] < walk:
            tail.append(nums[walk])
            walk = dp[walk][1]
        return [nums[walk]] + tail
__________________________________________________________________________________________________
|
// Copyright (c) 2012 Ecma International. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es5id: 15.4.4.17-7-c-iii-20
description: >
Array.prototype.some - return value of callbackfn is the Math
object
---*/
// Returns the Math object; since any object coerces to true,
// Array.prototype.some must treat this callback result as truthy.
function callbackfn(val, idx, obj) {
  return Math;
}

assert([11].some(callbackfn), '[11].some(callbackfn) !== true');
|
# Copyright 2021 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path
import os
import shutil
from utils.unzip import Unzip
from utils.zip import Zip
from .type import Type, TypeEnum
from .worker import Worker
class Frontend:
    def __init__(self, args):
        """Normalize args['source'] into a directory, run the Worker over
        it, then repack the result into the original container (zip or
        single manifest/js file), leaving the original path in place."""
        source = args['source']
        original_source = source
        dirname = os.path.dirname(source)
        # TODO: If not a directory, create one and move this item into it.
        type = Type().getFileType(source)
        if type is TypeEnum.UNKNOWN:
            # TODO: Error
            return
        elif type is TypeEnum.ZIP:
            foldername = str(Path(os.path.basename(source)).with_suffix(''))
            destination = dirname + os.sep + foldername
            if os.path.exists(destination):
                # Bug fix: the stale extraction target is usually a
                # directory, and os.remove() raises on directories.
                if os.path.isdir(destination):
                    shutil.rmtree(destination)
                else:
                    os.remove(destination)
            Unzip().extract(source, dirname, foldername)
            source = str(Path(source).with_suffix(''))
        elif type != TypeEnum.DIR:
            # Single file: wrap it in a scratch directory for the Worker.
            source_dir = source + '_dir'
            os.mkdir(source_dir)
            os.rename(source, source_dir + os.sep + os.path.basename(source))
            source = source_dir
        worker = Worker()
        worker.work(source)
        # NOTE(review): the '_delete' suffix below presumably refers to
        # output produced by Worker.work() — confirm against Worker.
        if type is TypeEnum.ZIP:
            prev_source = source
            source = source + '_delete'
            # TODO: This project should not rely on / paths on Windows.
            os.mkdir(dirname + '/tmp_delete')
            shutil.move(source, dirname + '/tmp_delete/extension/')
            Zip().zip(dirname + '/tmp_delete', source)
            shutil.rmtree(dirname + '/tmp_delete')
            shutil.rmtree(prev_source)
            os.remove(original_source)
            os.rename(source + '.zip', original_source)
        elif type is TypeEnum.MANIFEST or type is TypeEnum.JS:
            os.remove(source + os.sep + os.path.basename(original_source))
            os.rmdir(source)
            source = source + '_delete/'
            os.rename(source + os.path.basename(original_source), original_source)
            os.rmdir(source)
            # shutil.rmtree(source)
|
// @flow
import * as React from "react";
import Heading from "../../Heading";
import List from "../../List";
import ListItem from "../../List/ListItem";
import Stack from "../../Stack";
import InputStepper from "../index";
import Text from "../../Text";
import TextLink from "../../TextLink";
export default {
  // Demonstrates InputStepper's onChange/onFocus/onBlur callbacks.
  Example: () => {
    // Log of callback invocations, rendered as a list below.
    const [actions, setActions] = React.useState([]);
    // Mirrors the stepper's value as reported by onChange.
    const [currentValue, setCurrentValue] = React.useState(2);
    const addAction = action => {
      setActions([...actions, action]);
    };
    return (
      <Stack direction="column">
        <Stack spacing="small">
          <Stack direction="column">
            <Heading type="title3" as="h3">
              onChange
            </Heading>
            <Text>For getting the current value.</Text>
          </Stack>
          <div style={{ maxWidth: "11em" }}>
            <InputStepper
              onChange={value => setCurrentValue(value)}
              label="Travelers"
              defaultValue={2}
              maxValue={10}
              minValue={1}
              titleIncrement="Add a traveler"
              titleDecrement="Remove a traveler"
            />
          </div>
          <Text>The current value is: {currentValue}</Text>
        </Stack>
        <Stack direction="column">
          <Heading type="title3" as="h3">
            All callbacks
          </Heading>
          <div style={{ maxWidth: "11em" }}>
            <InputStepper
              onBlur={() => addAction("Blurred")}
              onChange={() => addAction("Changed")}
              onFocus={() => addAction("Focused")}
              label="Travelers"
              defaultValue={2}
              maxValue={10}
              minValue={1}
              titleIncrement="Add a traveler"
              titleDecrement="Remove a traveler"
            />
          </div>
          <Text>
            What has happened?{" "}
            <TextLink type="secondary" onClick={() => setActions([])}>
              Clear list
            </TextLink>
          </Text>
          {actions && (
            <List>
              {actions.map((action, i) => {
                // eslint-disable-next-line react/no-array-index-key
                return <ListItem key={i}>{action}</ListItem>;
              })}
            </List>
          )}
        </Stack>
      </Stack>
    );
  },
  info: {
    title: "Callbacks",
    description:
      "If you want to take actions on user interaction, use one of the callbacks available for input steppers.",
  },
};
|
"""distutils.command.bdist_wininst
Implements the Distutils 'bdist_wininst' command: create a windows installer
exe-program."""
__revision__ = "$Id: bdist_wininst.py 77761 2010-01-26 22:46:15Z tarek.ziade $"
import sys
import os
import string
from sysconfig import get_python_version
from distutils.core import Command
from distutils.dir_util import remove_tree
from distutils.errors import DistutilsOptionError, DistutilsPlatformError
from distutils import log
from distutils.util import get_platform
# NOTE: legacy Python 2 distutils code (string-exception raise syntax,
# `string` module); kept verbatim.
class bdist_wininst (Command):

    description = "create an executable installer for MS Windows"

    # (long option, short option, help text) triples consumed by distutils'
    # option machinery.
    user_options = [('bdist-dir=', None,
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('target-version=', None,
                     "require a specific python version" +
                     " on the target system"),
                    ('no-target-compile', 'c',
                     "do not compile .py to .pyc on the target system"),
                    ('no-target-optimize', 'o',
                     "do not compile .py to .pyo (optimized)"
                     "on the target system"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('bitmap=', 'b',
                     "bitmap to use for the installer instead of python-powered logo"),
                    ('title=', 't',
                     "title to display on the installer background instead of default"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('install-script=', None,
                     "basename of installation script to be run after"
                     "installation or before deinstallation"),
                    ('pre-install-script=', None,
                     "Fully qualified filename of a script to be run before "
                     "any files are installed.  This script need not be in the "
                     "distribution"),
                    ('user-access-control=', None,
                     "specify Vista's UAC handling - 'none'/default=no "
                     "handling, 'auto'=use UAC if target Python installed for "
                     "all users, 'force'=always use UAC"),
                   ]

    # Options that are flags rather than taking a value.
    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
                       'skip-build']
    def initialize_options (self):
        # Set every command option to its "unset" default; finalize_options
        # fills in real values later.
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.no_target_compile = 0
        self.no_target_optimize = 0
        self.target_version = None
        self.dist_dir = None
        self.bitmap = None
        self.title = None
        self.skip_build = 0
        self.install_script = None
        self.pre_install_script = None
        self.user_access_control = None

    # initialize_options()
    def finalize_options (self):
        # Resolve defaults and validate option combinations.
        if self.bdist_dir is None:
            if self.skip_build and self.plat_name:
                # If build is skipped and plat_name is overridden, bdist will
                # not see the correct 'plat_name' - so set that up manually.
                bdist = self.distribution.get_command_obj('bdist')
                bdist.plat_name = self.plat_name
                # next the command will be initialized using that name
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'wininst')
        if not self.target_version:
            self.target_version = ""
        if not self.skip_build and self.distribution.has_ext_modules():
            # Installers containing extension modules are tied to the Python
            # version they were built with.
            short_version = get_python_version()
            if self.target_version and self.target_version != short_version:
                raise DistutilsOptionError, \
                      "target version can only be %s, or the '--skip_build'" \
                      " option must be specified" % (short_version,)
            self.target_version = short_version

        self.set_undefined_options('bdist',
                                   ('dist_dir', 'dist_dir'),
                                   ('plat_name', 'plat_name'),
                                  )

        if self.install_script:
            # The install script must be one of the distribution's scripts.
            for script in self.distribution.scripts:
                if self.install_script == os.path.basename(script):
                    break
            else:
                raise DistutilsOptionError, \
                      "install_script '%s' not found in scripts" % \
                      self.install_script

    # finalize_options()
    def run (self):
        # Build (unless skipped), pseudo-install into bdist_dir, zip the
        # tree, and wrap the zip in a self-extracting installer exe.
        if (sys.platform != "win32" and
            (self.distribution.has_ext_modules() or
             self.distribution.has_c_libraries())):
            raise DistutilsPlatformError \
                  ("distribution contains extensions and/or C libraries; "
                   "must be compiled on a Windows 32 platform")

        if not self.skip_build:
            self.run_command('build')

        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.root = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = 0
        install.plat_name = self.plat_name

        install_lib = self.reinitialize_command('install_lib')
        # we do not want to include pyc or pyo files
        install_lib.compile = 0
        install_lib.optimize = 0

        if self.distribution.has_ext_modules():
            # If we are building an installer for a Python version other
            # than the one we are currently running, then we need to ensure
            # our build_lib reflects the other Python version rather than ours.
            # Note that for target_version!=sys.version, we must have skipped the
            # build step, so there is no issue with enforcing the build of this
            # version.
            target_version = self.target_version
            if not target_version:
                assert self.skip_build, "Should have already checked this"
                target_version = sys.version[0:3]
            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
            build = self.get_finalized_command('build')
            build.build_lib = os.path.join(build.build_base,
                                           'lib' + plat_specifier)

        # Use a custom scheme for the zip-file, because we have to decide
        # at installation time which scheme to use.
        for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
            value = string.upper(key)
            if key == 'headers':
                value = value + '/Include/$dist_name'
            setattr(install,
                    'install_' + key,
                    value)

        log.info("installing to %s", self.bdist_dir)
        install.ensure_finalized()

        # avoid warning of 'install_lib' about installing
        # into a directory not in sys.path
        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))

        install.run()

        del sys.path[0]

        # And make an archive relative to the root of the
        # pseudo-installation tree.
        from tempfile import mktemp
        archive_basename = mktemp()
        fullname = self.distribution.get_fullname()
        arcname = self.make_archive(archive_basename, "zip",
                                    root_dir=self.bdist_dir)
        # create an exe containing the zip-file
        self.create_exe(arcname, fullname, self.bitmap)
        if self.distribution.has_ext_modules():
            pyversion = get_python_version()
        else:
            pyversion = 'any'
        self.distribution.dist_files.append(('bdist_wininst', pyversion,
                                             self.get_installer_filename(fullname)))
        # remove the zip-file again
        log.debug("removing temporary file '%s'", arcname)
        os.remove(arcname)

        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)

    # run()
    def get_inidata (self):
        # Return data describing the installation, formatted as an INI-style
        # string with [metadata] and [Setup] sections for the installer stub.
        lines = []
        metadata = self.distribution.metadata

        # Write the [metadata] section.
        lines.append("[metadata]")

        # 'info' will be displayed in the installer's dialog box,
        # describing the items to be installed.
        info = (metadata.long_description or '') + '\n'

        # Escape newline characters
        def escape(s):
            return string.replace(s, "\n", "\\n")

        for name in ["author", "author_email", "description", "maintainer",
                     "maintainer_email", "name", "url", "version"]:
            data = getattr(metadata, name, "")
            if data:
                info = info + ("\n    %s: %s" % \
                               (string.capitalize(name), escape(data)))
                lines.append("%s=%s" % (name, escape(data)))

        # The [setup] section contains entries controlling
        # the installer runtime.
        lines.append("\n[Setup]")
        if self.install_script:
            lines.append("install_script=%s" % self.install_script)
        lines.append("info=%s" % escape(info))
        lines.append("target_compile=%d" % (not self.no_target_compile))
        lines.append("target_optimize=%d" % (not self.no_target_optimize))
        if self.target_version:
            lines.append("target_version=%s" % self.target_version)
        if self.user_access_control:
            lines.append("user_access_control=%s" % self.user_access_control)

        title = self.title or self.distribution.get_fullname()
        lines.append("title=%s" % escape(title))
        import time
        import distutils
        build_info = "Built %s with distutils-%s" % \
                     (time.ctime(time.time()), distutils.__version__)
        lines.append("build_info=%s" % build_info)
        return string.join(lines, "\n")

    # get_inidata()
    def create_exe (self, arcname, fullname, bitmap=None):
        # Concatenate installer stub + optional bitmap + ini config +
        # binary header + zip archive into the final installer exe.
        import struct

        self.mkpath(self.dist_dir)

        cfgdata = self.get_inidata()

        installer_name = self.get_installer_filename(fullname)
        self.announce("creating %s" % installer_name)

        if bitmap:
            bitmapdata = open(bitmap, "rb").read()
            bitmaplen = len(bitmapdata)
        else:
            bitmaplen = 0

        file = open(installer_name, "wb")
        file.write(self.get_exe_bytes())
        if bitmap:
            file.write(bitmapdata)

        # Convert cfgdata from unicode to ascii, mbcs encoded
        try:
            unicode
        except NameError:
            pass
        else:
            if isinstance(cfgdata, unicode):
                cfgdata = cfgdata.encode("mbcs")

        # Append the pre-install script
        cfgdata = cfgdata + "\0"
        if self.pre_install_script:
            script_data = open(self.pre_install_script, "r").read()
            cfgdata = cfgdata + script_data + "\n\0"
        else:
            # empty pre-install script
            cfgdata = cfgdata + "\0"
        file.write(cfgdata)

        # The 'magic number' 0x1234567B is used to make sure that the
        # binary layout of 'cfgdata' is what the wininst.exe binary
        # expects.  If the layout changes, increment that number, make
        # the corresponding changes to the wininst.exe sources, and
        # recompile them.
        header = struct.pack("<iii",
                             0x1234567B,       # tag
                             len(cfgdata),     # length
                             bitmaplen,        # number of bytes in bitmap
                             )
        file.write(header)
        file.write(open(arcname, "rb").read())

    # create_exe()
def get_installer_filename(self, fullname):
    """Return the full path of the installer executable to create.

    Factored out so subclasses can override the naming scheme.
    """
    if self.target_version:
        # A version-specific installer advertises the target Python
        # version in its file name.
        basename = "%s.%s-py%s.exe" % (fullname, self.plat_name,
                                       self.target_version)
    else:
        basename = "%s.%s.exe" % (fullname, self.plat_name)
    return os.path.join(self.dist_dir, basename)
# get_installer_filename()
def get_exe_bytes (self):
    """Return the bytes of the wininst stub matching the target MSVC build.

    The stub must have been built with the same C compiler (MSVC
    version) as the Python that will run the installer.
    """
    from distutils.msvccompiler import get_build_version
    # If a target-version other than the current version has been
    # specified, then using the MSVC version from *this* build is no good.
    # Without actually finding and executing the target version and parsing
    # its sys.version, we just hard-code our knowledge of old versions.
    # NOTE: Possible alternative is to allow "--target-version" to
    # specify a Python executable rather than a simple version string.
    # We can then execute this program to obtain any info we need, such
    # as the real sys.version string for the build.
    cur_version = get_python_version()
    if not self.target_version or self.target_version == cur_version:
        # for current version - use authoritative check.
        bv = get_build_version()
    elif self.target_version > cur_version:
        # If the target version is *later* than us, then we assume they
        # use what we use.
        # String compares seem wrong, but are what sysconfig.py itself uses.
        bv = get_build_version()
    elif self.target_version < "2.4":
        bv = 6.0
    else:
        bv = 7.1
    # wininst-x.y.exe is in the same directory as this file.
    directory = os.path.dirname(__file__)
    # We must use a wininst-x.y.exe built with the same C compiler used
    # for python.  XXX What about mingw, borland, and so on?
    # If plat_name starts with "win" but is not "win32" we want to strip
    # "win" and keep the rest (e.g. "-amd64"); all other cases get no
    # suffix.
    suffix = ''
    if self.plat_name[:3] == 'win' and self.plat_name != 'win32':
        suffix = self.plat_name[3:]
    filename = os.path.join(directory, "wininst-%.1f%s.exe" % (bv, suffix))
    return open(filename, "rb").read()
# get_exe_bytes()
# class bdist_wininst
|
import About from '../components/About/About.component';
import Detail from '../components/Detail/Detail.component';
import Home from '../components/Home/Home.component';
import Icon from 'react-native-vector-icons/Foundation';
import List from '../components/List/List.component';
import React from 'react';
import Setting from '../components/Setting/Setting.component';
import VectorIcon from '../components/VectorIcon/VectorIcon.component';
import {DrawerNavigator, StackNavigator} from 'react-navigation';
const getRootHomeNavOpt01 = ({navigation}) => {
const onPressMenuButton = () => {
navigation.navigate('DrawerToggle');
};
const MenuButton = ( // eslint-disable-next-line
<Icon.Button name='list' color='red' iconStyle={{padding: 10}} backgroundColor='transparent' onPress={onPressMenuButton}/>
);
return {
headerTitle: 'Home Nav',
headerLeft: MenuButton,
drawerLabel: 'Home Drawer'
};
};
const HomeStackNavigator = StackNavigator({
Home: {
screen: Home,
navigationOptions: getRootHomeNavOpt01
},
List: {
screen: List,
navigationOptions: {
headerTitle: 'List'
}
},
Detail: {
screen: Detail,
navigationOptions: {
headerTitle: 'Detail'
}
}
});
const getRootHomeNavOpt02 = ({navigation}) => {
const onPressMenuButton = () => {
navigation.navigate('DrawerToggle');
};
const MenuButton = ( // eslint-disable-next-line
<Icon.Button name='list' color='green' iconStyle={{padding: 10}} backgroundColor='transparent' onPress={onPressMenuButton}/>
);
return {
headerTitle: 'About Nav',
headerLeft: MenuButton,
drawerLabel: 'About Drawer'
};
};
const AboutStackNavigator = StackNavigator({
About: {
screen: About,
navigationOptions: getRootHomeNavOpt02
}
});
const getRootHomeNavOpt03 = ({navigation}) => {
const onPressMenuButton = () => {
navigation.navigate('DrawerToggle');
};
const MenuButton = ( // eslint-disable-next-line
<Icon.Button name='list' color='blue' iconStyle={{padding: 10}} backgroundColor='transparent' onPress={onPressMenuButton}/>
);
return {
headerTitle: 'Setting Nav',
headerLeft: MenuButton,
drawerLabel: 'Setting Drawer'
};
};
const SettingStackNavigator = StackNavigator({
Setting: {
screen: Setting,
navigationOptions: getRootHomeNavOpt03
}
});
const getRootHomeNavOpt04 = ({navigation}) => {
const onPressMenuButton = () => {
navigation.navigate('DrawerToggle');
};
const MenuButton = ( // eslint-disable-next-line
<Icon.Button name='list' color='black' iconStyle={{padding: 10}} backgroundColor='transparent' onPress={onPressMenuButton}/>
);
return {
headerTitle: 'VectorIcon Nav',
headerLeft: MenuButton,
drawerLabel: 'VectorIcon Drawer'
};
};
const VectorIconStackNavigator = StackNavigator({
VectorIcon: {
screen: VectorIcon,
navigationOptions: getRootHomeNavOpt04
}
});
// ----------
const HomeDrawerNavigator = DrawerNavigator({
Home: {
screen: HomeStackNavigator
},
About: {
screen: AboutStackNavigator
},
Setting: {
screen: SettingStackNavigator
},
VectorIcon: {
screen: VectorIconStackNavigator
}
});
export default HomeDrawerNavigator;
|
/**
******************************************************************************
* @file stm32f30x_wwdg.h
* @author MCD Application Team
* @version V1.2.2
* @date 27-February-2015
* @brief This file contains all the functions prototypes for the WWDG
* firmware library.
******************************************************************************
* @attention
*
* <h2><center>© COPYRIGHT 2015 STMicroelectronics</center></h2>
*
* Licensed under MCD-ST Liberty SW License Agreement V2, (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.st.com/software_license_agreement_liberty_v2
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************
*/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef __STM32F30x_WWDG_H
#define __STM32F30x_WWDG_H
#ifdef __cplusplus
extern "C" {
#endif
/* Includes ------------------------------------------------------------------*/
#include "stm32f30x.h"
/** @addtogroup STM32F30x_StdPeriph_Driver
* @{
*/
/** @addtogroup WWDG
* @{
*/
/* Exported types ------------------------------------------------------------*/
/* Exported constants --------------------------------------------------------*/
/** @defgroup WWDG_Exported_Constants
* @{
*/
/** @defgroup WWDG_Prescaler
* @{
*/
/* Counter clock prescaler selection values. */
#define WWDG_Prescaler_1 ((uint32_t)0x00000000)
#define WWDG_Prescaler_2 ((uint32_t)0x00000080)
#define WWDG_Prescaler_4 ((uint32_t)0x00000100)
#define WWDG_Prescaler_8 ((uint32_t)0x00000180)
/* Parameter validation macros. */
#define IS_WWDG_PRESCALER(PRESCALER) (((PRESCALER) == WWDG_Prescaler_1) || \
((PRESCALER) == WWDG_Prescaler_2) || \
((PRESCALER) == WWDG_Prescaler_4) || \
((PRESCALER) == WWDG_Prescaler_8))
/* The refresh window value is 7 bits wide (<= 0x7F). */
#define IS_WWDG_WINDOW_VALUE(VALUE) ((VALUE) <= 0x7F)
/* The counter must lie in [0x40, 0x7F]. */
#define IS_WWDG_COUNTER(COUNTER) (((COUNTER) >= 0x40) && ((COUNTER) <= 0x7F))
/**
* @}
*/
/**
* @}
*/
/* Exported macro ------------------------------------------------------------*/
/* Exported functions ------------------------------------------------------- */
/* Function used to set the WWDG configuration to the default reset state ****/
void WWDG_DeInit(void);
/* Prescaler, Refresh window and Counter configuration functions **************/
void WWDG_SetPrescaler(uint32_t WWDG_Prescaler);
void WWDG_SetWindowValue(uint8_t WindowValue);
void WWDG_EnableIT(void);
void WWDG_SetCounter(uint8_t Counter);
/* WWDG activation functions **************************************************/
void WWDG_Enable(uint8_t Counter);
/* Interrupts and flags management functions **********************************/
FlagStatus WWDG_GetFlagStatus(void);
void WWDG_ClearFlag(void);
#ifdef __cplusplus
}
#endif
#endif /* __STM32F30x_WWDG_H */
/**
* @}
*/
/**
* @}
*/
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
|
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@angular/cdk/platform"),require("@angular/core"),require("@ant-design/icons-angular"),require("ng-zorro-antd/core/util"),require("@angular/common"),require("@angular/common/http"),require("@angular/platform-browser"),require("ng-zorro-antd/core/config"),require("ng-zorro-antd/core/logger"),require("rxjs"),require("@ant-design/icons-angular/icons")):"function"==typeof define&&define.amd?define("ng-zorro-antd/icon",["exports","@angular/cdk/platform","@angular/core","@ant-design/icons-angular","ng-zorro-antd/core/util","@angular/common","@angular/common/http","@angular/platform-browser","ng-zorro-antd/core/config","ng-zorro-antd/core/logger","rxjs","@ant-design/icons-angular/icons"],t):t(((e="undefined"!=typeof globalThis?globalThis:e||self)["ng-zorro-antd"]=e["ng-zorro-antd"]||{},e["ng-zorro-antd"].icon={}),e.ng.cdk.platform,e.ng.core,e.iconsAngular,e["ng-zorro-antd"].core.util,e.ng.common,e.ng.common.http,e.ng.platformBrowser,e["ng-zorro-antd"].core.config,e["ng-zorro-antd"].core.logger,e.rxjs,e.icons)}(this,(function(e,t,n,o,r,i,c,a,l,s,u,p){"use strict";
/*! *****************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */var f=function(e,t){return(f=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,t){e.__proto__=t}||function(e,t){for(var n in t)Object.prototype.hasOwnProperty.call(t,n)&&(e[n]=t[n])})(e,t)};function h(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Class extends value "+String(t)+" is not a constructor or null");function n(){this.constructor=e}f(e,t),e.prototype=null===t?Object.create(t):(n.prototype=t.prototype,new n)}function d(e,t){if("object"==typeof Reflect&&"function"==typeof Reflect.metadata)return Reflect.metadata(e,t)}Object.create;function g(e,t){var n="function"==typeof Symbol&&e[Symbol.iterator];if(!n)return e;var o,r,i=n.call(e),c=[];try{for(;(void 0===t||t-- >0)&&!(o=i.next()).done;)c.push(o.value)}catch(e){r={error:e}}finally{try{o&&!o.done&&(n=i.return)&&n.call(i)}finally{if(r)throw r.error}}return c}Object.create;var y=[p.BarsOutline,p.CalendarOutline,p.CaretUpFill,p.CaretUpOutline,p.CaretDownFill,p.CaretDownOutline,p.CheckCircleFill,p.CheckCircleOutline,p.CheckOutline,p.ClockCircleOutline,p.CloseCircleOutline,p.CloseCircleFill,p.CloseOutline,p.CopyOutline,p.DoubleLeftOutline,p.DoubleRightOutline,p.DownOutline,p.EditOutline,p.EllipsisOutline,p.ExclamationCircleFill,p.ExclamationCircleOutline,p.EyeOutline,p.FileFill,p.FileOutline,p.FilterFill,p.InfoCircleFill,p.InfoCircleOutline,p.LeftOutline,p.LoadingOutline,p.PaperClipOutline,p.QuestionCircleOutline,p.RightOutline,p.RotateRightOutline,p.RotateLeftOutline,p.StarFill,p.SearchOutline,p.StarFill,p.UploadOutline,p.VerticalAlignTopOutline,p.UpOutline,p.SwapRightOutline,p.ZoomInOutline,p.ZoomOutOutline],m=new n.InjectionToken("nz_icons"),C=new n.InjectionToken("nz_icon_default_twotone_color"),O="#1890ff",v=function(e){function t(t,n,o,r,i,c){var a=e.call(this,t,r,i,n)||this;return a.nzConfigService=o,a.configUpdated$=new u.Subject,a.iconfontCache=new 
Set,a.onConfigChange(),a.addIcon.apply(a,function(){for(var e=[],t=0;t<arguments.length;t++)e=e.concat(g(arguments[t]));return e}(y,c||[])),a.configDefaultTwotoneColor(),a.configDefaultTheme(),a}return h(t,e),t.prototype.normalizeSvgElement=function(e){e.getAttribute("viewBox")||this._renderer.setAttribute(e,"viewBox","0 0 1024 1024"),e.getAttribute("width")&&e.getAttribute("height")||(this._renderer.setAttribute(e,"width","1em"),this._renderer.setAttribute(e,"height","1em")),e.getAttribute("fill")||this._renderer.setAttribute(e,"fill","currentColor")},t.prototype.fetchFromIconfont=function(e){var t=e.scriptUrl;if(this._document&&!this.iconfontCache.has(t)){var n=this._renderer.createElement("script");this._renderer.setAttribute(n,"src",t),this._renderer.setAttribute(n,"data-namespace",t.replace(/^(https?|http):/g,"")),this._renderer.appendChild(this._document.body,n),this.iconfontCache.add(t)}},t.prototype.createIconfontIcon=function(e){return this._createSVGElementFromString('<svg><use xlink:href="'+e+'"></svg>')},t.prototype.onConfigChange=function(){var e=this;this.nzConfigService.getConfigChangeEventForComponent("icon").subscribe((function(){e.configDefaultTwotoneColor(),e.configDefaultTheme(),e.configUpdated$.next()}))},t.prototype.configDefaultTheme=function(){var e=this.getConfig();this.defaultTheme=e.nzTheme||"outline"},t.prototype.configDefaultTwotoneColor=function(){var e=this.getConfig().nzTwotoneColor||O,t=O;e&&(e.startsWith("#")?t=e:s.warn("Twotone color must be a hex color!")),this.twoToneColor={primaryColor:t}},t.prototype.getConfig=function(){return this.nzConfigService.getConfigForComponent("icon")||{}},t}(o.IconService);v.ɵprov=n.ɵɵdefineInjectable({factory:function(){return new 
v(n.ɵɵinject(n.RendererFactory2),n.ɵɵinject(a.DomSanitizer),n.ɵɵinject(l.NzConfigService),n.ɵɵinject(c.HttpBackend,8),n.ɵɵinject(i.DOCUMENT,8),n.ɵɵinject(m,8))},token:v,providedIn:"root"}),v.decorators=[{type:n.Injectable,args:[{providedIn:"root"}]}],v.ctorParameters=function(){return[{type:n.RendererFactory2},{type:a.DomSanitizer},{type:l.NzConfigService},{type:c.HttpBackend,decorators:[{type:n.Optional}]},{type:void 0,decorators:[{type:n.Optional},{type:n.Inject,args:[i.DOCUMENT]}]},{type:Array,decorators:[{type:n.Optional},{type:n.Inject,args:[m]}]}]};var b=new n.InjectionToken("nz_icons_patch"),z=function(){function e(e,t){this.extraIcons=e,this.rootIconService=t,this.patched=!1}return e.prototype.doPatch=function(){var e=this;this.patched||(this.extraIcons.forEach((function(t){return e.rootIconService.addIcon(t)})),this.patched=!0)},e}();z.decorators=[{type:n.Injectable}],z.ctorParameters=function(){return[{type:Array,decorators:[{type:n.Self},{type:n.Inject,args:[b]}]},{type:v}]};var I=function(e){function t(t,n,o,r){var i=e.call(this,n,t,o)||this;return i.iconService=n,i.renderer=o,i.cacheClassName=null,i.nzRotate=0,i.spin=!1,r&&r.doPatch(),i.el=t.nativeElement,i}return h(t,e),Object.defineProperty(t.prototype,"nzSpin",{set:function(e){this.spin=e},enumerable:!1,configurable:!0}),Object.defineProperty(t.prototype,"nzType",{set:function(e){this.type=e},enumerable:!1,configurable:!0}),Object.defineProperty(t.prototype,"nzTheme",{set:function(e){this.theme=e},enumerable:!1,configurable:!0}),Object.defineProperty(t.prototype,"nzTwotoneColor",{set:function(e){this.twoToneColor=e},enumerable:!1,configurable:!0}),Object.defineProperty(t.prototype,"nzIconfont",{set:function(e){this.iconfont=e},enumerable:!1,configurable:!0}),t.prototype.ngOnChanges=function(e){var 
t=e.nzType,n=e.nzTwotoneColor,o=e.nzSpin,r=e.nzTheme,i=e.nzRotate;t||n||o||r?this.changeIcon2():i?this.handleRotate(this.el.firstChild):this._setSVGElement(this.iconService.createIconfontIcon("#"+this.iconfont))},t.prototype.ngOnInit=function(){this.renderer.setAttribute(this.el,"class",("anticon "+this.el.className).trim())},t.prototype.ngAfterContentChecked=function(){if(!this.type){var e=this.el.children,t=e.length;if(!this.type&&e.length)for(;t--;){var n=e[t];"svg"===n.tagName.toLowerCase()&&this.iconService.normalizeSvgElement(n)}}},t.prototype.changeIcon2=function(){var e=this;this.setClassName(),this._changeIcon().then((function(t){t&&(e.setSVGData(t),e.handleSpin(t),e.handleRotate(t))}))},t.prototype.handleSpin=function(e){this.spin||"loading"===this.type?this.renderer.addClass(e,"anticon-spin"):this.renderer.removeClass(e,"anticon-spin")},t.prototype.handleRotate=function(e){this.nzRotate?this.renderer.setAttribute(e,"style","transform: rotate("+this.nzRotate+"deg)"):this.renderer.removeAttribute(e,"style")},t.prototype.setClassName=function(){this.cacheClassName&&this.renderer.removeClass(this.el,this.cacheClassName),this.cacheClassName="anticon-"+this.type,this.renderer.addClass(this.el,this.cacheClassName)},t.prototype.setSVGData=function(e){this.renderer.setAttribute(e,"data-icon",this.type),this.renderer.setAttribute(e,"aria-hidden","true")},t}(o.IconDirective);I.decorators=[{type:n.Directive,args:[{selector:"[nz-icon]",exportAs:"nzIcon",host:{"[class.anticon]":"true"}}]}],I.ctorParameters=function(){return[{type:n.ElementRef},{type:v},{type:n.Renderer2},{type:z,decorators:[{type:n.Optional}]}]},I.propDecorators={nzSpin:[{type:n.Input}],nzRotate:[{type:n.Input}],nzType:[{type:n.Input}],nzTheme:[{type:n.Input}],nzTwotoneColor:[{type:n.Input}],nzIconfont:[{type:n.Input}]},function(e,t,n,o){var r,i=arguments.length,c=i<3?t:null===o?o=Object.getOwnPropertyDescriptor(t,n):o;if("object"==typeof Reflect&&"function"==typeof 
Reflect.decorate)c=Reflect.decorate(e,t,n,o);else for(var a=e.length-1;a>=0;a--)(r=e[a])&&(c=(i<3?r(c):i>3?r(t,n,c):r(t,n))||c);i>3&&c&&Object.defineProperty(t,n,c)}([r.InputBoolean(),d("design:type",Boolean),d("design:paramtypes",[Boolean])],I.prototype,"nzSpin",null);var _=function(){function e(){}return e.forRoot=function(t){return{ngModule:e,providers:[{provide:m,useValue:t}]}},e.forChild=function(t){return{ngModule:e,providers:[z,{provide:b,useValue:t}]}},e}();_.decorators=[{type:n.NgModule,args:[{exports:[I],declarations:[I],imports:[t.PlatformModule]}]}],e.DEFAULT_TWOTONE_COLOR=O,e.NZ_ICONS=m,e.NZ_ICONS_PATCH=b,e.NZ_ICONS_USED_BY_ZORRO=y,e.NZ_ICON_DEFAULT_TWOTONE_COLOR=C,e.NzIconDirective=I,e.NzIconModule=_,e.NzIconPatchService=z,e.NzIconService=v,Object.defineProperty(e,"__esModule",{value:!0})}));
//# sourceMappingURL=ng-zorro-antd-icon.umd.min.js.map
|
'use strict';

const Cluster = require('../Cluster');

// No attributes or commands are implemented for this cluster yet; the
// empty maps keep the Cluster interface satisfied.
const ATTRIBUTES = {};
const COMMANDS = {};

// Fan Control cluster (ID 514 / 0x0202).
class FanControlCluster extends Cluster {
  static get ID() {
    return 514;
  }

  static get NAME() {
    return 'fanControl';
  }

  static get ATTRIBUTES() {
    return ATTRIBUTES;
  }

  static get COMMANDS() {
    return COMMANDS;
  }
}

Cluster.addCluster(FanControlCluster);

module.exports = FanControlCluster;
|
# Auto generated by generator.py. Delete this line if you make modification.
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
# XPath selectors for the product fields scraped from bevame.vn pages.
# Empty strings mean the field is not extracted for this site.
XPATH = {
    'name' : "//div[@class='titleDeal']/h1[@class='title-dealdt']",
    'price' : "//div[@class='amo-price']/p[@class='priceDeal']/span",
    'category' : "//div[@class='current-path']/p/span/a",
    'description' : "//div[@id='highlights-deal']/p",
    'images' : "//div[@class='slideImgDeal']/div[@id='slider1_container']//img/@src",
    'canonical' : "//link[@rel='canonical']/@href",
    'base_url' : "",
    'brand' : ""
}
name = 'bevame.vn'
allowed_domains = ['bevame.vn']
start_urls = ['http://bevame.vn/']
tracking_url = ''
# No sitemap is configured for this site; crawling relies on the link rules below.
sitemap_urls = ['']
sitemap_rules = [('', 'parse_item')]
sitemap_follow = []
# URLs ending in "-<digits>.html" are product pages; other ".html" URLs are
# listing pages that are followed for further links.
rules = [
    Rule(LinkExtractor(allow=['/[a-zA-Z0-9-]+-\d+\.html']), 'parse_item'),
    Rule(LinkExtractor(allow=['/[a-zA-Z0-9-]+\.html']), 'parse'),
    #Rule(LinkExtractor(), 'parse_item_and_links'),
]
|
/* @flow */
import * as React from 'react';
import { View, StyleSheet } from 'react-native';
import { polyfill } from 'react-lifecycles-compat';
import createTabNavigator, {
type InjectedProps,
} from '../utils/createTabNavigator';
import BottomTabBar, { type TabBarOptions } from '../views/BottomTabBar';
import ResourceSavingScene from '../views/ResourceSavingScene';
// Props injected by createTabNavigator plus bottom-tab-specific options.
type Props = InjectedProps & {
  lazy?: boolean,
  tabBarComponent?: React.ComponentType<*>,
  tabBarOptions?: TabBarOptions,
};

// Indices of tabs that have been visited at least once (and therefore
// have mounted scenes).
type State = {
  loaded: number[],
};
// Renders the scene container and the tab bar for a bottom-tab navigator.
// Scenes stay mounted inside ResourceSavingScene once visited; `lazy`
// defers a scene's first mount until its tab is focused.
class TabNavigationView extends React.PureComponent<Props, State> {
  static defaultProps = {
    lazy: true,
  };

  // Record every index the user has navigated to, so lazily-created
  // scenes remain mounted afterwards.
  static getDerivedStateFromProps(nextProps, prevState) {
    const { index } = nextProps.navigation.state;
    return {
      // Set the current tab to be loaded if it was not loaded before
      loaded: prevState.loaded.includes(index)
        ? prevState.loaded
        : [...prevState.loaded, index],
    };
  }

  state = {
    loaded: [this.props.navigation.state.index],
  };

  // Resolve a route's label; labels may be plain values or a render
  // function receiving { focused, tintColor }.
  _getLabel = ({ route, focused, tintColor }) => {
    const label = this.props.getLabelText({ route });
    if (typeof label === 'function') {
      return label({ focused, tintColor });
    }
    return label;
  };

  // Render the tab bar (hidden when the focused route sets
  // tabBarVisible: false); forwards the navigator's accessors to the
  // tab bar component.
  _renderTabBar = () => {
    const {
      tabBarComponent: TabBarComponent = BottomTabBar,
      tabBarOptions,
      navigation,
      screenProps,
      getLabelText,
      getAccessibilityLabel,
      getButtonComponent,
      getTestID,
      renderIcon,
      onTabPress,
    } = this.props;
    const { descriptors } = this.props;
    const { state } = this.props.navigation;
    const route = state.routes[state.index];
    const descriptor = descriptors[route.key];
    const options = descriptor.options;
    if (options.tabBarVisible === false) {
      return null;
    }
    return (
      <TabBarComponent
        {...tabBarOptions}
        jumpTo={this._jumpTo}
        navigation={navigation}
        screenProps={screenProps}
        onTabPress={onTabPress}
        getLabelText={getLabelText}
        getButtonComponent={getButtonComponent}
        getAccessibilityLabel={getAccessibilityLabel}
        getTestID={getTestID}
        renderIcon={renderIcon}
      />
    );
  };

  // Switch tabs by route key: translate the key to an index and notify
  // the navigator.
  _jumpTo = (key: string) => {
    const { navigation, onIndexChange } = this.props;
    const index = navigation.state.routes.findIndex(route => route.key === key);
    onIndexChange(index);
  };

  render() {
    const { navigation, renderScene, lazy } = this.props;
    const { routes } = navigation.state;
    const { loaded } = this.state;
    return (
      <View style={styles.container}>
        <View style={styles.pages}>
          {routes.map((route, index) => {
            if (lazy && !loaded.includes(index)) {
              // Don't render a screen if we've never navigated to it
              return null;
            }
            const isFocused = navigation.state.index === index;
            // Unfocused scenes are kept mounted but fully transparent.
            return (
              <ResourceSavingScene
                key={route.key}
                style={[
                  StyleSheet.absoluteFill,
                  { opacity: isFocused ? 1 : 0 },
                ]}
                isFocused={isFocused}
              >
                {renderScene({ route })}
              </ResourceSavingScene>
            );
          })}
        </View>
        {this._renderTabBar()}
      </View>
    );
  }
}
// Backport getDerivedStateFromProps for older React versions.
polyfill(TabNavigationView);

const styles = StyleSheet.create({
  container: {
    flex: 1,
    overflow: 'hidden',
  },
  pages: {
    flex: 1,
  },
});

export default createTabNavigator(TabNavigationView);
|
const modsApi = {};

// Static catalogue data; the list and the per-id lookup table describe
// the same mods.
const modList = [
  { id: 'testmod', name: 'TestMod' }
];
const mods = {
  testmod: { name: 'TestMod' }
};

// Mod list endpoint.
// On success responds with: { error: false, mods }
modsApi.modList = (req, res) => {
  res.json({ error: false, mods: modList });
};

// Mod details endpoint (expects req.params.modId).
// Responds { error: true } for a missing or unknown id,
// otherwise { error: false, mod }.
modsApi.mod = (req, res) => {
  const modId = req.params.modId;
  const mod = modId ? mods[modId] : undefined;
  if (!mod) {
    return res.json({ error: true });
  }
  res.json({ error: false, mod });
};

module.exports = modsApi;
|
class Solution:
    def maxDistance(self, grid: 'List[List[int]]') -> int:
        """Return the maximum Manhattan distance from any water cell (0)
        to its nearest land cell (1) in the square grid, or -1 when the
        grid is all land or all water.

        Multi-source BFS: seed the queue with every land cell, then
        expand one ring of water per layer; the number of layers minus
        one is the answer.

        Fixes over the original: the annotation is quoted so the class
        definition no longer requires an unimported `typing.List`; the
        queue uses collections.deque (list.pop(0) made each dequeue
        O(n), i.e. accidental O(n^2) overall).
        """
        from collections import deque
        n = len(grid)  # grid is square: n rows, n columns
        frontier = deque(
            (i, j) for i in range(n) for j in range(n) if grid[i][j] == 1
        )
        # All land or all water: no water/land pair exists.
        if not frontier or len(frontier) == n * n:
            return -1
        distance = 0
        while frontier:
            # Process one whole BFS layer per iteration.
            for _ in range(len(frontier)):
                i, j = frontier.popleft()
                for di, dj in ((1, 0), (-1, 0), (0, 1), (0, -1)):
                    ni, nj = i + di, j + dj
                    if 0 <= ni < n and 0 <= nj < n and grid[ni][nj] == 0:
                        grid[ni][nj] = 1  # mark visited in place
                        frontier.append((ni, nj))
            distance += 1
        # The land layer itself counted one increment; subtract it.
        return distance - 1
|
//
// ABCardWindowControllerPatches.h
// ABKeyManager
//
// Created by Robert Goldsmith on 12/03/2005.
// Copyright 2005 Far-Blue. All rights reserved.
//
#import <Cocoa/Cocoa.h>
// Empty interface with no instance variables or methods declared here.
// NOTE(review): presumably the patches for the AddressBook card window
// controller are attached to this class elsewhere (e.g. via a category
// in the implementation file) — confirm against the .m file.
@interface ABCardWindowControllerPatches : NSObject {
}
@end
|
import React, { useState, useEffect } from 'react'
import { StyleSheet } from 'react-native'
import { ThemeContext } from '../../../contexts/theme-context'
import themes from '../../../resources/themes'
import ButtonWithIcon from '../../buttons/ButtonWithIcon'
const ToggleThemeButton = () => {
const themeContext = React.useContext(ThemeContext)
const inverseThemeName = theme => theme === 'light' ? 'dark' : 'light'
const [themeName, setThemeName] = useState(inverseThemeName(themeContext.theme))
const [themeButtonText, setthemeButtonText] = useState('DARK')
const [themeButtonIcon, setthemeButtonIcon] = useState('moon')
const changeTheme = () => {
themeContext.toggleTheme()
setThemeName(inverseThemeName(themeName))
}
useEffect(() => {
const { text, icon } = themeName === 'light' ? themes.dark : themes.light
setthemeButtonIcon(icon)
setthemeButtonText(text)
})
return (
<ButtonWithIcon
accessibilityRole="button"
accessibilityLabel="UI Kitten Change Theme"
style={styles.iconButton}
text={`SWITCH TO ${themeButtonText} THEME`}
icon={themeButtonIcon}
onPress={changeTheme}
iconStyle={{ tintColor: 'white' }}
/>
)
}
export default ToggleThemeButton

const styles = StyleSheet.create({
  iconButton: {
    // Vertical breathing room around the toggle button.
    marginVertical: 16
  }
})
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
class Network(nn.Module):
    """Two-hidden-layer fully connected network.

    Architecture: Linear -> ReLU -> Linear -> ReLU -> Linear.
    Weights are Xavier-uniform initialized; biases start at zero.
    """

    def __init__(self, input_dim, out_dim, hidden1=128, hidden2=128):
        super(Network, self).__init__()
        self.fc1 = nn.Linear(input_dim, hidden1)
        self.fc2 = nn.Linear(hidden1, hidden2)
        self.output_layer = nn.Linear(hidden2, out_dim)
        self._initialize()

    def _initialize(self):
        """Apply Xavier-uniform weights and zero biases to every layer."""
        for layer in (self.fc1, self.fc2, self.output_layer):
            nn.init.xavier_uniform_(layer.weight.data)
            nn.init.constant_(layer.bias.data, 0.0)

    def forward(self, inputs):
        """Run a forward pass; ReLU activations are applied in place."""
        hidden = F.relu(self.fc1(inputs), inplace=True)
        hidden = F.relu(self.fc2(hidden), inplace=True)
        return self.output_layer(hidden)
if __name__ == '__main__':
    # Smoke test: feed random 16-dim inputs through an untrained net and
    # print the outputs.
    # NOTE(review): optim and loss_func are created but never used below;
    # presumably leftovers from a training loop — confirm before removing.
    net = Network(16, 1)
    optim = torch.optim.Adam(net.parameters(), lr=0.005)
    loss_func = nn.MSELoss()
    for i in range(24):
        state = torch.randn(16).to(torch.float32)
        # Calls forward() directly, bypassing nn.Module.__call__ hooks.
        action = net.forward(state)
        print(action)
|
// This file was procedurally generated from the following sources:
// - src/dstr-assignment/array-elem-nested-obj-null.case
// - src/dstr-assignment/error/assignment-expr.template
/*---
description: When DestructuringAssignmentTarget is an object literal and the value is `null`, a TypeError should be thrown. (AssignmentExpression)
esid: sec-variable-statement-runtime-semantics-evaluation
features: [destructuring-binding]
flags: [generated]
info: |
VariableDeclaration : BindingPattern Initializer
1. Let rhs be the result of evaluating Initializer.
2. Let rval be GetValue(rhs).
3. ReturnIfAbrupt(rval).
4. Return the result of performing BindingInitialization for
BindingPattern passing rval and undefined as arguments.
---*/
// Destructuring the nested object pattern { x } against `null` must
// throw a TypeError, since null cannot supply properties.
var x;

assert.throws(TypeError, function() {
  0, [{ x }] = [null];
});
|
from model.group import Group
import random
import string
import os
import getopt
import sys
import jsonpickle
# Parse command-line options: -n <number of groups>, -f <output file>.
try:
    opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number of groups", "file"])
except getopt.GetoptError as err:
    # The getopt module has no usage() helper (the original called the
    # nonexistent getopt.usage(), raising AttributeError on bad options);
    # report the error and print usage ourselves.
    print(err)
    print("Usage: %s [-n number_of_groups] [-f output_file]" % sys.argv[0])
    sys.exit(2)

# Defaults: 5 groups written to data/groups.json.
n = 5
f = "data/groups.json"

for o, a in opts:
    if o == "-n":
        n = int(a)
    elif o == "-f":
        f = a
def random_string(prefix, maxlen):
    """Return `prefix` followed by 0..maxlen-1 random characters.

    The alphabet is letters, digits and punctuation; the space character
    is repeated tenfold so spaces appear with higher probability.
    """
    alphabet = string.ascii_letters + string.digits + string.punctuation + " " * 10
    length = random.randrange(maxlen)
    return prefix + "".join(random.choice(alphabet) for _ in range(length))
# One empty group plus n randomized groups; the empty group exercises
# boundary handling in consumers of this test data.
testdata = [Group(name="", header="", footer="")] + [
    Group(name=random_string("name", 10), header=random_string("header", 20),
          footer=random_string("footer", 20))
    for i in range(n)
]

# Resolve the output path relative to this script's parent directory and
# serialize the groups as pretty-printed JSON via jsonpickle.
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, "w") as out:
    jsonpickle.set_encoder_options("json", indent=2)
    out.write(jsonpickle.encode(testdata))
|
// -*-Mode: C++;-*- // technically C99
// * BeginRiceCopyright *****************************************************
//
// $HeadURL$
// $Id$
//
// --------------------------------------------------------------------------
// Part of HPCToolkit (hpctoolkit.org)
//
// Information about sources of support for research and development of
// HPCToolkit is at 'hpctoolkit.org' and in 'README.Acknowledgments'.
// --------------------------------------------------------------------------
//
// Copyright ((c)) 2002-2020, Rice University
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// * Neither the name of Rice University (RICE) nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// This software is provided by RICE and contributors "as is" and any
// express or implied warranties, including, but not limited to, the
// implied warranties of merchantability and fitness for a particular
// purpose are disclaimed. In no event shall RICE or contributors be
// liable for any direct, indirect, incidental, special, exemplary, or
// consequential damages (including, but not limited to, procurement of
// substitute goods or services; loss of use, data, or profits; or
// business interruption) however caused and on any theory of liability,
// whether in contract, strict liability, or tort (including negligence
// or otherwise) arising in any way out of the use of this software, even
// if advised of the possibility of such damage.
//
// ******************************************************* EndRiceCopyright *
//***************************************************************************
//
// File:
// libdl.h
//
// Purpose:
// simple wrappers that facilitate using dlopen and dlsym to dynamically
// bind symbols for use by hpcrun sample sources.
//
//***************************************************************************
#ifndef _HPCTOOLKIT_LIBDL_H_
#define _HPCTOOLKIT_LIBDL_H_
//*****************************************************************************
// system include files
//*****************************************************************************
#include <dlfcn.h>
//*****************************************************************************
// local include files
//*****************************************************************************
#include <monitor.h>
//*****************************************************************************
// macros
//*****************************************************************************
/* Generic error sentinel for the dynamic-loading helpers. */
#define DLERR -1

/* Name of the variable that holds the dlsym'd pointer for function f. */
#define DYN_FN_NAME(f) f ## _fn

/* Declare a local handle `h` bound via dlopen(lib, flags); on failure,
   return -1 from the enclosing function.  Declares a variable, so it
   intentionally is not wrapped in do { } while (0). */
#define CHK_DLOPEN(h, lib, flags) \
void* h = dlopen(lib, flags); \
if (!h) { \
return -1; \
}

/* Resolve symbol `fn` from handle `h` into DYN_FN_NAME(fn); on failure,
   return -1 from the enclosing function.  dlerror() is called first to
   clear any stale error state. */
#define CHK_DLSYM(h, fn) { \
dlerror(); \
DYN_FN_NAME(fn) = dlsym(h, #fn); \
if (DYN_FN_NAME(fn) == 0) { \
return -1; \
} \
}
#endif
|
/*
* Generated by asn1c-0.9.29 (http://lionet.info/asn1c)
* From ASN.1 module "EUTRA-RRC-Definitions"
* found in "/home/user/openairinterface5g/openair2/RRC/LTE/MESSAGES/asn1c/ASN1_files/lte-rrc-14.7.0.asn1"
* `asn1c -pdu=all -fcompound-names -gen-PER -no-gen-OER -no-gen-example -D /home/user/openairinterface5g/cmake_targets/basic_simulator/ue/CMakeFiles/RRC_Rel14`
*/
#include "RadioResourceConfigCommonSIB.h"
/*
 * Generated by asn1c -- hand edits are lost on regeneration.
 * Support tables and type descriptor for the `ext1` extension group of
 * RadioResourceConfigCommonSIB: a SEQUENCE with one OPTIONAL member,
 * uplinkPowerControlCommon-v1020, carried under context tag [10] in the
 * parent SEQUENCE.
 */
static asn_TYPE_member_t asn_MBR_ext1_13[] = {
	{ ATF_POINTER, 1, offsetof(struct RadioResourceConfigCommonSIB__ext1, uplinkPowerControlCommon_v1020),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_UplinkPowerControlCommon_v1020,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"uplinkPowerControlCommon-v1020"
		},
};
/* Indices (into asn_MBR_ext1_13) of the OPTIONAL members. */
static const int asn_MAP_ext1_oms_13[] = { 0 };
/* Outer tags: context [10] from the parent, then UNIVERSAL 16 (SEQUENCE). */
static const ber_tlv_tag_t asn_DEF_ext1_tags_13[] = {
	(ASN_TAG_CLASS_CONTEXT | (10 << 2)),
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* Tag-to-member map consulted by the BER/DER decoder. */
static const asn_TYPE_tag2member_t asn_MAP_ext1_tag2el_13[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 } /* uplinkPowerControlCommon-v1020 */
};
static asn_SEQUENCE_specifics_t asn_SPC_ext1_specs_13 = {
	sizeof(struct RadioResourceConfigCommonSIB__ext1),
	offsetof(struct RadioResourceConfigCommonSIB__ext1, _asn_ctx),
	asn_MAP_ext1_tag2el_13,
	1,	/* Count of tags in the map */
	asn_MAP_ext1_oms_13,	/* Optional members */
	1, 0,	/* Root/Additions */
	-1,	/* First extension addition */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_ext1_13 = {
	"ext1",
	"ext1",
	&asn_OP_SEQUENCE,
	asn_DEF_ext1_tags_13,
	sizeof(asn_DEF_ext1_tags_13)
		/sizeof(asn_DEF_ext1_tags_13[0]) - 1, /* 1 */
	asn_DEF_ext1_tags_13,	/* Same as above */
	sizeof(asn_DEF_ext1_tags_13)
		/sizeof(asn_DEF_ext1_tags_13[0]), /* 2 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_ext1_13,
	1,	/* Elements count */
	&asn_SPC_ext1_specs_13	/* Additional specs */
};
/*
 * Generated by asn1c -- hand edits are lost on regeneration.
 * Support tables and type descriptor for the `ext2` extension group:
 * one OPTIONAL member, rach-ConfigCommon-v1250, under context tag [11]
 * in the parent SEQUENCE.
 */
static asn_TYPE_member_t asn_MBR_ext2_15[] = {
	{ ATF_POINTER, 1, offsetof(struct RadioResourceConfigCommonSIB__ext2, rach_ConfigCommon_v1250),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_RACH_ConfigCommon_v1250,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"rach-ConfigCommon-v1250"
		},
};
/* Indices (into asn_MBR_ext2_15) of the OPTIONAL members. */
static const int asn_MAP_ext2_oms_15[] = { 0 };
/* Outer tags: context [11] from the parent, then UNIVERSAL 16 (SEQUENCE). */
static const ber_tlv_tag_t asn_DEF_ext2_tags_15[] = {
	(ASN_TAG_CLASS_CONTEXT | (11 << 2)),
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* Tag-to-member map consulted by the BER/DER decoder. */
static const asn_TYPE_tag2member_t asn_MAP_ext2_tag2el_15[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 } /* rach-ConfigCommon-v1250 */
};
static asn_SEQUENCE_specifics_t asn_SPC_ext2_specs_15 = {
	sizeof(struct RadioResourceConfigCommonSIB__ext2),
	offsetof(struct RadioResourceConfigCommonSIB__ext2, _asn_ctx),
	asn_MAP_ext2_tag2el_15,
	1,	/* Count of tags in the map */
	asn_MAP_ext2_oms_15,	/* Optional members */
	1, 0,	/* Root/Additions */
	-1,	/* First extension addition */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_ext2_15 = {
	"ext2",
	"ext2",
	&asn_OP_SEQUENCE,
	asn_DEF_ext2_tags_15,
	sizeof(asn_DEF_ext2_tags_15)
		/sizeof(asn_DEF_ext2_tags_15[0]) - 1, /* 1 */
	asn_DEF_ext2_tags_15,	/* Same as above */
	sizeof(asn_DEF_ext2_tags_15)
		/sizeof(asn_DEF_ext2_tags_15[0]), /* 2 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_ext2_15,
	1,	/* Elements count */
	&asn_SPC_ext2_specs_15	/* Additional specs */
};
/*
 * Generated by asn1c -- hand edits are lost on regeneration.
 * Support tables and type descriptor for the `ext3` extension group:
 * one OPTIONAL member, pusch-ConfigCommon-v1270, under context tag [12]
 * in the parent SEQUENCE.
 */
static asn_TYPE_member_t asn_MBR_ext3_17[] = {
	{ ATF_POINTER, 1, offsetof(struct RadioResourceConfigCommonSIB__ext3, pusch_ConfigCommon_v1270),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PUSCH_ConfigCommon_v1270,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"pusch-ConfigCommon-v1270"
		},
};
/* Indices (into asn_MBR_ext3_17) of the OPTIONAL members. */
static const int asn_MAP_ext3_oms_17[] = { 0 };
/* Outer tags: context [12] from the parent, then UNIVERSAL 16 (SEQUENCE). */
static const ber_tlv_tag_t asn_DEF_ext3_tags_17[] = {
	(ASN_TAG_CLASS_CONTEXT | (12 << 2)),
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* Tag-to-member map consulted by the BER/DER decoder. */
static const asn_TYPE_tag2member_t asn_MAP_ext3_tag2el_17[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 } /* pusch-ConfigCommon-v1270 */
};
static asn_SEQUENCE_specifics_t asn_SPC_ext3_specs_17 = {
	sizeof(struct RadioResourceConfigCommonSIB__ext3),
	offsetof(struct RadioResourceConfigCommonSIB__ext3, _asn_ctx),
	asn_MAP_ext3_tag2el_17,
	1,	/* Count of tags in the map */
	asn_MAP_ext3_oms_17,	/* Optional members */
	1, 0,	/* Root/Additions */
	-1,	/* First extension addition */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_ext3_17 = {
	"ext3",
	"ext3",
	&asn_OP_SEQUENCE,
	asn_DEF_ext3_tags_17,
	sizeof(asn_DEF_ext3_tags_17)
		/sizeof(asn_DEF_ext3_tags_17[0]) - 1, /* 1 */
	asn_DEF_ext3_tags_17,	/* Same as above */
	sizeof(asn_DEF_ext3_tags_17)
		/sizeof(asn_DEF_ext3_tags_17[0]), /* 2 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_ext3_17,
	1,	/* Elements count */
	&asn_SPC_ext3_specs_17	/* Additional specs */
};
/*
 * Generated by asn1c -- hand edits are lost on regeneration.
 * Support tables and type descriptor for the `ext4` extension group
 * (Rel-13 additions): seven OPTIONAL members, carried under context
 * tag [13] in the parent SEQUENCE.  The first field of each member
 * entry (7, 6, ... 1) is the countdown of remaining optional members,
 * as emitted by asn1c.
 */
static asn_TYPE_member_t asn_MBR_ext4_19[] = {
	{ ATF_POINTER, 7, offsetof(struct RadioResourceConfigCommonSIB__ext4, bcch_Config_v1310),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_BCCH_Config_v1310,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"bcch-Config-v1310"
		},
	{ ATF_POINTER, 6, offsetof(struct RadioResourceConfigCommonSIB__ext4, pcch_Config_v1310),
		(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PCCH_Config_v1310,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"pcch-Config-v1310"
		},
	{ ATF_POINTER, 5, offsetof(struct RadioResourceConfigCommonSIB__ext4, freqHoppingParameters_r13),
		(ASN_TAG_CLASS_CONTEXT | (2 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_FreqHoppingParameters_r13,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"freqHoppingParameters-r13"
		},
	{ ATF_POINTER, 4, offsetof(struct RadioResourceConfigCommonSIB__ext4, pdsch_ConfigCommon_v1310),
		(ASN_TAG_CLASS_CONTEXT | (3 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PDSCH_ConfigCommon_v1310,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"pdsch-ConfigCommon-v1310"
		},
	{ ATF_POINTER, 3, offsetof(struct RadioResourceConfigCommonSIB__ext4, pusch_ConfigCommon_v1310),
		(ASN_TAG_CLASS_CONTEXT | (4 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PUSCH_ConfigCommon_v1310,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"pusch-ConfigCommon-v1310"
		},
	{ ATF_POINTER, 2, offsetof(struct RadioResourceConfigCommonSIB__ext4, prach_ConfigCommon_v1310),
		(ASN_TAG_CLASS_CONTEXT | (5 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PRACH_ConfigSIB_v1310,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"prach-ConfigCommon-v1310"
		},
	{ ATF_POINTER, 1, offsetof(struct RadioResourceConfigCommonSIB__ext4, pucch_ConfigCommon_v1310),
		(ASN_TAG_CLASS_CONTEXT | (6 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PUCCH_ConfigCommon_v1310,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"pucch-ConfigCommon-v1310"
		},
};
/* Indices (into asn_MBR_ext4_19) of the OPTIONAL members -- all seven. */
static const int asn_MAP_ext4_oms_19[] = { 0, 1, 2, 3, 4, 5, 6 };
/* Outer tags: context [13] from the parent, then UNIVERSAL 16 (SEQUENCE). */
static const ber_tlv_tag_t asn_DEF_ext4_tags_19[] = {
	(ASN_TAG_CLASS_CONTEXT | (13 << 2)),
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* Tag-to-member map consulted by the BER/DER decoder. */
static const asn_TYPE_tag2member_t asn_MAP_ext4_tag2el_19[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* bcch-Config-v1310 */
    { (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 }, /* pcch-Config-v1310 */
    { (ASN_TAG_CLASS_CONTEXT | (2 << 2)), 2, 0, 0 }, /* freqHoppingParameters-r13 */
    { (ASN_TAG_CLASS_CONTEXT | (3 << 2)), 3, 0, 0 }, /* pdsch-ConfigCommon-v1310 */
    { (ASN_TAG_CLASS_CONTEXT | (4 << 2)), 4, 0, 0 }, /* pusch-ConfigCommon-v1310 */
    { (ASN_TAG_CLASS_CONTEXT | (5 << 2)), 5, 0, 0 }, /* prach-ConfigCommon-v1310 */
    { (ASN_TAG_CLASS_CONTEXT | (6 << 2)), 6, 0, 0 } /* pucch-ConfigCommon-v1310 */
};
static asn_SEQUENCE_specifics_t asn_SPC_ext4_specs_19 = {
	sizeof(struct RadioResourceConfigCommonSIB__ext4),
	offsetof(struct RadioResourceConfigCommonSIB__ext4, _asn_ctx),
	asn_MAP_ext4_tag2el_19,
	7,	/* Count of tags in the map */
	asn_MAP_ext4_oms_19,	/* Optional members */
	7, 0,	/* Root/Additions */
	-1,	/* First extension addition */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_ext4_19 = {
	"ext4",
	"ext4",
	&asn_OP_SEQUENCE,
	asn_DEF_ext4_tags_19,
	sizeof(asn_DEF_ext4_tags_19)
		/sizeof(asn_DEF_ext4_tags_19[0]) - 1, /* 1 */
	asn_DEF_ext4_tags_19,	/* Same as above */
	sizeof(asn_DEF_ext4_tags_19)
		/sizeof(asn_DEF_ext4_tags_19[0]), /* 2 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_ext4_19,
	7,	/* Elements count */
	&asn_SPC_ext4_specs_19	/* Additional specs */
};
/*
 * Generated by asn1c -- hand edits are lost on regeneration.
 * Support tables and type descriptor for the `ext5` extension group
 * (Rel-14 additions): three OPTIONAL members, carried under context
 * tag [14] in the parent SEQUENCE.
 */
static asn_TYPE_member_t asn_MBR_ext5_27[] = {
	{ ATF_POINTER, 3, offsetof(struct RadioResourceConfigCommonSIB__ext5, highSpeedConfig_r14),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_HighSpeedConfig_r14,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"highSpeedConfig-r14"
		},
	{ ATF_POINTER, 2, offsetof(struct RadioResourceConfigCommonSIB__ext5, prach_Config_v1430),
		(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PRACH_Config_v1430,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"prach-Config-v1430"
		},
	{ ATF_POINTER, 1, offsetof(struct RadioResourceConfigCommonSIB__ext5, pucch_ConfigCommon_v1430),
		(ASN_TAG_CLASS_CONTEXT | (2 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PUCCH_ConfigCommon_v1430,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"pucch-ConfigCommon-v1430"
		},
};
/* Indices (into asn_MBR_ext5_27) of the OPTIONAL members -- all three. */
static const int asn_MAP_ext5_oms_27[] = { 0, 1, 2 };
/* Outer tags: context [14] from the parent, then UNIVERSAL 16 (SEQUENCE). */
static const ber_tlv_tag_t asn_DEF_ext5_tags_27[] = {
	(ASN_TAG_CLASS_CONTEXT | (14 << 2)),
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* Tag-to-member map consulted by the BER/DER decoder. */
static const asn_TYPE_tag2member_t asn_MAP_ext5_tag2el_27[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* highSpeedConfig-r14 */
    { (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 }, /* prach-Config-v1430 */
    { (ASN_TAG_CLASS_CONTEXT | (2 << 2)), 2, 0, 0 } /* pucch-ConfigCommon-v1430 */
};
static asn_SEQUENCE_specifics_t asn_SPC_ext5_specs_27 = {
	sizeof(struct RadioResourceConfigCommonSIB__ext5),
	offsetof(struct RadioResourceConfigCommonSIB__ext5, _asn_ctx),
	asn_MAP_ext5_tag2el_27,
	3,	/* Count of tags in the map */
	asn_MAP_ext5_oms_27,	/* Optional members */
	3, 0,	/* Root/Additions */
	-1,	/* First extension addition */
};
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_ext5_27 = {
	"ext5",
	"ext5",
	&asn_OP_SEQUENCE,
	asn_DEF_ext5_tags_27,
	sizeof(asn_DEF_ext5_tags_27)
		/sizeof(asn_DEF_ext5_tags_27[0]) - 1, /* 1 */
	asn_DEF_ext5_tags_27,	/* Same as above */
	sizeof(asn_DEF_ext5_tags_27)
		/sizeof(asn_DEF_ext5_tags_27[0]), /* 2 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_ext5_27,
	3,	/* Elements count */
	&asn_SPC_ext5_specs_27	/* Additional specs */
};
/*
 * Generated by asn1c -- hand edits are lost on regeneration.
 * Top-level member table and descriptor for RadioResourceConfigCommonSIB:
 * 10 mandatory root members (context tags [0]..[9]) plus 5 OPTIONAL
 * extension-addition groups ext1..ext5 (context tags [10]..[14]).
 * Note: soundingRS-UL-ConfigCommon (a CHOICE) is EXPLICITly tagged (+1);
 * all other members are IMPLICIT (-1).
 */
asn_TYPE_member_t asn_MBR_RadioResourceConfigCommonSIB_1[] = {
	{ ATF_NOFLAGS, 0, offsetof(struct RadioResourceConfigCommonSIB, rach_ConfigCommon),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_RACH_ConfigCommon,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"rach-ConfigCommon"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct RadioResourceConfigCommonSIB, bcch_Config),
		(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_BCCH_Config,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"bcch-Config"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct RadioResourceConfigCommonSIB, pcch_Config),
		(ASN_TAG_CLASS_CONTEXT | (2 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PCCH_Config,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"pcch-Config"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct RadioResourceConfigCommonSIB, prach_Config),
		(ASN_TAG_CLASS_CONTEXT | (3 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PRACH_ConfigSIB,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"prach-Config"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct RadioResourceConfigCommonSIB, pdsch_ConfigCommon),
		(ASN_TAG_CLASS_CONTEXT | (4 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PDSCH_ConfigCommon,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"pdsch-ConfigCommon"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct RadioResourceConfigCommonSIB, pusch_ConfigCommon),
		(ASN_TAG_CLASS_CONTEXT | (5 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PUSCH_ConfigCommon,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"pusch-ConfigCommon"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct RadioResourceConfigCommonSIB, pucch_ConfigCommon),
		(ASN_TAG_CLASS_CONTEXT | (6 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PUCCH_ConfigCommon,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"pucch-ConfigCommon"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct RadioResourceConfigCommonSIB, soundingRS_UL_ConfigCommon),
		(ASN_TAG_CLASS_CONTEXT | (7 << 2)),
		+1,	/* EXPLICIT tag at current level */
		&asn_DEF_SoundingRS_UL_ConfigCommon,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"soundingRS-UL-ConfigCommon"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct RadioResourceConfigCommonSIB, uplinkPowerControlCommon),
		(ASN_TAG_CLASS_CONTEXT | (8 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_UplinkPowerControlCommon,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"uplinkPowerControlCommon"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct RadioResourceConfigCommonSIB, ul_CyclicPrefixLength),
		(ASN_TAG_CLASS_CONTEXT | (9 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_UL_CyclicPrefixLength,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"ul-CyclicPrefixLength"
		},
	{ ATF_POINTER, 5, offsetof(struct RadioResourceConfigCommonSIB, ext1),
		(ASN_TAG_CLASS_CONTEXT | (10 << 2)),
		0,
		&asn_DEF_ext1_13,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"ext1"
		},
	{ ATF_POINTER, 4, offsetof(struct RadioResourceConfigCommonSIB, ext2),
		(ASN_TAG_CLASS_CONTEXT | (11 << 2)),
		0,
		&asn_DEF_ext2_15,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"ext2"
		},
	{ ATF_POINTER, 3, offsetof(struct RadioResourceConfigCommonSIB, ext3),
		(ASN_TAG_CLASS_CONTEXT | (12 << 2)),
		0,
		&asn_DEF_ext3_17,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"ext3"
		},
	{ ATF_POINTER, 2, offsetof(struct RadioResourceConfigCommonSIB, ext4),
		(ASN_TAG_CLASS_CONTEXT | (13 << 2)),
		0,
		&asn_DEF_ext4_19,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"ext4"
		},
	{ ATF_POINTER, 1, offsetof(struct RadioResourceConfigCommonSIB, ext5),
		(ASN_TAG_CLASS_CONTEXT | (14 << 2)),
		0,
		&asn_DEF_ext5_27,
		0,
		{ 0, 0, 0 },
		0, 0, /* No default value */
		"ext5"
		},
};
/* Indices of the OPTIONAL members: the five extension groups ext1..ext5. */
static const int asn_MAP_RadioResourceConfigCommonSIB_oms_1[] = { 10, 11, 12, 13, 14 };
static const ber_tlv_tag_t asn_DEF_RadioResourceConfigCommonSIB_tags_1[] = {
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* Tag-to-member map consulted by the BER/DER decoder. */
static const asn_TYPE_tag2member_t asn_MAP_RadioResourceConfigCommonSIB_tag2el_1[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* rach-ConfigCommon */
    { (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 }, /* bcch-Config */
    { (ASN_TAG_CLASS_CONTEXT | (2 << 2)), 2, 0, 0 }, /* pcch-Config */
    { (ASN_TAG_CLASS_CONTEXT | (3 << 2)), 3, 0, 0 }, /* prach-Config */
    { (ASN_TAG_CLASS_CONTEXT | (4 << 2)), 4, 0, 0 }, /* pdsch-ConfigCommon */
    { (ASN_TAG_CLASS_CONTEXT | (5 << 2)), 5, 0, 0 }, /* pusch-ConfigCommon */
    { (ASN_TAG_CLASS_CONTEXT | (6 << 2)), 6, 0, 0 }, /* pucch-ConfigCommon */
    { (ASN_TAG_CLASS_CONTEXT | (7 << 2)), 7, 0, 0 }, /* soundingRS-UL-ConfigCommon */
    { (ASN_TAG_CLASS_CONTEXT | (8 << 2)), 8, 0, 0 }, /* uplinkPowerControlCommon */
    { (ASN_TAG_CLASS_CONTEXT | (9 << 2)), 9, 0, 0 }, /* ul-CyclicPrefixLength */
    { (ASN_TAG_CLASS_CONTEXT | (10 << 2)), 10, 0, 0 }, /* ext1 */
    { (ASN_TAG_CLASS_CONTEXT | (11 << 2)), 11, 0, 0 }, /* ext2 */
    { (ASN_TAG_CLASS_CONTEXT | (12 << 2)), 12, 0, 0 }, /* ext3 */
    { (ASN_TAG_CLASS_CONTEXT | (13 << 2)), 13, 0, 0 }, /* ext4 */
    { (ASN_TAG_CLASS_CONTEXT | (14 << 2)), 14, 0, 0 } /* ext5 */
};
asn_SEQUENCE_specifics_t asn_SPC_RadioResourceConfigCommonSIB_specs_1 = {
	sizeof(struct RadioResourceConfigCommonSIB),
	offsetof(struct RadioResourceConfigCommonSIB, _asn_ctx),
	asn_MAP_RadioResourceConfigCommonSIB_tag2el_1,
	15,	/* Count of tags in the map */
	asn_MAP_RadioResourceConfigCommonSIB_oms_1,	/* Optional members */
	0, 5,	/* Root/Additions */
	10,	/* First extension addition */
};
asn_TYPE_descriptor_t asn_DEF_RadioResourceConfigCommonSIB = {
	"RadioResourceConfigCommonSIB",
	"RadioResourceConfigCommonSIB",
	&asn_OP_SEQUENCE,
	asn_DEF_RadioResourceConfigCommonSIB_tags_1,
	sizeof(asn_DEF_RadioResourceConfigCommonSIB_tags_1)
		/sizeof(asn_DEF_RadioResourceConfigCommonSIB_tags_1[0]), /* 1 */
	asn_DEF_RadioResourceConfigCommonSIB_tags_1,	/* Same as above */
	sizeof(asn_DEF_RadioResourceConfigCommonSIB_tags_1)
		/sizeof(asn_DEF_RadioResourceConfigCommonSIB_tags_1[0]), /* 1 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_RadioResourceConfigCommonSIB_1,
	15,	/* Elements count */
	&asn_SPC_RadioResourceConfigCommonSIB_specs_1	/* Additional specs */
};
|
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_UTIL_SPARSE_DIM_COMPARATOR_H_
#define TENSORFLOW_CORE_UTIL_SPARSE_DIM_COMPARATOR_H_
#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor"
#include "tensorflow/core/framework/bounds_check.h"
#include "tensorflow/core/lib/gtl/array_slice.h"
#include "tensorflow/core/platform/logging.h"
#include "tensorflow/core/platform/types.h"
namespace tensorflow {
namespace sparse {
/////////////////
// DimComparator
/////////////////
//
// Helper class, mainly used by the IndexSortOrder. This comparator
// can be passed to e.g. std::sort, or any other sorter, to sort two
// rows of an index matrix according to the dimension(s) of interest.
// The dimensions to sort by are passed to the constructor as "order".
//
// Example: if given index matrix IX, two rows ai and bi, and order = {2,1}.
// operator() compares
// IX(ai,2) < IX(bi,2).
// If IX(ai,2) == IX(bi,2), it compares
// IX(ai,1) < IX(bi,1).
//
// This can be used to sort a vector of row indices into IX according to
// the values in IX in particular columns (dimensions) of interest.
class DimComparator {
 public:
  typedef typename gtl::ArraySlice<int64_t> VarDimArray;

  // ix:    index matrix -- rows are sparse entries, columns are dimensions.
  // order: dimensions to compare by, most significant first.
  // shape: dense shape of the sparse tensor; only its rank is used here,
  //        for bounds validation of `order`.
  DimComparator(const TTypes<int64_t>::Matrix& ix, const VarDimArray& order,
                const VarDimArray& shape)
      : ix_(ix), order_(order), dims_(shape.size()) {
    DCHECK_GT(order.size(), size_t{0}) << "Must order using at least one index";
    DCHECK_LE(order.size(), shape.size()) << "Can only sort up to dims";
    for (size_t d = 0; d < order.size(); ++d) {
      DCHECK_GE(order[d], 0);
      DCHECK_LT(order[d], shape.size());
    }
  }

  // Returns true iff row i of ix_ sorts strictly before row j under the
  // lexicographic order induced by order_.
  // NOTE(review): the loop bound is dims_ (= shape.size()), but it indexes
  // order_, which is only validated to have size <= shape.size(); this
  // appears to assume order.size() == shape.size() in practice -- confirm
  // callers before relying on a shorter `order`.
  inline bool operator()(const int64_t i, const int64_t j) const {
    for (int di = 0; di < dims_; ++di) {
      const int64_t d = order_[di];
      if (ix_(i, d) < ix_(j, d)) return true;
      if (ix_(i, d) > ix_(j, d)) return false;
    }
    return false;
  }

  // Compares two indices taken from corresponding index matrices, using the
  // standard, row-major (or lexicographic) order. Useful for cases that need
  // to distinguish between all three orderings (<, ==, >).
  // Returns -1, 0, or 1 as row a_row of a_idx is <, ==, or > row b_row of
  // b_idx over the first `dims` columns.
  inline static int cmp(const TTypes<int64_t>::ConstMatrix& a_idx,
                        const TTypes<int64_t>::ConstMatrix& b_idx,
                        const int64_t a_row, const int64_t b_row,
                        const int dims) {
    for (int d = 0; d < dims; ++d) {
      const int64_t a = a_idx(a_row, d);
      const int64_t b = b_idx(b_row, d);
      if (a < b) {
        return -1;
      } else if (a > b) {
        return 1;
      }
    }
    return 0;
  }

 protected:
  const TTypes<int64_t>::Matrix ix_;
  const VarDimArray order_;
  const int dims_;
  // NOTE(review): ix_order_ is never initialized or read anywhere in this
  // header -- confirm no external users before removing.
  const std::vector<int64_t>* ix_order_;
};
// Variant of DimComparator whose comparison loop runs over a compile-time
// number of ordered dimensions (ORDER_DIM), letting the compiler unroll it.
// Behaves exactly like DimComparator::operator() with order.size() ==
// ORDER_DIM.
template <int ORDER_DIM>
class FixedDimComparator : DimComparator {
 public:
  FixedDimComparator(const TTypes<int64_t>::Matrix& ix,
                     const VarDimArray& order, const VarDimArray& shape)
      : DimComparator(ix, order, shape) {
    DCHECK_EQ(order.size(), ORDER_DIM);
  }

  // Lexicographic "row i < row j" over the ORDER_DIM ordered dimensions.
  inline bool operator()(const int64_t i, const int64_t j) const {
    for (int di = 0; di < ORDER_DIM; ++di) {
      const int64_t d = order_[di];
      if (ix_(i, d) < ix_(j, d)) return true;
      if (ix_(i, d) > ix_(j, d)) return false;
    }
    return false;
  }
};
} // namespace sparse
} // namespace tensorflow
#endif // TENSORFLOW_CORE_UTIL_SPARSE_DIM_COMPARATOR_H_
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._inputs import *
__all__ = ['RoleDefinitionArgs', 'RoleDefinition']
@pulumi.input_type
class RoleDefinitionArgs:
    """The set of arguments for constructing a RoleDefinition resource.

    Auto-generated by the Pulumi SDK Generator -- do not edit by hand.
    """
    def __init__(__self__, *,
                 scope: pulumi.Input[str],
                 assignable_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 permissions: Optional[pulumi.Input[Sequence[pulumi.Input['PermissionArgs']]]] = None,
                 role_definition_id: Optional[pulumi.Input[str]] = None,
                 role_name: Optional[pulumi.Input[str]] = None,
                 role_type: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a RoleDefinition resource.
        :param pulumi.Input[str] scope: The scope of the role definition.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] assignable_scopes: Role definition assignable scopes.
        :param pulumi.Input[str] description: The role definition description.
        :param pulumi.Input[Sequence[pulumi.Input['PermissionArgs']]] permissions: Role definition permissions.
        :param pulumi.Input[str] role_definition_id: The ID of the role definition.
        :param pulumi.Input[str] role_name: The role name.
        :param pulumi.Input[str] role_type: The role type.
        """
        # `scope` is the only required argument; optional arguments are set
        # only when provided so absent keys stay absent from the input bag.
        pulumi.set(__self__, "scope", scope)
        if assignable_scopes is not None:
            pulumi.set(__self__, "assignable_scopes", assignable_scopes)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if permissions is not None:
            pulumi.set(__self__, "permissions", permissions)
        if role_definition_id is not None:
            pulumi.set(__self__, "role_definition_id", role_definition_id)
        if role_name is not None:
            pulumi.set(__self__, "role_name", role_name)
        if role_type is not None:
            pulumi.set(__self__, "role_type", role_type)

    @property
    @pulumi.getter
    def scope(self) -> pulumi.Input[str]:
        """
        The scope of the role definition.
        """
        return pulumi.get(self, "scope")

    @scope.setter
    def scope(self, value: pulumi.Input[str]):
        pulumi.set(self, "scope", value)

    @property
    @pulumi.getter(name="assignableScopes")
    def assignable_scopes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Role definition assignable scopes.
        """
        return pulumi.get(self, "assignable_scopes")

    @assignable_scopes.setter
    def assignable_scopes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "assignable_scopes", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The role definition description.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def permissions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PermissionArgs']]]]:
        """
        Role definition permissions.
        """
        return pulumi.get(self, "permissions")

    @permissions.setter
    def permissions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PermissionArgs']]]]):
        pulumi.set(self, "permissions", value)

    @property
    @pulumi.getter(name="roleDefinitionId")
    def role_definition_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the role definition.
        """
        return pulumi.get(self, "role_definition_id")

    @role_definition_id.setter
    def role_definition_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role_definition_id", value)

    @property
    @pulumi.getter(name="roleName")
    def role_name(self) -> Optional[pulumi.Input[str]]:
        """
        The role name.
        """
        return pulumi.get(self, "role_name")

    @role_name.setter
    def role_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role_name", value)

    @property
    @pulumi.getter(name="roleType")
    def role_type(self) -> Optional[pulumi.Input[str]]:
        """
        The role type.
        """
        return pulumi.get(self, "role_type")

    @role_type.setter
    def role_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role_type", value)
class RoleDefinition(pulumi.CustomResource):
    """Role definition.

    Auto-generated by the Pulumi SDK Generator -- do not edit by hand.
    """
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 assignable_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 permissions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PermissionArgs']]]]] = None,
                 role_definition_id: Optional[pulumi.Input[str]] = None,
                 role_name: Optional[pulumi.Input[str]] = None,
                 role_type: Optional[pulumi.Input[str]] = None,
                 scope: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Role definition.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] assignable_scopes: Role definition assignable scopes.
        :param pulumi.Input[str] description: The role definition description.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PermissionArgs']]]] permissions: Role definition permissions.
        :param pulumi.Input[str] role_definition_id: The ID of the role definition.
        :param pulumi.Input[str] role_name: The role name.
        :param pulumi.Input[str] role_type: The role type.
        :param pulumi.Input[str] scope: The scope of the role definition.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: RoleDefinitionArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Role definition.
        :param str resource_name: The name of the resource.
        :param RoleDefinitionArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either an args object or
        # keyword properties, resolved by the shared utility helper.
        resource_args, opts = _utilities.get_resource_args_opts(RoleDefinitionArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 assignable_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 permissions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PermissionArgs']]]]] = None,
                 role_definition_id: Optional[pulumi.Input[str]] = None,
                 role_name: Optional[pulumi.Input[str]] = None,
                 role_type: Optional[pulumi.Input[str]] = None,
                 scope: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # __props__ may only be supplied when adopting an existing resource
        # (i.e. together with opts.id); otherwise it is built from the inputs.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = RoleDefinitionArgs.__new__(RoleDefinitionArgs)
            __props__.__dict__["assignable_scopes"] = assignable_scopes
            __props__.__dict__["description"] = description
            __props__.__dict__["permissions"] = permissions
            __props__.__dict__["role_definition_id"] = role_definition_id
            __props__.__dict__["role_name"] = role_name
            __props__.__dict__["role_type"] = role_type
            # `scope` is required when creating; only a lookup via opts.urn
            # may omit it.
            if scope is None and not opts.urn:
                raise TypeError("Missing required property 'scope'")
            __props__.__dict__["scope"] = scope
            # Output-only properties start unresolved.
            __props__.__dict__["name"] = None
            __props__.__dict__["type"] = None
        # Aliases keep state continuity with older/other provider type tokens.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:authorization/v20150701:RoleDefinition"), pulumi.Alias(type_="azure-native:authorization:RoleDefinition"), pulumi.Alias(type_="azure-nextgen:authorization:RoleDefinition"), pulumi.Alias(type_="azure-native:authorization/v20180101preview:RoleDefinition"), pulumi.Alias(type_="azure-nextgen:authorization/v20180101preview:RoleDefinition")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(RoleDefinition, __self__).__init__(
            'azure-native:authorization/v20150701:RoleDefinition',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'RoleDefinition':
        """
        Get an existing RoleDefinition resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # All properties start unresolved; the engine fills them in on read.
        __props__ = RoleDefinitionArgs.__new__(RoleDefinitionArgs)
        __props__.__dict__["assignable_scopes"] = None
        __props__.__dict__["description"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["permissions"] = None
        __props__.__dict__["role_name"] = None
        __props__.__dict__["role_type"] = None
        __props__.__dict__["type"] = None
        return RoleDefinition(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="assignableScopes")
    def assignable_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        Role definition assignable scopes.
        """
        return pulumi.get(self, "assignable_scopes")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        The role definition description.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The role definition name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def permissions(self) -> pulumi.Output[Optional[Sequence['outputs.PermissionResponse']]]:
        """
        Role definition permissions.
        """
        return pulumi.get(self, "permissions")

    @property
    @pulumi.getter(name="roleName")
    def role_name(self) -> pulumi.Output[Optional[str]]:
        """
        The role name.
        """
        return pulumi.get(self, "role_name")

    @property
    @pulumi.getter(name="roleType")
    def role_type(self) -> pulumi.Output[Optional[str]]:
        """
        The role type.
        """
        return pulumi.get(self, "role_type")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The role definition type.
        """
        return pulumi.get(self, "type")
|
/* eslint-disable import/no-extraneous-dependencies */
const exec = require('exec-sh');
const inquirer = require('inquirer');
const fs = require('fs');
const path = require('path');
const pkg = require('../package.json');
const pkgCore = require('../packages/core/package.json');
const pkgReact = require('../packages/react/package.json');
const pkgVue = require('../packages/vue/package.json');
const pkgSvelte = require('../packages/svelte/package.json');
/**
 * Interactive release script: prompts for a version, stamps it into every
 * package manifest, builds all packages for production, publishes them to
 * npm under the appropriate dist-tag, rebuilds the kitchen-sink demos, and
 * commits/tags/pushes the release.
 */
async function release() {
  const options = await inquirer.prompt([
    {
      type: 'input',
      name: 'version',
      message: 'Version:',
      default: pkgCore.version,
    },
    {
      type: 'list',
      name: 'alpha',
      message: 'Alpha?',
      when: (opts) => opts.version.indexOf('alpha') >= 0,
      choices: [
        { name: 'YES', value: true },
        { name: 'NO', value: false },
      ],
    },
    {
      type: 'list',
      name: 'beta',
      message: 'Beta?',
      when: (opts) => opts.version.indexOf('beta') >= 0,
      choices: [
        { name: 'YES', value: true },
        { name: 'NO', value: false },
      ],
    },
  ]);

  // Package manifests keyed by their directory name under ./packages/.
  // Insertion order matters: it preserves the original build/publish order.
  const packages = {
    core: pkgCore,
    react: pkgReact,
    vue: pkgVue,
    svelte: pkgSvelte,
  };

  // Stamp the chosen version into every manifest.
  Object.values(packages).forEach((p) => {
    p.version = options.version;
  });

  // The core package inherits the repo root's runtime dependencies.
  pkgCore.dependencies = pkg.dependencies;

  // Write the updated manifests back to disk.
  Object.entries(packages).forEach(([dir, p]) => {
    fs.writeFileSync(
      path.resolve(__dirname, `../packages/${dir}/package.json`),
      JSON.stringify(p, null, 2),
    );
  });

  await exec.promise('git pull');
  await exec.promise('npm i');

  // Production builds, one per package.
  for (const dir of Object.keys(packages)) {
    await exec.promise(`npm run build-${dir}:prod`);
  }

  // Publish: betas under the "beta" dist-tag, alphas under "next",
  // everything else under the default tag ("latest").
  const tag = options.beta ? ' --tag beta' : options.alpha ? ' --tag next' : '';
  for (const dir of Object.keys(packages)) {
    await exec.promise(`cd ./packages/${dir} && npm publish${tag}`);
  }

  // Build Production Kitchen Sink (original order kept: core, react, svelte, vue).
  await exec.promise('npm run build-ks:core');
  await exec.promise('npm run build-ks:react');
  await exec.promise('npm run build-ks:svelte');
  await exec.promise('npm run build-ks:vue');

  // Git commit, tag & push.
  await exec.promise('git add .');
  await exec.promise(`git commit -m "${pkgCore.version} release"`);
  await exec.promise('git push');
  await exec.promise(`git tag v${pkgCore.version}`);
  await exec.promise('git push origin --tags');
}

// BUG FIX: the original invoked release() bare, so a failing step produced an
// unhandled promise rejection and the process could exit with status 0.
// Surface the error and exit non-zero instead.
release().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
var accordion1 = document.getElementsByClassName("accordion");
var accordion = document.getElementsByClassName("accordion2");
var i; // kept: the original declared this global; other code may reference it
// Collapse every header/panel in the given collection.
function closeGroup(headers) {
  for (var j = 0; j < headers.length; j++) {
    headers[j].classList.remove("active");
    headers[j].nextElementSibling.style.maxHeight = null;
  }
}
// BUG FIX: the original file declared two global functions both named
// "closeAll"; because function declarations hoist, the second definition
// (which iterated the "accordion2" group) replaced the first, so clicking
// an ".accordion" header collapsed the WRONG group. Each group now closes
// itself via a closure over its own header collection.
function wireAccordionGroup(headers) {
  for (var k = 0; k < headers.length; k++) {
    headers[k].onclick = function () {
      if (!this.classList.contains('active')) {
        closeGroup(headers);
      }
      this.classList.toggle("active");
      var panel = this.nextElementSibling;
      if (panel.style.maxHeight) {
        panel.style.maxHeight = null;
      } else {
        panel.style.maxHeight = panel.scrollHeight + "px";
      }
    };
  }
}
wireAccordionGroup(accordion1);
wireAccordionGroup(accordion);
// Kept for backward compatibility with any external callers of the old
// global; closes both accordion groups.
function closeAll() {
  closeGroup(accordion1);
  closeGroup(accordion);
}
// Programmatically click a tab/accordion trigger by element id.
function triggerClickById(elementId) {
  document.getElementById(elementId).click();
}
function myFunction711() {
  triggerClickById("One");
}
function myFunction1() {
  triggerClickById("Two");
}
function myFunction2() {
  triggerClickById("Three");
}
function myFunction3() {
  triggerClickById("Four");
}
// Collapse every ".panel" element and deactivate its preceding header.
function collapseall() {
  var panels = document.getElementsByClassName("panel");
  for (var idx = 0; idx < panels.length; idx++) {
    var panel = panels[idx];
    panel.style.maxHeight = null;
    panel.previousElementSibling.classList.remove('active');
  }
}
// Toggle the responsive (mobile) state of the navigation bar.
function NavBar() {
  var nav = document.getElementById("nav-bar");
  // Exactly "nav-bar" means the menu is collapsed; anything else resets it.
  nav.className = nav.className === "nav-bar" ? "nav-bar responsive" : "nav-bar";
}
|
#pragma once
#include "RenderCommand.h"
#include "OrthographicCamera.h"
#include "Shader.h"
namespace Hazel {
	// Static-only renderer facade. NOTE(review): declarations only are
	// visible here -- per-scene camera data presumably flows from
	// BeginScene into s_SceneData and is consumed by Submit; confirm in
	// Renderer.cpp.
	class Renderer
	{
	public:
		// One-time renderer initialization.
		static void Init();
		// Marks the start of a scene rendered with the given camera.
		static void BeginScene(OrthographicCamera& camera);
		// Marks the end of the current scene.
		static void EndScene();
		// Issues a draw of vertexArray with shader; transform defaults to identity.
		static void Submit(const std::shared_ptr<Shader>& shader, const std::shared_ptr<VertexArray>& vertexArray, const glm::mat4& transform = glm::mat4(1.0f));
		// Active rendering backend, forwarded from RendererAPI.
		inline static RendererAPI::API GetAPI() { return RendererAPI::GetAPI(); }
	private:
		// Per-scene state shared by all Submit calls.
		struct SceneData
		{
			glm::mat4 ViewProjectionMatrix;
		};
		static SceneData* s_SceneData;
	};
}
|
import React from 'react';
import { Text } from '@sitecore-jss/sitecore-jss-react';
import Banner from '../Banner/index';
const Hero = ({fields}) => (
<>
{console.log(fields)}
<header className="defaultHero">
<div className="banner">
<Banner fields={{ title: fields.title.value,
subtitle: fields.subtitle.value,
buttonurl: fields.buttonurl.value,
buttontext: fields.buttontext.value
}}>
</Banner>
</div>
</header>
</>
);
export default Hero;
|
from docassemble.base.functions import word, currency_symbol, url_action, comma_and_list, server
from docassemble.base.filter import markdown_to_html, get_audio_urls, get_video_urls, audio_control, video_control, noquote, to_text, my_escape
from docassemble.base.parse import Question, debug
from docassemble.base.logger import logmessage
import urllib
import sys
import os
import re
import json
import random
import sys
import codecs
def tracker_tag(status):
    """Build the hidden <input> tags common to every question form.

    Emits the CSRF token, the question name (when set), the state
    tracker number, and -- when location tracking is enabled in
    status.extras -- an empty _track_location field for the browser
    to fill in.
    """
    parts = [' <input type="hidden" name="csrf_token" value="' + server.generate_csrf() + '"/>\n']
    if status.question.name:
        parts.append(' <input type="hidden" name="_question_name" value="' + status.question.name + '"/>\n')
    parts.append(' <input type="hidden" name="_tracker" value="' + str(status.tracker) + '"/>\n')
    if 'track_location' in status.extras and status.extras['track_location']:
        parts.append(' <input type="hidden" id="_track_location" name="_track_location" value=""/>\n')
    return ''.join(parts)
def datatype_tag(datatypes):
    """Return a hidden _datatypes input encoding the mapping, or '' if empty."""
    if not len(datatypes):
        return ''
    return ' <input type="hidden" name="_datatypes" value=' + myb64doublequote(json.dumps(datatypes)) + '/>\n'
def varname_tag(varnames):
    """Return a hidden _varnames input encoding the mapping, or '' if empty."""
    if not len(varnames):
        return ''
    return ' <input type="hidden" name="_varnames" value=' + myb64doublequote(json.dumps(varnames)) + '/>\n'
def icon_html(status, name, width_value=1.0, width_units='em'):
    """Return an <img> tag for the named interview icon, or '' if undefined.

    Records the image's attribution on status.attributions when present.
    width_value/width_units set the CSS width; for SVG files with known
    dimensions the height is computed to preserve the aspect ratio.
    """
    the_image = status.question.interview.images.get(name, None)
    # FIX: bail out before touching attributes. The original dereferenced
    # the_image.attribution first, so an unknown icon name raised
    # AttributeError instead of returning '' as the (dead) guard intended.
    if the_image is None:
        return ''
    if the_image.attribution is not None:
        status.attributions.add(the_image.attribution)
    url = server.url_finder(str(the_image.package) + ':' + str(the_image.filename))
    sizing = 'width:' + str(width_value) + str(width_units) + ';'
    filename = server.file_finder(str(the_image.package) + ':' + str(the_image.filename))
    if 'extension' in filename and filename['extension'] == 'svg':
        if filename['width'] and filename['height']:
            # NOTE(review): on Python 2 height/width truncates if both are
            # ints -- confirm the stored dimension types upstream.
            sizing += 'height:' + str(width_value * (filename['height']/filename['width'])) + str(width_units) + ';'
        else:
            sizing += 'height:auto;'
    return '<img class="daicon" src="' + url + '" style="' + sizing + '"/>'
# def signature_html(status, debug, root, validation_rules):
# if (status.continueLabel):
# continue_label = markdown_to_html(status.continueLabel, trim=True)
# else:
# continue_label = word('Done')
# output = ' <div class="sigpage" id="sigpage">\n <div class="sigshowsmallblock sigheader" id="sigheader">\n <div class="siginnerheader">\n <a id="new" class="signavbtn signav-left">' + word('Clear') + '</a>\n <a id="save" class="signavbtn signav-right">' + continue_label + '</a>\n <div class="sigtitle">'
# if status.questionText:
# output += markdown_to_html(status.questionText, trim=True)
# else:
# output += word('Sign Your Name')
# output += '</div>\n </div>\n </div>\n <div class="sigtoppart" id="sigtoppart">\n <div id="errormess" class="sigerrormessage signotshowing">' + word("You must sign your name to continue.") + '</div>\n '
# output += '\n </div>'
# if status.subquestionText:
# output += '\n <div class="sigmidpart">\n ' + markdown_to_html(status.subquestionText) + '\n </div>'
# output += '\n <div id="sigcontent"><p style="text-align:center;border-style:solid;border-width:1px">' + word('Loading. Please wait . . . ') + '</p></div>\n <div class="sigbottompart" id="sigbottompart">\n '
# if (status.underText):
# output += markdown_to_html(status.underText, trim=True)
# output += "\n </div>"
# output += """
# <div class="form-actions sighidesmall sigbuttons">
# <a id="savetwo" class="btn btn-primary btn-lg">""" + continue_label + """</a>
# <a id="savetwo" class="btn btn-warning btn-lg">""" + word('Clear') + """</a>
# </div>
# """
# output += ' </div>\n <form action="' + root + '" id="dasigform" method="POST"><input type="hidden" name="_save_as" value="' + escape_id(status.question.fields[0].saveas) + '"/><input type="hidden" id="_the_image" name="_the_image" value=""/><input type="hidden" id="_success" name="_success" value="0"/>'
# output += tracker_tag(status)
# output += '</form>\n'
# add_validation(status.extra_scripts, validation_rules)
# return output
def get_choices_with_abb(status, field, terms=None, links=None):
    """Return (data, choice_list) for rendering a choice field over SMS.

    choice_list is the raw list from get_choices(); data maps short typed
    abbreviations to choice labels (built by try_to_abbreviate).  terms
    and links are mutated in place, collecting glossary terms and
    hyperlinks encountered while flattening the Markdown labels to text.
    """
    if terms is None:
        terms = dict()
    if links is None:
        links = list()
    choice_list = get_choices(status, field)
    data = dict()
    # Retry loop: try_to_abbreviate keeps persistent state in data
    # (notably data['size'], the abbreviation length). On a key collision
    # it bumps the size and returns False; the per-attempt keys below are
    # then reset and the whole choice list is re-abbreviated with longer
    # keys until every choice gets a unique abbreviation.
    while True:
        success = True
        data['keys'] = list()
        data['abb'] = dict()
        data['abblower'] = dict()
        data['label'] = list()
        for choice in choice_list:
            flabel = to_text(markdown_to_html(choice[0], trim=False, status=status, strip_newlines=True), terms, links, status).strip()
            success = try_to_abbreviate(choice[0], flabel, data, len(choice_list))
            if not success:
                break
        if success:
            break
    return data, choice_list
def get_choices(interview_status, field):
    """Return a list of [label, variable, value] triples for a choice field.

    The middle element is the (base64-unquoted) variable name the answer
    will be saved under; for fields without a saveas, answers are stored
    under _internal["answers"][question name] by choice index.
    """
    question = interview_status.question
    choice_list = list()
    if hasattr(field, 'saveas') and field.saveas is not None:
        saveas = myb64unquote(field.saveas)
        if interview_status.question.question_type == "multiple_choice":
            if hasattr(field, 'has_code') and field.has_code:
                # Choices were computed by code: (value, label) pairs.
                pairlist = list(interview_status.selectcompute[field.number])
                for pair in pairlist:
                    choice_list.append([pair[1], saveas, pair[0]])
            else:
                # Manually-listed choices; 'image' keys are decoration only.
                for choice in field.choices:
                    for key in choice:
                        if key == 'image':
                            continue
                        choice_list.append([key, saveas, choice[key]])
        elif hasattr(field, 'choicetype'):
            if field.choicetype == 'compute':
                pairlist = list(interview_status.selectcompute[field.number])
            elif field.datatype in ['checkboxes', 'object_checkboxes'] and field.choicetype != 'manual':
                pairlist = list()
            else:
                pairlist = list(field.selections)
            #if field.datatype in ['object', 'radio', 'object_radio', 'checkboxes', 'object_checkboxes']:
            if field.datatype in ['object_checkboxes']:
                for pair in pairlist:
                    choice_list.append([pair[1], saveas, from_safeid(pair[0])])
            elif field.datatype in ['object', 'object_radio']:
                for pair in pairlist:
                    choice_list.append([pair[1], saveas, from_safeid(pair[0])])
            elif field.datatype in ['checkboxes']:
                # Each checkbox saves True into its own dict entry, keyed
                # by the repr of the option value.
                for pair in pairlist:
                    choice_list.append([pair[1], saveas + "[" + repr(pair[0]) + "]", True])
            else:
                for pair in pairlist:
                    choice_list.append([pair[1], saveas, pair[0]])
    else:
        # No saveas: answers are recorded by index under the question name.
        indexno = 0
        for choice in field.choices:
            for key in choice:
                if key == 'image':
                    continue
                choice_list.append([key, '_internal["answers"][' + repr(question.name) + ']', indexno])
                indexno += 1
    return choice_list
# Words reserved by SMS carriers/Twilio (or by this app) that must never
# be handed out as answer abbreviations.
sms_bad_words = ['cancel', 'end', 'help', 'info', 'quit', 'stop', 'stopall', 'unsubscribe', 'back', 'question', 'exit']
def try_to_abbreviate(label, flabel, data, length):
    """Try to assign a unique typed abbreviation for one choice.

    Scans flabel (the plain-text label) for a substring of data['size']
    characters that is alphanumeric, not yet taken, and not a reserved
    SMS word.  On success the abbreviation is recorded in data and True
    is returned; data['label'] gets the label with the key bracketed,
    e.g. "[H]ello".  With few choices (length <= 8) a collision bumps
    data['size'] and returns False so the caller can retry everything
    with longer keys; with many choices the window instead grows from
    the start of the label.
    """
    data.setdefault('size', 1)
    data.setdefault('keys', list())
    data.setdefault('abb', dict())
    data.setdefault('abblower', dict())
    data.setdefault('label', list())
    method = 'fromstart' if length > 8 else 'float'
    start = 0
    end = start + data['size']
    candidate = flabel
    while end <= len(flabel):
        candidate = flabel[start:end]
        if method == 'float' and re.search(r'[^A-Za-z0-9]', candidate):
            # Slide the window past non-alphanumeric characters.
            start += 1
            end = start + data['size']
            continue
        if method == 'fromstart' and re.search(r'[^A-Za-z0-9]$', candidate):
            end += 1
            continue
        if candidate.lower() in data['abblower'] or candidate.lower() in sms_bad_words:
            if method == 'float':
                data['size'] += 1
                return False
            end += 1
            continue
        break
    data['abb'][candidate] = label
    data['abblower'][candidate.lower()] = label
    data['keys'].append(candidate)
    data['label'].append(flabel[0:start] + "[" + candidate + ']' + flabel[end:])
    return True
def as_sms(status, links=None, menu_items=None):
    """Render the current interview question as SMS-sized plain text.

    Returns dict(question=..., help=..., next=...): the question text
    (including typed-answer prompts), the help text or None, and the
    variable name of the next field, if any.  links and menu_items are
    mutated in place -- after the call they retain only entries whose
    href contains an action (the ones the user can trigger by typing
    link1/menu1 etc.).
    """
    if links is None:
        links = list()
    if menu_items is None:
        menu_items = list()
    terms = dict()
    #logmessage("length of links is " + str(len(links)))
    links_len = 0
    menu_items_len = 0
    next_variable = None
    qoutput = ''
    if status.question.question_type == 'signature':
        qoutput += word('Sign Your Name') + "\n"
    #logmessage("The question is " + status.questionText)
    qoutput += to_text(markdown_to_html(status.questionText, trim=False, status=status, strip_newlines=True), terms, links, status)
    if status.subquestionText:
        qoutput += "\n" + to_text(markdown_to_html(status.subquestionText, status=status), terms, links, status)
    #logmessage("output is: " + repr(qoutput))
    # Placeholder marker; later re.sub calls splice link/help hints here.
    qoutput += "XXXXMESSAGE_AREAXXXX"
    if len(status.question.fields):
        # Scan the fields: "field" becomes the one being asked now,
        # "next_field" the one that will be asked after it.
        field = None
        next_field = None
        for the_field in status.question.fields:
            if hasattr(the_field, 'datatype'):
                # if the_field.datatype in ['script', 'css']:
                # continue
                if the_field.datatype in ['html', 'note'] and field is not None:
                    continue
                if the_field.datatype in ['note']:
                    qoutput += "\n" + to_text(markdown_to_html(status.extras['note'][the_field.number], status=status), terms, links, status)
                    continue
                if the_field.datatype in ['html']:
                    qoutput += "\n" + to_text(status.extras['html'][the_field.number].rstrip())
                    continue
            #logmessage("field number is " + str(the_field.number))
            if not hasattr(the_field, 'saveas'):
                logmessage("as_sms: field has no saveas")
                continue
            if the_field.number not in status.current_info['skip']:
                #logmessage("field is not defined yet")
                if field is None:
                    field = the_field
                elif next_field is None:
                    next_field = the_field
                continue
        if field is None:
            logmessage("as_sms: field seemed to be defined already?")
            field = status.question.fields[0]
            #return dict(question=qoutput, help=None, next=next_variable)
        label = None
        next_label = ''
        if next_field is not None:
            next_variable = myb64unquote(next_field.saveas)
            if hasattr(next_field, 'label') and status.labels[next_field.number] not in ["no label", ""]:
                next_label = ' (' + word("Next will be") + ' ' + to_text(markdown_to_html(status.labels[next_field.number], trim=False, status=status, strip_newlines=True), terms, links, status) + ')'
        if hasattr(field, 'label') and status.labels[field.number] != "no label":
            label = to_text(markdown_to_html(status.labels[field.number], trim=False, status=status, strip_newlines=True), terms, links, status)
        question = status.question
        # if hasattr(field, 'datatype'):
        # logmessage("as_sms: data type is " + field.datatype)
        # else:
        # logmessage("as_sms: data type is undefined")
        # Per-datatype answer prompt: tell the SMS user what to type.
        if question.question_type == "settrue":
            qoutput += "\n" + word("Type ok to continue.")
        elif question.question_type in ["yesno", "noyes"] or (hasattr(field, 'datatype') and (field.datatype in ['yesno', 'yesnowide', 'noyes', 'noyeswide'] or (field.datatype == 'boolean' and question.question_type == 'fields'))):
            if question.question_type == 'fields' and label:
                qoutput += "\n" + label + ":" + next_label
            qoutput += "\n" + word("Type [y]es or [n]o.")
        elif question.question_type in ["yesnomaybe"] or (hasattr(field, 'datatype') and (field.datatype in ['yesnomaybe', 'yesnowidemaybe', 'noyesmaybe', 'noyesmaybe', 'noyeswidemaybe'] or (field.datatype == 'threestate' and question.question_type == 'fields'))):
            if question.question_type == 'fields' and label:
                qoutput += "\n" + label + ":" + next_label
            qoutput += "\n" + word("Type [y]es, [n]o, or [d]on't know")
        elif question.question_type == 'multiple_choice' or hasattr(field, 'choicetype') or (hasattr(field, 'datatype') and field.datatype in ['object', 'checkboxes', 'object_checkboxes']):
            if question.question_type == 'fields' and label:
                qoutput += "\n" + label + ":" + next_label
            data, choice_list = get_choices_with_abb(status, field, terms=terms, links=links)
            qoutput += "\n" + word("Choices:")
            if hasattr(field, 'shuffle') and field.shuffle:
                random.shuffle(data['label'])
            for the_label in data['label']:
                qoutput += "\n" + the_label
            if hasattr(field, 'datatype') and field.datatype in ['checkboxes', 'object_checkboxes']:
                qoutput += "\n" + word("Type your selection(s), separated by commas, or type none.")
            else:
                if status.extras['required'][field.number]:
                    if len(choice_list) == 1:
                        qoutput += "\n" + word("Type") + " " + data['keys'][0] + " " + word("to proceed.")
                    else:
                        qoutput += "\n" + word("Type your selection.")
                else:
                    qoutput += "\n" + word("Type your selection, or type skip to move on without selecting.")
        elif question.question_type == 'signature':
            if status.underText:
                qoutput += "\n__________________________\n" + to_text(markdown_to_html(status.underText, trim=False, status=status, strip_newlines=True), terms, links, status)
            qoutput += "\n" + word('Type x to sign your name electronically')
        elif hasattr(field, 'datatype') and field.datatype == 'range':
            max_string = str(int(status.extras['max'][field.number]))
            min_string = str(int(status.extras['min'][field.number]))
            if label:
                qoutput += "\n" + label + ":" + next_label
            qoutput += "\n" + word('Type a value between') + ' ' + min_string + ' ' + word('and') + ' ' + max_string
        elif hasattr(field, 'datatype') and field.datatype in ['file', 'camera']:
            if label:
                qoutput += "\n" + label + ":" + next_label
            if status.extras['required'][field.number]:
                qoutput += "\n" + word('Please send an image or file.')
            else:
                qoutput += "\n" + word('Please send an image or file, or type skip.')
        elif hasattr(field, 'datatype') and field.datatype in ['files']:
            if label:
                qoutput += "\n" + label + ":" + next_label
            if status.extras['required'][field.number]:
                qoutput += "\n" + word('Please send one or more images.')
            else:
                qoutput += "\n" + word('Please send one or more images, or type skip.')
        elif hasattr(field, 'datatype') and field.datatype in ['camcorder']:
            if label:
                qoutput += "\n" + label + ":" + next_label
            if status.extras['required'][field.number]:
                qoutput += "\n" + word('Please send a video.')
            else:
                qoutput += "\n" + word('Please send a video, or type skip.')
        elif hasattr(field, 'datatype') and field.datatype in ['microphone']:
            if label:
                qoutput += "\n" + label + ":" + next_label
            if status.extras['required'][field.number]:
                qoutput += "\n" + word('Please send an audio clip.')
            else:
                qoutput += "\n" + word('Please send an audio clip, or type skip.')
        elif hasattr(field, 'datatype') and field.datatype in ['number', 'float', 'integer']:
            if label:
                qoutput += "\n" + label + ":" + next_label
            if status.extras['required'][field.number]:
                qoutput += "\n" + word('Type a number.')
            else:
                qoutput += "\n" + word('Type a number, or type skip.')
        elif hasattr(field, 'datatype') and field.datatype in ['currency']:
            if label:
                qoutput += "\n" + label + ":" + next_label
            if status.extras['required'][field.number]:
                qoutput += "\n" + word('Type a currency value.')
            else:
                qoutput += "\n" + word('Type a currency value, or type skip.')
        elif hasattr(field, 'datatype') and field.datatype in ['date']:
            if label:
                qoutput += "\n" + label + ":" + next_label
            if status.extras['required'][field.number]:
                qoutput += "\n" + word('Type a date.')
            else:
                qoutput += "\n" + word('Type a date, or type skip.')
        elif hasattr(field, 'datatype') and field.datatype in ['email']:
            if label:
                qoutput += "\n" + label + ":" + next_label
            if status.extras['required'][field.number]:
                qoutput += "\n" + word('Type an e-mail address.')
            else:
                qoutput += "\n" + word('Type an e-mail address, or type skip.')
        else:
            if label:
                if status.extras['required'][field.number]:
                    qoutput += "\n" + word("Type the") + " " + label + "." + next_label
                else:
                    qoutput += "\n" + word("Type the") + " " + label + " " + word("or type skip to leave blank.") + next_label
    if status.underText and question.question_type != 'signature':
        qoutput += "\n" + to_text(markdown_to_html(status.underText, status=status), terms, links, status)
    if 'menu_items' in status.extras and type(status.extras['menu_items']) is list:
        for menu_item in status.extras['menu_items']:
            if type(menu_item) is dict and 'url' in menu_item and 'label' in menu_item:
                menu_items.append((menu_item['url'], menu_item['label']))
    # Append a de-duplicated link list; action links become typed
    # shortcuts (link1, link2, ...), plain hrefs are shown verbatim.
    if len(links):
        indexno = 1
        qoutput_add = "\n" + "== " + word("Links") + " =="
        seen = dict()
        for (href, label) in links:
            if label in seen and href in seen[label]:
                continue
            if label not in seen:
                seen[label] = set()
            seen[label].add(href)
            if re.search(r'action=', href):
                qoutput_add += "\n* " + label + ": [" + word('link') + str(indexno) + ']'
                indexno += 1
            else:
                qoutput_add += "\n* " + label + ": " + href
        if indexno == 2:
            qoutput_add += "\n" + word("You can type link1 to visit the link")
        else:
            qoutput_add += "\n" + word("You can type link1, etc. to visit a link")
        qoutput = re.sub(r'XXXXMESSAGE_AREAXXXX', qoutput_add + r'XXXXMESSAGE_AREAXXXX', qoutput)
        links_len = len(links)
        # Rebuild links in place so the caller keeps only action links.
        links_orig = list(links)
        while len(links):
            links.pop()
        for (href, label) in links_orig:
            if re.search(r'action=', href):
                links.append((href, label))
    # Assemble the help message: help sections, term definitions, menu.
    if len(status.helpText) or len(terms) or len(menu_items):
        houtput = ''
        for help_section in status.helpText:
            if houtput != '':
                houtput += "\n"
            if help_section['heading'] is not None:
                houtput += '== ' + to_text(markdown_to_html(help_section['heading'], trim=False, status=status, strip_newlines=True), terms, links, status) + ' =='
            else:
                houtput += '== ' + word('Help with this question') + ' =='
            houtput += "\n" + to_text(markdown_to_html(help_section['content'], trim=False, status=status, strip_newlines=True), terms, links, status)
        if len(terms):
            if houtput != '':
                houtput += "\n"
            houtput += "== " + word("Terms used:") + " =="
            for term, definition in terms.iteritems():
                houtput += "\n" + term + ': ' + definition
        if len(menu_items):
            indexno = 1
            if houtput != '':
                houtput += "\n"
            houtput += "== " + word("Menu:") + " =="
            for (href, label) in menu_items:
                if re.search(r'action=', href):
                    houtput += "\n* " + label + ": [" + word('menu') + str(indexno) + ']'
                    indexno += 1
                else:
                    houtput += "\n* " + label + ": " + href
            if indexno == 2:
                houtput += "\n" + word("You can type menu1 to select the menu item")
            else:
                houtput += "\n" + word("You can type menu1, etc. to select a menu item")
            menu_items_len = len(menu_items)
            # Rebuild menu_items in place, keeping only action entries.
            menu_items_orig = list(menu_items)
            while len(menu_items):
                menu_items.pop()
            for (href, label) in menu_items_orig:
                if re.search(r'action=', href):
                    menu_items.append((href, label))
        #houtput += "\n" + word("You can type question to read the question again.")
    else:
        houtput = None
    # Replace the placeholder with a pointer to '?' help, if applicable.
    if status.question.helptext is not None:
        qoutput = re.sub(r'XXXXMESSAGE_AREAXXXX', "\n" + word("Type ? for additional assistance.") + 'XXXXMESSAGE_AREAXXXX', qoutput)
    elif len(terms) or menu_items_len:
        items = list()
        if len(terms):
            items.append(word("definitions of words"))
        if menu_items_len:
            items.append(word("menu items"))
        qoutput = re.sub(r'XXXXMESSAGE_AREAXXXX', "\n" + word("Type ? to see") + " " + comma_and_list(items) + "." + 'XXXXMESSAGE_AREAXXXX', qoutput)
    # if status.question.question_type == 'deadend':
    # return dict(question=qoutput, help=houtput)
    if len(status.attachments) > 0:
        if len(status.attachments) > 1:
            qoutput += "\n" + word("Your documents are attached.")
        else:
            qoutput += "\n" + word("Your document is attached.")
    return dict(question=qoutput, help=houtput, next=next_variable)
def embed_input(status, variable):
    """Render the input widget for the field saving into `variable`.

    Marks the field as embedded on the status object; returns an error
    string when no field of the current question saves to that variable.
    """
    for field in status.question.fields:
        if from_safeid(field.saveas) == variable:
            status.embedded.add(field.saveas)
            return input_for(status, field, embedded=True)
    return 'ERROR: field not found'
def is_empty_mc(status, field):
    """Return True when a multiple-choice field has no selectable options.

    Fields without a choicetype (and computed fields whose selections
    have not been computed yet) are reported as non-empty.
    """
    if not hasattr(field, 'choicetype'):
        return False
    if field.choicetype == 'compute':
        if field.number not in status.selectcompute:
            # Nothing computed yet for this field; treat as non-empty.
            return False
        pairs = list(status.selectcompute[field.number])
    else:
        pairs = list(field.selections)
    return len(pairs) == 0
def as_html(status, url_for, debug, root, validation_rules):
decorations = list()
uses_audio_video = False
audio_text = ''
video_text = ''
datatypes = dict()
varnames = dict()
onchange = list()
if 'script' in status.extras and status.extras['script'] is not None:
status.extra_scripts.append(status.extras['script'])
if 'css' in status.extras and status.extras['css'] is not None:
status.extra_css.append(status.extras['css'])
if status.continueLabel:
continue_label = markdown_to_html(status.continueLabel, trim=True)
else:
continue_label = word('Continue')
# if status.question.script is not None:
# status.extra_scripts.append(status.question.script)
if status.audiovideo is not None:
uses_audio_video = True
audio_urls = get_audio_urls(status.audiovideo)
if len(audio_urls):
audio_text += '<div>\n' + audio_control(audio_urls) + '</div>\n'
video_urls = get_video_urls(status.audiovideo)
if len(video_urls):
video_text += '<div>\n' + video_control(video_urls) + '</div>\n'
if status.using_screen_reader and 'question' in status.screen_reader_links:
audio_text += '<div>\n' + audio_control(status.screen_reader_links['question'], preload="none") + '</div>\n'
if status.decorations is not None:
#sys.stderr.write("yoo1\n")
for decoration in status.decorations:
#sys.stderr.write("yoo2\n")
if 'image' in decoration:
#sys.stderr.write("yoo3\n")
the_image = status.question.interview.images.get(decoration['image'], None)
if the_image is not None:
#sys.stderr.write("yoo4\n")
url = server.url_finder(str(the_image.package) + ':' + str(the_image.filename))
width_value = 2.0
width_units = 'em'
sizing = 'width:' + str(width_value) + str(width_units) + ';'
filename = server.file_finder(str(the_image.package) + ':' + str(the_image.filename))
if 'extension' in filename and filename['extension'] == 'svg' and 'width' in filename:
if filename['width'] and filename['height']:
sizing += 'height:' + str(width_value * (filename['height']/filename['width'])) + str(width_units) + ';'
else:
sizing += 'height:auto;'
if url is not None:
#sys.stderr.write("yoo5\n")
if the_image.attribution is not None:
#sys.stderr.write("yoo6\n")
status.attributions.add(the_image.attribution)
decorations.append('<img class="daiconfloat" style="' + sizing + '" src="' + url + '"/>')
if len(decorations):
decoration_text = decorations[0];
else:
decoration_text = ''
master_output = ""
master_output += ' <section id="question" class="tab-pane active col-lg-6 col-md-8 col-sm-10">\n'
output = ""
if status.question.question_type == "signature":
output += ' <div class="sigpage" id="sigpage">\n <div class="sigshowsmallblock sigheader" id="sigheader">\n <div class="siginnerheader">\n <a class="btn btn-sm btn-warning signav-left sigclear">' + word('Clear') + '</a>\n <a class="btn btn-sm btn-primary signav-right sigsave">' + continue_label + '</a>\n <div class="sigtitle">'
if status.questionText:
output += markdown_to_html(status.questionText, trim=True)
else:
output += word('Sign Your Name')
output += '</div>\n </div>\n </div>\n <div class="sigtoppart" id="sigtoppart">\n <div id="errormess" class="sigerrormessage signotshowing">' + word("You must sign your name to continue.") + '</div>\n'
if status.questionText:
output += ' <div class="sighidesmall">' + markdown_to_html(status.questionText, trim=True) + '</div>\n'
output += ' </div>'
if status.subquestionText:
output += '\n <div class="sigmidpart">\n ' + markdown_to_html(status.subquestionText) + '\n </div>'
else:
output += '\n <div class="sigmidpart"></div>'
output += '\n <div id="sigcontent"><p style="text-align:center;border-style:solid;border-width:1px">' + word('Loading. Please wait . . . ') + '</p></div>\n <div class="sigbottompart" id="sigbottompart">\n '
if (status.underText):
output += markdown_to_html(status.underText, trim=True)
output += "\n </div>"
output += """
<div class="form-actions sighidesmall sigbuttons">
<a class="btn btn-primary btn-lg sigsave">""" + continue_label + """</a>
<a class="btn btn-warning btn-lg sigclear">""" + word('Clear') + """</a>
</div>
"""
output += ' </div>\n <form action="' + root + '" id="dasigform" method="POST"><input type="hidden" name="_save_as" value="' + escape_id(status.question.fields[0].saveas) + '"/><input type="hidden" id="_the_image" name="_the_image" value=""/><input type="hidden" id="_success" name="_success" value="0"/>'
output += tracker_tag(status)
output += ' </form>\n'
output += ' <div class="sigshowsmallblock"><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br><br></div>'
elif status.question.question_type in ["yesno", "yesnomaybe"]:
#varnames[safeid('_field_' + str(status.question.fields[0].number))] = status.question.fields[0].saveas
datatypes[status.question.fields[0].saveas] = status.question.fields[0].datatype
output += indent_by(audio_text, 12) + ' <form action="' + root + '" id="daform" method="POST">\n <fieldset>\n'
output += ' <div class="page-header"><h3>' + decoration_text + markdown_to_html(status.questionText, trim=True, status=status, strip_newlines=True) + '<div class="daclear"></div></h3></div>\n'
if status.subquestionText:
output += ' <div>\n' + markdown_to_html(status.subquestionText, status=status, indent=18) + ' </div>\n'
if video_text:
output += indent_by(video_text, 12)
output += ' <p class="sr-only">' + word('Press one of the following buttons:') + '</p>\n'
output += ' <div class="btn-toolbar">\n <button class="btn btn-primary btn-lg " name="' + escape_id(status.question.fields[0].saveas) + '" type="submit" value="True">' + status.question.yes() + '</button>\n <button class="btn btn-lg btn-info" name="' + escape_id(status.question.fields[0].saveas) + '" type="submit" value="False">' + status.question.no() + '</button>'
if status.question.question_type == 'yesnomaybe':
output += '\n <button class="btn btn-lg btn-warning" name="' + escape_id(status.question.fields[0].saveas) + '" type="submit" value="None">' + status.question.maybe() + '</button>'
output += '\n </div>\n'
#output += question_name_tag(status.question)
if (status.underText):
output += markdown_to_html(status.underText, status=status, indent=18, divclass="undertext")
output += tracker_tag(status)
output += datatype_tag(datatypes)
output += varname_tag(varnames)
output += ' </fieldset>\n </form>\n'
# --- noyes / noyesmaybe -------------------------------------------------
# Same layout as yesno/yesnomaybe, but with the submitted values
# deliberately inverted: the "yes" label posts False and the "no" label
# posts True (the question text is phrased in the negative).
elif status.question.question_type in ["noyes", "noyesmaybe"]:
    #varnames[safeid('_field_' + str(status.question.fields[0].number))] = status.question.fields[0].saveas
    # record the field's datatype so the server can decode the posted value
    datatypes[status.question.fields[0].saveas] = status.question.fields[0].datatype
    output += indent_by(audio_text, 12) + ' <form action="' + root + '" id="daform" method="POST">\n <fieldset>\n'
    output += ' <div class="page-header"><h3>' + decoration_text + markdown_to_html(status.questionText, trim=True, status=status, strip_newlines=True) + '<div class="daclear"></div></h3></div>\n'
    if status.subquestionText:
        output += ' <div>\n' + markdown_to_html(status.subquestionText, status=status, indent=18) + ' </div>\n'
    if video_text:
        output += indent_by(video_text, 12)
    output += ' <p class="sr-only">' + word('Press one of the following buttons:') + '</p>\n'
    # note the inversion: yes() label -> value "False", no() label -> value "True"
    output += ' <div class="btn-toolbar">\n <button class="btn btn-primary btn-lg" name="' + escape_id(status.question.fields[0].saveas) + '" type="submit" value="False">' + status.question.yes() + '</button>\n <button class="btn btn-lg btn-info" name="' + escape_id(status.question.fields[0].saveas) + '" type="submit" value="True">' + status.question.no() + '</button>'
    if status.question.question_type == 'noyesmaybe':
        # optional third button submits None ("maybe")
        output += '\n <button class="btn btn-lg btn-warning" name="' + escape_id(status.question.fields[0].saveas) + '" type="submit" value="None">' + status.question.maybe() + '</button>'
    output += '\n </div>\n'
    if (status.underText):
        output += markdown_to_html(status.underText, status=status, indent=18, divclass="undertext")
    # hidden bookkeeping inputs for the server round-trip
    output += tracker_tag(status)
    output += datatype_tag(datatypes)
    output += varname_tag(varnames)
    output += ' </fieldset>\n </form>\n'
# --- review --------------------------------------------------------------
# Render a "review" screen: a list of previously answered items, each with
# a link back to the action that re-asks it, followed by a Resume button.
elif status.question.question_type == "review":
    fieldlist = list()
    for field in status.question.fields:
        # skip fields whose display condition evaluated to false
        if not status.extras['ok'][field.number]:
            continue
        if hasattr(field, 'extras'):
            # field-specific JavaScript gets queued for inclusion on the page
            if 'script' in field.extras and 'script' in status.extras and field.number in status.extras['script']:
                status.extra_scripts.append(status.extras['script'][field.number])
            # if 'css' in field.extras and 'css' in status.extras and field.number in status.extras['css']:
            # status.extra_css.append(status.extras['css'][field.number])
        if hasattr(field, 'datatype'):
            if field.datatype == 'html' and 'html' in status.extras and field.number in status.extras['html']:
                # raw HTML passthrough field
                # NOTE(review): req_tag is never assigned in this branch (it is
                # only set in the "fields" branch) -- this relies on a value
                # carried over from earlier in the function; verify.
                fieldlist.append(' <div class="form-group' + req_tag +'"><div class="col-md-12"><note>' + status.extras['html'][field.number].rstrip() + '</note></div></div>\n')
                continue
            elif field.datatype == 'note' and 'note' in status.extras and field.number in status.extras['note']:
                # markdown "note" field, no input control
                fieldlist.append(' <div class="row"><div class="col-md-12">' + markdown_to_html(status.extras['note'][field.number], status=status, strip_newlines=True) + '</div></div>\n')
                continue
            # elif field.datatype in ['script', 'css']:
            # continue
            elif field.datatype == 'button' and hasattr(field, 'label') and field.number in status.helptexts:
                # button-styled review action; only shown when help text exists
                fieldlist.append(' <div class="row"><div class="col-md-12"><a class="label label-success review-action" href="' + url_action(field.action) + '">' + markdown_to_html(status.labels[field.number], trim=True, status=status, strip_newlines=True) + '</a>' + markdown_to_html(status.helptexts[field.number], status=status, strip_newlines=True) + '</div></div>\n')
                continue
        if hasattr(field, 'label'):
            # plain link back to the field's re-ask action
            fieldlist.append(' <div class="form-group"><div class="col-md-12"><a href="' + url_action(field.action) + '">' + markdown_to_html(status.labels[field.number], trim=True, status=status, strip_newlines=True) + '</a></div></div>\n')
            if field.number in status.helptexts:
                fieldlist.append(' <div class="row"><div class="col-md-12">' + markdown_to_html(status.helptexts[field.number], status=status, strip_newlines=True) + '</div></div>\n')
    output += indent_by(audio_text, 12) + ' <form action="' + root + '" id="daform" class="form-horizontal" method="POST">\n <fieldset>\n'
    output += ' <div class="page-header"><h3>' + decoration_text + markdown_to_html(status.questionText, trim=True, status=status, strip_newlines=True) + '<div class="daclear"></div></h3></div>\n'
    if status.subquestionText:
        output += ' <div>\n' + markdown_to_html(status.subquestionText, status=status, indent=18) + ' </div>\n'
    if video_text:
        output += indent_by(video_text, 12)
    if (len(fieldlist)):
        output += "".join(fieldlist)
    # the Resume button label can be customized via continueLabel
    if status.continueLabel:
        resume_button_label = markdown_to_html(status.continueLabel, trim=True)
    else:
        resume_button_label = word('Resume')
    output += ' <div class="form-actions"><button class="btn btn-lg btn-primary" type="submit">' + resume_button_label + '</button></div>\n'
    if (status.underText):
        output += markdown_to_html(status.underText, status=status, indent=18, divclass="undertext")
    output += tracker_tag(status)
    output += ' </fieldset>\n </form>\n'
# --- fields --------------------------------------------------------------
# Render a multi-field form: one labeled input per field, with client-side
# jQuery-Validation rules, show-if conditional display, file uploads,
# checkbox bookkeeping, and hidden metadata inputs for the round-trip.
elif status.question.question_type == "fields":
    enctype_string = ""          # becomes multipart when any file field exists
    fieldlist = list()           # rendered HTML for each visible field
    checkboxes = list()          # saveas names that post as checkboxes
    files = list()               # saveas names of file-upload fields
    hiddens = dict()             # fields with no choices, posted as empty
    # NOTE(review): checkbox_validation is set here but not read anywhere in
    # this visible branch -- possibly vestigial; confirm against full file.
    checkbox_validation = False
    if status.subquestionText:
        # pre-render the subquestion so inputs can be embedded inline in it
        sub_question_text = markdown_to_html(status.subquestionText, status=status, indent=18, embedder=embed_input)
    for field in status.question.fields:
        if is_empty_mc(status, field):
            # multiple-choice field with no available choices: record it as
            # an "empty" so the server can set it without user input
            if hasattr(field, 'datatype'):
                hiddens[field.saveas] = field.datatype
            else:
                hiddens[field.saveas] = True
            if hasattr(field, 'datatype'):
                datatypes[field.saveas] = field.datatype
                if field.datatype == 'object_checkboxes':
                    # object_checkboxes also sets a companion .gathered flag
                    datatypes[safeid(from_safeid(field.saveas) + ".gathered")] = 'boolean'
            continue
        # skip fields whose display condition evaluated to false
        if not status.extras['ok'][field.number]:
            continue
        if status.extras['required'][field.number]:
            req_tag = ' required'
        else:
            req_tag = ''
        if hasattr(field, 'extras'):
            # if 'script' in field.extras and 'script' in status.extras:
            # status.extra_scripts.append(status.extras['script'][field.number])
            # if 'css' in field.extras and 'css' in status.extras:
            # status.extra_css.append(status.extras['css'][field.number])
            #fieldlist.append("<div>datatype is " + str(field.datatype) + "</div>")
            if 'show_if_var' in field.extras and 'show_if_val' in status.extras:
                # wrap the field in a .showif div so client JS can toggle it.
                # NOTE(review): 'show_if_sign' is accessed without a membership
                # check -- assumes it always accompanies 'show_if_var'; verify.
                if hasattr(field, 'saveas'):
                    fieldlist.append(' <div class="showif" data-saveas="' + escape_id(field.saveas) + '" data-showif-sign="' + escape_id(field.extras['show_if_sign']) + '" data-showif-var="' + escape_id(field.extras['show_if_var']) + '" data-showif-val=' + noquote(unicode(status.extras['show_if_val'][field.number])) + '>\n')
                else:
                    fieldlist.append(' <div class="showif" data-showif-sign="' + escape_id(field.extras['show_if_sign']) + '" data-showif-var="' + escape_id(field.extras['show_if_var']) + '" data-showif-val=' + noquote(unicode(status.extras['show_if_val'][field.number])) + '>\n')
        if hasattr(field, 'datatype'):
            if field.datatype == 'html':
                # raw HTML passthrough field, no input control
                fieldlist.append(' <div class="form-group' + req_tag +'"><div class="col-md-12"><note>' + status.extras['html'][field.number].rstrip() + '</note></div></div>\n')
                #continue
            elif field.datatype == 'note':
                # markdown "note" field, no input control
                fieldlist.append(' <div class="row"><div class="col-md-12">' + markdown_to_html(status.extras['note'][field.number], status=status, strip_newlines=True) + '</div></div>\n')
                #continue
            # elif field.datatype in ['script', 'css']:
            # continue
            else:
                datatypes[field.saveas] = field.datatype
                if field.datatype == 'object_checkboxes':
                    datatypes[safeid(from_safeid(field.saveas) + ".gathered")] = 'boolean'
        if field.number in status.helptexts:
            # label becomes a popover trigger when help text is present
            helptext_start = '<a class="daterm" data-container="body" data-toggle="popover" data-placement="bottom" data-content=' + noquote(status.helptexts[field.number]) + '>'
            helptext_end = '</a>'
        else:
            helptext_start = ''
            helptext_end = ''
        if hasattr(field, 'disableothers') and field.disableothers and hasattr(field, 'saveas'):
            # NOTE(review): onchange is a list defined earlier in this
            # function (outside the visible range); verify.
            onchange.append(field.saveas)
        if hasattr(field, 'saveas'):
            varnames[safeid('_field_' + str(field.number))] = field.saveas
            # show-if fields are validated under their per-field alias, since
            # the real saveas name is not what the form posts in that case
            if hasattr(field, 'extras') and 'show_if_var' in field.extras and 'show_if_val' in status.extras:
                the_saveas = safeid('_field_' + str(field.number))
            else:
                the_saveas = field.saveas
            # jQuery-Validation configuration for this input
            validation_rules['messages'][the_saveas] = {'required': word("This field is required.")}
            if status.extras['required'][field.number]:
                #sys.stderr.write(field.datatype + "\n")
                validation_rules['rules'][the_saveas] = {'required': True}
            else:
                validation_rules['rules'][the_saveas] = {'required': False}
            for key in ['minlength', 'maxlength']:
                if hasattr(field, 'extras') and key in field.extras and key in status.extras:
                    #sys.stderr.write("Adding validation rule for " + str(key) + "\n")
                    validation_rules['rules'][the_saveas][key] = int(status.extras[key][field.number])
            if hasattr(field, 'datatype'):
                if field.datatype == 'date':
                    validation_rules['rules'][the_saveas]['date'] = True
                    validation_rules['messages'][the_saveas]['date'] = word("You need to enter a valid date.")
                if field.datatype == 'email':
                    validation_rules['rules'][the_saveas]['email'] = True
                    if status.extras['required'][field.number]:
                        validation_rules['rules'][the_saveas]['minlength'] = 1
                        validation_rules['messages'][the_saveas]['minlength'] = word("This field is required.")
                    validation_rules['messages'][the_saveas]['email'] = word("You need to enter a complete e-mail address.")
                if field.datatype in ['number', 'currency', 'float', 'integer']:
                    validation_rules['rules'][the_saveas]['number'] = True
                    validation_rules['messages'][the_saveas]['number'] = word("You need to enter a number.")
                    #sys.stderr.write("Considering adding validation rule\n")
                    for key in ['min', 'max']:
                        if hasattr(field, 'extras') and key in field.extras and key in status.extras:
                            #sys.stderr.write("Adding validation rule for " + str(key) + "\n")
                            validation_rules['rules'][the_saveas][key] = int(status.extras[key][field.number])
                if (field.datatype in ['files', 'file', 'camera', 'camcorder', 'microphone']):
                    # any upload field forces the multipart form encoding
                    enctype_string = ' enctype="multipart/form-data"'
                    files.append(field.saveas)
                if field.datatype in ['boolean', 'threestate']:
                    checkboxes.append(field.saveas)
                elif field.datatype in ['checkboxes', 'object_checkboxes']:
                    # register every individual checkbox name so the server
                    # can distinguish "unchecked" from "not submitted"
                    if field.choicetype == 'compute':
                        pairlist = list(status.selectcompute[field.number])
                    elif field.choicetype == 'manual':
                        pairlist = list(field.selections)
                    else:
                        pairlist = list()
                    if hasattr(field, 'shuffle') and field.shuffle:
                        random.shuffle(pairlist)
                    for pair in pairlist:
                        if pair[0] is not None:
                            checkboxes.append(safeid(from_safeid(field.saveas) + "[" + myb64quote(pair[0]) + "]"))
        # fields embedded inline in the subquestion text are not rendered
        # again in the field list
        if hasattr(field, 'saveas') and field.saveas in status.embedded:
            continue
        if hasattr(field, 'label'):
            if status.labels[field.number] == 'no label':
                # full-width input with no label column
                fieldlist.append(' <div class="form-group' + req_tag +'"><div class="col-md-12">' + input_for(status, field, wide=True) + '</div></div>\n')
            elif hasattr(field, 'inputtype') and field.inputtype in ['yesnowide', 'noyeswide']:
                fieldlist.append(' <div class="row"><div class="col-md-12">' + input_for(status, field) + '</div></div>\n')
            elif hasattr(field, 'inputtype') and field.inputtype in ['yesno', 'noyes']:
                fieldlist.append(' <div class="form-group' + req_tag +'"><div class="col-sm-offset-4 col-sm-8">' + input_for(status, field) + '</div></div>\n')
            else:
                # standard two-column layout: label left, input right
                fieldlist.append(' <div class="form-group' + req_tag + '"><label for="' + escape_id(field.saveas) + '" class="control-label col-sm-4">' + helptext_start + markdown_to_html(status.labels[field.number], trim=True, status=status, strip_newlines=True) + helptext_end + '</label><div class="col-sm-8 fieldpart">' + input_for(status, field) + '</div></div>\n')
        if hasattr(field, 'extras') and 'show_if_var' in field.extras and 'show_if_val' in status.extras:
            # close the .showif wrapper opened above
            fieldlist.append(' </div>\n')
    output += indent_by(audio_text, 12) + ' <form action="' + root + '" id="daform" class="form-horizontal" method="POST"' + enctype_string + '>\n <fieldset>\n'
    output += ' <div class="page-header"><h3>' + decoration_text + markdown_to_html(status.questionText, trim=True, status=status, strip_newlines=True) + '<div class="daclear"></div></h3></div>\n'
    if status.subquestionText:
        output += ' <div>\n' + sub_question_text
        # hidden error labels for embedded inputs, shown by validation JS
        for saveas_string in status.embedded:
            output += '<label style="display: none;" for="' + escape_id(saveas_string) + '" class="help-inline" id="' + escape_id(saveas_string) + '-error"></label> '
        output += ' </div>\n'
    if video_text:
        output += indent_by(video_text, 12)
    if (len(fieldlist)):
        output += "".join(fieldlist)
    #else:
    # output += " <p>Error: no fields</p>\n"
    #output += '</div>\n'
    # hidden metadata inputs (base64-encoded JSON) for the server round-trip
    if len(checkboxes):
        output += ' <input type="hidden" name="_checkboxes" value=' + myb64doublequote(json.dumps(checkboxes)) + '/>\n'
    if len(hiddens):
        output += ' <input type="hidden" name="_empties" value=' + myb64doublequote(json.dumps(hiddens)) + '/>\n'
    if len(files):
        output += ' <input type="hidden" name="_files" value=' + myb64doublequote(json.dumps(files)) + '/>\n'
        # initialize the bootstrap-fileinput widget for each upload field
        init_string = '<script>'
        for saveasname in files:
            init_string += '$("#' + escape_for_jquery(saveasname) + '").fileinput();' + "\n"
        init_string += '</script>'
        #status.extra_scripts.append('<script src="' + url_for('static', filename='bootstrap-fileinput/js/fileinput.min.js') + '"></script>' + init_string)
        status.extra_scripts.append(init_string)
        #status.extra_css.append('<link href="' + url_for('static', filename='bootstrap-fileinput/css/fileinput.min.css') + '" media="all" rel="stylesheet" type="text/css" />')
    output += ' <p class="sr-only">' + word('You can press the following button:') + '</p>\n'
    output += ' <div class="form-actions"><button class="btn btn-lg btn-primary" type="submit">' + continue_label + '</button></div>\n'
    #output += question_name_tag(status.question)
    if (status.underText):
        output += markdown_to_html(status.underText, status=status, indent=18, divclass="undertext")
    output += tracker_tag(status)
    output += datatype_tag(datatypes)
    output += varname_tag(varnames)
    output += ' </fieldset>\n </form>\n'
# --- settrue -------------------------------------------------------------
# A single Continue button that, when pressed, sets the field's saveas
# variable to True (the button itself carries value="True").
elif status.question.question_type == "settrue":
    #varnames[safeid('_field_' + str(status.question.fields[0].number))] = status.question.fields[0].saveas
    # the posted value is always a boolean
    datatypes[status.question.fields[0].saveas] = "boolean"
    output += indent_by(audio_text, 12) + ' <form action="' + root + '" id="daform" method="POST">\n <fieldset>\n'
    output += ' <div class="page-header"><h3>' + decoration_text + markdown_to_html(status.questionText, trim=True, status=status, strip_newlines=True) + '<div class="daclear"></div></h3></div>\n'
    if status.subquestionText:
        output += ' <div>\n' + markdown_to_html(status.subquestionText, status=status, indent=18) + ' </div>\n'
    if video_text:
        output += indent_by(video_text, 12)
    output += ' <p class="sr-only">' + word('You can press the following button:') + '</p>\n'
    output += ' <div class="form-actions"><button type="submit" class="btn btn-lg btn-primary" name="' + escape_id(status.question.fields[0].saveas) + '" value="True"> ' + continue_label + '</button></div>\n'
    #output += question_name_tag(status.question)
    if (status.underText):
        output += markdown_to_html(status.underText, status=status, indent=18, divclass="undertext")
    # hidden bookkeeping inputs for the server round-trip
    output += tracker_tag(status)
    output += datatype_tag(datatypes)
    output += varname_tag(varnames)
    output += ' </fieldset>\n </form>\n'
# --- multiple_choice -----------------------------------------------------
# Render a single multiple-choice field either as a radio-button list with
# a Continue button (question_variety == "radio") or as one submit button
# per choice.  Choices may come from computed pairs (has_code) or from a
# literal choice list; choices without a saveas post under the fixed
# base64 name "X211bHRpcGxlX2Nob2ljZQ==" with the choice index as value.
elif status.question.question_type == "multiple_choice":
    #varnames[safeid('_field_' + str(status.question.fields[0].number))] = status.question.fields[0].saveas
    # pre-select a default choice if one was provided and is a scalar
    if status.question.fields[0].number in status.defaults and type(status.defaults[status.question.fields[0].number]) in [str, unicode, int, float]:
        defaultvalue = unicode(status.defaults[status.question.fields[0].number])
        #logmessage("Default value is " + str(defaultvalue))
    else:
        defaultvalue = None
    if hasattr(status.question.fields[0], 'datatype'):
        datatypes[status.question.fields[0].saveas] = status.question.fields[0].datatype
    output += indent_by(audio_text, 12) + ' <form action="' + root + '" id="daform" method="POST">\n <fieldset>\n'
    output += ' <div class="page-header"><h3>' + decoration_text + markdown_to_html(status.questionText, trim=True, status=status, strip_newlines=True) + '<div class="daclear"></div></h3></div>\n'
    if status.subquestionText:
        output += ' <div>\n' + markdown_to_html(status.subquestionText, status=status, indent=18) + ' </div>\n'
    if video_text:
        output += indent_by(video_text, 12)
    # container where jQuery-Validation places error messages
    output += ' <div id="errorcontainer" class="alert alert-danger" role="alert" style="display:none"></div>\n'
    output += ' <p class="sr-only">' + word('Your choices are:') + '</p>\n'
    validation_rules['errorElement'] = "span"
    validation_rules['errorLabelContainer'] = "#errorcontainer"
    if status.question.question_variety == "radio":
        if hasattr(status.question.fields[0], 'saveas'):
            if hasattr(status.question.fields[0], 'has_code') and status.question.fields[0].has_code:
                # computed (value, label) pairs
                id_index = 0
                pairlist = list(status.selectcompute[status.question.fields[0].number])
                if hasattr(status.question.fields[0], 'shuffle') and status.question.fields[0].shuffle:
                    random.shuffle(pairlist)
                for pair in pairlist:
                    formatted_item = markdown_to_html(unicode(pair[1]), status=status, trim=True, escape=True)
                    if defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float] and unicode(pair[0]) == unicode(defaultvalue):
                        ischecked = ' checked="checked"'
                    else:
                        ischecked = ''
                    if pair[0] is not None:
                        output += ' <div class="row"><div class="col-md-12"><input alt="' + formatted_item + '" data-labelauty="' + formatted_item + '|' + formatted_item + '" class="to-labelauty radio-icon" id="' + escape_id(status.question.fields[0].saveas) + '_' + str(id_index) + '" name="' + escape_id(status.question.fields[0].saveas) + '" type="radio" value="' + unicode(pair[0]) + '"' + ischecked + '/></div></div>\n'
                    else:
                        # a None value renders as inert text, not a radio
                        output += ' <div class="form-group"><div class="col-md-12">' + markdown_to_html(pair[1], status=status) + '</div></div>\n'
                    id_index += 1
            else:
                # literal choice dicts; an optional 'image' key adds an icon
                id_index = 0
                choicelist = list(status.question.fields[0].choices)
                if hasattr(status.question.fields[0], 'shuffle') and status.question.fields[0].shuffle:
                    random.shuffle(choicelist)
                for choice in choicelist:
                    if 'image' in choice:
                        the_icon = icon_html(status, choice['image']) + ' '
                    else:
                        the_icon = ''
                    for key in choice:
                        if key == 'image':
                            continue
                        formatted_key = markdown_to_html(key, status=status, trim=True, escape=True)
                        if defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float] and unicode(choice[key]) == unicode(defaultvalue):
                            ischecked = ' checked="checked"'
                        else:
                            ischecked = ''
                        output += ' <div class="row"><div class="col-md-12"><input alt="' + formatted_key + '" data-labelauty="' + my_escape(the_icon) + formatted_key + '|' + my_escape(the_icon) + formatted_key + '" class="to-labelauty radio-icon" id="' + escape_id(status.question.fields[0].saveas) + '_' + str(id_index) + '" name="' + escape_id(status.question.fields[0].saveas) + '" type="radio" value="' + unicode(choice[key]) + '"' + ischecked + '/></div></div>\n'
                        id_index += 1
            # at least one radio must be selected before submitting
            validation_rules['ignore'] = None
            validation_rules['rules'][status.question.fields[0].saveas] = {'required': True}
            validation_rules['messages'][status.question.fields[0].saveas] = {'required': word("You need to select one.")}
        else:
            # no saveas: choices post under the fixed name with their index
            indexno = 0
            for choice in status.question.fields[0].choices:
                if 'image' in choice:
                    the_icon = icon_html(status, choice['image']) + ' '
                else:
                    the_icon = ''
                id_index = 0
                for key in choice:
                    if key == 'image':
                        continue
                    formatted_key = markdown_to_html(key, status=status, trim=True, escape=True)
                    output += ' <div class="row"><div class="col-md-12"><input alt="' + formatted_key + '" data-labelauty="' + my_escape(the_icon) + formatted_key + '|' + my_escape(the_icon) + formatted_key + '" class="to-labelauty radio-icon" id="multiple_choice_' + str(indexno) + '_' + str(id_index) + '" name="X211bHRpcGxlX2Nob2ljZQ==" type="radio" value="' + str(indexno) + '"/></div></div>\n'
                    id_index += 1
                indexno += 1
            validation_rules['rules']['X211bHRpcGxlX2Nob2ljZQ=='] = {'required': True}
            validation_rules['messages']['X211bHRpcGxlX2Nob2ljZQ=='] = {'required': word("You need to select one.")}
        output += ' <br/>\n'
        output += ' <p class="sr-only">' + word('You can press the following button:') + '</p>\n'
        output += ' <button class="btn btn-lg btn-primary" type="submit">' + continue_label + '</button>\n'
    else:
        # non-radio variety: each choice is its own submit button
        #output += ' <p class="sr-only">' + word('Press one of the following buttons:') + '</p>\n'
        output += ' <div class="btn-toolbar">\n'
        if hasattr(status.question.fields[0], 'saveas'):
            btn_class = ' btn-primary'
            if hasattr(status.question.fields[0], 'has_code') and status.question.fields[0].has_code:
                pairlist = list(status.selectcompute[status.question.fields[0].number])
                if hasattr(status.question.fields[0], 'shuffle') and status.question.fields[0].shuffle:
                    random.shuffle(pairlist)
                for pair in pairlist:
                    if pair[0] is not None:
                        output += ' <button type="submit" class="btn btn-lg' + btn_class + '" name="' + escape_id(status.question.fields[0].saveas) + '" value="' + unicode(pair[0]) + '"> ' + markdown_to_html(pair[1], status=status, trim=True, do_terms=False) + '</button>\n'
                    else:
                        # a None value renders as inert text, not a button
                        output += markdown_to_html(pair[1], status=status)
            else:
                choicelist = list(status.question.fields[0].choices)
                if hasattr(status.question.fields[0], 'shuffle') and status.question.fields[0].shuffle:
                    random.shuffle(choicelist)
                for choice in choicelist:
                    if 'image' in choice:
                        # image choices switch to the custom button style
                        the_icon = '<div>' + icon_html(status, choice['image'], width_value=4.0) + '</div>';
                        btn_class = ' btn-default btn-da-custom'
                    else:
                        the_icon = ''
                    for key in choice:
                        if key == 'image':
                            continue
                        output += ' <button type="submit" class="btn btn-lg' + btn_class + '" name="' + escape_id(status.question.fields[0].saveas) + '" value="' + unicode(choice[key]) + '"> ' + the_icon + markdown_to_html(key, status=status, trim=True, do_terms=False) + '</button>\n'
        else:
            # no saveas: buttons post the choice index; button color reflects
            # the type of embedded Question action, if any
            indexno = 0
            for choice in status.question.fields[0].choices:
                btn_class = ' btn-primary'
                if 'image' in choice:
                    the_icon = '<div>' + icon_html(status, choice['image'], width_value=4.0) + '</div>'
                    btn_class = ' btn-default btn-da-custom'
                else:
                    the_icon = ''
                for key in choice:
                    if key == 'image':
                        continue
                    if isinstance(choice[key], Question) and choice[key].question_type in ["exit", "continue", "restart", "refresh", "signin", "register", "leave", "link"]:
                        if choice[key].question_type in ["continue", "register"]:
                            btn_class = ' btn-primary'
                        elif choice[key].question_type in ["leave", "link", "restart"]:
                            btn_class = ' btn-warning'
                        elif choice[key].question_type == "refresh":
                            btn_class = ' btn-success'
                        elif choice[key].question_type == "signin":
                            btn_class = ' btn-info'
                        elif choice[key].question_type == "exit":
                            btn_class = ' btn-danger'
                    output += ' <button type="submit" class="btn btn-lg' + btn_class + '" name="X211bHRpcGxlX2Nob2ljZQ==" value="' + str(indexno) + '">' + the_icon + markdown_to_html(key, status=status, trim=True, do_terms=False, strip_newlines=True) + '</button>\n'
                indexno += 1
        output += ' </div>\n'
    #output += question_name_tag(status.question)
    if (status.underText):
        output += markdown_to_html(status.underText, status=status, indent=18, divclass="undertext")
    # hidden bookkeeping inputs for the server round-trip
    output += tracker_tag(status)
    output += datatype_tag(datatypes)
    output += varname_tag(varnames)
    output += ' </fieldset>\n </form>\n'
# --- deadend -------------------------------------------------------------
# Terminal screen: question text only, no form and no buttons.
elif status.question.question_type == 'deadend':
    output += indent_by(audio_text, 12) + ' <div class="page-header"><h3>' + decoration_text + markdown_to_html(status.questionText, trim=True, status=status, strip_newlines=True) + '<div class="daclear"></div></h3></div>\n'
    if status.subquestionText:
        output += ' <div>\n' + markdown_to_html(status.subquestionText, status=status, indent=18) + ' </div>\n'
    if video_text:
        output += indent_by(video_text, 12)
# --- default -------------------------------------------------------------
# Fallback for any other question type: question text plus a plain
# Continue button in a simple form.
else:
    output += indent_by(audio_text, 12) + ' <form action="' + root + '" id="daform" class="form-horizontal" method="POST">\n <fieldset>\n'
    output += ' <div class="page-header"><h3>' + decoration_text + markdown_to_html(status.questionText, trim=True, status=status, strip_newlines=True) + '<div class="daclear"></div></h3></div>\n'
    if status.subquestionText:
        output += ' <div>\n' + markdown_to_html(status.subquestionText, status=status, indent=18) + ' </div>\n'
    if video_text:
        output += indent_by(video_text, 12)
    output += ' <p class="sr-only">' + word('You can press the following button:') + '</p>\n'
    output += ' <div class="form-actions"><button class="btn btn-lg btn-primary" type="submit">' + continue_label + '</button></div>\n'
    #output += question_name_tag(status.question)
    if (status.underText):
        output += markdown_to_html(status.underText, status=status, indent=18, divclass="undertext")
    output += tracker_tag(status)
    output += ' </fieldset>\n </form>\n'
# --- attachments ----------------------------------------------------------
# After the question body, render each generated attachment as a tabbed
# panel with Download links (PDF/RTF/DOCX/LaTeX), an optional HTML
# Preview tab, and (in debug mode) a raw Markdown tab.
if len(status.attachments) > 0:
    output += ' <br/>\n'
    if len(status.attachments) > 1:
        output += ' <div class="alert alert-success" role="alert">' + word('attachment_message_plural') + '</div>\n'
    else:
        output += ' <div class="alert alert-success" role="alert">' + word('attachment_message_singular') + '</div>\n'
    attachment_index = 0
    editable_included = False
    if len(status.attachments) > 1:
        file_word = 'files'
    else:
        file_word = 'file'
    editable_name = ''
    for attachment in status.attachments:
        # track whether an editable format (RTF/DOCX) accompanies a PDF, and
        # build a human-readable name for the editable format(s)
        if 'rtf' in attachment['valid_formats'] or 'docx' in attachment['valid_formats'] or '*' in attachment['valid_formats']:
            if 'pdf' in attachment['valid_formats'] or '*' in attachment['valid_formats']:
                editable_included = True
            if 'rtf' in attachment['valid_formats'] or '*' in attachment['valid_formats']:
                if 'docx' in attachment['valid_formats']:
                    editable_name = 'RTF and DOCX files'
                else:
                    editable_name = 'RTF ' + file_word
            elif 'docx' in attachment['valid_formats']:
                editable_name = 'DOCX ' + file_word
        # the raw-Markdown tab is only offered in debug mode, and only when
        # markdown content exists for the chosen display format
        if debug and len(attachment['markdown']):
            if 'html' in attachment['valid_formats'] or '*' in attachment['valid_formats']:
                md_format = 'html'
            else:
                # otherwise fall back to the first listed valid format
                for format_type in attachment['valid_formats']:
                    md_format = format_type
                    break
            if md_format in attachment['markdown'] and attachment['markdown'][md_format] != '':
                show_markdown = True
            else:
                show_markdown = False
        else:
            show_markdown = False
        #logmessage("markdown is " + str(attachment['markdown']))
        if 'pdf' in attachment['valid_formats'] or 'rtf' in attachment['valid_formats'] or 'docx' in attachment['valid_formats'] or (debug and 'tex' in attachment['valid_formats']) or '*' in attachment['valid_formats']:
            show_download = True
        else:
            show_download = False
        if 'html' in attachment['valid_formats'] or '*' in attachment['valid_formats']:
            show_preview = True
        else:
            show_preview = False
        if len(attachment['valid_formats']) > 1 or '*' in attachment['valid_formats']:
            multiple_formats = True
        else:
            multiple_formats = False
        output += ' <div><h3>' + markdown_to_html(attachment['name'], trim=True, status=status, strip_newlines=True) + '</h3></div>\n'
        if attachment['description']:
            output += ' <div>' + markdown_to_html(attachment['description'], status=status, strip_newlines=True) + '</div>\n'
        output += ' <div class="tabbable">\n'
        # NOTE(review): the "True or" makes this condition always true, so the
        # tab header is emitted unconditionally -- possibly a leftover toggle.
        if True or show_preview or show_markdown:
            output += ' <ul class="nav nav-tabs">\n'
            if show_download:
                output += ' <li class="active"><a href="#download' + str(attachment_index) + '" data-toggle="tab">' + word('Download') + '</a></li>\n'
            if show_preview:
                output += ' <li><a href="#preview' + str(attachment_index) + '" data-toggle="tab">' + word('Preview') + '</a></li>\n'
            if show_markdown:
                output += ' <li><a href="#markdown' + str(attachment_index) + '" data-toggle="tab">' + word('Markdown') + '</a></li>\n'
            output += ' </ul>\n'
        output += ' <div class="tab-content">\n'
        if show_download:
            output += ' <div class="tab-pane active" id="download' + str(attachment_index) + '">\n'
            if multiple_formats:
                output += ' <p>' + word('save_as_multiple') + '</p>\n'
            #else:
            #output += ' <p>' + word('save_as_singular') + '</p>\n'
            # one download link per available format; links address this
            # question/attachment by interview path, question number and index
            if 'pdf' in attachment['valid_formats'] or '*' in attachment['valid_formats']:
                output += ' <p><a href="?filename=1&i=' + urllib.quote(status.question.interview.source.path, '') + '&question=' + str(status.question.number) + '&index=' + str(attachment_index) + '&format=pdf"><i class="glyphicon glyphicon-print"></i> PDF</a> (' + word('pdf_message') + ')</p>\n'
            if 'rtf' in attachment['valid_formats'] or '*' in attachment['valid_formats']:
                output += ' <p><a href="?filename=1&i=' + urllib.quote(status.question.interview.source.path, '') + '&question=' + str(status.question.number) + '&index=' + str(attachment_index) + '&format=rtf"><i class="glyphicon glyphicon-pencil"></i> RTF</a> (' + word('rtf_message') + ')</p>\n'
            if 'docx' in attachment['valid_formats']:
                output += ' <p><a href="?filename=1&i=' + urllib.quote(status.question.interview.source.path, '') + '&question=' + str(status.question.number) + '&index=' + str(attachment_index) + '&format=docx"><i class="glyphicon glyphicon-pencil"></i> DOCX</a> (' + word('docx_message') + ')</p>\n'
            if debug and ('tex' in attachment['valid_formats'] or '*' in attachment['valid_formats']):
                output += ' <p><a href="?filename=1&i=' + urllib.quote(status.question.interview.source.path, '') + '&question=' + str(status.question.number) + '&index=' + str(attachment_index) + '&format=tex"><i class="glyphicon glyphicon-pencil"></i> LaTeX</a> (' + word('tex_message') + ')</p>\n'
            output += ' </div>\n'
        if show_preview:
            output += ' <div class="tab-pane" id="preview' + str(attachment_index) + '">\n'
            output += ' <blockquote>' + unicode(attachment['content']['html']) + '</blockquote>\n'
            output += ' </div>\n'
        if show_markdown:
            output += ' <div class="tab-pane" id="markdown' + str(attachment_index) + '">\n'
            output += ' <pre>' + safe_html(attachment['markdown'][md_format]) + '</pre>\n'
            output += ' </div>\n'
        output += ' </div>\n </div>\n'
        attachment_index += 1
if status.question.allow_emailing:
if len(status.attachments) > 1:
email_header = word("E-mail these documents")
else:
email_header = word("E-mail this document")
if status.current_info['user']['is_authenticated'] and status.current_info['user']['email']:
default_email = status.current_info['user']['email']
else:
default_email = ''
output += """\
<div class="panel-group" id="accordion" role="tablist" aria-multiselectable="true">
<div class="panel panel-default">
<div class="panel-heading" role="tab" id="headingOne">
<h4 class="panel-title">
<a role="button" data-toggle="collapse" data-parent="#accordion" href="#collapseOne" aria-expanded="true" aria-controls="collapseOne">
""" + email_header + """
</a>
</h4>
</div>
<div id="collapseOne" class="panel-collapse collapse in" role="tabpanel" aria-labelledby="headingOne">
<div class="panel-body">
<form action=\"""" + root + """\" id="emailform" class="form-horizontal" method="POST">
<fieldset>
<div class="form-group"><label for="_attachment_email_address" class="control-label col-sm-4">""" + word('E-mail address') + """</label><div class="col-sm-8"><input alt=""" + '"' + word ("Input box") + '"' + """ class="form-control" type="email" name="_attachment_email_address" id="_attachment_email_address" value=""" + '"' + str(default_email) + '"' + """/></div></div>"""
if editable_included:
output += """
<div class="form-group"><div class="col-sm-4"></div><div class="col-sm-8"><input alt="' + word ("Check box") + ", " + word('Include ' + editable_name + ' for editing') + '" type="checkbox" value="True" name="_attachment_include_editable" id="_attachment_include_editable"/> <label for="_attachment_include_editable" class="nobold">""" + word('Include ' + editable_name + ' for editing') + '</label></div></div>\n'
output += """
<div class="form-actions"><button class="btn btn-primary" type="submit">""" + word('Send') + '</button></div><input type="hidden" name="_email_attachments" value="1"/><input type="hidden" name="_question_number" value="' + str(status.question.number) + '"/>'
output += """
</fieldset>
<input type="hidden" name="csrf_token" value=""" + '"' + server.generate_csrf() + '"' + """/>
</form>
</div>
</div>
</div>
</div>
"""
status.extra_scripts.append("""<script>
$("#emailform").validate({'submitHandler': daValidationHandler, 'rules': {'_attachment_email_address': {'minlength': 1, 'required': true, 'email': true}}, 'messages': {'_attachment_email_address': {'required': """ + repr(str(word("An e-mail address is required."))) + """, 'email': """ + repr(str(word("You need to enter a complete e-mail address."))) + """}}, 'errorClass': 'help-inline'});
</script>""")
if status.question.question_type != "signature":
if len(status.attributions):
output += ' <br/><br/><br/><br/><br/><br/><br/>\n'
for attribution in sorted(status.attributions):
output += ' <div><attribution><small>' + markdown_to_html(attribution, strip_newlines=True) + '</small></attribution></div>\n'
if debug or status.using_screen_reader:
status.screen_reader_text['question'] = unicode(output)
master_output += output
master_output += ' </section>\n'
master_output += ' <section id="help" class="tab-pane col-lg-6 col-md-8 col-sm-10">\n'
output = '<div><a id="backToQuestion" data-toggle="tab" href="#question" class="btn btn-info btn-md"><i class="glyphicon glyphicon-arrow-left"></i> ' + word("Back to question") + '</a></div>'
output += """
<div id="daPhoneMessage" class="row invisible">
<div class="col-md-12">
<h3>""" + word("Telephone assistance") + """</h3>
<p></p>
</div>
</div>
<div id="daChatBox" class="invisible">
<div class="row">
<div class="col-md-12 dachatbutton">
<a id="daChatOnButton" class="label label-success">""" + word("Activate chat") + """</a>
<a id="daChatOffButton" class="label label-warning">""" + word("Turn off chat") + """</a>
<h3>""" + word("Live chat") + """</h3>
</div>
</div>
<div class="row">
<div class="col-md-12">
<ul class="list-group dachatbox" id="daCorrespondence"></ul>
</div>
</div>
<form id="dachat" autocomplete="off">
<div class="row">
<div class="col-md-12">
<div class="input-group">
<input type="text" class="form-control" id="daMessage" placeholder=""" + '"' + word("Type your message here.") + '"' + """>
<span class="input-group-btn"><button class="btn btn-default" id="daSend" type="button">""" + word("Send") + """</button></span>
</div>
</div>
</div>
</form>
<div class="row invisible">
<div class="col-md-12">
<p id="daPushResult"></p>
</div>
</div>
<div class="row topspace">
<div class="col-md-12">
<p>
<span class="peer-message" id="peerMessage"></span>
<span class="peer-message" id="peerHelpMessage"></span>
</p>
</div>
</div>
</div>
"""
if len(status.helpText):
if status.using_screen_reader and 'help' in status.screen_reader_links:
output += ' <div>\n' + indent_by(audio_control(status.screen_reader_links['help'], preload="none"), 14) + ' </div>\n'
for help_section in status.helpText:
if help_section['heading'] is not None:
output += ' <div class="page-header"><h3>' + help_section['heading'] + '</h3></div>\n'
else:
output += ' <div class="page-header"><h3>' + word('Help with this question') + '</h3></div>\n'
if help_section['audiovideo'] is not None:
uses_audio_video = True
audio_urls = get_audio_urls(help_section['audiovideo'])
if len(audio_urls):
output += ' <div>\n' + indent_by(audio_control(audio_urls), 14) + ' </div>\n'
video_urls = get_video_urls(help_section['audiovideo'])
if len(video_urls):
output += ' <div>\n' + indent_by(video_control(video_urls), 14) + ' </div>\n'
output += markdown_to_html(help_section['content'], status=status, indent=12)
if len(status.attributions):
output += ' <br/><br/><br/><br/><br/><br/><br/>\n'
for attribution in sorted(status.attributions):
output += ' <div><attribution><small>' + markdown_to_html(attribution, strip_newlines=True) + '</small></attribution></div>\n'
if debug or status.using_screen_reader:
status.screen_reader_text['help'] = unicode(output)
master_output += output
master_output += ' </section>\n'
# if status.question.question_type == "fields":
# status.extra_scripts.append("""\
# <script>
# $("#daform").find('button[type="submit"]').prop("disabled", true);
# daform = $("#daform");
# $("#daform input, #daform select, #daform textarea").on('change input propertychange paste', function(){
# if (daform.valid()){
# $("#daform").find('button[type="submit"]').prop("disabled", false);
# }
# else{
# $("#daform").find('button[type="submit"]').prop("disabled", true);
# }
# });
# </script>""")
add_validation(status.extra_scripts, validation_rules)
for element_id_unescaped in onchange:
element_id = re.sub(r'(:|\.|\[|\]|,|=)', r'\\\\\1', element_id_unescaped)
the_script = """\
<script>
$("#""" + element_id + """").change(function(){
if ($( this ).val() == ""){
$("#daform input:not(#""" + element_id + """):not(:hidden)").prop("disabled", false);
$("#daform select:not(#""" + element_id + """):not(:hidden)").prop("disabled", false);
$("#daform input:not(#""" + element_id + """):not(:hidden)").parent().parent().removeClass("greyedout");
$("#daform select:not(#""" + element_id + """):not(:hidden)").parent().parent().removeClass("greyedout");
}
else{
$("#daform input:not(#""" + element_id + """):not(:hidden)").prop("disabled", true);
$("#daform select:not(#""" + element_id + """):not(:hidden)").prop("disabled", true);
$("#daform input:not(#""" + element_id + """):not(:hidden)").parent().parent().addClass("greyedout");
$("#daform select:not(#""" + element_id + """):not(:hidden)").parent().parent().addClass("greyedout");
}
});
</script>
"""
status.extra_scripts.append(the_script)
if 'track_location' in status.extras and status.extras['track_location']:
track_js = """\
<script>
function daSetPosition(position) {
document.getElementById('_track_location').value = JSON.stringify({'latitude': position.coords.latitude, 'longitude': position.coords.longitude})
}
function daShowError(error) {
switch(error.code) {
case error.PERMISSION_DENIED:
document.getElementById('_track_location').value = JSON.stringify({error: "User denied the request for Geolocation"});
console.log("User denied the request for Geolocation.");
break;
case error.POSITION_UNAVAILABLE:
document.getElementById('_track_location').value = JSON.stringify({error: "Location information is unavailable"});
console.log("Location information is unavailable.");
break;
case error.TIMEOUT:
document.getElementById('_track_location').value = JSON.stringify({error: "The request to get user location timed out"});
console.log("The request to get user location timed out.");
break;
case error.UNKNOWN_ERROR:
document.getElementById('_track_location').value = JSON.stringify({error: "An unknown error occurred"});
console.log("An unknown error occurred.");
break;
}
}
$( document ).ready(function() {
$(function () {
if (navigator.geolocation) {
document.getElementById('_track_location').value = JSON.stringify({error: "getCurrentPosition was still running"});
navigator.geolocation.getCurrentPosition(daSetPosition, daShowError, {timeout: 1000, maximumAge: 3600000});
}
else{
document.getElementById('_track_location').value = JSON.stringify({error: "navigator.geolocation not available in browser"});
}
});
});
</script>"""
status.extra_scripts.append(track_js)
if len(status.maps):
map_js = """\
<script>
map_info = [""" + ", ".join(status.maps) + """];
</script>
"""
status.extra_scripts.append(map_js)
status.extra_scripts.append('<script async defer src="https://maps.googleapis.com/maps/api/js?signed_in=true&callback=daInitMap"></script>')
return master_output
def add_validation(extra_scripts, validation_rules):
    """Append a <script> block that wires jQuery Validate onto #daform.

    The appended script installs the server-built ``validation_rules``
    (serialized via json.dumps), routes successful submits through
    daValidationHandler, and converts #backbutton submissions into an AJAX
    POST (appending '&ajax=1' plus any accumulated "informed" keys) whose
    response is handed to daProcessAjax / daProcessAjaxError.

    :param extra_scripts: list of script strings to be injected into the page;
        mutated in place.
    :param validation_rules: dict of jQuery Validate rules/messages for the form.
    """
    extra_scripts.append("""    <script>
      var validation_rules = """ + json.dumps(validation_rules) + """;
      validation_rules.submitHandler = daValidationHandler;
      $("#daform").validate(validation_rules);
      $("button").click(function(event){
        whichButton = this;
      });
      $("#backbutton").submit(function(event){
        $("#backbutton").addClass("dabackiconpressed");
        var informed = '';
        if (daInformedChanged){
          informed = '&informed=' + Object.keys(daInformed).join(',');
        }
        $.ajax({
          type: "POST",
          url: $("#backbutton").attr('action'),
          data: $("#backbutton").serialize() + '&ajax=1' + informed,
          success: function(data){
            setTimeout(function(){
              daProcessAjax(data, document.getElementById('backbutton'));
            }, 0);
          },
          error: function(xhr, status, error){
            setTimeout(function(){
              daProcessAjaxError(xhr, status, error);
            }, 0);
          }
        });
        event.preventDefault();
      });
    </script>""")
def input_for(status, field, wide=False, embedded=False):
    """Render the HTML form input(s) for one question field.

    Dispatches on the field's choicetype/datatype and returns an HTML
    fragment string. Choice fields become checkbox/radio groups or a
    <select>; scalar datatypes become the matching <input>/<textarea>.

    :param status: page-rendering state (defaults, hints, labels, extras,
        selectcompute, question, extra_scripts).
    :param field: the field object being rendered.
    :param wide: accepted but not read anywhere in this body — TODO confirm
        whether callers rely on it or it is vestigial.
    :param embedded: when True, render compact "embedded" variants with
        title attributes instead of separate labels.
    :return: HTML fragment as a string.
    """
    output = ""
    # Normalize the field's default value: scalars are coerced to unicode,
    # richer values (lists/dicts for checkbox groups) pass through untouched.
    if field.number in status.defaults:
        if type(status.defaults[field.number]) in [str, unicode, int, float]:
            defaultvalue = unicode(status.defaults[field.number])
        else:
            defaultvalue = status.defaults[field.number]
    else:
        defaultvalue = None
    # Hint text becomes the input's placeholder attribute (newlines flattened).
    if field.number in status.hints:
        placeholdertext = ' placeholder=' + json.dumps(unicode(status.hints[field.number].replace('\n', ' ')))
    else:
        placeholdertext = ''
    # Fields hidden behind a show-if condition post under a synthetic name so
    # the real variable is only set when the condition is satisfied.
    if hasattr(field, 'extras') and 'show_if_var' in field.extras and 'show_if_val' in status.extras and hasattr(field, 'saveas'):
        saveas_string = safeid('_field_' + str(field.number))
    else:
        saveas_string = field.saveas
    if embedded:
        extra_class = ' input-embedded'
        extra_checkbox = ' checkbox-embedded'
        extra_radio = ' radio-embedded'
        label_text = strip_quote(to_text(markdown_to_html(status.labels[field.number], trim=False, status=status, strip_newlines=True), dict(), list(), status).strip())
        if label_text != 'no label':
            title_text = ' title="' + label_text + '"'
        else:
            title_text = ''
    else:
        extra_class = ''
        extra_checkbox = ''
        extra_radio = ''
        title_text = ''
    if hasattr(field, 'choicetype'):
        # Choice-based fields: build the (value, label[, selected]) pair list.
        if field.choicetype == 'compute':
            pairlist = list(status.selectcompute[field.number])
        else:
            pairlist = list(field.selections)
        if hasattr(field, 'shuffle') and field.shuffle:
            random.shuffle(pairlist)
        if field.datatype in ['checkboxes', 'object_checkboxes']:
            #if len(pairlist) == 0:
            # return '<input type="hidden" name="' + safeid(from_safeid(saveas_string))+ '" value="None"/>'
            inner_fieldlist = list()
            id_index = 0
            output += '<p class="sr-only">' + word('Checkboxes:') + '</p>'
            for pair in pairlist:
                if pair[0] is not None:
                    # Each checkbox posts under saveas[<b64 of value>] so the
                    # server can reconstruct a dict of chosen values.
                    inner_field = safeid(from_safeid(saveas_string) + "[" + myb64quote(pair[0]) + "]")
                    #sys.stderr.write("I've got a " + repr(pair[1]) + "\n")
                    formatted_item = markdown_to_html(unicode(pair[1]), status=status, trim=True, escape=True)
                    # Pre-check when explicitly flagged or when the default
                    # (scalar, list/set membership, or dict truthiness) matches.
                    if len(pair) > 2 and pair[2]:
                        ischecked = ' checked'
                    elif defaultvalue is None:
                        ischecked = ''
                    elif type(defaultvalue) in (list, set) and unicode(pair[0]) in defaultvalue:
                        ischecked = ' checked'
                    elif type(defaultvalue) is dict and unicode(pair[0]) in defaultvalue and defaultvalue[unicode(pair[0])]:
                        ischecked = ' checked'
                    elif pair[0] is defaultvalue:
                        ischecked = ' checked'
                    elif type(defaultvalue) in [str, unicode, int, bool, float] and unicode(pair[0]) == unicode(defaultvalue):
                        ischecked = ' checked'
                    else:
                        ischecked = ''
                    inner_fieldlist.append('<input alt="' + formatted_item + '" data-labelauty="' + formatted_item + '|' + formatted_item + '" class="to-labelauty checkbox-icon' + extra_checkbox + '"' + title_text + ' id="' + escape_id(saveas_string) + '_' + str(id_index) + '" name="' + inner_field + '" type="checkbox" value="True"' + ischecked + '/>')
                else:
                    # A None value acts as a non-selectable heading row.
                    inner_fieldlist.append('<div>' + markdown_to_html(pair[1], status=status) + '</div>')
                id_index += 1
            output += u''.join(inner_fieldlist)
            if field.datatype in ['object_checkboxes']:
                # Marks the underlying DAList/DADict as gathered on submit.
                output += '<input type="hidden" name="' + safeid(from_safeid(saveas_string) + ".gathered")+ '" value="True"/>'
        elif field.datatype in ['radio', 'object_radio']:
            inner_fieldlist = list()
            id_index = 0
            output += '<p class="sr-only">' + word('Choices:') + '</p>'
            for pair in pairlist:
                if pair[0] is not None:
                    #sys.stderr.write(str(saveas_string) + "\n")
                    formatted_item = markdown_to_html(unicode(pair[1]), status=status, trim=True, escape=True)
                    if (len(pair) > 2 and pair[2]) or (defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float] and unicode(pair[0]) == unicode(defaultvalue)):
                        ischecked = ' checked="checked"'
                    else:
                        ischecked = ''
                    inner_fieldlist.append('<input alt="' + formatted_item + '" data-labelauty="' + formatted_item + '|' + formatted_item + '" class="to-labelauty radio-icon' + extra_radio + '" id="' + escape_id(saveas_string) + '_' + str(id_index) + '" name="' + escape_id(saveas_string) + '" type="radio" value="' + unicode(pair[0]) + '"' + ischecked + '/>')
                else:
                    inner_fieldlist.append('<div>' + markdown_to_html(unicode(pair[1]), status=status) + '</div>')
                id_index += 1
            output += "".join(inner_fieldlist)
        else:
            # Any other choicetype renders as a <select> dropdown.
            if embedded:
                emb_text = 'class="input-embedded" '
                label_text = strip_quote(to_text(markdown_to_html(status.labels[field.number], trim=False, status=status, strip_newlines=True), dict(), list(), status).strip())
                if label_text != 'no label':
                    emb_text += 'title="' + label_text + '" '
            else:
                output += '<p class="sr-only">' + word('Select box') + '</p>'
                emb_text = 'class="form-control" '
            output += '<select ' + emb_text + 'name="' + escape_id(saveas_string) + '" id="' + escape_id(saveas_string) + '" >'
            output += '<option value="">' + word('Select...') + '</option>'
            for pair in pairlist:
                if pair[0] is not None:
                    formatted_item = markdown_to_html(unicode(pair[1]), status=status, trim=True, do_terms=False)
                    output += '<option value="' + unicode(pair[0]) + '"'
                    if (len(pair) > 2 and pair[2]) or (defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float] and unicode(pair[0]) == unicode(defaultvalue)):
                        output += ' selected="selected"'
                    output += '>' + formatted_item + '</option>'
            output += '</select> '
    elif hasattr(field, 'datatype'):
        # Non-choice fields: dispatch on the declared datatype.
        if field.datatype == 'boolean':
            label_text = markdown_to_html(status.labels[field.number], trim=True, status=status, strip_newlines=True, escape=True)
            if hasattr(field, 'inputtype') and field.inputtype in ['yesnoradio', 'noyesradio']:
                output += '<p class="sr-only">' + word('Choices:') + '</p>'
                inner_fieldlist = list()
                id_index = 0
                # field.sign flips which posted value (True/False) maps to
                # the "yes" label (yesno vs. noyes questions).
                if field.sign > 0:
                    for pair in [['True', status.question.yes()], ['False', status.question.no()]]:
                        formatted_item = markdown_to_html(unicode(pair[1]), status=status, trim=True, escape=True)
                        if (len(pair) > 2 and pair[2]) or (defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float] and unicode(pair[0]) == unicode(defaultvalue)):
                            ischecked = ' checked="checked"'
                        else:
                            ischecked = ''
                        inner_fieldlist.append('<input alt="' + formatted_item + '" data-labelauty="' + formatted_item + '|' + formatted_item + '" class="to-labelauty radio-icon' + extra_radio + '" id="' + escape_id(saveas_string) + '_' + str(id_index) + '" name="' + escape_id(saveas_string) + '" type="radio" value="' + unicode(pair[0]) + '"' + ischecked + '/>')
                        id_index += 1
                else:
                    for pair in [['False', status.question.yes()], ['True', status.question.no()]]:
                        formatted_item = markdown_to_html(unicode(pair[1]), status=status, trim=True, escape=True)
                        if (len(pair) > 2 and pair[2]) or (defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float] and unicode(pair[0]) == unicode(defaultvalue)):
                            ischecked = ' checked="checked"'
                        else:
                            ischecked = ''
                        inner_fieldlist.append('<input alt="' + formatted_item + '" data-labelauty="' + formatted_item + '|' + formatted_item + '" class="to-labelauty radio-icon' + extra_radio + '" id="' + escape_id(saveas_string) + '_' + str(id_index) + '" name="' + escape_id(saveas_string) + '" type="radio" value="' + unicode(pair[0]) + '"' + ischecked + '/>')
                        id_index += 1
                output += "".join(inner_fieldlist)
            else:
                # Single checkbox; posted value encodes the sign convention.
                if field.sign > 0:
                    output += '<input alt="' + label_text + '" class="to-labelauty checkbox-icon' + extra_checkbox + '"' + title_text + ' type="checkbox" value="True" data-labelauty="' + label_text + '|' + label_text + '" name="' + escape_id(saveas_string) + '" id="' + escape_id(saveas_string) + '"'
                else:
                    output += '<input alt="' + label_text + '" class="to-labelauty checkbox-icon' + extra_checkbox + '"' + title_text + ' type="checkbox" value="False" data-labelauty="' + label_text + '|' + label_text + '" name="' + escape_id(saveas_string) + '" id="' + escape_id(saveas_string) + '"'
                if defaultvalue:
                    output += ' checked'
                output += '/> '
        elif field.datatype == 'threestate':
            # Like boolean radios but with a third "maybe"/None option.
            inner_fieldlist = list()
            id_index = 0
            output += '<p class="sr-only">' + word('Choices:') + '</p>'
            if field.sign > 0:
                for pair in [['True', status.question.yes()], ['False', status.question.no()], ['None', status.question.maybe()]]:
                    formatted_item = markdown_to_html(unicode(pair[1]), status=status, trim=True, escape=True)
                    if (len(pair) > 2 and pair[2]) or (defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float] and unicode(pair[0]) == unicode(defaultvalue)):
                        ischecked = ' checked="checked"'
                    else:
                        ischecked = ''
                    inner_fieldlist.append('<input alt="' + formatted_item + '" data-labelauty="' + formatted_item + '|' + formatted_item + '" class="to-labelauty radio-icon' + extra_radio + '"' + title_text + ' id="' + escape_id(saveas_string) + '_' + str(id_index) + '" name="' + escape_id(saveas_string) + '" type="radio" value="' + unicode(pair[0]) + '"' + ischecked + '/>')
                    id_index += 1
            else:
                for pair in [['False', status.question.yes()], ['True', status.question.no()], ['None', status.question.maybe()]]:
                    formatted_item = markdown_to_html(unicode(pair[1]), status=status, trim=True, escape=True)
                    if (len(pair) > 2 and pair[2]) or (defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float] and unicode(pair[0]) == unicode(defaultvalue)):
                        ischecked = ' checked="checked"'
                    else:
                        ischecked = ''
                    inner_fieldlist.append('<input alt="' + formatted_item + '" data-labelauty="' + formatted_item + '|' + formatted_item + '" class="to-labelauty radio-icon' + extra_radio + '"' + title_text + ' id="' + escape_id(saveas_string) + '_' + str(id_index) + '" name="' + escape_id(saveas_string) + '" type="radio" value="' + unicode(pair[0]) + '"' + ischecked + '/>')
                    id_index += 1
            output += "".join(inner_fieldlist)
        elif field.datatype in ['file', 'files', 'camera', 'camcorder', 'microphone']:
            if field.datatype == 'files':
                multipleflag = ' multiple'
            else:
                multipleflag = ''
            # Capture hints for mobile browsers; plain file inputs get none.
            if field.datatype == 'camera':
                accept = ' accept="image/*;capture=camera"'
            elif field.datatype == 'camcorder':
                accept = ' accept="video/*;capture=camcorder"'
            elif field.datatype == 'microphone':
                accept = ' accept="audio/*;capture=microphone"'
            else:
                accept = ''
            if embedded:
                output += '<input alt="' + word("You can upload a file here") + '" type="file" class="file file-embedded" name="' + escape_id(saveas_string) + '"' + title_text + ' id="' + escape_id(saveas_string) + '"' + multipleflag + accept + '/>'
            else:
                output += '<input alt="' + word("You can upload a file here") + '" type="file" class="file" data-show-upload="false" data-preview-file-type="text" name="' + escape_id(saveas_string) + '" id="' + escape_id(saveas_string) + '"' + multipleflag + accept + '/>'
            #output += '<div class="fileinput fileinput-new input-group" data-provides="fileinput"><div class="form-control" data-trigger="fileinput"><i class="glyphicon glyphicon-file fileinput-exists"></i><span class="fileinput-filename"></span></div><span class="input-group-addon btn btn-default btn-file"><span class="fileinput-new">' + word('Select file') + '</span><span class="fileinput-exists">' + word('Change') + '</span><input type="file" name="' + escape_id(saveas_string) + '" id="' + escape_id(saveas_string) + '"' + multipleflag + '></span><a href="#" class="input-group-addon btn btn-default fileinput-exists" data-dismiss="fileinput">' + word('Remove') + '</a></div>\n'
        elif field.datatype == 'range':
            # Slider widget; silently renders nothing unless both min and max
            # extras are available for this field.
            ok = True
            for key in ['min', 'max']:
                if not (hasattr(field, 'extras') and key in field.extras and key in status.extras and field.number in status.extras[key]):
                    ok = False
            if ok:
                if defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float]:
                    the_default = ' data-slider-value="' + str(defaultvalue) + '"'
                else:
                    the_default = ''
                if 'step' in field.extras and 'step' in status.extras and field.number in status.extras['step']:
                    the_step = ' data-slider-step="' + str(status.extras['step'][field.number]) + '"'
                else:
                    the_step = ''
                max_string = str(int(status.extras['max'][field.number]))
                min_string = str(int(status.extras['min'][field.number]))
                if embedded:
                    output += '<div class="form-group slider-embedded"' + title_text + '><input alt="' + word('Select a value between') + ' ' + min_string + ' ' + word('and') + ' ' + max_string + '" name="' + escape_id(saveas_string) + '" id="' + escape_id(saveas_string) + '"' + the_default + ' data-slider-max="' + max_string + '" data-slider-min="' + min_string + '"' + the_step + '></div>'
                else:
                    output += '<input alt="' + word('Select a value between') + ' ' + min_string + ' ' + word('and') + ' ' + max_string + '" name="' + escape_id(saveas_string) + '" id="' + escape_id(saveas_string) + '"' + the_default + ' data-slider-max="' + max_string + '" data-slider-min="' + min_string + '"' + the_step + '>'
                # Activate the bootstrap-slider plugin on page load.
                status.extra_scripts.append('<script>$("#' + escape_for_jquery(saveas_string) + '").slider({tooltip: "always"});</script>\n')
        elif field.datatype == 'area':
            output += '<textarea alt="' + word("Input box") + '" class="form-control' + extra_class + '"' + title_text + ' rows="4" name="' + escape_id(saveas_string) + '" id="' + escape_id(saveas_string) + '"' + placeholdertext + '>'
            if defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float]:
                output += defaultvalue
            output += '</textarea>'
        else:
            # Generic <input>; numeric datatypes map onto type="number" and
            # currency additionally gets an input-group with the symbol prefix.
            if defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float]:
                defaultstring = ' value="' + defaultvalue + '"'
            else:
                defaultstring = ''
            input_type = field.datatype
            step_string = ''
            if field.datatype in ['integer', 'float', 'currency', 'number']:
                input_type = 'number'
                if field.datatype == 'integer':
                    step_string = ' step="1"'
                if field.datatype == 'float' or field.datatype == 'number':
                    step_string = ' step="0.01"'
                if field.datatype == 'currency':
                    step_string = ' step="0.01"'
                    output += '<div class="input-group"><span class="input-group-addon" id="addon-' + do_escape_id(saveas_string) + '">' + currency_symbol() + '</span>'
            output += '<input' + defaultstring + placeholdertext + ' alt="' + word("Input box") + '" class="form-control' + extra_class + '"' + title_text + ' type="' + input_type + '"' + step_string + ' name="' + escape_id(saveas_string) + '" id="' + escape_id(saveas_string) + '"'
            if field.datatype == 'currency':
                output += ' aria-describedby="addon-' + do_escape_id(saveas_string) + '"/></div><label style="display: none;" for="' + escape_id(saveas_string) + '" class="help-inline" id="' + escape_id(saveas_string) + '-error"></label>'
            else:
                output += '/>'
    return output
def get_ischecked(pair, defaultvalue):
    """Return the HTML ``checked`` attribute for a choice pair.

    Mirrors the inline selection logic repeated throughout input_for():
    a pair is checked when it carries an explicit truthy third element, or
    when its value matches the (scalar) default.

    Bug fix: the original body was ``return ischecked`` with ``ischecked``
    never assigned, which raised NameError on every call.

    :param pair: [value, label] or [value, label, selected_flag]
    :param defaultvalue: the field's default value, or None
    :return: ' checked="checked"' when the pair should be pre-selected,
        otherwise ''.
    """
    if len(pair) > 2 and pair[2]:
        return ' checked="checked"'
    if defaultvalue is not None and type(defaultvalue) in [str, unicode, int, bool, float] and unicode(pair[0]) == unicode(defaultvalue):
        return ' checked="checked"'
    return ''
def myb64doublequote(text):
    """Base64-encode *text* (UTF-8) and wrap the result in double quotes."""
    encoded = codecs.encode(text.encode('utf8'), 'base64').decode()
    return '"' + encoded.replace('\n', '') + '"'
def myb64quote(text):
    """Base64-encode *text* (UTF-8) and wrap the result in single quotes."""
    encoded = codecs.encode(text.encode('utf8'), 'base64').decode()
    return "'" + encoded.replace('\n', '') + "'"
def indent_by(text, num):
    """Indent every line of *text* by *num* spaces.

    Falsy input yields an empty string; otherwise the result is
    right-stripped and always ends with exactly one newline.
    """
    if not text:
        return ""
    prefix = " " * num
    body = re.sub(r'\n', "\n" + prefix, text).rstrip()
    return prefix + body + "\n"
def safeid(text):
    """Encode *text* as a base64 string with all newlines removed,
    making it safe to use as an HTML id / form-field name."""
    raw = text.encode('utf8')
    return codecs.encode(raw, 'base64').decode().replace('\n', '')
def from_safeid(text):
    """Decode a safeid()-produced base64 value back to the original text."""
    decoded = codecs.decode(text, 'base64')
    return decoded.decode('utf8')
def escape_id(text):
    """Coerce *text* to str for use as an element id (currently a pass-through)."""
    #return re.sub(r'(:|\.|\[|\]|,|=)', r'\\\\\1', text)
    return str(text)
def do_escape_id(text):
    """Backslash-escape the CSS/jQuery selector metacharacters : . [ ] , =."""
    return re.sub(r'([:.\[\],=])', r'\\\1', text)
def escape_for_jquery(text):
    """Escape selector metacharacters with a doubled backslash, suitable for
    embedding inside generated JavaScript source text."""
    return re.sub(r'([:.\[\],=])', r'\\\\\1', text)
def myb64unquote(the_string):
    """Decode a base64 value to UTF-8 text."""
    raw = codecs.decode(the_string, 'base64')
    return raw.decode('utf8')
def strip_quote(the_string):
    """Remove every double-quote character from *the_string*."""
    return the_string.replace('"', '')
def safe_html(the_string):
    """Escape HTML metacharacters so *the_string* can be embedded in markup.

    Bug fix: the original substitutions replaced each character with itself
    ('&' -> '&', '<' -> '<', '>' -> '>'), so the "safe" escaper escaped
    nothing — the entity replacement text had evidently been lost. Restore
    real entity escaping; '&' is handled first so the other replacements
    are not double-escaped.

    :param the_string: raw text to display inside HTML
    :return: the text with &, < and > replaced by their entities.
    """
    the_string = re.sub(r'&', '&amp;', the_string)
    the_string = re.sub(r'<', '&lt;', the_string)
    the_string = re.sub(r'>', '&gt;', the_string)
    return the_string
|
# coding=utf-8
"""
Generates Project Structure
"""
import collections
import dataclasses
import logging
import os
import pathlib
import re
import tempfile
import typing
import arrow
import pgdumplib
from pgdumplib import dump
from pglifecycle import common, constants, parse, pgdump, storage
LOGGER = logging.getLogger(__name__)
# Default PostgreSQL schema assumed when an object has no explicit namespace.
DEFAULT_NAMESPACE = 'public'
# Arrow format string used for timestamps written into generated files.
ISO_FORMAT = 'YYYY-MM-DD HH:mm:ss ZZ'
# Captures the quoted value from pg_dump "SET name = 'value'" statements.
SET_PATTERN = re.compile(r"SET .* = '(?P<value>.*)'")
# File extension for generated project definition files.
YAML_EXTENSION = 'yaml'
def _filter(entries: typing.List[dump.Entry], desc: str,
            parent_id: int = None) -> typing.Generator[dump.Entry, None, None]:
    """Yield the entries whose ``desc`` matches.

    When ``parent_id`` is given, only entries listing that id among their
    dependencies are yielded; otherwise every matching entry is yielded.
    """
    for entry in entries:
        if entry.desc != desc:
            continue
        if parent_id is None or parent_id in entry.dependencies:
            yield entry
def _function_filename(tag, filenames):
    """Build a unique filename for a function entity.

    Overloaded functions share a base name, so the comma count of the
    signature and, if needed, an auto-incrementing suffix disambiguate.

    :param str tag: The entity tag
    :param set filenames: Already used filenames
    :rtype: str
    """
    base = tag.split('(')[0]
    parts = tag.count(',')
    filename = '{}-{}'.format(base, parts) if parts else base
    counter = 1
    while filename in filenames:
        if parts:
            filename = '{}-{}_{}'.format(base, parts, counter)
        else:
            filename = '{}_{}'.format(base, counter)
        counter += 1
    LOGGER.debug('Returning filename: %r', filename)
    return filename
# Object types whose on-disk filenames need special handling: functions and
# procedures can be overloaded, so _function_filename() disambiguates by
# argument count and an auto-incrementing suffix.
_FILENAME_MAP = {
    constants.FUNCTION: _function_filename,
    constants.PROCEDURE: _function_filename
}
def _prettify(sql: str) -> str:
return sql.strip().rstrip(';')
def _remove_null_values(values: dict) -> dict:
for key, value in list(values.items()):
if value is None or \
isinstance(value, (dict, list, str)) and not value:
del values[key]
elif isinstance(value, dict):
values[key] = _remove_null_values(value)
if not value:
del values[key]
elif isinstance(value, list):
values[key] = [_remove_null_values(v)
if isinstance(v, dict) else v
for v in value]
return values
class Generate:
"""Generate Project Structure"""
    def __init__(self, args):
        """Capture CLI args and prepare generation state.

        :param args: parsed command-line arguments (dest, dump, force,
            ignore, extract options, etc.)
        """
        self.args = args
        self.ignore = set()  # file paths to skip, loaded from args.ignore in run()
        self.project_path = pathlib.Path(args.dest)
        self.tempdir = tempfile.TemporaryDirectory()
        self.dump = None  # pgdumplib dump, loaded in run()
        # Use the provided dump path, or a pid-scoped file in the temp dir.
        self.dump_path = args.dump or pathlib.Path(self.tempdir.name) / \
            'pg-lifecycle-{}'.format(os.getpid())
        self.files_created = []  # paths written, used for duplicate-name checks
        self._processed = set()  # dump_ids handled directly by this class
        self._roles = {}
        self.structure = None  # Structure helper, created in run()
    @property
    def processed(self) -> set:
        """Returns the set of processed dump_ids"""
        # Union of ids this class marked done and ids consumed by the
        # Structure formatter while rendering entries.
        return self._processed | self.structure.processed
    def run(self) -> typing.NoReturn:
        """Core logic for generating the project structure.

        Loads (or first extracts) a pg_dump archive, then writes one file
        per database object grouped by type, reporting any dump entries
        that were not handled.
        """
        # Refuse to clobber an existing project unless --force was given.
        if self.project_path.exists() and not self.args.force:
            common.exit_application('{} already exists'.format(
                self.project_path), 3)
        LOGGER.info('Generating project in %s', self.project_path)
        LOGGER.debug('args: %r', self.args)
        # Optional ignore file: one path per line, skipped during generation.
        if self.args.ignore:
            with open(self.args.ignore, 'r') as handle:
                for line in handle:
                    self.ignore.add(line.strip())
            LOGGER.info('Ignoring %i files', len(self.ignore))
        # Optionally run pg_dump against a live database first.
        if self.args.extract:
            LOGGER.info('Dumping schema from postgresql://%s:%s/%s',
                        self.args.host, self.args.port, self.args.dbname)
            pgdump.dump(self.args, self.dump_path)
        LOGGER.debug('Loading dump from %s', self.dump_path)
        self.dump = pgdumplib.load(self.dump_path)
        self.structure = Structure(self.dump.entries)
        self._create_directories()
        self._create_project_file()
        if self.args.extract_roles:
            self._extract_roles()
        self._process_acls()
        self._create_role_files()
        self._create_group_files()
        self._create_user_files()
        # One call per supported object type; each marks its entries processed.
        self._create_namespace_files(constants.CAST)
        self._create_namespace_files(constants.COLLATION)
        self._create_namespace_files(constants.CONVERSION)
        self._create_files(constants.DOMAIN)
        self._create_files(constants.EVENT_TRIGGER)
        self._create_files(constants.FOREIGN_DATA_WRAPPER)
        self._create_files(constants.FUNCTION)
        self._create_namespace_files(constants.MATERIALIZED_VIEW)
        self._create_operator_files()
        self._create_namespace_files(constants.PROCEDURE)
        self._create_namespace_files(constants.PUBLICATION)
        self._create_schema_files()
        self._create_files(constants.SEQUENCE)
        self._create_namespace_files(constants.SUBSCRIPTION)
        self._create_files(constants.SERVER)
        self._create_files(constants.TABLE)
        self._create_namespace_files(constants.TABLESPACE)
        self._create_namespace_files(constants.TYPE)
        self._create_namespace_files(constants.TEXT_SEARCH_CONFIGURATION)
        self._create_namespace_files(constants.TEXT_SEARCH_DICTIONARY)
        self._create_files(constants.USER_MAPPING)
        self._create_files(constants.VIEW)
        # Report anything left unhandled, grouped by section and description.
        remaining = collections.Counter()
        for entry in [e for e in self.dump.entries
                      if e.dump_id not in self.processed
                      and e.desc != constants.SEARCHPATH]:
            remaining['{}:{}'.format(entry.section, entry.desc)] += 1
        for key in sorted(remaining.keys(), reverse=True):
            LOGGER.info('Remaining %s: %i', key, remaining[key])
        # Optionally dump the unhandled entries for inspection.
        if self.args.save_remaining:
            LOGGER.debug('Writing remaining.yaml')
            with open(self.project_path / 'remaining.yaml', 'w') as handle:
                storage.yaml_dump(handle, [
                    dataclasses.asdict(e) for e in self.dump.entries
                    if e.dump_id not in self.processed])
        # Post-generation tidy-up of placeholder files and empty directories.
        if self.args.gitkeep:
            storage.remove_unneeded_gitkeeps(self.project_path)
        if self.args.remove_empty_dirs:
            storage.remove_empty_directories(self.project_path)
def _create_directories(self) -> typing.NoReturn:
LOGGER.debug('Creating %s', self.project_path)
os.makedirs(self.project_path, exist_ok=self.args.force)
for value in constants.PATHS.values():
subdir_path = self.project_path / value
try:
os.makedirs(subdir_path, exist_ok=self.args.force)
except FileExistsError:
pass
if self.args.gitkeep:
storage.create_gitkeep(subdir_path)
def _create_project_file(self) -> typing.NoReturn:
"""Generates project.yaml"""
LOGGER.debug('Creating the project file (project.yaml)')
temp = [e for e in self.dump.entries if e.desc == constants.DATABASE]
self._mark_processed(temp[0].dump_id)
comments = {
'pg_dump version': self.dump.dump_version,
'postgres version': self.dump.server_version,
'dumped at': arrow.get(self.dump.timestamp).format(ISO_FORMAT)
}
project = {
'name': self.dump.dbname
}
for entry in self.dump.entries:
if entry.defn.startswith('SET '):
self._mark_processed(entry.dump_id)
match = SET_PATTERN.match(entry.defn)
project[entry.tag.lower()] = match.group(1)
project.update({
'extensions': self._find_extensions(),
'languages': self._find_languages(),
'shell_types': self._find_shell_types()
})
project = _remove_null_values(project)
self.files_created.append(
storage.save(self.project_path, 'project.yaml', constants.DATABASE,
self.dump.dbname, project, comments))
def _create_files(self, object_type: str) -> typing.NoReturn:
"""Generate the schema files for the given object type"""
LOGGER.info('Creating %s files', object_type.lower())
formatter = getattr(self.structure,
object_type.lower().replace(' ', '_'),
self.structure.generic)
for entry in _filter(self.dump.entries, object_type):
self._mark_processed(entry.dump_id)
data = formatter(entry)
data = self._remove_empty_values(data)
filename = None
if entry.desc in _FILENAME_MAP:
filename = _FILENAME_MAP[entry.desc](
entry.tag, self.files_created)
file_path = self._object_path(entry, filename)
if str(file_path) in self.ignore:
LOGGER.debug('Skipping %s', file_path)
continue
self.files_created.append(
storage.save(self.project_path, file_path, entry.desc,
entry.tag, data))
def _create_group_files(self) -> typing.NoReturn:
"""Generate the group files based upon the collected information"""
LOGGER.info('Creating group files')
for role in [
r for r in self._roles.values() if r['type'] == constants.GROUP
]:
data = {
'name': role['role'],
'grants': {
'{}s'.format(k.lower()): v
for k, v in role['grant'].items() if v
},
'revocations': {
'{}s'.format(k.lower()): v
for k, v in role['revoke'].items() if v
},
'options': role.get('options'),
'settings': role.get('settings')
}
file_path = constants.PATHS[constants.GROUP] / '{}.{}'.format(
role['role'], YAML_EXTENSION)
if str(file_path) in self.ignore:
LOGGER.debug('Skipping %s', file_path)
continue
self.files_created.append(
storage.save(self.project_path, file_path,
constants.GROUP, role['role'], data))
def _create_namespace_files(self, object_type: str) -> typing.NoReturn:
"""Generate the schema files for the given object type"""
LOGGER.info('Creating %s files', object_type.lower())
formatter = getattr(self.structure,
object_type.lower().replace(' ', '_'),
self.structure.generic)
namespace = {}
for entry in _filter(self.dump.entries, object_type):
self._mark_processed(entry.dump_id)
if entry.namespace not in namespace.keys():
namespace[entry.namespace] = []
data = formatter(entry)
data = self._remove_empty_values(data)
namespace[entry.namespace].append(data)
for value in namespace.keys():
key = '{}S'.format(object_type)
file_path = constants.PATHS[object_type] / '{}.{}'.format(
value, YAML_EXTENSION)
if str(file_path) in self.ignore:
LOGGER.debug('Skipping %s', file_path)
continue
self.files_created.append(
storage.save(
self.project_path, file_path, key, value, {
'schema': value,
key.lower(): namespace[value]}))
def _create_operator_files(self) -> typing.NoReturn:
"""Generate the schema files for operators"""
LOGGER.info('Creating operator files')
namespace = {}
for obj_type in {constants.OPERATOR, constants.OPERATOR_CLASS}:
for entry in _filter(self.dump.entries, obj_type):
self._mark_processed(entry.dump_id)
if entry.namespace not in namespace.keys():
namespace[entry.namespace] = []
data = self.structure.operator(entry)
data = self._remove_empty_values(data)
namespace[entry.namespace].append(data)
for value in namespace.keys():
for offset, row in namespace[value]:
if 'schema' in row.keys():
del namespace[value][offset]['schema']
self.files_created.append(
storage.save(
self.project_path,
constants.PATHS[constants.OPERATOR] / '{}.{}'.format(
value, YAML_EXTENSION), '{}S'.format(
constants.OPERATOR), value, {
'operators': namespace[value]}))
def _create_role_files(self) -> typing.NoReturn:
"""Generate the role files based upon the collected information"""
LOGGER.info('Creating role file')
for role in [
r for r in self._roles.values()
if not r.get('password') and r['type'] == constants.ROLE
]:
data = {
'name': role['role'],
'grants': {
'{}s'.format(k.lower()): v
for k, v in role['grant'].items() if v
},
'revocations': {
'{}s'.format(k.lower()): v
for k, v in role['revoke'].items() if v
},
'options': role.get('options'),
'settings': role.get('settings')
}
self.files_created.append(
storage.save(self.project_path,
constants.PATHS[constants.ROLE] / '{}.{}'.format(
role['role'], YAML_EXTENSION), constants.ROLE,
role['role'], data))
def _create_schema_files(self) -> typing.NoReturn:
"""Generate the schema files for the given object type"""
LOGGER.info('Creating schemata files')
for entry in _filter(self.dump.entries, constants.SCHEMA):
self._mark_processed(entry.dump_id)
data = self.structure.schema(entry)
data = self._remove_empty_values(data)
self.files_created.append(
storage.save(
self.project_path,
constants.PATHS[constants.SCHEMA] / '{}.{}'.format(
entry.tag, YAML_EXTENSION),
entry.desc, entry.tag, data))
def _create_user_files(self) -> typing.NoReturn:
"""Generate the role files based upon the collected information"""
LOGGER.info('Creating user files')
for role in [r for r in self._roles.values()
if r.get('password') or r['type'] == constants.USER]:
data = {
'name': role['role'],
'password': role['password'],
'grants': {
'{}s'.format(k.lower()): v
for k, v in role['grant'].items() if v
},
'revocations': {
'{}s'.format(k.lower()): v
for k, v in role['revoke'].items() if v
},
'options': role.get('options'),
'settings': role.get('settings')
}
self.files_created.append(
storage.save(self.project_path,
constants.PATHS[constants.USER] / '{}.{}'.format(
role['role'], YAML_EXTENSION), constants.USER,
role['role'], data))
@staticmethod
def _empty_grant() -> dict:
return {v: {} for v in constants.GRANT_KEYS.values()}
def _empty_role(self) -> dict:
return {
'role': None,
'grant': self._empty_grant(),
'revoke': self._empty_grant(),
'options': [],
'settings': []
}
def _extract_roles(self) -> typing.NoReturn:
LOGGER.debug('Dumping roles')
dump_path = pathlib.Path(self.tempdir.name) / \
'pg-lifecycle-{}-roles'.format(os.getpid())
pgdump.dump_roles(self.args, dump_path)
with open(dump_path) as handle:
for line in handle.readlines():
line = line.rstrip()
if not line or line.startswith('--') or line.startswith('SET'):
continue
parsed = parse.sql(line)
if parsed['role'] not in self._roles:
self._roles[parsed['role']] = self._empty_role()
if 'options' in parsed and parsed['options']:
self._roles[parsed['role']]['options'] += parsed['options']
del parsed['options']
if 'settings' in parsed and parsed['settings']:
self._roles[parsed['role']]['settings'].append(
parsed['settings'])
del parsed['settings']
if 'grant' in parsed and parsed['grant']:
self._roles[parsed['role']]['grant'][
constants.ROLE].append(parsed['grant'])
del parsed['grant']
elif 'revoke' in parsed and parsed['revoke']:
self._roles[parsed['role']]['revoke'][
constants.ROLE].append(parsed['revoke'])
del parsed['revoke']
for key, value in [(k, v) for k, v in parsed.items()
if v is not None]:
if key not in self._roles[parsed['role']].keys() or \
not self._roles[parsed['role']][key]:
self._roles[parsed['role']][key] = value
def _find_extensions(self) -> list:
extensions = []
for entry in _filter(self.dump.entries, constants.EXTENSION):
self._mark_processed(entry.dump_id)
# parsed = parse.sql(entry.defn) @ TODO work through this
extensions.append(
{
'name': entry.tag,
'schema': entry.namespace
}
)
return extensions
def _find_languages(self) -> list:
languages = []
for entry in _filter(self.dump.entries, constants.PROCEDURAL_LANGUAGE):
languages.append(parse.sql(entry.defn))
self._mark_processed(entry.dump_id)
return languages
def _find_shell_types(self) -> list:
values = []
for entry in _filter(self.dump.entries, constants.SHELL_TYPE):
self._mark_processed(entry.dump_id)
values.append(entry.defn.strip())
return values
def _mark_processed(self, dump_id: int) -> typing.NoReturn:
self._processed.add(dump_id)
@staticmethod
def _object_path(entry: dump.Entry, name_override: str = None) -> str:
return constants.PATHS[entry.desc] / entry.namespace / '{}.{}'.format(
name_override or entry.tag, YAML_EXTENSION)
def _process_acls(self) -> typing.NoReturn:
def _maybe_ignore_revoke(acls: list) -> list:
remove = []
revokes = [a for a in acls if a['type'] == constants.REVOKE]
for ga in [a for a in acls if a['type'] == constants.GRANT]:
grant = dict(ga)
del grant['type']
for ra in revokes:
revoke = dict(ra)
del revoke['type']
if revoke == grant:
remove.append(ra)
for record in remove:
acls.remove(record)
return acls
for entry in _filter(self.dump.entries, constants.ACL):
for acl in _maybe_ignore_revoke(parse.sql(entry.defn)):
if acl['to'] not in self._roles:
self._roles[acl['to']] = self._empty_role()
self._roles[acl['to']]['role'] = acl['to']
op = acl['type'].lower()
subj = acl['subject']['type'].replace('_', ' ')
if acl['subject']['type'] in [
constants.DATABASE, constants.SCHEMA,
constants.SEQUENCE, constants.TABLE
]:
self._roles[acl['to']][op][subj][acl['subject'][
'name']] = acl['privileges']
elif acl['subject']['type'] == constants.FUNCTION:
if not isinstance(acl['subject']['name']['args'], list):
name = '{}({})'.format(
'.'.join(acl['subject']['name']['name']),
acl['subject']['name']['args'])
else:
name = '{}({})'.format('.'.join(
acl['subject']['name']['name']),
', '.join(acl['subject']['name']['args']))
self._roles[acl['to']][op][subj][name] = acl['privileges']
else:
raise ValueError('Unsupported ACL: {!r}'.format(acl))
self._mark_processed(entry.dump_id)
def _remove_empty_values(self, data: typing.Dict[str, typing.Any]) \
-> dict:
"""Remove keys from a dict that are empty or null and values
where it should be omitted due to cli args
:param dict data: The dict to remove empty values from
"""
for key, value in list(data.items()):
if ((not value and not isinstance(value, int))
or value is None
or (key == 'owner' and self.args.no_owner)
or (key == 'tablespace' and self.args.no_tablespaces)
or (key == 'security label'
and self.args.no_security_labels)):
del data[key]
if isinstance(value, dict):
data[key] = self._remove_empty_values(value)
elif isinstance(value, list):
data[key] = [self._remove_empty_values(i)
if isinstance(i, dict) else i for i in value]
return _remove_null_values(data)
class Structure:
    """Builds SQL-based data structures from pg_dump entries.

    Fix: :meth:`_resolve_dependencies` now returns a copy of the cached
    list. Previously it returned the cached list object itself, which
    :meth:`_find_children` mutated via ``deps.remove(...)``, silently
    corrupting the cache for later lookups with the same key.
    """

    def __init__(self, entries: list):
        # Maps a CSV of dependency dump_ids to the resolved list
        self.dependency_cache = {}
        self.entries = entries
        self.processed = set()

    def generic(self, entry: dump.Entry) -> dict:
        """Return a data structure for the entry"""
        self._mark_processed(entry.dump_id)
        return {
            'name': entry.tag,
            'schema': entry.namespace,
            'tablespace': entry.tablespace,
            'owner': entry.owner,
            'comment': self._find_comment(entry),
            'sql': _prettify(entry.defn),
            'dependencies': self._resolve_dependencies(entry.dependencies),
            'acls': self._find_acls(entry)
        }

    def operator(self, entry: dump.Entry) -> dict:
        """Return a data structure for the entry"""
        self._mark_processed(entry.dump_id)
        return {
            'type': entry.desc,
            'owner': entry.owner,
            'comment': self._find_comment(entry),
            'sql': _prettify(entry.defn),
            'dependencies': self._resolve_dependencies(entry.dependencies)
        }

    def schema(self, entry: dump.Entry) -> dict:
        """Return a data structure for the entry"""
        self._mark_processed(entry.dump_id)
        return {
            'tablespace': entry.tablespace,
            'name': entry.tag,
            'owner': entry.owner,
            'comment': self._find_comment(entry),
            'sql': _prettify(entry.defn),
            'acls': self._find_acls(entry)
        }

    def sequence(self, entry: dump.Entry) -> dict:
        """Return a data structure for the entry, resolving the
        ``OWNED BY`` child into an ``owned_by`` key."""
        value = self.generic(entry)
        children = self._find_children(entry, constants.SEQUENCE_OWNED_BY)
        for child in children:
            parsed = parse.sql(child['sql'])
            if parsed.get('options', {}).get('name') == 'owned_by':
                value['owned_by'] = '.'.join(parsed['options']['arg'])
            else:
                LOGGER.error('Unsupported seq child: %r', child)
                raise RuntimeError
        return value

    def server(self, entry: dump.Entry) -> dict:
        """Return a data structure for the entry"""
        self._mark_processed(entry.dump_id)
        return {
            'tablespace': entry.tablespace,
            'name': entry.tag,
            'owner': entry.owner,
            'comment': self._find_comment(entry),
            'sql': _prettify(entry.defn),
            'dependencies': self._resolve_dependencies(entry.dependencies),
            'acls': self._find_acls(entry)
        }

    def table(self, entry: dump.Entry) -> dict:
        """Return a data structure for the entry, including all of its
        child objects (constraints, indexes, rules, triggers, ...)."""
        self._mark_processed(entry.dump_id)
        return {
            'schema': entry.namespace,
            'name': entry.tag,
            'tablespace': entry.tablespace,
            'owner': entry.owner,
            'comment': self._find_comment(entry),
            'dependencies': self._resolve_dependencies(entry.dependencies),
            'sql': _prettify(entry.defn),
            'comments': self._find_column_comments(entry),
            'defaults': self._find_children(entry, constants.DEFAULT),
            'check constraints': self._find_children(
                entry, constants.CHECK_CONSTRAINT),
            'constraints': self._find_children(entry, constants.CONSTRAINT),
            'foreign keys': self._find_children(
                entry, constants.FK_CONSTRAINT),
            'indexes': self._find_children(entry, constants.INDEX),
            'rules': self._find_children(entry, constants.RULE),
            'triggers': self._find_children(entry, constants.TRIGGER),
            'acls': self._find_acls(entry)
        }

    def view(self, entry: dump.Entry) -> dict:
        """Return a data structure for the entry"""
        self._mark_processed(entry.dump_id)
        return {
            'schema': entry.namespace,
            'name': entry.tag,
            'tablespace': entry.tablespace,
            'owner': entry.owner,
            'comment': self._find_comment(entry),
            'sql': _prettify(entry.defn),
            'comments': self._find_column_comments(entry),
            'dependencies': self._resolve_dependencies(entry.dependencies),
            'rules': self._find_children(entry, constants.RULE),
            'triggers': self._find_children(entry, constants.TRIGGER),
            'acls': self._find_acls(entry)
        }

    def _find_acls(self, parent: dump.Entry) -> list:
        """Return the ACL statements attached to the parent entry, one per
        line with trailing semicolons stripped."""
        acls = []
        for entry in _filter(self.entries, constants.ACL, parent.dump_id):
            if entry.tag.startswith(parent.tag):
                self._mark_processed(entry.dump_id)
                for line in entry.defn.splitlines(False):
                    if line:
                        acls.append(line.rstrip(';'))
        return acls

    def _find_children(self, parent: dump.Entry, entry_type) -> list:
        """Return child entries of ``entry_type`` belonging to ``parent``,
        matched by tag prefix or by the relation named in their SQL."""
        children = []
        parent_name = self._object_name(parent)
        ignore = {parent.desc: parent_name}
        for entry in _filter(self.entries, entry_type, parent.dump_id):
            add_child = False
            if entry.tag.startswith(parent.tag):
                add_child = True
            else:
                queries = parse.sql(entry.defn)
                if not isinstance(queries, list):
                    queries = [queries]
                for parsed in queries:
                    LOGGER.debug('Parsed: %r', parsed)
                    parsed_child = parsed.get('relation')
                    if parsed_child and '.' not in parsed_child:
                        # Qualify bare relation names with the entry's schema
                        parsed_child = '{}.{}'.format(entry.namespace,
                                                      parsed_child)
                    LOGGER.debug('Checking %r against %r', parsed_child,
                                 parent_name)
                    if parsed_child == parent_name:
                        add_child = True
                        break
                    elif parsed_child is None:
                        raise RuntimeError
            if add_child:
                LOGGER.debug('Adding %s child %s to %s - %r', entry_type,
                             self._object_name(entry), parent_name, entry)
                self._mark_processed(entry.dump_id)
                # Safe to mutate: _resolve_dependencies returns a copy
                deps = self._resolve_dependencies(entry.dependencies)
                try:
                    deps.remove(ignore)
                except ValueError:
                    pass
                child = {
                    'name': entry.tag,
                    'schema': entry.namespace,
                    'owner': entry.owner,
                    'tablespace': entry.tablespace,
                    'comment': self._find_comment(entry),
                    'sql': _prettify(entry.defn),
                    'dependencies': deps,
                    # NOTE(review): recursing with constants.ACL here looks
                    # like it may have been intended as _find_acls — confirm
                    'acls': self._find_children(entry, constants.ACL)
                }
                child = _remove_null_values(child)
                children.append(child)
        return children

    def _find_column_comments(self, parent: dump.Entry) -> list:
        """Return prettified COMMENT ON COLUMN statements for the parent."""
        comments = []
        for entry in _filter(self.entries, constants.COMMENT, parent.dump_id):
            if entry.tag.startswith('COLUMN'):
                self._mark_processed(entry.dump_id)
                comments.append(_prettify(entry.defn))
        return comments

    def _find_comment(self, parent: dump.Entry) -> typing.Optional[str]:
        """Return the comment text for the parent entry, if any."""
        parent_name = parent.tag
        if '(' in parent_name:
            # Strip function signatures down to the bare name
            parent_name = parent_name[:parent_name.find('(')]
        if parent.desc == constants.TRIGGER:
            expectation = 'ON {}'.format(parent_name.split(' ')[0])
        else:
            expectation = '{} {}'.format(parent.desc, parent_name)
        for entry in _filter(self.entries, constants.COMMENT, parent.dump_id):
            LOGGER.debug('Expectation: %r / %r', expectation, entry.tag)
            if ((parent.desc == constants.TRIGGER and entry.tag.startswith(
                    constants.TRIGGER) and entry.tag.endswith(expectation))
                    or entry.tag.startswith(expectation)):
                LOGGER.debug('Comment matches expectation (%r): %r',
                             expectation, entry.dump_id)
                self._mark_processed(entry.dump_id)
                parsed = parse.sql(entry.defn)
                return parsed['comment']
        return None

    def _mark_processed(self, dump_id: int) -> None:
        """Record that the dump entry has been handled (annotation fixed
        from ``typing.NoReturn`` to ``None``)."""
        self.processed.add(dump_id)

    @staticmethod
    def _object_name(value: dump.Entry) -> str:
        """Return the schema-qualified name of the entry."""
        if not value.namespace:
            return value.tag
        return '{}.{}'.format(value.namespace, value.tag)

    def _resolve_dependencies(self, dependencies: list) -> list:
        """Resolve the dependencies to a list of dictionaries describing
        the dependency types

        :param list dependencies: List of entry dependency dump_ids
        :rtype: list

        """
        key = ','.join(str(d) for d in dependencies)
        if key in self.dependency_cache:
            # Return a copy so callers cannot mutate the cached value
            return list(self.dependency_cache[key])
        LOGGER.debug('Resolving dependencies: %r', dependencies)
        values = []
        for entry in [e for e in self.entries if e.dump_id in dependencies]:
            if entry.desc == constants.SCHEMA:
                continue
            values.append({entry.desc: self._object_name(entry)})
        self.dependency_cache[key] = values
        if len(self.dependency_cache.keys()) > 1024:
            # Evict the oldest entry to bound the cache size
            del self.dependency_cache[list(self.dependency_cache.keys())[0]]
        return list(self.dependency_cache[key])
|
function showNewOutgoingTaskForm() {
    // Fetch the outgoing-task form markup and display it in a modal dialog.
    sendAjax('get', '/user/get-outgoing-form', function (data) {
        const form = $(data['form']);
        console.log(form);
        const modal = makeModal('Создание новой задачи');
        modal.find('div.modal-body').append(form);
        normalReload();
    });
}
function handleFilterResults() {
    // Keep filter dropdowns open while options are toggled; submit the
    // matching form once a dropdown actually closes.
    $('.dropdown-menu li').on('click', function (e) {
        e.cancelBubble = true;
        e.stopPropagation();
    });
    $('.filter-view').on('hidden.bs.dropdown', function () {
        $('form#listStyleForm').submit();
    });
    $('.incoming-filter-view').on('hidden.bs.dropdown', function () {
        $('form#incomingListStyleForm').submit();
    });
}
function handleOrderSelect() {
    // Read the stored ordering cookie (currently unused) and watch the
    // order select for changes.
    const sortCookie = getCookie('outgoingOrderBy');
    const orderSelect = $('#orderBySelect');
    orderSelect.on('change', function () {
        // TODO: send a request to persist the selected sort order
    });
}
$(function () {
    // Page bootstrap: wire up navigation, AJAX helpers and filters.
    enableTabNavigation();
    handleAjaxActivators();
    // creating modal with form for create new ticket
    $('#createTaskBtn').on('click.createTask', function () {
        showNewOutgoingTaskForm();
    });
    handleOrderSelect();
    handleFilterResults();
});
|
#!/usr/bin/env python3
# Copyright (c) 2017-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the RPC call related to the uptime command.
Test corresponds to code in rpc/server.cpp.
"""
import time
from test_framework.test_framework import PocketcoinTestFramework
class UptimeTest(PocketcoinTestFramework):
    """Verify the uptime RPC reports at least the mocked elapsed time."""

    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True

    def run_test(self):
        self._test_uptime()

    def _test_uptime(self):
        wait_time = 10
        node = self.nodes[0]
        # Advance the node's clock so uptime appears to have elapsed
        node.setmocktime(int(time.time() + wait_time))
        assert node.uptime() >= wait_time
# Allow the test to be executed directly as a script
if __name__ == '__main__':
    UptimeTest().main()
|
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
import time
import logging
import chromaprint
from contextlib import closing
from sqlalchemy import sql
from acoustid import tables as schema, const
from acoustid.indexclient import IndexClientError
logger = logging.getLogger(__name__)

# Fingerprint encoding version this module accepts (see decode_fingerprint)
FINGERPRINT_VERSION = 1

# (part_start, part_length) slices of the extracted query used by the
# two-pass part search in lookup_fingerprint
PARTS = ((1, 20), (21, 100))

# Scores candidate fingerprints with acoustid_compare2, restricted to rows
# of similar length whose extracted query overlaps the given part slice;
# rows at or below min_score are dropped
PART_SEARCH_SQL = """
SELECT f.id, f.track_id, t.gid AS track_gid, score FROM (
SELECT id, track_id, acoustid_compare2(fingerprint, query, %(max_offset)s) AS score
FROM fingerprint, (SELECT %(fp)s::int4[] AS query) q
WHERE
length BETWEEN %(length)s - %(max_length_diff)s AND %(length)s + %(max_length_diff)s AND
subarray(acoustid_extract_query(query), %(part_start)s, %(part_length)s) && acoustid_extract_query(fingerprint)
) f JOIN track t ON f.track_id=t.id WHERE f.score > %(min_score)s ORDER BY f.score DESC, f.id
"""
def decode_fingerprint(fingerprint_string):
    """Decode a compressed and base64-encoded fingerprint.

    Returns the decoded fingerprint when its encoded version matches
    FINGERPRINT_VERSION, otherwise None. The final ``return None`` makes
    the previously implicit fallthrough explicit.
    """
    fingerprint, version = chromaprint.decode_fingerprint(fingerprint_string)
    if version == FINGERPRINT_VERSION:
        return fingerprint
    return None
def lookup_fingerprint(conn, fp, length, good_enough_score, min_score, fast=False, max_offset=0):
    """Search for a fingerprint in the database

    Runs PART_SEARCH_SQL once per slice in PARTS, accumulating matching
    rows, and stops before the next part once the best score seen exceeds
    ``good_enough_score``.

    NOTE(review): the ``fast`` parameter is accepted but never used in
    this function — confirm whether it should short-circuit the search.
    """
    matched = []
    best_score = 0.0
    for part_start, part_length in PARTS:
        params = dict(fp=fp, length=length, part_start=part_start,
                      part_length=part_length, max_length_diff=const.FINGERPRINT_MAX_LENGTH_DIFF,
                      min_score=min_score, max_offset=max_offset)
        with closing(conn.execute(PART_SEARCH_SQL, params)) as result:
            for row in result:
                matched.append(row)
                if row['score'] >= best_score:
                    best_score = row['score']
        if best_score > good_enough_score:
            break
    return matched
class FingerprintSearcher(object):
    """Looks up fingerprints via the index server and/or the database."""

    def __init__(self, db, idx=None, fast=True):
        # db: database connection; idx: optional index client pool/factory
        self.db = db
        self.idx = idx
        # Score/length/offset limits come from application-wide constants
        self.min_score = const.TRACK_GROUP_MERGE_THRESHOLD
        self.max_length_diff = const.FINGERPRINT_MAX_LENGTH_DIFF
        self.max_offset = const.TRACK_MAX_OFFSET
        # When True, search() does not fall back to the database scan
        self.fast = fast

    def _create_search_query(self, fp, length, condition):
        """Build the SELECT that scores candidates with acoustid_compare2
        and joins in the owning track, ordered by descending score."""
        # construct the subquery
        f_columns = [
            schema.fingerprint.c.id,
            schema.fingerprint.c.track_id,
            sql.func.acoustid_compare2(schema.fingerprint.c.fingerprint, fp,
                                       self.max_offset).label('score'),
        ]
        f_where = sql.and_(
            condition,
            schema.fingerprint.c.length.between(length - self.max_length_diff,
                                                length + self.max_length_diff))
        f = sql.select(f_columns, f_where).alias('f')
        # construct the main query
        columns = [f.c.id, f.c.track_id, schema.track.c.gid.label('track_gid'), f.c.score]
        src = f.join(schema.track, schema.track.c.id == f.c.track_id)
        return sql.select(columns, f.c.score > self.min_score, src,
                          order_by=[f.c.score.desc(), f.c.id])

    def _search_index(self, fp, length):
        # index search
        fp_query = self.db.execute(sql.select([sql.func.acoustid_extract_query(fp)])).scalar()
        if not fp_query:
            return []
        with closing(self.idx.connect()) as idx:
            results = idx.search(fp_query)
        if not results:
            return []
        min_score = results[0].score * 0.1  # at least 10% of the top score
        candidate_ids = [r.id for r in results if r.score > min_score]
        if not candidate_ids:
            return []
        # construct the query
        condition = schema.fingerprint.c.id.in_(candidate_ids)
        query = self._create_search_query(fp, length, condition)
        # database scoring
        matches = self.db.execute(query).fetchall()
        return matches

    def _search_database(self, fp, length, min_fp_id):
        # construct the query
        condition = sql.func.acoustid_extract_query(schema.fingerprint.c.fingerprint).op('&&')(sql.func.acoustid_extract_query(fp))
        if min_fp_id:
            # Only scan fingerprints not yet covered by the index
            condition = sql.and_(condition, schema.fingerprint.c.id > min_fp_id)
        query = self._create_search_query(fp, length, condition)
        # database scoring
        matches = self.db.execute(query).fetchall()
        return matches

    def _get_min_indexed_fp_id(self):
        # Highest fingerprint id the index knows about; 0 when no index
        if self.idx is None:
            return 0
        with closing(self.idx.connect()) as idx:
            return int(idx.get_attribute('max_document_id') or '0')

    def search(self, fp, length):
        """Search via the index first; unless ``fast``, fall back to a
        database scan of fingerprints the index has not seen."""
        min_fp_id = 0 if self.idx is None or self.fast else self._get_min_indexed_fp_id()
        matches = None
        if self.idx is not None:
            try:
                matches = self._search_index(fp, length)
            except IndexClientError:
                # Index server failures degrade to the database path below
                logger.exception("Index search error")
                matches = None
        if not self.fast and not matches:
            matches = self._search_database(fp, length, min_fp_id)
        return matches or []
def insert_fingerprint(conn, data, submission_id=None, source_id=None):
    """Insert a new fingerprint into the database, optionally recording
    the submission source, and return the new fingerprint id."""
    with conn.begin():
        fingerprint_values = {
            'fingerprint': data['fingerprint'],
            'length': data['length'],
            'bitrate': data.get('bitrate'),
            'format_id': data.get('format_id'),
            'track_id': data['track_id'],
            'submission_count': 1,
        }
        insert_stmt = schema.fingerprint.insert().values(fingerprint_values)
        id = conn.execute(insert_stmt).inserted_primary_key[0]
        if submission_id and source_id:
            source_values = {
                'fingerprint_id': id,
                'submission_id': submission_id,
                'source_id': source_id,
            }
            conn.execute(schema.fingerprint_source.insert().values(source_values))
    logger.debug("Inserted fingerprint %r", id)
    return id
def inc_fingerprint_submission_count(conn, id, submission_id=None, source_id=None):
    """Increment a fingerprint's submission_count, optionally recording
    the submission source."""
    conn.execute(
        schema.fingerprint.update()
        .where(schema.fingerprint.c.id == id)
        .values(submission_count=sql.text('submission_count+1')))
    if submission_id and source_id:
        conn.execute(schema.fingerprint_source.insert().values({
            'fingerprint_id': id,
            'submission_id': submission_id,
            'source_id': source_id,
        }))
    return True
def update_fingerprint_index(db, index, limit=1000):
    """Add up to ``limit`` new fingerprints to the search index.

    Only fingerprints with an id greater than the index's stored
    ``max_document_id`` attribute are considered.

    Fixes: the index connection no longer shadows the ``index`` parameter,
    and the loop variable no longer shadows the ``id`` builtin.
    """
    with closing(index.connect()) as idx:
        max_id = int(idx.get_attribute('max_document_id') or '0')
        query = sql.select([
            schema.fingerprint.c.id,
            sql.func.acoustid_extract_query(schema.fingerprint.c.fingerprint),
        ]).where(schema.fingerprint.c.id > max_id).\
            order_by(schema.fingerprint.c.id).limit(limit)
        # Open the index transaction lazily, only if there is work to do
        in_transaction = False
        for fp_id, fingerprint in db.execute(query):
            if not in_transaction:
                idx.begin()
                in_transaction = True
            logger.debug("Adding fingerprint %s to index %s", fp_id, idx)
            idx.insert(fp_id, fingerprint)
        if in_transaction:
            idx.commit()
|
# This file is part of the pycalver project
# https://github.com/mbarkhau/pycalver
#
# Copyright (c) 2018-2020 Manuel Barkhau (mbarkhau@gmail.com) - MIT License
# SPDX-License-Identifier: MIT
"""Rewrite files, updating occurences of version strings."""
import io
import typing as typ
import logging
from . import parse
from . import config
from . import rewrite
from . import version
from . import regexfmt
from . import v1version
from .patterns import Pattern
logger = logging.getLogger("bumpver.v1rewrite")
def rewrite_lines(
    patterns : typ.List[Pattern],
    new_vinfo: version.V1VersionInfo,
    old_lines: typ.List[str],
) -> typ.List[str]:
    """Replace occurrences of patterns in old_lines with new_vinfo.

    Fixes: docstring typo ("occurances"); removed the redundant ``else``
    after the unconditional ``raise`` — the error branch never falls
    through, so the success path can return unguarded.

    :raises rewrite.NoPatternMatch: when any pattern matched no line
    """
    found_patterns: typ.Set[Pattern] = set()
    new_lines = old_lines[:]
    for match in parse.iter_matches(old_lines, patterns):
        found_patterns.add(match.pattern)
        replacement = v1version.format_version(new_vinfo, match.pattern.raw_pattern)
        span_l, span_r = match.span
        new_line = match.line[:span_l] + replacement + match.line[span_r:]
        new_lines[match.lineno] = new_line

    non_matched_patterns = set(patterns) - found_patterns
    if non_matched_patterns:
        for nmp in non_matched_patterns:
            logger.error(f"No match for pattern '{nmp.raw_pattern}'")
            # Include a regex101 link and a copy-pasteable expression to
            # help the user debug the pattern
            msg = (
                "\n# "
                + regexfmt.regex101_url(nmp.regexp.pattern)
                + "\nregex = "
                + regexfmt.pyexpr_regex(nmp.regexp.pattern)
            )
            logger.error(msg)
        raise rewrite.NoPatternMatch("Invalid pattern(s)")

    return new_lines
def rfd_from_content(
    patterns : typ.List[Pattern],
    new_vinfo: version.V1VersionInfo,
    content  : str,
    path     : str = "<path>",
) -> rewrite.RewrittenFileData:
    r"""Rewrite pattern occurrences with version string.

    >>> version_pattern = "{pycalver}"
    >>> new_vinfo = v1version.parse_version_info("v201809.0123")

    >>> from .v1patterns import compile_pattern
    >>> patterns = [compile_pattern(version_pattern, '__version__ = "{pycalver}"')]
    >>> content = '__version__ = "v201809.0001-alpha"'
    >>> rfd = rfd_from_content(patterns, new_vinfo, content)
    >>> rfd.new_lines
    ['__version__ = "v201809.0123"']

    >>> patterns = [compile_pattern('{semver}', '__version__ = "v{semver}"')]
    >>> new_vinfo = v1version.parse_version_info("v1.2.3", "v{semver}")
    >>> content = '__version__ = "v1.2.2"'
    >>> rfd = rfd_from_content(patterns, new_vinfo, content)
    >>> rfd.new_lines
    ['__version__ = "v1.2.3"']
    """
    # Preserve the file's own line separator so output round-trips exactly
    line_sep = rewrite.detect_line_sep(content)
    old_lines = content.split(line_sep)
    new_lines = rewrite_lines(patterns, new_vinfo, old_lines)
    return rewrite.RewrittenFileData(path, line_sep, old_lines, new_lines)
def iter_rewritten(
    file_patterns: config.PatternsByFile,
    new_vinfo    : version.V1VersionInfo,
) -> typ.Iterable[rewrite.RewrittenFileData]:
    """Iterate over files with version string replaced."""
    fobj: typ.IO[str]

    for file_path, pattern_strs in rewrite.iter_path_patterns_items(file_patterns):
        with file_path.open(mode="rt", encoding="utf-8") as fobj:
            file_content = fobj.read()

        data = rfd_from_content(pattern_strs, new_vinfo, file_content)
        yield data._replace(path=str(file_path))
def diff(
    old_vinfo : version.V1VersionInfo,
    new_vinfo : version.V1VersionInfo,
    file_patterns: config.PatternsByFile,
) -> str:
    """Generate diffs of rewritten files.

    :raises rewrite.NoPatternMatch: when a file's patterns match nothing,
        or when the version string changed but the rewrite produced no diff
    """
    full_diff = ""
    fobj: typ.IO[str]

    for file_path, patterns in sorted(rewrite.iter_path_patterns_items(file_patterns)):
        with file_path.open(mode="rt", encoding="utf-8") as fobj:
            content = fobj.read()

        # Determine whether any pattern actually produces a different
        # version string for this file
        has_updated_version = False
        for pattern in patterns:
            old_str = v1version.format_version(old_vinfo, pattern.raw_pattern)
            new_str = v1version.format_version(new_vinfo, pattern.raw_pattern)
            if old_str != new_str:
                has_updated_version = True

        try:
            rfd = rfd_from_content(patterns, new_vinfo, content)
        except rewrite.NoPatternMatch:
            # pylint:disable=raise-missing-from ; we support py2, so not an option
            errmsg = f"No patterns matched for file '{file_path}'"
            raise rewrite.NoPatternMatch(errmsg)

        rfd = rfd._replace(path=str(file_path))
        lines = rewrite.diff_lines(rfd)
        # An empty diff despite a changed version string means the pattern
        # matched nothing it could actually update
        if len(lines) == 0 and has_updated_version:
            errmsg = f"No patterns matched for file '{file_path}'"
            raise rewrite.NoPatternMatch(errmsg)

        full_diff += "\n".join(lines) + "\n"

    full_diff = full_diff.rstrip("\n")
    return full_diff
def rewrite_files(
    file_patterns: config.PatternsByFile,
    new_vinfo    : version.V1VersionInfo,
) -> None:
    """Rewrite project files, updating each with the new version."""
    fobj: typ.IO[str]

    for rfd in iter_rewritten(file_patterns, new_vinfo):
        updated_content = rfd.line_sep.join(rfd.new_lines)
        with io.open(rfd.path, mode="wt", encoding="utf-8") as fobj:
            fobj.write(updated_content)
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from .. import _utilities
import typing
# Export this package's modules as members:
from ._enums import *
from .account import *
from .get_account import *
from .get_private_endpoint_connection import *
from .list_account_keys import *
from .private_endpoint_connection import *
from ._inputs import *
from . import outputs
# Make subpackages available:
if typing.TYPE_CHECKING:
import pulumi_azure_native.purview.v20201201preview as v20201201preview
else:
v20201201preview = _utilities.lazy_import('pulumi_azure_native.purview.v20201201preview')
|
'''
Author : MiKueen
Level : Medium
Problem Statement : Add Two Numbers
You are given two non-empty linked lists representing two non-negative integers. The digits are stored in reverse order and each of their nodes contain a single digit. Add the two numbers and return it as a linked list.
You may assume the two numbers do not contain any leading zero, except the number 0 itself.
Example:
Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
Output: 7 -> 0 -> 8
Explanation: 342 + 465 = 807.
'''
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
        """Sum two non-negative integers stored as reversed-digit lists.

        Walks both lists in lockstep, adding digit pairs plus the running
        carry, and appends each resulting digit to a freshly built list.
        Returns the head of the result (the node after the sentinel).
        """
        sentinel = ListNode(0)
        tail = sentinel
        carry = 0
        while l1 or l2 or carry:
            total = carry
            if l1:
                total += l1.val
                l1 = l1.next
            if l2:
                total += l2.val
                l2 = l2.next
            carry, digit = divmod(total, 10)
            tail.next = ListNode(digit)
            tail = tail.next
        return sentinel.next
|
#
# Licensed to Big Data Genomics (BDG) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The BDG licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from bdgenomics.adam.adamContext import ADAMContext
from bdgenomics.adam.rdd import CoverageRDD, FeatureRDD
from bdgenomics.adam.test import SparkTestCase
import os
class CoverageRDDTest(SparkTestCase):
    """Integration tests for CoverageRDD: saving, collapsing, conversion
    to features, aggregation, and flattening.

    Note: ``assertEquals`` is a deprecated alias (removed in Python 3.12);
    all assertions below use ``assertEqual``.
    """

    def test_save(self):
        # Round-trip coverage to disk and verify files were written.
        testFile = self.resourceFile("sorted.sam")
        ac = ADAMContext(self.ss)
        reads = ac.loadAlignments(testFile)
        coverage = reads.toCoverage()
        tmpPath = self.tmpFile() + ".coverage.adam"
        coverage.save(tmpPath,
                      asSingleFile=True,
                      disableFastConcat=True)
        assert(os.listdir(tmpPath) != [])

    def test_collapse(self):
        # Collapsing adjacent equal-coverage intervals should not change
        # the record count for this fixture.
        testFile = self.resourceFile("sorted.sam")
        ac = ADAMContext(self.ss)
        reads = ac.loadAlignments(testFile)
        coverage = reads.toCoverage()
        collapsed = coverage.collapse()
        self.assertEqual(collapsed.toDF().count(), coverage.toDF().count())

    def test_toFeatures(self):
        # Conversion to features preserves type and record count.
        testFile = self.resourceFile("sorted.sam")
        ac = ADAMContext(self.ss)
        reads = ac.loadAlignments(testFile)
        coverage = reads.toCoverage()
        features = coverage.toFeatures()
        assert(isinstance(features, FeatureRDD))
        self.assertEqual(features.toDF().count(), coverage.toDF().count())

    def test_aggregatedCoverage(self):
        # Aggregating into 10bp bins yields a known count for small.sam.
        testFile = self.resourceFile("small.sam")
        ac = ADAMContext(self.ss)
        reads = ac.loadAlignments(testFile)
        coverage = reads.toCoverage()
        collapsed = coverage.aggregatedCoverage(10)
        self.assertEqual(collapsed.toDF().count(), 166)

    def test_flatten(self):
        # Flattening to per-base coverage yields a known count for small.sam.
        testFile = self.resourceFile("small.sam")
        ac = ADAMContext(self.ss)
        reads = ac.loadAlignments(testFile)
        coverage = reads.toCoverage()
        flattened = coverage.flatten()
        self.assertEqual(flattened.toDF().count(), 1500)
|
#!/usr/bin/env python
"""Extract events from kafka and write them to hdfs
"""
import json
from pyspark.sql import SparkSession, Row
from pyspark.sql.functions import udf
@udf('boolean')
def is_purchase(event_as_json):
    """Return True iff the JSON-encoded event is a sword purchase."""
    parsed = json.loads(event_as_json)
    return parsed['event_type'] == 'purchase_sword'
def main():
    """main

    Batch-read raw events from the Kafka topic, keep only purchase
    events, expand the JSON payloads into columns, and display the
    resulting schema and rows.
    """
    spark = SparkSession \
        .builder \
        .appName("ExtractEventsJob") \
        .getOrCreate()
    # Read the full topic from earliest to latest offset in one batch.
    raw_events = spark \
        .read \
        .format("kafka") \
        .option("kafka.bootstrap.servers", "kafka:29092") \
        .option("subscribe", "curlProjectEvents") \
        .option("startingOffsets", "earliest") \
        .option("endingOffsets", "latest") \
        .load()
    # Kafka values are bytes; cast to string before JSON filtering.
    purchase_events = raw_events \
        .select(raw_events.value.cast('string').alias('raw'),
                raw_events.timestamp.cast('string')) \
        .filter(is_purchase('raw'))
    # Expand each JSON payload's keys into top-level columns next to the
    # Kafka timestamp.
    extracted_purchase_events = purchase_events \
        .rdd \
        .map(lambda r: Row(timestamp=r.timestamp, **json.loads(r.raw))) \
        .toDF()
    # NOTE(review): the module docstring says events are written to HDFS,
    # but this job only prints the schema and shows rows -- confirm intent.
    extracted_purchase_events.printSchema()
    extracted_purchase_events.show()
if __name__ == "__main__":
    main()
|
# Ex049 - Redo challenge 009: print the multiplication table for a number
# chosen by the user, this time using a for loop.
num = int(input('Digite o número no qual você deseja saber a tabuada: '))
for multiplier in range(1, 11):
    product = num * multiplier
    print(f'{multiplier} x {num} = {product}')
|
import numpy as np
# read datasets line by line
def read_line_by_line(dataset_name, C, model, vec_size):
    """Read a labeled text dataset and embed each document.

    Each line of ``dataset_name`` is "<classID>\\t<word> <word> ...".

    Parameters
    ----------
    dataset_name : str
        Path to the dataset file.
    C : sequence
        Class IDs seen so far; new IDs are appended in encounter order.
        Pass ``[]`` (or an empty object array) on the first call.
    model : mapping
        Word -> embedding-vector lookup (e.g. gensim KeyedVectors); must
        raise KeyError for out-of-vocabulary words.
    vec_size : int
        Dimensionality of the word embeddings.

    Returns
    -------
    tuple
        ``(X, BOW_X, y, C, the_words)`` where, per document, ``X`` holds
        the (vec_size, n_unique_kept_words) embedding matrix, ``BOW_X``
        the word counts, ``y`` the 1-based class index, ``C`` the updated
        class list, and ``the_words`` the kept word strings.
    """
    # Load the stop-word list (upstream skips this for twitter datasets).
    SW = set()
    with open('wmd/stop_words.txt') as stop_file:
        for line in stop_file:
            line = line.strip()
            if line != '':
                SW.add(line)
    stop = list(SW)
    if len(C) == 0:
        # np.object was removed in NumPy 1.24; use the builtin `object`.
        C = np.array([], dtype=object)
    num_lines = sum(1 for line in open(dataset_name))
    y = np.zeros((num_lines,))
    X = np.zeros((num_lines,), dtype=object)
    BOW_X = np.zeros((num_lines,), dtype=object)
    count = 0
    the_words = np.zeros((num_lines,), dtype=object)
    with open(dataset_name) as f:
        for line in f:
            print('%d out of %d' % (count + 1, num_lines))
            line = line.strip()
            T = line.split('\t')
            classID = T[0]
            if classID in C:
                IXC = np.where(C == classID)
                y[count] = IXC[0] + 1
            else:
                C = np.append(C, classID)
                y[count] = len(C)
            W = line.split()
            F = np.zeros((vec_size, len(W) - 1))
            inner = 0
            # Initialize with '' (not 0) so slots that are never written
            # are dropped by the `!= ''` filter below; the previous
            # np.zeros(dtype=object) left integer zeros that leaked into
            # the returned word lists whenever a stop word was skipped.
            word_order = np.full((len(W) - 1,), '', dtype=object)
            bow_x = np.zeros((len(W) - 1,))
            for word in W[1:len(W)]:
                try:
                    test = model[word]  # KeyError => out of vocabulary
                    if word in stop:
                        word_order[inner] = ''
                        continue  # stop word: slot `inner` is reused
                    if word in word_order:
                        # Repeated word: bump its existing count.
                        IXW = np.where(word_order == word)
                        bow_x[IXW] += 1
                        word_order[inner] = ''
                    else:
                        word_order[inner] = word
                        bow_x[inner] += 1
                        F[:, inner] = model[word]
                except KeyError:
                    # Out-of-vocabulary word: leave the slot empty. A bare
                    # `except:` here also swallowed KeyboardInterrupt.
                    word_order[inner] = ''
                inner = inner + 1
            # Drop all-zero embedding columns, empty slots and zero counts.
            Fs = F.T[~np.all(F.T == 0, axis=1)]
            word_orders = word_order[word_order != '']
            bow_xs = bow_x[bow_x != 0]
            X[count] = Fs.T
            the_words[count] = word_orders
            BOW_X[count] = bow_xs
            count = count + 1
    return (X, BOW_X, y, C, the_words)
|
import React from 'react';
import createSvg from '../utils/createSvg';
// Material-style "FolderShared" icon: the project's createSvg helper wraps
// the raw SVG path group into a reusable icon component.
export default createSvg(<g><path d="M20 6h-8l-2-2H4c-1.1 0-1.99.9-1.99 2L2 18c0 1.1.9 2 2 2h16c1.1 0 2-.9 2-2V8c0-1.1-.9-2-2-2zm-5 3c1.1 0 2 .9 2 2s-.9 2-2 2-2-.9-2-2 .9-2 2-2zm4 8h-8v-1c0-1.33 2.67-2 4-2s4 .67 4 2v1z"/></g>, 'FolderShared');
|
#!/usr/bin/env python3
# Copyright (c) 2015-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node responses to invalid transactions.
In this test we connect to one node over p2p, and test tx requests.
"""
from data import invalid_txs
from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import (
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxOut,
)
from test_framework.p2p import P2PDataStore
from test_framework.script import OP_TRUE, CScript
from test_framework.test_framework import BitcoinTestFramework
from test_framework.txtools import pad_tx
from test_framework.util import assert_equal
class InvalidTxRequestTest(BitcoinTestFramework):
    """Feed invalid transactions to a single node over P2P.

    Exercises template-driven invalid txs, orphan-transaction handling,
    orphan-pool overflow, and rejection of txs with rejected parents.
    """

    def set_test_params(self):
        # One node, accepting non-standard txs so policy-only templates are
        # exercised too; fresh chain so block heights are predictable.
        self.num_nodes = 1
        self.extra_args = [
            ["-acceptnonstdtxn=1", ]
        ]
        self.setup_clean_chain = True

    def bootstrap_p2p(self, *, num_connections=1):
        """Add a P2P connection to the node.

        Helper to connect and wait for version handshake."""
        for _ in range(num_connections):
            self.nodes[0].add_p2p_connection(P2PDataStore())

    def reconnect_p2p(self, **kwargs):
        """Tear down and bootstrap the P2P connection to the node.

        The node gets disconnected several times in this test. This helper
        method reconnects the p2p and restarts the network thread."""
        self.nodes[0].disconnect_p2ps()
        self.bootstrap_p2p(**kwargs)

    def run_test(self):
        node = self.nodes[0]  # convenience reference to the node
        self.bootstrap_p2p()  # Add one p2p connection to the node
        best_block = self.nodes[0].getbestblockhash()
        tip = int(best_block, 16)
        best_block_time = self.nodes[0].getblock(best_block)['time']
        block_time = best_block_time + 1
        self.log.info("Create a new block with an anyone-can-spend coinbase.")
        height = 1
        block = create_block(tip, create_coinbase(height), block_time)
        block.solve()
        # Save the coinbase for later
        block1 = block
        tip = block.sha256
        node.p2ps[0].send_blocks_and_test([block], node, success=True)
        self.log.info("Mature the block.")
        self.nodes[0].generatetoaddress(
            100, self.nodes[0].get_deterministic_priv_key().address)
        # Iterate through a list of known invalid transaction types, ensuring each is
        # rejected. Some are consensus invalid and some just violate policy.
        for BadTxTemplate in invalid_txs.iter_all_templates():
            self.log.info(
                "Testing invalid transaction: %s",
                BadTxTemplate.__name__)
            template = BadTxTemplate(spend_block=block1)
            tx = template.get_tx()
            node.p2ps[0].send_txs_and_test(
                [tx], node, success=False,
                expect_disconnect=template.expect_disconnect,
                reject_reason=template.reject_reason,
            )
            if template.expect_disconnect:
                self.log.info("Reconnecting to peer")
                self.reconnect_p2p()
        # Make two p2p connections to provide the node with orphans
        # * p2ps[0] will send valid orphan txs (one with low fee)
        # * p2ps[1] will send an invalid orphan tx (and is later disconnected for that)
        self.reconnect_p2p(num_connections=2)
        self.log.info('Test orphan transaction handling ... ')
        # Create a root transaction that we withhold until all dependent
        # transactions are sent out and in the orphan cache
        SCRIPT_PUB_KEY_OP_TRUE = CScript([OP_TRUE])
        tx_withhold = CTransaction()
        tx_withhold.vin.append(
            CTxIn(outpoint=COutPoint(block1.vtx[0].sha256, 0)))
        tx_withhold.vout.append(
            CTxOut(nValue=50 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        pad_tx(tx_withhold)
        tx_withhold.calc_sha256()
        # Our first orphan tx with some outputs to create further orphan txs
        tx_orphan_1 = CTransaction()
        tx_orphan_1.vin.append(
            CTxIn(outpoint=COutPoint(tx_withhold.sha256, 0)))
        tx_orphan_1.vout = [
            CTxOut(
                nValue=10 * COIN,
                scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)] * 3
        pad_tx(tx_orphan_1)
        tx_orphan_1.calc_sha256()
        # A valid transaction with low fee
        tx_orphan_2_no_fee = CTransaction()
        tx_orphan_2_no_fee.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 0)))
        tx_orphan_2_no_fee.vout.append(
            CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        pad_tx(tx_orphan_2_no_fee)
        # A valid transaction with sufficient fee
        tx_orphan_2_valid = CTransaction()
        tx_orphan_2_valid.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 1)))
        tx_orphan_2_valid.vout.append(
            CTxOut(nValue=10 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        tx_orphan_2_valid.calc_sha256()
        pad_tx(tx_orphan_2_valid)
        # An invalid transaction with negative fee (spends more than its input)
        tx_orphan_2_invalid = CTransaction()
        tx_orphan_2_invalid.vin.append(
            CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 2)))
        tx_orphan_2_invalid.vout.append(
            CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        pad_tx(tx_orphan_2_invalid)
        tx_orphan_2_invalid.calc_sha256()
        self.log.info('Send the orphans ... ')
        # Send valid orphan txs from p2ps[0]
        node.p2ps[0].send_txs_and_test(
            [tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid], node, success=False)
        # Send invalid tx from p2ps[1]
        node.p2ps[1].send_txs_and_test(
            [tx_orphan_2_invalid], node, success=False)
        # Mempool should be empty
        assert_equal(0, node.getmempoolinfo()['size'])
        # p2ps[1] is still connected
        assert_equal(2, len(node.getpeerinfo()))
        self.log.info('Send the withhold tx ... ')
        with node.assert_debug_log(expected_msgs=["bad-txns-in-belowout"]):
            node.p2ps[0].send_txs_and_test([tx_withhold], node, success=True)
        # Transactions that should end up in the mempool
        expected_mempool = {
            t.hash
            for t in [
                tx_withhold,  # The transaction that is the root for all orphans
                tx_orphan_1,  # The orphan transaction that splits the coins
                # The valid transaction (with sufficient fee)
                tx_orphan_2_valid,
            ]
        }
        # Transactions that do not end up in the mempool
        # tx_orphan_2_no_fee, because it has too low fee (p2ps[0] is not disconnected for relaying that tx)
        # tx_orphan_2_invalid, because it has negative fee (p2ps[1] is
        # disconnected for relaying that tx)
        # p2ps[1] is no longer connected
        self.wait_until(lambda: 1 == len(node.getpeerinfo()),
                        timeout=12)
        assert_equal(expected_mempool, set(node.getrawmempool()))
        self.log.info('Test orphan pool overflow')
        # 101 orphans with nonexistent parents: one more than the pool holds.
        orphan_tx_pool = [CTransaction() for _ in range(101)]
        for i in range(len(orphan_tx_pool)):
            orphan_tx_pool[i].vin.append(CTxIn(outpoint=COutPoint(i, 333)))
            orphan_tx_pool[i].vout.append(
                CTxOut(
                    nValue=11 * COIN,
                    scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
            pad_tx(orphan_tx_pool[i])
        with node.assert_debug_log(['mapOrphan overflow, removed 1 tx']):
            node.p2ps[0].send_txs_and_test(orphan_tx_pool, node, success=False)
        # A child of the previously rejected tx must not be kept as an orphan.
        rejected_parent = CTransaction()
        rejected_parent.vin.append(
            CTxIn(
                outpoint=COutPoint(
                    tx_orphan_2_invalid.sha256,
                    0)))
        rejected_parent.vout.append(
            CTxOut(
                nValue=11 * COIN,
                scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
        pad_tx(rejected_parent)
        rejected_parent.rehash()
        with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(rejected_parent.hash)]):
            node.p2ps[0].send_txs_and_test(
                [rejected_parent], node, success=False)
# Run the functional test when executed as a script.
if __name__ == '__main__':
    InvalidTxRequestTest().main()
|
// TMS front-end configuration: backend endpoint prefixes, branding assets,
// and the REST API URL map consumed by the rest of the app.
// The commented-out PREFIX_ON / import_on alternatives are other deployments.
const PREFIX = 'http://127.0.0.1:3000'
const PREFIX_LOCAL = '127.0.0.1:8070/api/v1/test-import/upload'
//const PREFIX_ON = 'http://10.3.10.92:8080/tms-boot-rest-0.0.1-SNAPSHOT'
const PREFIX_ON = 'http://10.3.10.112:9006/tms-boot-rest-0.0.1-SNAPSHOT'
//const PREFIX_ON = 'http://microsvc.dycd.com:9006/tms-boot-rest-0.0.1-SNAPSHOT'
//const import_on = '10.3.10.92:8080/tms-boot-rest-0.0.1-SNAPSHOT'
const import_on = '10.3.10.112:9006/tms-boot-rest-0.0.1-SNAPSHOT'
//const import_on = 'microsvc.dycd.com:9006/tms-boot-rest-0.0.1-SNAPSHOT'
const import_temp = 'contract.xlsx'
const APIV1 = '/api/v1'
const APIV2 = '/api/v2'
module.exports = {
  // Branding / static assets
  name: '',
  prefix: 'DYCD-TMS',
  footerText: 'DYCD © 2018 TMS',
  logo: '/logo.png',
  logo_: '/logo_.png',
  logo_small: '/logo_small.png',
  logo_small_: '/logo_small_.png',
  iconFontCSS: '/iconfont.css',
  iconFontJS: '/iconfont.js',
  CORS: [],
  // Routes reachable without authentication
  openPages: ['/login'],
  apiPrefix: '/api/v1',
  import_on,
  import_temp,
  APIV1,
  APIV2,
  // REST endpoint map; entries prefixed with PREFIX_ON hit the backend
  // directly, the rest are relative (served/mocked locally).
  api: {
    userLogin: `${PREFIX_ON}${APIV1}/login`,
    userLogout: `${PREFIX_ON}${APIV1}/login/loginOut`,
    userInfo: `${APIV1}/userInfo`,
    users: `${PREFIX_ON}${APIV1}/role-user`,
    posts: `${APIV1}/posts`,
    user: `${APIV1}/user/:id`,
    // dashboard: `${APIV1}/dashboard`,
    //menus: `${APIV1}/menus`,
    menus: `${PREFIX_ON}${APIV1}/role-user`,
    weather: `${APIV1}/weather`,
    v1test: `${APIV1}/test`,
    v2test: `${APIV2}/test`,
    manageLocation: `${PREFIX_ON}${APIV1}/region-info`,
    getManageLocation: `${PREFIX_ON}${APIV1}/manage-location/`,
    provinc: `${PREFIX}/provinc-names`,
    account: `${PREFIX_ON}${APIV1}/account`,
    addAccount: `${PREFIX_ON}${APIV1}/region-user/add-region-userinfo`,
    insuranceList: `${PREFIX_ON}${APIV1}/insurance-template-info/list`,
    insuranceCreate: `${PREFIX_ON}${APIV1}/insurance-template-info/add`,
    insuranceRemove: `${PREFIX_ON}${APIV1}/insurance-template-info/`,
    vehicleValidatePriceInfo: `${PREFIX_ON}${APIV1}/vehicle-validate-price-info/get`,
    vehicleValidatePriceInfoUpdate: `${PREFIX_ON}${APIV1}/vehicle-validate-price-info`,
    // transportRegularInfo: `${PREFIX_ON}${APIV1}/transport-regular-info`,
    manageCarrier: `${PREFIX_ON}${APIV1}/carrier-info`,
    manageCarrierEmployee: `${PREFIX_ON}${APIV1}/carrier-employee-info`,
    manageCarrierRegion: `${PREFIX_ON}${APIV1}/carrier-transport-region`,
    manageCarrierLine: `${PREFIX_ON}${APIV1}/carrier-contract-line-info`,
    regionInfterFace: `${PREFIX_ON}${APIV1}/region-interface`,
    transportRegularInfo: `${PREFIX_ON}${APIV1}/transport-regular-info`,
    transportInfo: `${PREFIX_ON}${APIV1}/transport-fixed-regular-info`,
    _transportInfo: `${PREFIX_ON}${APIV1}/transport-fixed-regular-info`,
    wayBill: `${PREFIX_ON}${APIV1}/waybill-manage`,
    cityCollectionInfo: `${PREFIX_ON}${APIV1}/city-collectionInfo`,
    // uploadUrl: `http://base-server.dev.dycd.com`,
    uploadUrl: `${PREFIX_ON}${APIV1}/file/upload`,
    servicePrice: `${PREFIX_ON}${APIV1}/service-price-info`,
    recoveryVehiclePrice: `${PREFIX_ON}${APIV1}/vehicleRescuePriceInfo`,
    dashboard: `${PREFIX_ON}${APIV1}/workbench`,
    takeVehiclePrice: `${PREFIX_ON}${APIV1}/vehicle-batch-price-info`,
    // NOTE(review): this entry lacks the APIV1 segment unlike its siblings -- confirm intended.
    usedCarDrivingPrice: `${PREFIX_ON}/vehicle-used-diving-price`,
    carrierOfferPriceinfo: `${PREFIX_ON}${APIV1}/carrier-offer-priceinfo`,
    // transportInfo: 'http://10.3.10.92:8080/tms-boot-rest-0.0.1-SNAPSHOT/api/v1/transport-fixed-regular-info/get-region-list',
    checkButtonResource: `${PREFIX_ON}${APIV1}/role-user/check-button-resource`,
    deliverStorePrice: `${PREFIX_ON}${APIV1}/to-repo-price`,
    cityCenterInfo: `${PREFIX_ON}${APIV1}/city-centerInfo`,
  },
}
|
#! python3
# a ZIP file whose filename increments
import zipfile, os
def backupToZip(folder):
    """Back up the entire contents of ``folder`` into <basename>_N.zip.

    N increments (folder_1.zip, folder_2.zip, ...) so repeated runs never
    overwrite an earlier backup; previously created backup archives that
    match the naming scheme are skipped while walking the tree. The archive
    is written to the current working directory.
    """
    folder = os.path.abspath(folder)  # make sure folder is absolute
    # Figure out the filename this code should use based on what files already exist
    number = 1
    while True:
        zipFileName = os.path.basename(folder) + '_' + str(number) + '.zip'
        if not os.path.exists(zipFileName):
            break
        number += 1
    # Create the ZIP file. ZIP_DEFLATED actually compresses the contents;
    # the previous default (ZIP_STORED) merely archived them uncompressed.
    # The `with` block also guarantees the archive is closed on error.
    print('Creating %s...' % zipFileName)
    with zipfile.ZipFile(zipFileName, 'w', zipfile.ZIP_DEFLATED) as backupZip:
        # Walk the entire folder tree and compress the files in each folder.
        for foldername, subfolders, filenames in os.walk(folder):
            print('Adding files in %s...' % (foldername))
            # Add the current folder to the ZIP file
            backupZip.write(foldername)
            # Add all the files in this folder to the ZIP file
            for filename in filenames:
                newBase = os.path.basename(folder) + '_'
                if filename.startswith(newBase) and filename.endswith('.zip'):
                    continue  # don't backup the backup ZIP files
                backupZip.write(os.path.join(foldername, filename))
    print('Done.')
# When run as a script, back up the current working directory.
if __name__ == '__main__':
    backupToZip('.')
|
// based on react-obfuscate
// https://github.com/coston/react-obfuscate/blob/master/src/obfuscate.js
import React, { Component } from 'react'
/**
 * Obfuscated contact link. Renders the link text reversed (with
 * `direction: rtl` so it *displays* correctly) until a human interacts
 * with it (focus/hover/context menu), which defeats naive scrapers while
 * keeping the link usable. Based on react-obfuscate.
 */
export default class Obfuscate2 extends Component {
  constructor(props) {
    super(props)
    // humanInteraction flips to true on focus/hover/context-menu; until
    // then the href stays obfuscated and clicks are handled in JS.
    this.state = {
      humanInteraction: false,
    }
  }
  // Convert contact information to contact URL scheme
  createContactLink(props) {
    let link
    // Combine email header parameters for use with email
    const combineHeaders = params => {
      return Object.keys(params)
        .map(key => `${key}=${encodeURIComponent(params[key])}`)
        .join('&')
    }
    // Priority order: email > tel > sms > facetime > href > plain children.
    // NOTE(review): the email prop appears to be base64-encoded (atob) --
    // confirm callers encode it before passing.
    if (props.email) {
      link = `mailto:${atob(props.email)}`
      if (props.headers) {
        link += `?${combineHeaders(props.headers)}`
      }
    } else if (props.tel) {
      link = `tel:${props.tel}`
    } else if (props.sms) {
      link = `sms:${props.sms}`
    } else if (props.facetime) {
      link = `facetime:${props.facetime}`
    } else if (props.href) {
      link = props.href
    } else if (typeof props.children !== 'object') {
      link = props.children
    } else {
      return ''
    }
    return link
  }
  handleClick(event) {
    const { onClick } = this.props
    // If focused or hovered, this js will be skipped with preference for html
    if (this.state.humanInteraction === false) {
      event.preventDefault()
      // Allow instantiator to provide an onClick method to be called
      // before we change location (e.g. for analytics tracking)
      if (onClick && typeof onClick === 'function') {
        onClick()
      }
      window.location.href = this.createContactLink(this.props)
    }
  }
  // Any focus/hover/context-menu marks the visitor as human so the real
  // link becomes copyable and navigable via plain HTML.
  handleCopiability() {
    this.setState({
      humanInteraction: true,
    })
  }
  reverse(string) {
    if (typeof string !== 'undefined') {
      // NOTE(review): the two sequential replace() calls are meant to swap
      // paren orientation after reversal (e.g. for phone numbers); each
      // replaces only the FIRST occurrence, so strings with multiple paren
      // pairs may render oddly -- confirm against upstream react-obfuscate.
      return string
        .split('')
        .reverse()
        .join('')
        .replace('(', ')')
        .replace(')', '(')
    }
  }
  render() {
    const { humanInteraction } = this.state
    const {
      element: Element = 'a',
      children,
      tel,
      sms,
      facetime,
      email,
      href,
      headers,
      obfuscate,
      obfuscateChildren,
      linkText,
      style,
      ...others
    } = this.props
    const propsList = children || tel || sms || facetime || email || href
    // bidi-override + rtl makes the reversed text display in the correct
    // reading order while the DOM text stays scrambled.
    const obsStyle = {
      ...(style || {}),
      unicodeBidi: 'bidi-override',
      direction:
        humanInteraction === true ||
        obfuscate === false ||
        obfuscateChildren === false
          ? 'ltr'
          : 'rtl',
    }
    const link =
      humanInteraction === true ||
      obfuscate === false ||
      typeof children === 'object' ||
      obfuscateChildren === false // Allow child elements
        ? propsList
        : this.reverse(propsList)
    // Only real anchors get an href; the href is a decoy until a human
    // interaction is observed.
    const clickProps =
      Element === 'a'
        ? {
            href:
              humanInteraction === true || obfuscate === false
                ? this.createContactLink(this.props)
                : linkText || 'obfuscated',
            onClick: this.handleClick.bind(this),
          }
        : {}
    const props = {
      onFocus: this.handleCopiability.bind(this),
      onMouseOver: this.handleCopiability.bind(this),
      onContextMenu: this.handleCopiability.bind(this),
      ...others,
      ...clickProps,
      style: obsStyle,
    }
    return <Element {...props}>{this.props.children}</Element>
  }
}
|
# -*- coding: utf8 -*-
# Copyright (C) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
import signal
import time
from threading import Thread
from pytest import mark
import zmq
from zmq.tests import (
BaseZMQTestCase, SkipTest, skip_pypy
)
from zmq.utils.strtypes import b
# Partially based on EINTRBaseTest from CPython 3.5 eintr_tester
class TestEINTRSysCall(BaseZMQTestCase):
    """ Base class for EINTR tests.

    Each test arms a short one-shot SIGALRM timer, then performs a blocking
    zmq call with a longer timeout; the call must survive the EINTR caused
    by the signal (i.e. be retried internally) rather than raise.
    """

    # delay for initial signal delivery
    signal_delay = 0.1
    # timeout for tests. Must be > signal_delay
    timeout = .25
    timeout_ms = int(timeout * 1e3)

    def alarm(self, t=None):
        """start a timer to fire only once

        like signal.alarm, but with better resolution than integer seconds.
        """
        if not hasattr(signal, 'setitimer'):
            raise SkipTest('EINTR tests require setitimer')
        if t is None:
            t = self.signal_delay
        self.timer_fired = False
        self.orig_handler = signal.signal(signal.SIGALRM, self.stop_timer)
        # signal_period ignored, since only one timer event is allowed to fire
        signal.setitimer(signal.ITIMER_REAL, t, 1000)

    def stop_timer(self, *args):
        # SIGALRM handler: record the firing, disarm the timer and restore
        # the previous handler.
        self.timer_fired = True
        signal.setitimer(signal.ITIMER_REAL, 0, 0)
        signal.signal(signal.SIGALRM, self.orig_handler)

    @mark.skipif(not hasattr(zmq, 'RCVTIMEO'), reason="requires RCVTIMEO")
    def test_retry_recv(self):
        # recv must be retried after EINTR and still time out with Again.
        pull = self.socket(zmq.PULL)
        pull.rcvtimeo = self.timeout_ms
        self.alarm()
        self.assertRaises(zmq.Again, pull.recv)
        assert self.timer_fired

    @mark.skipif(not hasattr(zmq, 'SNDTIMEO'), reason="requires SNDTIMEO")
    def test_retry_send(self):
        # send must be retried after EINTR and still time out with Again.
        push = self.socket(zmq.PUSH)
        push.sndtimeo = self.timeout_ms
        self.alarm()
        self.assertRaises(zmq.Again, push.send, b('buf'))
        assert self.timer_fired

    def test_retry_poll(self):
        # poll must be retried after EINTR and still see the message sent
        # from the background thread after the signal fires.
        x, y = self.create_bound_pair()
        poller = zmq.Poller()
        poller.register(x, zmq.POLLIN)
        self.alarm()
        def send():
            time.sleep(2 * self.signal_delay)
            y.send(b('ping'))
        t = Thread(target=send)
        t.start()
        evts = dict(poller.poll(2 * self.timeout_ms))
        t.join()
        assert x in evts
        assert self.timer_fired
        x.recv()

    def test_retry_term(self):
        # Queue a message to a peer that will never receive it (presumably
        # nothing listens at 10.0.0.7:5555 -- TODO confirm) so that context
        # destruction blocks on linger and must survive the signal.
        push = self.socket(zmq.PUSH)
        push.linger = self.timeout_ms
        push.connect('tcp://10.0.0.7:5555')
        push.send(b('ping'))
        time.sleep(0.1)
        self.alarm()
        self.context.destroy()
        assert self.timer_fired
        assert self.context.closed

    def test_retry_getsockopt(self):
        raise SkipTest("TODO: find a way to interrupt getsockopt")

    def test_retry_setsockopt(self):
        raise SkipTest("TODO: find a way to interrupt setsockopt")
|
/*
* This is a part of the BugTrap package.
* Copyright (c) 2005-2009 IntelleSoft.
* All rights reserved.
*
* Description: Log file descriptor.
* Author: Maksim Pyatkovskiy.
*
* This source code is only intended as a supplement to the
* BugTrap package reference and related electronic documentation
* provided with the product. See these sources for detailed
* information regarding the BugTrap package.
*/
#pragma once
#include "BugTrap.h"
#include "BugTrapUtils.h"
/// Log file descriptor. Base class describing a custom log file attached
/// to a BugTrap report; derived classes (e.g. CRegLink) override GetLogType()
/// and SaveEntries() to contribute other data sources.
class CLogLink
{
public:
	/// Initialize the object with an empty log file name.
	CLogLink(void);
	/// Initialize the object from a custom log file name.
	CLogLink(PCTSTR pszLogFileName);
	/// Destroy the object.
	virtual ~CLogLink(void);
	/// Get log type.
	virtual BUGTRAP_LOGTYPE GetLogType(void) const;
	/// Save log link entries.
	virtual void SaveEntries(bool bCrash);
	/// Get custom log file name.
	PCTSTR GetLogFileName(void) const;
	/// Set custom log file name.
	void SetLogFileName(PCTSTR pszLogFileName);
	/// Object comparison (by log file name only).
	friend bool operator==(const CLogLink& rLogLink1, const CLogLink& rLogLink2);
	/// Object comparison (by log file name only).
	friend bool operator!=(const CLogLink& rLogLink1, const CLogLink& rLogLink2);
protected:
	/// Custom log file name.
	TCHAR m_szLogFileName[MAX_PATH];
};
/// Default constructor: starts with an empty log file name.
inline CLogLink::CLogLink(void)
{
	*m_szLogFileName = _T('\0');
}

/**
 * @param pszLogFileName - pointer to custom log file name; expanded to a
 * complete path via GetCompleteLogFileName().
 */
inline CLogLink::CLogLink(PCTSTR pszLogFileName)
{
	GetCompleteLogFileName(m_szLogFileName, pszLogFileName, NULL);
}

/// Trivial virtual destructor (base class for polymorphic log links).
inline CLogLink::~CLogLink(void)
{
}

/**
 * @return pointer to custom log file name.
 */
inline PCTSTR CLogLink::GetLogFileName(void) const
{
	return m_szLogFileName;
}

/**
 * @param pszLogFileName - pointer to custom log file name; expanded to a
 * complete path via GetCompleteLogFileName().
 */
inline void CLogLink::SetLogFileName(PCTSTR pszLogFileName)
{
	GetCompleteLogFileName(m_szLogFileName, pszLogFileName, NULL);
}

/**
 * @return type of the log (always BTLT_LOGFILE for the base class).
 */
inline BUGTRAP_LOGTYPE CLogLink::GetLogType(void) const
{
	return BTLT_LOGFILE;
}

/**
 * Base implementation does nothing; derived classes save their data here.
 * @param bCrash - true if crash has occurred.
 */
inline void CLogLink::SaveEntries(bool /*bCrash*/)
{
}
/// Reg file descriptor: a log link that exports a registry key to the
/// custom log file when a crash occurs.
class CRegLink : public CLogLink
{
public:
	/// Object constructor; starts with an empty registry key path.
	CRegLink(void);
	/// Object constructor from a log file name.
	CRegLink(PCTSTR pszLogFileName);
	/// Object constructor from a log file name and a registry key path.
	CRegLink(PCTSTR pszLogFileName, PCTSTR pszRegKey);
	/// Get log type.
	virtual BUGTRAP_LOGTYPE GetLogType(void) const;
	/// Save log link entries.
	virtual void SaveEntries(bool bCrash);
	/// Get registry key path.
	PCTSTR GetRegKey(void) const;
	/// Set registry key path.
	void SetRegKey(PCTSTR pszRegKey);
protected:
	/// Registry key path.
	TCHAR m_szRegKey[MAX_PATH];
};
/// Default constructor: starts with an empty registry key path (and, via
/// the base default constructor, an empty log file name).
inline CRegLink::CRegLink(void)
{
	*m_szRegKey = _T('\0');
}

/**
 * @param pszLogFileName - pointer to custom log file name.
 */
inline CRegLink::CRegLink(PCTSTR pszLogFileName) : CLogLink(pszLogFileName)
{
	*m_szRegKey = _T('\0');
}

/**
 * @param pszLogFileName - pointer to custom log file name.
 * @param pszRegKey - registry key path
 */
inline CRegLink::CRegLink(PCTSTR pszLogFileName, PCTSTR pszRegKey) : CLogLink(pszLogFileName)
{
	_tcscpy_s(m_szRegKey, countof(m_szRegKey), pszRegKey);
}

/**
 * @return type of the log (always BTLT_REGEXPORT for this class).
 */
inline BUGTRAP_LOGTYPE CRegLink::GetLogType(void) const
{
	return BTLT_REGEXPORT;
}

/**
 * Exports the configured registry key into the log file, but only when a
 * crash has actually occurred.
 * @param bCrash - true if crash has occurred.
 */
inline void CRegLink::SaveEntries(bool bCrash)
{
	if (bCrash)
		BT_ExportRegistryKey(m_szLogFileName, m_szRegKey);
}

/**
 * @param pszRegKey - registry key path
 */
inline void CRegLink::SetRegKey(PCTSTR pszRegKey)
{
	_tcscpy_s(m_szRegKey, countof(m_szRegKey), pszRegKey);
}

/**
 * @return registry key path.
 */
inline PCTSTR CRegLink::GetRegKey(void) const
{
	return m_szRegKey;
}
/**
 * Case-insensitive equality on the log file name only; any state added by
 * derived classes (e.g. CRegLink's registry key) is NOT compared.
 * @param rLogLink1 - 1st object.
 * @param rLogLink2 - 2nd object.
 * @return comparison result.
 */
inline bool operator==(const CLogLink& rLogLink1, const CLogLink& rLogLink2)
{
	return (_tcsicmp(rLogLink1.m_szLogFileName, rLogLink2.m_szLogFileName) == 0);
}

/**
 * Case-insensitive inequality on the log file name only (negation of
 * operator==).
 * @param rLogLink1 - 1st object.
 * @param rLogLink2 - 2nd object.
 * @return comparison result.
 */
inline bool operator!=(const CLogLink& rLogLink1, const CLogLink& rLogLink2)
{
	return (_tcsicmp(rLogLink1.m_szLogFileName, rLogLink2.m_szLogFileName) != 0);
}
|