prompt large_stringlengths 70 991k | completion large_stringlengths 0 1.02k |
|---|---|
<|file_name|>connectionService.js<|end_file_name|><|fim▁begin|>/*!
* speedt
* Copyright(c) 2015 speedt <13837186852@qq.com>
* BSD 3 Licensed
*/
'use strict';
var utils = require('speedt-utils');
var Service = function(app){
var self = this;
// TODO
self.serverId = app.getServerId();
self.connCount = 0;
self.loginedCount = 0;
self.logined = {};
};
module.exports = Service;
var proto = Service.prototype;
proto.increaseConnectionCount = function(){
return ++this.connCount;
};
proto.decreaseConnectionCount = function(uid){
var self = this;
// TODO
var result = [--self.connCount];
// TODO
if(uid) result.push(removeLoginedUser.call(self, uid));
return result;
};
proto.replaceLoginedUser = function(uid, info){
var self = this;
// TODO
var user = self.logined[uid];
if(user) return updateUserInfo.call(self, user, info);
// TODO
self.loginedCount++;
// TODO
info.uid = uid;
self.logined[uid] = info;
};
var updateUserInfo = function(user, info){
var self = this;
// TODO
for(var p in info){
if(info.hasOwnProperty(p) && typeof 'function' !== info[p]){
self.logined[user.uid][p] = info[p];
} // END
} // END
};<|fim▁hole|>var removeLoginedUser = function(uid){
var self = this;
// TODO
if(!self.logined[uid]) return;
// TODO
delete self.logined[uid];
// TODO
return --self.loginedCount;
};
proto.getStatisticsInfo = function(){
var self = this;
return {
serverId: self.serverId,
connCount: self.connCount,
loginedCount: self.loginedCount,
logined: self.logined
};
};<|fim▁end|> | |
<|file_name|>debug_view_handler.py<|end_file_name|><|fim▁begin|># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Request handler to display the debug view for a Failure."""
import jinja2
import os
import sys
import webapp2
from common import ispy_utils
import views
JINJA = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(views.__file__)),
extensions=['jinja2.ext.autoescape'])
class DebugViewHandler(webapp2.RequestHandler):
"""Request handler to display the debug view for a failure."""
def get(self):
"""Handles get requests to the /debug_view page.
GET Parameters:
test_run: The test run.
expectation: The expectation name.
"""
test_run = self.request.get('test_run')
expectation = self.request.get('expectation')
expected_path = ispy_utils.GetExpectationPath(expectation, 'expected.png')
actual_path = ispy_utils.GetFailurePath(test_run, expectation, 'actual.png')
data = {}
<|fim▁hole|> data['actual'] = _ImagePath(actual_path)
data['test_run'] = test_run
data['expectation'] = expectation
template = JINJA.get_template('debug_view.html')
self.response.write(template.render(data))<|fim▁end|> | def _ImagePath(url):
return '/image?file_path=%s' % url
data['expected'] = _ImagePath(expected_path) |
<|file_name|>test_dg_input_advection.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
input_name = '../examples/dg/advection_2D.py'
output_name = 'advection_sol.msh'
from tests_basic import TestInput
class Test( TestInput ):<|fim▁hole|><|fim▁end|> | pass |
<|file_name|>clean.py<|end_file_name|><|fim▁begin|>from gitcd.interface.cli.abstract import BaseCommand
from gitcd.git.branch import Branch
from gitcd.app.clean import Clean as CleanHelper
class Clean(BaseCommand):
updateRemote = True
def run(self, branch: Branch):
helper = CleanHelper()
branchesToDelete = helper.getBranchesToDelete()
self.interface.writeOut('Branches to delete')
if len(branchesToDelete) == 0:
self.interface.ok(' - no branches to delete')
<|fim▁hole|> self.interface.red(" - %s" % branchToDelete.getName())
self.interface.writeOut('')
if len(branchesToDelete) == 0:
self.interface.ok('Nice, your local repository is clean already.')
return True
delete = self.interface.askFor(
'Do you want me to delete those branches locally?',
['yes', 'no'],
'yes'
)
if delete == 'yes':
helper.deleteBranches(branchesToDelete)
return True<|fim▁end|> | for branchToDelete in branchesToDelete: |
<|file_name|>EntityEvokerPet.java<|end_file_name|><|fim▁begin|>/*
* This file is part of EchoPet.
*
* EchoPet is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* EchoPet is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with EchoPet. If not, see <http://www.gnu.org/licenses/>.
*/
package com.dsh105.echopet.compat.nms.v1_14_R1.entity.type;
import com.dsh105.echopet.compat.api.entity.EntityPetType;
import com.dsh105.echopet.compat.api.entity.EntitySize;
import com.dsh105.echopet.compat.api.entity.IPet;
import com.dsh105.echopet.compat.api.entity.PetType;
import com.dsh105.echopet.compat.api.entity.SizeCategory;
import com.dsh105.echopet.compat.api.entity.type.nms.IEntityEvokerPet;
import net.minecraft.server.v1_14_R1.DataWatcher;
import net.minecraft.server.v1_14_R1.DataWatcherObject;
import net.minecraft.server.v1_14_R1.DataWatcherRegistry;
import net.minecraft.server.v1_14_R1.EntityInsentient;
import net.minecraft.server.v1_14_R1.EntityTypes;
import net.minecraft.server.v1_14_R1.World;
/**
* @since Nov 19, 2016
*/
@EntitySize(width = 0.6F, height = 1.95F)
@EntityPetType(petType = PetType.EVOKER)
public class EntityEvokerPet extends EntityIllagerAbstractPet implements IEntityEvokerPet{
// EntityIllagerWizard
private static final DataWatcherObject<Byte> c = DataWatcher.a(EntityEvokerPet.class, DataWatcherRegistry.a);// some sorta spell shit
public EntityEvokerPet(EntityTypes<? extends EntityInsentient> type, World world){
super(type, world);
}
public EntityEvokerPet(EntityTypes<? extends EntityInsentient> type, World world, IPet pet){
super(type, world, pet);<|fim▁hole|> this(EntityTypes.EVOKER, world);
}
public EntityEvokerPet(World world, IPet pet){
this(EntityTypes.EVOKER, world, pet);
}
@Override
protected void initDatawatcher(){
super.initDatawatcher();
this.datawatcher.register(c, (byte) 0);
}
@Override
public SizeCategory getSizeCategory(){
return SizeCategory.REGULAR;
}
}<|fim▁end|> | }
public EntityEvokerPet(World world){ |
<|file_name|>sse_spider.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
__author__ = 'tyler'
import urllib2
import scrapy
from scrapy import log
import demjson
'''class AutoSpider(scrapy.Spider):
name = "sse"
allowed_domains = ["query.sse.com.cn"]
preurl='http://data.eastmoney.com/stock';
start_urls = [
'http://query.sse.com.cn/infodisplay/showTradePublicFile.do?jsonCallBack=jQuery172023210379532913938_1430627585124&dateTx=2015-04-29&random=0.48195114223841695&_=1430627617454'
]
def parse(self, response):
jsonstr=response.body_as_unicode()
log.msg(jsonstr[len('jQuery172023210379532913938_1430627585124'):-1])
s1=demjson.decode(jsonstr[len('jQuery172023210379532913938_1430627585124('):-1])
log.msg(s1['fileContents'])
if __name__=='__main__':'''
import re
tradeDay=''
send_headers = {
'Host': 'query.sse.com.cn',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0',
'Accept': '*/*',
'Accept-Language': 'zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3',
'Accept-Encoding': 'gzip, deflate',
'Referer': 'http://www.sse.com.cn/disclosure/diclosure/public/',
'Connection': 'keep-alive'
}
url='http://query.sse.com.cn/infodisplay/showTradePublicFile.do?jsonCallBack=jQuery172023210379532913938_1430627585124&dateTx=2015-04-29&random=0.48195114223841695&_=1430627617454'
req = urllib2.Request(url,headers=send_headers)
response = urllib2.urlopen(req)
html = response.read()
jsonStr=demjson.decode(html[len('jQuery172023210379532913938_1430627585124('):-1])
lines=jsonStr['fileContents']
def loopLineFun(lines):
for line in lines:
yield line.encode('utf8')
loopline=loopLineFun(lines)
class LHBItem():
pass
dictlist = {}
r1 = re.compile(ur'\s+\(\d\)\s+(\d+)\s+([\u4e00-\u9fa5]+)\s+((-?\d+)(\.\d+)?)%\s+(\d+)\s+((-?\d+)(\.\d+)?)')
#r1 = re.compile(ur'\s+\(\d\)')
def readDep(loop,code):
state='buy'
<|fim▁hole|> if tmp.find('买入营业部名称')>=0:
state='buy'
continue
if tmp.find('卖出营业部名称')>=0:
state='sell'
continue
outMatch=rout.match(tmp)
if outMatch and state=='sell':
print '跳出'
return
if rdep.match(tmp.decode('utf8')):
dep=re.split('\s+',tmp)
depName=dep[2]
tradeAmount=dep[3]
print 'depName ' + depName
r2=re.compile(ur'\s+[\u4e00-\u9fa5]+:\s(\d+)\s+[\u4e00-\u9fa5]+:\s[\u4e00-\u9fa5]+')
def readA7(loop):
for tmp in loop:
mat=r1.match(tmp.decode('utf8'))
if mat:
lbhItem =LHBItem()
lbhItem.symbol= mat.group(1)
lbhItem.stockName= mat.group(2)
lbhItem.zhengdie= mat.group(3)
lbhItem.vol=mat.group(6)
lbhItem.amount= mat.group(7)
dictlist[lbhItem.symbol]=lbhItem
continue
#dep
mat2=r2.match(tmp.decode('utf8'))
if mat2:
print '*************************'
readDep(loop,mat2.group(1))
if tmp.find('二、')>=0:
return
for tmp in loopline:
print tmp
if tmp.find('交易日期')>=0:
tradeDay=tmp[13:]
print tradeDay
if tmp.find('偏离值达到7%')>=0:
tmp=readA7(loopline)
print tmp;
break
if tmp.find('二、')>=0:
print '-------'
for k in dictlist:
print k<|fim▁end|> | rdep = re.compile(ur'\s+\(\d\)')
rout=re.compile(ur'^\s?$')
for tmp in loop:
print tmp
|
<|file_name|>PasteSites.py<|end_file_name|><|fim▁begin|># Copright (C) 2015 Eric Skoglund
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, see http://www.gnu.org/licenses/gpl-2.0.html
import requests<|fim▁hole|> self.value = value
def __str__(self):
return repr(self.value)
class PasteSite(object):
def __init__(self, url):
self.url = url
self.paste_url = None
self.data = None
@staticmethod
def siteFactory(site_name):
if site_name == 'slexy.org':
return Slexy()
elif site_name == 'pastebin.mozilla.org':
return Mozilla()
else:
raise NotSupported("This site is not supported")
def parse(self, args):
""" Internal method used by the PasteSite class.
Returns a dictionary of the parsed input arguments.
Parses the arguments given at the command line.
Many pastebin like sites use different arguments
for the paste so this method should be implemented
for each subclass of PasteSite.
See the slexy class for an example of how to implement
this method for subclasses.
"""
self.data = args
def paste(self):
"""Posts the data to the paste site.
This method tries to post the data to the paste site.
If the resulting request does not have a ok status the
program exits else we return the resulting paste url.
The method assumes that the data is in a dictionary.
"""
if self.data == None:
print('You can only paste after a parse')
sys.exit(-1)
res = requests.post(self.url, self.data)
if not res.ok:
print('Bad response {0} {1}'.format(res.reason, res.status_code))
sys.exit(-1)
self.paste_url = res.url
class Slexy(PasteSite):
def __init__(self):
super(Slexy, self).__init__('http://slexy.org/submit')
def parse(self, args):
form_data = {}
arg_translation = {'text' : 'raw_paste',
'language' : 'language',
'expiration' : 'expire',
'comment' : 'comment',
'description' : 'descr',
'visibility' : 'permissions',
'linum' : 'linenumbers',
'author' : 'author'}
for k,v in args.items():
if arg_translation.get(k):
form_data[arg_translation[k]] = v
form_data['submit'] = 'Submit Paste'
self.data = form_data
class Mozilla(PasteSite):
def __init__(self):
super(Mozilla, self).__init__('https://pastebin.mozilla.org')
def parse(self, args):
form_data = {}
arg_translation = {'text' : 'code2',
'expiration' : 'expiry',
'syntax_highlight' : 'format',
'author' : 'poster'}
for k,v in args.items():
if arg_translation.get(k):
form_data[arg_translation[k]] = v
form_data['paste'] = 'Send'
form_data['parent_pid'] = ''
self.data = form_data<|fim▁end|> | import sys
class NotSupported(Exception):
def __init__(self, value): |
<|file_name|>xrm.d.ts<|end_file_name|><|fim▁begin|>declare namespace Xrm {
interface EntityDefinition {
EntitySetName: string;
}
namespace Attributes {
interface LookupAttribute {
getLookupTypes(): LookupValue[];
}
}
namespace Controls {
interface Control {
getAttribute(): Xrm.Attributes.Attribute;
}
}
namespace Page {
interface LookupValue {
type: string;
typename: string;
}
}
interface Ui {
getCurrentControl(): Xrm.Controls.Control;
}
interface XrmStatic {<|fim▁hole|>
interface XrmInternal {
getEntityCode(entityName: string): number;
isUci(): boolean;
}
interface GlobalContext {
isOffice365(): boolean;
isOnPremises(): boolean;
}
}<|fim▁end|> | Internal: XrmInternal;
} |
<|file_name|>SnapshotUtils.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2019 Frederic Thevenet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.binjr.common.javafx.controls;
import javafx.scene.Node;
import javafx.scene.SnapshotParameters;
import javafx.scene.image.WritableImage;
import javafx.scene.transform.Transform;
import javafx.stage.Screen;
public final class SnapshotUtils {
public static WritableImage outputScaleAwareSnapshot(Node node) {
return scaledSnapshot(node, 0.0,0.0);
}
public static WritableImage scaledSnapshot(Node node, double scaleX, double scaleY) {
SnapshotParameters spa = new SnapshotParameters();
spa.setTransform(Transform.scale(
scaleX == 0.0 ? Screen.getPrimary().getOutputScaleX() : scaleX,
scaleY == 0.0 ? Screen.getPrimary().getOutputScaleY() : scaleY));
return node.snapshot(spa, null);<|fim▁hole|> }
}<|fim▁end|> | |
<|file_name|>mathsquiz-step2.py<|end_file_name|><|fim▁begin|># this function will print a welcome message to the user
def welcome_message():
print("Hello! I'm going to ask you 10 maths questions.")
print("Let's see how many you can get right!")
# this function will ask a maths question and return the points awarded (1 or 0)
def ask_question(first_number, second_number):
print("What is", first_number, "x", second_number)
answer = input("Answer: ")
if int(answer) == first_number * second_number:
print("Correct!")
points_awarded = 1
else:
print("Wrong!")
points_awarded = 0
print("")
return points_awarded
# this function will look at the final scores and print the results
def print_final_scores(final_score):
print("That's all the questions done. So...what was your score...?")
print("You scored", score, "points out of a possible 10.")
if score < 5:
print("You need to practice your maths!")
elif score < 8:
print("That's pretty good!")
elif score < 10:
print("You did really well! Try and get 10 out of 10 next time!")
elif score == 10:
print("Wow! What a maths star you are!! I'm impressed!")
<|fim▁hole|>welcome_message()
# set the score to zero
score = 0
# ask questions
score = score + ask_question(8,7)
score = score + ask_question(4,9)
score = score + ask_question(12,6)
score = score + ask_question(6,8)
score = score + ask_question(7,7)
score = score + ask_question(11,6)
score = score + ask_question(11,2)
score = score + ask_question(7,9)
score = score + ask_question(6,6)
score = score + ask_question(4,8)
# print the final scores
print_final_scores(score)<|fim▁end|> |
# display welcome message |
<|file_name|>knn_binary.py<|end_file_name|><|fim▁begin|>""" kNN digit classifier, converting images to binary before
training and classification. Should (or should allow for)
reduction in kNN object size.
"""
<|fim▁hole|>from utils import classifier as cs
from utils import knn
from utils import mnist
class KnnBinary(knn.KnnDigitClassifier):
def train(self, images, labels):
super(KnnBinary, self).train(
self.preprocess_all(images), labels)
def classify(self, image):
return super(KnnBinary, self).classify(self.preprocess(image))
def preprocess(self, image):
# [1]: threshold returns tuple (x, x, img), where x is
# something I cbf figuring out
return cv2.threshold(image, 127, 1, cv2.THRESH_BINARY)[1]
def preprocess_all(self, images):
for image in images:
yield self.preprocess(image)
if __name__ == '__main__':
NUM_TRAINS = 100
NUM_TESTS = 100
runner = cs.ClassifierRunner(KnnBinary())
runner.train(mnist.training_images(NUM_TRAINS), mnist.training_labels(NUM_TRAINS))
runner.run(mnist.test_images(NUM_TESTS), mnist.test_labels(NUM_TESTS))
print(runner.get_report_str())<|fim▁end|> | import cv2
|
<|file_name|>jquery-ui.searchflyout.js<|end_file_name|><|fim▁begin|>/*global console: false, creatis_carpenterStorage_replaceContentFromStorage: false */
$(function () {
function initDesktop() {
$("#accordion").accordion({
collapsible: true,
active: localStorage.selectedCarpenter ? false : true,
});
// Kontaktseite Suche
$("#search-site-submit").click(function () {
var searchQuery = $("#search-query-site").val();
if (isValidPostal(searchQuery)) {
document.location = "/tischler?query=" + searchQuery;
}
return false;
});
$("#search-query-site").on('input', function () {
var isValid = isValidPostal($("#search-query-site").val());
$("#search-query-site").css('border-color', !isValid ? 'red' : '#d5d5d5');
});
$("#search-query").on('input', function () {
var isValid = isValidPostal($("#search-query").val());
$("#search-query").css('border-color', !isValid ? 'red' : '#d5d5d5');
});
//Prevent default on enter
$('#search-query').keypress(function (event) {
if (event.keyCode == 10 || event.keyCode == 13)
event.preventDefault();
});
$("#accordion").removeClass('c-hidden');
$("#search-flyout-submit").click(function () {
var searchQuery = $("#search-query").val();
if (searchQuery !== null && searchQuery !== "" && isValidPostal(searchQuery)) {
document.location = "/tischler?query=" + searchQuery;
}
return false;
});
if (typeof (creatis_carpenterStorage_replaceContentFromStorage) !== "undefined") {
creatis_carpenterStorage_replaceContentFromStorage();
}
}
function initMobile() {
//code taken from https://github.com/codrops/ButtonComponentMorph/blob/master/index.html
var docElem = window.document.documentElement, didScroll, scrollPosition;
// trick to prevent scrolling when opening/closing button
function noScrollFn() {
window.scrollTo(scrollPosition ? scrollPosition.x : 0, scrollPosition ? scrollPosition.y : 0);
}
function noScroll() {
window.removeEventListener('scroll', scrollHandler);
window.addEventListener('scroll', noScrollFn);
}
function scrollFn() {
window.addEventListener('scroll', scrollHandler);
}
function canScroll() {
window.removeEventListener('scroll', noScrollFn);
scrollFn();
}
function scrollHandler() {
if (!didScroll) {
didScroll = true;
setTimeout(function () { scrollPage(); }, 60);
}
};
function scrollPage() {
scrollPosition = { x: window.pageXOffset || docElem.scrollLeft, y: window.pageYOffset || docElem.scrollTop };
didScroll = false;
};
scrollFn();
// Mobile carpenter search button
var mobileSearchButton = document.querySelector('#search-mobile .morph-button');
$('#search-mobile .morph-button').click(function (ev) {
if (ev.originalEvent) {
ev.originalEvent.preventDefault();
}
});
var mobileSearchMorphingButton = new UIMorphingButton(mobileSearchButton, {
closeEl: '.icon-close',
onBeforeOpen: function () {
// don't allow to scroll
noScroll();
},
onAfterOpen: function () {
// can scroll again
canScroll();
$('.dialog-page').hide();
$('.mobile-search-page-overview').show();
},
onBeforeClose: function () {
// don't allow to scroll
noScroll();
},
onAfterClose: function () {
// can scroll again
if (window.buyWithoutCarpenter && localStorage.selectedCarpenter !== null) {
$('#purchase-request-starter')[0].click();
}
else
{
window.buyWithoutCarpenter = false;
}
canScroll();
}
});
}
(function startup() {
if (isMobile()) {
initMobile();
} else {
if ($("#accordion").length > 0) {
initDesktop();
}
}
//initDesktop();
//initMobile();
displaySelectedCarpenter();
})();
});
function isMobile() {
return $("#search-mobile").css("visibility") === "visible";
}
function displaySelectedCarpenter() {
if (localStorage.selectedCarpenter) {<|fim▁hole|> var data = JSON.parse(localStorage.getItem('selectedCarpenter'));
var cId = data.id;
setTimeout(function () {
$('#' + cId + ' .btn-select').first().trigger('click');
}, 50);
}
}<|fim▁end|> | $('#div_text').css('display', 'none'); |
<|file_name|>20160603120209_c9218e757bc_rename_admin_role_from_ggrc_admin_to_.py<|end_file_name|><|fim▁begin|># Copyright (C) 2017 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Rename admin role from gGRC Admin to Administrator.
Create Date: 2016-06-03 12:02:09.438599
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
# revision identifiers, used by Alembic.
revision = 'c9218e757bc'
down_revision = '4d5180ab1b42'
roles_table = table(
'roles',
column('id', sa.Integer),
column('name', sa.String),
column('updated_at', sa.DateTime),
column('description', sa.Text),<|fim▁hole|>
def upgrade():
op.execute(roles_table.update()
.where(roles_table.c.name == 'gGRC Admin')
.values(name='Administrator',
description='System Administrator with super-user '
'privileges'))
def downgrade():
op.execute(roles_table.update()
.where(roles_table.c.name == 'Administrator')
.values(name='gGRC Admin',
description='gGRC System Administrator with super-user '
'privileges'))<|fim▁end|> | ) |
<|file_name|>0038_merge.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-22 14:09
from __future__ import unicode_literals
from django.db import migrations
<|fim▁hole|>class Migration(migrations.Migration):
dependencies = [
('eighth', '0036_eighthscheduledactivity_administrative'),
('eighth', '0037_auto_20160307_2342'),
]<|fim▁end|> | |
<|file_name|>fox_and_snake.py<|end_file_name|><|fim▁begin|>a, b = map(int,raw_input().split())
i=0
while(i<a):
j=0
c=[]
if(i%2==0):
while(j<b):
c.append('#')
j=j+1
print (''.join(c))
else:
k = int(i/2)
if (k%2==0):
while(j<(b-1)):
c.append(".")<|fim▁hole|> else:
c.append('#')
while(j<(b-1)):
c.append(".")
j=j+1
print (''.join(c))
i=i+1<|fim▁end|> | j=j+1
c.append("#")
print (''.join(c)) |
<|file_name|>test_callisto.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Author: Florian Mayer <florian.mayer@bitsrc.org>
from __future__ import absolute_import
import shutil
from tempfile import mkdtemp
from datetime import datetime
import pytest
import os
import glob
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_allclose
import sunpy.data.test
from sunpy.spectra.sources.callisto import (
CallistoSpectrogram, query, download, minimal_pairs
)
@pytest.fixture
def CALLISTO_IMAGE():
testpath = sunpy.data.test.rootdir
return os.path.join(testpath, 'BIR_20110922_050000_01.fit')
@pytest.fixture
def CALLISTO_IMAGE_GLOB_KEY():
return 'BIR_*'
@pytest.fixture
def CALLISTO_IMAGE_GLOB_INDEX(CALLISTO_IMAGE, CALLISTO_IMAGE_GLOB_KEY):
testpath = sunpy.data.test.rootdir
res = glob.glob(os.path.join(testpath, CALLISTO_IMAGE_GLOB_KEY))
return res.index(CALLISTO_IMAGE)
def test_read(CALLISTO_IMAGE):
ca = CallistoSpectrogram.read(CALLISTO_IMAGE)
assert ca.start == datetime(2011, 9, 22, 5, 0, 0, 454000)
assert ca.t_init == 18000.0
assert ca.shape == (200, 3600)
assert ca.t_delt == 0.25
# Test linearity of time axis.
assert np.array_equal(
ca.time_axis, np.linspace(0, 0.25 * (ca.shape[1] - 1), ca.shape[1])
)
assert ca.dtype == np.uint8
@pytest.mark.online
def test_query():
URL = 'http://soleil.i4ds.ch/solarradio/data/2002-20yy_Callisto/2011/09/22/'
result = list(query(
datetime(2011, 9, 22, 5), datetime(2011, 9, 22, 6), set(["BIR"])
))
RESULTS = [
"BIR_20110922_050000_01.fit.gz",
"BIR_20110922_051500_01.fit.gz",
"BIR_20110922_053000_01.fit.gz",
"BIR_20110922_050000_03.fit.gz",
"BIR_20110922_051500_03.fit.gz",
"BIR_20110922_053000_03.fit.gz",
"BIR_20110922_054500_03.fit.gz",
]
RESULTS.sort()
# Should be sorted anyway, but better to assume as little as possible.
result.sort()
for item in RESULTS:
assert URL + item in result
@pytest.mark.online
@pytest.mark.xfail
def test_query_number():
URL = 'http://soleil.i4ds.ch/solarradio/data/2002-20yy_Callisto/2011/09/22/'
result = list(query(
datetime(2011, 9, 22, 5), datetime(2011, 9, 22, 6), set([("BIR", 1)])
))
RESULTS = [
"BIR_20110922_050000_01.fit.gz",
"BIR_20110922_051500_01.fit.gz",
"BIR_20110922_053000_01.fit.gz",
]
RESULTS.sort()
# Should be sorted anyway, but better to assume as little as possible.
result.sort()
assert len(result) == len(RESULTS)
@pytest.mark.online
@pytest.mark.xfail
def test_download():
directory = mkdtemp()
try:
result = query(
datetime(2011, 9, 22, 5), datetime(2011, 9, 22, 6), set([("BIR", 1)])
)
RESULTS = [
"BIR_20110922_050000_01.fit.gz",
"BIR_20110922_051500_01.fit.gz",
"BIR_20110922_053000_01.fit.gz",
]
download(result, directory)
for item in RESULTS:
assert item in sorted(os.listdir(directory))
finally:
shutil.rmtree(directory)
def test_create_file(CALLISTO_IMAGE):
ca = CallistoSpectrogram.create(CALLISTO_IMAGE)
assert np.array_equal(ca.data, CallistoSpectrogram.read(CALLISTO_IMAGE).data)
def test_create_file_kw(CALLISTO_IMAGE):
ca = CallistoSpectrogram.create(filename=CALLISTO_IMAGE)
assert np.array_equal(ca.data, CallistoSpectrogram.read(CALLISTO_IMAGE).data)
@pytest.mark.online
def test_create_url():
URL = (
"http://soleil.i4ds.ch/solarradio/data/2002-20yy_Callisto/2011/09/22/"
"BIR_20110922_050000_01.fit.gz"
)
ca = CallistoSpectrogram.create(URL)
assert np.array_equal(ca.data, CallistoSpectrogram.read(URL).data)
@pytest.mark.online
def test_create_url_kw():
URL = (
"http://soleil.i4ds.ch/solarradio/data/2002-20yy_Callisto/2011/09/22/"
"BIR_20110922_050000_01.fit.gz"
)
ca = CallistoSpectrogram.create(url=URL)
assert np.array_equal(ca.data, CallistoSpectrogram.read(URL).data)
def test_create_single_glob(CALLISTO_IMAGE, CALLISTO_IMAGE_GLOB_INDEX, CALLISTO_IMAGE_GLOB_KEY):
PATTERN = os.path.join(os.path.dirname(CALLISTO_IMAGE), CALLISTO_IMAGE_GLOB_KEY)
ca = CallistoSpectrogram.create(PATTERN)
assert_allclose(ca[CALLISTO_IMAGE_GLOB_INDEX].data,
CallistoSpectrogram.read(CALLISTO_IMAGE).data)
# seems like this does not work anymore and can't figure out what it is for
#def test_create_single_glob_kw(CALLISTO_IMAGE):
# PATTERN = os.path.join( os.path.dirname(CALLISTO_IMAGE), "BIR_*")
# ca = CallistoSpectrogram.create(singlepattern=PATTERN)
# assert np.array_equal(ca[0].data, CallistoSpectrogram.read(CALLISTO_IMAGE).data)
def test_create_glob_kw(CALLISTO_IMAGE, CALLISTO_IMAGE_GLOB_INDEX, CALLISTO_IMAGE_GLOB_KEY):
PATTERN = os.path.join(
os.path.dirname(CALLISTO_IMAGE),
CALLISTO_IMAGE_GLOB_KEY
)
ca = CallistoSpectrogram.create(pattern=PATTERN)[CALLISTO_IMAGE_GLOB_INDEX]
assert_allclose(ca.data, CallistoSpectrogram.read(CALLISTO_IMAGE).data)
def test_create_glob(CALLISTO_IMAGE_GLOB_KEY):
PATTERN = os.path.join(
os.path.dirname(sunpy.data.test.__file__),
CALLISTO_IMAGE_GLOB_KEY
)
ca = CallistoSpectrogram.create(PATTERN)
assert len(ca) == 2
def test_minimum_pairs_commotative():
A = [0, 1, 2]
B = [1, 2, 3]
first = list(minimal_pairs(A, B))
assert first == [(b, a, d) for a, b, d in minimal_pairs(B, A)]
def test_minimum_pairs_end():
assert (
list(minimal_pairs([0, 1, 2, 4], [1, 2, 3, 4])) ==
[(1, 0, 0), (2, 1, 0), (3, 3, 0)]
)
def test_minimum_pairs_end_more():
assert (
list(minimal_pairs([0, 1, 2, 4, 8], [1, 2, 3, 4])) ==
[(1, 0, 0), (2, 1, 0), (3, 3, 0)]
)
def test_minimum_pairs_end_diff():
assert (
list(minimal_pairs([0, 1, 2, 8], [1, 2, 3, 4])) ==
[(1, 0, 0), (2, 1, 0), (3, 3, 4)]
)
def test_closest():
assert (
list(minimal_pairs([50, 60], [0, 10, 20, 30, 40, 51, 52])) ==
[(0, 5, 1), (1, 6, 8)]
)
def test_homogenize_factor():
a = np.float64(np.random.randint(0, 255, 3600))[np.newaxis, :]
c1 = CallistoSpectrogram(
a,
np.arange(3600),
np.array([1]),
datetime(2011, 1, 1),
datetime(2011, 1, 1, 1),
0,
1,
'Time',
'Frequency',
'Test',
None,
None,
None,
False
)
b = 2 * a
c2 = CallistoSpectrogram(
b,
np.arange(3600),
np.array([1]),
datetime(2011, 1, 1),
datetime(2011, 1, 1, 1),
0,
1,
'Time',
'Frequency',
'Test',
None,
None,
None,
False
)
pairs_indices, factors, constants = c1._homogenize_params(
c2, 0
)
assert pairs_indices == [(0, 0)]
assert_array_almost_equal(factors, [0.5], 2)
assert_array_almost_equal(constants, [0], 2)
assert_array_almost_equal(factors[0] * b + constants[0], a)
def test_homogenize_constant():
a = np.float64(np.random.randint(0, 255, 3600))[np.newaxis, :]
c1 = CallistoSpectrogram(
a,
np.arange(3600),<|fim▁hole|> np.array([1]),
datetime(2011, 1, 1),
datetime(2011, 1, 1, 1),
0,
1,
'Time',
'Frequency',
'Test',
None,
None,
None,
False
)
b = a + 10
c2 = CallistoSpectrogram(
b,
np.arange(3600),
np.array([1]),
datetime(2011, 1, 1),
datetime(2011, 1, 1, 1),
0,
1,
'Time',
'Frequency',
'Test',
None,
None,
None,
False
)
pairs_indices, factors, constants = c1._homogenize_params(
c2, 0
)
assert pairs_indices == [(0, 0)]
assert_array_almost_equal(factors, [1], 2)
assert_array_almost_equal(constants, [-10], 2)
assert_array_almost_equal(factors[0] * b + constants[0], a)
def test_homogenize_both():
a = np.float64(np.random.randint(0, 255, 3600))[np.newaxis, :]
c1 = CallistoSpectrogram(
a,
np.arange(3600),
np.array([1]),
datetime(2011, 1, 1),
datetime(2011, 1, 1, 1),
0,
1,
'Time',
'Frequency',
'Test',
None,
None,
None,
False
)
b = 2 * a + 1
c2 = CallistoSpectrogram(
b,
np.arange(3600),
np.array([1]),
datetime(2011, 1, 1),
datetime(2011, 1, 1, 1),
0,
1,
'Time',
'Frequency',
'Test',
None,
None,
None,
False
)
pairs_indices, factors, constants = c1._homogenize_params(
c2, 0
)
assert pairs_indices == [(0, 0)]
assert_array_almost_equal(factors, [0.5], 2)
assert_array_almost_equal(constants, [-0.5], 2)
assert_array_almost_equal(factors[0] * b + constants[0], a)
def test_homogenize_rightfq():
a = np.float64(np.random.randint(0, 255, 3600))[np.newaxis, :]
c1 = CallistoSpectrogram(
a,
np.arange(3600),
np.array([1]),
datetime(2011, 1, 1),
datetime(2011, 1, 1, 1),
0,
1,
'Time',
'Frequency',
'Test',
None,
None,
None,
False
)
b = 2 * a + 1
c2 = CallistoSpectrogram(
np.concatenate([
np.arange(3600)[np.newaxis, :], b,
np.arange(3600)[np.newaxis, :]
], 0),
np.arange(3600),
np.array([0, 1, 2]),
datetime(2011, 1, 1),
datetime(2011, 1, 1, 1),
0,
1,
'Time',
'Frequency',
'Test',
None,
None,
None,
False
)
pairs_indices, factors, constants = c1._homogenize_params(
c2, 0
)
assert pairs_indices == [(0, 1)]
assert_array_almost_equal(factors, [0.5], 2)
assert_array_almost_equal(constants, [-0.5], 2)
assert_array_almost_equal(factors[0] * b + constants[0], a)
@pytest.mark.online
def test_extend(CALLISTO_IMAGE):
im = CallistoSpectrogram.create(CALLISTO_IMAGE)
im2 = im.extend()
# Not too stable test, but works.
assert im2.data.shape == (200, 7200)<|fim▁end|> | |
<|file_name|>ReloadingClassLoader.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.application;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import org.apache.wicket.util.collections.UrlExternalFormComparator;
import org.apache.wicket.util.file.File;
import org.apache.wicket.util.listener.IChangeListener;
import org.apache.wicket.util.time.Duration;
import org.apache.wicket.util.watch.IModificationWatcher;
import org.apache.wicket.util.watch.ModificationWatcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Custom ClassLoader that reverses the classloader lookups, and that is able to notify a listener
* when a class file is changed.
*
* @author <a href="mailto:jbq@apache.org">Jean-Baptiste Quenot</a>
*/
public class ReloadingClassLoader extends URLClassLoader
{
private static final Logger log = LoggerFactory.getLogger(ReloadingClassLoader.class);
private static final Set<URL> urls = new TreeSet<URL>(new UrlExternalFormComparator());
private static final List<String> patterns = new ArrayList<String>();
private IChangeListener listener;
private final Duration pollFrequency = Duration.seconds(3);
private final IModificationWatcher watcher;
static
{
addClassLoaderUrls(ReloadingClassLoader.class.getClassLoader());
excludePattern("org.apache.wicket.*");
includePattern("org.apache.wicket.examples.*");
}
/**
*
* @param name
* @return true if class if found, false otherwise
*/
protected boolean tryClassHere(String name)
{
// don't include classes in the java or javax.servlet package
if (name != null && (name.startsWith("java.") || name.startsWith("javax.servlet")))
{
return false;
}
// Scan includes, then excludes
boolean tryHere;
// If no explicit includes, try here
if (patterns == null || patterns.size() == 0)
{
tryHere = true;
}
else
{
// See if it matches include patterns
tryHere = false;
for (String rawpattern : patterns)
{
if (rawpattern.length() <= 1)
{
continue;
}
// FIXME it seems that only "includes" are handled. "Excludes" are ignored
boolean isInclude = rawpattern.substring(0, 1).equals("+");
String pattern = rawpattern.substring(1);
if (WildcardMatcherHelper.match(pattern, name) != null)
{
tryHere = isInclude;
}
}
}
return tryHere;
}
/**
* Include a pattern
*
* @param pattern
* the pattern to include
*/
public static void includePattern(String pattern)
{
patterns.add("+" + pattern);
}
/**
* Exclude a pattern
*
* @param pattern
* the pattern to exclude
*/
public static void excludePattern(String pattern)
{
patterns.add("-" + pattern);
}
/**
* Returns the list of all configured inclusion or exclusion patterns
*
* @return list of patterns as String
*/
public static List<String> getPatterns()
{
return patterns;
}
/**
* Add the location of a directory containing class files
*
* @param url
* the URL for the directory
*/
public static void addLocation(URL url)
{
urls.add(url);
}
/**
* Returns the list of all configured locations of directories containing class files
*
* @return list of locations as URL
*/
public static Set<URL> getLocations()
{
return urls;
}
/**
* Add all the url locations we can find for the provided class loader
*
* @param loader
* class loader
*/
private static void addClassLoaderUrls(ClassLoader loader)
{
if (loader != null)
{
final Enumeration<URL> resources;
try
{
resources = loader.getResources("");
}
catch (IOException e)
{
throw new RuntimeException(e);
}
while (resources.hasMoreElements())
{
URL location = resources.nextElement();
ReloadingClassLoader.addLocation(location);
}
}
}
/**
* Create a new reloading ClassLoader from a list of URLs, and initialize the
* ModificationWatcher to detect class file modifications
*
* @param parent
* the parent classloader in case the class file cannot be loaded from the above
* locations
*/
public ReloadingClassLoader(ClassLoader parent)
{
super(new URL[] { }, parent);
// probably doubles from this class, but just in case
addClassLoaderUrls(parent);
for (URL url : urls)
{
addURL(url);
}
watcher = new ModificationWatcher(pollFrequency);
}
/**
* Gets a resource from this <code>ClassLoader</class>. If the
* resource does not exist in this one, we check the parent.
* Please note that this is the exact opposite of the
* <code>ClassLoader</code> spec. We use it to work around inconsistent class loaders from third
* party vendors.
*
* @param name
* of resource
*/
@Override
public final URL getResource(final String name)
{
URL resource = findResource(name);
ClassLoader parent = getParent();
if (resource == null && parent != null)
{
resource = parent.getResource(name);
}
return resource;
}
/**
* Loads the class from this <code>ClassLoader</class>. If the
* class does not exist in this one, we check the parent. Please
* note that this is the exact opposite of the
* <code>ClassLoader</code> spec. We use it to load the class from the same classloader as
* WicketFilter or WicketServlet. When found, the class file is watched for modifications.
*
* @param name
* the name of the class
* @param resolve
* if <code>true</code> then resolve the class
* @return the resulting <code>Class</code> object
* @exception ClassNotFoundException
* if the class could not be found
*/
@Override
public final Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException
{
// First check if it's already loaded
Class<?> clazz = findLoadedClass(name);
if (clazz == null)
{
final ClassLoader parent = getParent();
if (tryClassHere(name))
{
try
{
clazz = findClass(name);
watchForModifications(clazz);
}
catch (ClassNotFoundException cnfe)
{
if (parent == null)
{
// Propagate exception
throw cnfe;
}
}
}
if (clazz == null)
{
if (parent == null)
{
throw new ClassNotFoundException(name);
}
else
{
// Will throw a CFNE if not found in parent
// see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6500212
// clazz = parent.loadClass(name);
clazz = Class.forName(name, false, parent);
}
}
}
if (resolve)
{
resolveClass(clazz);
}
return clazz;
}
/**
* Sets the listener that will be notified when a class changes
*
* @param listener<|fim▁hole|> */
public void setListener(IChangeListener listener)
{
this.listener = listener;
}
/**
* Watch changes of a class file by locating it in the list of location URLs and adding the
* corresponding file to the ModificationWatcher
*
* @param clz
* the class to watch
*/
private void watchForModifications(Class<?> clz)
{
// Watch class in the future
Iterator<URL> locationsIterator = urls.iterator();
File clzFile = null;
while (locationsIterator.hasNext())
{
// FIXME only works for directories, but JARs etc could be checked
// as well
URL location = locationsIterator.next();
String clzLocation = location.getFile() + clz.getName().replaceAll("\\.", "/") +
".class";
log.debug("clzLocation=" + clzLocation);
clzFile = new File(clzLocation);
final File finalClzFile = clzFile;
if (clzFile.exists())
{
log.info("Watching changes of class " + clzFile);
watcher.add(clzFile, new IChangeListener()
{
@Override
public void onChange()
{
log.info("Class file " + finalClzFile + " has changed, reloading");
try
{
listener.onChange();
}
catch (Exception e)
{
log.error("Could not notify listener", e);
// If an error occurs when the listener is notified,
// remove the watched object to avoid rethrowing the
// exception at next check
// FIXME check if class file has been deleted
watcher.remove(finalClzFile);
}
}
});
break;
}
else
{
log.debug("Class file does not exist: " + clzFile);
}
}
if (clzFile != null && !clzFile.exists())
{
log.debug("Could not locate class " + clz.getName());
}
}
/**
* Remove the ModificationWatcher from the current reloading class loader
*/
public void destroy()
{
watcher.destroy();
}
}<|fim▁end|> | * the listener to notify upon class change |
<|file_name|>interface.go<|end_file_name|><|fim▁begin|>/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by informer-gen. DO NOT EDIT.
package v2beta1
import (
internalinterfaces "k8s.io/client-go/informers/internalinterfaces"
)
// Interface provides access to all the informers in this group version.
type Interface interface {
// HorizontalPodAutoscalers returns a HorizontalPodAutoscalerInformer.
HorizontalPodAutoscalers() HorizontalPodAutoscalerInformer
// VerticalPodAutoscalers returns a VerticalPodAutoscalerInformer.
VerticalPodAutoscalers() VerticalPodAutoscalerInformer
}
type version struct {
factory internalinterfaces.SharedInformerFactory
namespace string
tweakListOptions internalinterfaces.TweakListOptionsFunc
}
// New returns a new Interface.<|fim▁hole|>func New(f internalinterfaces.SharedInformerFactory, namespace string, tweakListOptions internalinterfaces.TweakListOptionsFunc) Interface {
return &version{factory: f, namespace: namespace, tweakListOptions: tweakListOptions}
}
// HorizontalPodAutoscalers returns a HorizontalPodAutoscalerInformer.
func (v *version) HorizontalPodAutoscalers() HorizontalPodAutoscalerInformer {
return &horizontalPodAutoscalerInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions}
}
// VerticalPodAutoscalers returns a VerticalPodAutoscalerInformer.
func (v *version) VerticalPodAutoscalers() VerticalPodAutoscalerInformer {
return &verticalPodAutoscalerInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions}
}<|fim▁end|> | |
<|file_name|>pr7.js<|end_file_name|><|fim▁begin|>function isPrime(n) {
if (n === 2) {
return true;
}
if (n % 2 === 0 || n === 1) {
return false;
}
for (var i = 3; i <= Math.sqrt(n); i += 2) {
if (n % i === 0) {
return false;
}
}
return true;
}
function nthPrimeNumber(n) {
if (n <= 0) {
throw new Error('Must be an integer >= 1');
}
var numPrime = 1;
var i = 1;
if (n === 1) {
return 2;
}
while (numPrime < n) {
i += 2;
if (isPrime(i)) {
numPrime++;
}
}<|fim▁hole|>console.log(nthPrimeNumber(10001));<|fim▁end|> | return i;
} |
<|file_name|>databases.py<|end_file_name|><|fim▁begin|>from troveclient import base
from troveclient.common import check_for_exceptions
from troveclient.common import limit_url
from troveclient.common import Paginated
import urlparse
class Database(base.Resource):
"""
According to Wikipedia, "A database is a system intended to organize,
store, and retrieve
large amounts of data easily."
"""
def __repr__(self):
return "<Database: %s>" % self.name
class Databases(base.ManagerWithFind):
"""
Manage :class:`Databases` resources.
"""
resource_class = Database
<|fim▁hole|> """
Create new databases within the specified instance
"""
body = {"databases": databases}
url = "/instances/%s/databases" % instance_id
resp, body = self.api.client.post(url, body=body)
check_for_exceptions(resp, body)
def delete(self, instance_id, dbname):
"""Delete an existing database in the specified instance"""
url = "/instances/%s/databases/%s" % (instance_id, dbname)
resp, body = self.api.client.delete(url)
check_for_exceptions(resp, body)
def _list(self, url, response_key, limit=None, marker=None):
resp, body = self.api.client.get(limit_url(url, limit, marker))
check_for_exceptions(resp, body)
if not body:
raise Exception("Call to " + url +
" did not return a body.")
links = body.get('links', [])
next_links = [link['href'] for link in links if link['rel'] == 'next']
next_marker = None
for link in next_links:
# Extract the marker from the url.
parsed_url = urlparse.urlparse(link)
query_dict = dict(urlparse.parse_qsl(parsed_url.query))
next_marker = query_dict.get('marker', None)
databases = body[response_key]
databases = [self.resource_class(self, res) for res in databases]
return Paginated(databases, next_marker=next_marker, links=links)
def list(self, instance, limit=None, marker=None):
"""
Get a list of all Databases from the instance.
:rtype: list of :class:`Database`.
"""
return self._list("/instances/%s/databases" % base.getid(instance),
"databases", limit, marker)
# def get(self, instance, database):
# """
# Get a specific instances.
#
# :param flavor: The ID of the :class:`Database` to get.
# :rtype: :class:`Database`
# """
# assert isinstance(instance, Instance)
# assert isinstance(database, (Database, int))
# instance_id = base.getid(instance)
# db_id = base.getid(database)
# url = "/instances/%s/databases/%s" % (instance_id, db_id)
# return self._get(url, "database")<|fim▁end|> | def create(self, instance_id, databases): |
<|file_name|>graph.ts<|end_file_name|><|fim▁begin|>import Vue from 'vue';
import * as Chart from 'chart.js';
import { Component, Prop, Watch } from 'vue-property-decorator';
import { date } from '../../vue/filters/date';
import { ThemeState, ThemeStore } from '../theme/theme.store';
// Try to match site styling.
const fontFamily = `Nunito, 'Helvetica Neue', 'Helvetica', 'Arial', sans-serif`;
const chartOptions: any = {
responsive: true,
maintainAspectRatio: false,
legend: {
position: 'bottom',
// Gotta silence stupid TS error.
labels: {
fontColor: '#c1c1c1',
usePointStyle: true,
fontFamily,
},
},
scales: {
xAxes: [
{
gridLines: {
display: false,
},
},
],
yAxes: [
{
gridLines: {
display: false,
},
ticks: {
beginAtZero: true,
},
},
],
},
tooltips: {
cornerRadius: 0,
titleFontFamily: fontFamily,
titleFontColor: '#fff',
titleFontSize: 14,
bodyFontFamily: fontFamily,
bodyFontColor: '#c1c1c1',
bodyFontSize: 11,
},
};
const lineChartOptions: any = {
tooltips: {
// Tells it to show the tooltip even if not hovered directly over
// the point.
intersect: false,
mode: 'index',
},
hover: {
intersect: false,
mode: 'index',
},
};
const pieChartOptions: any = {
scales: {
xAxes: [{ display: false }],
yAxes: [{ display: false, ticks: { beginAtZero: true } }],
},
};
const backgroundVariantChartOptions: any = {
legend: {
display: false,
},
scales: {
xAxes: [{ display: false }],
yAxes: [{ display: false, ticks: { beginAtZero: true } }],
},
tooltips: {
enabled: false,
},
};
@Component({})
export default class AppGraph extends Vue {
@Prop(Array) dataset!: any[];
@Prop({ type: String, default: 'line' })
type!: string;
@Prop(Boolean) backgroundVariant?: boolean;
@ThemeState theme?: ThemeStore['theme'];
chart: Chart = null as any;
data: any = {};
chartOptions: any = {};
ourColors: any = {};
get globalColors() {
let colors = ['#ffffff', '#ccff00', '#31d6ff', '#ff3fac', '#2f7f6f'];
if (this.theme) {
if (this.theme.custom) {
colors = ['#ffffff', '#' + this.theme.darkHighlight_, '#31d6ff', '#ff3fac', '#2f7f6f'];
} else {
colors = [
'#ffffff',
'#' + this.theme.darkHighlight_,
'#' + this.theme.darkNotice_,
'#' + this.theme.darkBacklight_,
'#31d6ff',
];
}
}
return colors.map(color => {
return {
backgroundColor: 'rgba( 255, 255, 255, 0.05 )',
borderColor: color,
borderWidth: 1,
pointRadius: 4,
pointBorderWidth: 2,
pointBackgroundColor: color,
pointBorderColor: '#191919',
pointHoverBackgroundColor: '#fff',
pointHoverBorderColor: '#fff',
};
});
}
created() {
// We gotta deep copy.
Object.assign(this.chartOptions, JSON.parse(JSON.stringify(chartOptions)));
Object.assign(this.ourColors, JSON.parse(JSON.stringify(this.globalColors)));
if (this.type === 'line') {
Object.assign(this.chartOptions, lineChartOptions);
} else if (this.type === 'pie' || this.type === 'doughnut') {
Object.assign(this.chartOptions, pieChartOptions);
}
if (this.backgroundVariant) {
Object.assign(this.chartOptions, backgroundVariantChartOptions);
this.ourColors[0] = {
borderWidth: 1,
pointRadius: 0,
pointHoverRadius: 0,
pointBorderWidth: 0,
backgroundColor: 'rgba( 127, 127, 127, 0.10 )',
borderColor: '#7e7e7e',
pointBackgroundColor: '#7e7e7e',
pointHoverBackgroundColor: '#7e7e7e',
pointHoverBorderColor: '#7e7e7e',
};
this.ourColors[1] = this.ourColors[0];
}
}
mounted() {
this.checkData();
this.chart = new Chart(this.$refs.canvas as HTMLCanvasElement, {
type: this.type,
data: this.data,
options: this.chartOptions,
});
}
// Will only get called when dataset changes reference.
@Watch('dataset')
onDatasetChanged() {
this.checkData();
}
private checkData() {
if (!this.dataset) {
return;
}
this.data = {
labels: [],
datasets: [],
};
if (this.type === 'line') {
this.dataset.forEach((series: any, i: number) => {
let dataset: any = {
label: series.label,
data: [],
};
Object.assign(dataset, this.ourColors[i]);
for (const row of series.data) {
if (i === 0) {<|fim▁hole|> }
this.data.datasets.push(dataset);
});
} else if (this.type === 'pie' || this.type === 'doughnut') {
this.data.datasets.push({
data: [],
});
this.dataset.forEach((item: any, i: number) => {
const dataset = this.data.datasets[0];
dataset.data.push(item.value);
// We have to override the color info for the chart since the
// defaults are for line charts. We also skip the first color
// value since that's only for line charts (white).
const colorInfo = Object.assign({}, this.ourColors[i + 1]);
colorInfo.backgroundColor = colorInfo.borderColor;
colorInfo.borderColor = '#000';
colorInfo.hoverBackgroundColor = '#fff';
for (const n in colorInfo) {
if (!dataset[n]) {
dataset[n] = [];
}
dataset[n].push(colorInfo[n]);
}
this.data.labels.push(item.label);
});
}
if (this.chart) {
this.chart.data = this.data;
this.chart.update();
}
}
}<|fim▁end|> | this.data.labels.push(date(row[0], 'MMM DD'));
}
dataset.data.push(row[1]); |
<|file_name|>cloudformation.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "cloudformation")]
extern crate rusoto_core;
extern crate rusoto_cloudformation;
use rusoto_cloudformation::{CloudFormation, CloudFormationClient, ListStacksInput};
use rusoto_core::Region;
#[test]
fn should_list_stacks() {<|fim▁hole|> let request = ListStacksInput::default();
let result = client.list_stacks(request).sync().unwrap();
println!("{:#?}", result);
}
#[test]
fn should_list_stacks_with_status_filter() {
let client = CloudFormationClient::new(Region::UsEast1);
let filters = vec!["CREATE_COMPLETE".to_owned()];
let request = ListStacksInput { stack_status_filter: Some(filters), ..Default::default() };
let result = client.list_stacks(request).sync().unwrap();
println!("{:#?}", result);
}<|fim▁end|> | let client = CloudFormationClient::new(Region::UsEast1); |
<|file_name|>p2p_fingerprint.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test various fingerprinting protections.
If a stale block more than a month old or its header are requested by a peer,
the node should pretend that it does not have it to avoid fingerprinting.
"""
import time
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.messages import CInv
from test_framework.mininode import (
P2PInterface,
msg_headers,
msg_block,
msg_getdata,
msg_getheaders,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
wait_until,
)
class P2PFingerprintTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
# Build a chain of blocks on top of given one
def build_chain(self, nblocks, prev_hash, prev_height, prev_median_time):
blocks = []
for _ in range(nblocks):
coinbase = create_coinbase(prev_height + 1)
block_time = prev_median_time + 1
block = create_block(int(prev_hash, 16), coinbase, block_time)
block.solve()
blocks.append(block)
prev_hash = block.hash
prev_height += 1
prev_median_time = block_time
return blocks
# Send a getdata request for a given block hash
def send_block_request(self, block_hash, node):
msg = msg_getdata()
msg.inv.append(CInv(2, block_hash)) # 2 == "Block"<|fim▁hole|> msg = msg_getheaders()
msg.hashstop = block_hash
node.send_message(msg)
# Check whether last block received from node has a given hash
def last_block_equals(self, expected_hash, node):
block_msg = node.last_message.get("block")
return block_msg and block_msg.block.rehash() == expected_hash
# Check whether last block header received from node has a given hash
def last_header_equals(self, expected_hash, node):
headers_msg = node.last_message.get("headers")
return (headers_msg and
headers_msg.headers and
headers_msg.headers[0].rehash() == expected_hash)
# Checks that stale blocks timestamped more than a month ago are not served
# by the node while recent stale blocks and old active chain blocks are.
# This does not currently test that stale blocks timestamped within the
# last month but that have over a month's worth of work are also withheld.
def run_test(self):
node0 = self.nodes[0].add_p2p_connection(P2PInterface())
# Set node time to 60 days ago
self.nodes[0].setmocktime(int(time.time()) - 60 * 24 * 60 * 60)
# Generating a chain of 10 blocks
block_hashes = self.nodes[0].generate(nblocks=10)
# Create longer chain starting 2 blocks before current tip
height = len(block_hashes) - 2
block_hash = block_hashes[height - 1]
block_time = self.nodes[0].getblockheader(block_hash)["mediantime"] + 1
new_blocks = self.build_chain(5, block_hash, height, block_time)
# Force reorg to a longer chain
node0.send_message(msg_headers(new_blocks))
node0.wait_for_getdata()
for block in new_blocks:
node0.send_and_ping(msg_block(block))
# Check that reorg succeeded
assert_equal(self.nodes[0].getblockcount(), 13)
stale_hash = int(block_hashes[-1], 16)
# Check that getdata request for stale block succeeds
self.send_block_request(stale_hash, node0)
test_function = lambda: self.last_block_equals(stale_hash, node0)
wait_until(test_function, timeout=3)
# Check that getheader request for stale block header succeeds
self.send_header_request(stale_hash, node0)
test_function = lambda: self.last_header_equals(stale_hash, node0)
wait_until(test_function, timeout=3)
# Longest chain is extended so stale is much older than chain tip
self.nodes[0].setmocktime(0)
tip = self.nodes[0].generate(nblocks=1)[0]
assert_equal(self.nodes[0].getblockcount(), 14)
# Send getdata & getheaders to refresh last received getheader message
block_hash = int(tip, 16)
self.send_block_request(block_hash, node0)
self.send_header_request(block_hash, node0)
node0.sync_with_ping()
# Request for very old stale block should now fail
self.send_block_request(stale_hash, node0)
time.sleep(3)
assert not self.last_block_equals(stale_hash, node0)
# Request for very old stale block header should now fail
self.send_header_request(stale_hash, node0)
time.sleep(3)
assert not self.last_header_equals(stale_hash, node0)
# Verify we can fetch very old blocks and headers on the active chain
block_hash = int(block_hashes[2], 16)
self.send_block_request(block_hash, node0)
self.send_header_request(block_hash, node0)
node0.sync_with_ping()
self.send_block_request(block_hash, node0)
test_function = lambda: self.last_block_equals(block_hash, node0)
wait_until(test_function, timeout=3)
self.send_header_request(block_hash, node0)
test_function = lambda: self.last_header_equals(block_hash, node0)
wait_until(test_function, timeout=3)
if __name__ == '__main__':
P2PFingerprintTest().main()<|fim▁end|> | node.send_message(msg)
# Send a getheaders request for a given single block hash
def send_header_request(self, block_hash, node): |
<|file_name|>GabLoggingLayer.js<|end_file_name|><|fim▁begin|><|fim▁hole|> *
* @return LoggingLayer class (extends CartoDBLayerClass)
*/
define(['abstract/layer/CartoDBLayerClass'], function(CartoDBLayerClass) {
'use strict';
var GabLoggingLayer = CartoDBLayerClass.extend({
options: {
sql:
"SELECT 'gab_logging' as tablename, cartodb_id, the_geom_webmercator, nom_ste_s as company, round(sup_adm::float) as area_ha,nom_ste as name, '{tableName}' AS layer, {analysis} AS analysis FROM {tableName}",
infowindow: true,
interactivity: 'cartodb_id, tablename, name, company, area_ha, analysis',
analysis: true
}
});
return GabLoggingLayer;
});<|fim▁end|> | /**
* The Logging layer module. |
<|file_name|>placessidebar.rs<|end_file_name|><|fim▁begin|>// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! GtkPlacesSidebar — Sidebar that displays frequently-used places in the file system
use gtk::{mod, ffi};
use gtk::ffi::FFIWidget;
use gtk::cast::GTK_PLACES_SIDEBAR;
struct_Widget!(PlacesSidebar)
impl PlacesSidebar {
pub fn new() -> Option<PlacesSidebar> {
let tmp_pointer = unsafe { ffi::gtk_places_sidebar_new() };
check_pointer!(tmp_pointer, PlacesSidebar)
}
pub fn set_open_flags(&self, flags: gtk::PlacesOpenFlags) {
unsafe { ffi::gtk_places_sidebar_set_open_flags(GTK_PLACES_SIDEBAR(self.get_widget()), flags) }
}
pub fn get_open_flags(&self) -> gtk::PlacesOpenFlags {
unsafe { ffi::gtk_places_sidebar_get_open_flags(GTK_PLACES_SIDEBAR(self.get_widget())) }
}
pub fn set_show_desktop(&self, show_desktop: bool) {
unsafe { ffi::gtk_places_sidebar_set_show_desktop(GTK_PLACES_SIDEBAR(self.get_widget()), ffi::to_gboolean(show_desktop)) }
}
pub fn get_show_desktop(&self) -> bool {
unsafe { ffi::to_bool(ffi::gtk_places_sidebar_get_show_desktop(GTK_PLACES_SIDEBAR(self.get_widget()))) }
}
pub fn set_show_connect_to_server(&self, show_connect_to_server: bool) {
unsafe { ffi::gtk_places_sidebar_set_show_connect_to_server(GTK_PLACES_SIDEBAR(self.get_widget()),<|fim▁hole|> pub fn get_show_connect_to_server(&self) -> bool {
unsafe { ffi::to_bool(ffi::gtk_places_sidebar_get_show_connect_to_server(GTK_PLACES_SIDEBAR(self.get_widget()))) }
}
pub fn set_local_only(&self, local_only: bool) {
unsafe { ffi::gtk_places_sidebar_set_local_only(GTK_PLACES_SIDEBAR(self.get_widget()), ffi::to_gboolean(local_only)) }
}
pub fn get_local_only(&self) -> bool {
unsafe { ffi::to_bool(ffi::gtk_places_sidebar_get_local_only(GTK_PLACES_SIDEBAR(self.get_widget()))) }
}
pub fn set_show_enter_location(&self, show_enter_location: bool) {
unsafe { ffi::gtk_places_sidebar_set_show_enter_location(GTK_PLACES_SIDEBAR(self.get_widget()),
ffi::to_gboolean(show_enter_location)) }
}
pub fn get_show_enter_location(&self) -> bool {
unsafe { ffi::to_bool(ffi::gtk_places_sidebar_get_show_enter_location(GTK_PLACES_SIDEBAR(self.get_widget()))) }
}
}
impl_drop!(PlacesSidebar)
impl_TraitWidget!(PlacesSidebar)
impl gtk::ContainerTrait for PlacesSidebar {}
impl gtk::BinTrait for PlacesSidebar {}
impl gtk::ScrolledWindowTrait for PlacesSidebar {}
impl_widget_events!(PlacesSidebar)<|fim▁end|> | ffi::to_gboolean(show_connect_to_server)) }
}
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>#encoding=utf-8
"""
26. Invalid models
This example exists purely to point out errors in models.
"""
from __future__ import unicode_literals
from django.db import connection, models
class FieldErrors(models.Model):
charfield = models.CharField()
charfield2 = models.CharField(max_length=-1)
charfield3 = models.CharField(max_length="bad")
decimalfield = models.DecimalField()
decimalfield2 = models.DecimalField(max_digits=-1, decimal_places=-1)
decimalfield3 = models.DecimalField(max_digits="bad", decimal_places="bad")
decimalfield4 = models.DecimalField(max_digits=9, decimal_places=10)
decimalfield5 = models.DecimalField(max_digits=10, decimal_places=10)
filefield = models.FileField()
choices = models.CharField(max_length=10, choices='bad')
choices2 = models.CharField(max_length=10, choices=[(1, 2, 3), (1, 2, 3)])
index = models.CharField(max_length=10, db_index='bad')
field_ = models.CharField(max_length=10)
nullbool = models.BooleanField(null=True)
class Target(models.Model):
tgt_safe = models.CharField(max_length=10)
clash1 = models.CharField(max_length=10)
clash2 = models.CharField(max_length=10)
clash1_set = models.CharField(max_length=10)
class Clash1(models.Model):
src_safe = models.CharField(max_length=10)
foreign = models.ForeignKey(Target)
m2m = models.ManyToManyField(Target)
class Clash2(models.Model):
src_safe = models.CharField(max_length=10)
foreign_1 = models.ForeignKey(Target, related_name='id')
foreign_2 = models.ForeignKey(Target, related_name='src_safe')
m2m_1 = models.ManyToManyField(Target, related_name='id')
m2m_2 = models.ManyToManyField(Target, related_name='src_safe')
class Target2(models.Model):
clash3 = models.CharField(max_length=10)
foreign_tgt = models.ForeignKey(Target)
clashforeign_set = models.ForeignKey(Target)
m2m_tgt = models.ManyToManyField(Target)
clashm2m_set = models.ManyToManyField(Target)
class Clash3(models.Model):
src_safe = models.CharField(max_length=10)
foreign_1 = models.ForeignKey(Target2, related_name='foreign_tgt')
foreign_2 = models.ForeignKey(Target2, related_name='m2m_tgt')
m2m_1 = models.ManyToManyField(Target2, related_name='foreign_tgt')
m2m_2 = models.ManyToManyField(Target2, related_name='m2m_tgt')
class ClashForeign(models.Model):
foreign = models.ForeignKey(Target2)
class ClashM2M(models.Model):
m2m = models.ManyToManyField(Target2)
class SelfClashForeign(models.Model):
src_safe = models.CharField(max_length=10)
selfclashforeign = models.CharField(max_length=10)
selfclashforeign_set = models.ForeignKey("SelfClashForeign")
foreign_1 = models.ForeignKey("SelfClashForeign", related_name='id')
foreign_2 = models.ForeignKey("SelfClashForeign", related_name='src_safe')
class ValidM2M(models.Model):
src_safe = models.CharField(max_length=10)
validm2m = models.CharField(max_length=10)
# M2M fields are symmetrical by default. Symmetrical M2M fields
# on self don't require a related accessor, so many potential
# clashes are avoided.
validm2m_set = models.ManyToManyField("self")
m2m_1 = models.ManyToManyField("self", related_name='id')
m2m_2 = models.ManyToManyField("self", related_name='src_safe')
m2m_3 = models.ManyToManyField('self')
m2m_4 = models.ManyToManyField('self')
class SelfClashM2M(models.Model):
src_safe = models.CharField(max_length=10)
selfclashm2m = models.CharField(max_length=10)
# Non-symmetrical M2M fields _do_ have related accessors, so
# there is potential for clashes.
selfclashm2m_set = models.ManyToManyField("self", symmetrical=False)
m2m_1 = models.ManyToManyField("self", related_name='id', symmetrical=False)
m2m_2 = models.ManyToManyField("self", related_name='src_safe', symmetrical=False)
m2m_3 = models.ManyToManyField('self', symmetrical=False)
m2m_4 = models.ManyToManyField('self', symmetrical=False)
class Model(models.Model):
"But it's valid to call a model Model."
year = models.PositiveIntegerField() # 1960
make = models.CharField(max_length=10) # Aston Martin
name = models.CharField(max_length=10) # DB 4 GT
class Car(models.Model):
colour = models.CharField(max_length=5)
model = models.ForeignKey(Model)
class MissingRelations(models.Model):
rel1 = models.ForeignKey("Rel1")
rel2 = models.ManyToManyField("Rel2")
class MissingManualM2MModel(models.Model):
name = models.CharField(max_length=5)
missing_m2m = models.ManyToManyField(Model, through="MissingM2MModel")
class Person(models.Model):
name = models.CharField(max_length=5)
class Group(models.Model):
name = models.CharField(max_length=5)
primary = models.ManyToManyField(Person, through="Membership", related_name="primary")
secondary = models.ManyToManyField(Person, through="Membership", related_name="secondary")
tertiary = models.ManyToManyField(Person, through="RelationshipDoubleFK", related_name="tertiary")
class GroupTwo(models.Model):
name = models.CharField(max_length=5)
primary = models.ManyToManyField(Person, through="Membership")
secondary = models.ManyToManyField(Group, through="MembershipMissingFK")
class Membership(models.Model):
person = models.ForeignKey(Person)
group = models.ForeignKey(Group)
not_default_or_null = models.CharField(max_length=5)
class MembershipMissingFK(models.Model):
person = models.ForeignKey(Person)
class PersonSelfRefM2M(models.Model):
name = models.CharField(max_length=5)
friends = models.ManyToManyField('self', through="Relationship")
too_many_friends = models.ManyToManyField('self', through="RelationshipTripleFK")
class PersonSelfRefM2MExplicit(models.Model):
name = models.CharField(max_length=5)
friends = models.ManyToManyField('self', through="ExplicitRelationship", symmetrical=True)
class Relationship(models.Model):
first = models.ForeignKey(PersonSelfRefM2M, related_name="rel_from_set")
second = models.ForeignKey(PersonSelfRefM2M, related_name="rel_to_set")
date_added = models.DateTimeField()
class ExplicitRelationship(models.Model):
first = models.ForeignKey(PersonSelfRefM2MExplicit, related_name="rel_from_set")
second = models.ForeignKey(PersonSelfRefM2MExplicit, related_name="rel_to_set")
date_added = models.DateTimeField()
class RelationshipTripleFK(models.Model):
first = models.ForeignKey(PersonSelfRefM2M, related_name="rel_from_set_2")
second = models.ForeignKey(PersonSelfRefM2M, related_name="rel_to_set_2")
third = models.ForeignKey(PersonSelfRefM2M, related_name="too_many_by_far")
date_added = models.DateTimeField()
class RelationshipDoubleFK(models.Model):
first = models.ForeignKey(Person, related_name="first_related_name")
second = models.ForeignKey(Person, related_name="second_related_name")
third = models.ForeignKey(Group, related_name="rel_to_set")
date_added = models.DateTimeField()
class AbstractModel(models.Model):
name = models.CharField(max_length=10)
class Meta:
abstract = True
class AbstractRelationModel(models.Model):
fk1 = models.ForeignKey('AbstractModel')
fk2 = models.ManyToManyField('AbstractModel')
class UniqueM2M(models.Model):
""" Model to test for unique ManyToManyFields, which are invalid. """
unique_people = models.ManyToManyField(Person, unique=True)
class NonUniqueFKTarget1(models.Model):
""" Model to test for non-unique FK target in yet-to-be-defined model: expect an error """
tgt = models.ForeignKey('FKTarget', to_field='bad')
class UniqueFKTarget1(models.Model):
""" Model to test for unique FK target in yet-to-be-defined model: expect no error """
tgt = models.ForeignKey('FKTarget', to_field='good')
class FKTarget(models.Model):
bad = models.IntegerField()
good = models.IntegerField(unique=True)
class NonUniqueFKTarget2(models.Model):
""" Model to test for non-unique FK target in previously seen model: expect an error """
tgt = models.ForeignKey(FKTarget, to_field='bad')
class UniqueFKTarget2(models.Model):
""" Model to test for unique FK target in previously seen model: expect no error """
tgt = models.ForeignKey(FKTarget, to_field='good')
class NonExistingOrderingWithSingleUnderscore(models.Model):
class Meta:
ordering = ("does_not_exist",)
class InvalidSetNull(models.Model):
fk = models.ForeignKey('self', on_delete=models.SET_NULL)
class InvalidSetDefault(models.Model):
fk = models.ForeignKey('self', on_delete=models.SET_DEFAULT)
<|fim▁hole|> also_good = models.ManyToManyField('FKTarget', related_name='unicode2')
# In Python 3 this should become legal, but currently causes unicode errors
# when adding the errors in core/management/validation.py
#bad = models.ForeignKey('★')
class PrimaryKeyNull(models.Model):
my_pk_field = models.IntegerField(primary_key=True, null=True)
class OrderByPKModel(models.Model):
"""
Model to test that ordering by pk passes validation.
Refs #8291
"""
name = models.CharField(max_length=100, blank=True)
class Meta:
ordering = ('pk',)
class SwappableModel(models.Model):
"""A model that can be, but isn't swapped out.
References to this model *shoudln't* raise any validation error.
"""
name = models.CharField(max_length=100)
class Meta:
swappable = 'TEST_SWAPPABLE_MODEL'
class SwappedModel(models.Model):
"""A model that is swapped out.
References to this model *should* raise a validation error.
Requires TEST_SWAPPED_MODEL to be defined in the test environment;
this is guaranteed by the test runner using @override_settings.
"""
name = models.CharField(max_length=100)
class Meta:
swappable = 'TEST_SWAPPED_MODEL'
class BadSwappableValue(models.Model):
"""A model that can be swapped out; during testing, the swappable
value is not of the format app.model
"""
name = models.CharField(max_length=100)
class Meta:
swappable = 'TEST_SWAPPED_MODEL_BAD_VALUE'
class BadSwappableModel(models.Model):
"""A model that can be swapped out; during testing, the swappable
value references an unknown model.
"""
name = models.CharField(max_length=100)
class Meta:
swappable = 'TEST_SWAPPED_MODEL_BAD_MODEL'
class HardReferenceModel(models.Model):
fk_1 = models.ForeignKey(SwappableModel, related_name='fk_hardref1')
fk_2 = models.ForeignKey('invalid_models.SwappableModel', related_name='fk_hardref2')
fk_3 = models.ForeignKey(SwappedModel, related_name='fk_hardref3')
fk_4 = models.ForeignKey('invalid_models.SwappedModel', related_name='fk_hardref4')
m2m_1 = models.ManyToManyField(SwappableModel, related_name='m2m_hardref1')
m2m_2 = models.ManyToManyField('invalid_models.SwappableModel', related_name='m2m_hardref2')
m2m_3 = models.ManyToManyField(SwappedModel, related_name='m2m_hardref3')
m2m_4 = models.ManyToManyField('invalid_models.SwappedModel', related_name='m2m_hardref4')
model_errors = """invalid_models.fielderrors: "charfield": CharFields require a "max_length" attribute that is a positive integer.
invalid_models.fielderrors: "charfield2": CharFields require a "max_length" attribute that is a positive integer.
invalid_models.fielderrors: "charfield3": CharFields require a "max_length" attribute that is a positive integer.
invalid_models.fielderrors: "decimalfield": DecimalFields require a "decimal_places" attribute that is a non-negative integer.
invalid_models.fielderrors: "decimalfield": DecimalFields require a "max_digits" attribute that is a positive integer.
invalid_models.fielderrors: "decimalfield2": DecimalFields require a "decimal_places" attribute that is a non-negative integer.
invalid_models.fielderrors: "decimalfield2": DecimalFields require a "max_digits" attribute that is a positive integer.
invalid_models.fielderrors: "decimalfield3": DecimalFields require a "decimal_places" attribute that is a non-negative integer.
invalid_models.fielderrors: "decimalfield3": DecimalFields require a "max_digits" attribute that is a positive integer.
invalid_models.fielderrors: "decimalfield4": DecimalFields require a "max_digits" attribute value that is greater than or equal to the value of the "decimal_places" attribute.
invalid_models.fielderrors: "filefield": FileFields require an "upload_to" attribute.
invalid_models.fielderrors: "choices": "choices" should be iterable (e.g., a tuple or list).
invalid_models.fielderrors: "choices2": "choices" should be a sequence of two-tuples.
invalid_models.fielderrors: "choices2": "choices" should be a sequence of two-tuples.
invalid_models.fielderrors: "index": "db_index" should be either None, True or False.
invalid_models.fielderrors: "field_": Field names cannot end with underscores, because this would lead to ambiguous queryset filters.
invalid_models.fielderrors: "nullbool": BooleanFields do not accept null values. Use a NullBooleanField instead.
invalid_models.clash1: Accessor for field 'foreign' clashes with field 'Target.clash1_set'. Add a related_name argument to the definition for 'foreign'.
invalid_models.clash1: Accessor for field 'foreign' clashes with related m2m field 'Target.clash1_set'. Add a related_name argument to the definition for 'foreign'.
invalid_models.clash1: Reverse query name for field 'foreign' clashes with field 'Target.clash1'. Add a related_name argument to the definition for 'foreign'.
invalid_models.clash1: Accessor for m2m field 'm2m' clashes with field 'Target.clash1_set'. Add a related_name argument to the definition for 'm2m'.
invalid_models.clash1: Accessor for m2m field 'm2m' clashes with related field 'Target.clash1_set'. Add a related_name argument to the definition for 'm2m'.
invalid_models.clash1: Reverse query name for m2m field 'm2m' clashes with field 'Target.clash1'. Add a related_name argument to the definition for 'm2m'.
invalid_models.clash2: Accessor for field 'foreign_1' clashes with field 'Target.id'. Add a related_name argument to the definition for 'foreign_1'.
invalid_models.clash2: Accessor for field 'foreign_1' clashes with related m2m field 'Target.id'. Add a related_name argument to the definition for 'foreign_1'.
invalid_models.clash2: Reverse query name for field 'foreign_1' clashes with field 'Target.id'. Add a related_name argument to the definition for 'foreign_1'.
invalid_models.clash2: Reverse query name for field 'foreign_1' clashes with related m2m field 'Target.id'. Add a related_name argument to the definition for 'foreign_1'.
invalid_models.clash2: Accessor for field 'foreign_2' clashes with related m2m field 'Target.src_safe'. Add a related_name argument to the definition for 'foreign_2'.
invalid_models.clash2: Reverse query name for field 'foreign_2' clashes with related m2m field 'Target.src_safe'. Add a related_name argument to the definition for 'foreign_2'.
invalid_models.clash2: Accessor for m2m field 'm2m_1' clashes with field 'Target.id'. Add a related_name argument to the definition for 'm2m_1'.
invalid_models.clash2: Accessor for m2m field 'm2m_1' clashes with related field 'Target.id'. Add a related_name argument to the definition for 'm2m_1'.
invalid_models.clash2: Reverse query name for m2m field 'm2m_1' clashes with field 'Target.id'. Add a related_name argument to the definition for 'm2m_1'.
invalid_models.clash2: Reverse query name for m2m field 'm2m_1' clashes with related field 'Target.id'. Add a related_name argument to the definition for 'm2m_1'.
invalid_models.clash2: Accessor for m2m field 'm2m_2' clashes with related field 'Target.src_safe'. Add a related_name argument to the definition for 'm2m_2'.
invalid_models.clash2: Reverse query name for m2m field 'm2m_2' clashes with related field 'Target.src_safe'. Add a related_name argument to the definition for 'm2m_2'.
invalid_models.clash3: Accessor for field 'foreign_1' clashes with field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'foreign_1'.
invalid_models.clash3: Accessor for field 'foreign_1' clashes with related m2m field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'foreign_1'.
invalid_models.clash3: Reverse query name for field 'foreign_1' clashes with field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'foreign_1'.
invalid_models.clash3: Reverse query name for field 'foreign_1' clashes with related m2m field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'foreign_1'.
invalid_models.clash3: Accessor for field 'foreign_2' clashes with m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'foreign_2'.
invalid_models.clash3: Accessor for field 'foreign_2' clashes with related m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'foreign_2'.
invalid_models.clash3: Reverse query name for field 'foreign_2' clashes with m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'foreign_2'.
invalid_models.clash3: Reverse query name for field 'foreign_2' clashes with related m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'foreign_2'.
invalid_models.clash3: Accessor for m2m field 'm2m_1' clashes with field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'm2m_1'.
invalid_models.clash3: Accessor for m2m field 'm2m_1' clashes with related field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'm2m_1'.
invalid_models.clash3: Reverse query name for m2m field 'm2m_1' clashes with field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'm2m_1'.
invalid_models.clash3: Reverse query name for m2m field 'm2m_1' clashes with related field 'Target2.foreign_tgt'. Add a related_name argument to the definition for 'm2m_1'.
invalid_models.clash3: Accessor for m2m field 'm2m_2' clashes with m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'm2m_2'.
invalid_models.clash3: Accessor for m2m field 'm2m_2' clashes with related field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'm2m_2'.
invalid_models.clash3: Reverse query name for m2m field 'm2m_2' clashes with m2m field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'm2m_2'.
invalid_models.clash3: Reverse query name for m2m field 'm2m_2' clashes with related field 'Target2.m2m_tgt'. Add a related_name argument to the definition for 'm2m_2'.
invalid_models.clashforeign: Accessor for field 'foreign' clashes with field 'Target2.clashforeign_set'. Add a related_name argument to the definition for 'foreign'.
invalid_models.clashm2m: Accessor for m2m field 'm2m' clashes with m2m field 'Target2.clashm2m_set'. Add a related_name argument to the definition for 'm2m'.
invalid_models.target2: Accessor for field 'foreign_tgt' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'foreign_tgt'.
invalid_models.target2: Accessor for field 'foreign_tgt' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'foreign_tgt'.
invalid_models.target2: Accessor for field 'foreign_tgt' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'foreign_tgt'.
invalid_models.target2: Accessor for field 'clashforeign_set' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'clashforeign_set'.
invalid_models.target2: Accessor for field 'clashforeign_set' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'clashforeign_set'.
invalid_models.target2: Accessor for field 'clashforeign_set' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'clashforeign_set'.
invalid_models.target2: Accessor for m2m field 'm2m_tgt' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'm2m_tgt'.
invalid_models.target2: Accessor for m2m field 'm2m_tgt' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'm2m_tgt'.
invalid_models.target2: Accessor for m2m field 'm2m_tgt' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'm2m_tgt'.
invalid_models.target2: Accessor for m2m field 'm2m_tgt' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'm2m_tgt'.
invalid_models.target2: Accessor for m2m field 'm2m_tgt' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'm2m_tgt'.
invalid_models.target2: Accessor for m2m field 'clashm2m_set' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'clashm2m_set'.
invalid_models.target2: Accessor for m2m field 'clashm2m_set' clashes with related field 'Target.target2_set'. Add a related_name argument to the definition for 'clashm2m_set'.
invalid_models.target2: Accessor for m2m field 'clashm2m_set' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'clashm2m_set'.
invalid_models.target2: Accessor for m2m field 'clashm2m_set' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'clashm2m_set'.
invalid_models.target2: Accessor for m2m field 'clashm2m_set' clashes with related m2m field 'Target.target2_set'. Add a related_name argument to the definition for 'clashm2m_set'.
invalid_models.selfclashforeign: Accessor for field 'selfclashforeign_set' clashes with field 'SelfClashForeign.selfclashforeign_set'. Add a related_name argument to the definition for 'selfclashforeign_set'.
invalid_models.selfclashforeign: Reverse query name for field 'selfclashforeign_set' clashes with field 'SelfClashForeign.selfclashforeign'. Add a related_name argument to the definition for 'selfclashforeign_set'.
invalid_models.selfclashforeign: Accessor for field 'foreign_1' clashes with field 'SelfClashForeign.id'. Add a related_name argument to the definition for 'foreign_1'.
invalid_models.selfclashforeign: Reverse query name for field 'foreign_1' clashes with field 'SelfClashForeign.id'. Add a related_name argument to the definition for 'foreign_1'.
invalid_models.selfclashforeign: Accessor for field 'foreign_2' clashes with field 'SelfClashForeign.src_safe'. Add a related_name argument to the definition for 'foreign_2'.
invalid_models.selfclashforeign: Reverse query name for field 'foreign_2' clashes with field 'SelfClashForeign.src_safe'. Add a related_name argument to the definition for 'foreign_2'.
invalid_models.selfclashm2m: Accessor for m2m field 'selfclashm2m_set' clashes with m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'selfclashm2m_set'.
invalid_models.selfclashm2m: Reverse query name for m2m field 'selfclashm2m_set' clashes with field 'SelfClashM2M.selfclashm2m'. Add a related_name argument to the definition for 'selfclashm2m_set'.
invalid_models.selfclashm2m: Accessor for m2m field 'selfclashm2m_set' clashes with related m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'selfclashm2m_set'.
invalid_models.selfclashm2m: Accessor for m2m field 'm2m_1' clashes with field 'SelfClashM2M.id'. Add a related_name argument to the definition for 'm2m_1'.
invalid_models.selfclashm2m: Accessor for m2m field 'm2m_2' clashes with field 'SelfClashM2M.src_safe'. Add a related_name argument to the definition for 'm2m_2'.
invalid_models.selfclashm2m: Reverse query name for m2m field 'm2m_1' clashes with field 'SelfClashM2M.id'. Add a related_name argument to the definition for 'm2m_1'.
invalid_models.selfclashm2m: Reverse query name for m2m field 'm2m_2' clashes with field 'SelfClashM2M.src_safe'. Add a related_name argument to the definition for 'm2m_2'.
invalid_models.selfclashm2m: Accessor for m2m field 'm2m_3' clashes with m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_3'.
invalid_models.selfclashm2m: Accessor for m2m field 'm2m_3' clashes with related m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_3'.
invalid_models.selfclashm2m: Accessor for m2m field 'm2m_3' clashes with related m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_3'.
invalid_models.selfclashm2m: Accessor for m2m field 'm2m_4' clashes with m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_4'.
invalid_models.selfclashm2m: Accessor for m2m field 'm2m_4' clashes with related m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_4'.
invalid_models.selfclashm2m: Accessor for m2m field 'm2m_4' clashes with related m2m field 'SelfClashM2M.selfclashm2m_set'. Add a related_name argument to the definition for 'm2m_4'.
invalid_models.selfclashm2m: Reverse query name for m2m field 'm2m_3' clashes with field 'SelfClashM2M.selfclashm2m'. Add a related_name argument to the definition for 'm2m_3'.
invalid_models.selfclashm2m: Reverse query name for m2m field 'm2m_4' clashes with field 'SelfClashM2M.selfclashm2m'. Add a related_name argument to the definition for 'm2m_4'.
invalid_models.missingrelations: 'rel1' has a relation with model Rel1, which has either not been installed or is abstract.
invalid_models.missingrelations: 'rel2' has an m2m relation with model Rel2, which has either not been installed or is abstract.
invalid_models.grouptwo: 'primary' is a manually-defined m2m relation through model Membership, which does not have foreign keys to Person and GroupTwo
invalid_models.grouptwo: 'secondary' is a manually-defined m2m relation through model MembershipMissingFK, which does not have foreign keys to Group and GroupTwo
invalid_models.missingmanualm2mmodel: 'missing_m2m' specifies an m2m relation through model MissingM2MModel, which has not been installed
invalid_models.group: The model Group has two manually-defined m2m relations through the model Membership, which is not permitted. Please consider using an extra field on your intermediary model instead.
invalid_models.group: Intermediary model RelationshipDoubleFK has more than one foreign key to Person, which is ambiguous and is not permitted.
invalid_models.personselfrefm2m: Many-to-many fields with intermediate tables cannot be symmetrical.
invalid_models.personselfrefm2m: Intermediary model RelationshipTripleFK has more than two foreign keys to PersonSelfRefM2M, which is ambiguous and is not permitted.
invalid_models.personselfrefm2mexplicit: Many-to-many fields with intermediate tables cannot be symmetrical.
invalid_models.abstractrelationmodel: 'fk1' has a relation with model AbstractModel, which has either not been installed or is abstract.
invalid_models.abstractrelationmodel: 'fk2' has an m2m relation with model AbstractModel, which has either not been installed or is abstract.
invalid_models.uniquem2m: ManyToManyFields cannot be unique. Remove the unique argument on 'unique_people'.
invalid_models.nonuniquefktarget1: Field 'bad' under model 'FKTarget' must have a unique=True constraint.
invalid_models.nonuniquefktarget2: Field 'bad' under model 'FKTarget' must have a unique=True constraint.
invalid_models.nonexistingorderingwithsingleunderscore: "ordering" refers to "does_not_exist", a field that doesn't exist.
invalid_models.invalidsetnull: 'fk' specifies on_delete=SET_NULL, but cannot be null.
invalid_models.invalidsetdefault: 'fk' specifies on_delete=SET_DEFAULT, but has no default value.
invalid_models.hardreferencemodel: 'fk_3' defines a relation with the model 'invalid_models.SwappedModel', which has been swapped out. Update the relation to point at settings.TEST_SWAPPED_MODEL.
invalid_models.hardreferencemodel: 'fk_4' defines a relation with the model 'invalid_models.SwappedModel', which has been swapped out. Update the relation to point at settings.TEST_SWAPPED_MODEL.
invalid_models.hardreferencemodel: 'm2m_3' defines a relation with the model 'invalid_models.SwappedModel', which has been swapped out. Update the relation to point at settings.TEST_SWAPPED_MODEL.
invalid_models.hardreferencemodel: 'm2m_4' defines a relation with the model 'invalid_models.SwappedModel', which has been swapped out. Update the relation to point at settings.TEST_SWAPPED_MODEL.
invalid_models.badswappablevalue: TEST_SWAPPED_MODEL_BAD_VALUE is not of the form 'app_label.app_name'.
invalid_models.badswappablemodel: Model has been swapped out for 'not_an_app.Target' which has not been installed or is abstract.
"""
if not connection.features.interprets_empty_strings_as_nulls:
model_errors += """invalid_models.primarykeynull: "my_pk_field": Primary key fields cannot have null=True.
"""<|fim▁end|> | class UnicodeForeignKeys(models.Model):
"""Foreign keys which can translate to ascii should be OK, but fail if
they're not."""
good = models.ForeignKey('FKTarget') |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(iter_arith)]
#[macro_use] extern crate libeuler;
// Using names.txt (right click and 'Save Link/Target As...'), a 46K text file containing over
// five-thousand first names, begin by sorting it into alphabetical order. Then working out the
// alphabetical value for each name, multiply this value by its alphabetical position in the list
// to obtain a name score.<|fim▁hole|>//
// What is the total of all the name scores in the file?
fn main() {
let mut names: Vec<&str> = include_str!("names.txt")
.split(",")
.map(|a| a.trim_matches(&['"'] as &[char]))
.collect();
names.sort();
solutions! {
sol naive {
names.iter().zip(0..names.len()).map(|(&name, index)| {
name.chars()
.map(|c| c as u64 - 'A' as u64 + 1)
.sum::<u64>() * (index as u64 + 1)
}).sum::<u64>()
}
}
}<|fim▁end|> | //
// For example, when the list is sorted into alphabetical order, COLIN, which is worth 3 + 15 + 12
// + 9 + 14 = 53, is the 938th name in the list. So, COLIN would obtain a score of 938 × 53 =
// 49714. |
<|file_name|>test_logger.py<|end_file_name|><|fim▁begin|>from io import StringIO
from coaster.logger import RepeatValueIndicator, filtered_value, pprint_with_indent
def test_filtered_value():
"""Test for filtered values."""
# Doesn't touch normal key/value pairs
assert filtered_value('normal', 'value') == 'value'
assert filtered_value('also_normal', 123) == 123
# But does redact sensitive keys
assert filtered_value('password', '123pass') != '123pass'
# The returned value is an object that renders via repr and str as '[Filtered]'
assert repr(filtered_value('password', '123pass')) == '[Filtered]'
assert str(filtered_value('password', '123pass')) == '[Filtered]'
# Also works on partial matches in the keys
assert repr(filtered_value('confirm_password', '123pass')) == '[Filtered]'
# The filter uses a verbose regex. Words in the middle of the regex also work
assert repr(filtered_value('access_token', 'secret-here')) == '[Filtered]'
# Filters are case insensitive
assert repr(filtered_value('TELEGRAM_ERROR_APIKEY', 'api:key')) == '[Filtered]'
# Keys with 'token' as a word are also filtered
assert repr(filtered_value('SMS_TWILIO_TOKEN', 'api:key')) == '[Filtered]'
# Numbers that look like card numbers are filtered
assert (
filtered_value('anything', 'My number is 1234 5678 9012 3456')
== 'My number is [Filtered]'
)
# This works with any combination of spaces and dashes within the number
assert (
filtered_value('anything', 'My number is 1234 5678-90123456')
== 'My number is [Filtered]'
)
def test_pprint_with_indent():
"""Test pprint_with_indent does indentation."""
out = StringIO()
data = {
12: 34,
'confirm_password': '12345qwerty',
'credentials': ['abc', 'def'],
'key': 'value',
'nested_dict': {'password': 'not_filtered'},
'password': '12345qwerty',
}
pprint_with_indent(data, out)
assert (
out.getvalue()
== '''\
{12: 34,
'confirm_password': [Filtered],
'credentials': [Filtered],
'key': 'value',
'nested_dict': {'password': 'not_filtered'},
'password': [Filtered]}
'''
)<|fim▁hole|>
def test_repeat_value_indicator():
"""Test RepeatValueIndicator class."""
assert repr(RepeatValueIndicator('key')) == "<same as prior 'key'>"
assert str(RepeatValueIndicator('key')) == "<same as prior 'key'>"<|fim▁end|> | |
<|file_name|>decoder.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Decoding metadata from a single crate's metadata
#![allow(non_camel_case_types)]
pub use self::DefLike::*;
use self::Family::*;
use back::svh::Svh;
use metadata::cstore::crate_metadata;
use metadata::common::*;
use metadata::csearch::MethodInfo;
use metadata::csearch;
use metadata::cstore;
use metadata::tydecode::{parse_ty_data, parse_region_data, parse_def_id,
parse_type_param_def_data, parse_bare_fn_ty_data,
parse_trait_ref_data, parse_predicate_data};
use middle::def;
use middle::lang_items;
use middle::subst;
use middle::ty::{ImplContainer, TraitContainer};
use middle::ty::{self, Ty};
use middle::astencode::vtable_decoder_helpers;
use std::collections::HashMap;
use std::hash::{self, Hash, SipHasher};
use std::io::prelude::*;
use std::io;
use std::num::FromPrimitive;
use std::rc::Rc;
use std::slice::bytes;
use std::str;
use rbml::reader;
use rbml;
use serialize::Decodable;
use syntax::ast_map;
use syntax::attr;
use syntax::parse::token::{IdentInterner, special_idents};
use syntax::parse::token;
use syntax::print::pprust;
use syntax::ast;
use syntax::codemap;
use syntax::ptr::P;
pub type Cmd<'a> = &'a crate_metadata;
// A function that takes a def_id relative to the crate being searched and
// returns a def_id relative to the compilation environment, i.e. if we hit a
// def_id for an item defined in another crate, somebody needs to figure out
// what crate that's in and give us a def_id that makes sense for the current
// build.
fn u32_from_be_bytes(bytes: &[u8]) -> u32 {
let mut b = [0; 4];
bytes::copy_memory(&bytes[..4], &mut b);
unsafe { (*(b.as_ptr() as *const u32)).to_be() }
}
fn lookup_hash<'a, F>(d: rbml::Doc<'a>, mut eq_fn: F, hash: u64) -> Option<rbml::Doc<'a>> where
F: FnMut(&[u8]) -> bool,
{
let index = reader::get_doc(d, tag_index);
let table = reader::get_doc(index, tag_index_table);
let hash_pos = table.start + (hash % 256 * 4) as usize;
let pos = u32_from_be_bytes(&d.data[hash_pos..]) as usize;
let tagged_doc = reader::doc_at(d.data, pos).unwrap();
let belt = tag_index_buckets_bucket_elt;
let mut ret = None;
reader::tagged_docs(tagged_doc.doc, belt, |elt| {
let pos = u32_from_be_bytes(&elt.data[elt.start..]) as usize;
if eq_fn(&elt.data[elt.start + 4 .. elt.end]) {
ret = Some(reader::doc_at(d.data, pos).unwrap().doc);
false
} else {
true
}
});
ret
}
pub fn maybe_find_item<'a>(item_id: ast::NodeId,
items: rbml::Doc<'a>) -> Option<rbml::Doc<'a>> {
fn eq_item(bytes: &[u8], item_id: ast::NodeId) -> bool {
u32_from_be_bytes(bytes) == item_id
}
lookup_hash(items,
|a| eq_item(a, item_id),
hash::hash::<i64, SipHasher>(&(item_id as i64)))
}
fn find_item<'a>(item_id: ast::NodeId, items: rbml::Doc<'a>) -> rbml::Doc<'a> {
match maybe_find_item(item_id, items) {
None => panic!("lookup_item: id not found: {}", item_id),
Some(d) => d
}
}
// Looks up an item in the given metadata and returns an rbml doc pointing
// to the item data.
fn lookup_item<'a>(item_id: ast::NodeId, data: &'a [u8]) -> rbml::Doc<'a> {
let items = reader::get_doc(rbml::Doc::new(data), tag_items);
find_item(item_id, items)
}
#[derive(PartialEq)]
enum Family {
ImmStatic, // c
MutStatic, // b
Fn, // f
CtorFn, // o
StaticMethod, // F
Method, // h
Type, // y
Mod, // m
ForeignMod, // n
Enum, // t
TupleVariant, // v
StructVariant, // V
Impl, // i
DefaultImpl, // d
Trait, // I
Struct, // S
PublicField, // g
InheritedField, // N
Constant, // C
}
fn item_family(item: rbml::Doc) -> Family {
let fam = reader::get_doc(item, tag_items_data_item_family);
match reader::doc_as_u8(fam) as char {
'C' => Constant,
'c' => ImmStatic,
'b' => MutStatic,
'f' => Fn,
'o' => CtorFn,
'F' => StaticMethod,
'h' => Method,
'y' => Type,
'm' => Mod,
'n' => ForeignMod,
't' => Enum,
'v' => TupleVariant,
'V' => StructVariant,
'i' => Impl,
'd' => DefaultImpl,
'I' => Trait,
'S' => Struct,
'g' => PublicField,
'N' => InheritedField,
c => panic!("unexpected family char: {}", c)
}
}
fn item_visibility(item: rbml::Doc) -> ast::Visibility {
match reader::maybe_get_doc(item, tag_items_data_item_visibility) {
None => ast::Public,
Some(visibility_doc) => {
match reader::doc_as_u8(visibility_doc) as char {
'y' => ast::Public,
'i' => ast::Inherited,
_ => panic!("unknown visibility character")
}
}
}
}
fn item_sort(item: rbml::Doc) -> Option<char> {
let mut ret = None;
reader::tagged_docs(item, tag_item_trait_item_sort, |doc| {
ret = Some(doc.as_str_slice().as_bytes()[0] as char);
false
});
ret
}
fn item_symbol(item: rbml::Doc) -> String {
reader::get_doc(item, tag_items_data_item_symbol).as_str().to_string()
}
fn item_parent_item(d: rbml::Doc) -> Option<ast::DefId> {
let mut ret = None;
reader::tagged_docs(d, tag_items_data_parent_item, |did| {
ret = Some(reader::with_doc_data(did, parse_def_id));
false
});
ret
}
fn item_reqd_and_translated_parent_item(cnum: ast::CrateNum,
d: rbml::Doc) -> ast::DefId {
let trait_did = item_parent_item(d).expect("item without parent");
ast::DefId { krate: cnum, node: trait_did.node }
}
fn item_def_id(d: rbml::Doc, cdata: Cmd) -> ast::DefId {
let tagdoc = reader::get_doc(d, tag_def_id);
return translate_def_id(cdata, reader::with_doc_data(tagdoc, parse_def_id));
}
fn get_provided_source(d: rbml::Doc, cdata: Cmd) -> Option<ast::DefId> {
reader::maybe_get_doc(d, tag_item_method_provided_source).map(|doc| {
translate_def_id(cdata, reader::with_doc_data(doc, parse_def_id))
})
}
fn each_reexport<F>(d: rbml::Doc, f: F) -> bool where
F: FnMut(rbml::Doc) -> bool,
{
reader::tagged_docs(d, tag_items_data_item_reexport, f)
}
fn variant_disr_val(d: rbml::Doc) -> Option<ty::Disr> {
reader::maybe_get_doc(d, tag_disr_val).and_then(|val_doc| {
reader::with_doc_data(val_doc, |data| {
str::from_utf8(data).ok().and_then(|s| s.parse().ok())
})
})
}
fn doc_type<'tcx>(doc: rbml::Doc, tcx: &ty::ctxt<'tcx>, cdata: Cmd) -> Ty<'tcx> {
let tp = reader::get_doc(doc, tag_items_data_item_type);
parse_ty_data(tp.data, cdata.cnum, tp.start, tcx,
|_, did| translate_def_id(cdata, did))
}
fn doc_method_fty<'tcx>(doc: rbml::Doc, tcx: &ty::ctxt<'tcx>,
cdata: Cmd) -> ty::BareFnTy<'tcx> {
let tp = reader::get_doc(doc, tag_item_method_fty);
parse_bare_fn_ty_data(tp.data, cdata.cnum, tp.start, tcx,
|_, did| translate_def_id(cdata, did))
}
pub fn item_type<'tcx>(_item_id: ast::DefId, item: rbml::Doc,
tcx: &ty::ctxt<'tcx>, cdata: Cmd) -> Ty<'tcx> {
doc_type(item, tcx, cdata)
}
fn doc_trait_ref<'tcx>(doc: rbml::Doc, tcx: &ty::ctxt<'tcx>, cdata: Cmd)
-> Rc<ty::TraitRef<'tcx>> {
parse_trait_ref_data(doc.data, cdata.cnum, doc.start, tcx,
|_, did| translate_def_id(cdata, did))
}
fn item_trait_ref<'tcx>(doc: rbml::Doc, tcx: &ty::ctxt<'tcx>, cdata: Cmd)
-> Rc<ty::TraitRef<'tcx>> {
let tp = reader::get_doc(doc, tag_item_trait_ref);
doc_trait_ref(tp, tcx, cdata)
}
fn enum_variant_ids(item: rbml::Doc, cdata: Cmd) -> Vec<ast::DefId> {
let mut ids: Vec<ast::DefId> = Vec::new();
let v = tag_items_data_item_variant;
reader::tagged_docs(item, v, |p| {
let ext = reader::with_doc_data(p, parse_def_id);
ids.push(ast::DefId { krate: cdata.cnum, node: ext.node });
true
});
return ids;
}
fn item_path(item_doc: rbml::Doc) -> Vec<ast_map::PathElem> {
let path_doc = reader::get_doc(item_doc, tag_path);
let len_doc = reader::get_doc(path_doc, tag_path_len);
let len = reader::doc_as_u32(len_doc) as usize;
let mut result = Vec::with_capacity(len);
reader::docs(path_doc, |tag, elt_doc| {
if tag == tag_path_elem_mod {
let s = elt_doc.as_str_slice();
result.push(ast_map::PathMod(token::intern(s)));
} else if tag == tag_path_elem_name {
let s = elt_doc.as_str_slice();
result.push(ast_map::PathName(token::intern(s)));
} else {
// ignore tag_path_len element
}
true
});
result
}
fn item_name(intr: &IdentInterner, item: rbml::Doc) -> ast::Name {
let name = reader::get_doc(item, tag_paths_data_name);
let string = name.as_str_slice();
match intr.find(string) {
None => token::intern(string),
Some(val) => val,
}
}
fn item_to_def_like(item: rbml::Doc, did: ast::DefId, cnum: ast::CrateNum)
-> DefLike {
let fam = item_family(item);
match fam {
Constant => DlDef(def::DefConst(did)),
ImmStatic => DlDef(def::DefStatic(did, false)),
MutStatic => DlDef(def::DefStatic(did, true)),
Struct => DlDef(def::DefStruct(did)),
Fn => DlDef(def::DefFn(did, false)),
CtorFn => DlDef(def::DefFn(did, true)),
Method | StaticMethod => {
// def_static_method carries an optional field of its enclosing
// trait or enclosing impl (if this is an inherent static method).
// So we need to detect whether this is in a trait or not, which
// we do through the mildly hacky way of checking whether there is
// a trait_parent_sort.
let provenance = if reader::maybe_get_doc(
item, tag_item_trait_parent_sort).is_some() {
def::FromTrait(item_reqd_and_translated_parent_item(cnum,
item))
} else {
def::FromImpl(item_reqd_and_translated_parent_item(cnum,
item))
};
DlDef(def::DefMethod(did, provenance))
}
Type => {
if item_sort(item) == Some('t') {
let trait_did = item_reqd_and_translated_parent_item(cnum, item);
DlDef(def::DefAssociatedTy(trait_did, did))
} else {
DlDef(def::DefTy(did, false))
}
}
Mod => DlDef(def::DefMod(did)),
ForeignMod => DlDef(def::DefForeignMod(did)),
StructVariant => {
let enum_did = item_reqd_and_translated_parent_item(cnum, item);
DlDef(def::DefVariant(enum_did, did, true))
}
TupleVariant => {
let enum_did = item_reqd_and_translated_parent_item(cnum, item);
DlDef(def::DefVariant(enum_did, did, false))
}
Trait => DlDef(def::DefTrait(did)),
Enum => DlDef(def::DefTy(did, true)),
Impl | DefaultImpl => DlImpl(did),
PublicField | InheritedField => DlField,
}
}
fn parse_unsafety(item_doc: rbml::Doc) -> ast::Unsafety {
let unsafety_doc = reader::get_doc(item_doc, tag_unsafety);
if reader::doc_as_u8(unsafety_doc) != 0 {
ast::Unsafety::Unsafe
} else {
ast::Unsafety::Normal
}
}
fn parse_paren_sugar(item_doc: rbml::Doc) -> bool {
let paren_sugar_doc = reader::get_doc(item_doc, tag_paren_sugar);
reader::doc_as_u8(paren_sugar_doc) != 0
}
fn parse_polarity(item_doc: rbml::Doc) -> ast::ImplPolarity {
let polarity_doc = reader::get_doc(item_doc, tag_polarity);
if reader::doc_as_u8(polarity_doc) != 0 {
ast::ImplPolarity::Negative
} else {
ast::ImplPolarity::Positive
}
}
fn parse_associated_type_names(item_doc: rbml::Doc) -> Vec<ast::Name> {
let names_doc = reader::get_doc(item_doc, tag_associated_type_names);
let mut names = Vec::new();
reader::tagged_docs(names_doc, tag_associated_type_name, |name_doc| {
let name = token::intern(name_doc.as_str_slice());
names.push(name);
true
});
names
}
pub fn get_trait_def<'tcx>(cdata: Cmd,
item_id: ast::NodeId,
tcx: &ty::ctxt<'tcx>) -> ty::TraitDef<'tcx>
{
let item_doc = lookup_item(item_id, cdata.data());
let generics = doc_generics(item_doc, tcx, cdata, tag_item_generics);
let unsafety = parse_unsafety(item_doc);
let associated_type_names = parse_associated_type_names(item_doc);
let paren_sugar = parse_paren_sugar(item_doc);
ty::TraitDef {
paren_sugar: paren_sugar,
unsafety: unsafety,
generics: generics,
trait_ref: item_trait_ref(item_doc, tcx, cdata),
associated_type_names: associated_type_names,
}
}
pub fn get_predicates<'tcx>(cdata: Cmd,
item_id: ast::NodeId,
tcx: &ty::ctxt<'tcx>)
-> ty::GenericPredicates<'tcx>
{
let item_doc = lookup_item(item_id, cdata.data());
doc_predicates(item_doc, tcx, cdata, tag_item_generics)
}
pub fn get_super_predicates<'tcx>(cdata: Cmd,
item_id: ast::NodeId,
tcx: &ty::ctxt<'tcx>)
-> ty::GenericPredicates<'tcx>
{
let item_doc = lookup_item(item_id, cdata.data());
doc_predicates(item_doc, tcx, cdata, tag_item_super_predicates)
}
pub fn get_type<'tcx>(cdata: Cmd, id: ast::NodeId, tcx: &ty::ctxt<'tcx>)
-> ty::TypeScheme<'tcx>
{
let item_doc = lookup_item(id, cdata.data());
let t = item_type(ast::DefId { krate: cdata.cnum, node: id }, item_doc, tcx,
cdata);
let generics = doc_generics(item_doc, tcx, cdata, tag_item_generics);
ty::TypeScheme {
generics: generics,
ty: t
}
}
pub fn get_stability(cdata: Cmd, id: ast::NodeId) -> Option<attr::Stability> {
let item = lookup_item(id, cdata.data());
reader::maybe_get_doc(item, tag_items_data_item_stability).map(|doc| {
let mut decoder = reader::Decoder::new(doc);
Decodable::decode(&mut decoder).unwrap()
})
}
pub fn get_repr_attrs(cdata: Cmd, id: ast::NodeId) -> Vec<attr::ReprAttr> {
let item = lookup_item(id, cdata.data());
match reader::maybe_get_doc(item, tag_items_data_item_repr).map(|doc| {
let mut decoder = reader::Decoder::new(doc);
Decodable::decode(&mut decoder).unwrap()
}) {
Some(attrs) => attrs,
None => Vec::new(),
}
}
pub fn get_impl_polarity<'tcx>(cdata: Cmd,
id: ast::NodeId)
-> Option<ast::ImplPolarity>
{
let item_doc = lookup_item(id, cdata.data());
let fam = item_family(item_doc);
match fam {
Family::Impl => {
Some(parse_polarity(item_doc))
}
_ => None
}
}
pub fn get_impl_trait<'tcx>(cdata: Cmd,
id: ast::NodeId,
tcx: &ty::ctxt<'tcx>)
-> Option<Rc<ty::TraitRef<'tcx>>>
{
let item_doc = lookup_item(id, cdata.data());
let fam = item_family(item_doc);
match fam {
Family::Impl | Family::DefaultImpl => {
reader::maybe_get_doc(item_doc, tag_item_trait_ref).map(|tp| {
doc_trait_ref(tp, tcx, cdata)
})
}
_ => None
}
}
pub fn get_impl_vtables<'tcx>(cdata: Cmd,
id: ast::NodeId,
tcx: &ty::ctxt<'tcx>)
-> ty::vtable_res<'tcx>
{
let item_doc = lookup_item(id, cdata.data());
let vtables_doc = reader::get_doc(item_doc, tag_item_impl_vtables);
let mut decoder = reader::Decoder::new(vtables_doc);
decoder.read_vtable_res(tcx, cdata)
}
pub fn get_symbol(data: &[u8], id: ast::NodeId) -> String {
return item_symbol(lookup_item(id, data));
}
// Something that a name can resolve to.
#[derive(Copy, Clone, Debug)]
pub enum DefLike {
DlDef(def::Def),
DlImpl(ast::DefId),
DlField
}
/// Iterates over the language items in the given crate.
pub fn each_lang_item<F>(cdata: Cmd, mut f: F) -> bool where
F: FnMut(ast::NodeId, usize) -> bool,
{
let root = rbml::Doc::new(cdata.data());
let lang_items = reader::get_doc(root, tag_lang_items);
reader::tagged_docs(lang_items, tag_lang_items_item, |item_doc| {
let id_doc = reader::get_doc(item_doc, tag_lang_items_item_id);
let id = reader::doc_as_u32(id_doc) as usize;
let node_id_doc = reader::get_doc(item_doc,
tag_lang_items_item_node_id);
let node_id = reader::doc_as_u32(node_id_doc) as ast::NodeId;
f(node_id, id)
})
}
fn each_child_of_item_or_crate<F, G>(intr: Rc<IdentInterner>,
cdata: Cmd,
item_doc: rbml::Doc,
mut get_crate_data: G,
mut callback: F) where
F: FnMut(DefLike, ast::Name, ast::Visibility),
G: FnMut(ast::CrateNum) -> Rc<crate_metadata>,
{
// Iterate over all children.
let _ = reader::tagged_docs(item_doc, tag_mod_child, |child_info_doc| {
let child_def_id = reader::with_doc_data(child_info_doc,
parse_def_id);
let child_def_id = translate_def_id(cdata, child_def_id);
// This item may be in yet another crate if it was the child of a
// reexport.
let crate_data = if child_def_id.krate == cdata.cnum {
None
} else {
Some(get_crate_data(child_def_id.krate))
};
let crate_data = match crate_data {
Some(ref cdata) => &**cdata,
None => cdata
};
let other_crates_items = reader::get_doc(rbml::Doc::new(crate_data.data()), tag_items);
// Get the item.
match maybe_find_item(child_def_id.node, other_crates_items) {
None => {}
Some(child_item_doc) => {
// Hand off the item to the callback.
let child_name = item_name(&*intr, child_item_doc);
let def_like = item_to_def_like(child_item_doc,
child_def_id,
cdata.cnum);
let visibility = item_visibility(child_item_doc);
callback(def_like, child_name, visibility);
}
}
true
});
// As a special case, iterate over all static methods of
// associated implementations too. This is a bit of a botch.
// --pcwalton
let _ = reader::tagged_docs(item_doc,
tag_items_data_item_inherent_impl,
|inherent_impl_def_id_doc| {
let inherent_impl_def_id = item_def_id(inherent_impl_def_id_doc,
cdata);
let items = reader::get_doc(rbml::Doc::new(cdata.data()), tag_items);
if let Some(inherent_impl_doc) = maybe_find_item(inherent_impl_def_id.node, items) {
let _ = reader::tagged_docs(inherent_impl_doc,
tag_item_impl_item,
|impl_item_def_id_doc| {
let impl_item_def_id = item_def_id(impl_item_def_id_doc,
cdata);
if let Some(impl_method_doc) = maybe_find_item(impl_item_def_id.node, items) {
if let StaticMethod = item_family(impl_method_doc) {
// Hand off the static method to the callback.
let static_method_name = item_name(&*intr, impl_method_doc);
let static_method_def_like = item_to_def_like(impl_method_doc,
impl_item_def_id,
cdata.cnum);
callback(static_method_def_like,
static_method_name,
item_visibility(impl_method_doc));
}
}
true
});
}
true
});
// Iterate over all reexports.
let _ = each_reexport(item_doc, |reexport_doc| {
let def_id_doc = reader::get_doc(reexport_doc,
tag_items_data_item_reexport_def_id);
let child_def_id = reader::with_doc_data(def_id_doc,
parse_def_id);
let child_def_id = translate_def_id(cdata, child_def_id);
let name_doc = reader::get_doc(reexport_doc,
tag_items_data_item_reexport_name);
let name = name_doc.as_str_slice();
// This reexport may be in yet another crate.
let crate_data = if child_def_id.krate == cdata.cnum {
None
} else {
Some(get_crate_data(child_def_id.krate))
};
let crate_data = match crate_data {
Some(ref cdata) => &**cdata,
None => cdata
};
let other_crates_items = reader::get_doc(rbml::Doc::new(crate_data.data()), tag_items);
// Get the item.
if let Some(child_item_doc) = maybe_find_item(child_def_id.node, other_crates_items) {
// Hand off the item to the callback.
let def_like = item_to_def_like(child_item_doc,
child_def_id,
child_def_id.krate);
// These items have a public visibility because they're part of
// a public re-export.
callback(def_like, token::intern(name), ast::Public);
}
true
});
}
/// Iterates over each child of the given item.
pub fn each_child_of_item<F, G>(intr: Rc<IdentInterner>,
cdata: Cmd,
id: ast::NodeId,
get_crate_data: G,
callback: F) where
F: FnMut(DefLike, ast::Name, ast::Visibility),
G: FnMut(ast::CrateNum) -> Rc<crate_metadata>,
{
// Find the item.
let root_doc = rbml::Doc::new(cdata.data());
let items = reader::get_doc(root_doc, tag_items);
let item_doc = match maybe_find_item(id, items) {
None => return,
Some(item_doc) => item_doc,
};
each_child_of_item_or_crate(intr,
cdata,
item_doc,
get_crate_data,
callback)
}
/// Iterates over all the top-level crate items.
pub fn each_top_level_item_of_crate<F, G>(intr: Rc<IdentInterner>,
cdata: Cmd,
get_crate_data: G,
callback: F) where
F: FnMut(DefLike, ast::Name, ast::Visibility),
G: FnMut(ast::CrateNum) -> Rc<crate_metadata>,
{
let root_doc = rbml::Doc::new(cdata.data());
let misc_info_doc = reader::get_doc(root_doc, tag_misc_info);
let crate_items_doc = reader::get_doc(misc_info_doc,
tag_misc_info_crate_items);
each_child_of_item_or_crate(intr,
cdata,
crate_items_doc,
get_crate_data,
callback)
}
pub fn get_item_path(cdata: Cmd, id: ast::NodeId) -> Vec<ast_map::PathElem> {
item_path(lookup_item(id, cdata.data()))
}
pub type DecodeInlinedItem<'a> =
Box<for<'tcx> FnMut(Cmd,
&ty::ctxt<'tcx>,
Vec<ast_map::PathElem>,
rbml::Doc)
-> Result<&'tcx ast::InlinedItem, Vec<ast_map::PathElem>> + 'a>;
pub fn maybe_get_item_ast<'tcx>(cdata: Cmd, tcx: &ty::ctxt<'tcx>, id: ast::NodeId,
mut decode_inlined_item: DecodeInlinedItem)
-> csearch::FoundAst<'tcx> {
debug!("Looking up item: {}", id);
let item_doc = lookup_item(id, cdata.data());
let path = item_path(item_doc).init().to_vec();
match decode_inlined_item(cdata, tcx, path, item_doc) {
Ok(ii) => csearch::FoundAst::Found(ii),
Err(path) => {
match item_parent_item(item_doc) {
Some(did) => {
let did = translate_def_id(cdata, did);
let parent_item = lookup_item(did.node, cdata.data());
match decode_inlined_item(cdata, tcx, path, parent_item) {
Ok(ii) => csearch::FoundAst::FoundParent(did, ii),
Err(_) => csearch::FoundAst::NotFound
}
}
None => csearch::FoundAst::NotFound
}
}
}
}
pub fn get_enum_variant_defs(intr: &IdentInterner,
cdata: Cmd,
id: ast::NodeId)
-> Vec<(def::Def, ast::Name, ast::Visibility)> {
let data = cdata.data();
let items = reader::get_doc(rbml::Doc::new(data), tag_items);
let item = find_item(id, items);
enum_variant_ids(item, cdata).iter().map(|did| {
let item = find_item(did.node, items);
let name = item_name(intr, item);
let visibility = item_visibility(item);
match item_to_def_like(item, *did, cdata.cnum) {
DlDef(def @ def::DefVariant(..)) => (def, name, visibility),
_ => unreachable!()
}
}).collect()
}
pub fn get_enum_variants<'tcx>(intr: Rc<IdentInterner>, cdata: Cmd, id: ast::NodeId,
tcx: &ty::ctxt<'tcx>) -> Vec<Rc<ty::VariantInfo<'tcx>>> {
let data = cdata.data();
let items = reader::get_doc(rbml::Doc::new(data), tag_items);
let item = find_item(id, items);
let mut disr_val = 0;
enum_variant_ids(item, cdata).iter().map(|did| {
let item = find_item(did.node, items);
let ctor_ty = item_type(ast::DefId { krate: cdata.cnum, node: id},
item, tcx, cdata);
let name = item_name(&*intr, item);
let (ctor_ty, arg_tys, arg_names) = match ctor_ty.sty {
ty::ty_bare_fn(_, ref f) =>
(Some(ctor_ty), f.sig.0.inputs.clone(), None),
_ => { // Nullary or struct enum variant.
let mut arg_names = Vec::new();
let arg_tys = get_struct_fields(intr.clone(), cdata, did.node)
.iter()
.map(|field_ty| {
arg_names.push(ast::Ident::new(field_ty.name));
get_type(cdata, field_ty.id.node, tcx).ty
})
.collect();
let arg_names = if arg_names.len() == 0 { None } else { Some(arg_names) };
(None, arg_tys, arg_names)
}
};
match variant_disr_val(item) {
Some(val) => { disr_val = val; }
_ => { /* empty */ }
}
let old_disr_val = disr_val;
disr_val = disr_val.wrapping_add(1);
Rc::new(ty::VariantInfo {
args: arg_tys,
arg_names: arg_names,
ctor_ty: ctor_ty,
name: name,
// I'm not even sure if we encode visibility
// for variants -- TEST -- tjc
id: *did,
disr_val: old_disr_val,
vis: ast::Inherited
})
}).collect()
}
fn get_explicit_self(item: rbml::Doc) -> ty::ExplicitSelfCategory {
fn get_mutability(ch: u8) -> ast::Mutability {
match ch as char {
'i' => ast::MutImmutable,
'm' => ast::MutMutable,
_ => panic!("unknown mutability character: `{}`", ch as char),
}
}
<|fim▁hole|> let string = explicit_self_doc.as_str_slice();
let explicit_self_kind = string.as_bytes()[0];
match explicit_self_kind as char {
's' => ty::StaticExplicitSelfCategory,
'v' => ty::ByValueExplicitSelfCategory,
'~' => ty::ByBoxExplicitSelfCategory,
// FIXME(#4846) expl. region
'&' => {
ty::ByReferenceExplicitSelfCategory(
ty::ReEmpty,
get_mutability(string.as_bytes()[1]))
}
_ => panic!("unknown self type code: `{}`", explicit_self_kind as char)
}
}
/// Returns the def IDs of all the items in the given implementation.
pub fn get_impl_items(cdata: Cmd, impl_id: ast::NodeId)
-> Vec<ty::ImplOrTraitItemId> {
let mut impl_items = Vec::new();
reader::tagged_docs(lookup_item(impl_id, cdata.data()),
tag_item_impl_item, |doc| {
let def_id = item_def_id(doc, cdata);
match item_sort(doc) {
Some('r') | Some('p') => {
impl_items.push(ty::MethodTraitItemId(def_id))
}
Some('t') => impl_items.push(ty::TypeTraitItemId(def_id)),
_ => panic!("unknown impl item sort"),
}
true
});
impl_items
}
pub fn get_trait_name(intr: Rc<IdentInterner>,
cdata: Cmd,
id: ast::NodeId)
-> ast::Name {
let doc = lookup_item(id, cdata.data());
item_name(&*intr, doc)
}
pub fn is_static_method(cdata: Cmd, id: ast::NodeId) -> bool {
let doc = lookup_item(id, cdata.data());
match item_sort(doc) {
Some('r') | Some('p') => {
get_explicit_self(doc) == ty::StaticExplicitSelfCategory
}
_ => false
}
}
pub fn get_impl_or_trait_item<'tcx>(intr: Rc<IdentInterner>,
cdata: Cmd,
id: ast::NodeId,
tcx: &ty::ctxt<'tcx>)
-> ty::ImplOrTraitItem<'tcx> {
let method_doc = lookup_item(id, cdata.data());
let def_id = item_def_id(method_doc, cdata);
let container_id = item_reqd_and_translated_parent_item(cdata.cnum,
method_doc);
let container_doc = lookup_item(container_id.node, cdata.data());
let container = match item_family(container_doc) {
Trait => TraitContainer(container_id),
_ => ImplContainer(container_id),
};
let name = item_name(&*intr, method_doc);
let vis = item_visibility(method_doc);
match item_sort(method_doc) {
Some('r') | Some('p') => {
let generics = doc_generics(method_doc, tcx, cdata, tag_method_ty_generics);
let predicates = doc_predicates(method_doc, tcx, cdata, tag_method_ty_generics);
let fty = doc_method_fty(method_doc, tcx, cdata);
let explicit_self = get_explicit_self(method_doc);
let provided_source = get_provided_source(method_doc, cdata);
ty::MethodTraitItem(Rc::new(ty::Method::new(name,
generics,
predicates,
fty,
explicit_self,
vis,
def_id,
container,
provided_source)))
}
Some('t') => {
ty::TypeTraitItem(Rc::new(ty::AssociatedType {
name: name,
vis: vis,
def_id: def_id,
container: container,
}))
}
_ => panic!("unknown impl/trait item sort"),
}
}
pub fn get_trait_item_def_ids(cdata: Cmd, id: ast::NodeId)
-> Vec<ty::ImplOrTraitItemId> {
let data = cdata.data();
let item = lookup_item(id, data);
let mut result = Vec::new();
reader::tagged_docs(item, tag_item_trait_item, |mth| {
let def_id = item_def_id(mth, cdata);
match item_sort(mth) {
Some('r') | Some('p') => {
result.push(ty::MethodTraitItemId(def_id));
}
Some('t') => result.push(ty::TypeTraitItemId(def_id)),
_ => panic!("unknown trait item sort"),
}
true
});
result
}
pub fn get_item_variances(cdata: Cmd, id: ast::NodeId) -> ty::ItemVariances {
let data = cdata.data();
let item_doc = lookup_item(id, data);
let variance_doc = reader::get_doc(item_doc, tag_item_variances);
let mut decoder = reader::Decoder::new(variance_doc);
Decodable::decode(&mut decoder).unwrap()
}
pub fn get_provided_trait_methods<'tcx>(intr: Rc<IdentInterner>,
cdata: Cmd,
id: ast::NodeId,
tcx: &ty::ctxt<'tcx>)
-> Vec<Rc<ty::Method<'tcx>>> {
let data = cdata.data();
let item = lookup_item(id, data);
let mut result = Vec::new();
reader::tagged_docs(item, tag_item_trait_item, |mth_id| {
let did = item_def_id(mth_id, cdata);
let mth = lookup_item(did.node, data);
if item_sort(mth) == Some('p') {
let trait_item = get_impl_or_trait_item(intr.clone(),
cdata,
did.node,
tcx);
match trait_item {
ty::MethodTraitItem(ref method) => {
result.push((*method).clone())
}
ty::TypeTraitItem(_) => {}
}
}
true
});
return result;
}
pub fn get_type_name_if_impl(cdata: Cmd,
node_id: ast::NodeId) -> Option<ast::Name> {
let item = lookup_item(node_id, cdata.data());
if item_family(item) != Impl {
return None;
}
let mut ret = None;
reader::tagged_docs(item, tag_item_impl_type_basename, |doc| {
ret = Some(token::intern(doc.as_str_slice()));
false
});
ret
}
pub fn get_methods_if_impl(intr: Rc<IdentInterner>,
cdata: Cmd,
node_id: ast::NodeId)
-> Option<Vec<MethodInfo> > {
let item = lookup_item(node_id, cdata.data());
if item_family(item) != Impl {
return None;
}
// If this impl implements a trait, don't consider it.
let ret = reader::tagged_docs(item, tag_item_trait_ref, |_doc| {
false
});
if !ret { return None }
let mut impl_method_ids = Vec::new();
reader::tagged_docs(item, tag_item_impl_item, |impl_method_doc| {
impl_method_ids.push(item_def_id(impl_method_doc, cdata));
true
});
let mut impl_methods = Vec::new();
for impl_method_id in &impl_method_ids {
let impl_method_doc = lookup_item(impl_method_id.node, cdata.data());
let family = item_family(impl_method_doc);
match family {
StaticMethod | Method => {
impl_methods.push(MethodInfo {
name: item_name(&*intr, impl_method_doc),
def_id: item_def_id(impl_method_doc, cdata),
vis: item_visibility(impl_method_doc),
});
}
_ => {}
}
}
return Some(impl_methods);
}
/// If node_id is the constructor of a tuple struct, retrieve the NodeId of
/// the actual type definition, otherwise, return None
pub fn get_tuple_struct_definition_if_ctor(cdata: Cmd,
node_id: ast::NodeId)
-> Option<ast::DefId>
{
let item = lookup_item(node_id, cdata.data());
let mut ret = None;
reader::tagged_docs(item, tag_items_data_item_is_tuple_struct_ctor, |_| {
ret = Some(item_reqd_and_translated_parent_item(cdata.cnum, item));
false
});
ret
}
pub fn get_item_attrs(cdata: Cmd,
orig_node_id: ast::NodeId)
-> Vec<ast::Attribute> {
// The attributes for a tuple struct are attached to the definition, not the ctor;
// we assume that someone passing in a tuple struct ctor is actually wanting to
// look at the definition
let node_id = get_tuple_struct_definition_if_ctor(cdata, orig_node_id);
let node_id = node_id.map(|x| x.node).unwrap_or(orig_node_id);
let item = lookup_item(node_id, cdata.data());
get_attributes(item)
}
pub fn get_struct_field_attrs(cdata: Cmd) -> HashMap<ast::NodeId, Vec<ast::Attribute>> {
let data = rbml::Doc::new(cdata.data());
let fields = reader::get_doc(data, tag_struct_fields);
let mut map = HashMap::new();
reader::tagged_docs(fields, tag_struct_field, |field| {
let id = reader::doc_as_u32(reader::get_doc(field, tag_struct_field_id));
let attrs = get_attributes(field);
map.insert(id, attrs);
true
});
map
}
fn struct_field_family_to_visibility(family: Family) -> ast::Visibility {
match family {
PublicField => ast::Public,
InheritedField => ast::Inherited,
_ => panic!()
}
}
pub fn get_struct_fields(intr: Rc<IdentInterner>, cdata: Cmd, id: ast::NodeId)
-> Vec<ty::field_ty> {
let data = cdata.data();
let item = lookup_item(id, data);
let mut result = Vec::new();
reader::tagged_docs(item, tag_item_field, |an_item| {
let f = item_family(an_item);
if f == PublicField || f == InheritedField {
let name = item_name(&*intr, an_item);
let did = item_def_id(an_item, cdata);
let tagdoc = reader::get_doc(an_item, tag_item_field_origin);
let origin_id = translate_def_id(cdata, reader::with_doc_data(tagdoc, parse_def_id));
result.push(ty::field_ty {
name: name,
id: did,
vis: struct_field_family_to_visibility(f),
origin: origin_id,
});
}
true
});
reader::tagged_docs(item, tag_item_unnamed_field, |an_item| {
let did = item_def_id(an_item, cdata);
let tagdoc = reader::get_doc(an_item, tag_item_field_origin);
let f = item_family(an_item);
let origin_id = translate_def_id(cdata, reader::with_doc_data(tagdoc, parse_def_id));
result.push(ty::field_ty {
name: special_idents::unnamed_field.name,
id: did,
vis: struct_field_family_to_visibility(f),
origin: origin_id,
});
true
});
result
}
fn get_meta_items(md: rbml::Doc) -> Vec<P<ast::MetaItem>> {
let mut items: Vec<P<ast::MetaItem>> = Vec::new();
reader::tagged_docs(md, tag_meta_item_word, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = token::intern_and_get_ident(nd.as_str_slice());
items.push(attr::mk_word_item(n));
true
});
reader::tagged_docs(md, tag_meta_item_name_value, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let vd = reader::get_doc(meta_item_doc, tag_meta_item_value);
let n = token::intern_and_get_ident(nd.as_str_slice());
let v = token::intern_and_get_ident(vd.as_str_slice());
// FIXME (#623): Should be able to decode MetaNameValue variants,
// but currently the encoder just drops them
items.push(attr::mk_name_value_item_str(n, v));
true
});
reader::tagged_docs(md, tag_meta_item_list, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = token::intern_and_get_ident(nd.as_str_slice());
let subitems = get_meta_items(meta_item_doc);
items.push(attr::mk_list_item(n, subitems.into_iter().collect()));
true
});
return items;
}
fn get_attributes(md: rbml::Doc) -> Vec<ast::Attribute> {
let mut attrs: Vec<ast::Attribute> = Vec::new();
match reader::maybe_get_doc(md, tag_attributes) {
Some(attrs_d) => {
reader::tagged_docs(attrs_d, tag_attribute, |attr_doc| {
let is_sugared_doc = reader::doc_as_u8(
reader::get_doc(attr_doc, tag_attribute_is_sugared_doc)
) == 1;
let meta_items = get_meta_items(attr_doc);
// Currently it's only possible to have a single meta item on
// an attribute
assert_eq!(meta_items.len(), 1);
let meta_item = meta_items.into_iter().nth(0).unwrap();
attrs.push(
codemap::Spanned {
node: ast::Attribute_ {
id: attr::mk_attr_id(),
style: ast::AttrOuter,
value: meta_item,
is_sugared_doc: is_sugared_doc,
},
span: codemap::DUMMY_SP
});
true
});
}
None => ()
}
return attrs;
}
fn list_crate_attributes(md: rbml::Doc, hash: &Svh,
out: &mut io::Write) -> io::Result<()> {
try!(write!(out, "=Crate Attributes ({})=\n", *hash));
let r = get_attributes(md);
for attr in &r {
try!(write!(out, "{}\n", pprust::attribute_to_string(attr)));
}
write!(out, "\n\n")
}
pub fn get_crate_attributes(data: &[u8]) -> Vec<ast::Attribute> {
get_attributes(rbml::Doc::new(data))
}
#[derive(Clone)]
pub struct CrateDep {
pub cnum: ast::CrateNum,
pub name: String,
pub hash: Svh,
}
pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
let mut deps: Vec<CrateDep> = Vec::new();
let cratedoc = rbml::Doc::new(data);
let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
let mut crate_num = 1;
fn docstr(doc: rbml::Doc, tag_: usize) -> String {
let d = reader::get_doc(doc, tag_);
d.as_str_slice().to_string()
}
reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| {
let name = docstr(depdoc, tag_crate_dep_crate_name);
let hash = Svh::new(&docstr(depdoc, tag_crate_dep_hash));
deps.push(CrateDep {
cnum: crate_num,
name: name,
hash: hash,
});
crate_num += 1;
true
});
return deps;
}
fn list_crate_deps(data: &[u8], out: &mut io::Write) -> io::Result<()> {
try!(write!(out, "=External Dependencies=\n"));
for dep in &get_crate_deps(data) {
try!(write!(out, "{} {}-{}\n", dep.cnum, dep.name, dep.hash));
}
try!(write!(out, "\n"));
Ok(())
}
pub fn maybe_get_crate_hash(data: &[u8]) -> Option<Svh> {
let cratedoc = rbml::Doc::new(data);
reader::maybe_get_doc(cratedoc, tag_crate_hash).map(|doc| {
Svh::new(doc.as_str_slice())
})
}
pub fn get_crate_hash(data: &[u8]) -> Svh {
let cratedoc = rbml::Doc::new(data);
let hashdoc = reader::get_doc(cratedoc, tag_crate_hash);
Svh::new(hashdoc.as_str_slice())
}
pub fn maybe_get_crate_name(data: &[u8]) -> Option<String> {
let cratedoc = rbml::Doc::new(data);
reader::maybe_get_doc(cratedoc, tag_crate_crate_name).map(|doc| {
doc.as_str_slice().to_string()
})
}
pub fn get_crate_triple(data: &[u8]) -> Option<String> {
let cratedoc = rbml::Doc::new(data);
let triple_doc = reader::maybe_get_doc(cratedoc, tag_crate_triple);
triple_doc.map(|s| s.as_str().to_string())
}
pub fn get_crate_name(data: &[u8]) -> String {
maybe_get_crate_name(data).expect("no crate name in crate")
}
pub fn list_crate_metadata(bytes: &[u8], out: &mut io::Write) -> io::Result<()> {
let hash = get_crate_hash(bytes);
let md = rbml::Doc::new(bytes);
try!(list_crate_attributes(md, &hash, out));
list_crate_deps(bytes, out)
}
// Translates a def_id from an external crate to a def_id for the current
// compilation environment. We use this when trying to load types from
// external crates - if those types further refer to types in other crates
// then we must translate the crate number from that encoded in the external
// crate to the correct local crate number.
pub fn translate_def_id(cdata: Cmd, did: ast::DefId) -> ast::DefId {
if did.krate == ast::LOCAL_CRATE {
return ast::DefId { krate: cdata.cnum, node: did.node };
}
match cdata.cnum_map.get(&did.krate) {
Some(&n) => {
ast::DefId {
krate: n,
node: did.node,
}
}
None => panic!("didn't find a crate in the cnum_map")
}
}
pub fn each_impl<F>(cdata: Cmd, mut callback: F) where
F: FnMut(ast::DefId),
{
let impls_doc = reader::get_doc(rbml::Doc::new(cdata.data()), tag_impls);
let _ = reader::tagged_docs(impls_doc, tag_impls_impl, |impl_doc| {
callback(item_def_id(impl_doc, cdata));
true
});
}
pub fn each_implementation_for_type<F>(cdata: Cmd,
id: ast::NodeId,
mut callback: F)
where F: FnMut(ast::DefId),
{
let item_doc = lookup_item(id, cdata.data());
reader::tagged_docs(item_doc,
tag_items_data_item_inherent_impl,
|impl_doc| {
let implementation_def_id = item_def_id(impl_doc, cdata);
callback(implementation_def_id);
true
});
}
pub fn each_implementation_for_trait<F>(cdata: Cmd,
id: ast::NodeId,
mut callback: F) where
F: FnMut(ast::DefId),
{
let item_doc = lookup_item(id, cdata.data());
let _ = reader::tagged_docs(item_doc,
tag_items_data_item_extension_impl,
|impl_doc| {
let implementation_def_id = item_def_id(impl_doc, cdata);
callback(implementation_def_id);
true
});
}
pub fn get_trait_of_item(cdata: Cmd, id: ast::NodeId, tcx: &ty::ctxt)
-> Option<ast::DefId> {
let item_doc = lookup_item(id, cdata.data());
let parent_item_id = match item_parent_item(item_doc) {
None => return None,
Some(item_id) => item_id,
};
let parent_item_id = translate_def_id(cdata, parent_item_id);
let parent_item_doc = lookup_item(parent_item_id.node, cdata.data());
match item_family(parent_item_doc) {
Trait => Some(item_def_id(parent_item_doc, cdata)),
Impl | DefaultImpl => {
reader::maybe_get_doc(parent_item_doc, tag_item_trait_ref)
.map(|_| item_trait_ref(parent_item_doc, tcx, cdata).def_id)
}
_ => None
}
}
pub fn get_native_libraries(cdata: Cmd)
-> Vec<(cstore::NativeLibraryKind, String)> {
let libraries = reader::get_doc(rbml::Doc::new(cdata.data()),
tag_native_libraries);
let mut result = Vec::new();
reader::tagged_docs(libraries, tag_native_libraries_lib, |lib_doc| {
let kind_doc = reader::get_doc(lib_doc, tag_native_libraries_kind);
let name_doc = reader::get_doc(lib_doc, tag_native_libraries_name);
let kind: cstore::NativeLibraryKind =
FromPrimitive::from_u32(reader::doc_as_u32(kind_doc)).unwrap();
let name = name_doc.as_str().to_string();
result.push((kind, name));
true
});
return result;
}
pub fn get_plugin_registrar_fn(data: &[u8]) -> Option<ast::NodeId> {
reader::maybe_get_doc(rbml::Doc::new(data), tag_plugin_registrar_fn)
.map(|doc| FromPrimitive::from_u32(reader::doc_as_u32(doc)).unwrap())
}
pub fn each_exported_macro<F>(data: &[u8], intr: &IdentInterner, mut f: F) where
F: FnMut(ast::Name, Vec<ast::Attribute>, String) -> bool,
{
let macros = reader::get_doc(rbml::Doc::new(data), tag_macro_defs);
reader::tagged_docs(macros, tag_macro_def, |macro_doc| {
let name = item_name(intr, macro_doc);
let attrs = get_attributes(macro_doc);
let body = reader::get_doc(macro_doc, tag_macro_def_body);
f(name, attrs, body.as_str().to_string())
});
}
pub fn get_dylib_dependency_formats(cdata: Cmd)
-> Vec<(ast::CrateNum, cstore::LinkagePreference)>
{
let formats = reader::get_doc(rbml::Doc::new(cdata.data()),
tag_dylib_dependency_formats);
let mut result = Vec::new();
debug!("found dylib deps: {}", formats.as_str_slice());
for spec in formats.as_str_slice().split(',') {
if spec.len() == 0 { continue }
let cnum = spec.split(':').nth(0).unwrap();
let link = spec.split(':').nth(1).unwrap();
let cnum: ast::CrateNum = cnum.parse().unwrap();
let cnum = match cdata.cnum_map.get(&cnum) {
Some(&n) => n,
None => panic!("didn't find a crate in the cnum_map")
};
result.push((cnum, if link == "d" {
cstore::RequireDynamic
} else {
cstore::RequireStatic
}));
}
return result;
}
pub fn get_missing_lang_items(cdata: Cmd)
-> Vec<lang_items::LangItem>
{
let items = reader::get_doc(rbml::Doc::new(cdata.data()), tag_lang_items);
let mut result = Vec::new();
reader::tagged_docs(items, tag_lang_items_missing, |missing_docs| {
let item: lang_items::LangItem =
FromPrimitive::from_u32(reader::doc_as_u32(missing_docs)).unwrap();
result.push(item);
true
});
return result;
}
pub fn get_method_arg_names(cdata: Cmd, id: ast::NodeId) -> Vec<String> {
let mut ret = Vec::new();
let method_doc = lookup_item(id, cdata.data());
match reader::maybe_get_doc(method_doc, tag_method_argument_names) {
Some(args_doc) => {
reader::tagged_docs(args_doc, tag_method_argument_name, |name_doc| {
ret.push(name_doc.as_str_slice().to_string());
true
});
}
None => {}
}
return ret;
}
pub fn get_reachable_extern_fns(cdata: Cmd) -> Vec<ast::DefId> {
let mut ret = Vec::new();
let items = reader::get_doc(rbml::Doc::new(cdata.data()),
tag_reachable_extern_fns);
reader::tagged_docs(items, tag_reachable_extern_fn_id, |doc| {
ret.push(ast::DefId {
krate: cdata.cnum,
node: reader::doc_as_u32(doc),
});
true
});
return ret;
}
pub fn is_typedef(cdata: Cmd, id: ast::NodeId) -> bool {
let item_doc = lookup_item(id, cdata.data());
match item_family(item_doc) {
Type => true,
_ => false,
}
}
fn doc_generics<'tcx>(base_doc: rbml::Doc,
tcx: &ty::ctxt<'tcx>,
cdata: Cmd,
tag: usize)
-> ty::Generics<'tcx>
{
let doc = reader::get_doc(base_doc, tag);
let mut types = subst::VecPerParamSpace::empty();
reader::tagged_docs(doc, tag_type_param_def, |p| {
let bd = parse_type_param_def_data(
p.data, p.start, cdata.cnum, tcx,
|_, did| translate_def_id(cdata, did));
types.push(bd.space, bd);
true
});
let mut regions = subst::VecPerParamSpace::empty();
reader::tagged_docs(doc, tag_region_param_def, |rp_doc| {
let ident_str_doc = reader::get_doc(rp_doc,
tag_region_param_def_ident);
let name = item_name(&*token::get_ident_interner(), ident_str_doc);
let def_id_doc = reader::get_doc(rp_doc,
tag_region_param_def_def_id);
let def_id = reader::with_doc_data(def_id_doc, parse_def_id);
let def_id = translate_def_id(cdata, def_id);
let doc = reader::get_doc(rp_doc, tag_region_param_def_space);
let space = subst::ParamSpace::from_uint(reader::doc_as_u64(doc) as usize);
let doc = reader::get_doc(rp_doc, tag_region_param_def_index);
let index = reader::doc_as_u64(doc) as u32;
let mut bounds = Vec::new();
reader::tagged_docs(rp_doc, tag_items_data_region, |p| {
bounds.push(
parse_region_data(
p.data, cdata.cnum, p.start, tcx,
|_, did| translate_def_id(cdata, did)));
true
});
regions.push(space, ty::RegionParameterDef { name: name,
def_id: def_id,
space: space,
index: index,
bounds: bounds });
true
});
ty::Generics { types: types, regions: regions }
}
fn doc_predicates<'tcx>(base_doc: rbml::Doc,
tcx: &ty::ctxt<'tcx>,
cdata: Cmd,
tag: usize)
-> ty::GenericPredicates<'tcx>
{
let doc = reader::get_doc(base_doc, tag);
let mut predicates = subst::VecPerParamSpace::empty();
reader::tagged_docs(doc, tag_predicate, |predicate_doc| {
let space_doc = reader::get_doc(predicate_doc, tag_predicate_space);
let space = subst::ParamSpace::from_uint(reader::doc_as_u8(space_doc) as usize);
let data_doc = reader::get_doc(predicate_doc, tag_predicate_data);
let data = parse_predicate_data(data_doc.data, data_doc.start, cdata.cnum, tcx,
|_, did| translate_def_id(cdata, did));
predicates.push(space, data);
true
});
ty::GenericPredicates { predicates: predicates }
}
pub fn is_associated_type(cdata: Cmd, id: ast::NodeId) -> bool {
let items = reader::get_doc(rbml::Doc::new(cdata.data()), tag_items);
match maybe_find_item(id, items) {
None => false,
Some(item) => item_sort(item) == Some('t'),
}
}
pub fn is_defaulted_trait(cdata: Cmd, trait_id: ast::NodeId) -> bool {
let trait_doc = lookup_item(trait_id, cdata.data());
assert!(item_family(trait_doc) == Family::Trait);
let defaulted_doc = reader::get_doc(trait_doc, tag_defaulted_trait);
reader::doc_as_u8(defaulted_doc) != 0
}
pub fn is_default_impl(cdata: Cmd, impl_id: ast::NodeId) -> bool {
let impl_doc = lookup_item(impl_id, cdata.data());
item_family(impl_doc) == Family::DefaultImpl
}
pub fn get_imported_filemaps(metadata: &[u8]) -> Vec<codemap::FileMap> {
let crate_doc = rbml::Doc::new(metadata);
let cm_doc = reader::get_doc(crate_doc, tag_codemap);
let mut filemaps = vec![];
reader::tagged_docs(cm_doc, tag_codemap_filemap, |filemap_doc| {
let mut decoder = reader::Decoder::new(filemap_doc);
let filemap: codemap::FileMap = Decodable::decode(&mut decoder).unwrap();
filemaps.push(filemap);
true
});
return filemaps;
}<|fim▁end|> | let explicit_self_doc = reader::get_doc(item, tag_item_trait_method_explicit_self); |
<|file_name|>keyword_plan_campaign_error.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/ads/googleads/v1/errors/keyword_plan_campaign_error.proto
package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors"
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "google.golang.org/genproto/googleapis/api/annotations"
// Reference imports to suppress errors if they are not otherwise used.<|fim▁hole|>var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// Enum describing possible errors from applying a keyword plan campaign.
type KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError int32
const (
// Enum unspecified.
KeywordPlanCampaignErrorEnum_UNSPECIFIED KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 0
// The received error code is not known in this version.
KeywordPlanCampaignErrorEnum_UNKNOWN KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 1
// A keyword plan campaign name is missing, empty, longer than allowed limit
// or contains invalid chars.
KeywordPlanCampaignErrorEnum_INVALID_NAME KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 2
// A keyword plan campaign contains one or more untargetable languages.
KeywordPlanCampaignErrorEnum_INVALID_LANGUAGES KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 3
// A keyword plan campaign contains one or more invalid geo targets.
KeywordPlanCampaignErrorEnum_INVALID_GEOS KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 4
// The keyword plan campaign name is duplicate to an existing keyword plan
// campaign name or other keyword plan campaign name in the request.
KeywordPlanCampaignErrorEnum_DUPLICATE_NAME KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 5
// The number of geo targets in the keyword plan campaign exceeds limits.
KeywordPlanCampaignErrorEnum_MAX_GEOS_EXCEEDED KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 6
)
var KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError_name = map[int32]string{
0: "UNSPECIFIED",
1: "UNKNOWN",
2: "INVALID_NAME",
3: "INVALID_LANGUAGES",
4: "INVALID_GEOS",
5: "DUPLICATE_NAME",
6: "MAX_GEOS_EXCEEDED",
}
var KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError_value = map[string]int32{
"UNSPECIFIED": 0,
"UNKNOWN": 1,
"INVALID_NAME": 2,
"INVALID_LANGUAGES": 3,
"INVALID_GEOS": 4,
"DUPLICATE_NAME": 5,
"MAX_GEOS_EXCEEDED": 6,
}
func (x KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError) String() string {
return proto.EnumName(KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError_name, int32(x))
}
func (KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_keyword_plan_campaign_error_5e34e42f720bba2d, []int{0, 0}
}
// Container for enum describing possible errors from applying a keyword plan
// campaign.
type KeywordPlanCampaignErrorEnum struct {
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *KeywordPlanCampaignErrorEnum) Reset() { *m = KeywordPlanCampaignErrorEnum{} }
func (m *KeywordPlanCampaignErrorEnum) String() string { return proto.CompactTextString(m) }
func (*KeywordPlanCampaignErrorEnum) ProtoMessage() {}
func (*KeywordPlanCampaignErrorEnum) Descriptor() ([]byte, []int) {
return fileDescriptor_keyword_plan_campaign_error_5e34e42f720bba2d, []int{0}
}
func (m *KeywordPlanCampaignErrorEnum) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_KeywordPlanCampaignErrorEnum.Unmarshal(m, b)
}
func (m *KeywordPlanCampaignErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_KeywordPlanCampaignErrorEnum.Marshal(b, m, deterministic)
}
func (dst *KeywordPlanCampaignErrorEnum) XXX_Merge(src proto.Message) {
xxx_messageInfo_KeywordPlanCampaignErrorEnum.Merge(dst, src)
}
func (m *KeywordPlanCampaignErrorEnum) XXX_Size() int {
return xxx_messageInfo_KeywordPlanCampaignErrorEnum.Size(m)
}
func (m *KeywordPlanCampaignErrorEnum) XXX_DiscardUnknown() {
xxx_messageInfo_KeywordPlanCampaignErrorEnum.DiscardUnknown(m)
}
var xxx_messageInfo_KeywordPlanCampaignErrorEnum proto.InternalMessageInfo
func init() {
proto.RegisterType((*KeywordPlanCampaignErrorEnum)(nil), "google.ads.googleads.v1.errors.KeywordPlanCampaignErrorEnum")
proto.RegisterEnum("google.ads.googleads.v1.errors.KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError", KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError_name, KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError_value)
}
func init() {
proto.RegisterFile("google/ads/googleads/v1/errors/keyword_plan_campaign_error.proto", fileDescriptor_keyword_plan_campaign_error_5e34e42f720bba2d)
}
var fileDescriptor_keyword_plan_campaign_error_5e34e42f720bba2d = []byte{
// 367 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xc1, 0x6a, 0xa3, 0x40,
0x1c, 0xc6, 0x57, 0xb3, 0x9b, 0x85, 0xc9, 0xb2, 0xeb, 0x0a, 0x85, 0x52, 0xd2, 0x1c, 0x7c, 0x80,
0x11, 0xe9, 0x6d, 0x7a, 0xe9, 0x44, 0xa7, 0x22, 0x49, 0x8c, 0x90, 0x6a, 0x43, 0x11, 0x64, 0x1a,
0x45, 0xa4, 0x66, 0x46, 0x9c, 0x34, 0xa5, 0x2f, 0xd3, 0x43, 0x8f, 0x7d, 0x89, 0xde, 0xfb, 0x28,
0x7d, 0x82, 0x1e, 0x8b, 0x4e, 0x0c, 0xbd, 0xa4, 0x27, 0x3f, 0xfe, 0xfe, 0xbe, 0xef, 0xd3, 0xff,
0x1f, 0x5c, 0xe4, 0x9c, 0xe7, 0x65, 0x66, 0xd2, 0x54, 0x98, 0x52, 0x36, 0x6a, 0x6b, 0x99, 0x59,
0x5d, 0xf3, 0x5a, 0x98, 0x77, 0xd9, 0xe3, 0x03, 0xaf, 0xd3, 0xa4, 0x2a, 0x29, 0x4b, 0x56, 0x74,
0x5d, 0xd1, 0x22, 0x67, 0x49, 0xfb, 0x12, 0x56, 0x35, 0xdf, 0x70, 0x7d, 0x24, 0x6d, 0x90, 0xa6,
0x02, 0xee, 0x13, 0xe0, 0xd6, 0x82, 0x32, 0xe1, 0x64, 0xd8, 0x35, 0x54, 0x85, 0x49, 0x19, 0xe3,
0x1b, 0xba, 0x29, 0x38, 0x13, 0xd2, 0x6d, 0xbc, 0x2a, 0x60, 0x38, 0x91, 0x1d, 0x41, 0x49, 0x99,
0xbd, 0x6b, 0x20, 0x8d, 0x97, 0xb0, 0xfb, 0xb5, 0xf1, 0xa4, 0x80, 0xe3, 0x43, 0x80, 0xfe, 0x0f,
0x0c, 0x42, 0x7f, 0x11, 0x10, 0xdb, 0xbb, 0xf4, 0x88, 0xa3, 0xfd, 0xd0, 0x07, 0xe0, 0x77, 0xe8,
0x4f, 0xfc, 0xf9, 0xb5, 0xaf, 0x29, 0xba, 0x06, 0xfe, 0x78, 0x7e, 0x84, 0xa7, 0x9e, 0x93, 0xf8,
0x78, 0x46, 0x34, 0x55, 0x3f, 0x02, 0xff, 0xbb, 0xc9, 0x14, 0xfb, 0x6e, 0x88, 0x5d, 0xb2, 0xd0,
0x7a, 0x5f, 0x41, 0x97, 0xcc, 0x17, 0xda, 0x4f, 0x5d, 0x07, 0x7f, 0x9d, 0x30, 0x98, 0x7a, 0x36,
0xbe, 0x22, 0xd2, 0xfc, 0xab, 0x31, 0xcf, 0xf0, 0xb2, 0x25, 0x12, 0xb2, 0xb4, 0x09, 0x71, 0x88,
0xa3, 0xf5, 0xc7, 0x1f, 0x0a, 0x30, 0x56, 0x7c, 0x0d, 0xbf, 0x5f, 0xc3, 0xf8, 0xf4, 0xd0, 0x4f,
0x04, 0xcd, 0x1e, 0x02, 0xe5, 0xc6, 0xd9, 0x05, 0xe4, 0xbc, 0xa4, 0x2c, 0x87, 0xbc, 0xce, 0xcd,
0x3c, 0x63, 0xed, 0x96, 0xba, 0xcb, 0x54, 0x85, 0x38, 0x74, 0xa8, 0x73, 0xf9, 0x78, 0x56, 0x7b,
0x2e, 0xc6, 0x2f, 0xea, 0xc8, 0x95, 0x61, 0x38, 0x15, 0x50, 0xca, 0x46, 0x45, 0x16, 0x6c, 0x2b,
0xc5, 0x5b, 0x07, 0xc4, 0x38, 0x15, 0xf1, 0x1e, 0x88, 0x23, 0x2b, 0x96, 0xc0, 0xbb, 0x6a, 0xc8,
0x29, 0x42, 0x38, 0x15, 0x08, 0xed, 0x11, 0x84, 0x22, 0x0b, 0x21, 0x09, 0xdd, 0xf6, 0xdb, 0xaf,
0x3b, 0xfb, 0x0c, 0x00, 0x00, 0xff, 0xff, 0xd0, 0x4c, 0xf0, 0x1b, 0x45, 0x02, 0x00, 0x00,
}<|fim▁end|> | var _ = proto.Marshal |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
# Copyright 2009-2019 Rumma & Ko Ltd
# License: GNU Affero General Public License v3 (see file COPYING for details)
from django.conf import settings
from django.utils.encoding import force_str
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.apps import apps ; get_models = apps.get_models
from lino.api import dd, rt
from lino.core import actions
from lino.core.utils import full_model_name
from lino.core.roles import SiteStaff
from lino.modlib.printing.choicelists import BuildMethods
from lino.modlib.checkdata.choicelists import Checker
# import them here to have them on rt.models.system:
from .choicelists import YesNo, Genders, PeriodEvents
from .mixins import Lockable
class BuildSiteCache(dd.Action):
label = _("Rebuild site cache")
url_action_name = "buildjs"
def run_from_ui(self, ar):
settings.SITE.kernel.default_renderer.build_site_cache(True)
return ar.success(
"""\
Seems that it worked. Refresh your browser.
<br>
Note that other users might experience side effects because
of the unexpected .js update, but there are no known problems so far.
Please report any anomalies.""",<|fim▁hole|>
class SiteConfigManager(models.Manager):
def get(self, *args, **kwargs):
return settings.SITE.site_config
class SiteConfig(dd.Model):
class Meta(object):
abstract = dd.is_abstract_model(__name__, 'SiteConfig')
verbose_name = _("Site configuration")
objects = SiteConfigManager()
real_objects = models.Manager()
default_build_method = BuildMethods.field(
verbose_name=_("Default build method"),
blank=True, null=True)
simulate_today = models.DateField(
_("Simulated date"), blank=True, null=True)
site_company = dd.ForeignKey(
"contacts.Company",
blank=True, null=True,
verbose_name=_("Site owner"),
related_name='site_company_sites')
def __str__(self):
return force_str(_("Site Parameters"))
def update(self, **kw):
"""
Set some field of the SiteConfig object and store it to the
database.
"""
# print("20180502 update({})".format(kw))
for k, v in kw.items():
if not hasattr(self, k):
raise Exception("SiteConfig has no attribute %r" % k)
setattr(self, k, v)
self.full_clean()
self.save()
def save(self, *args, **kw):
# print("20180502 save() {}".format(dd.obj2str(self, True)))
super(SiteConfig, self).save(*args, **kw)
settings.SITE.clear_site_config()
def my_handler(sender, **kw):
# print("20180502 {} my_handler calls clear_site_config()".format(
# settings.SITE))
settings.SITE.clear_site_config()
#~ kw.update(sender=sender)
# dd.database_connected.send(sender)
#~ dd.database_connected.send(sender,**kw)
from django.test.signals import setting_changed
from lino.core.signals import testcase_setup
setting_changed.connect(my_handler)
testcase_setup.connect(my_handler)
dd.connection_created.connect(my_handler)
models.signals.post_migrate.connect(my_handler)
class SiteConfigs(dd.Table):
model = 'system.SiteConfig'
required_roles = dd.login_required(SiteStaff)
# default_action = actions.ShowDetail()
#~ has_navigator = False
hide_navigator = True
allow_delete = False
# hide_top_toolbar = True
#~ can_delete = perms.never
detail_layout = dd.DetailLayout("""
default_build_method
# lino.ModelsBySite
""", window_size=(60, 'auto'))
@classmethod
def get_default_action(cls):
return cls.detail_action
do_build = BuildSiteCache()
# if settings.SITE.user_model == 'users.User':
# dd.inject_field(settings.SITE.user_model,
# 'user_type', UserTypes.field())
# dd.inject_field(settings.SITE.user_model, 'language', dd.LanguageField())
class BleachChecker(Checker):
verbose_name = _("Find unbleached html content")
model = dd.Model
def get_checkable_models(self):
for m in super(BleachChecker, self).get_checkable_models():
if len(m._bleached_fields):
yield m
def get_checkdata_problems(self, obj, fix=False):
t = tuple(obj.fields_to_bleach())
if len(t):
fldnames = ', '.join([f.name for f, old, new in t])
yield (True, _("Fields {} have unbleached content.").format(fldnames))
if fix:
obj.before_ui_save(None, None)
obj.full_clean()
obj.save()
BleachChecker.activate()<|fim▁end|> | alert=_("Success"))
|
<|file_name|>bitcoin_te.ts<|end_file_name|><|fim▁begin|><TS language="te" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>చిరునామా లేదా లేబుల్ సవరించడానికి రైట్-క్లిక్ చేయండి</translation>
</message>
<message>
<source>Create a new address</source>
<translation>క్రొత్త చిరునామా సృష్టించండి</translation>
</message>
<message>
<source>&New</source>
<translation>&క్రొత్త</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>ప్రస్తుతం ఎంచుకున్న చిరునామాను సిస్టం క్లిప్ బోర్డుకు కాపీ చేయండి</translation>
</message>
<message>
<source>&Copy</source>
<translation>&కాపి</translation>
</message>
<message>
<source>C&lose</source>
<translation>C&కోల్పోవు</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>ప్రస్తుతం ఎంచుకున్న చిరునామా ను జాబితా నుండి తీసివేయండి</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>పంపించే చిరునామాలు</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>అందుకునే చిరునామాలు</translation>
</message>
<message>
<source>These are your Chancoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>ఇవి మీరు పంపే చెల్లింపుల బిట్కాయిన్ చిరునామాలు. నాణేలు పంపే ముందు ప్రతిసారి అందుకునే చిరునామా మరియు చెల్లింపు మొత్తం సరిచూసుకోండి.</translation>
</message>
<message>
<source>These are your Chancoin addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>ఇవి మీరు అందుకునే చెల్లింపుల బిట్కాయిన్ చిరునామాలు. ప్రతీ లావాదేవీకి క్రొత్త అందుకునే చిరునామా వాడటం మంచిది.</translation>
</message>
<message>
<source>Export Address List</source>
<translation>చిరునామా జాబితాను ఎగుమతి చేయండి</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>ఎగుమతి విఫలమయ్యింది</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>ఉల్లాకు</translation>
</message><|fim▁hole|> <message>
<source>Passphrase Dialog</source>
<translation>సంకేతపదము డైలాగ్</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>సంకేతపదము చేర్చండి</translation>
</message>
<message>
<source>New passphrase</source>
<translation>క్రొత్త సంకేతపదము</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>క్రొత్త సంకేతపదము మరలా ఇవ్వండి</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>జోలెకు క్రొత్త సంకేతపదము ఇవ్వండి.<br/> <b>పది లేదా ఎక్కువ యాదృచ్ఛిక అక్షరాలు</b>, లేక <b>ఎనిమిది కంటే ఎక్కువ పదాలు</b> కలిగి ఉన్న సంకేతపదము దయచేసి వాడండి.</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR CHANCOINS</b>!</source>
<translation>హెచ్చరిక: మీ జోలెని సంకేతపరిచి మీ సంకేతపదము కోల్పోతే, <b>మీ బిట్కాయిన్లు అన్నీ కోల్పోతారు</b></translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>మీరు ఖచ్చితంగా మీ జోలెని సంకేతపరచాలని కోరుకుంటున్నారా?</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>జోలె సంకేతపరబడింది</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>జోలె సంకేతపరచడం విఫలమయ్యింది</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Error</source>
<translation>లోపం</translation>
</message>
<message>
<source>Warning</source>
<translation>హెచ్చరిక</translation>
</message>
<message>
<source>Information</source>
<translation>వర్తమానము</translation>
</message>
<message>
<source>Up to date</source>
<translation>తాజాగా ఉంది</translation>
</message>
<message>
<source>Connecting to peers...</source>
<translation>తోటివాళ్లతో అనుసంధానం కుదురుస్తుంది</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Coin Selection</source>
<translation>నాణెం ఎంపిక</translation>
</message>
<message>
<source>Quantity:</source>
<translation>పరిమాణం</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
</context>
<context>
<name>FreespaceChecker</name>
</context>
<context>
<name>HelpMessageDialog</name>
</context>
<context>
<name>Intro</name>
<message>
<source>Error</source>
<translation>లోపం</translation>
</message>
</context>
<context>
<name>ModalOverlay</name>
</context>
<context>
<name>OpenURIDialog</name>
</context>
<context>
<name>OptionsDialog</name>
</context>
<context>
<name>OverviewPage</name>
</context>
<context>
<name>PaymentServer</name>
</context>
<context>
<name>PeerTableModel</name>
</context>
<context>
<name>QObject</name>
</context>
<context>
<name>QObject::QObject</name>
</context>
<context>
<name>QRImageWidget</name>
</context>
<context>
<name>RPCConsole</name>
</context>
<context>
<name>ReceiveCoinsDialog</name>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>Label</source>
<translation>ఉల్లాకు</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Label</source>
<translation>ఉల్లాకు</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Quantity:</source>
<translation>పరిమాణం</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
</context>
<context>
<name>SendConfirmationDialog</name>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
</context>
<context>
<name>SplashScreen</name>
</context>
<context>
<name>TrafficGraphWidget</name>
</context>
<context>
<name>TransactionDesc</name>
</context>
<context>
<name>TransactionDescDialog</name>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Label</source>
<translation>ఉల్లాకు</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>Label</source>
<translation>ఉల్లాకు</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>ఎగుమతి విఫలమయ్యింది</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
</context>
<context>
<name>WalletView</name>
</context>
<context>
<name>bitcoin-core</name>
<message>
<source>Information</source>
<translation>వర్తమానము</translation>
</message>
<message>
<source>Warning</source>
<translation>హెచ్చరిక</translation>
</message>
<message>
<source>Error</source>
<translation>లోపం</translation>
</message>
</context>
</TS><|fim▁end|> | </context>
<context>
<name>AskPassphraseDialog</name> |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>#####################################################################################
#
# Copyright (C) Tavendo GmbH
#
# Unless a separate license agreement exists between you and Tavendo GmbH (e.g. you
# have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#<|fim▁hole|>from __future__ import absolute_import, division, print_function
from six import StringIO as NativeStringIO
from twisted.internet.selectreactor import SelectReactor
from crossbar.test import TestCase
from crossbar.controller import cli
from crossbar import _logging
from weakref import WeakKeyDictionary
import os
import sys
import platform
import twisted
class CLITestBase(TestCase):
# the tests here a mostly bogus, as they test for log message content,
# not actual functionality
skip = True
def setUp(self):
self._subprocess_timeout = 15
if platform.python_implementation() == 'PyPy':
self._subprocess_timeout = 30
self.stderr = NativeStringIO()
self.stdout = NativeStringIO()
self.patch(_logging, "_stderr", self.stderr)
self.patch(_logging, "_stdout", self.stdout)
self.patch(_logging, "_loggers", WeakKeyDictionary())
self.patch(_logging, "_loglevel", "info")
return super(CLITestBase, self).setUp()
def tearDown(self):
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
class VersionTests(CLITestBase):
"""
Tests for `crossbar version`.
"""
def test_basic(self):
"""
Just running `crossbar version` gets us the versions.
"""
reactor = SelectReactor()
cli.run("crossbar",
["version"],
reactor=reactor)
self.assertIn("Crossbar.io", self.stdout.getvalue())
self.assertIn(
("Twisted : \x1b[33m\x1b[1m" + twisted.version.short() + "-SelectReactor"),
self.stdout.getvalue())
def test_debug(self):
"""
Running `crossbar version` will give us the versions, plus the
locations of some of them.
"""
reactor = SelectReactor()
cli.run("crossbar",
["version", "--loglevel=debug"],
reactor=reactor)
self.assertIn("Crossbar.io", self.stdout.getvalue())
self.assertIn(
("Twisted : \x1b[33m\x1b[1m" + twisted.version.short() + "-SelectReactor"),
self.stdout.getvalue())
self.assertIn(
("[twisted.internet.selectreactor.SelectReactor]"),
self.stdout.getvalue())
class StartTests(CLITestBase):
"""
Tests for `crossbar start`.
"""
def setUp(self):
CLITestBase.setUp(self)
# Set up the configuration directories
self.cbdir = os.path.abspath(self.mktemp())
os.mkdir(self.cbdir)
self.config = os.path.abspath(os.path.join(self.cbdir, "config.json"))
def test_start(self):
"""
A basic start, that doesn't actually enter the reactor.
"""
with open(self.config, "w") as f:
f.write("""{"controller": {}}""")
reactor = SelectReactor()
reactor.run = lambda: False
cli.run("crossbar",
["start", "--cbdir={}".format(self.cbdir),
"--logformat=syslogd"],
reactor=reactor)
self.assertIn("Entering reactor event loop", self.stdout.getvalue())
def test_configValidationFailure(self):
"""
Running `crossbar start` with an invalid config will print a warning.
"""
with open(self.config, "w") as f:
f.write("")
reactor = SelectReactor()
with self.assertRaises(SystemExit) as e:
cli.run("crossbar",
["start", "--cbdir={}".format(self.cbdir),
"--logformat=syslogd"],
reactor=reactor)
# Exit with code 1
self.assertEqual(e.exception.args[0], 1)
# The proper warning should be emitted
self.assertIn("*** Configuration validation failed ***",
self.stderr.getvalue())
self.assertIn(("configuration file does not seem to be proper JSON "),
self.stderr.getvalue())
def test_fileLogging(self):
"""
Running `crossbar start --logtofile` will log to cbdir/node.log.
"""
with open(self.config, "w") as f:
f.write("""{"controller": {}}""")
reactor = SelectReactor()
reactor.run = lambda: None
cli.run("crossbar",
["start", "--cbdir={}".format(self.cbdir), "--logtofile"],
reactor=reactor)
with open(os.path.join(self.cbdir, "node.log"), "r") as f:
logFile = f.read()
self.assertIn("Entering reactor event loop", logFile)
self.assertEqual("", self.stderr.getvalue())
self.assertEqual("", self.stdout.getvalue())
def test_stalePID(self):
with open(self.config, "w") as f:
f.write("""{"controller": {}}""")
with open(os.path.join(self.cbdir, "node.pid"), "w") as f:
f.write("""{"pid": 9999999}""")
reactor = SelectReactor()
reactor.run = lambda: None
cli.run("crossbar",
["start", "--cbdir={}".format(self.cbdir),
"--logformat=syslogd"],
reactor=reactor)
self.assertIn(
("Stale Crossbar.io PID file (pointing to non-existing process "
"with PID {pid}) {fp} removed").format(
fp=os.path.abspath(os.path.join(self.cbdir, "node.pid")),
pid=9999999),
self.stdout.getvalue())
class ConvertTests(CLITestBase):
"""
Tests for `crossbar convert`.
"""
def test_unknown_format(self):
"""
Running `crossbar convert` with an unknown config file produces an
error.
"""
cbdir = self.mktemp()
os.makedirs(cbdir)
config_file = os.path.join(cbdir, "config.blah")
open(config_file, 'wb').close()
with self.assertRaises(SystemExit) as e:
cli.run("crossbar",
["convert", "--config={}".format(config_file)])
self.assertEqual(e.exception.args[0], 1)
self.assertIn(
("Error: configuration file needs to be '.json' or '.yaml'."),
self.stdout.getvalue())
def test_yaml_to_json(self):
"""
Running `crossbar convert` with a YAML config file will convert it to
JSON.
"""
cbdir = self.mktemp()
os.makedirs(cbdir)
config_file = os.path.join(cbdir, "config.yaml")
with open(config_file, 'w') as f:
f.write("""
foo:
bar: spam
baz:
foo: cat
""")
cli.run("crossbar",
["convert", "--config={}".format(config_file)])
self.assertIn(
("JSON formatted configuration written"),
self.stdout.getvalue())
with open(os.path.join(cbdir, "config.json"), 'r') as f:
self.assertEqual(f.read(), """{
"foo": {
"bar": "spam",
"baz": {
"foo": "cat"
}
}
}""")
def test_invalid_yaml_to_json(self):
"""
Running `crossbar convert` with an invalid YAML config file will error
saying it is invalid.
"""
cbdir = self.mktemp()
os.makedirs(cbdir)
config_file = os.path.join(cbdir, "config.yaml")
with open(config_file, 'w') as f:
f.write("""{{{{{{{{""")
with self.assertRaises(SystemExit) as e:
cli.run("crossbar",
["convert", "--config={}".format(config_file)])
self.assertEqual(e.exception.args[0], 1)
self.assertIn(
("not seem to be proper YAML"),
self.stdout.getvalue())
def test_json_to_yaml(self):
"""
Running `crossbar convert` with a YAML config file will convert it to
JSON.
"""
cbdir = self.mktemp()
os.makedirs(cbdir)
config_file = os.path.join(cbdir, "config.json")
with open(config_file, 'w') as f:
f.write("""{
"foo": {
"bar": "spam",
"baz": {
"foo": "cat"
}
}
}""")
cli.run("crossbar",
["convert", "--config={}".format(config_file)])
self.assertIn(
("YAML formatted configuration written"),
self.stdout.getvalue())
with open(os.path.join(cbdir, "config.yaml"), 'r') as f:
self.assertEqual(f.read(), """foo:
bar: spam
baz:
foo: cat
""")
def test_invalid_json_to_yaml(self):
"""
Running `crossbar convert` with an invalid JSON config file will error
saying it is invalid.
"""
cbdir = self.mktemp()
os.makedirs(cbdir)
config_file = os.path.join(cbdir, "config.json")
with open(config_file, 'w') as f:
f.write("""{{{{{{{{""")
with self.assertRaises(SystemExit) as e:
cli.run("crossbar",
["convert", "--config={}".format(config_file)])
self.assertEqual(e.exception.args[0], 1)
self.assertIn(
("not seem to be proper JSON"),
self.stdout.getvalue())<|fim▁end|> | #####################################################################################
|
<|file_name|>SourceCodeResource.java<|end_file_name|><|fim▁begin|>/**
* jetbrick-template
* http://subchen.github.io/jetbrick-template/
*
* Copyright 2010-2014 Guoqiang Chen. All rights reserved.
* Email: subchen@gmail.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.<|fim▁hole|> *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrick.template.resource;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.atomic.AtomicLong;
import jetbrick.template.utils.UnsafeByteArrayInputStream;
/**
* 以源码形式存在的资源.
*
* @since 1.1.3
* @author Guoqiang Chen
*/
public class SourceCodeResource extends Resource {
private static final String ENCODING = "utf-8";
private static AtomicLong index = new AtomicLong();
private final String source;
public SourceCodeResource(String source) {
super("/unknown/file." + index.incrementAndGet(), ENCODING);
this.source = source;
}
@Override
public String getAbsolutePath() {
return "(unknown)";
}
@Override
public long lastModified() {
return 0;
}
@Override
public InputStream getInputStream() throws IOException {
return new UnsafeByteArrayInputStream(source.getBytes(ENCODING));
}
@Override
public char[] getSource() {
return source.toCharArray();
}
@Override
public char[] getSource(String encoding) {
return source.toCharArray();
}
}<|fim▁end|> | * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>0012_auto_20160212_1210.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('characters', '0011_auto_20160212_1144'),
]
operations = [
migrations.CreateModel(
name='CharacterSpells',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('character', models.ForeignKey(verbose_name='Karakt\xe4r', to='characters.Character')),
],
options={
'verbose_name': 'Karakt\xe4rers magi',
'verbose_name_plural': 'Karakt\xe4rers magi',
},
),
migrations.AlterModelOptions(
name='spellextras',
options={'verbose_name': 'Magi extra', 'verbose_name_plural': 'Magi extra'},
),
migrations.AlterModelOptions(
name='spellinfo',
options={'verbose_name': 'Magi information', 'verbose_name_plural': 'Magi information'},
),
migrations.AddField(
model_name='spellinfo',
name='name',
field=models.CharField(default='Magins namn', max_length=256, verbose_name='Namn'),
),
migrations.AlterField(<|fim▁hole|> field=models.ForeignKey(verbose_name='Tillh\xf6righet', to='characters.SpellParent'),
),
migrations.AddField(
model_name='characterspells',
name='spells',
field=models.ManyToManyField(to='characters.SpellInfo', verbose_name='Magier och besv\xe4rjelser'),
),
]<|fim▁end|> | model_name='spellinfo',
name='parent', |
<|file_name|>server.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap;
use std::cmp::max;
use iron::{Iron, Chain};
use router::Router;
use persistent::State;
use chrono::{Duration, NaiveDate, NaiveDateTime};
use serde_json::builder::ObjectBuilder;
use serde_json::Value;
use serde;
use SERVER_ADDRESS;
use errors::*;
use load::{SummarizedWeek, Kind, TestRun, InputData, Timing};
use util::{start_idx, end_idx};
const JS_DATE_FORMAT: &'static str = "%Y-%m-%dT%H:%M:%S.000Z";
// Boilerplate for parsing and responding to both GET and POST requests.
mod handler {
use std::ops::Deref;
use std::io::Read;
use serde;
use serde_json::{self, Value};
use iron::prelude::*;
use iron::status;
use persistent::State;
use load::InputData;
use errors::*;
fn respond(res: Result<Value>) -> IronResult<Response> {
use iron::headers::{ContentType, AccessControlAllowOrigin};
use iron::mime::{Mime, TopLevel, SubLevel};
use iron::modifiers::Header;
let mut resp = match res {
Ok(json) => {
let mut resp = Response::with((status::Ok, serde_json::to_string(&json).unwrap()));
resp.set_mut(Header(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![]))));
resp
},
Err(err) => {
// TODO: Print to stderr
println!("An error occurred: {:?}", err);
Response::with((status::InternalServerError, err.to_string()))
}
};
resp.set_mut(Header(AccessControlAllowOrigin::Any));
Ok(resp)
}
pub trait PostHandler: Sized {
fn handle(_body: Self, _data: &InputData) -> Result<Value>;
fn handler(req: &mut Request) -> IronResult<Response>
where Self: serde::Deserialize {
let rwlock = req.get::<State<InputData>>().unwrap();
let data = rwlock.read().unwrap();
let mut buf = String::new();
let res = match req.body.read_to_string(&mut buf).unwrap() {
0 => Err("POST handler with 0 length body.".into()),
_ => Self::handle(serde_json::from_str(&buf).unwrap(), data.deref())
};
respond(res)
}
}
pub trait GetHandler: Sized {
fn handle(_data: &InputData) -> Result<Value>;
fn handler(req: &mut Request) -> IronResult<Response> {
let rwlock = req.get::<State<InputData>>().unwrap();
let data = rwlock.read().unwrap();
respond(Self::handle(data.deref()))
}
}
}
use self::handler::{PostHandler, GetHandler};
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
enum GroupBy {
Crate,
Phase,
}
impl serde::Deserialize for GroupBy {
fn deserialize<D>(deserializer: &mut D) -> ::std::result::Result<GroupBy, D::Error>
where D: serde::de::Deserializer
{
struct GroupByVisitor;
impl serde::de::Visitor for GroupByVisitor {
type Value = GroupBy;
fn visit_str<E>(&mut self, value: &str) -> ::std::result::Result<GroupBy, E>
where E: serde::de::Error
{
match value {
"crate" => Ok(GroupBy::Crate),
"phase" => Ok(GroupBy::Phase),
_ => {
let msg = format!("unexpected {} value for group by", value);
Err(serde::de::Error::custom(msg))
}
}
}
}
deserializer.deserialize(GroupByVisitor)
}
}
enum OptionalDate {
Date(NaiveDateTime),
CouldNotParse(String),
}
impl OptionalDate {
fn as_start(&self, data: &InputData) -> NaiveDateTime {
// Handle missing start by returning 30 days before end.
if let OptionalDate::Date(date) = *self {
date
} else {
let end = self.as_end(data);
let start = (end - Duration::days(30)).timestamp();
NaiveDateTime::from_timestamp(start, 0)
}
}
fn as_end(&self, data: &InputData) -> NaiveDateTime {
// Handle missing end by using the last available date.
if let OptionalDate::Date(date) = *self {
date
} else {
data.last_date
}
}
}
impl serde::Deserialize for OptionalDate {
fn deserialize<D>(deserializer: &mut D) -> ::std::result::Result<OptionalDate, D::Error>
where D: serde::de::Deserializer
{
struct DateVisitor;
impl serde::de::Visitor for DateVisitor {
type Value = OptionalDate;
fn visit_str<E>(&mut self, value: &str) -> ::std::result::Result<OptionalDate, E>
where E: serde::de::Error
{
match NaiveDate::parse_from_str(value, "%a %b %d %Y") {
Ok(date) => Ok(OptionalDate::Date(date.and_hms(0, 0, 0))),
Err(err) => {
if !value.is_empty() {
println!("bad date {:?}: {:?}", value, err);
}
Ok(OptionalDate::CouldNotParse(value.to_string()))
}
}
}
}
deserializer.deserialize(DateVisitor)
}
}
struct Summary;
impl GetHandler for Summary {
fn handle(data: &InputData) -> Result<Value> {
let dates = data.summary_rustc.summary.iter()
.map(|s| s.date.format(JS_DATE_FORMAT).to_string())
.collect::<Vec<_>>();
fn summarize(benchmark: &SummarizedWeek, rustc: &SummarizedWeek) -> String {
let mut sum = 0.0;
let mut count = 0;
for krate in benchmark.by_crate.values() {
if krate.contains_key("total") {
sum += krate["total"];
count += 1;
}
}
if rustc.by_crate["total"].contains_key("total") {
sum += 2.0 * rustc.by_crate["total"]["total"];
count += 2;
}
format!("{:.1}", sum / (count as f64))
}
// overall number for each week
let summaries = data.summary_benchmarks.summary.iter().enumerate().map(|(i, s)| {
summarize(s, &data.summary_rustc.summary[i])
}).collect::<Vec<_>>();
fn breakdown(benchmark: &SummarizedWeek, rustc: &SummarizedWeek) -> Value {
let mut per_bench = ObjectBuilder::new();
for (crate_name, krate) in &benchmark.by_crate {
let val = krate.get("total").cloned().unwrap_or(0.0);
per_bench = per_bench.insert(crate_name.as_str(), format!("{:.1}", val));
}
let bootstrap = if rustc.by_crate["total"].contains_key("total") {
rustc.by_crate["total"]["total"]
} else {
0.0
};
per_bench = per_bench.insert("bootstrap", format!("{:.1}", bootstrap));
per_bench.build()
}
// per benchmark, per week
let breakdown_data = data.summary_benchmarks.summary.iter().enumerate().map(|(i, s)| {
breakdown(s, &data.summary_rustc.summary[i])
}).collect::<Vec<Value>>();
Ok(ObjectBuilder::new()
.insert("total_summary", summarize(&data.summary_benchmarks.total, &data.summary_rustc.total))
.insert("total_breakdown", breakdown(&data.summary_benchmarks.total, &data.summary_rustc.total))
.insert("breakdown", breakdown_data)
.insert("summaries", summaries)
.insert("dates", dates)
.build())
}
}
struct Info;
impl GetHandler for Info {
fn handle(data: &InputData) -> Result<Value> {
Ok(ObjectBuilder::new()
.insert("crates", &data.crate_list)
.insert("phases", &data.phase_list)
.insert("benchmarks", &data.benchmarks)
.build())
}
}
fn get_data_for_date(day: &TestRun, crate_names: &[String], phases: &[String], group_by: GroupBy) -> Value {
#[derive(Serialize)]
struct Recording { // TODO better name (can't use Timing since we don't have a percent...)
time: f64,
rss: u64,
}
impl Recording {
fn new() -> Recording {
Recording {
time: 0.0,
rss: 0,
}
}
fn record(&mut self, phase: Option<&Timing>) {
if let Some(phase) = phase {
self.time += phase.time;
self.rss = max(self.rss, phase.rss.unwrap());
}
}
}
let crates = crate_names.into_iter().filter_map(|crate_name| {
day.by_crate.get(crate_name).map(|krate| {
(crate_name, krate)
})
}).collect::<Vec<_>>();
let mut data = HashMap::new();
for phase_name in phases {
for &(crate_name, krate) in &crates {
let entry = match group_by {
GroupBy::Crate => data.entry(crate_name),
GroupBy::Phase => data.entry(phase_name),
};
entry.or_insert(Recording::new()).record(krate.get(phase_name));
}
}
ObjectBuilder::new()
.insert("date", day.date.format(JS_DATE_FORMAT).to_string())
.insert("commit", day.commit.clone())
.insert("data", data)
.build()
}
#[derive(Deserialize)]
struct Data { // XXX naming
#[serde(rename(deserialize="start"))]
start_date: OptionalDate,
#[serde(rename(deserialize="end"))]
end_date: OptionalDate,
kind: Kind,
group_by: GroupBy,
crates: Vec<String>,
phases: Vec<String>,
}
impl PostHandler for Data {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let mut result = Vec::new();
let mut first_idx = None;
let mut last_idx = 0;
// Iterate over date range.
let start_idx = start_idx(data.by_kind(body.kind), body.start_date.as_start(data));
let end_idx = end_idx(data.by_kind(body.kind), body.end_date.as_end(data));
for i in start_idx..(end_idx + 1) {
let today_data = get_data_for_date(
&data.by_kind(body.kind)[i],
&body.crates,
&body.phases,
body.group_by
);
if !today_data.find("data").unwrap().as_object().unwrap().is_empty() {
last_idx = i - start_idx;
if first_idx == None {
first_idx = Some(i - start_idx);
}
}
result.push(today_data);
}
// Trim the data
let result = result.drain(first_idx.unwrap()..(last_idx+1)).collect::<Vec<_>>();
Ok(Value::Array(result))
}
}
#[derive(Deserialize)]
struct Tabular { // XXX naming
kind: Kind,
date: OptionalDate,
}
impl PostHandler for Tabular {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let kind_data = data.by_kind(body.kind);
let day = &kind_data[end_idx(kind_data, body.date.as_end(data))];
Ok(ObjectBuilder::new()
.insert("date", day.date.format(JS_DATE_FORMAT).to_string())
.insert("commit", &day.commit)
.insert("data", &day.by_crate)
.build())
}
}
#[derive(Deserialize)]
struct Days { // XXX naming
kind: Kind,
dates: Vec<OptionalDate>,
crates: Vec<String>,
phases: Vec<String>,
group_by: GroupBy,
}
impl PostHandler for Days {
fn handle(body: Self, data: &InputData) -> Result<Value> {
let data = data.by_kind(body.kind);
let mut result = Vec::new();
for date in body.dates {
if let OptionalDate::Date(date) = date {
let day = get_data_for_date(
&data[end_idx(data, date)],
&body.crates,
&body.phases,
body.group_by
);
result.push(day);
}
}
Ok(Value::Array(result))
}
}
#[derive(Deserialize)]
struct Stats { // XXX naming
kind: Kind,
#[serde(rename(deserialize="start"))]
start_date: OptionalDate,
#[serde(rename(deserialize="end"))]
end_date: OptionalDate,
// kind rustc only: crate or phase can be 'total'
crates: Vec<String>,
phases: Vec<String>,
}
impl PostHandler for Stats {
fn handle(body: Self, data: &InputData) -> Result<Value> {
if body.kind == Kind::Benchmarks && body.crates.iter().any(|s| s == "total") {
return Err("unexpected total crate with benchmarks kind".into());
}
let kinded_data = data.by_kind(body.kind);
let mut start_date = body.start_date.as_start(data);
let mut end_date = body.end_date.as_end(data);
let mut counted = Vec::new();
// Iterate over date range.
let start_idx = start_idx(kinded_data, start_date);
let end_idx = end_idx(kinded_data, end_date);
for i in start_idx..(end_idx + 1) {
let today_data = &kinded_data[i];
if !today_data.by_crate.is_empty() {
if counted.is_empty() {
start_date = today_data.date;
}
end_date = today_data.date;
counted.push(today_data);
}
}
let mut crates = ObjectBuilder::new();
for crate_name in body.crates {
let stats = mk_stats(&counted, &crate_name, &body.phases);
crates = crates.insert(crate_name, stats);
}
Ok(ObjectBuilder::new()
.insert("startDate", start_date.format(JS_DATE_FORMAT).to_string())
.insert("endDate", end_date.format(JS_DATE_FORMAT).to_string())
.insert("crates", crates.build())
.build())
}
}
fn mk_stats(data: &[&TestRun], crate_name: &str, phases: &[String]) -> Value {
let sums = data.iter()
.filter(|day| if let Some(krate) = day.by_crate.get(crate_name) {
!krate.is_empty()
} else {
false
})
.map(|day| {
let krate = &day.by_crate[crate_name];
let mut sum = 0.0;
for phase in phases {
sum += krate[phase].time;
}
sum
})
.collect::<Vec<_>>();
if sums.is_empty() {
return ObjectBuilder::new()
.insert("first", 0)
.insert("last", 0)
.insert("min", 0)
.insert("max", 0)
.insert("mean", 0)
.insert("variance", 0)
.insert("trend", 0)
.insert("trend_b", 0)
.insert("n", 0)
.build();
}
let first = sums[0];
let last = *sums.last().unwrap();
let mut min = first;
let mut max = first;
let q1_idx = data.len() / 4;
let q4_idx = 3 * data.len() / 4;
let mut total = 0.0;
let mut q1_total = 0.0;
let mut q4_total = 0.0;
for (i, &cur) in sums.iter().enumerate() {
min = min.min(cur);
max = max.max(cur);
total += cur;
if i < q1_idx { // Within the first quartile
q1_total += cur;
}
if i >= q4_idx { // Within the fourth quartile
q4_total += cur;
}
}
// Calculate the variance
let mean = total / (sums.len() as f64);
let mut var_total = 0.0;
for sum in &sums {
let diff = sum - mean;
var_total += diff * diff;
}
let variance = var_total / ((sums.len() - 1) as f64);
let trend = if sums.len() >= 10 && sums.len() == data.len() {
let q1_mean = q1_total / (q1_idx as f64);
let q4_mean = q4_total / ((data.len() - q4_idx) as f64);
100.0 * ((q4_mean - q1_mean) / first)
} else {
0.0
};
let trend_b = 100.0 * ((last - first) / first);
ObjectBuilder::new()
.insert("first", first)
.insert("last", last)
.insert("min", min)
.insert("max", max)
.insert("mean", mean)
.insert("variance", variance)
.insert("trend", trend)
.insert("trend_b", trend_b)
.insert("n", sums.len())
.build()
}
pub fn start(data: InputData) {
let mut router = Router::new();
router.get("/summary", Summary::handler);<|fim▁hole|> router.post("/get_tabular", Tabular::handler);
router.post("/get", Days::handler);
router.post("/stats", Stats::handler);
let mut chain = Chain::new(router);
chain.link(State::<InputData>::both(data));
Iron::new(chain).http(SERVER_ADDRESS).unwrap();
}<|fim▁end|> | router.get("/info", Info::handler);
router.post("/data", Data::handler); |
<|file_name|>MergeBU.java<|end_file_name|><|fim▁begin|>package com.algorithms.sorting;
public class MergeBU {
public static void sort(Comparable[] a) {
Comparable[] aux = new Comparable[a.length];
int N = a.length;
for (int size = 1; size < N; size = size*2) {
for (int i = 0; i < N; i = i + size) merge(a, aux, i, i+size-1, Math.min(i+size+size-1, N-1));
}
}
private static boolean less(Comparable v, Comparable w) {
return v.compareTo(w) < 0;
}
private static void merge(Comparable[] a, Comparable[] aux, int lo, int mid, int hi) {<|fim▁hole|>
int i = lo, j = mid + 1;
for (int k = lo; k < hi; k ++) {
if (i > mid) a[k] = aux[j++];
else if (j > hi) a[k] = aux[i++];
if (less(aux[j], aux[i])) a[k] = aux[j++];
else a[k] = aux[i++];
}
assert isSorted(a, lo, hi);
}
private static boolean isSorted(Comparable[] a, int lo, int hi) {
for (int i = lo; i < hi; i ++) {
if (less(a[i+1], a[i])) return false;
}
return true;
}
}<|fim▁end|> | assert isSorted(a, lo, mid);
assert isSorted(a, mid+1, hi);
for (int i = lo; i < hi; i++) aux[i] = a[i]; |
<|file_name|>partner.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Contributor: Pedro Manuel Baeza <pedro.baeza@serviciosbaeza.com>
# Ignacio Ibeas <ignacio@acysos.com>
# Alejandro Santana <alejandrosantana@anubia.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.<|fim▁hole|>#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
zip_id = fields.Many2one('res.better.zip', 'City/Location')
@api.one
@api.onchange('zip_id')
def onchange_zip_id(self):
if self.zip_id:
self.zip = self.zip_id.name
self.city = self.zip_id.city
self.state_id = self.zip_id.state_id
self.country_id = self.zip_id.country_id
def _compute_zip_id(self, vals):
""" Ensure the zip_id is filled whenever possible. This is useful in
case segmentation is done on this field.
Try to match a zip_id based on country/zip/city or country/zip.
"""
if 'zip_id' not in vals and (
'city' in vals or
'zip' in vals or
'country_id' in vals):
domain = []
zip_ids = []
if 'country_id' in vals:
country_id = vals['country_id']
else:
country_id = self.country_id.id
if 'zip' in vals:
zipcode = vals['zip']
else:
zipcode = self.zip
if 'city' in vals:
city = vals['city']
else:
city = self.city
if country_id:
domain.append(('country_id', '=', country_id))
if zipcode:
domain.append(('name', '=', zipcode))
if city:
zip_ids = self.env['res.better.zip'].search(domain + [('city', '=ilike', city)])
if not city or not zip_ids:
zip_ids = self.env['res.better.zip'].search(domain)
if zip_ids:
vals['zip_id'] = zip_ids[0].id
@api.model
@api.returns('self', lambda value: value.id)
def create(self, vals):
self._compute_zip_id(vals)
return super(ResPartner, self).create(vals)
@api.one
def write(self, vals):
self._compute_zip_id(vals)
return super(ResPartner, self).write(vals)
def _address_fields(self, cr, uid, context=None):
""" Returns the list of address fields that are synced from the parent
when the `use_parent_address` flag is set. """
return super(ResPartner, self)._address_fields(cr, uid, context=context) + ['zip_id']<|fim▁end|> | |
<|file_name|>as_unsigned.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::slice::IntSliceExt;
// pub trait IntSliceExt<U, S> {
// /// Converts the slice to an immutable slice of unsigned integers with the same width.
// fn as_unsigned<'a>(&'a self) -> &'a [U];
// /// Converts the slice to an immutable slice of signed integers with the same width.
// fn as_signed<'a>(&'a self) -> &'a [S];
//
// /// Converts the slice to a mutable slice of unsigned integers with the same width.
// fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U];
// /// Converts the slice to a mutable slice of signed integers with the same width.
// fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S];<|fim▁hole|> // ($u:ty, $s:ty, $t:ty) => {
// #[unstable(feature = "core")]
// impl IntSliceExt<$u, $s> for [$t] {
// #[inline]
// fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } }
// #[inline]
// fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } }
// #[inline]
// fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } }
// }
// }
// }
// macro_rules! impl_int_slices {
// ($u:ty, $s:ty) => {
// impl_int_slice! { $u, $s, $u }
// impl_int_slice! { $u, $s, $s }
// }
// }
// impl_int_slices! { u64, i64 }
type U = u64;
type S = i64;
type T = U;
#[test]
fn as_unsigned_test1() {
let slice: &[T] = &[0xffffffffffffffff];
let as_unsigned: &[U] = slice.as_unsigned();
assert_eq!(as_unsigned[0], 18446744073709551615);
}
}<|fim▁end|> | // }
// macro_rules! impl_int_slice { |
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>"""
drslib exceptions<|fim▁hole|>
"""
class TranslationError(Exception):
pass<|fim▁end|> | |
<|file_name|>clean-page.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export function navigate(args) {
frame.topmost().goBack();
}<|fim▁end|> | import * as frame from '@nativescript/core/ui/frame';
|
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>// #docregion
import { Component } from '@angular/core';<|fim▁hole|>
@Component({
selector: 'my-app',
template: `
<router-outlet></router-outlet>
<div ng-view></div>
`,
})
export class AppComponent { }<|fim▁end|> | |
<|file_name|>route.py<|end_file_name|><|fim▁begin|>import numpy as np
from definition import states_by_id
import pyproj as prj
class Route:
"""
A class of Breeding Bird Survey (BBS) route
Each Route includes the following members:
id - id number of the route
name - name of the route
stateID - to which state the route belongs
routeID - route ID inside each state
length - length published by USGS
path - 2D numpy array of point coordinates along the route
in default projection: EPSG:5070 (USGS standard).
!!! Note the points are not the stops of Bird Survey
!!! They are simply points to define a polyline of the route
path2 - path array converted to longitude/latitude
stops - 2D array of stop coordinates along the route
!!! Note the location of stops are calculated
!!! by distance from starting point along the route,
!!! currently hard-coded as every 800 meter (0.5 mile)
stops2 - stops array converted to longitude/latitude
stop_d - distance between stops, by standard should be around 800m,
but may vary a lot, currently I assume the 50 stops
distributed evenly along the route, i.e. stop_d = length / 49
rating - type of route (single or multiple)
TODO: specify details on this feature
"""
# header - for export and print use
header = '%s\t%s\t%s\t%s\t%s\t%s\t%s\n' % (
'ID', 'State ID', 'State',
'Route ID', 'Route Name', 'Route Length', 'Route Rating'
)
def __init__(self, fields, records):
"""
Initialize a Route using information in the USGS shapefile.
fields: field label used in shapefile
records: a list of route part records in the shapefile
"single" route contains only one part
"multiple" route contains multiple parts, but can be
stitched together
"""
# load information from the fields in the shapefile
i = 1
while i < len(fields):
if fields[i][0] == 'rteno':
self.id = records[0].record[i - 1]
self.stateID = self.id // 1000
self.routeID = self.id % 1000
if fields[i][0] == 'RTENAME':
self.name = records[0].record[i - 1]
if fields[i][0] == 'rte_length':
self.length = float(records[0].record[i - 1])
i = i + 1
# generate a complete route path
if len(records) == 1:
self.rating = 'single'
self.path = np.array(records[0].shape.points)
self.path2 = self.to_lonlat(self.path)
else:
self.rating = 'multiple'
# generate a list of points in each segments
self.path = np.array(records[0].shape.points)
thresh = 10.0
i = 1
while i < len(records):
r = np.array(records[i].shape.points)
p1 = self.path[0]
p2 = self.path[-1]
s1 = r[0]
s2 = r[-1]
if np.linalg.norm(p2 - s1) < thresh:
self.path = np.vstack((self.path, r))
elif np.linalg.norm(p2 - s2) < thresh:
self.path = np.vstack((self.path, r[-1::-1]))
elif np.linalg.norm(s2 - p1) < thresh:
self.path = np.vstack((r, self.path))
elif np.linalg.norm(s1 - p1) < thresh:
self.path = np.vstack((r[-1::-1], self.path))
else:
self.rating = 'broken'
break<|fim▁hole|> # calculate 50 stops along the path
if self.rating is not 'broken':
self.stops, self.stop_d = self.calc_stop()
self.stops2 = self.to_lonlat(self.stops)
else:
self.stops = np.array(())
self.stops2 = np.array(())
self.stop_d = 0.0
# output Route summary
print(self.summary())
def to_lonlat(self, pts):
"""
Convert coordinate from EPSG:5070 to EPSG:4326 (Longitude/Latitide)
"""
new_pts = np.zeros_like(pts)
# define input/output projections of lon/lat transformation
inProj = prj.Proj(init='epsg:5070') # Albers Equal Area
outProj = prj.Proj(init='epsg:4326') # Lat/Long Geodetic System
for i in range(len(pts)):
x, y = pts[i]
lon, lat = prj.transform(inProj, outProj, x, y)
new_pts[i] = (lon, lat)
return new_pts
def calc_len(self):
"""
Calculate total length, segment length, x/y displacement of
each segment along the route.
"""
if self.rating is 'broken':
print('ERROR: Cannot calculate length for broken route')
exit(1)
# python list of lengths of segments (between stops)
segments = np.zeros((len(self.path) - 1))
dxy = np.zeros((len(self.path) - 1, 2))
total_len = 0.0
for i in range(1, len(self.path)):
# !!! Only apply to EPSG:5070
# !!! Poor man's method to calc distance between two points
# !!! TODO: change to advanced method to handle different
# !!! projections
p0 = self.path[i - 1]
p1 = self.path[i]
d = np.linalg.norm(p1 - p0)
dxy[i - 1] = p1 - p0
segments[i - 1] = d
total_len += d
return total_len, segments, dxy
def calc_stop(self):
"""
Calculate 50 stops along a BBS route.
"""
if self.rating is 'broken':
print('ERROR: Cannot calculate stops for broken route')
exit(1)
# calculate total path length and generate a list of segment lengths
length, segments, dxy = self.calc_len()
#
# TODO: check if calculated length matched by published data
#
# hard-coded number of stops
nstop = 50
# use the starting point as first stop
stops = np.zeros((50, 2))
stops[0] = self.path[0]
k = 1
# distance between each stop, more or less 800 meters
# TODO: warning if the value not close to 800 meters
# TODO: handle other projections (?)
dstop = length / (nstop - 1)
# "trip" counts how many meters traveled since the last stop
trip = 0.0
for i in range(len(segments)):
seg = trip + segments[i]
# the fraction of two parts that are split by the stop in the
# current segments, used to linearly interpolate stop coordinates
frac = 0.0
while seg >= dstop:
frac += (dstop - trip)
stops[k] = self.path[i] + frac / segments[i] * dxy[i]
k = k + 1
seg -= dstop
trip = 0.0
trip = seg
# handle the last stop
if k == nstop - 1:
stops[-1] = self.path[-1]
elif k < nstop - 1:
# TODO: is this necessary?
print('!!! %d - %s: Not enough stops found, k = %d'
% (self.id, self.name, k))
elif k > nstop:
# TODO: is this necessary?
print('!!! %d - %s: More stops found, k = %d'
% (self.id, self.name, k))
return stops, dstop
def summary(self):
"""
Summary string for print and export
"""
return '%d\t%d\t%s\t%d\t%s\t%f\t%s\n' % (
self.id, self.stateID, states_by_id[self.stateID].name,
self.routeID, self.name, self.length, self.rating
)
def export(self):
"""
Export route information to a CSV file.
"""
if self.rating is 'broken':
print('ERROR: exporting broken route')
exit(1)
with open('rte_' + str(self.id) + '.csv', 'w') as f:
f.write('sep=\t\n')
f.write(self.header)
f.write(self.summary())
f.write('====\t====\t====\tStops Info\t====\t====\t====\n')
f.write('----\tStop\tX\tY\tLongitude\tLatitude\t----\n')
for i in range(50):
x, y = self.stops[i]
lon, lat = self.stops2[i]
f.write('----\t%d\t%f\t%f\t%f\t%f\t----\n' % (
i + 1, x, y, lon, lat)
)
f.write('====\t====\t====\tPath Info\t====\t====\t====\n')
f.write('----\tPoint\tX\tY\tLongitude\tLatitude\t----\n')
for i in range(len(self.path)):
x, y = self.path[i]
lon, lat = self.path2[i]
f.write('----\t%d\t%f\t%f\t%f\t%f\t----\n' % (
i + 1, x, y, lon, lat)
)<|fim▁end|> | i = i + 1
self.path2 = self.to_lonlat(self.path)
|
<|file_name|>house.py<|end_file_name|><|fim▁begin|>parts = (('house', 'Jack built'),
('malt', 'lay in'),
('rat', 'ate'),
('cat', 'killed'),
('dog', 'worried'),
('cow with the crumpled horn', 'tossed'),
('maiden all forlorn', 'milked'),
('man all tattered and torn', 'kissed'),
('priest all shaven and shorn', 'married'),
('rooster that crowed in the morn', 'woke'),
('farmer sowing his corn', 'kept'),
('horse and the hound and the horn', 'belonged to'))
def verse(n):<|fim▁hole|> return verse(v)
else:
return verse(v) + ' the ' + rhymes(v-1)
def rhyme():
return '\n'.join([ 'This is the ' + rhymes(v) + '.\n' for v in range(12) ])[:-1]<|fim▁end|> | return '{}\nthat {}'.format(parts[n][0],parts[n][1]) if n != 0 else '{} that {}'.format(parts[n][0],parts[n][1])
def rhymes(v = 11):
if v == 0: |
<|file_name|>to_limbs.rs<|end_file_name|><|fim▁begin|>use itertools::Itertools;
use malachite_base::num::basic::traits::Zero;
use malachite_base::num::conversion::traits::ExactFrom;
use malachite_base_test_util::generators::unsigned_gen_var_5;
use malachite_nz::natural::Natural;
use malachite_nz_test_util::generators::{
natural_bool_vec_pair_gen_var_1, natural_gen, natural_unsigned_pair_gen_var_4,
};
#[cfg(feature = "32_bit_limbs")]
use std::str::FromStr;
#[cfg(feature = "32_bit_limbs")]
#[test]
fn test_to_limbs_asc() {
let test = |n, out| {
let n = Natural::from_str(n).unwrap();
assert_eq!(n.limbs().collect_vec(), out);
assert_eq!(n.to_limbs_asc(), out);
assert_eq!(n.into_limbs_asc(), out);
};
test("0", vec![]);
test("123", vec![123]);
test("1000000000000", vec![3567587328, 232]);
test(
"1701411834921604967429270619762735448065",
vec![1, 2, 3, 4, 5],
);
test("4294967295", vec![u32::MAX]);
test("4294967296", vec![0, 1]);
test("18446744073709551615", vec![u32::MAX, u32::MAX]);
test("18446744073709551616", vec![0, 0, 1]);
let n = Natural::from_str("1701411834921604967429270619762735448065").unwrap();
let mut limbs = n.limbs();
assert_eq!(Some(1), limbs.next());
assert_eq!(Some(5), limbs.next_back());
assert_eq!(Some(4), limbs.next_back());
assert_eq!(Some(2), limbs.next());
assert_eq!(Some(3), limbs.next());
assert_eq!(None, limbs.next());
assert_eq!(None, limbs.next_back());
assert_eq!(limbs[0], 1);
assert_eq!(limbs[1], 2);
assert_eq!(limbs[2], 3);
assert_eq!(limbs[3], 4);
assert_eq!(limbs[4], 5);
assert_eq!(limbs[5], 0);
let mut limbs = n.limbs();
assert_eq!(Some(1), limbs.next());
assert_eq!(Some(2), limbs.next());
assert_eq!(Some(3), limbs.next());
assert_eq!(Some(5), limbs.next_back());
assert_eq!(Some(4), limbs.next_back());
assert_eq!(None, limbs.next());
assert_eq!(None, limbs.next_back());
}
#[cfg(feature = "32_bit_limbs")]
#[test]
fn test_to_limbs_desc() {
let test = |n, out| {
let n = Natural::from_str(n).unwrap();
assert_eq!(n.limbs().rev().collect_vec(), out);
assert_eq!(n.to_limbs_desc(), out);
assert_eq!(n.into_limbs_desc(), out);
};
test("0", vec![]);
test("123", vec![123]);
test("1000000000000", vec![232, 3567587328]);
test(
"1701411834921604967429270619762735448065",
vec![5, 4, 3, 2, 1],
);
test("4294967295", vec![u32::MAX]);
test("4294967296", vec![1, 0]);
test("18446744073709551615", vec![u32::MAX, u32::MAX]);
test("18446744073709551616", vec![1, 0, 0]);
}
#[test]
fn to_limbs_asc_properties() {
natural_gen().test_properties(|x| {
let limbs = x.to_limbs_asc();
assert_eq!(x.clone().into_limbs_asc(), limbs);
assert_eq!(x.limbs().collect_vec(), limbs);
assert_eq!(Natural::from_limbs_asc(&limbs), x);
if x != 0 {
assert_ne!(*limbs.last().unwrap(), 0);
}
});
}
#[test]
fn to_limbs_desc_properties() {
natural_gen().test_properties(|x| {
let limbs = x.to_limbs_desc();
assert_eq!(x.clone().into_limbs_desc(), limbs);
assert_eq!(x.limbs().rev().collect_vec(), limbs);
assert_eq!(Natural::from_limbs_desc(&limbs), x);
if x != 0 {
assert_ne!(limbs[0], 0);
}
});
}
#[test]
fn limbs_properties() {
natural_gen().test_properties(|n| {<|fim▁hole|> natural_bool_vec_pair_gen_var_1().test_properties(|(n, bs)| {
let mut limbs = n.limbs();
let mut limb_vec = Vec::new();
let mut i = 0;
for b in bs {
if b {
limb_vec.insert(i, limbs.next().unwrap());
i += 1;
} else {
limb_vec.insert(i, limbs.next_back().unwrap())
}
}
assert!(limbs.next().is_none());
assert!(limbs.next_back().is_none());
assert_eq!(n.to_limbs_asc(), limb_vec);
});
natural_unsigned_pair_gen_var_4().test_properties(|(n, u)| {
if u < usize::exact_from(n.limb_count()) {
assert_eq!(n.limbs()[u], n.to_limbs_asc()[u]);
} else {
assert_eq!(n.limbs()[u], 0);
}
});
unsigned_gen_var_5().test_properties(|u| {
assert_eq!(Natural::ZERO.limbs()[u], 0);
});
}<|fim▁end|> | let limb_count = usize::exact_from(n.limb_count());
assert_eq!(n.limbs().size_hint(), (limb_count, Some(limb_count)));
});
|
<|file_name|>ResourceManagerTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.agent.monitor.inventory;
import org.hawkular.agent.monitor.inventory.dmr.DMRResource;
import org.hawkular.agent.monitor.inventory.dmr.DMRResourceType;
import org.hawkular.dmrclient.Address;
import org.jboss.dmr.ModelNode;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.traverse.BreadthFirstIterator;
import org.jgrapht.traverse.DepthFirstIterator;
import org.junit.Assert;
import org.junit.Test;
public class ResourceManagerTest {
@Test
public void testEmptyResourceManager() {
ResourceManager<DMRResource> rm = new ResourceManager<>();
Assert.assertNull(rm.getResource(new ID("foo")));
Assert.assertTrue(rm.getAllResources().isEmpty());
Assert.assertTrue(rm.getRootResources().isEmpty());
Assert.assertFalse(rm.getBreadthFirstIterator().hasNext());
Assert.assertFalse(rm.getDepthFirstIterator().hasNext());
}<|fim▁hole|> ResourceManager<DMRResource> rm = new ResourceManager<>();
DMRResource root1 = new DMRResource(new ID("root1"), new Name("root1Name"), null, type, null, new Address(),
new ModelNode());
DMRResource root2 = new DMRResource(new ID("root2"), new Name("root2Name"), null, type, null, new Address(),
new ModelNode());
DMRResource child1 = new DMRResource(new ID("child1"), new Name("child1Name"), null, type, root1,
new Address(), new ModelNode());
DMRResource child2 = new DMRResource(new ID("child2"), new Name("child2Name"), null, type, root1,
new Address(), new ModelNode());
DMRResource grandChild1 = new DMRResource(new ID("grand1"), new Name("grand1Name"), null, type, child1,
new Address(), new ModelNode());
// add root1
rm.addResource(root1);
Assert.assertEquals(1, rm.getAllResources().size());
Assert.assertTrue(rm.getAllResources().contains(root1));
Assert.assertEquals(root1, rm.getResource(root1.getID()));
DepthFirstIterator<DMRResource, DefaultEdge> dIter = rm.getDepthFirstIterator();
Assert.assertEquals(root1, dIter.next());
Assert.assertFalse(dIter.hasNext());
BreadthFirstIterator<DMRResource, DefaultEdge> bIter = rm.getBreadthFirstIterator();
Assert.assertEquals(root1, bIter.next());
Assert.assertFalse(bIter.hasNext());
Assert.assertEquals(1, rm.getRootResources().size());
Assert.assertTrue(rm.getRootResources().contains(root1));
// add child1
rm.addResource(child1);
Assert.assertEquals(2, rm.getAllResources().size());
Assert.assertTrue(rm.getAllResources().contains(child1));
Assert.assertEquals(child1, rm.getResource(child1.getID()));
// add grandChild1
rm.addResource(grandChild1);
Assert.assertEquals(3, rm.getAllResources().size());
Assert.assertTrue(rm.getAllResources().contains(grandChild1));
Assert.assertEquals(grandChild1, rm.getResource(grandChild1.getID()));
// add root2
rm.addResource(root2);
Assert.assertEquals(4, rm.getAllResources().size());
Assert.assertTrue(rm.getAllResources().contains(root2));
Assert.assertEquals(root2, rm.getResource(root2.getID()));
Assert.assertEquals(2, rm.getRootResources().size());
Assert.assertTrue(rm.getRootResources().contains(root2));
// add child2
rm.addResource(child2);
Assert.assertEquals(5, rm.getAllResources().size());
Assert.assertTrue(rm.getAllResources().contains(child2));
Assert.assertEquals(child2, rm.getResource(child2.getID()));
//
// the tree now looks like:
//
// root1 root2
// / \
// child1 child2
// |
// grandchild1
//
Assert.assertEquals(2, rm.getChildren(root1).size());
Assert.assertTrue(rm.getChildren(root1).contains(child1));
Assert.assertTrue(rm.getChildren(root1).contains(child2));
Assert.assertEquals(1, rm.getChildren(child1).size());
Assert.assertTrue(rm.getChildren(child1).contains(grandChild1));
Assert.assertEquals(0, rm.getChildren(grandChild1).size());
Assert.assertEquals(0, rm.getChildren(root2).size());
Assert.assertEquals(null, rm.getParent(root1));
Assert.assertEquals(null, rm.getParent(root2));
Assert.assertEquals(root1, rm.getParent(child1));
Assert.assertEquals(root1, rm.getParent(child2));
Assert.assertEquals(child1, rm.getParent(grandChild1));
/*
* WHY DOESN'T THIS ITERATE LIKE IT SHOULD?
*
// iterate depth first which should be:
// root1 -> child1 -> grandchild1 -> child2 -> root2
dIter = rm.getDepthFirstIterator();
Assert.assertEquals(root1, dIter.next());
Assert.assertEquals(child1, dIter.next());
Assert.assertEquals(grandChild1, dIter.next());
Assert.assertEquals(child2, dIter.next());
Assert.assertEquals(root2, dIter.next());
Assert.assertFalse(dIter.hasNext());
// iterate breadth first which should be (assuming roots are done in order)
// root1 -> child1 -> child2 -> grandchild1 -> root2
bIter = rm.getBreadthFirstIterator();
Assert.assertEquals(root1, bIter.next());
Assert.assertEquals(child1, bIter.next());
Assert.assertEquals(child2, bIter.next());
Assert.assertEquals(grandChild1, bIter.next());
Assert.assertEquals(root2, bIter.next());
Assert.assertFalse(bIter.hasNext());
*
* THE ABOVE DOESN'T WORK AS EXPECTED
*/
}
}<|fim▁end|> |
@Test
public void testResourceManager() {
DMRResourceType type = new DMRResourceType(new ID("resType"), new Name("resTypeName")); |
<|file_name|>compress_fragment_two_pass.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
use super::backward_references::kHashMul32;
//use super::super::alloc::{SliceWrapper, SliceWrapperMut};
use super::bit_cost::BitsEntropy;
use super::brotli_bit_stream::{BrotliBuildAndStoreHuffmanTreeFast, BrotliStoreHuffmanTree};
use super::entropy_encode::{BrotliConvertBitDepthsToSymbols, BrotliCreateHuffmanTree, HuffmanTree,
NewHuffmanTree};
use super::static_dict::{BROTLI_UNALIGNED_LOAD32, BROTLI_UNALIGNED_LOAD64, BROTLI_UNALIGNED_STORE64,
FindMatchLengthWithLimit};
use super::super::alloc;
use super::util::{brotli_min_size_t, Log2FloorNonZero};
use core;
static kCompressFragmentTwoPassBlockSize: usize = (1i32 << 17i32) as (usize);
// returns number of commands inserted
fn EmitInsertLen(insertlen: u32, commands: &mut &mut [u32]) -> usize {
if insertlen < 6u32 {
(*commands)[0] = insertlen;
} else if insertlen < 130u32 {
let tail: u32 = insertlen.wrapping_sub(2u32);
let nbits: u32 = Log2FloorNonZero(tail as (u64)).wrapping_sub(1u32);
let prefix: u32 = tail >> nbits;
let inscode: u32 = (nbits << 1i32).wrapping_add(prefix).wrapping_add(2u32);
let extra: u32 = tail.wrapping_sub(prefix << nbits);
(*commands)[0] = inscode | extra << 8i32;
} else if insertlen < 2114u32 {
let tail: u32 = insertlen.wrapping_sub(66u32);
let nbits: u32 = Log2FloorNonZero(tail as (u64));
let code: u32 = nbits.wrapping_add(10u32);
let extra: u32 = tail.wrapping_sub(1u32 << nbits);
(*commands)[0] = code | extra << 8i32;
} else if insertlen < 6210u32 {
let extra: u32 = insertlen.wrapping_sub(2114u32);
(*commands)[0] = 21u32 | extra << 8i32;
} else if insertlen < 22594u32 {
let extra: u32 = insertlen.wrapping_sub(6210u32);
(*commands)[0] = 22u32 | extra << 8i32;
} else {
let extra: u32 = insertlen.wrapping_sub(22594u32);
(*commands)[0] = 23u32 | extra << 8i32;
}
let remainder = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder[1..]);
1
}
fn EmitDistance(distance: u32, commands: &mut &mut [u32]) -> usize {
let d: u32 = distance.wrapping_add(3u32);
let nbits: u32 = Log2FloorNonZero(d as (u64)).wrapping_sub(1u32);
let prefix: u32 = d >> nbits & 1u32;
let offset: u32 = (2u32).wrapping_add(prefix) << nbits;
let distcode: u32 =
(2u32).wrapping_mul(nbits.wrapping_sub(1u32)).wrapping_add(prefix).wrapping_add(80u32);
let extra: u32 = d.wrapping_sub(offset);
(*commands)[0] = distcode | extra << 8i32;
let remainder = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder[1..]);
1
}
fn EmitCopyLenLastDistance(copylen: usize, commands: &mut &mut [u32]) -> usize {
if copylen < 12usize {
(*commands)[0] = copylen.wrapping_add(20usize) as (u32);
let remainder = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder[1..]);
1
} else if copylen < 72usize {
let tail: usize = copylen.wrapping_sub(8usize);
let nbits: usize = Log2FloorNonZero(tail as u64).wrapping_sub(1u32) as (usize);
let prefix: usize = tail >> nbits;
let code: usize = (nbits << 1i32).wrapping_add(prefix).wrapping_add(28usize);
let extra: usize = tail.wrapping_sub(prefix << nbits);
(*commands)[0] = (code | extra << 8i32) as (u32);
let remainder = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder[1..]);
1
} else if copylen < 136usize {
let tail: usize = copylen.wrapping_sub(8usize);
let code: usize = (tail >> 5i32).wrapping_add(54usize);
let extra: usize = tail & 31usize;
(*commands)[0] = (code | extra << 8i32) as (u32);
let remainder = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder[1..]);
(*commands)[0] = 64u32;
let remainder2 = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder2[1..]);
2
} else if copylen < 2120usize {
let tail: usize = copylen.wrapping_sub(72usize);
let nbits: usize = Log2FloorNonZero(tail as u64) as (usize);
let code: usize = nbits.wrapping_add(52usize);
let extra: usize = tail.wrapping_sub(1usize << nbits);
(*commands)[0] = (code | extra << 8i32) as (u32);
let remainder = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder[1..]);
(*commands)[0] = 64u32;
let remainder2 = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder2[1..]);
2
} else {
let extra: usize = copylen.wrapping_sub(2120usize);
(*commands)[0] = (63usize | extra << 8i32) as (u32);
let remainder = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder[1..]);
(*commands)[0] = 64u32;
let remainder2 = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder2[1..]);
2
}
}
fn HashBytesAtOffset(v: u64, offset: i32, shift: usize, length: usize) -> u32 {
0i32;
0i32;
{
let h: u64 = (v >> 8i32 * offset << ((8 - length) * 8)).wrapping_mul(kHashMul32 as (u64));
(h >> shift) as (u32)
}
}
fn EmitCopyLen(copylen: usize, commands: &mut &mut [u32]) -> usize {
if copylen < 10usize {
(*commands)[0] = copylen.wrapping_add(38usize) as (u32);
} else if copylen < 134usize {
let tail: usize = copylen.wrapping_sub(6usize);
let nbits: usize = Log2FloorNonZero(tail as u64).wrapping_sub(1u32) as (usize);
let prefix: usize = tail >> nbits;
let code: usize = (nbits << 1i32).wrapping_add(prefix).wrapping_add(44usize);
let extra: usize = tail.wrapping_sub(prefix << nbits);
(*commands)[0] = (code | extra << 8i32) as (u32);
} else if copylen < 2118usize {
let tail: usize = copylen.wrapping_sub(70usize);
let nbits: usize = Log2FloorNonZero(tail as u64) as (usize);
let code: usize = nbits.wrapping_add(52usize);
let extra: usize = tail.wrapping_sub(1usize << nbits);
(*commands)[0] = (code | extra << 8i32) as (u32);
} else {
let extra: usize = copylen.wrapping_sub(2118usize);
(*commands)[0] = (63usize | extra << 8i32) as (u32);
}
let remainder = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder[1..]);
1
}
fn Hash(p: &[u8], shift: usize, length:usize) -> u32 {
let h: u64 = (BROTLI_UNALIGNED_LOAD64(p) << ((8 - length) * 8)).wrapping_mul(kHashMul32 as (u64));
(h >> shift) as (u32)
}
fn IsMatch(p1: &[u8], p2: &[u8], length: usize) -> i32 {
if BROTLI_UNALIGNED_LOAD32(p1) == BROTLI_UNALIGNED_LOAD32(p2) {
if length == 4 {
return 1;
}
return
((p1[(4usize)] as (i32) == p2[(4usize)] as (i32)) &&
(p1[(5usize)] as (i32) == p2[(5usize)] as (i32))) as i32
}
0
}
#[allow(unused_assignments)]
fn CreateCommands(input_index: usize,
block_size: usize,
input_size: usize,
base_ip: &[u8],
table: &mut [i32],
table_bits: usize,
min_match: usize,
literals: &mut &mut [u8],
num_literals: &mut usize,
commands: &mut &mut [u32],
num_commands: &mut usize) {
let mut ip_index: usize = input_index;
let shift: usize = (64u32 as (usize)).wrapping_sub(table_bits);
let ip_end: usize = input_index.wrapping_add(block_size);
let mut next_emit: usize = input_index;
let mut last_distance: i32 = -1i32;
let kInputMarginBytes: usize = 16usize;
if block_size >= kInputMarginBytes {
let len_limit: usize = brotli_min_size_t(block_size.wrapping_sub(min_match),
input_size.wrapping_sub(kInputMarginBytes));
let ip_limit: usize = input_index.wrapping_add(len_limit);
let mut next_hash: u32;
let mut goto_emit_remainder: i32 = 0i32;
next_hash = Hash(&base_ip[({
ip_index = ip_index.wrapping_add(1 as (usize));
ip_index
} as (usize))..],
shift, min_match);
while goto_emit_remainder == 0 {
let mut skip: u32 = 32u32;
let mut next_ip: usize = ip_index;
let mut candidate: usize = 0;
0i32;
loop {
{
'break3: loop {
{
let hash: u32 = next_hash;
let bytes_between_hash_lookups: u32 = ({
let _old = skip;
skip = skip.wrapping_add(1 as (u32));
_old
}) >>
5i32;
ip_index = next_ip;
0i32;
next_ip = ip_index.wrapping_add(bytes_between_hash_lookups as (usize));
if next_ip > ip_limit {
goto_emit_remainder = 1i32;
{
{
break 'break3;
}
}
}
next_hash = Hash(&base_ip[(next_ip as (usize))..], shift, min_match);
0i32;
candidate = ip_index.wrapping_sub(last_distance as (usize));
if IsMatch(&base_ip[(ip_index as (usize))..],
&base_ip[(candidate as (usize))..], min_match) != 0 {
if candidate < ip_index {
table[(hash as (usize))] = ip_index.wrapping_sub(0usize) as (i32);
{
{
break 'break3;
}
}
}
}
candidate = table[(hash as (usize))] as (usize);
0i32;
0i32;
table[(hash as (usize))] = ip_index.wrapping_sub(0usize) as (i32);
}
if !(IsMatch(&base_ip[(ip_index as (usize))..],
&base_ip[(candidate as (usize))..], min_match) == 0) {
break;
}
}
}
if !(ip_index.wrapping_sub(candidate) >
(1usize << 18i32).wrapping_sub(16usize) as (isize) as (usize) &&
(goto_emit_remainder == 0)) {
break;
}
}
if goto_emit_remainder != 0 {
{
break;
}
}
{
let base: usize = ip_index;
let matched: usize = min_match
.wrapping_add(FindMatchLengthWithLimit(&base_ip[(candidate as (usize) + min_match)..],
&base_ip[(ip_index as (usize) + min_match)..],
ip_end.wrapping_sub(ip_index)
.wrapping_sub(min_match)));
let distance: i32 = base.wrapping_sub(candidate) as (i32);
let insert: i32 = base.wrapping_sub(next_emit) as (i32);
ip_index = ip_index.wrapping_add(matched);
0i32;
*num_commands += EmitInsertLen(insert as (u32), commands);
(*literals)[..(insert as usize)].clone_from_slice(&base_ip[(next_emit as usize)..
((next_emit +
insert as usize))]);
*num_literals += insert as usize;
let new_literals = core::mem::replace(literals, &mut []);
let _ = core::mem::replace(literals, &mut new_literals[(insert as usize)..]);
if distance == last_distance {
(*commands)[0] = 64u32;
let remainder = core::mem::replace(commands, &mut []);
let _ = core::mem::replace(commands, &mut remainder[1..]);
*num_commands += 1;
} else {
*num_commands += EmitDistance(distance as (u32), commands);
last_distance = distance;
}
*num_commands += EmitCopyLenLastDistance(matched, commands);
next_emit = ip_index;
if ip_index >= ip_limit {
goto_emit_remainder = 1i32;
{
{
break;
}
}
}
{
let mut input_bytes: u64;
let mut prev_hash: u32;
let cur_hash: u32;
if min_match == 4 {
input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index as (usize) - 3)..]);
cur_hash = HashBytesAtOffset(input_bytes, 3i32, shift, min_match);
prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(3usize) as (i32);
prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(2usize) as (i32);
prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(1usize) as (i32);
}else {
assert!(ip_index >= 5);
// could this be off the end FIXME
input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index as (usize) - 5)..]);
prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(5usize) as (i32);
prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(4usize) as (i32);
prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(3usize) as (i32);
assert!(ip_index >= 2);
input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index as (usize) - 2)..]);
cur_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(2usize) as (i32);
prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(1usize) as (i32);
}
candidate = table[(cur_hash as (usize))] as (usize);
table[(cur_hash as (usize))] = ip_index as (i32);
}
}
while ip_index.wrapping_sub(candidate) <=
(1usize << 18i32).wrapping_sub(16usize) as (isize) as (usize) &&
(IsMatch(&base_ip[(ip_index as (usize))..],
&base_ip[(candidate as (usize))..], min_match) != 0) {
let base_index: usize = ip_index;
let matched: usize = min_match
.wrapping_add(FindMatchLengthWithLimit(&base_ip[(candidate as (usize) + min_match)..],
&base_ip[(ip_index as (usize) + min_match)..],
ip_end.wrapping_sub(ip_index)
.wrapping_sub(min_match)));
ip_index = ip_index.wrapping_add(matched);
last_distance = base_index.wrapping_sub(candidate) as (i32);
0i32;
*num_commands += EmitCopyLen(matched, commands);
*num_commands += EmitDistance(last_distance as (u32), commands);
next_emit = ip_index;
if ip_index >= ip_limit {
goto_emit_remainder = 1i32;
{
{
break;
}
}
}
{
assert!(ip_index >= 5);
let mut input_bytes: u64;
let cur_hash: u32;
let mut prev_hash: u32;
if min_match == 4 {
input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index as (usize) - 3)..]);
cur_hash = HashBytesAtOffset(input_bytes, 3i32, shift, min_match);<|fim▁hole|> prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(1usize) as (i32);
} else {
input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index as (usize) - 5)..]);
prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(5usize) as (i32);
prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(4usize) as (i32);
prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(3usize) as (i32);
assert!(ip_index >= 2);
input_bytes = BROTLI_UNALIGNED_LOAD64(&base_ip[(ip_index as (usize) - 2)..]);
cur_hash = HashBytesAtOffset(input_bytes, 2i32, shift, min_match);
prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(2usize) as (i32);
prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(1usize) as (i32);
}
candidate = table[(cur_hash as (usize))] as (usize);
table[(cur_hash as (usize))] = ip_index as (i32);
}
}
if goto_emit_remainder == 0 {
next_hash = Hash(&base_ip[({
ip_index = ip_index.wrapping_add(1 as (usize));
ip_index
} as (usize))..],
shift, min_match);
}
}
}
0i32;
if next_emit < ip_end {
let insert: u32 = ip_end.wrapping_sub(next_emit) as (u32);
*num_commands += EmitInsertLen(insert, commands);
literals[..insert as usize].clone_from_slice(&base_ip[(next_emit as (usize))..
(next_emit + insert as usize)]);
let mut xliterals = core::mem::replace(literals, &mut []);
*literals = &mut core::mem::replace(&mut xliterals, &mut [])[(insert as usize)..];
*num_literals += insert as usize;
}
}
fn ShouldCompress(input: &[u8], input_size: usize, num_literals: usize) -> i32 {
let corpus_size: super::util::floatX = input_size as (super::util::floatX);
if num_literals as (super::util::floatX) < 0.98 as super::util::floatX * corpus_size {
1i32
} else {
let mut literal_histo: [u32; 256] = [0; 256];
let max_total_bit_cost: super::util::floatX = corpus_size * 8i32 as (super::util::floatX) * 0.98 as super::util::floatX / 43i32 as (super::util::floatX);
let mut i: usize;
i = 0usize;
while i < input_size {
{
let _rhs = 1;
let _lhs = &mut literal_histo[input[(i as (usize))] as (usize)];
*_lhs = (*_lhs).wrapping_add(_rhs as (u32));
}
i = i.wrapping_add(43usize);
}
if !!(BitsEntropy(&mut literal_histo[..], 256usize) < max_total_bit_cost) {
1i32
} else {
0i32
}
}
}
pub fn BrotliWriteBits(n_bits: usize, bits: u64, pos: &mut usize, array: &mut [u8]) {
let p = &mut array[((*pos >> 3i32) as (usize))..];
let mut v: u64 = p[0] as (u64);
v = v | bits << (*pos & 7);
BROTLI_UNALIGNED_STORE64(p, v);
*pos = (*pos).wrapping_add(n_bits);
}
pub fn BrotliStoreMetaBlockHeader(len: usize,
is_uncompressed: i32,
storage_ix: &mut usize,
storage: &mut [u8]) {
let mut nibbles: u64 = 6;
BrotliWriteBits(1, 0, storage_ix, storage);
if len <= (1u32 << 16i32) as (usize) {
nibbles = 4;
} else if len <= (1u32 << 20i32) as (usize) {
nibbles = 5;
}
BrotliWriteBits(2, nibbles.wrapping_sub(4), storage_ix, storage);
BrotliWriteBits(nibbles.wrapping_mul(4) as usize,
len.wrapping_sub(1) as u64,
storage_ix,
storage);
BrotliWriteBits(1usize, is_uncompressed as (u64), storage_ix, storage);
}
pub fn memcpy<T: Sized + Clone>(dst: &mut [T],
dst_offset: usize,
src: &[T],
src_offset: usize,
size_to_copy: usize) {
dst[dst_offset..(dst_offset + size_to_copy)].clone_from_slice(&src[src_offset..
(src_offset + size_to_copy)]);
}
fn BuildAndStoreCommandPrefixCode(histogram: &[u32],
depth: &mut [u8],
mut bits: &mut [u16],
storage_ix: &mut usize,
storage: &mut [u8]) {
let mut tree: [HuffmanTree; 129] = [NewHuffmanTree(0, 0, 0); 129];
let mut cmd_depth: [u8; 704] = [0; 704];
let mut cmd_bits: [u16; 64] = [0; 64];
BrotliCreateHuffmanTree(histogram, 64usize, 15i32, &mut tree[..], depth);
BrotliCreateHuffmanTree(&histogram[(64usize)..],
64usize,
14i32,
&mut tree[..],
&mut depth[(64usize)..]);
/* We have to jump through a few hoops here in order to compute
the command bits because the symbols are in a different order than in
the full alphabet. This looks complicated, but having the symbols
in this order in the command bits saves a few branches in the Emit*
functions. */
memcpy(&mut cmd_depth[..], 0, depth, 24, 24);
memcpy(&mut cmd_depth[..], 24, depth, 0, 8);
memcpy(&mut cmd_depth[..],
32i32 as (usize),
depth,
(48usize),
8usize);
memcpy(&mut cmd_depth[..],
40i32 as (usize),
depth,
(8usize),
8usize);
memcpy(&mut cmd_depth[..],
48i32 as (usize),
depth,
(56usize),
8usize);
memcpy(&mut cmd_depth[..],
56i32 as (usize),
depth,
(16usize),
8usize);
BrotliConvertBitDepthsToSymbols(&mut cmd_depth[..], 64usize, &mut cmd_bits[..]);
memcpy(&mut bits, 0, &cmd_bits[..], 24i32 as (usize), 16usize);
memcpy(&mut bits,
(8usize),
&cmd_bits[..],
40i32 as (usize),
8usize);
memcpy(&mut bits,
(16usize),
&cmd_bits[..],
56i32 as (usize),
8usize);
memcpy(&mut bits, (24usize), &cmd_bits[..], 0, 48usize);
memcpy(&mut bits,
(48usize),
&cmd_bits[..],
32i32 as (usize),
8usize);
memcpy(&mut bits,
(56usize),
&cmd_bits[..],
48i32 as (usize),
8usize);
BrotliConvertBitDepthsToSymbols(&mut depth[(64usize)..], 64usize, &mut bits[(64usize)..]);
{
let mut i: usize;
for item in cmd_depth[..64].iter_mut() {
*item = 0;
}
//memset(&mut cmd_depth[..], 0i32, 64usize);
memcpy(&mut cmd_depth[..], 0, depth, (24usize), 8usize);
memcpy(&mut cmd_depth[..],
64i32 as (usize),
depth,
(32usize),
8usize);
memcpy(&mut cmd_depth[..],
128i32 as (usize),
depth,
(40usize),
8usize);
memcpy(&mut cmd_depth[..],
192i32 as (usize),
depth,
(48usize),
8usize);
memcpy(&mut cmd_depth[..],
384i32 as (usize),
depth,
(56usize),
8usize);
i = 0usize;
while i < 8usize {
{
cmd_depth[(128usize).wrapping_add((8usize).wrapping_mul(i))] = depth[(i as (usize))];
cmd_depth[(256usize).wrapping_add((8usize).wrapping_mul(i))] = depth[i.wrapping_add(8)];
cmd_depth[(448usize).wrapping_add((8usize).wrapping_mul(i))] = depth[i.wrapping_add(16)];
}
i = i.wrapping_add(1 as (usize));
}
BrotliStoreHuffmanTree(&mut cmd_depth[..],
704usize,
&mut tree[..],
storage_ix,
storage);
}
BrotliStoreHuffmanTree(&mut depth[(64usize)..],
64usize,
&mut tree[..],
storage_ix,
storage);
}
fn StoreCommands<AllocHT: alloc::Allocator<HuffmanTree>>(mht: &mut AllocHT,
mut literals: &[u8],
num_literals: usize,
commands: &[u32],
num_commands: usize,
storage_ix: &mut usize,
storage: &mut [u8]) {
static kNumExtraBits: [u32; 128] =
[0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 1u32, 1u32, 2u32, 2u32, 3u32, 3u32, 4u32, 4u32, 5u32,
5u32, 6u32, 7u32, 8u32, 9u32, 10u32, 12u32, 14u32, 24u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32,
0u32, 0u32, 1u32, 1u32, 2u32, 2u32, 3u32, 3u32, 4u32, 4u32, 0u32, 0u32, 0u32, 0u32, 0u32,
0u32, 0u32, 0u32, 1u32, 1u32, 2u32, 2u32, 3u32, 3u32, 4u32, 4u32, 5u32, 5u32, 6u32, 7u32,
8u32, 9u32, 10u32, 24u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32, 0u32,
0u32, 0u32, 0u32, 0u32, 0u32, 1u32, 1u32, 2u32, 2u32, 3u32, 3u32, 4u32, 4u32, 5u32, 5u32,
6u32, 6u32, 7u32, 7u32, 8u32, 8u32, 9u32, 9u32, 10u32, 10u32, 11u32, 11u32, 12u32, 12u32,
13u32, 13u32, 14u32, 14u32, 15u32, 15u32, 16u32, 16u32, 17u32, 17u32, 18u32, 18u32, 19u32,
19u32, 20u32, 20u32, 21u32, 21u32, 22u32, 22u32, 23u32, 23u32, 24u32, 24u32];
static kInsertOffset: [u32; 24] = [0u32, 1u32, 2u32, 3u32, 4u32, 5u32, 6u32, 8u32, 10u32, 14u32,
18u32, 26u32, 34u32, 50u32, 66u32, 98u32, 130u32, 194u32,
322u32, 578u32, 1090u32, 2114u32, 6210u32, 22594u32];
let mut lit_depths: [u8; 256] = [0; 256];
let mut lit_bits: [u16; 256] = [0; 256]; // maybe return this instead
let mut lit_histo: [u32; 256] = [0; 256]; // maybe return this instead of init
let mut cmd_depths: [u8; 128] = [0; 128];
let mut cmd_bits: [u16; 128] = [0; 128];
let mut cmd_histo: [u32; 128] = [0; 128];
let mut i: usize;
i = 0usize;
while i < num_literals {
{
let _rhs = 1;
let _lhs = &mut lit_histo[literals[(i as (usize))] as (usize)];
*_lhs = (*_lhs).wrapping_add(_rhs as (u32));
}
i = i.wrapping_add(1 as (usize));
}
BrotliBuildAndStoreHuffmanTreeFast(mht,
&lit_histo[..],
num_literals,
8usize,
&mut lit_depths[..],
&mut lit_bits[..],
storage_ix,
storage);
i = 0usize;
while i < num_commands {
{
let code: u32 = commands[(i as (usize))] & 0xffu32;
0i32;
{
let _rhs = 1;
let _lhs = &mut cmd_histo[code as (usize)];
*_lhs = (*_lhs).wrapping_add(_rhs as (u32));
}
}
i = i.wrapping_add(1 as (usize));
}
{
let _rhs = 1i32;
let _lhs = &mut cmd_histo[1usize];
*_lhs = (*_lhs).wrapping_add(_rhs as (u32));
}
{
let _rhs = 1i32;
let _lhs = &mut cmd_histo[2usize];
*_lhs = (*_lhs).wrapping_add(_rhs as (u32));
}
{
let _rhs = 1i32;
let _lhs = &mut cmd_histo[64usize];
*_lhs = (*_lhs).wrapping_add(_rhs as (u32));
}
{
let _rhs = 1i32;
let _lhs = &mut cmd_histo[84usize];
*_lhs = (*_lhs).wrapping_add(_rhs as (u32));
}
BuildAndStoreCommandPrefixCode(&mut cmd_histo[..],
&mut cmd_depths[..],
&mut cmd_bits[..],
storage_ix,
storage);
i = 0usize;
while i < num_commands {
{
let cmd: u32 = commands[(i as (usize))];
let code: u32 = cmd & 0xffu32;
let extra: u32 = cmd >> 8i32;
0i32;
BrotliWriteBits(cmd_depths[code as (usize)] as (usize),
cmd_bits[code as (usize)] as (u64),
storage_ix,
storage);
BrotliWriteBits(kNumExtraBits[code as (usize)] as (usize),
extra as (u64),
storage_ix,
storage);
if code < 24u32 {
let insert: u32 = kInsertOffset[code as (usize)].wrapping_add(extra);
for literal in literals[..(insert as usize)].iter() {
let lit: u8 = *literal;
BrotliWriteBits(lit_depths[lit as (usize)] as (usize),
lit_bits[lit as (usize)] as (u64),
storage_ix,
storage);
}
literals = &literals[insert as usize..];
}
}
i = i.wrapping_add(1 as (usize));
}
}
fn EmitUncompressedMetaBlock(input: &[u8],
input_size: usize,
storage_ix: &mut usize,
storage: &mut [u8]) {
BrotliStoreMetaBlockHeader(input_size, 1i32, storage_ix, storage);
*storage_ix = (*storage_ix).wrapping_add(7u32 as (usize)) & !7u32 as (usize);
memcpy(storage,
((*storage_ix >> 3i32) as (usize)),
input,
0,
input_size);
*storage_ix = (*storage_ix).wrapping_add(input_size << 3i32);
storage[((*storage_ix >> 3i32) as (usize))] = 0i32 as (u8);
}
#[allow(unused_variables)]
#[inline(always)]
fn BrotliCompressFragmentTwoPassImpl<AllocHT:alloc::Allocator<HuffmanTree>>(m: &mut AllocHT,
base_ip: &[u8],
mut input_size: usize,
is_last: i32,
command_buf: &mut [u32],
literal_buf: &mut [u8],
table: &mut [i32],
table_bits: usize,
min_match: usize,
storage_ix: &mut usize,
storage: &mut [u8]){
let mut input_index: usize = 0usize;
while input_size > 0usize {
let block_size: usize = brotli_min_size_t(input_size, kCompressFragmentTwoPassBlockSize);
let mut num_literals: usize = 0;
let mut num_commands: usize = 0;
{
let mut literals = &mut literal_buf[..];
let mut commands = &mut command_buf[..];
CreateCommands(input_index,
block_size,
input_size,
base_ip,
table,
table_bits,
min_match,
&mut literals,
&mut num_literals,
&mut commands,
&mut num_commands);
}
if ShouldCompress(&base_ip[(input_index as (usize))..],
block_size,
num_literals) != 0 {
BrotliStoreMetaBlockHeader(block_size, 0i32, storage_ix, storage);
BrotliWriteBits(13usize, 0, storage_ix, storage);
StoreCommands(m,
literal_buf,
num_literals,
command_buf,
num_commands,
storage_ix,
storage);
} else {
EmitUncompressedMetaBlock(&base_ip[(input_index as (usize))..],
block_size,
storage_ix,
storage);
}
input_index = input_index.wrapping_add(block_size);
input_size = input_size.wrapping_sub(block_size);
}
}
macro_rules! compress_specialization {
($table_bits : expr, $fname: ident) => {
fn $fname<AllocHT:alloc::Allocator<HuffmanTree>>(mht: &mut AllocHT,
input: &[u8],
input_size: usize,
is_last: i32,
command_buf: &mut [u32],
literal_buf: &mut [u8],
table: &mut [i32],
storage_ix: &mut usize,
storage: &mut [u8]) {
let min_match = if $table_bits < 15 {4} else {6};
BrotliCompressFragmentTwoPassImpl(mht,
input,
input_size,
is_last,
command_buf,
literal_buf,
table,
$table_bits,
min_match,
storage_ix,
storage);
}
};
}
compress_specialization!(8, BrotliCompressFragmentTwoPassImpl8);
compress_specialization!(9, BrotliCompressFragmentTwoPassImpl9);
compress_specialization!(10, BrotliCompressFragmentTwoPassImpl10);
compress_specialization!(11, BrotliCompressFragmentTwoPassImpl11);
compress_specialization!(12, BrotliCompressFragmentTwoPassImpl12);
compress_specialization!(13, BrotliCompressFragmentTwoPassImpl13);
compress_specialization!(14, BrotliCompressFragmentTwoPassImpl14);
compress_specialization!(15, BrotliCompressFragmentTwoPassImpl15);
compress_specialization!(16, BrotliCompressFragmentTwoPassImpl16);
compress_specialization!(17, BrotliCompressFragmentTwoPassImpl17);
fn RewindBitPosition(new_storage_ix: usize, storage_ix: &mut usize, storage: &mut [u8]) {
let bitpos: usize = new_storage_ix & 7usize;
let mask: usize = (1u32 << bitpos).wrapping_sub(1u32) as (usize);
{
let _rhs = mask as (u8);
let _lhs = &mut storage[((new_storage_ix >> 3i32) as (usize))];
*_lhs = (*_lhs as (i32) & _rhs as (i32)) as (u8);
}
*storage_ix = new_storage_ix;
}
pub fn BrotliCompressFragmentTwoPass<AllocHT:alloc::Allocator<HuffmanTree>>(m: &mut AllocHT,
input: &[u8],
input_size: usize,
is_last: i32,
command_buf: &mut [u32],
literal_buf: &mut [u8],
table: &mut [i32],
table_size: usize,
storage_ix: &mut usize,
storage: &mut [u8]){
let initial_storage_ix: usize = *storage_ix;
let table_bits: usize = Log2FloorNonZero(table_size as u64) as (usize);
if table_bits == 8usize {
BrotliCompressFragmentTwoPassImpl8(m,
input,
input_size,
is_last,
command_buf,
literal_buf,
table,
storage_ix,
storage);
}
if table_bits == 9usize {
BrotliCompressFragmentTwoPassImpl9(m,
input,
input_size,
is_last,
command_buf,
literal_buf,
table,
storage_ix,
storage);
}
if table_bits == 10usize {
BrotliCompressFragmentTwoPassImpl10(m,
input,
input_size,
is_last,
command_buf,
literal_buf,
table,
storage_ix,
storage);
}
if table_bits == 11usize {
BrotliCompressFragmentTwoPassImpl11(m,
input,
input_size,
is_last,
command_buf,
literal_buf,
table,
storage_ix,
storage);
}
if table_bits == 12usize {
BrotliCompressFragmentTwoPassImpl12(m,
input,
input_size,
is_last,
command_buf,
literal_buf,
table,
storage_ix,
storage);
}
if table_bits == 13usize {
BrotliCompressFragmentTwoPassImpl13(m,
input,
input_size,
is_last,
command_buf,
literal_buf,
table,
storage_ix,
storage);
}
if table_bits == 14usize {
BrotliCompressFragmentTwoPassImpl14(m,
input,
input_size,
is_last,
command_buf,
literal_buf,
table,
storage_ix,
storage);
}
if table_bits == 15usize {
BrotliCompressFragmentTwoPassImpl15(m,
input,
input_size,
is_last,
command_buf,
literal_buf,
table,
storage_ix,
storage);
}
if table_bits == 16usize {
BrotliCompressFragmentTwoPassImpl16(m,
input,
input_size,
is_last,
command_buf,
literal_buf,
table,
storage_ix,
storage);
}
if table_bits == 17usize {
BrotliCompressFragmentTwoPassImpl17(m,
input,
input_size,
is_last,
command_buf,
literal_buf,
table,
storage_ix,
storage);
}
if (*storage_ix).wrapping_sub(initial_storage_ix) > (31usize).wrapping_add(input_size << 3i32) {
RewindBitPosition(initial_storage_ix, storage_ix, storage);
EmitUncompressedMetaBlock(input, input_size, storage_ix, storage);
}
if is_last != 0 {
BrotliWriteBits(1, 1, storage_ix, storage);
BrotliWriteBits(1, 1, storage_ix, storage);
*storage_ix = (*storage_ix).wrapping_add(7u32 as (usize)) & !7u32 as (usize);
}
}<|fim▁end|> | prev_hash = HashBytesAtOffset(input_bytes, 0i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(3usize) as (i32);
prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift, min_match);
table[(prev_hash as (usize))] = ip_index.wrapping_sub(2usize) as (i32); |
<|file_name|>OptRuntime.java<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.javascript.optimizer;
import org.mozilla.javascript.ArrowFunction;
import org.mozilla.javascript.Callable;
import org.mozilla.javascript.ConsString;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.ContextFactory;
import org.mozilla.javascript.ES6Generator;
import org.mozilla.javascript.Function;
import org.mozilla.javascript.JavaScriptException;
import org.mozilla.javascript.NativeFunction;
import org.mozilla.javascript.NativeGenerator;
import org.mozilla.javascript.NativeIterator;
import org.mozilla.javascript.Script;
import org.mozilla.javascript.ScriptRuntime;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;
import org.mozilla.javascript.Undefined;
/**
* <p>OptRuntime class.</p>
*
*
*
*/
public final class OptRuntime extends ScriptRuntime
{
/** Constant <code>oneObj</code> */
public static final Double oneObj = Double.valueOf(1.0);
/** Constant <code>minusOneObj</code> */
public static final Double minusOneObj = Double.valueOf(-1.0);
/**
* Implement ....() call shrinking optimizer code.
*
* @param fun a {@link org.mozilla.javascript.Callable} object.
* @param thisObj a {@link org.mozilla.javascript.Scriptable} object.
* @param cx a {@link org.mozilla.javascript.Context} object.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @return a {@link java.lang.Object} object.
*/
public static Object call0(Callable fun, Scriptable thisObj,
Context cx, Scriptable scope)
{
return fun.call(cx, scope, thisObj, ScriptRuntime.emptyArgs);
}
/**
* Implement ....(arg) call shrinking optimizer code.
*
* @param fun a {@link org.mozilla.javascript.Callable} object.
* @param thisObj a {@link org.mozilla.javascript.Scriptable} object.
* @param arg0 a {@link java.lang.Object} object.
* @param cx a {@link org.mozilla.javascript.Context} object.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @return a {@link java.lang.Object} object.
*/
public static Object call1(Callable fun, Scriptable thisObj, Object arg0,
Context cx, Scriptable scope)
{
return fun.call(cx, scope, thisObj, new Object[] { arg0 } );
}
/**
* Implement ....(arg0, arg1) call shrinking optimizer code.
*
* @param fun a {@link org.mozilla.javascript.Callable} object.
* @param thisObj a {@link org.mozilla.javascript.Scriptable} object.
* @param arg0 a {@link java.lang.Object} object.
* @param arg1 a {@link java.lang.Object} object.
* @param cx a {@link org.mozilla.javascript.Context} object.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @return a {@link java.lang.Object} object.
*/
public static Object call2(Callable fun, Scriptable thisObj,
Object arg0, Object arg1,
Context cx, Scriptable scope)
{
return fun.call(cx, scope, thisObj, new Object[] { arg0, arg1 });
}
/**
* Implement ....(arg0, arg1, ...) call shrinking optimizer code.
*
* @param fun a {@link org.mozilla.javascript.Callable} object.
* @param thisObj a {@link org.mozilla.javascript.Scriptable} object.
* @param args an array of {@link java.lang.Object} objects.
* @param cx a {@link org.mozilla.javascript.Context} object.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @return a {@link java.lang.Object} object.
*/
public static Object callN(Callable fun, Scriptable thisObj,
Object[] args,
Context cx, Scriptable scope)
{
return fun.call(cx, scope, thisObj, args);
}
/**
* Implement name(args) call shrinking optimizer code.
*
* @param args an array of {@link java.lang.Object} objects.
* @param name a {@link java.lang.String} object.
* @param cx a {@link org.mozilla.javascript.Context} object.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @return a {@link java.lang.Object} object.
*/
public static Object callName(Object[] args, String name,
Context cx, Scriptable scope)
{
Callable f = getNameFunctionAndThis(name, cx, scope);
Scriptable thisObj = lastStoredScriptable(cx);
return f.call(cx, scope, thisObj, args);
}
/**
* Implement name() call shrinking optimizer code.
*
* @param name a {@link java.lang.String} object.
* @param cx a {@link org.mozilla.javascript.Context} object.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @return a {@link java.lang.Object} object.
*/
public static Object callName0(String name,
Context cx, Scriptable scope)
{
Callable f = getNameFunctionAndThis(name, cx, scope);
Scriptable thisObj = lastStoredScriptable(cx);
return f.call(cx, scope, thisObj, ScriptRuntime.emptyArgs);
}
/**
* Implement x.property() call shrinking optimizer code.
*
* @param value a {@link java.lang.Object} object.
* @param property a {@link java.lang.String} object.
* @param cx a {@link org.mozilla.javascript.Context} object.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @return a {@link java.lang.Object} object.
*/
public static Object callProp0(Object value, String property,
Context cx, Scriptable scope)
{
Callable f = getPropFunctionAndThis(value, property, cx, scope);
Scriptable thisObj = lastStoredScriptable(cx);
return f.call(cx, scope, thisObj, ScriptRuntime.emptyArgs);
}
/**
* <p>add.</p>
*
* @param val1 a {@link java.lang.Object} object.
* @param val2 a double.
* @return a {@link java.lang.Object} object.
*/
public static Object add(Object val1, double val2)
{
if (val1 instanceof Scriptable)
val1 = ((Scriptable) val1).getDefaultValue(null);
if (!(val1 instanceof CharSequence))
return wrapDouble(toNumber(val1) + val2);
return new ConsString((CharSequence)val1, toString(val2));
}
/** {@inheritDoc} */
public static Object add(double val1, Object val2)
{
if (val2 instanceof Scriptable)
val2 = ((Scriptable) val2).getDefaultValue(null);
if (!(val2 instanceof CharSequence))
return wrapDouble(toNumber(val2) + val1);
return new ConsString(toString(val1), (CharSequence)val2);
}
/** {@inheritDoc} */
@Deprecated
public static Object elemIncrDecr(Object obj, double index,
Context cx, int incrDecrMask)
{
return elemIncrDecr(obj, index, cx, getTopCallScope(cx), incrDecrMask);
}
/** {@inheritDoc} */
public static Object elemIncrDecr(Object obj, double index,
Context cx, Scriptable scope,
int incrDecrMask)
{
return ScriptRuntime.elemIncrDecr(obj, Double.valueOf(index), cx, scope,
incrDecrMask);
}
/**
* <p>padStart.</p>
*
* @param currentArgs an array of {@link java.lang.Object} objects.
* @param count a int.
* @return an array of {@link java.lang.Object} objects.
*/
public static Object[] padStart(Object[] currentArgs, int count) {
Object[] result = new Object[currentArgs.length + count];
System.arraycopy(currentArgs, 0, result, count, currentArgs.length);
return result;
}
/**
* <p>initFunction.</p>
*
* @param fn a {@link org.mozilla.javascript.NativeFunction} object.
* @param functionType a int.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @param cx a {@link org.mozilla.javascript.Context} object.
*/
public static void initFunction(NativeFunction fn, int functionType,
Scriptable scope, Context cx)
{
ScriptRuntime.initFunction(cx, scope, fn, functionType, false);
}
/**
* <p>bindThis.</p>
*
* @param fn a {@link org.mozilla.javascript.NativeFunction} object.
* @param cx a {@link org.mozilla.javascript.Context} object.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @param thisObj a {@link org.mozilla.javascript.Scriptable} object.
* @return a {@link org.mozilla.javascript.Function} object.
*/
public static Function bindThis(NativeFunction fn, Context cx, Scriptable scope, Scriptable thisObj)
{
return new ArrowFunction(cx, scope, fn, thisObj);
}
/** {@inheritDoc} */
public static Object callSpecial(Context cx, Callable fun,
Scriptable thisObj, Object[] args,
Scriptable scope,
Scriptable callerThis, int callType,
String fileName, int lineNumber)
{
return ScriptRuntime.callSpecial(cx, fun, thisObj, args, scope,
callerThis, callType,
fileName, lineNumber);
}
/**
* <p>newObjectSpecial.</p>
*
* @param cx a {@link org.mozilla.javascript.Context} object.
* @param fun a {@link java.lang.Object} object.
* @param args an array of {@link java.lang.Object} objects.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @param callerThis a {@link org.mozilla.javascript.Scriptable} object.
* @param callType a int.
* @return a {@link java.lang.Object} object.
*/
public static Object newObjectSpecial(Context cx, Object fun,
Object[] args, Scriptable scope,
Scriptable callerThis, int callType)
{
return ScriptRuntime.newSpecial(cx, fun, args, scope, callType);
}
/**
* <p>wrapDouble.</p>
*
* @param num a double.
* @return a {@link java.lang.Double} object.
*/
public static Double wrapDouble(double num)
{
if (num == 0.0) {
if (1 / num > 0) {
// +0.0
return zeroObj;
}
} else if (num == 1.0) {
return oneObj;
} else if (num == -1.0) {
return minusOneObj;
} else if (Double.isNaN(num)) {
return NaNobj;
}
return Double.valueOf(num);
}
static String encodeIntArray(int[] array)
{
// XXX: this extremely inefficient for small integers
if (array == null) { return null; }
int n = array.length;
char[] buffer = new char[1 + n * 2];
buffer[0] = 1;
for (int i = 0; i != n; ++i) {
int value = array[i];
int shift = 1 + i * 2;
buffer[shift] = (char)(value >>> 16);
buffer[shift + 1] = (char)value;
}
return new String(buffer);
}
<|fim▁hole|> private static int[] decodeIntArray(String str, int arraySize)
{
// XXX: this extremely inefficient for small integers
if (arraySize == 0) {
if (str != null) throw new IllegalArgumentException();
return null;
}
if (str.length() != 1 + arraySize * 2 && str.charAt(0) != 1) {
throw new IllegalArgumentException();
}
int[] array = new int[arraySize];
for (int i = 0; i != arraySize; ++i) {
int shift = 1 + i * 2;
array[i] = (str.charAt(shift) << 16) | str.charAt(shift + 1);
}
return array;
}
/**
* <p>newArrayLiteral.</p>
*
* @param objects an array of {@link java.lang.Object} objects.
* @param encodedInts a {@link java.lang.String} object.
* @param skipCount a int.
* @param cx a {@link org.mozilla.javascript.Context} object.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @return a {@link org.mozilla.javascript.Scriptable} object.
*/
public static Scriptable newArrayLiteral(Object[] objects,
String encodedInts,
int skipCount,
Context cx,
Scriptable scope)
{
int[] skipIndexces = decodeIntArray(encodedInts, skipCount);
return newArrayLiteral(objects, skipIndexces, cx, scope);
}
/**
* <p>main.</p>
*
* @param script a {@link org.mozilla.javascript.Script} object.
* @param args an array of {@link java.lang.String} objects.
*/
public static void main(final Script script, final String[] args)
{
ContextFactory.getGlobal().call(cx -> {
ScriptableObject global = getGlobal(cx);
// get the command line arguments and define "arguments"
// array in the top-level object
Object[] argsCopy = new Object[args.length];
System.arraycopy(args, 0, argsCopy, 0, args.length);
Scriptable argsObj = cx.newArray(global, argsCopy);
global.defineProperty("arguments", argsObj,
ScriptableObject.DONTENUM);
script.exec(cx, global);
return null;
});
}
/**
* <p>throwStopIteration.</p>
*
* @param scope a {@link java.lang.Object} object.
* @param genState a {@link java.lang.Object} object.
*/
public static void throwStopIteration(Object scope, Object genState) {
Object value = getGeneratorReturnValue(genState);
Object si =
(value == Undefined.instance) ?
NativeIterator.getStopIterationObject((Scriptable)scope) :
new NativeIterator.StopIteration(value);
throw new JavaScriptException(si, "", 0);
}
/**
* <p>createNativeGenerator.</p>
*
* @param funObj a {@link org.mozilla.javascript.NativeFunction} object.
* @param scope a {@link org.mozilla.javascript.Scriptable} object.
* @param thisObj a {@link org.mozilla.javascript.Scriptable} object.
* @param maxLocals a int.
* @param maxStack a int.
* @return a {@link org.mozilla.javascript.Scriptable} object.
*/
public static Scriptable createNativeGenerator(NativeFunction funObj,
Scriptable scope,
Scriptable thisObj,
int maxLocals,
int maxStack)
{
GeneratorState gs = new GeneratorState(thisObj, maxLocals, maxStack);
if (Context.getCurrentContext().getLanguageVersion() >= Context.VERSION_ES6) {
return new ES6Generator(scope, funObj, gs);
} else {
return new NativeGenerator(scope, funObj, gs);
}
}
/**
* <p>getGeneratorStackState.</p>
*
* @param obj a {@link java.lang.Object} object.
* @return an array of {@link java.lang.Object} objects.
*/
public static Object[] getGeneratorStackState(Object obj) {
GeneratorState rgs = (GeneratorState) obj;
if (rgs.stackState == null)
rgs.stackState = new Object[rgs.maxStack];
return rgs.stackState;
}
/**
* <p>getGeneratorLocalsState.</p>
*
* @param obj a {@link java.lang.Object} object.
* @return an array of {@link java.lang.Object} objects.
*/
public static Object[] getGeneratorLocalsState(Object obj) {
GeneratorState rgs = (GeneratorState) obj;
if (rgs.localsState == null)
rgs.localsState = new Object[rgs.maxLocals];
return rgs.localsState;
}
/**
* <p>setGeneratorReturnValue.</p>
*
* @param obj a {@link java.lang.Object} object.
* @param val a {@link java.lang.Object} object.
*/
public static void setGeneratorReturnValue(Object obj, Object val) {
GeneratorState rgs = (GeneratorState) obj;
rgs.returnValue = val;
}
/**
* <p>getGeneratorReturnValue.</p>
*
* @param obj a {@link java.lang.Object} object.
* @return a {@link java.lang.Object} object.
*/
public static Object getGeneratorReturnValue(Object obj) {
GeneratorState rgs = (GeneratorState) obj;
return (rgs.returnValue == null ? Undefined.instance : rgs.returnValue);
}
public static class GeneratorState {
static final String CLASS_NAME =
"org/mozilla/javascript/optimizer/OptRuntime$GeneratorState";
@SuppressWarnings("unused")
public int resumptionPoint;
static final String resumptionPoint_NAME = "resumptionPoint";
static final String resumptionPoint_TYPE = "I";
@SuppressWarnings("unused")
public Scriptable thisObj;
static final String thisObj_NAME = "thisObj";
static final String thisObj_TYPE =
"Lorg/mozilla/javascript/Scriptable;";
Object[] stackState;
Object[] localsState;
int maxLocals;
int maxStack;
Object returnValue;
GeneratorState(Scriptable thisObj, int maxLocals, int maxStack) {
this.thisObj = thisObj;
this.maxLocals = maxLocals;
this.maxStack = maxStack;
}
}
}<|fim▁end|> | |
<|file_name|>words.service.spec.ts<|end_file_name|><|fim▁begin|>import {
inject,
TestBed
} from '@angular/core/testing';
import { Component } from '@angular/core';
import {
BaseRequestOptions,
ConnectionBackend,
Http<|fim▁hole|>} from '@angular/http';
import { MockBackend } from '@angular/http/testing';
import { Words } from './words.service';
import { Sentences } from '../sentences';
describe('Words', () => {
beforeEach(() => TestBed.configureTestingModule({
providers: [
BaseRequestOptions,
MockBackend,
{
provide: Http,
useFactory: function(backend: ConnectionBackend, defaultOptions: BaseRequestOptions) {
return new Http(backend, defaultOptions);
},
deps: [MockBackend, BaseRequestOptions]
},
Words,
Sentences
]}));
it('should have http', inject([ Words ], (words: Words) => {
expect(!!words.http).toEqual(true);
}));
});<|fim▁end|> | |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use std::error::Error as StdError;
use std::string;
use std::{fmt, io};
/// The errors that can arise while parsing a JSON stream.
#[derive(Clone, Copy, PartialEq)]
pub enum ErrorCode {
InvalidSyntax,
InvalidNumber,
EOFWhileParsingObject,
EOFWhileParsingArray,
EOFWhileParsingValue,
EOFWhileParsingString,
KeyMustBeAString,
ExpectedColon,
TrailingCharacters,
TrailingComma,
InvalidEscape,
InvalidUnicodeCodePoint,
LoneLeadingSurrogateInHexEscape,
UnexpectedEndOfHexEscape,
UnrecognizedHex,
NotFourDigit,
ControlCharacterInString,
NotUtf8,
}
#[derive(Debug)]
pub enum ParserError {
/// msg, line, col
SyntaxError(ErrorCode, usize, usize),
IoError(io::Error),
}
impl PartialEq for ParserError {
fn eq(&self, other: &ParserError) -> bool {
match (self, other) {
(&ParserError::SyntaxError(msg0, line0, col0), &ParserError::SyntaxError(msg1, line1, col1)) =>
msg0 == msg1 && line0 == line1 && col0 == col1,
(&ParserError::IoError(_), _) => false,
(_, &ParserError::IoError(_)) => false,
}
}
}
/// Returns a readable error string for a given error code.
pub fn error_str(error: ErrorCode) -> &'static str {
match error {
ErrorCode::InvalidSyntax => "invalid syntax",
ErrorCode::InvalidNumber => "invalid number",
ErrorCode::EOFWhileParsingObject => "EOF While parsing object",
ErrorCode::EOFWhileParsingArray => "EOF While parsing array",
ErrorCode::EOFWhileParsingValue => "EOF While parsing value",
ErrorCode::EOFWhileParsingString => "EOF While parsing string",
ErrorCode::KeyMustBeAString => "key must be a string",
ErrorCode::ExpectedColon => "expected `:`",
ErrorCode::TrailingCharacters => "trailing characters",
ErrorCode::TrailingComma => "trailing comma",
ErrorCode::InvalidEscape => "invalid escape",
ErrorCode::UnrecognizedHex => "invalid \\u{ esc}ape (unrecognized hex)",
ErrorCode::NotFourDigit => "invalid \\u{ esc}ape (not four digits)",
ErrorCode::ControlCharacterInString => "unescaped control character in string",
ErrorCode::NotUtf8 => "contents not utf-8",
ErrorCode::InvalidUnicodeCodePoint => "invalid Unicode code point",
ErrorCode::LoneLeadingSurrogateInHexEscape => "lone leading surrogate in hex escape",
ErrorCode::UnexpectedEndOfHexEscape => "unexpected end of hex escape",
}
}
#[derive(PartialEq, Debug)]
pub enum DecoderError {
ParseError(ParserError),
ExpectedError(string::String, string::String),
MissingFieldError(string::String),
UnknownVariantError(string::String),
ApplicationError(string::String),
EOF,
}
#[derive(Copy, Debug)]
pub enum EncoderError {
FmtError(fmt::Error),
BadHashmapKey,
}
impl Clone for EncoderError {
fn clone(&self) -> Self { *self }
}
impl fmt::Debug for ErrorCode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
error_str(*self).fmt(f)<|fim▁hole|> }
}
impl StdError for DecoderError {
fn description(&self) -> &str { "decoder error" }
fn cause(&self) -> Option<&StdError> {
match *self {
DecoderError::ParseError(ref e) => Some(e),
_ => None,
}
}
}
impl fmt::Display for DecoderError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self, f)
}
}
impl From<ParserError> for DecoderError {
fn from(err: ParserError) -> DecoderError {
DecoderError::ParseError(From::from(err))
}
}
impl StdError for ParserError {
fn description(&self) -> &str { "failed to parse json" }
}
impl fmt::Display for ParserError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self, f)
}
}
impl From<io::Error> for ParserError {
fn from(err: io::Error) -> ParserError {
ParserError::IoError(err)
}
}
impl StdError for EncoderError {
fn description(&self) -> &str { "encoder error" }
}
impl fmt::Display for EncoderError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self, f)
}
}
impl From<fmt::Error> for EncoderError {
fn from(err: fmt::Error) -> EncoderError { EncoderError::FmtError(err) }
}<|fim▁end|> | |
<|file_name|>test_scripts.py<|end_file_name|><|fim▁begin|>"""
Test plugin views.
"""<|fim▁hole|>from django.utils import simplejson as json
from django.conf import settings
from django.test.client import Client
from django.contrib.auth.models import User
from ocradmin.core.tests import testutils
from nodetree import script, node
import numpy
from mock import patch
VALID_SCRIPTDIR = "nodelib/scripts/valid"
INVALID_SCRIPTDIR = "nodelib/scripts/invalid"
from ocradmin.nodelib import cache
class ViewsTest(TestCase):
fixtures = [
"presets/fixtures/test_fixtures.json",
"ocrmodels/fixtures/test_fixtures.json"]
def setUp(self):
"""
Setup OCR tests. Creates a test user.
"""
testutils.symlink_model_fixtures()
self.scripts = {}
for fname in os.listdir(VALID_SCRIPTDIR):
if fname.endswith("json"):
with open(os.path.join(VALID_SCRIPTDIR, fname), "r") as f:
self.scripts[fname] = json.load(f)
for fname in os.listdir(INVALID_SCRIPTDIR):
if fname.endswith("json"):
with open(os.path.join(INVALID_SCRIPTDIR, fname), "r") as f:
self.scripts[fname] = json.load(f)
self.testuser = User.objects.create_user("test_user", "test@testing.com", "testpass")
self.client = Client()
self.client.login(username="test_user", password="testpass")
def tearDown(self):
"""
Revert any changes.
"""
#cache.PersistantFileCacher = self.old_cacher
def test_binarise_script(self):
"""
Test a script that should return image data, i.e.
a path to a DZI file.
"""
self._run_script("binarize.json", "SUCCESS", "image", ["output"])
def test_segment_script(self):
"""
Test a script that should return line image geometry.
"""
self._run_script("segment.json", "SUCCESS", "pseg", ["input", "lines"])
def test_ocropus_script(self):
"""
Test a script that should return transcript data.
"""
self._run_script("ocropus.json", "SUCCESS", "hocr", ["data"])
def test_tesseract_native_seg_script(self):
"""
Test a script that should return transcript data.
"""
self._run_script("tesseract_native_seg.json", "SUCCESS", "hocr", ["data"])
def test_tesseract_script(self):
"""
Test a script that should return transcript data.
"""
self._run_script("tesseract.json", "SUCCESS", "hocr", ["data"])
def test_cuneiform_script(self):
"""
Test a script that should return transcript data.
"""
self._run_script("cuneiform.json", "SUCCESS", "hocr", ["data"])
def test_evaluation_script(self):
"""
Test a script that should return transcript data.
"""
self._run_script("evaluation.json", "SUCCESS", "text", ["data"])
def test_invalid_path(self):
"""
Test a script that should return a node error.
"""
script = self.scripts.get("invalid_filein_path.json")
self.assertIsNotNone(script)
r = self.client.post("/presets/run/", dict(
script=json.dumps(script)))
content = json.loads(r.content)
for field in ["status", "errors"]:
self.assertIn(field, content, "No '%s' field in content" % field)
expectedstatus = "VALIDATION"
self.assertEqual(expectedstatus,
content["status"], "Status field is not '%s'" % expectedstatus)
self.assertIn("filein1", content["errors"], "'filein1' not in errors field" )
@patch(settings.NODETREE_PERSISTANT_CACHER, cache.TestMockCacher)
def _run_script(self, scriptname, expectedstatus, expectedtype, expecteddatafields):
"""
Run a script and assert the results resemble what we expect.
"""
script = self.scripts.get(scriptname)
self.assertIsNotNone(script)
r = self.client.post("/presets/run/", dict(script=json.dumps(script)))
content = json.loads(r.content)
for field in ["status", "task_id", "results"]:
self.assertIn(field, content, "No '%s' field in content" % field)
self.assertEqual(expectedstatus,
content["status"], "Status field is not '%s'" % expectedstatus)
for field in ["type"]:
self.assertIn(field, content["results"], "No '%s' field in content results" % field)
self.assertEqual(expectedtype,
content["results"]["type"], "Type field is not '%s'" % expectedtype)
for field in expecteddatafields:
self.assertIn(field, content["results"], "No '%s' field in content results" % field)
return content<|fim▁end|> |
import os
import glob
from django.test import TestCase |
<|file_name|>from-email-address.js<|end_file_name|><|fim▁begin|>Fox.define('views/email/fields/from-email-address', 'views/fields/link', function (Dep) {<|fim▁hole|>
listTemplate: 'email/fields/from-email-address/detail',
detailTemplate: 'email/fields/from-email-address/detail',
});
});<|fim▁end|> |
return Dep.extend({ |
<|file_name|>threeSum2.cpp<|end_file_name|><|fim▁begin|>// Time Complexity: O(n^2)
// Space Complexity: O(1)
class Solution {
public:
vector<vector<int> > threeSum(vector<int> &num) {
vector<vector<int> > ans;
const int target = 0;
sort(num.begin(), num.end());
auto last = num.rend();
for(auto a = num.rbegin(); a < prev(last, 2); ++a) {
if(a > num.rbegin() && *a == *(a - 1))
continue;
auto b = next(a);
auto c = prev(last);
while(b < c) {
if(b > next(a) && *b == *(b - 1)) {
++b;
}
else if(c < prev(last) && *c == *(c + 1)) {
--c;
}
else {
const int sum = *a + *b + *c;
if(sum < target)
--c;
else if(sum > target)
++b;
else {
ans.push_back({ *c, *b, *a});
++b;<|fim▁hole|> }
}
}
}
return ans;
}
};<|fim▁end|> | --c; |
<|file_name|>PathCommand.java<|end_file_name|><|fim▁begin|>package seedu.jobs.logic.commands;
import java.io.IOException;
import com.google.common.eventbus.Subscribe;
import seedu.jobs.commons.core.EventsCenter;
import seedu.jobs.commons.events.storage.SavePathChangedEventException;
/* Change save path
*/
//@@author A0130979U
public class PathCommand extends Command {
public static final String COMMAND_WORD = "path";
public static final String MESSAGE_USAGE = COMMAND_WORD + ": Change save path. "
+ "Parameters: path [filename] \n"
+ "Example: " + COMMAND_WORD
+ " taskbook.xml";
private String path;
private boolean isValid;
public static final String MESSAGE_SUCCESS = "Save path has been successfully updated \n";
public static final String MESSAGE_INVALID_FILE_PATH = "This path is invalid";
public PathCommand(String path) {
this.path = path;
this.isValid = true;
EventsCenter.getInstance().registerHandler(this);
}
@Override
public CommandResult execute() throws IOException {
assert model != null;<|fim▁hole|> model.changePath(path);
if (!isValid) {
throw new IOException(MESSAGE_INVALID_FILE_PATH);
}
return new CommandResult(String.format(MESSAGE_SUCCESS));
}
@Subscribe
public void handleSavePathChangedEventException(SavePathChangedEventException event) {
isValid = false;
}
}
//@@author<|fim▁end|> | |
<|file_name|>p3_backward_test.py<|end_file_name|><|fim▁begin|>import unittest
from stomp import backward3
class TestBackward3(unittest.TestCase):
def test_pack_mixed_string_and_bytes(self):
lines = ['SEND', '\n', 'header1:test', '\u6771']
self.assertEqual(backward3.encode(backward3.pack(lines)),
b'SEND\nheader1:test\xe6\x9d\xb1')
lines = ['SEND', '\n', 'header1:test', b'\xe6\x9d\xb1']
self.assertEqual(backward3.encode(backward3.pack(lines)),
b'SEND\nheader1:test\xe6\x9d\xb1')
def test_decode(self):
self.assertTrue(backward3.decode(None) is None)
self.assertEqual('test', backward3.decode(b'test'))
<|fim▁hole|> self.assertEqual(b'test', backward3.encode('test'))
self.assertEqual(b'test', backward3.encode(b'test'))
self.assertRaises(TypeError, backward3.encode, None)<|fim▁end|> | def test_encode(self):
|
<|file_name|>VariantShortcuts.hpp<|end_file_name|><|fim▁begin|>// ArduinoJson - arduinojson.org
// Copyright Benoit Blanchon 2014-2019
// MIT License
#pragma once
#include "../Array/ArrayShortcuts.hpp"
#include "../Object/ObjectShortcuts.hpp"
namespace ARDUINOJSON_NAMESPACE {
<|fim▁hole|> public ArrayShortcuts<TVariant> {
public:
using ArrayShortcuts<TVariant>::createNestedArray;
using ArrayShortcuts<TVariant>::createNestedObject;
using ArrayShortcuts<TVariant>::operator[];
using ObjectShortcuts<TVariant>::createNestedArray;
using ObjectShortcuts<TVariant>::createNestedObject;
using ObjectShortcuts<TVariant>::operator[];
};
} // namespace ARDUINOJSON_NAMESPACE<|fim▁end|> | template <typename TVariant>
class VariantShortcuts : public ObjectShortcuts<TVariant>, |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/*
<|fim▁hole|>
*/
if(OS_IOS){
// NappSlideMenu
var NappSlideMenu = require('dk.napp.slidemenu');
var drawerIphone = NappSlideMenu.createSlideMenuWindow({
centerWindow: $.navWindow,
leftWindow: Alloy.createController('menu').getView(),
leftLedge: 50
});
// Configure navigation and init app
Alloy.Globals.nav.setIphoneDrawer(drawerIphone);
Alloy.Globals.nav.setParentWindow($.firstWindow);
Alloy.Globals.nav.openCenter('Feed', 'feed', false);
Alloy.Globals.nav.init();
function openMenu(){
drawerIphone.toggleLeftView();
}
}
/*
------
ANDROID
------
*/
else if(OS_ANDROID){
// Ti.DrawerLayout
var TiDrawerLayout = require('com.tripvi.drawerlayout');
var drawer = TiDrawerLayout.createDrawer({
leftView: Alloy.createController('menu').getView(),
leftDrawerWidth: "280dp",
width: Ti.UI.FILL,
height: Ti.UI.FILL
});
// Configure navigation and init app
Alloy.Globals.nav.setAppName('My App Name');
Alloy.Globals.nav.setAndroidDrawer(drawer);
Alloy.Globals.nav.setParentWindow($.index);
Alloy.Globals.nav.openCenter('Feed', 'feed', false);
Alloy.Globals.nav.init();
}<|fim▁end|> | ------
IOS
------ |
<|file_name|>invalid.go<|end_file_name|><|fim▁begin|><|fim▁hole|>import "vend/x/invalid/vendor/foo"<|fim▁end|> | package invalid
|
<|file_name|>eventtarget.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::callback::CallbackContainer;
use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;
use dom::bindings::codegen::Bindings::EventListenerBinding::EventListener;
use dom::bindings::error::{Fallible, InvalidState};
use dom::bindings::js::JSRef;
use dom::bindings::trace::Traceable;
use dom::bindings::utils::{Reflectable, Reflector};
use dom::event::Event;
use dom::eventdispatcher::dispatch_event;
use dom::node::NodeTypeId;
use dom::xmlhttprequest::XMLHttpRequestId;
use dom::virtualmethods::VirtualMethods;
use js::jsapi::{JS_CompileUCFunction, JS_GetFunctionObject, JS_CloneFunctionObject};
use js::jsapi::{JSContext, JSObject};
use servo_util::str::DOMString;
use libc::{c_char, size_t};
use std::cell::RefCell;
use std::ptr;
use url::Url;
use std::collections::hashmap::HashMap;
#[deriving(PartialEq,Encodable)]
pub enum ListenerPhase {
Capturing,
Bubbling,
}
#[deriving(PartialEq,Encodable)]
pub enum EventTargetTypeId {
NodeTargetTypeId(NodeTypeId),
WindowTypeId,
XMLHttpRequestTargetTypeId(XMLHttpRequestId)
}
#[deriving(PartialEq, Encodable)]
pub enum EventListenerType {
Additive(EventListener),
Inline(EventListener),
}
impl EventListenerType {
fn get_listener(&self) -> EventListener {
match *self {
Additive(listener) | Inline(listener) => listener
}
}
}
#[deriving(PartialEq,Encodable)]
pub struct EventListenerEntry {
pub phase: ListenerPhase,
pub listener: EventListenerType
}
#[deriving(Encodable)]
pub struct EventTarget {
pub type_id: EventTargetTypeId,
reflector_: Reflector,
handlers: Traceable<RefCell<HashMap<DOMString, Vec<EventListenerEntry>>>>,
}
impl EventTarget {
pub fn new_inherited(type_id: EventTargetTypeId) -> EventTarget {
EventTarget {
type_id: type_id,
reflector_: Reflector::new(),
handlers: Traceable::new(RefCell::new(HashMap::new())),
}
}
pub fn get_listeners(&self, type_: &str) -> Option<Vec<EventListener>> {
self.handlers.deref().borrow().find_equiv(&type_).map(|listeners| {
listeners.iter().map(|entry| entry.listener.get_listener()).collect()
})
}
pub fn get_listeners_for(&self, type_: &str, desired_phase: ListenerPhase)
-> Option<Vec<EventListener>> {
self.handlers.deref().borrow().find_equiv(&type_).map(|listeners| {
let filtered = listeners.iter().filter(|entry| entry.phase == desired_phase);
filtered.map(|entry| entry.listener.get_listener()).collect()
})
}
}
pub trait EventTargetHelpers {
fn dispatch_event_with_target<'a>(&self,
target: Option<JSRef<'a, EventTarget>>,
event: &JSRef<Event>) -> Fallible<bool>;
fn set_inline_event_listener(&self,
ty: DOMString,
listener: Option<EventListener>);
fn get_inline_event_listener(&self, ty: DOMString) -> Option<EventListener>;
fn set_event_handler_uncompiled(&self,
cx: *mut JSContext,
url: Url,
scope: *mut JSObject,
ty: &str,
source: DOMString);
fn set_event_handler_common<T: CallbackContainer>(&self, ty: &str,
listener: Option<T>);
fn get_event_handler_common<T: CallbackContainer>(&self, ty: &str) -> Option<T>;
fn has_handlers(&self) -> bool;
}
impl<'a> EventTargetHelpers for JSRef<'a, EventTarget> {
fn dispatch_event_with_target<'b>(&self,
target: Option<JSRef<'b, EventTarget>>,
event: &JSRef<Event>) -> Fallible<bool> {
if event.deref().dispatching.deref().get() || !event.deref().initialized.deref().get() {
return Err(InvalidState);
}
Ok(dispatch_event(self, target, event))
}
fn set_inline_event_listener(&self,
ty: DOMString,
listener: Option<EventListener>) {
let mut handlers = self.handlers.deref().borrow_mut();
let entries = handlers.find_or_insert_with(ty, |_| vec!());
let idx = entries.iter().position(|&entry| {
match entry.listener {
Inline(_) => true,
_ => false,
}
});
match idx {
Some(idx) => {
match listener {
Some(listener) => entries.get_mut(idx).listener = Inline(listener),
None => {
entries.remove(idx);
}
}
}
None => {
if listener.is_some() {
entries.push(EventListenerEntry {
phase: Bubbling,
listener: Inline(listener.unwrap()),
});
}
}
}
}
fn get_inline_event_listener(&self, ty: DOMString) -> Option<EventListener> {
let handlers = self.handlers.deref().borrow();
let entries = handlers.find(&ty);
entries.and_then(|entries| entries.iter().find(|entry| {
match entry.listener {
Inline(_) => true,<|fim▁hole|> }).map(|entry| entry.listener.get_listener()))
}
fn set_event_handler_uncompiled(&self,
cx: *mut JSContext,
url: Url,
scope: *mut JSObject,
ty: &str,
source: DOMString) {
let url = url.to_str().to_c_str();
let name = ty.to_c_str();
let lineno = 0; //XXXjdm need to get a real number here
let nargs = 1; //XXXjdm not true for onerror
static arg_name: [c_char, ..6] =
['e' as c_char, 'v' as c_char, 'e' as c_char, 'n' as c_char, 't' as c_char, 0];
static arg_names: [*c_char, ..1] = [&arg_name as *c_char];
let source = source.to_utf16();
let handler =
name.with_ref(|name| {
url.with_ref(|url| { unsafe {
let fun = JS_CompileUCFunction(cx, ptr::mut_null(), name,
nargs, &arg_names as **i8 as *mut *i8, source.as_ptr(),
source.len() as size_t,
url, lineno);
assert!(fun.is_not_null());
JS_GetFunctionObject(fun)
}})});
let funobj = unsafe { JS_CloneFunctionObject(cx, handler, scope) };
assert!(funobj.is_not_null());
self.set_event_handler_common(ty, Some(EventHandlerNonNull::new(funobj)))
}
fn set_event_handler_common<T: CallbackContainer>(
&self, ty: &str, listener: Option<T>)
{
let event_listener = listener.map(|listener|
EventListener::new(listener.callback()));
self.set_inline_event_listener(ty.to_string(), event_listener);
}
fn get_event_handler_common<T: CallbackContainer>(&self, ty: &str) -> Option<T> {
let listener = self.get_inline_event_listener(ty.to_string());
listener.map(|listener| CallbackContainer::new(listener.parent.callback()))
}
fn has_handlers(&self) -> bool {
!self.handlers.deref().borrow().is_empty()
}
}
pub trait EventTargetMethods {
fn AddEventListener(&self,
ty: DOMString,
listener: Option<EventListener>,
capture: bool);
fn RemoveEventListener(&self,
ty: DOMString,
listener: Option<EventListener>,
capture: bool);
fn DispatchEvent(&self, event: &JSRef<Event>) -> Fallible<bool>;
}
impl<'a> EventTargetMethods for JSRef<'a, EventTarget> {
fn AddEventListener(&self,
ty: DOMString,
listener: Option<EventListener>,
capture: bool) {
match listener {
Some(listener) => {
let mut handlers = self.handlers.deref().borrow_mut();
let entry = handlers.find_or_insert_with(ty, |_| vec!());
let phase = if capture { Capturing } else { Bubbling };
let new_entry = EventListenerEntry {
phase: phase,
listener: Additive(listener)
};
if entry.as_slice().position_elem(&new_entry).is_none() {
entry.push(new_entry);
}
},
_ => (),
}
}
fn RemoveEventListener(&self,
ty: DOMString,
listener: Option<EventListener>,
capture: bool) {
match listener {
Some(listener) => {
let mut handlers = self.handlers.deref().borrow_mut();
let mut entry = handlers.find_mut(&ty);
for entry in entry.mut_iter() {
let phase = if capture { Capturing } else { Bubbling };
let old_entry = EventListenerEntry {
phase: phase,
listener: Additive(listener)
};
let position = entry.as_slice().position_elem(&old_entry);
for &position in position.iter() {
entry.remove(position);
}
}
},
_ => (),
}
}
fn DispatchEvent(&self, event: &JSRef<Event>) -> Fallible<bool> {
self.dispatch_event_with_target(None, event)
}
}
impl Reflectable for EventTarget {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}
impl<'a> VirtualMethods for JSRef<'a, EventTarget> {
fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods+> {
None
}
}<|fim▁end|> | _ => false,
} |
<|file_name|>hp_procurve_ssh.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from __future__ import unicode_literals
import re
import time
import socket
from netmiko.cisco_base_connection import CiscoSSHConnection
class HPProcurveSSH(CiscoSSHConnection):
def session_preparation(self):
"""
Prepare the session after the connection has been established.
Procurve uses - 'Press any key to continue'
"""
delay_factor = self.select_delay_factor(delay_factor=0)
time.sleep(2 * delay_factor)
self.write_channel("\n")
time.sleep(2 * delay_factor)
self.write_channel("\n")
time.sleep(2 * delay_factor)
# HP output contains VT100 escape codes
self.ansi_escape_codes = True
self.set_base_prompt()
self.disable_paging(command="\nno page\n")
self.set_terminal_width(command='terminal width 511')
def enable(self, cmd='enable', pattern='password', re_flags=re.IGNORECASE,
default_username='manager'):
"""Enter enable mode"""
debug = False
output = self.send_command_timing(cmd)
if 'username' in output.lower():
output += self.send_command_timing(default_username)
if 'password' in output.lower():
output += self.send_command_timing(self.secret)
if debug:
print(output)
self.clear_buffer()
return output
<|fim▁hole|> """Gracefully exit the SSH session."""
self.exit_config_mode()
self.write_channel("logout\n")
count = 0
while count <= 5:
time.sleep(.5)
output = self.read_channel()
if 'Do you want to log out' in output:
self.write_channel("y\n")
# Don't automatically save the config (user's responsibility)
elif 'Do you want to save the current' in output:
self.write_channel("n\n")
try:
self.write_channel("\n")
except socket.error:
break
count += 1<|fim▁end|> | def cleanup(self): |
<|file_name|>RKeyListener.java<|end_file_name|><|fim▁begin|>package org.rosuda.deducer.widgets.event;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
public class RKeyListener extends RListener implements KeyListener{
public void keyPressed(KeyEvent arg0) {
eventOccured(arg0,"keyPressed");
}
public void keyReleased(KeyEvent arg0) {
eventOccured(arg0,"keyReleased");
}
<|fim▁hole|> eventOccured(arg0,"keyTyped");
}
}<|fim▁end|> | public void keyTyped(KeyEvent arg0) { |
<|file_name|>Viewer.cpp<|end_file_name|><|fim▁begin|>#include "Viewer.h"
#include "Scene_draw_interface.h"
Viewer::Viewer(QWidget* parent, bool antialiasing)
: QGLViewer(parent),
scene(0),
antialiasing(antialiasing),
twosides(false),
m_isInitialized(false)
{
setBackgroundColor(::Qt::white);
}
void Viewer::setScene(Scene_draw_interface* scene)
{
this->scene = scene;
}
void Viewer::setAntiAliasing(bool b)
{
antialiasing = b;
if(m_isInitialized)
updateGL();
}
void Viewer::setTwoSides(bool b)
{
twosides = b;
if(m_isInitialized)
updateGL();
}
void Viewer::draw()<|fim▁hole|> draw_aux(false);
}
void Viewer::initializeGL()
{
m_isInitialized = true;
QGLViewer::initializeGL();
scene->initializeGL();
}
void Viewer::draw_aux(bool with_names)
{
QGLViewer::draw();
if(scene == 0)
return;
::glLineWidth(1.0f);
::glPointSize(2.f);
::glEnable(GL_POLYGON_OFFSET_FILL);
::glPolygonOffset(1.0f,1.0f);
::glClearColor(1.0f,1.0f,1.0f,0.0f);
::glPolygonMode(GL_FRONT_AND_BACK,GL_FILL);
if(twosides)
::glLightModeli(GL_LIGHT_MODEL_TWO_SIDE, GL_TRUE);
else
::glLightModeli(GL_LIGHT_MODEL_TWO_SIDE, GL_FALSE);
if(antiAliasing())
{
::glEnable(GL_BLEND);
::glEnable(GL_LINE_SMOOTH);
::glHint(GL_LINE_SMOOTH_HINT, GL_NICEST);
::glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
}
else
{
::glDisable(GL_BLEND);
::glDisable(GL_LINE_SMOOTH);
::glDisable(GL_POLYGON_SMOOTH_HINT);
::glBlendFunc(GL_ONE, GL_ZERO);
::glHint(GL_LINE_SMOOTH_HINT, GL_FASTEST);
}
if(with_names)
scene->drawWithNames();
else
scene->draw();
}
void Viewer::drawWithNames()
{
draw_aux(true);
}
void Viewer::postSelection(const QPoint&)
{
emit selected(this->selectedName());
}<|fim▁end|> | { |
<|file_name|>properties_extra.py<|end_file_name|><|fim▁begin|>'''
Ohm's law is a simple equation describing electrical circuits. It
states that the voltage V through a resistor is equal to the current
(I) times the resistance:
V = I * R
The units of these are volts, ampheres (or "amps"), and ohms,
respectively. In real circuits, often R is actually measured in
kiloohms (10**3 ohms) and I in milliamps (10**-3 amps).
Let's create a Resistor class that models this behavior. The
constructor takes two arguments - the resistance in ohms, and the
voltage in volts:
>>> resistor = Resistor(800, 5.5)
>>> resistor.resistance
800
>>> resistor.voltage
5.5
The current is derived from these two using Ohm's law:
(Hint: use @property)
>>> resistor.current
0.006875
Since we may want the value in milliamps, let's make another property
to provide that:
>>> resistor.current_in_milliamps
6.875
Let's set it up so that we can change the current, and doing so will
correspondingly modify the voltage (but keep the resistance constant).
>>> resistor.current_in_milliamps = 3.5
>>> resistor.resistance
800
>>> round(resistor.voltage, 2)
2.8
>>> resistor.current = .006875
>>> round(resistor.voltage, 2)
5.5
>>> resistor.resistance
800<|fim▁hole|>
>>> resistor.resistance = 8200
Traceback (most recent call last):
AttributeError: can't set attribute
'''
# Write your code here:
class Resistor:
def __init__(self, resistance, voltage):
self._resistance = resistance
self.voltage = voltage
@property
def resistance(self):
return self._resistance
@property
def current(self):
return self.voltage / self.resistance
@current.setter
def current(self, value):
self.voltage = self.resistance * value
@property
def current_in_milliamps(self):
return self.current * 1000
@current_in_milliamps.setter
def current_in_milliamps(self, value):
self.current = value / 1000
# Do not edit any code below this line!
if __name__ == '__main__':
import doctest
count, _ = doctest.testmod()
if count == 0:
print('*** ALL TESTS PASS ***\nGive someone a HIGH FIVE!')
# Copyright 2015-2018 Aaron Maxwell. All rights reserved.<|fim▁end|> |
Also, we've made a design decision that a Resistor cannot change its
resistance value once created: |
<|file_name|>e405.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, division
import matplotlib
import logging
from sys import stdout
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import (Net, RealApplianceSource,
BLSTMLayer, DimshuffleLayer,
BidirectionalRecurrentLayer)
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment, init_experiment
from neuralnilm.net import TrainingError
from neuralnilm.layers import MixtureDensityLayer
from neuralnilm.objectives import (scaled_cost, mdn_nll,
scaled_cost_ignore_inactive, ignore_inactive,
scaled_cost3)
from neuralnilm.plot import MDNPlotter, CentralOutputPlotter
from neuralnilm.updates import clipped_nesterov_momentum
from lasagne.nonlinearities import sigmoid, rectify, tanh, identity
from lasagne.objectives import mse, binary_crossentropy
from lasagne.init import Uniform, Normal, Identity
from lasagne.layers import (LSTMLayer, DenseLayer, Conv1DLayer,
ReshapeLayer, FeaturePoolLayer, RecurrentLayer)
from lasagne.layers.batch_norm import BatchNormLayer
from lasagne.updates import nesterov_momentum, momentum
from functools import partial
import os
import __main__
from copy import deepcopy
from math import sqrt
import numpy as np
import theano.tensor as T
import gc
"""
e400
'learn_init': False
independently_centre_inputs : True
e401
input is in range [0,1]
"""
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
#PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
PATH = "/data/dk3810/figures"
SAVE_PLOT_INTERVAL = 500
GRADIENT_STEPS = 100
source_dict = dict(<|fim▁hole|> 'television'
# 'dish washer',
# ['washer dryer', 'washing machine']
],
max_appliance_powers=[300, 500, 200, 2500, 2400],
max_input_power=1000,
on_power_thresholds=[5] * 5,
min_on_durations=[60, 60, 60, 1800, 1800],
min_off_durations=[12, 12, 12, 1800, 600],
window=("2013-06-01", "2014-07-01"),
seq_length=512,
# random_window=64,
output_one_appliance=False,
boolean_targets=False,
train_buildings=[1],
validation_buildings=[1],
# skip_probability=0.9,
one_target_per_seq=False,
n_seq_per_batch=64,
subsample_target=4,
include_diff=False,
include_power=True,
clip_appliance_power=True,
target_is_prediction=False,
# independently_center_inputs=True,
# standardise_input=True,
# standardise_targets=True,
# unit_variance_targets=True,
input_padding=2,
lag=0
# classification=True
# reshape_target_to_2D=True
# input_stats={'mean': np.array([ 0.05526326], dtype=np.float32),
# 'std': np.array([ 0.12636775], dtype=np.float32)},
# target_stats={
# 'mean': np.array([ 0.04066789, 0.01881946,
# 0.24639061, 0.17608672, 0.10273963],
# dtype=np.float32),
# 'std': np.array([ 0.11449792, 0.07338708,
# 0.26608968, 0.33463112, 0.21250485],
# dtype=np.float32)}
)
N = 50
net_dict = dict(
save_plot_interval=SAVE_PLOT_INTERVAL,
# loss_function=partial(ignore_inactive, loss_func=mdn_nll, seq_length=SEQ_LENGTH),
# loss_function=lambda x, t: mdn_nll(x, t).mean(),
# loss_function=lambda x, t: mse(x, t).mean(),
loss_function=lambda x, t: binary_crossentropy(x, t).mean(),
# loss_function=lambda x, t: binary_crossentropy(x, t).mean(),
# loss_function=partial(scaled_cost, loss_func=mse),
# loss_function=ignore_inactive,
# loss_function=partial(scaled_cost3, ignore_inactive=False),
# updates_func=momentum,
updates_func=clipped_nesterov_momentum,
updates_kwargs={'clip_range': (0, 10)},
learning_rate=1e-1,
learning_rate_changes_by_iteration={
1000: 1e-2,
2000: 1e-3
# 800: 1e-4
# 500: 1e-3
# 4000: 1e-03,
# 6000: 5e-06,
# 7000: 1e-06
# 2000: 5e-06
# 3000: 1e-05
# 7000: 5e-06,
# 10000: 1e-06,
# 15000: 5e-07,
# 50000: 1e-07
},
do_save_activations=True,
# auto_reshape=False,
# plotter=CentralOutputPlotter
# plotter=MDNPlotter
)
def exp_a(name):
# tanh and softplus output
# sane inits for other layers
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
standardise_targets=True,
unit_variance_targets=True
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
loss_function=lambda x, t: mse(x, t).mean(),
learning_rate=1e-3,
learning_rate_changes_by_iteration={
1000: 1e-4,
2000: 1e-5
}
))
net_dict_copy['layers_config']= [
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Uniform(25),
'b': Uniform(25)
},
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50)),
'b': Normal(std=1/sqrt(50))
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 40,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50))
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 80,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
net.load_params(2000)
return net
def exp_b(name):
# tanh and softplus output
# sane inits for other layers
# just large weights for first layer, sane biases
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
standardise_targets=True,
unit_variance_targets=True
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
loss_function=lambda x, t: mse(x, t).mean(),
learning_rate=1e-3,
learning_rate_changes_by_iteration={
1000: 1e-4,
2000: 1e-5
}
))
net_dict_copy['layers_config']= [
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Uniform(25),
'b': Normal(std=1/sqrt(50))
},
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50)),
'b': Normal(std=1/sqrt(50))
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 40,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50))
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 80,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
return net
def exp_c(name):
# tanh and softplus output
# sane inits for other layers
# just large biases for first layer, sane weights
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
standardise_targets=True,
unit_variance_targets=True
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
loss_function=lambda x, t: mse(x, t).mean(),
learning_rate=1e-3,
learning_rate_changes_by_iteration={
1000: 1e-4,
2000: 1e-5
}
))
net_dict_copy['layers_config']= [
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'b': Uniform(25),
'W': Normal(std=1/sqrt(50))
},
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50)),
'b': Normal(std=1/sqrt(50))
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 40,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50))
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 80,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
return net
def exp_d(name):
# tanh and softplus output
# sane inits for other layers
# batch norm
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
standardise_targets=True,
unit_variance_targets=True
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
loss_function=lambda x, t: mse(x, t).mean(),
learning_rate=1e-3,
learning_rate_changes_by_iteration={
1000: 1e-4,
2000: 1e-5
}
))
net_dict_copy['layers_config']= [
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': identity,
'W': Uniform(25),
'b': Uniform(25)
},
{
'type': BatchNormLayer,
'axes': (0, 1),
'nonlinearity': tanh
},
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': identity,
'W': Normal(std=1/sqrt(50)),
'b': Normal(std=1/sqrt(50))
},
{
'type': BatchNormLayer,
'axes': (0, 1),
'nonlinearity': tanh
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 40,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh, # need nonlinearity for hid_to_hid
'learn_init': False,
'precompute_input': False
},
{
'type': BatchNormLayer,
'axes': (0, 1),
'nonlinearity': identity
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': identity,
'W': Normal(std=1/sqrt(50))
},
{
'type': BatchNormLayer,
'nonlinearity': tanh
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 80,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': BatchNormLayer,
'nonlinearity': tanh,
'axes': (0, 1)
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
return net
def exp_e(name):
# like a but with max power = 5900W
# tanh and softplus output
# sane inits for other layers
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
standardise_targets=True,
unit_variance_targets=True,
max_input_power=5900
))
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
loss_function=lambda x, t: mse(x, t).mean(),
learning_rate=1e-3,
learning_rate_changes_by_iteration={
1000: 1e-4,
2000: 1e-5
}
))
net_dict_copy['layers_config']= [
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Uniform(25),
'b': Uniform(25)
},
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50)),
'b': Normal(std=1/sqrt(50))
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 40,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50))
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 80,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
return net
def exp_f(name):
# like a but with max power = 5900W and 5 appliances
# tanh and softplus output
# sane inits for other layers
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
standardise_targets=True,
unit_variance_targets=True,
max_input_power=5900
))
source_dict_copy['appliances'] = [
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television',
'dish washer',
['washer dryer', 'washing machine']
]
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
loss_function=lambda x, t: mse(x, t).mean(),
learning_rate=1e-3,
learning_rate_changes_by_iteration={
1000: 1e-4,
2000: 1e-5
}
))
net_dict_copy['layers_config']= [
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Uniform(25),
'b': Uniform(25)
},
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50)),
'b': Normal(std=1/sqrt(50))
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 40,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50))
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 80,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
net.load_params(1000)
return net
def exp_g(name):
# like a but with max power = 1000W and 5 appliances
# tanh and softplus output
# sane inits for other layers
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
standardise_targets=True,
unit_variance_targets=True,
max_input_power=1000
))
source_dict_copy['appliances'] = [
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television',
'dish washer',
['washer dryer', 'washing machine']
]
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
loss_function=lambda x, t: mse(x, t).mean(),
learning_rate=1e-3,
learning_rate_changes_by_iteration={
1000: 1e-4,
2000: 1e-5
}
))
net_dict_copy['layers_config']= [
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Uniform(25),
'b': Uniform(25)
},
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50)),
'b': Normal(std=1/sqrt(50))
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 40,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50))
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 80,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
return net
def exp_h(name):
# like a but with max power = 5900W and 5 appliances
# tanh and softplus output
# sane inits for other layers
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
standardise_targets=True,
unit_variance_targets=True,
max_input_power=5900,
skip_probability=0.9
))
source_dict_copy['appliances'] = [
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television',
'dish washer',
['washer dryer', 'washing machine']
]
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
loss_function=lambda x, t: mse(x, t).mean(),
learning_rate=1e-3,
learning_rate_changes_by_iteration={
1000: 1e-4,
2000: 1e-5
}
))
net_dict_copy['layers_config']= [
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Uniform(25),
'b': Uniform(25)
},
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50)),
'b': Normal(std=1/sqrt(50))
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 40,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50))
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 80,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
return net
def exp_i(name):
# like a but with max power = 1000W and 5 appliances
# tanh and softplus output
# sane inits for other layers
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
standardise_targets=True,
unit_variance_targets=True,
max_input_power=1000,
skip_probability=0.9
))
source_dict_copy['appliances'] = [
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television',
'dish washer',
['washer dryer', 'washing machine']
]
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source,
loss_function=lambda x, t: mse(x, t).mean(),
learning_rate=1e-3,
learning_rate_changes_by_iteration={
1000: 1e-4,
2000: 1e-5
}
))
net_dict_copy['layers_config']= [
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Uniform(25),
'b': Uniform(25)
},
{
'type': DenseLayer,
'num_units': 50,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50)),
'b': Normal(std=1/sqrt(50))
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 40,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': tanh,
'W': Normal(std=1/sqrt(50))
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BidirectionalRecurrentLayer,
'num_units': 80,
'W_in_to_hid': Normal(std=1/sqrt(50)),
'gradient_steps': GRADIENT_STEPS,
'nonlinearity': tanh,
'learn_init': False,
'precompute_input': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
return net
def main():
# EXPERIMENTS = list('abcdefghijklmnopqrstuvwxyz')
# EXPERIMENTS = list('abcdefghi')
EXPERIMENTS = list('fghi')
for experiment in EXPERIMENTS:
full_exp_name = NAME + experiment
func_call = init_experiment(PATH, experiment, full_exp_name)
logger = logging.getLogger(full_exp_name)
try:
net = eval(func_call)
run_experiment(net, epochs=5000)
except KeyboardInterrupt:
logger.info("KeyboardInterrupt")
break
except Exception as exception:
logger.exception("Exception")
# raise
else:
del net.source.train_activations
gc.collect()
finally:
logging.shutdown()
if __name__ == "__main__":
main()<|fim▁end|> | filename='/data/dk3810/ukdale.h5',
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners', |
<|file_name|>index.spec.js<|end_file_name|><|fim▁begin|>import HomeRoute from 'routes/Home';
describe('(Route) Home', () => {
let _component;
beforeEach(() => {
_component = HomeRoute.component();
});
it('Should return a route configuration object', () => {
expect(typeof HomeRoute).to.equal('object');
});
it('Should define a route component', () => {<|fim▁hole|> expect(_component.type).to.equal('div');
});
});<|fim▁end|> | |
<|file_name|>BlockEvent.java<|end_file_name|><|fim▁begin|>package com.skcraft.plume.event.block;
import com.google.common.base.Functions;
import com.google.common.base.Predicate;
import com.skcraft.plume.event.BulkEvent;
import com.skcraft.plume.event.Cause;
import com.skcraft.plume.event.DelegateEvent;
import com.skcraft.plume.event.Result;
import com.skcraft.plume.util.Location3i;
import net.minecraft.world.World;
import java.util.List;
import static com.google.common.base.Preconditions.checkNotNull;
abstract class BlockEvent extends DelegateEvent implements BulkEvent {
private final World world;
protected BlockEvent(Cause cause, World world) {
super(cause);
checkNotNull(world, "world");
this.world = world;
}
/**
* Get the world.
*
* @return The world
*/
public World getWorld() {
return world;
}
/**
* Get a list of affected locations.
*
* @return A list of affected locations
*/
public abstract List<Location3i> getLocations();
/**
* Filter the list of affected blocks with the given predicate. If the
* predicate returns {@code false}, then the block is removed.
*
* @param predicate the predicate
* @param cancelEventOnFalse true to cancel the event and clear the block
* list once the predicate returns {@code false}
* @return Whether one or more blocks were filtered out
*/
public boolean filterLocations(Predicate<Location3i> predicate, boolean cancelEventOnFalse) {
return filter(getLocations(), Functions.<Location3i>identity(), predicate, cancelEventOnFalse);
}
@Override
public Result getResult() {
if (getLocations().isEmpty()) {
return Result.DENY;
}
return super.getResult();<|fim▁hole|> @Override
public Result getExplicitResult() {
return super.getResult();
}
}<|fim▁end|> | }
|
<|file_name|>fake_infortrend_nas_data.py<|end_file_name|><|fim▁begin|># Copyright (c) 2019 Infortrend Technology, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class InfortrendNASTestData(object):
fake_share_id = ['5a0aa06e-1c57-4996-be46-b81e360e8866', # NFS
'aac4fe64-7a9c-472a-b156-9adbb50b4d29'] # CIFS
fake_share_name = [fake_share_id[0].replace('-', ''),
fake_share_id[1].replace('-', '')]
fake_channel_ip = ['172.27.112.223', '172.27.113.209']
fake_service_status_data = ('(64175, 1234, 272, 0)\n\n'
'{"cliCode": '
'[{"Return": "0x0000", "CLI": "Successful"}], '
'"returnCode": [], '
'"data": '
'[{"A": '
'{"NFS": '
'{"displayName": "NFS", '
'"state_time": "2017-05-04 14:19:53", '
'"enabled": true, '
'"cpu_rate": "0.0", '
'"mem_rate": "0.0", '
'"state": "exited", '
'"type": "share"}}}]}\n\n')
fake_folder_status_data = ('(64175, 1234, 1017, 0)\n\n'
'{"cliCode": '
'[{"Return": "0x0000", "CLI": "Successful"}], '
'"returnCode": [], '
'"data": '
'[{"utility": "1.00", '
'"used": "33886208", '
'"subshare": true, '
'"share": false, '
'"worm": "", '
'"free": "321931374592", '
'"fsType": "xfs", '
'"owner": "A", '
'"readOnly": false, '
'"modifyTime": "2017-04-27 16:16", '
'"directory": "/share-pool-01/LV-1", '
'"volumeId": "6541BAFB2E6C57B6", '
'"mounted": true, '
'"size": "321965260800"}, '
'{"utility": "1.00", '
'"used": "33779712", '
'"subshare": false, '
'"share": false, '
'"worm": "", '
'"free": "107287973888", '
'"fsType": "xfs", '
'"owner": "A", '
'"readOnly": false, '
'"modifyTime": "2017-04-27 15:45", '
'"directory": "/share-pool-02/LV-1", '
'"volumeId": "147A8FB67DA39914", '
'"mounted": true, '
'"size": "107321753600"}]}\n\n')
fake_nfs_status_off = [{
'A': {
'NFS': {
'displayName': 'NFS',
'state_time': '2017-05-04 14:19:53',
'enabled': False,
'cpu_rate': '0.0',
'mem_rate': '0.0',
'state': 'exited',
'type': 'share',
}
}
}]<|fim▁hole|>
fake_folder_status = [{
'utility': '1.00',
'used': '33886208',
'subshare': True,
'share': False,
'worm': '',
'free': '321931374592',
'fsType': 'xfs',
'owner': 'A',
'readOnly': False,
'modifyTime': '2017-04-27 16:16',
'directory': '/share-pool-01/LV-1',
'volumeId': '6541BAFB2E6C57B6',
'mounted': True,
'size': '321965260800'}, {
'utility': '1.00',
'used': '33779712',
'subshare': False,
'share': False,
'worm': '',
'free': '107287973888',
'fsType': 'xfs',
'owner': 'A',
'readOnly': False,
'modifyTime': '2017-04-27 15:45',
'directory': '/share-pool-02/LV-1',
'volumeId': '147A8FB67DA39914',
'mounted': True,
'size': '107321753600',
}]
def fake_get_channel_status(self, ch1_status='UP'):
return [{
'datalink': 'mgmt0',
'status': 'UP',
'typeConfig': 'DHCP',
'IP': '172.27.112.125',
'MAC': '00:d0:23:00:15:a6',
'netmask': '255.255.240.0',
'type': 'dhcp',
'gateway': '172.27.127.254'}, {
'datalink': 'CH0',
'status': 'UP',
'typeConfig': 'DHCP',
'IP': self.fake_channel_ip[0],
'MAC': '00:d0:23:80:15:a6',
'netmask': '255.255.240.0',
'type': 'dhcp',
'gateway': '172.27.127.254'}, {
'datalink': 'CH1',
'status': ch1_status,
'typeConfig': 'DHCP',
'IP': self.fake_channel_ip[1],
'MAC': '00:d0:23:40:15:a6',
'netmask': '255.255.240.0',
'type': 'dhcp',
'gateway': '172.27.127.254'}, {
'datalink': 'CH2',
'status': 'DOWN',
'typeConfig': 'DHCP',
'IP': '',
'MAC': '00:d0:23:c0:15:a6',
'netmask': '',
'type': '',
'gateway': ''}, {
'datalink': 'CH3',
'status': 'DOWN',
'typeConfig': 'DHCP',
'IP': '',
'MAC': '00:d0:23:20:15:a6',
'netmask': '',
'type': '',
'gateway': '',
}]
fake_fquota_status = [{
'quota': '21474836480',
'used': '0',
'name': 'test-folder',
'type': 'subfolder',
'id': '537178178'}, {
'quota': '32212254720',
'used': '0',
'name': fake_share_name[0],
'type': 'subfolder',
'id': '805306752'}, {
'quota': '53687091200',
'used': '21474836480',
'name': fake_share_name[1],
'type': 'subfolder',
'id': '69'}, {
'quota': '94091997184',
'used': '0',
'type': 'subfolder',
'id': '70',
"name": 'test-folder-02'
}]
fake_fquota_status_with_no_settings = []
def fake_get_share_status_nfs(self, status=False):
fake_share_status_nfs = [{
'ftp': False,
'cifs': False,
'oss': False,
'sftp': False,
'nfs': status,
'directory': '/LV-1/share-pool-01/' + self.fake_share_name[0],
'exist': True,
'afp': False,
'webdav': False
}]
if status:
fake_share_status_nfs[0]['nfs_detail'] = {
'hostList': [{
'uid': '65534',
'insecure': 'insecure',
'squash': 'all',
'access': 'ro',
'host': '*',
'gid': '65534',
'mode': 'async',
'no_subtree_check': 'no_subtree_check',
}]
}
return fake_share_status_nfs
def fake_get_share_status_cifs(self, status=False):
fake_share_status_cifs = [{
'ftp': False,
'cifs': status,
'oss': False,
'sftp': False,
'nfs': False,
'directory': '/share-pool-01/LV-1/' + self.fake_share_name[1],
'exist': True,
'afp': False,
'webdav': False
}]
if status:
fake_share_status_cifs[0]['cifs_detail'] = {
'available': True,
'encrypt': False,
'description': '',
'sharename': 'cifs-01',
'failover': '',
'AIO': True,
'priv': 'None',
'recycle_bin': False,
'ABE': True,
}
return fake_share_status_cifs
fake_subfolder_data = [{
'size': '6',
'index': '34',
'description': '',
'encryption': '',
'isEnd': False,
'share': False,
'volumeId': '6541BAFB2E6C57B6',
'quota': '',
'modifyTime': '2017-04-06 11:35',
'owner': 'A',
'path': '/share-pool-01/LV-1/UserHome',
'subshare': True,
'type': 'subfolder',
'empty': False,
'name': 'UserHome'}, {
'size': '6',
'index': '39',
'description': '',
'encryption': '',
'isEnd': False,
'share': False,
'volumeId': '6541BAFB2E6C57B6',
'quota': '21474836480',
'modifyTime': '2017-04-27 15:44',
'owner': 'A',
'path': '/share-pool-01/LV-1/test-folder',
'subshare': False,
'type': 'subfolder',
'empty': True,
'name': 'test-folder'}, {
'size': '6',
'index': '45',
'description': '',
'encryption': '',
'isEnd': False,
'share': True,
'volumeId': '6541BAFB2E6C57B6',
'quota': '32212254720',
'modifyTime': '2017-04-27 16:15',
'owner': 'A',
'path': '/share-pool-01/LV-1/' + fake_share_name[0],
'subshare': False,
'type': 'subfolder',
'empty': True,
'name': fake_share_name[0]}, {
'size': '6',
'index': '512',
'description': '',
'encryption': '',
'isEnd': True,
'share': True,
'volumeId': '6541BAFB2E6C57B6',
'quota': '53687091200',
'modifyTime': '2017-04-27 16:16',
'owner': 'A',
'path': '/share-pool-01/LV-1/' + fake_share_name[1],
'subshare': False,
'type': 'subfolder',
'empty': True,
'name': fake_share_name[1]}, {
'size': '6',
'index': '777',
'description': '',
'encryption': '',
'isEnd': False,
'share': False,
'volumeId': '6541BAFB2E6C57B6',
'quota': '94091997184',
'modifyTime': '2017-04-28 15:44',
'owner': 'A',
'path': '/share-pool-01/LV-1/test-folder-02',
'subshare': False,
'type': 'subfolder',
'empty': True,
'name': 'test-folder-02'
}]
fake_cifs_user_list = [{
'Superuser': 'No',
'Group': 'users',
'Description': '',
'Quota': 'none',
'PWD Expiry Date': '2291-01-19',
'Home Directory': '/share-pool-01/LV-1/UserHome/user01',
'UID': '100001',
'Type': 'Local',
'Name': 'user01'}, {
'Superuser': 'No',
'Group': 'users',
'Description': '',
'Quota': 'none',
'PWD Expiry Date': '2017-08-07',
'Home Directory': '/share-pool-01/LV-1/UserHome/user02',
'UID': '100002',
'Type': 'Local',
'Name': 'user02'
}]
fake_share_status_nfs_with_rules = [{
'ftp': False,
'cifs': False,
'oss': False,
'sftp': False,
'nfs': True,
'directory': '/share-pool-01/LV-1/' + fake_share_name[0],
'exist': True,
'nfs_detail': {
'hostList': [{
'uid': '65534',
'insecure': 'insecure',
'squash': 'all',
'access': 'ro',
'host': '*',
'gid': '65534',
'mode': 'async',
'no_subtree_check':
'no_subtree_check'}, {
'uid': '65534',
'insecure': 'insecure',
'squash': 'all',
'access': 'rw',
'host': '172.27.1.1',
'gid': '65534',
'mode': 'async',
'no_subtree_check': 'no_subtree_check'}, {
'uid': '65534',
'insecure': 'insecure',
'squash': 'all',
'access': 'rw',
'host': '172.27.1.2',
'gid': '65534',
'mode': 'async',
'no_subtree_check': 'no_subtree_check'}]
},
'afp': False,
'webdav': False,
}]
fake_share_status_cifs_with_rules = [
{
'permission': {
'Read': True,
'Write': True,
'Execute': True},
'type': 'user',
'id': '100001',
'name': 'user01'
}, {
'permission': {
'Read': True,
'Write': False,
'Execute': True},
'type': 'user',
'id': '100002',
'name': 'user02'
}, {
'permission': {
'Read': True,
'Write': False,
'Execute': True},
'type': 'group@',
'id': '100',
'name': 'users'
}, {
'permission': {
'Read': True,
'Write': False,
'Execute': True},
'type': 'other@',
'id': '',
'name': ''
}
]<|fim▁end|> | |
<|file_name|>all.rs<|end_file_name|><|fim▁begin|>#![deny(warnings)]
#![feature(io, path_ext, convert)]
extern crate cargo_registry;
extern crate conduit_middleware;
extern crate conduit_test;
extern crate rustc_serialize;
extern crate conduit;
extern crate curl;
extern crate git2;
extern crate time;
extern crate url;
extern crate semver;
use std::collections::HashMap;
use std::error::Error as StdError;
use std::process::Command;
use std::env;
use std::sync::{Once, ONCE_INIT, Arc};
use rustc_serialize::json::{self, Json};
use conduit::Request;
use conduit_test::MockRequest;
use cargo_registry::app::App;
use cargo_registry::db::{self, RequestTransaction};
use cargo_registry::dependency::Kind;
use cargo_registry::{User, Crate, Version, Keyword, Dependency};
macro_rules! t{ ($e:expr) => (
match $e {
Ok(e) => e,
Err(m) => panic!("{} failed with: {}", stringify!($e), m),
}
) }
macro_rules! t_resp{ ($e:expr) => ({
t!($e)
}) }
macro_rules! ok_resp{ ($e:expr) => ({
let resp = t_resp!($e);
if !::ok_resp(&resp) { panic!("bad response: {:?}", resp.status); }
resp
}) }
macro_rules! bad_resp{ ($e:expr) => ({
let mut resp = t_resp!($e);
match ::bad_resp(&mut resp) {
None => panic!("ok response: {:?}", resp.status),
Some(b) => b,
}
}) }<|fim▁hole|>#[derive(RustcDecodable, Debug)]
struct Error { detail: String }
#[derive(RustcDecodable)]
struct Bad { errors: Vec<Error> }
mod middleware;
mod keyword;
mod krate;
mod user;
mod record;
mod git;
mod version;
fn app() -> (record::Bomb, Arc<App>, conduit_middleware::MiddlewareBuilder) {
struct NoCommit;
static INIT: Once = ONCE_INIT;
git::init();
let (proxy, bomb) = record::proxy();
let config = cargo_registry::Config {
s3_bucket: env::var("S3_BUCKET").unwrap_or(String::new()),
s3_access_key: env::var("S3_ACCESS_KEY").unwrap_or(String::new()),
s3_secret_key: env::var("S3_SECRET_KEY").unwrap_or(String::new()),
s3_region: env::var("S3_REGION").ok(),
s3_proxy: Some(proxy),
session_key: "test".to_string(),
git_repo_checkout: git::checkout(),
gh_client_id: "".to_string(),
gh_client_secret: "".to_string(),
db_url: env("TEST_DATABASE_URL"),
env: cargo_registry::Env::Test,
max_upload_size: 1000,
};
INIT.call_once(|| db_setup(&config.db_url));
let app = App::new(&config);
let app = Arc::new(app);
let mut middleware = cargo_registry::middleware(app.clone());
middleware.add(NoCommit);
return (bomb, app, middleware);
fn env(s: &str) -> String {
match env::var(s).ok() {
Some(s) => s,
None => panic!("must have `{}` defined", s),
}
}
fn db_setup(db: &str) {
let migrate = t!(env::current_exe()).parent().unwrap().join("migrate");
assert!(t!(Command::new(&migrate).env("DATABASE_URL", db)
.status()).success());
}
impl conduit_middleware::Middleware for NoCommit {
fn after(&self, req: &mut Request,
res: Result<conduit::Response, Box<StdError+Send>>)
-> Result<conduit::Response, Box<StdError+Send>> {
req.extensions().find::<db::Transaction>()
.expect("Transaction not present in request")
.rollback();
return res;
}
}
}
fn req(app: Arc<App>, method: conduit::Method, path: &str) -> MockRequest {
let mut req = MockRequest::new(method, path);
req.mut_extensions().insert(db::Transaction::new(app));
return req;
}
fn ok_resp(r: &conduit::Response) -> bool {
r.status.0 == 200
}
fn bad_resp(r: &mut conduit::Response) -> Option<Bad> {
let bad = json::<Bad>(r);
if bad.errors.len() == 0 { return None }
Some(bad)
}
fn json<T: rustc_serialize::Decodable>(r: &mut conduit::Response) -> T {
let mut data = Vec::new();
r.body.read_to_end(&mut data).unwrap();
let s = std::str::from_utf8(&data).unwrap();
let j = match Json::from_str(s) {
Ok(t) => t,
Err(e) => panic!("failed to decode: {:?}\n{}", e, s),
};
let j = fixup(j);
let s = j.to_string();
return match json::decode(&s) {
Ok(t) => t,
Err(e) => panic!("failed to decode: {:?}\n{}", e, s),
};
fn fixup(json: Json) -> Json {
match json {
Json::Object(object) => {
Json::Object(object.into_iter().map(|(k, v)| {
let k = if k == "crate" {
"krate".to_string()
} else {
k
};
(k, fixup(v))
}).collect())
}
Json::Array(list) => {
Json::Array(list.into_iter().map(fixup).collect())
}
j => j,
}
}
}
fn user(login: &str) -> User {
User {
id: 10000,
gh_login: login.to_string(),
email: None,
name: None,
avatar: None,
gh_access_token: User::new_api_token(), // just randomize it
api_token: User::new_api_token(),
}
}
fn krate(name: &str) -> Crate {
cargo_registry::krate::Crate {
id: 10000,
name: name.to_string(),
user_id: 100,
updated_at: time::now().to_timespec(),
created_at: time::now().to_timespec(),
downloads: 10,
max_version: semver::Version::parse("0.0.0").unwrap(),
documentation: None,
homepage: None,
description: None,
readme: None,
keywords: Vec::new(),
license: None,
repository: None,
}
}
fn mock_user(req: &mut Request, u: User) -> User {
let u = User::find_or_insert(req.tx().unwrap(),
&u.gh_login,
u.email.as_ref().map(|s| &s[..]),
u.name.as_ref().map(|s| &s[..]),
u.avatar.as_ref().map(|s| &s[..]),
&u.gh_access_token,
&u.api_token).unwrap();
req.mut_extensions().insert(u.clone());
return u;
}
fn mock_crate(req: &mut Request, krate: Crate) -> (Crate, Version) {
mock_crate_vers(req, krate, &semver::Version::parse("1.0.0").unwrap())
}
fn mock_crate_vers(req: &mut Request, krate: Crate, v: &semver::Version)
-> (Crate, Version) {
let user = req.extensions().find::<User>().unwrap();
let mut krate = Crate::find_or_insert(req.tx().unwrap(), &krate.name,
user.id, &krate.description,
&krate.homepage,
&krate.documentation,
&krate.readme,
&krate.keywords,
&krate.repository,
&krate.license,
&None).unwrap();
Keyword::update_crate(req.tx().unwrap(), &krate,
&krate.keywords).unwrap();
let v = krate.add_version(req.tx().unwrap(), v, &HashMap::new(), &[]);
(krate, v.unwrap())
}
fn mock_dep(req: &mut Request, version: &Version, krate: &Crate,
target: Option<&str>) -> Dependency {
Dependency::insert(req.tx().unwrap(),
version.id,
krate.id,
&semver::VersionReq::parse(">= 0").unwrap(),
Kind::Normal,
false, true, &[],
&target.map(|s| s.to_string())).unwrap()
}
fn mock_keyword(req: &mut Request, name: &str) -> Keyword {
Keyword::find_or_insert(req.tx().unwrap(), name).unwrap()
}
fn logout(req: &mut Request) {
req.mut_extensions().pop::<User>();
}<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import os
import re
from django.conf import global_settings, settings
from django.contrib.sites.models import Site, RequestSite
from django.contrib.auth.models import User
from django.core import mail
from django.core.exceptions import SuspiciousOperation
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import QueryDict
from django.utils.encoding import force_text
from django.utils.html import escape
from django.utils.http import urlquote
from django.utils._os import upath
from django.test import TestCase
from django.test.utils import override_settings
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import (AuthenticationForm, PasswordChangeForm,
SetPasswordForm, PasswordResetForm)
from django.contrib.auth.tests.utils import skipIfCustomUser
@override_settings(
LANGUAGES=(
('en', 'English'),
),
LANGUAGE_CODE='en',
TEMPLATE_LOADERS=global_settings.TEMPLATE_LOADERS,
TEMPLATE_DIRS=(
os.path.join(os.path.dirname(upath(__file__)), 'templates'),
),
USE_TZ=False,
PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
)
class AuthViewsTestCase(TestCase):
"""
Helper base class for all the follow test cases.
"""
fixtures = ['authtestdata.json']
urls = 'django.contrib.auth.tests.urls'
def login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith(settings.LOGIN_REDIRECT_URL))
self.assertTrue(SESSION_KEY in self.client.session)
def assertContainsEscaped(self, response, text, **kwargs):
return self.assertContains(response, escape(force_text(text)), **kwargs)
@skipIfCustomUser
class AuthViewNamedURLTests(AuthViewsTestCase):
urls = 'django.contrib.auth.urls'
def test_named_urls(self):
"Named URLs should be reversible"
expected_named_urls = [
('login', [], {}),
('logout', [], {}),
('password_change', [], {}),
('password_change_done', [], {}),
('password_reset', [], {}),
('password_reset_done', [], {}),
('password_reset_confirm', [], {
'uidb36': 'aaaaaaa',
'token': '1111-aaaaa',
}),
('password_reset_complete', [], {}),
]
for name, args, kwargs in expected_named_urls:
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name)
@skipIfCustomUser
class PasswordResetTest(AuthViewsTestCase):
def test_email_not_found(self):
"Error is raised if the provided email address isn't currently registered"
response = self.client.get('/password_reset/')
self.assertEqual(response.status_code, 200)
response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
self.assertContainsEscaped(response, PasswordResetForm.error_messages['unknown'])
self.assertEqual(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue("http://" in mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
def test_email_found_custom_from(self):
"Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)
@override_settings(ALLOWED_HOSTS=['adminsite.com'])
def test_admin_reset(self):
"If the reset view is marked as being for admin, the HTTP_HOST header is used for a domain override."
response = self.client.post('/admin_password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='adminsite.com'
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue("http://adminsite.com" in mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host(self):
"Poisoned HTTP_HOST headers can't be used for reset emails"
# This attack is based on the way browsers handle URLs. The colon
# should be used to separate the port, but if the URL contains an @,
# the colon is interpreted as part of a username for login purposes,
# making 'evil.com' the request domain. Since HTTP_HOST is used to
# produce a meaningful reset URL, we need to be certain that the
# HTTP_HOST header isn't poisoned. This is done as a check when get_host()
# is invoked, but we check here as a practical consequence.
with self.assertRaises(SuspiciousOperation):
self.client.post('/password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(len(mail.outbox), 0)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host_admin_site(self):
"Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
with self.assertRaises(SuspiciousOperation):
self.client.post('/admin_password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(len(mail.outbox), 0)
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertTrue(urlmatch is not None, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
# Let's munge the token in the path, but keep the same length,
# in case the URLconf will reject a different length.
path = path[:-5] + ("0" * 4) + path[-1]
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_user(self):
# Ensure that we get a 200 response for a non-existant user, not a 404
response = self.client.get('/reset/123456-1-1/')
self.assertContains(response, "The password reset link was invalid")
def test_confirm_overflow_user(self):
# Ensure that we get a 200 response for a base36 user id that overflows int
response = self.client.get('/reset/zzzzzzzzzzzzz-1-1/')
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying
# to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0" * 4) + path[-1]
self.client.post(path, {
'new_password1': 'anewpassword',
'new_password2': ' anewpassword',
})
# Check the password has not been changed
u = User.objects.get(email='staffmember@example.com')
self.assertTrue(not u.check_password("anewpassword"))
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
# It redirects us to a 'complete' page:
self.assertEqual(response.status_code, 302)
# Check the password has been changed
u = User.objects.get(email='staffmember@example.com')
self.assertTrue(u.check_password("anewpassword"))
# Check we can't use the link again
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'x'})
self.assertContainsEscaped(response, SetPasswordForm.error_messages['password_mismatch'])
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
fixtures = ['custom_user.json']
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertTrue(urlmatch is not None, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid_custom_user(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
@skipIfCustomUser
class ChangePasswordTest(AuthViewsTestCase):
def fail_login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password,
})
self.assertContainsEscaped(response, AuthenticationForm.error_messages['invalid_login'] % {
'username': User._meta.get_field('username').verbose_name
})
def logout(self):
response = self.client.get('/logout/')
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'donuts',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertContainsEscaped(response, PasswordChangeForm.error_messages['password_incorrect'])
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'donuts',
})
self.assertContainsEscaped(response, SetPasswordForm.error_messages['password_mismatch'])
def test_password_change_succeeds(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/password_change/done/'))
self.fail_login()
self.login(password='password1')
def test_password_change_done_succeeds(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/password_change/done/'))
def test_password_change_done_fails(self):
with self.settings(LOGIN_URL='/login/'):
response = self.client.get('/password_change/done/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/login/?next=/password_change/done/'))
@skipIfCustomUser
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse('django.contrib.auth.views.login'))
self.assertEqual(response.status_code, 200)
if Site._meta.installed:
site = Site.objects.get_current()
self.assertEqual(response.context['site'], site)
self.assertEqual(response.context['site_name'], site.name)
else:
self.assertIsInstance(response.context['site'], RequestSite)
self.assertTrue(isinstance(response.context['form'], AuthenticationForm),
'Login form is not an AuthenticationForm')
def test_security_check(self, password='password'):
login_url = reverse('django.contrib.auth.views.login')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
response = self.client.post(nasty_url, {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertFalse(bad_url in response['Location'],
"%s should be blocked" % bad_url)
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'//testserver/',
'/url%20with%20spaces/'): # see ticket #12534
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
response = self.client.post(safe_url, {
'username': 'testclient',
'password': password,
})<|fim▁hole|> "%s should be allowed" % good_url)
@skipIfCustomUser
class LoginURLSettings(AuthViewsTestCase):
def setUp(self):
super(LoginURLSettings, self).setUp()
self.old_LOGIN_URL = settings.LOGIN_URL
def tearDown(self):
super(LoginURLSettings, self).tearDown()
settings.LOGIN_URL = self.old_LOGIN_URL
def get_login_required_url(self, login_url):
settings.LOGIN_URL = login_url
response = self.client.get('/login_required/')
self.assertEqual(response.status_code, 302)
return response['Location']
def test_standard_login_url(self):
login_url = '/login/'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = '/login_required/'
self.assertEqual(login_required_url, 'http://testserver%s?%s' %
(login_url, querystring.urlencode('/')))
def test_remote_login_url(self):
login_url = 'http://remote.example.com/login'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url,
'%s?%s' % (login_url, querystring.urlencode('/')))
def test_https_login_url(self):
login_url = 'https:///login/'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url,
'%s?%s' % (login_url, querystring.urlencode('/')))
def test_login_url_with_querystring(self):
login_url = '/login/?pretty=1'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('pretty=1', mutable=True)
querystring['next'] = '/login_required/'
self.assertEqual(login_required_url, 'http://testserver/login/?%s' %
querystring.urlencode('/'))
def test_remote_login_url_with_next_querystring(self):
login_url = 'http://remote.example.com/login/'
login_required_url = self.get_login_required_url('%s?next=/default/' %
login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url, '%s?%s' % (login_url,
querystring.urlencode('/')))
@skipIfCustomUser
class LogoutTest(AuthViewsTestCase):
def confirm_logged_out(self):
self.assertTrue(SESSION_KEY not in self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.get('/logout/')
self.assertContains(response, 'Logged out')
self.confirm_logged_out()
def test_14377(self):
# Bug 14377
self.login()
response = self.client.get('/logout/')
self.assertTrue('site' in response.context)
def test_logout_with_overridden_redirect_url(self):
# Bug 11223
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/somewhere/'))
response = self.client.get('/logout/next_page/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/login/'))
self.confirm_logged_out()
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/somewhere/'))
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.get('/logout/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/login/'))
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.get('/logout/custom_query/?follow=/somewhere/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/somewhere/'))
self.confirm_logged_out()
def test_security_check(self, password='password'):
logout_url = reverse('django.contrib.auth.views.logout')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
self.login()
response = self.client.get(nasty_url)
self.assertEqual(response.status_code, 302)
self.assertFalse(bad_url in response['Location'],
"%s should be blocked" % bad_url)
self.confirm_logged_out()
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'//testserver/',
'/url%20with%20spaces/'): # see ticket #12534
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
self.login()
response = self.client.get(safe_url)
self.assertEqual(response.status_code, 302)
self.assertTrue(good_url in response['Location'],
"%s should be allowed" % good_url)
self.confirm_logged_out()<|fim▁end|> | self.assertEqual(response.status_code, 302)
self.assertTrue(good_url in response['Location'], |
<|file_name|>bokwcs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env/python
import sys
import argparse
from bokpipe.bokastrom import scamp_solve
parser = argparse.ArgumentParser()
parser.add_argument("image",type=str,
help="input FITS image")
parser.add_argument("catalog",type=str,
help="input FITS catalog")
parser.add_argument("-a","--args",type=str,
help="arguments to pass to scamp config")
parser.add_argument("-f","--filter",type=str,default='g',
help="reference band")
parser.add_argument("-p","--plots",action="store_true",
help="write check plots")
parser.add_argument("-r","--reference",type=str,default=None,
help="reference catalog")
parser.add_argument('-v','--verbose',action='count',
help='increase output verbosity')
parser.add_argument("-w","--write",action="store_true",
help="write WCS to image header")
parser.add_argument("--single",action="store_true",<|fim▁hole|>kwargs = {}
if args.args is not None:
arglist = args.args.split()
for a in arglist:
k,v = a.split('=')
kwargs[k] = v
scamp_solve(args.image,args.catalog,refStarCatFile=args.reference,
filt=args.filter,savewcs=args.write,clobber=True,
check_plots=args.plots,twopass=not args.single,
verbose=args.verbose,**kwargs)<|fim▁end|> | help="single pass")
args = parser.parse_args()
|
<|file_name|>taskdb.py<|end_file_name|><|fim▁begin|>import json, time
from pyspider.database.base.taskdb import TaskDB as BaseTaskDB
from .couchdbbase import SplitTableMixin
class TaskDB(SplitTableMixin, BaseTaskDB):
collection_prefix = ''
def __init__(self, url, database='taskdb', username=None, password=None):
self.username = username
self.password = password
self.base_url = url
self.url = url + database + "/"
self.database = database
self.index = None
super().__init__()
self.create_database(database)
self.projects = set()
self._list_project()
def _get_collection_name(self, project):
return self.database + "_" + self._collection_name(project)
def _create_project(self, project):<|fim▁hole|> payload = {
'index': {
'fields': ['status', 'taskid']
},
'name': collection_name
}
res = self.session.post(self.base_url + collection_name + "/_index", json=payload).json()
self.index = res['id']
self._list_project()
def load_tasks(self, status, project=None, fields=None):
if not project:
self._list_project()
if fields is None:
fields = []
if project:
projects = [project, ]
else:
projects = self.projects
for project in projects:
collection_name = self._get_collection_name(project)
for task in self.get_docs(collection_name, {"selector": {"status": status}, "fields": fields}):
yield task
def get_task(self, project, taskid, fields=None):
if project not in self.projects:
self._list_project()
if project not in self.projects:
return
if fields is None:
fields = []
collection_name = self._get_collection_name(project)
ret = self.get_docs(collection_name, {"selector": {"taskid": taskid}, "fields": fields})
if len(ret) == 0:
return None
return ret[0]
def status_count(self, project):
if project not in self.projects:
self._list_project()
if project not in self.projects:
return {}
collection_name = self._get_collection_name(project)
def _count_for_status(collection_name, status):
total = len(self.get_docs(collection_name, {"selector": {'status': status}}))
return {'total': total, "_id": status} if total else None
c = collection_name
ret = [x for x in [_count_for_status(c, s) for s in [self.ACTIVE, self.SUCCESS, self.FAILED]] if x]
result = {}
if isinstance(ret, dict):
ret = ret.get('result', [])
for each in ret:
result[each['_id']] = each['total']
return result
def insert(self, project, taskid, obj={}):
if project not in self.projects:
self._create_project(project)
obj = dict(obj)
obj['taskid'] = taskid
obj['project'] = project
obj['updatetime'] = time.time()
return self.update(project, taskid, obj=obj)
def update(self, project, taskid, obj={}, **kwargs):
obj = dict(obj)
obj.update(kwargs)
obj['updatetime'] = time.time()
collection_name = self._get_collection_name(project)
return self.update_doc(collection_name, taskid, obj)
def drop_database(self):
return self.delete(self.url)
def drop(self, project):
collection_name = self._get_collection_name(project)
url = self.base_url + collection_name
return self.delete(url)<|fim▁end|> | collection_name = self._get_collection_name(project)
self.create_database(collection_name)
# create index |
<|file_name|>storage_policy.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import string
import textwrap
import six
from six.moves.configparser import ConfigParser
from swift.common.utils import (
config_true_value, SWIFT_CONF_FILE, whataremyips, list_from_csv)
from swift.common.ring import Ring, RingData
from swift.common.utils import quorum_size
from swift.common.exceptions import RingValidationError
from pyeclib.ec_iface import ECDriver, ECDriverError, VALID_EC_TYPES
LEGACY_POLICY_NAME = 'Policy-0'
VALID_CHARS = '-' + string.ascii_letters + string.digits
DEFAULT_POLICY_TYPE = REPL_POLICY = 'replication'
EC_POLICY = 'erasure_coding'
DEFAULT_EC_OBJECT_SEGMENT_SIZE = 1048576
class BindPortsCache(object):
def __init__(self, swift_dir, bind_ip):
self.swift_dir = swift_dir
self.mtimes_by_ring_path = {}
self.portsets_by_ring_path = {}
self.my_ips = set(whataremyips(bind_ip))
def all_bind_ports_for_node(self):
"""
Given an iterable of IP addresses identifying a storage backend server,
return a set of all bind ports defined in all rings for this storage
backend server.
The caller is responsible for not calling this method (which performs
at least a stat on all ring files) too frequently.
"""
# NOTE: we don't worry about disappearing rings here because you can't
# ever delete a storage policy.
for policy in POLICIES:
# NOTE: we must NOT use policy.load_ring to load the ring. Users
# of this utility function will not need the actual ring data, just
# the bind ports.
#
# This is duplicated with Ring.__init__ just a bit...
serialized_path = os.path.join(self.swift_dir,
policy.ring_name + '.ring.gz')
try:
new_mtime = os.path.getmtime(serialized_path)
except OSError:
continue
old_mtime = self.mtimes_by_ring_path.get(serialized_path)
if not old_mtime or old_mtime != new_mtime:
self.portsets_by_ring_path[serialized_path] = set(
dev['port']
for dev in RingData.load(serialized_path,
metadata_only=True).devs
if dev and dev['ip'] in self.my_ips)
self.mtimes_by_ring_path[serialized_path] = new_mtime
# No "break" here so that the above line will update the
# mtimes_by_ring_path entry for any ring that changes, not just
# the first one we notice.
# Return the requested set of ports from our (now-freshened) cache
return six.moves.reduce(set.union,
self.portsets_by_ring_path.values(), set())
class PolicyError(ValueError):
def __init__(self, msg, index=None):
if index is not None:
msg += ', for index %r' % index
super(PolicyError, self).__init__(msg)
def _get_policy_string(base, policy_index):
if policy_index == 0 or policy_index is None:
return_string = base
else:
return_string = base + "-%d" % int(policy_index)
return return_string
def get_policy_string(base, policy_or_index):
"""
Helper function to construct a string from a base and the policy.
Used to encode the policy index into either a file name or a
directory name by various modules.
:param base: the base string
:param policy_or_index: StoragePolicy instance, or an index
(string or int), if None the legacy
storage Policy-0 is assumed.
:returns: base name with policy index added
:raises: PolicyError if no policy exists with the given policy_index
"""
if isinstance(policy_or_index, BaseStoragePolicy):
policy = policy_or_index
else:
policy = POLICIES.get_by_index(policy_or_index)
if policy is None:
raise PolicyError("Unknown policy", index=policy_or_index)
return _get_policy_string(base, int(policy))
def split_policy_string(policy_string):
"""
Helper function to convert a string representing a base and a
policy. Used to decode the policy from either a file name or
a directory name by various modules.
:param policy_string: base name with policy index added
:raises: PolicyError if given index does not map to a valid policy
:returns: a tuple, in the form (base, policy) where base is the base
string and policy is the StoragePolicy instance for the
index encoded in the policy_string.
"""
if '-' in policy_string:
base, policy_index = policy_string.rsplit('-', 1)
else:
base, policy_index = policy_string, None
policy = POLICIES.get_by_index(policy_index)
if get_policy_string(base, policy) != policy_string:
raise PolicyError("Unknown policy", index=policy_index)
return base, policy
class BaseStoragePolicy(object):
"""
Represents a storage policy. Not meant to be instantiated directly;
implement a derived subclasses (e.g. StoragePolicy, ECStoragePolicy, etc)
or use :func:`~swift.common.storage_policy.reload_storage_policies` to
load POLICIES from ``swift.conf``.
The object_ring property is lazy loaded once the service's ``swift_dir``
is known via :meth:`~StoragePolicyCollection.get_object_ring`, but it may
be over-ridden via object_ring kwarg at create time for testing or
actively loaded with :meth:`~StoragePolicy.load_ring`.
"""
policy_type_to_policy_cls = {}
def __init__(self, idx, name='', is_default=False, is_deprecated=False,
object_ring=None, aliases=''):
# do not allow BaseStoragePolicy class to be instantiated directly
if type(self) == BaseStoragePolicy:
raise TypeError("Can't instantiate BaseStoragePolicy directly")
# policy parameter validation
try:
self.idx = int(idx)
except ValueError:
raise PolicyError('Invalid index', idx)
if self.idx < 0:
raise PolicyError('Invalid index', idx)
self.alias_list = []
if not name or not self._validate_policy_name(name):
raise PolicyError('Invalid name %r' % name, idx)
self.alias_list.append(name)
if aliases:
names_list = list_from_csv(aliases)
for alias in names_list:
if alias == name:
continue
self._validate_policy_name(alias)
self.alias_list.append(alias)
self.is_deprecated = config_true_value(is_deprecated)
self.is_default = config_true_value(is_default)
if self.policy_type not in BaseStoragePolicy.policy_type_to_policy_cls:
raise PolicyError('Invalid type', self.policy_type)
if self.is_deprecated and self.is_default:
raise PolicyError('Deprecated policy can not be default. '
'Invalid config', self.idx)
self.ring_name = _get_policy_string('object', self.idx)
self.object_ring = object_ring
@property
def name(self):
return self.alias_list[0]
@name.setter
def name_setter(self, name):
self._validate_policy_name(name)
self.alias_list[0] = name
@property
def aliases(self):
return ", ".join(self.alias_list)
def __int__(self):
return self.idx
def __cmp__(self, other):
return cmp(self.idx, int(other))
def __repr__(self):
return ("%s(%d, %r, is_default=%s, "
"is_deprecated=%s, policy_type=%r)") % \
(self.__class__.__name__, self.idx, self.alias_list,
self.is_default, self.is_deprecated, self.policy_type)
@classmethod
def register(cls, policy_type):
"""
Decorator for Storage Policy implementations to register
their StoragePolicy class. This will also set the policy_type
attribute on the registered implementation.
"""
def register_wrapper(policy_cls):
if policy_type in cls.policy_type_to_policy_cls:
raise PolicyError(
'%r is already registered for the policy_type %r' % (
cls.policy_type_to_policy_cls[policy_type],
policy_type))
cls.policy_type_to_policy_cls[policy_type] = policy_cls
policy_cls.policy_type = policy_type
return policy_cls
return register_wrapper
@classmethod
def _config_options_map(cls):
"""
Map config option name to StoragePolicy parameter name.
"""
return {
'name': 'name',
'aliases': 'aliases',
'policy_type': 'policy_type',
'default': 'is_default',
'deprecated': 'is_deprecated',
}
@classmethod
def from_config(cls, policy_index, options):
config_to_policy_option_map = cls._config_options_map()
policy_options = {}
for config_option, value in options.items():
try:
policy_option = config_to_policy_option_map[config_option]
except KeyError:
raise PolicyError('Invalid option %r in '
'storage-policy section' % config_option,
index=policy_index)
policy_options[policy_option] = value
return cls(policy_index, **policy_options)
def get_info(self, config=False):
"""
Return the info dict and conf file options for this policy.
:param config: boolean, if True all config options are returned
"""
info = {}
for config_option, policy_attribute in \
self._config_options_map().items():
info[config_option] = getattr(self, policy_attribute)
if not config:
# remove some options for public consumption
if not self.is_default:
info.pop('default')
if not self.is_deprecated:
info.pop('deprecated')
info.pop('policy_type')
return info
def _validate_policy_name(self, name):
"""
Helper function to determine the validity of a policy name. Used
to check policy names before setting them.
:param name: a name string for a single policy name.
:returns: true if the name is valid.
:raises: PolicyError if the policy name is invalid.
"""
# this is defensively restrictive, but could be expanded in the future
if not all(c in VALID_CHARS for c in name):
raise PolicyError('Names are used as HTTP headers, and can not '
'reliably contain any characters not in %r. '
'Invalid name %r' % (VALID_CHARS, name))
if name.upper() == LEGACY_POLICY_NAME.upper() and self.idx != 0:
msg = 'The name %s is reserved for policy index 0. ' \
'Invalid name %r' % (LEGACY_POLICY_NAME, name)
raise PolicyError(msg, self.idx)
if name.upper() in (existing_name.upper() for existing_name
in self.alias_list):
msg = 'The name %s is already assigned to this policy.' % name
raise PolicyError(msg, self.idx)
return True
def add_name(self, name):
"""
Adds an alias name to the storage policy. Shouldn't be called
directly from the storage policy but instead through the
storage policy collection class, so lookups by name resolve
correctly.
:param name: a new alias for the storage policy
"""
if self._validate_policy_name(name):
self.alias_list.append(name)
def remove_name(self, name):
"""
Removes an alias name from the storage policy. Shouldn't be called
directly from the storage policy but instead through the storage
policy collection class, so lookups by name resolve correctly. If
the name removed is the primary name then the next availiable alias
will be adopted as the new primary name.
:param name: a name assigned to the storage policy
"""
if name not in self.alias_list:
raise PolicyError("%s is not a name assigned to policy %s"
% (name, self.idx))
if len(self.alias_list) == 1:
raise PolicyError("Cannot remove only name %s from policy %s. "
"Policies must have at least one name."
% (name, self.idx))
else:
self.alias_list.remove(name)
def change_primary_name(self, name):
"""
Changes the primary/default name of the policy to a specified name.
:param name: a string name to replace the current primary name.
"""
if name == self.name:
return
elif name in self.alias_list:
self.remove_name(name)
else:
self._validate_policy_name(name)
self.alias_list.insert(0, name)
def _validate_ring(self):
"""
Hook, called when the ring is loaded. Can be used to
validate the ring against the StoragePolicy configuration.
"""
pass
def load_ring(self, swift_dir):
"""
Load the ring for this policy immediately.
:param swift_dir: path to rings
"""
if self.object_ring:
return
self.object_ring = Ring(swift_dir, ring_name=self.ring_name)
# Validate ring to make sure it conforms to policy requirements
self._validate_ring()
@property
def quorum(self):
"""
Number of successful backend requests needed for the proxy to
consider the client request successful.
"""
raise NotImplementedError()
@BaseStoragePolicy.register(REPL_POLICY)
class StoragePolicy(BaseStoragePolicy):<|fim▁hole|> Not meant to be instantiated directly; use
:func:`~swift.common.storage_policy.reload_storage_policies` to load
POLICIES from ``swift.conf``.
"""
@property
def quorum(self):
"""
Quorum concept in the replication case:
floor(number of replica / 2) + 1
"""
if not self.object_ring:
raise PolicyError('Ring is not loaded')
return quorum_size(self.object_ring.replica_count)
@BaseStoragePolicy.register(EC_POLICY)
class ECStoragePolicy(BaseStoragePolicy):
"""
Represents a storage policy of type 'erasure_coding'.
Not meant to be instantiated directly; use
:func:`~swift.common.storage_policy.reload_storage_policies` to load
POLICIES from ``swift.conf``.
"""
def __init__(self, idx, name='', aliases='', is_default=False,
is_deprecated=False, object_ring=None,
ec_segment_size=DEFAULT_EC_OBJECT_SEGMENT_SIZE,
ec_type=None, ec_ndata=None, ec_nparity=None):
super(ECStoragePolicy, self).__init__(
idx=idx, name=name, aliases=aliases, is_default=is_default,
is_deprecated=is_deprecated, object_ring=object_ring)
# Validate erasure_coding policy specific members
# ec_type is one of the EC implementations supported by PyEClib
if ec_type is None:
raise PolicyError('Missing ec_type')
if ec_type not in VALID_EC_TYPES:
raise PolicyError('Wrong ec_type %s for policy %s, should be one'
' of "%s"' % (ec_type, self.name,
', '.join(VALID_EC_TYPES)))
self._ec_type = ec_type
# Define _ec_ndata as the number of EC data fragments
# Accessible as the property "ec_ndata"
try:
value = int(ec_ndata)
if value <= 0:
raise ValueError
self._ec_ndata = value
except (TypeError, ValueError):
raise PolicyError('Invalid ec_num_data_fragments %r' %
ec_ndata, index=self.idx)
# Define _ec_nparity as the number of EC parity fragments
# Accessible as the property "ec_nparity"
try:
value = int(ec_nparity)
if value <= 0:
raise ValueError
self._ec_nparity = value
except (TypeError, ValueError):
raise PolicyError('Invalid ec_num_parity_fragments %r'
% ec_nparity, index=self.idx)
# Define _ec_segment_size as the encode segment unit size
# Accessible as the property "ec_segment_size"
try:
value = int(ec_segment_size)
if value <= 0:
raise ValueError
self._ec_segment_size = value
except (TypeError, ValueError):
raise PolicyError('Invalid ec_object_segment_size %r' %
ec_segment_size, index=self.idx)
# Initialize PyECLib EC backend
try:
self.pyeclib_driver = \
ECDriver(k=self._ec_ndata, m=self._ec_nparity,
ec_type=self._ec_type)
except ECDriverError as e:
raise PolicyError("Error creating EC policy (%s)" % e,
index=self.idx)
# quorum size in the EC case depends on the choice of EC scheme.
self._ec_quorum_size = \
self._ec_ndata + self.pyeclib_driver.min_parity_fragments_needed()
@property
def ec_type(self):
return self._ec_type
@property
def ec_ndata(self):
return self._ec_ndata
@property
def ec_nparity(self):
return self._ec_nparity
@property
def ec_segment_size(self):
return self._ec_segment_size
@property
def fragment_size(self):
"""
Maximum length of a fragment, including header.
NB: a fragment archive is a sequence of 0 or more max-length
fragments followed by one possibly-shorter fragment.
"""
# Technically pyeclib's get_segment_info signature calls for
# (data_len, segment_size) but on a ranged GET we don't know the
# ec-content-length header before we need to compute where in the
# object we should request to align with the fragment size. So we
# tell pyeclib a lie - from it's perspective, as long as data_len >=
# segment_size it'll give us the answer we want. From our
# perspective, because we only use this answer to calculate the
# *minimum* size we should read from an object body even if data_len <
# segment_size we'll still only read *the whole one and only last
# fragment* and pass than into pyeclib who will know what to do with
# it just as it always does when the last fragment is < fragment_size.
return self.pyeclib_driver.get_segment_info(
self.ec_segment_size, self.ec_segment_size)['fragment_size']
@property
def ec_scheme_description(self):
"""
This short hand form of the important parts of the ec schema is stored
in Object System Metadata on the EC Fragment Archives for debugging.
"""
return "%s %d+%d" % (self._ec_type, self._ec_ndata, self._ec_nparity)
def __repr__(self):
return ("%s, EC config(ec_type=%s, ec_segment_size=%d, "
"ec_ndata=%d, ec_nparity=%d)") % \
(super(ECStoragePolicy, self).__repr__(), self.ec_type,
self.ec_segment_size, self.ec_ndata, self.ec_nparity)
@classmethod
def _config_options_map(cls):
options = super(ECStoragePolicy, cls)._config_options_map()
options.update({
'ec_type': 'ec_type',
'ec_object_segment_size': 'ec_segment_size',
'ec_num_data_fragments': 'ec_ndata',
'ec_num_parity_fragments': 'ec_nparity',
})
return options
def get_info(self, config=False):
info = super(ECStoragePolicy, self).get_info(config=config)
if not config:
info.pop('ec_object_segment_size')
info.pop('ec_num_data_fragments')
info.pop('ec_num_parity_fragments')
info.pop('ec_type')
return info
def _validate_ring(self):
"""
EC specific validation
Replica count check - we need _at_least_ (#data + #parity) replicas
configured. Also if the replica count is larger than exactly that
number there's a non-zero risk of error for code that is considering
the number of nodes in the primary list from the ring.
"""
if not self.object_ring:
raise PolicyError('Ring is not loaded')
nodes_configured = self.object_ring.replica_count
if nodes_configured != (self.ec_ndata + self.ec_nparity):
raise RingValidationError(
'EC ring for policy %s needs to be configured with '
'exactly %d nodes. Got %d.' % (
self.name, self.ec_ndata + self.ec_nparity,
nodes_configured))
@property
def quorum(self):
"""
Number of successful backend requests needed for the proxy to consider
the client request successful.
The quorum size for EC policies defines the minimum number
of data + parity elements required to be able to guarantee
the desired fault tolerance, which is the number of data
elements supplemented by the minimum number of parity
elements required by the chosen erasure coding scheme.
For example, for Reed-Solomon, the minimum number parity
elements required is 1, and thus the quorum_size requirement
is ec_ndata + 1.
Given the number of parity elements required is not the same
for every erasure coding scheme, consult PyECLib for
min_parity_fragments_needed()
"""
return self._ec_quorum_size
class StoragePolicyCollection(object):
"""
This class represents the collection of valid storage policies for the
cluster and is instantiated as :class:`StoragePolicy` objects are added to
the collection when ``swift.conf`` is parsed by
:func:`parse_storage_policies`.
When a StoragePolicyCollection is created, the following validation
is enforced:
* If a policy with index 0 is not declared and no other policies defined,
Swift will create one
* The policy index must be a non-negative integer
* If no policy is declared as the default and no other policies are
defined, the policy with index 0 is set as the default
* Policy indexes must be unique
* Policy names are required
* Policy names are case insensitive
* Policy names must contain only letters, digits or a dash
* Policy names must be unique
* The policy name 'Policy-0' can only be used for the policy with index 0
* If any policies are defined, exactly one policy must be declared default
* Deprecated policies can not be declared the default
"""
def __init__(self, pols):
self.default = []
self.by_name = {}
self.by_index = {}
self._validate_policies(pols)
def _add_policy(self, policy):
"""
Add pre-validated policies to internal indexes.
"""
for name in policy.alias_list:
self.by_name[name.upper()] = policy
self.by_index[int(policy)] = policy
def __repr__(self):
return (textwrap.dedent("""
StoragePolicyCollection([
%s
])
""") % ',\n '.join(repr(p) for p in self)).strip()
def __len__(self):
return len(self.by_index)
def __getitem__(self, key):
return self.by_index[key]
def __iter__(self):
return iter(self.by_index.values())
def _validate_policies(self, policies):
"""
:param policies: list of policies
"""
for policy in policies:
if int(policy) in self.by_index:
raise PolicyError('Duplicate index %s conflicts with %s' % (
policy, self.get_by_index(int(policy))))
for name in policy.alias_list:
if name.upper() in self.by_name:
raise PolicyError('Duplicate name %s conflicts with %s' % (
policy, self.get_by_name(name)))
if policy.is_default:
if not self.default:
self.default = policy
else:
raise PolicyError(
'Duplicate default %s conflicts with %s' % (
policy, self.default))
self._add_policy(policy)
# If a 0 policy wasn't explicitly given, or nothing was
# provided, create the 0 policy now
if 0 not in self.by_index:
if len(self) != 0:
raise PolicyError('You must specify a storage policy '
'section for policy index 0 in order '
'to define multiple policies')
self._add_policy(StoragePolicy(0, name=LEGACY_POLICY_NAME))
# at least one policy must be enabled
enabled_policies = [p for p in self if not p.is_deprecated]
if not enabled_policies:
raise PolicyError("Unable to find policy that's not deprecated!")
# if needed, specify default
if not self.default:
if len(self) > 1:
raise PolicyError("Unable to find default policy")
self.default = self[0]
self.default.is_default = True
def get_by_name(self, name):
"""
Find a storage policy by its name.
:param name: name of the policy
:returns: storage policy, or None
"""
return self.by_name.get(name.upper())
def get_by_index(self, index):
"""
Find a storage policy by its index.
An index of None will be treated as 0.
:param index: numeric index of the storage policy
:returns: storage policy, or None if no such policy
"""
# makes it easier for callers to just pass in a header value
if index in ('', None):
index = 0
else:
try:
index = int(index)
except ValueError:
return None
return self.by_index.get(index)
@property
def legacy(self):
return self.get_by_index(None)
def get_object_ring(self, policy_idx, swift_dir):
"""
Get the ring object to use to handle a request based on its policy.
An index of None will be treated as 0.
:param policy_idx: policy index as defined in swift.conf
:param swift_dir: swift_dir used by the caller
:returns: appropriate ring object
"""
policy = self.get_by_index(policy_idx)
if not policy:
raise PolicyError("No policy with index %s" % policy_idx)
if not policy.object_ring:
policy.load_ring(swift_dir)
return policy.object_ring
def get_policy_info(self):
"""
Build info about policies for the /info endpoint
:returns: list of dicts containing relevant policy information
"""
policy_info = []
for pol in self:
# delete from /info if deprecated
if pol.is_deprecated:
continue
policy_entry = pol.get_info()
policy_info.append(policy_entry)
return policy_info
def add_policy_alias(self, policy_index, *aliases):
"""
Adds a new name or names to a policy
:param policy_index: index of a policy in this policy collection.
:param *aliases: arbitrary number of string policy names to add.
"""
policy = self.get_by_index(policy_index)
for alias in aliases:
if alias.upper() in self.by_name:
raise PolicyError('Duplicate name %s in use '
'by policy %s' % (alias,
self.get_by_name(alias)))
else:
policy.add_name(alias)
self.by_name[alias.upper()] = policy
def remove_policy_alias(self, *aliases):
"""
Removes a name or names from a policy. If the name removed is the
primary name then the next availiable alias will be adopted
as the new primary name.
:param *aliases: arbitrary number of existing policy names to remove.
"""
for alias in aliases:
policy = self.get_by_name(alias)
if not policy:
raise PolicyError('No policy with name %s exists.' % alias)
if len(policy.alias_list) == 1:
raise PolicyError('Policy %s with name %s has only one name. '
'Policies must have at least one name.' % (
policy, alias))
else:
policy.remove_name(alias)
del self.by_name[alias.upper()]
def change_policy_primary_name(self, policy_index, new_name):
"""
Changes the primary or default name of a policy. The new primary
name can be an alias that already belongs to the policy or a
completely new name.
:param policy_index: index of a policy in this policy collection.
:param new_name: a string name to set as the new default name.
"""
policy = self.get_by_index(policy_index)
name_taken = self.get_by_name(new_name)
# if the name belongs to some other policy in the collection
if name_taken and name_taken != policy:
raise PolicyError('Other policy %s with name %s exists.' %
(self.get_by_name(new_name).idx, new_name))
else:
policy.change_primary_name(new_name)
self.by_name[new_name.upper()] = policy
def parse_storage_policies(conf):
"""
Parse storage policies in ``swift.conf`` - note that validation
is done when the :class:`StoragePolicyCollection` is instantiated.
:param conf: ConfigParser parser object for swift.conf
"""
policies = []
for section in conf.sections():
if not section.startswith('storage-policy:'):
continue
policy_index = section.split(':', 1)[1]
config_options = dict(conf.items(section))
policy_type = config_options.pop('policy_type', DEFAULT_POLICY_TYPE)
policy_cls = BaseStoragePolicy.policy_type_to_policy_cls[policy_type]
policy = policy_cls.from_config(policy_index, config_options)
policies.append(policy)
return StoragePolicyCollection(policies)
class StoragePolicySingleton(object):
"""
An instance of this class is the primary interface to storage policies
exposed as a module level global named ``POLICIES``. This global
reference wraps ``_POLICIES`` which is normally instantiated by parsing
``swift.conf`` and will result in an instance of
:class:`StoragePolicyCollection`.
You should never patch this instance directly, instead patch the module
level ``_POLICIES`` instance so that swift code which imported
``POLICIES`` directly will reference the patched
:class:`StoragePolicyCollection`.
"""
def __iter__(self):
return iter(_POLICIES)
def __len__(self):
return len(_POLICIES)
def __getitem__(self, key):
return _POLICIES[key]
def __getattribute__(self, name):
return getattr(_POLICIES, name)
def __repr__(self):
return repr(_POLICIES)
def reload_storage_policies():
"""
Reload POLICIES from ``swift.conf``.
"""
global _POLICIES
policy_conf = ConfigParser()
policy_conf.read(SWIFT_CONF_FILE)
try:
_POLICIES = parse_storage_policies(policy_conf)
except PolicyError as e:
raise SystemExit('ERROR: Invalid Storage Policy Configuration '
'in %s (%s)' % (SWIFT_CONF_FILE, e))
# parse configuration and setup singleton
_POLICIES = None
reload_storage_policies()
POLICIES = StoragePolicySingleton()<|fim▁end|> | """
Represents a storage policy of type 'replication'. Default storage policy
class unless otherwise overridden from swift.conf.
|
<|file_name|>PyShell.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3
import getopt
import os
import os.path
import re
import socket
import subprocess
import sys
import threading
import time
import tokenize
import traceback
import types
import linecache
from code import InteractiveInterpreter
try:
from tkinter import *
except ImportError:
print("** IDLE can't import Tkinter. " \
"Your Python may not be configured for Tk. **", file=sys.__stderr__)
sys.exit(1)
import tkinter.messagebox as tkMessageBox
from idlelib.EditorWindow import EditorWindow, fixwordbreaks
from idlelib.FileList import FileList
from idlelib.ColorDelegator import ColorDelegator
from idlelib.UndoDelegator import UndoDelegator
from idlelib.OutputWindow import OutputWindow
from idlelib.configHandler import idleConf
from idlelib import idlever
from idlelib import rpc
from idlelib import Debugger
from idlelib import RemoteDebugger
from idlelib import macosxSupport
HOST = '127.0.0.1' # python execution server on localhost loopback
PORT = 0 # someday pass in host, port for remote debug capability
# Override warnings module to write to warning_stream. Initialize to send IDLE
# internal warnings to the console. ScriptBinding.check_syntax() will
# temporarily redirect the stream to the shell window to display warnings when
# checking user's code.
global warning_stream
warning_stream = sys.__stderr__
try:
import warnings
except ImportError:
pass
else:
def idle_showwarning(message, category, filename, lineno,
file=None, line=None):
if file is None:
file = warning_stream
try:
file.write(warnings.formatwarning(message, category, filename,
lineno, line=line))
except IOError:
pass ## file (probably __stderr__) is invalid, warning dropped.
warnings.showwarning = idle_showwarning
def idle_formatwarning(message, category, filename, lineno, line=None):
"""Format warnings the IDLE way"""
s = "\nWarning (from warnings module):\n"
s += ' File \"%s\", line %s\n' % (filename, lineno)
if line is None:
line = linecache.getline(filename, lineno)
line = line.strip()
if line:
s += " %s\n" % line
s += "%s: %s\n>>> " % (category.__name__, message)
return s
warnings.formatwarning = idle_formatwarning
def extended_linecache_checkcache(filename=None,
orig_checkcache=linecache.checkcache):
"""Extend linecache.checkcache to preserve the <pyshell#...> entries
Rather than repeating the linecache code, patch it to save the
<pyshell#...> entries, call the original linecache.checkcache()
(skipping them), and then restore the saved entries.
orig_checkcache is bound at definition time to the original
method, allowing it to be patched.
"""
cache = linecache.cache
save = {}
for key in list(cache):
if key[:1] + key[-1:] == '<>':
save[key] = cache.pop(key)
orig_checkcache(filename)
cache.update(save)
# Patch linecache.checkcache():
linecache.checkcache = extended_linecache_checkcache
class PyShellEditorWindow(EditorWindow):
"Regular text edit window in IDLE, supports breakpoints"
def __init__(self, *args):
self.breakpoints = []
EditorWindow.__init__(self, *args)
self.text.bind("<<set-breakpoint-here>>", self.set_breakpoint_here)
self.text.bind("<<clear-breakpoint-here>>", self.clear_breakpoint_here)
self.text.bind("<<open-python-shell>>", self.flist.open_shell)
self.breakpointPath = os.path.join(idleConf.GetUserCfgDir(),
'breakpoints.lst')
# whenever a file is changed, restore breakpoints
if self.io.filename: self.restore_file_breaks()
def filename_changed_hook(old_hook=self.io.filename_change_hook,
self=self):
self.restore_file_breaks()
old_hook()
self.io.set_filename_change_hook(filename_changed_hook)
rmenu_specs = [("Set Breakpoint", "<<set-breakpoint-here>>"),
("Clear Breakpoint", "<<clear-breakpoint-here>>")]
def set_breakpoint(self, lineno):
text = self.text
filename = self.io.filename
text.tag_add("BREAK", "%d.0" % lineno, "%d.0" % (lineno+1))
try:
i = self.breakpoints.index(lineno)
except ValueError: # only add if missing, i.e. do once
self.breakpoints.append(lineno)
try: # update the subprocess debugger
debug = self.flist.pyshell.interp.debugger
debug.set_breakpoint_here(filename, lineno)
except: # but debugger may not be active right now....
pass
def set_breakpoint_here(self, event=None):
text = self.text
filename = self.io.filename
if not filename:
text.bell()
return
lineno = int(float(text.index("insert")))
self.set_breakpoint(lineno)
def clear_breakpoint_here(self, event=None):
text = self.text
filename = self.io.filename
if not filename:
text.bell()
return
lineno = int(float(text.index("insert")))
try:<|fim▁hole|> text.tag_remove("BREAK", "insert linestart",\
"insert lineend +1char")
try:
debug = self.flist.pyshell.interp.debugger
debug.clear_breakpoint_here(filename, lineno)
except:
pass
def clear_file_breaks(self):
if self.breakpoints:
text = self.text
filename = self.io.filename
if not filename:
text.bell()
return
self.breakpoints = []
text.tag_remove("BREAK", "1.0", END)
try:
debug = self.flist.pyshell.interp.debugger
debug.clear_file_breaks(filename)
except:
pass
def store_file_breaks(self):
"Save breakpoints when file is saved"
# XXX 13 Dec 2002 KBK Currently the file must be saved before it can
# be run. The breaks are saved at that time. If we introduce
# a temporary file save feature the save breaks functionality
# needs to be re-verified, since the breaks at the time the
# temp file is created may differ from the breaks at the last
# permanent save of the file. Currently, a break introduced
# after a save will be effective, but not persistent.
# This is necessary to keep the saved breaks synched with the
# saved file.
#
# Breakpoints are set as tagged ranges in the text. Certain
# kinds of edits cause these ranges to be deleted: Inserting
# or deleting a line just before a breakpoint, and certain
# deletions prior to a breakpoint. These issues need to be
# investigated and understood. It's not clear if they are
# Tk issues or IDLE issues, or whether they can actually
# be fixed. Since a modified file has to be saved before it is
# run, and since self.breakpoints (from which the subprocess
# debugger is loaded) is updated during the save, the visible
# breaks stay synched with the subprocess even if one of these
# unexpected breakpoint deletions occurs.
breaks = self.breakpoints
filename = self.io.filename
try:
with open(self.breakpointPath, "r") as fp:
lines = fp.readlines()
except IOError:
lines = []
try:
with open(self.breakpointPath, "w") as new_file:
for line in lines:
if not line.startswith(filename + '='):
new_file.write(line)
self.update_breakpoints()
breaks = self.breakpoints
if breaks:
new_file.write(filename + '=' + str(breaks) + '\n')
except IOError as err:
if not getattr(self.root, "breakpoint_error_displayed", False):
self.root.breakpoint_error_displayed = True
tkMessageBox.showerror(title='IDLE Error',
message='Unable to update breakpoint list:\n%s'
% str(err),
parent=self.text)
def restore_file_breaks(self):
self.text.update() # this enables setting "BREAK" tags to be visible
filename = self.io.filename
if filename is None:
return
if os.path.isfile(self.breakpointPath):
with open(self.breakpointPath, "r") as fp:
lines = fp.readlines()
for line in lines:
if line.startswith(filename + '='):
breakpoint_linenumbers = eval(line[len(filename)+1:])
for breakpoint_linenumber in breakpoint_linenumbers:
self.set_breakpoint(breakpoint_linenumber)
def update_breakpoints(self):
"Retrieves all the breakpoints in the current window"
text = self.text
ranges = text.tag_ranges("BREAK")
linenumber_list = self.ranges_to_linenumbers(ranges)
self.breakpoints = linenumber_list
def ranges_to_linenumbers(self, ranges):
lines = []
for index in range(0, len(ranges), 2):
lineno = int(float(ranges[index]))
end = int(float(ranges[index+1]))
while lineno < end:
lines.append(lineno)
lineno += 1
return lines
# XXX 13 Dec 2002 KBK Not used currently
# def saved_change_hook(self):
# "Extend base method - clear breaks if module is modified"
# if not self.get_saved():
# self.clear_file_breaks()
# EditorWindow.saved_change_hook(self)
def _close(self):
"Extend base method - clear breaks when module is closed"
self.clear_file_breaks()
EditorWindow._close(self)
class PyShellFileList(FileList):
"Extend base class: IDLE supports a shell and breakpoints"
# override FileList's class variable, instances return PyShellEditorWindow
# instead of EditorWindow when new edit windows are created.
EditorWindow = PyShellEditorWindow
pyshell = None
def open_shell(self, event=None):
if self.pyshell:
self.pyshell.top.wakeup()
else:
self.pyshell = PyShell(self)
if self.pyshell:
if not self.pyshell.begin():
return None
return self.pyshell
class ModifiedColorDelegator(ColorDelegator):
"Extend base class: colorizer for the shell window itself"
def __init__(self):
ColorDelegator.__init__(self)
self.LoadTagDefs()
def recolorize_main(self):
self.tag_remove("TODO", "1.0", "iomark")
self.tag_add("SYNC", "1.0", "iomark")
ColorDelegator.recolorize_main(self)
def LoadTagDefs(self):
ColorDelegator.LoadTagDefs(self)
theme = idleConf.GetOption('main','Theme','name')
self.tagdefs.update({
"stdin": {'background':None,'foreground':None},
"stdout": idleConf.GetHighlight(theme, "stdout"),
"stderr": idleConf.GetHighlight(theme, "stderr"),
"console": idleConf.GetHighlight(theme, "console"),
})
class ModifiedUndoDelegator(UndoDelegator):
"Extend base class: forbid insert/delete before the I/O mark"
def insert(self, index, chars, tags=None):
try:
if self.delegate.compare(index, "<", "iomark"):
self.delegate.bell()
return
except TclError:
pass
UndoDelegator.insert(self, index, chars, tags)
def delete(self, index1, index2=None):
try:
if self.delegate.compare(index1, "<", "iomark"):
self.delegate.bell()
return
except TclError:
pass
UndoDelegator.delete(self, index1, index2)
class MyRPCClient(rpc.RPCClient):
def handle_EOF(self):
"Override the base class - just re-raise EOFError"
raise EOFError
class ModifiedInterpreter(InteractiveInterpreter):
def __init__(self, tkconsole):
self.tkconsole = tkconsole
locals = sys.modules['__main__'].__dict__
InteractiveInterpreter.__init__(self, locals=locals)
self.save_warnings_filters = None
self.restarting = False
self.subprocess_arglist = None
self.port = PORT
self.original_compiler_flags = self.compile.compiler.flags
rpcclt = None
rpcsubproc = None
def spawn_subprocess(self):
if self.subprocess_arglist is None:
self.subprocess_arglist = self.build_subprocess_arglist()
self.rpcsubproc = subprocess.Popen(self.subprocess_arglist)
def build_subprocess_arglist(self):
assert (self.port!=0), (
"Socket should have been assigned a port number.")
w = ['-W' + s for s in sys.warnoptions]
# Maybe IDLE is installed and is being accessed via sys.path,
# or maybe it's not installed and the idle.py script is being
# run from the IDLE source directory.
del_exitf = idleConf.GetOption('main', 'General', 'delete-exitfunc',
default=False, type='bool')
if __name__ == 'idlelib.PyShell':
command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,)
else:
command = "__import__('run').main(%r)" % (del_exitf,)
return [sys.executable] + w + ["-c", command, str(self.port)]
def start_subprocess(self):
addr = (HOST, self.port)
# GUI makes several attempts to acquire socket, listens for connection
for i in range(3):
time.sleep(i)
try:
self.rpcclt = MyRPCClient(addr)
break
except socket.error as err:
pass
else:
self.display_port_binding_error()
return None
# if PORT was 0, system will assign an 'ephemeral' port. Find it out:
self.port = self.rpcclt.listening_sock.getsockname()[1]
# if PORT was not 0, probably working with a remote execution server
if PORT != 0:
# To allow reconnection within the 2MSL wait (cf. Stevens TCP
# V1, 18.6), set SO_REUSEADDR. Note that this can be problematic
# on Windows since the implementation allows two active sockets on
# the same address!
self.rpcclt.listening_sock.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
self.spawn_subprocess()
#time.sleep(20) # test to simulate GUI not accepting connection
# Accept the connection from the Python execution server
self.rpcclt.listening_sock.settimeout(10)
try:
self.rpcclt.accept()
except socket.timeout as err:
self.display_no_subprocess_error()
return None
self.rpcclt.register("stdin", self.tkconsole)
self.rpcclt.register("stdout", self.tkconsole.stdout)
self.rpcclt.register("stderr", self.tkconsole.stderr)
self.rpcclt.register("flist", self.tkconsole.flist)
self.rpcclt.register("linecache", linecache)
self.rpcclt.register("interp", self)
self.transfer_path(with_cwd=True)
self.poll_subprocess()
return self.rpcclt
def restart_subprocess(self, with_cwd=False):
if self.restarting:
return self.rpcclt
self.restarting = True
# close only the subprocess debugger
debug = self.getdebugger()
if debug:
try:
# Only close subprocess debugger, don't unregister gui_adap!
RemoteDebugger.close_subprocess_debugger(self.rpcclt)
except:
pass
# Kill subprocess, spawn a new one, accept connection.
self.rpcclt.close()
self.terminate_subprocess()
console = self.tkconsole
was_executing = console.executing
console.executing = False
self.spawn_subprocess()
try:
self.rpcclt.accept()
except socket.timeout as err:
self.display_no_subprocess_error()
return None
self.transfer_path(with_cwd=with_cwd)
# annotate restart in shell window and mark it
console.text.delete("iomark", "end-1c")
if was_executing:
console.write('\n')
console.showprompt()
halfbar = ((int(console.width) - 16) // 2) * '='
console.write(halfbar + ' RESTART ' + halfbar)
console.text.mark_set("restart", "end-1c")
console.text.mark_gravity("restart", "left")
console.showprompt()
# restart subprocess debugger
if debug:
# Restarted debugger connects to current instance of debug GUI
gui = RemoteDebugger.restart_subprocess_debugger(self.rpcclt)
# reload remote debugger breakpoints for all PyShellEditWindows
debug.load_breakpoints()
self.compile.compiler.flags = self.original_compiler_flags
self.restarting = False
return self.rpcclt
def __request_interrupt(self):
self.rpcclt.remotecall("exec", "interrupt_the_server", (), {})
def interrupt_subprocess(self):
threading.Thread(target=self.__request_interrupt).start()
def kill_subprocess(self):
try:
self.rpcclt.close()
except AttributeError: # no socket
pass
self.terminate_subprocess()
self.tkconsole.executing = False
self.rpcclt = None
def terminate_subprocess(self):
"Make sure subprocess is terminated"
try:
self.rpcsubproc.kill()
except OSError:
# process already terminated
return
else:
try:
self.rpcsubproc.wait()
except OSError:
return
def transfer_path(self, with_cwd=False):
if with_cwd: # Issue 13506
path = [''] # include Current Working Directory
path.extend(sys.path)
else:
path = sys.path
self.runcommand("""if 1:
import sys as _sys
_sys.path = %r
del _sys
\n""" % (path,))
active_seq = None
def poll_subprocess(self):
clt = self.rpcclt
if clt is None:
return
try:
response = clt.pollresponse(self.active_seq, wait=0.05)
except (EOFError, IOError, KeyboardInterrupt):
# lost connection or subprocess terminated itself, restart
# [the KBI is from rpc.SocketIO.handle_EOF()]
if self.tkconsole.closing:
return
response = None
self.restart_subprocess()
if response:
self.tkconsole.resetoutput()
self.active_seq = None
how, what = response
console = self.tkconsole.console
if how == "OK":
if what is not None:
print(repr(what), file=console)
elif how == "EXCEPTION":
if self.tkconsole.getvar("<<toggle-jit-stack-viewer>>"):
self.remote_stack_viewer()
elif how == "ERROR":
errmsg = "PyShell.ModifiedInterpreter: Subprocess ERROR:\n"
print(errmsg, what, file=sys.__stderr__)
print(errmsg, what, file=console)
# we received a response to the currently active seq number:
try:
self.tkconsole.endexecuting()
except AttributeError: # shell may have closed
pass
# Reschedule myself
if not self.tkconsole.closing:
self.tkconsole.text.after(self.tkconsole.pollinterval,
self.poll_subprocess)
debugger = None
def setdebugger(self, debugger):
self.debugger = debugger
def getdebugger(self):
return self.debugger
def open_remote_stack_viewer(self):
"""Initiate the remote stack viewer from a separate thread.
This method is called from the subprocess, and by returning from this
method we allow the subprocess to unblock. After a bit the shell
requests the subprocess to open the remote stack viewer which returns a
static object looking at the last exception. It is queried through
the RPC mechanism.
"""
self.tkconsole.text.after(300, self.remote_stack_viewer)
return
def remote_stack_viewer(self):
from idlelib import RemoteObjectBrowser
oid = self.rpcclt.remotequeue("exec", "stackviewer", ("flist",), {})
if oid is None:
self.tkconsole.root.bell()
return
item = RemoteObjectBrowser.StubObjectTreeItem(self.rpcclt, oid)
from idlelib.TreeWidget import ScrolledCanvas, TreeNode
top = Toplevel(self.tkconsole.root)
theme = idleConf.GetOption('main','Theme','name')
background = idleConf.GetHighlight(theme, 'normal')['background']
sc = ScrolledCanvas(top, bg=background, highlightthickness=0)
sc.frame.pack(expand=1, fill="both")
node = TreeNode(sc.canvas, None, item)
node.expand()
# XXX Should GC the remote tree when closing the window
gid = 0
def execsource(self, source):
"Like runsource() but assumes complete exec source"
filename = self.stuffsource(source)
self.execfile(filename, source)
def execfile(self, filename, source=None):
"Execute an existing file"
if source is None:
with tokenize.open(filename) as fp:
source = fp.read()
try:
code = compile(source, filename, "exec")
except (OverflowError, SyntaxError):
self.tkconsole.resetoutput()
tkerr = self.tkconsole.stderr
print('*** Error in script or command!\n', file=tkerr)
print('Traceback (most recent call last):', file=tkerr)
InteractiveInterpreter.showsyntaxerror(self, filename)
self.tkconsole.showprompt()
else:
self.runcode(code)
def runsource(self, source):
"Extend base class method: Stuff the source in the line cache first"
filename = self.stuffsource(source)
self.more = 0
self.save_warnings_filters = warnings.filters[:]
warnings.filterwarnings(action="error", category=SyntaxWarning)
# at the moment, InteractiveInterpreter expects str
assert isinstance(source, str)
#if isinstance(source, str):
# from idlelib import IOBinding
# try:
# source = source.encode(IOBinding.encoding)
# except UnicodeError:
# self.tkconsole.resetoutput()
# self.write("Unsupported characters in input\n")
# return
try:
# InteractiveInterpreter.runsource() calls its runcode() method,
# which is overridden (see below)
return InteractiveInterpreter.runsource(self, source, filename)
finally:
if self.save_warnings_filters is not None:
warnings.filters[:] = self.save_warnings_filters
self.save_warnings_filters = None
def stuffsource(self, source):
"Stuff source in the filename cache"
filename = "<pyshell#%d>" % self.gid
self.gid = self.gid + 1
lines = source.split("\n")
linecache.cache[filename] = len(source)+1, 0, lines, filename
return filename
def prepend_syspath(self, filename):
"Prepend sys.path with file's directory if not already included"
self.runcommand("""if 1:
_filename = %r
import sys as _sys
from os.path import dirname as _dirname
_dir = _dirname(_filename)
if not _dir in _sys.path:
_sys.path.insert(0, _dir)
del _filename, _sys, _dirname, _dir
\n""" % (filename,))
def showsyntaxerror(self, filename=None):
"""Override Interactive Interpreter method: Use Colorizing
Color the offending position instead of printing it and pointing at it
with a caret.
"""
tkconsole = self.tkconsole
text = tkconsole.text
text.tag_remove("ERROR", "1.0", "end")
type, value, tb = sys.exc_info()
msg = getattr(value, 'msg', '') or value or "<no detail available>"
lineno = getattr(value, 'lineno', '') or 1
offset = getattr(value, 'offset', '') or 0
if offset == 0:
lineno += 1 #mark end of offending line
if lineno == 1:
pos = "iomark + %d chars" % (offset-1)
else:
pos = "iomark linestart + %d lines + %d chars" % \
(lineno-1, offset-1)
tkconsole.colorize_syntax_error(text, pos)
tkconsole.resetoutput()
self.write("SyntaxError: %s\n" % msg)
tkconsole.showprompt()
def showtraceback(self):
"Extend base class method to reset output properly"
self.tkconsole.resetoutput()
self.checklinecache()
InteractiveInterpreter.showtraceback(self)
if self.tkconsole.getvar("<<toggle-jit-stack-viewer>>"):
self.tkconsole.open_stack_viewer()
def checklinecache(self):
c = linecache.cache
for key in list(c.keys()):
if key[:1] + key[-1:] != "<>":
del c[key]
def runcommand(self, code):
"Run the code without invoking the debugger"
# The code better not raise an exception!
if self.tkconsole.executing:
self.display_executing_dialog()
return 0
if self.rpcclt:
self.rpcclt.remotequeue("exec", "runcode", (code,), {})
else:
exec(code, self.locals)
return 1
def runcode(self, code):
"Override base class method"
if self.tkconsole.executing:
self.interp.restart_subprocess()
self.checklinecache()
if self.save_warnings_filters is not None:
warnings.filters[:] = self.save_warnings_filters
self.save_warnings_filters = None
debugger = self.debugger
try:
self.tkconsole.beginexecuting()
if not debugger and self.rpcclt is not None:
self.active_seq = self.rpcclt.asyncqueue("exec", "runcode",
(code,), {})
elif debugger:
debugger.run(code, self.locals)
else:
exec(code, self.locals)
except SystemExit:
if not self.tkconsole.closing:
if tkMessageBox.askyesno(
"Exit?",
"Do you want to exit altogether?",
default="yes",
master=self.tkconsole.text):
raise
else:
self.showtraceback()
else:
raise
except:
if use_subprocess:
print("IDLE internal error in runcode()",
file=self.tkconsole.stderr)
self.showtraceback()
self.tkconsole.endexecuting()
else:
if self.tkconsole.canceled:
self.tkconsole.canceled = False
print("KeyboardInterrupt", file=self.tkconsole.stderr)
else:
self.showtraceback()
finally:
if not use_subprocess:
try:
self.tkconsole.endexecuting()
except AttributeError: # shell may have closed
pass
def write(self, s):
"Override base class method"
self.tkconsole.stderr.write(s)
def display_port_binding_error(self):
tkMessageBox.showerror(
"Port Binding Error",
"IDLE can't bind to a TCP/IP port, which is necessary to "
"communicate with its Python execution server. This might be "
"because no networking is installed on this computer. "
"Run IDLE with the -n command line switch to start without a "
"subprocess and refer to Help/IDLE Help 'Running without a "
"subprocess' for further details.",
master=self.tkconsole.text)
def display_no_subprocess_error(self):
tkMessageBox.showerror(
"Subprocess Startup Error",
"IDLE's subprocess didn't make connection. Either IDLE can't "
"start a subprocess or personal firewall software is blocking "
"the connection.",
master=self.tkconsole.text)
def display_executing_dialog(self):
tkMessageBox.showerror(
"Already executing",
"The Python Shell window is already executing a command; "
"please wait until it is finished.",
master=self.tkconsole.text)
class PyShell(OutputWindow):
shell_title = "Python Shell"
# Override classes
ColorDelegator = ModifiedColorDelegator
UndoDelegator = ModifiedUndoDelegator
# Override menus
menu_specs = [
("file", "_File"),
("edit", "_Edit"),
("debug", "_Debug"),
("options", "_Options"),
("windows", "_Windows"),
("help", "_Help"),
]
if macosxSupport.runningAsOSXApp():
del menu_specs[-3]
menu_specs[-2] = ("windows", "_Window")
# New classes
from idlelib.IdleHistory import History
def __init__(self, flist=None):
if use_subprocess:
ms = self.menu_specs
if ms[2][0] != "shell":
ms.insert(2, ("shell", "She_ll"))
self.interp = ModifiedInterpreter(self)
if flist is None:
root = Tk()
fixwordbreaks(root)
root.withdraw()
flist = PyShellFileList(root)
#
OutputWindow.__init__(self, flist, None, None)
#
## self.config(usetabs=1, indentwidth=8, context_use_ps1=1)
self.usetabs = True
# indentwidth must be 8 when using tabs. See note in EditorWindow:
self.indentwidth = 8
self.context_use_ps1 = True
#
text = self.text
text.configure(wrap="char")
text.bind("<<newline-and-indent>>", self.enter_callback)
text.bind("<<plain-newline-and-indent>>", self.linefeed_callback)
text.bind("<<interrupt-execution>>", self.cancel_callback)
text.bind("<<end-of-file>>", self.eof_callback)
text.bind("<<open-stack-viewer>>", self.open_stack_viewer)
text.bind("<<toggle-debugger>>", self.toggle_debugger)
text.bind("<<toggle-jit-stack-viewer>>", self.toggle_jit_stack_viewer)
self.color = color = self.ColorDelegator()
self.per.insertfilter(color)
if use_subprocess:
text.bind("<<view-restart>>", self.view_restart_mark)
text.bind("<<restart-shell>>", self.restart_shell)
#
self.save_stdout = sys.stdout
self.save_stderr = sys.stderr
self.save_stdin = sys.stdin
from idlelib import IOBinding
self.stdout = PseudoFile(self, "stdout", IOBinding.encoding)
self.stderr = PseudoFile(self, "stderr", IOBinding.encoding)
self.console = PseudoFile(self, "console", IOBinding.encoding)
if not use_subprocess:
sys.stdout = self.stdout
sys.stderr = self.stderr
sys.stdin = self
try:
# page help() text to shell.
import pydoc # import must be done here to capture i/o rebinding.
# XXX KBK 27Dec07 use a textView someday, but must work w/o subproc
pydoc.pager = pydoc.plainpager
except:
sys.stderr = sys.__stderr__
raise
#
self.history = self.History(self.text)
#
self.pollinterval = 50 # millisec
def get_standard_extension_names(self):
return idleConf.GetExtensions(shell_only=True)
reading = False
executing = False
canceled = False
endoffile = False
closing = False
def set_warning_stream(self, stream):
global warning_stream
warning_stream = stream
def get_warning_stream(self):
return warning_stream
def toggle_debugger(self, event=None):
if self.executing:
tkMessageBox.showerror("Don't debug now",
"You can only toggle the debugger when idle",
master=self.text)
self.set_debugger_indicator()
return "break"
else:
db = self.interp.getdebugger()
if db:
self.close_debugger()
else:
self.open_debugger()
def set_debugger_indicator(self):
db = self.interp.getdebugger()
self.setvar("<<toggle-debugger>>", not not db)
def toggle_jit_stack_viewer(self, event=None):
pass # All we need is the variable
def close_debugger(self):
db = self.interp.getdebugger()
if db:
self.interp.setdebugger(None)
db.close()
if self.interp.rpcclt:
RemoteDebugger.close_remote_debugger(self.interp.rpcclt)
self.resetoutput()
self.console.write("[DEBUG OFF]\n")
sys.ps1 = ">>> "
self.showprompt()
self.set_debugger_indicator()
def open_debugger(self):
if self.interp.rpcclt:
dbg_gui = RemoteDebugger.start_remote_debugger(self.interp.rpcclt,
self)
else:
dbg_gui = Debugger.Debugger(self)
self.interp.setdebugger(dbg_gui)
dbg_gui.load_breakpoints()
sys.ps1 = "[DEBUG ON]\n>>> "
self.showprompt()
self.set_debugger_indicator()
def beginexecuting(self):
"Helper for ModifiedInterpreter"
self.resetoutput()
self.executing = 1
def endexecuting(self):
"Helper for ModifiedInterpreter"
self.executing = 0
self.canceled = 0
self.showprompt()
def close(self):
"Extend EditorWindow.close()"
if self.executing:
response = tkMessageBox.askokcancel(
"Kill?",
"The program is still running!\n Do you want to kill it?",
default="ok",
parent=self.text)
if response is False:
return "cancel"
if self.reading:
self.top.quit()
self.canceled = True
self.closing = True
# Wait for poll_subprocess() rescheduling to stop
self.text.after(2 * self.pollinterval, self.close2)
def close2(self):
return EditorWindow.close(self)
def _close(self):
"Extend EditorWindow._close(), shut down debugger and execution server"
self.close_debugger()
if use_subprocess:
self.interp.kill_subprocess()
# Restore std streams
sys.stdout = self.save_stdout
sys.stderr = self.save_stderr
sys.stdin = self.save_stdin
# Break cycles
self.interp = None
self.console = None
self.flist.pyshell = None
self.history = None
EditorWindow._close(self)
def ispythonsource(self, filename):
"Override EditorWindow method: never remove the colorizer"
return True
def short_title(self):
return self.shell_title
COPYRIGHT = \
'Type "copyright", "credits" or "license()" for more information.'
def begin(self):
self.text.mark_set("iomark", "insert")
self.resetoutput()
if use_subprocess:
nosub = ''
client = self.interp.start_subprocess()
if not client:
self.close()
return False
else:
nosub = "==== No Subprocess ===="
self.write("Python %s on %s\n%s\n%s" %
(sys.version, sys.platform, self.COPYRIGHT, nosub))
self.showprompt()
import tkinter
tkinter._default_root = None # 03Jan04 KBK What's this?
return True
def readline(self):
save = self.reading
try:
self.reading = 1
self.top.mainloop() # nested mainloop()
finally:
self.reading = save
line = self.text.get("iomark", "end-1c")
if len(line) == 0: # may be EOF if we quit our mainloop with Ctrl-C
line = "\n"
self.resetoutput()
if self.canceled:
self.canceled = 0
if not use_subprocess:
raise KeyboardInterrupt
if self.endoffile:
self.endoffile = 0
line = ""
return line
def isatty(self):
return True
def cancel_callback(self, event=None):
try:
if self.text.compare("sel.first", "!=", "sel.last"):
return # Active selection -- always use default binding
except:
pass
if not (self.executing or self.reading):
self.resetoutput()
self.interp.write("KeyboardInterrupt\n")
self.showprompt()
return "break"
self.endoffile = 0
self.canceled = 1
if (self.executing and self.interp.rpcclt):
if self.interp.getdebugger():
self.interp.restart_subprocess()
else:
self.interp.interrupt_subprocess()
if self.reading:
self.top.quit() # exit the nested mainloop() in readline()
return "break"
def eof_callback(self, event):
if self.executing and not self.reading:
return # Let the default binding (delete next char) take over
if not (self.text.compare("iomark", "==", "insert") and
self.text.compare("insert", "==", "end-1c")):
return # Let the default binding (delete next char) take over
if not self.executing:
self.resetoutput()
self.close()
else:
self.canceled = 0
self.endoffile = 1
self.top.quit()
return "break"
def linefeed_callback(self, event):
# Insert a linefeed without entering anything (still autoindented)
if self.reading:
self.text.insert("insert", "\n")
self.text.see("insert")
else:
self.newline_and_indent_event(event)
return "break"
def enter_callback(self, event):
if self.executing and not self.reading:
return # Let the default binding (insert '\n') take over
# If some text is selected, recall the selection
# (but only if this before the I/O mark)
try:
sel = self.text.get("sel.first", "sel.last")
if sel:
if self.text.compare("sel.last", "<=", "iomark"):
self.recall(sel, event)
return "break"
except:
pass
# If we're strictly before the line containing iomark, recall
# the current line, less a leading prompt, less leading or
# trailing whitespace
if self.text.compare("insert", "<", "iomark linestart"):
# Check if there's a relevant stdin range -- if so, use it
prev = self.text.tag_prevrange("stdin", "insert")
if prev and self.text.compare("insert", "<", prev[1]):
self.recall(self.text.get(prev[0], prev[1]), event)
return "break"
next = self.text.tag_nextrange("stdin", "insert")
if next and self.text.compare("insert lineend", ">=", next[0]):
self.recall(self.text.get(next[0], next[1]), event)
return "break"
# No stdin mark -- just get the current line, less any prompt
indices = self.text.tag_nextrange("console", "insert linestart")
if indices and \
self.text.compare(indices[0], "<=", "insert linestart"):
self.recall(self.text.get(indices[1], "insert lineend"), event)
else:
self.recall(self.text.get("insert linestart", "insert lineend"), event)
return "break"
# If we're between the beginning of the line and the iomark, i.e.
# in the prompt area, move to the end of the prompt
if self.text.compare("insert", "<", "iomark"):
self.text.mark_set("insert", "iomark")
# If we're in the current input and there's only whitespace
# beyond the cursor, erase that whitespace first
s = self.text.get("insert", "end-1c")
if s and not s.strip():
self.text.delete("insert", "end-1c")
# If we're in the current input before its last line,
# insert a newline right at the insert point
if self.text.compare("insert", "<", "end-1c linestart"):
self.newline_and_indent_event(event)
return "break"
# We're in the last line; append a newline and submit it
self.text.mark_set("insert", "end-1c")
if self.reading:
self.text.insert("insert", "\n")
self.text.see("insert")
else:
self.newline_and_indent_event(event)
self.text.tag_add("stdin", "iomark", "end-1c")
self.text.update_idletasks()
if self.reading:
self.top.quit() # Break out of recursive mainloop()
else:
self.runit()
return "break"
def recall(self, s, event):
# remove leading and trailing empty or whitespace lines
s = re.sub(r'^\s*\n', '' , s)
s = re.sub(r'\n\s*$', '', s)
lines = s.split('\n')
self.text.undo_block_start()
try:
self.text.tag_remove("sel", "1.0", "end")
self.text.mark_set("insert", "end-1c")
prefix = self.text.get("insert linestart", "insert")
if prefix.rstrip().endswith(':'):
self.newline_and_indent_event(event)
prefix = self.text.get("insert linestart", "insert")
self.text.insert("insert", lines[0].strip())
if len(lines) > 1:
orig_base_indent = re.search(r'^([ \t]*)', lines[0]).group(0)
new_base_indent = re.search(r'^([ \t]*)', prefix).group(0)
for line in lines[1:]:
if line.startswith(orig_base_indent):
# replace orig base indentation with new indentation
line = new_base_indent + line[len(orig_base_indent):]
self.text.insert('insert', '\n'+line.rstrip())
finally:
self.text.see("insert")
self.text.undo_block_stop()
def runit(self):
line = self.text.get("iomark", "end-1c")
# Strip off last newline and surrounding whitespace.
# (To allow you to hit return twice to end a statement.)
i = len(line)
while i > 0 and line[i-1] in " \t":
i = i-1
if i > 0 and line[i-1] == "\n":
i = i-1
while i > 0 and line[i-1] in " \t":
i = i-1
line = line[:i]
more = self.interp.runsource(line)
def open_stack_viewer(self, event=None):
if self.interp.rpcclt:
return self.interp.remote_stack_viewer()
try:
sys.last_traceback
except:
tkMessageBox.showerror("No stack trace",
"There is no stack trace yet.\n"
"(sys.last_traceback is not defined)",
master=self.text)
return
from idlelib.StackViewer import StackBrowser
sv = StackBrowser(self.root, self.flist)
def view_restart_mark(self, event=None):
self.text.see("iomark")
self.text.see("restart")
def restart_shell(self, event=None):
"Callback for Run/Restart Shell Cntl-F6"
self.interp.restart_subprocess(with_cwd=True)
def showprompt(self):
self.resetoutput()
try:
s = str(sys.ps1)
except:
s = ""
self.console.write(s)
self.text.mark_set("insert", "end-1c")
self.set_line_and_column()
self.io.reset_undo()
def resetoutput(self):
source = self.text.get("iomark", "end-1c")
if self.history:
self.history.history_store(source)
if self.text.get("end-2c") != "\n":
self.text.insert("end-1c", "\n")
self.text.mark_set("iomark", "end-1c")
self.set_line_and_column()
def write(self, s, tags=()):
try:
self.text.mark_gravity("iomark", "right")
OutputWindow.write(self, s, tags, "iomark")
self.text.mark_gravity("iomark", "left")
except:
raise ###pass # ### 11Aug07 KBK if we are expecting exceptions
# let's find out what they are and be specific.
if self.canceled:
self.canceled = 0
if not use_subprocess:
raise KeyboardInterrupt
class PseudoFile(object):
def __init__(self, shell, tags, encoding=None):
self.shell = shell
self.tags = tags
self.encoding = encoding
def write(self, s):
self.shell.write(s, self.tags)
def writelines(self, lines):
for line in lines:
self.write(line)
def flush(self):
pass
def isatty(self):
return True
usage_msg = """\
USAGE: idle [-deins] [-t title] [file]*
idle [-dns] [-t title] (-c cmd | -r file) [arg]*
idle [-dns] [-t title] - [arg]*
-h print this help message and exit
-n run IDLE without a subprocess (see Help/IDLE Help for details)
The following options will override the IDLE 'settings' configuration:
-e open an edit window
-i open a shell window
The following options imply -i and will open a shell:
-c cmd run the command in a shell, or
-r file run script from file
-d enable the debugger
-s run $IDLESTARTUP or $PYTHONSTARTUP before anything else
-t title set title of shell window
A default edit window will be bypassed when -c, -r, or - are used.
[arg]* are passed to the command (-c) or script (-r) in sys.argv[1:].
Examples:
idle
Open an edit window or shell depending on IDLE's configuration.
idle foo.py foobar.py
Edit the files, also open a shell if configured to start with shell.
idle -est "Baz" foo.py
Run $IDLESTARTUP or $PYTHONSTARTUP, edit foo.py, and open a shell
window with the title "Baz".
idle -c "import sys; print(sys.argv)" "foo"
Open a shell window and run the command, passing "-c" in sys.argv[0]
and "foo" in sys.argv[1].
idle -d -s -r foo.py "Hello World"
Open a shell window, run a startup script, enable the debugger, and
run foo.py, passing "foo.py" in sys.argv[0] and "Hello World" in
sys.argv[1].
echo "import sys; print(sys.argv)" | idle - "foobar"
Open a shell window, run the script piped in, passing '' in sys.argv[0]
and "foobar" in sys.argv[1].
"""
def main():
global flist, root, use_subprocess
use_subprocess = True
enable_shell = True
enable_edit = False
debug = False
cmd = None
script = None
startup = False
try:
opts, args = getopt.getopt(sys.argv[1:], "c:deihnr:st:")
except getopt.error as msg:
sys.stderr.write("Error: %s\n" % str(msg))
sys.stderr.write(usage_msg)
sys.exit(2)
for o, a in opts:
if o == '-c':
cmd = a
enable_shell = True
if o == '-d':
debug = True
enable_shell = True
if o == '-e':
enable_edit = True
enable_shell = False
if o == '-h':
sys.stdout.write(usage_msg)
sys.exit()
if o == '-i':
enable_shell = True
if o == '-n':
use_subprocess = False
if o == '-r':
script = a
if os.path.isfile(script):
pass
else:
print("No script file: ", script)
sys.exit()
enable_shell = True
if o == '-s':
startup = True
enable_shell = True
if o == '-t':
PyShell.shell_title = a
enable_shell = True
if args and args[0] == '-':
cmd = sys.stdin.read()
enable_shell = True
# process sys.argv and sys.path:
for i in range(len(sys.path)):
sys.path[i] = os.path.abspath(sys.path[i])
if args and args[0] == '-':
sys.argv = [''] + args[1:]
elif cmd:
sys.argv = ['-c'] + args
elif script:
sys.argv = [script] + args
elif args:
enable_edit = True
pathx = []
for filename in args:
pathx.append(os.path.dirname(filename))
for dir in pathx:
dir = os.path.abspath(dir)
if not dir in sys.path:
sys.path.insert(0, dir)
else:
dir = os.getcwd()
if dir not in sys.path:
sys.path.insert(0, dir)
# check the IDLE settings configuration (but command line overrides)
edit_start = idleConf.GetOption('main', 'General',
'editor-on-startup', type='bool')
enable_edit = enable_edit or edit_start
# start editor and/or shell windows:
root = Tk(className="Idle")
fixwordbreaks(root)
root.withdraw()
flist = PyShellFileList(root)
macosxSupport.setupApp(root, flist)
if enable_edit:
if not (cmd or script):
for filename in args:
flist.open(filename)
if not args:
flist.new()
if enable_shell:
shell = flist.open_shell()
if not shell:
return # couldn't open shell
if macosxSupport.runningAsOSXApp() and flist.dict:
# On OSX: when the user has double-clicked on a file that causes
# IDLE to be launched the shell window will open just in front of
# the file she wants to see. Lower the interpreter window when
# there are open files.
shell.top.lower()
shell = flist.pyshell
# handle remaining options:
if debug:
shell.open_debugger()
if startup:
filename = os.environ.get("IDLESTARTUP") or \
os.environ.get("PYTHONSTARTUP")
if filename and os.path.isfile(filename):
shell.interp.execfile(filename)
if shell and cmd or script:
shell.interp.runcommand("""if 1:
import sys as _sys
_sys.argv = %r
del _sys
\n""" % (sys.argv,))
if cmd:
shell.interp.execsource(cmd)
elif script:
shell.interp.prepend_syspath(script)
shell.interp.execfile(script)
# Check for problematic OS X Tk versions and print a warning message
# in the IDLE shell window; this is less intrusive than always opening
# a separate window.
tkversionwarning = macosxSupport.tkVersionWarning(root)
if tkversionwarning:
shell.interp.runcommand(''.join(("print('", tkversionwarning, "')")))
root.mainloop()
root.destroy()
if __name__ == "__main__":
sys.modules['PyShell'] = sys.modules['__main__']
main()<|fim▁end|> | self.breakpoints.remove(lineno)
except:
pass |
<|file_name|>default_config.py<|end_file_name|><|fim▁begin|>"""Default website configurations, used only for testing.
"""
from donut import environment
# Public Test Database
TEST = environment.Environment(
db_hostname="localhost",
db_name="donut_test",<|fim▁hole|> testing=True,
secret_key="1234567890",
imgur_api={
"id": "b579f690cacf867",
"secret": "****************************************"
},
restricted_ips=r"127\.0\.0\.1")<|fim▁end|> | db_user="donut_test",
db_password="public",
debug=True, |
<|file_name|>ccg_editor.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Author: Ben Wing <ben@666.com>
# Date: April 2006
#############################################################################
# #
# ccg_editor.ply #
# #
# Edit a CCG-format file, graphically. Will have a mode for displaying #
# CCG files in a friendly fashion and allowing for editing of parts or #
# all of the file. Will also have a mode for testing a CCG grammar, and #
# allow for compilation and error-finding under control of the editor. #
# #
#############################################################################
# This code is based on PyEdit version 1.1, from Oreilly's Programming
# Python, 2nd Edition, 2001, by Mark Lutz.
from Tkinter import * # base widgets, constants
from tkFileDialog import * # standard dialogs
from tkMessageBox import *
from tkSimpleDialog import *
from tkColorChooser import askcolor
from string import split, atoi
import sys, os, string, md5
import ccg2xml
import Tree
import re
START = '1.0' # index of first char: row=1,col=0
SEL_FIRST = SEL + '.first' # map sel tag to index
SEL_LAST = SEL + '.last' # same as 'sel.last'
FontScale = 0 # use bigger font on linux
if sys.platform[:3] != 'win': # and other non-windows boxes
FontScale = 3
# Initial top-level window; it's not clear we need this.
# FIXME: It sucks that we have to call Tk() to get the first top-level window
# but Toplevel() for all others. We should be able to call Tk() initially,
# and then Toplevel() to create all top-level windows, including the first.
root = None
# List of all open CFile objects
openfiles = {}
filenames = []
class CTab(Frame):
# Initialize this tab. Usually called from a subclass. PARENT is
# the parent widget, CFILE the CFile object associated with the
# top-level window, and TABNAME is the name of this tab (that tab
# will be removed from the toolbar).
def __init__(self, parent, cfile, tabname):
Frame.__init__(self, parent)
self.parent = parent
self.cfile = cfile
self.toolbar = None
self.checkbar = None
self.menubar = [
('File', 0,
[('Open...', 0, self.cfile.onOpen),
('New', 0, self.cfile.onNew),
('Save', 0, self.onSave),
('Save As...', 5, self.onSaveAs),
('Close', 0, self.cfile.onClose),
'separator',
('Quit VisCCG', 0, self.cfile.onQuit)]
),
('Tools', 0,
[('Font List', 0, self.cfile.onFontList),
('Pick Bg...', 4, self.cfile.onPickBg),
('Pick Fg...', 0, self.cfile.onPickFg),
('Color List', 0, self.cfile.onColorList),
'separator',
('Info...', 0, self.cfile.onInfo)]
)
]
self.toolbar = [
# ('Display', self.cfile.onDisplay, {'side': LEFT}),
('Edit', self.cfile.onEdit, {'side': LEFT}),
('Lexicon', self.cfile.onLexicon, {'side': LEFT}),
('Testbed', self.cfile.onTestbed, {'side': LEFT}),
('Features', self.cfile.onFeatures, {'side': LEFT}),
('Words', self.cfile.onWords, {'side': LEFT}),
('Rules', self.cfile.onRules, {'side': LEFT}),
('Quit', self.cfile.onClose, {'side': RIGHT}),
('Help', self.cfile.help, {'side': RIGHT}),
('Save', self.onSave, {'side': RIGHT}),
]
# self.remove_toolbar_button(tabname)
# Add MENU (a tuple corresponding to a single top-level menu item)
# after the item with the name AFTER.
def add_menu(self, after, menu):
newmenu = []
for x in self.menubar:
newmenu += [x]
if x[0] == after:
newmenu += [menu]
self.menubar = newmenu
# Remove the toolbar button named NAME.
def remove_toolbar_button(self, name):
newtoolbar = []
for x in self.toolbar:
if x[0] != name:
newtoolbar += [x]
self.toolbar = newtoolbar
def reinit(self):
pass
#####################
# File menu commands
#####################
def onSave(self):
self.onSaveAs(self.cfile.currfile) # may be None
def onSaveAs(self, forcefile=None):
file = forcefile or self.cfile.my_asksaveasfilename()
if file:
text = self.cfile.getAllText()
try:
open(file, 'w').write(text)
except:
showerror('CCG Editor', 'Could not write file ' + file)
else:
self.cfile.setFileName(file) # may be newly created
self.cfile.edit_modified(NO)
self.cfile.last_save_signature = self.cfile.getSignature(text)
class CEdit(CTab):
def __init__(self, parent, cfile):
CTab.__init__(self, parent, cfile, 'Edit')
self.debugFrame= None
# Add a frame here, so that debug mode can be enabled
# by embedding other objects within this frame
editFrame = Frame(self, bd=1, bg= 'white')
editFrame.pack(fill=BOTH, expand=YES, side=TOP)
# Add a button frame, embed the button and
# link to command for the debug mode
btnFrame = Frame(editFrame, bd = 1)
btnFrame.grid (row=0, columnspan=3, sticky=NSEW)
vldButton = Button (btnFrame, text='Validate', command = lambda: self.onValidate(editFrame, cfile))
vldButton.pack(side=RIGHT)
# Put the main edit window in the row below this
vbar = Scrollbar(editFrame)
hbar = Scrollbar(editFrame, orient='horizontal')
self.text = Text(editFrame, padx=5, wrap='none', undo=YES)
vbar.grid(row=1, column=2, sticky=NS)
hbar.grid(row=2, columnspan=2, sticky=EW) # pack text last
self.text.grid(row=1, column=1, sticky=NSEW) # else sbars clipped
editFrame.columnconfigure(1, weight=1)
editFrame.rowconfigure(1, weight=1)
# Add a list containing line numbers
self.lineList = Text(editFrame, relief=SUNKEN, bg='white', bd=2, yscrollcommand = vbar.set, width=3)
self.lineList.grid(row=1, column=0, sticky=NS)
self.lineList.config(font=self.cfile.fonts[0],
bg=self.cfile.colors[0]['bg'], fg=self.cfile.colors[0]['fg'])
# TODO: The first time the display of the line numbers
# strangely doesn't go through --- somehow cfile
# isn't initialized. However, it works properly in the display.
# Need to understand why this happens.
try:
self.showLineNums()
except KeyError:
self.text.config(yscrollcommand=vbar.set) # call vbar.set on text move
self.text.config(xscrollcommand=hbar.set)
#vbar.config(command=text.yview) # call text.yview on scroll move
hbar.config(command=self.text.xview) # or hbar['command']=text.xview
self.text.config(font=self.cfile.fonts[0],
bg=self.cfile.colors[0]['bg'], fg=self.cfile.colors[0]['fg'])
#Setting the movement of the listbox and the text
#together to be controlled by the scrollbar
vbar.config(command=self.scrollSet)
self.add_menu('File',
('Edit', 0,
[('Cut', 0, self.onCut),
('Copy', 1, self.onCopy),
('Paste', 0, self.onPaste),
'separator',
('Delete', 0, self.onDelete),
('Select All', 0, self.onSelectAll)]
))
self.add_menu('Edit',
('Search', 0,
[('Goto...', 0, self.cfile.onGoto),
('Find...', 0, self.cfile.onFind),
('Refind', 0, self.cfile.onRefind),
('Change...', 0, self.onChange)]
))
def scrollSet(self, *args):
self.lineList.yview(*args)
self.text.yview(*args)
def reinit(self):
self.showLineNums()
self.text.focus()
def showLineNums(self):
#Make the list of lines editable
self.lineList.config(state=NORMAL)
textData = self.cfile.getAllText()
listOfLines = textData.split('\n')
for num in range(1,len(listOfLines)):
self.lineList.insert(END,"%s\n" % num)
#Now that we are done changing the number of lines,
#we reset the text to be uneditable
self.lineList.config(state=NORMAL)
def onValidate(self, editFrame, cfile):
#showwarning(title= 'Sorry', message='Validate and debug feature coming soon!')
# Destroy previous display of debug or error messages
# if present
if self.debugFrame:
self.debugFrame.grid_forget()
# Compile if file signature has changed
cfile.compile_if_needed()
# Now, call the error debug routine if errors are found
if (ccg2xml.error_count > 0):
self.debugError(editFrame, cfile)
else:
showinfo(title='VisCCG: Success', message='No validation errors!')
def debugError(self, editFrame, cfile):
self.debugFrame = Frame(editFrame, bg='white', bd=2)
self.debugFrame.grid(row=3, columnspan=2, sticky=NSEW)
# Create Listbox and scrollbars
sbar = Scrollbar(self.debugFrame)
list = Listbox(self.debugFrame, relief=SUNKEN, bg='white', bd=2, yscrollcommand = sbar.set)
sbar.config(command=list.yview)
list.pack(fill=BOTH, side=LEFT, expand=YES)
sbar.pack(fill=Y, side=RIGHT)
# Display each message in the log
for mesg in ccg2xml.message_log:
type = mesg[0]
lineno = mesg[1]
errwarn = mesg[2]
if lineno:
dispError = type+' at Line '+str(lineno)+': '+errwarn
else:
dispError = type+': '+errwarn
list.insert(END, dispError)
#####################
# Edit menu commands
#####################
def onCopy(self): # get text selected by mouse,etc
if not self.text.tag_ranges(SEL): # save in cross-app clipboard
showerror('CCG Editor', 'No text selected')
else:
text = self.text.get(SEL_FIRST, SEL_LAST)
self.clipboard_clear()
self.clipboard_append(text)
def onDelete(self): # delete selected text, no save
if not self.text.tag_ranges(SEL):
showerror('CCG Editor', 'No text selected')
else:
self.text.delete(SEL_FIRST, SEL_LAST)
def onCut(self):
if not self.text.tag_ranges(SEL):
showerror('CCG Editor', 'No text selected')
else:
self.onCopy() # save and delete selected text
self.onDelete()
def onPaste(self):
try:
text = self.selection_get(selection='CLIPBOARD')
except TclError:
showerror('CCG Editor', 'Nothing to paste')
return
self.text.insert(INSERT, text) # add at current insert cursor
self.text.tag_remove(SEL, '1.0', END)
self.text.tag_add(SEL, INSERT+'-%dc' % len(text), INSERT)
self.text.see(INSERT) # select it, so it can be cut
def onSelectAll(self):
self.text.tag_add(SEL, '1.0', END+'-1c') # select entire text
self.text.mark_set(INSERT, '1.0') # move insert point to top
self.text.see(INSERT) # scroll to top
#######################
# Search menu commands
#######################
def onChange(self):
new = Toplevel(self)
Label(new, text='Find text:').grid(row=0, column=0)
Label(new, text='Change to:').grid(row=1, column=0)
self.change1 = Entry(new)
self.change2 = Entry(new)
self.change1.grid(row=0, column=1, sticky=EW)
self.change2.grid(row=1, column=1, sticky=EW)
Button(new, text='Find',
command=self.onDoFind).grid(row=0, column=2, sticky=EW)
Button(new, text='Apply',
command=self.onDoChange).grid(row=1, column=2, sticky=EW)
new.columnconfigure(1, weight=1) # expandable entrys
def onDoFind(self):
self.onFind(self.change1.get()) # Find in change box
def onDoChange(self):
if self.text.tag_ranges(SEL): # must find first
self.text.delete(SEL_FIRST, SEL_LAST) # Apply in change
self.text.insert(INSERT, self.change2.get()) # deletes if empty
self.text.see(INSERT)
self.onFind(self.change1.get()) # goto next appear
self.text.update() # force refresh
####################################
# Others, useful outside this class
####################################
def isEmpty(self):
return not self.getAllText()
def getAllText(self):
return self.text.get('1.0', END+'-1c') # extract text as a string
def setAllText(self, text):
self.text.delete('1.0', END) # store text string in widget
self.text.insert(END, text) # or '1.0'
self.text.mark_set(INSERT, '1.0') # move insert point to top
self.text.see(INSERT) # scroll to top, insert set
self.cfile.edit_modified(NO)
def clearAllText(self):
self.text.delete('1.0', END) # clear text in widget
class CWords(CTab):
def __init__(self, parent, cfile):
CTab.__init__(self, parent, cfile, 'Words')
self.child=None
self.wordList = None
self.cfile = cfile
# Called when we switch to this mode using the toolbar at top.
def reinit(self):
if self.child:
self.child.pack_forget()
self.child = Frame(self, background='white')
self.child.pack(expand=YES, fill=BOTH)
scrollbar = Scrollbar(self.child, orient=VERTICAL)
self.wordList = Listbox(self.child, yscrollcommand=scrollbar.set)
self.wordList.grid(row=0, column=0, sticky=N+S+E+W)
scrollbar.config(command= self.wordList.yview)
scrollbar.grid(row=0, column=1, sticky=N+S)
self.child.grid_rowconfigure(0, weight=1)
self.child.grid_columnconfigure(0, weight=1)
#If the data hasn't been compiled yet, then do so
try:
dummy = ccg2xml.morph_xml
except:
self.cfile.compile_if_needed()
#Adding dummy code for all words
for x in ccg2xml.morph_xml:
assert x[0] == 'entry'
self.wordList.insert (END, ccg2xml.getprop('word', x[1]))
#print ccg2xml.getprop('word', x[1])
class CLexicon(CTab):
class lexicon_vars(object):
def __init__(self):
self.show_feat_id = IntVar()
self.show_feat_id.set(1)
self.show_feat_struct = IntVar()
self.show_feat_struct.set(1)
self.show_full_features = IntVar()
self.show_full_features.set(0)
self.show_semantics = IntVar()
self.show_semantics.set(1)
def __init__(self, parent, cfile):
CTab.__init__(self, parent, cfile, 'Lexicon')
self.child = None
self.cnv = None
self.mainFrame = None
self.vars = self.lexicon_vars()
# FIXME? It's a bit awkward that ccg.ply has references to the
# variables below scattered throughout it. But I'm not sure what
# a better solution would be.
self.checkbar = [
("Show feature ID's", self.vars.show_feat_id),
("Show features", self.vars.show_feat_struct),
('Full-form features', self.vars.show_full_features),
('Show semantics', self.vars.show_semantics),
]
# Called when we switch to this mode using the toolbar at top.
def reinit(self):
self.redraw()
def redraw(self):
self.cfile.compile_if_needed()
if self.child:
self.child.pack_forget()
if self.mainFrame:
self.mainFrame.pack_forget()
self.mainFrame = Frame(self, bd=1, bg='white')
self.mainFrame.pack_propagate(0)
self.mainFrame.pack(expand=YES, fill=BOTH)
self.mainFrame.grid_rowconfigure(0, weight=1)
self.mainFrame.grid_columnconfigure(0, weight=1)
xscrollbar = Scrollbar(self.mainFrame, orient=HORIZONTAL)
xscrollbar.grid(row=1, column=0, sticky=E+W)
yscrollbar = Scrollbar(self.mainFrame)
yscrollbar.grid(row=0, column=1, sticky=N+S)
self.cnv = Canvas(self.mainFrame, bd=2, xscrollcommand=xscrollbar.set,
yscrollcommand=yscrollbar.set, width = 847, height=369)
xscrollbar.config(command= self.cnv.xview)
yscrollbar.config(command= self.cnv.yview)
self.child = Frame(self.cnv, bd=2, relief=SUNKEN, background='white')
self.cnv.create_window(0, 0, anchor='nw', window=self.child)
ccg2xml.draw_parse(self.cfile.curparse.parse, self.cfile, self.child, self.vars, self.cnv, self.mainFrame)
self.child.update_idletasks()
self.cnv.config(scrollregion=self.cnv.bbox("all"))
self.cnv.grid(row=0, column=0, sticky='NSEW')
<|fim▁hole|>class CRules(CTab):
def __init__(self, parent, cfile):
CTab.__init__(self, parent, cfile, 'Rules')
class CFeatures(CTab):
def __init__(self, parent, cfile):
CTab.__init__(self, parent, cfile, 'Features')
self.child=None
self.checkbar=None
self.edit=None
self.text=None
# Called when we switch to this mode using the toolbar at top.
def reinit(self):
if self.child:
self.child.pack_forget()
self.child = Frame(self, background='white', width = 847, height = 369)
self.child.pack(expand=YES, fill=BOTH)
butframe = Frame(self.child, cursor='hand2',
relief=SUNKEN, bd=2)
butframe.pack(fill=X)
but1 = Button(butframe, text='Expand All', command=self.expand_all)
but1.pack(side=LEFT)
but2 = Button(butframe, text='Contract All', command=self.contract_all)
but2.pack(side=LEFT)
# Force editing in the same frame: but a lower view:
# pass self.child as the parent frame
self.edit = Button(butframe, text='Edit', command= lambda:self.edit_tree(self.child))
self.edit.pack(side=RIGHT)
featframe = Frame(self.child, bd=2, relief=SUNKEN,
background='white')
featframe.pack(expand=YES, fill=BOTH)
self.cfile.compile_if_needed()
# Build the tree
self.tree={}
self.root_name = re.sub(r'^(.*)\.(.*)$', r'\1', self.cfile.file)
self.tree[self.root_name]=[]
for feat in self.cfile.curparse.feature_to_values:
self.tree[self.root_name] += [str(feat)]
for feat in self.cfile.curparse.feature_to_values:
self.tree[feat] = []
for x in self.cfile.curparse.feature_to_values[feat]:
if x.name not in self.tree:
self.tree[x.name] = []
for x in self.cfile.curparse.feature_to_values[feat]:
if x.parents:
par = x.parents[0]
self.tree[par.name] += [x.name]
else:
self.tree[feat] += [x.name]
# Define the images for opened and closed categories
shut_icon=PhotoImage(data='R0lGODlhCQAQAJH/AMDAwAAAAGnD/wAAACH5BAEAAAAALAAA'
'AAAJABAAQAIdhI8hu2EqXIroyQrb\nyRf0VG0UxnSZ5jFjulrhaxQ'
'AO6olVwAAOw==')
open_icon=PhotoImage(data='R0lGODlhEAAJAJH/AMDAwAAAAGnD/wAAACH5BAEAAAAALAAA'
'AAAQAAkAQAIahI+pyyEPg3KwPrko\nTqH7/yGUJWxcZTapUQAAO8b'
'yUgAAOw==')
# Create the tree
self.t=Tree.Tree(master=featframe,
root_id='',
root_label=self.root_name,
collapsed_icon=shut_icon,
expanded_icon=open_icon,
get_contents_callback=self.get_treedata,
line_flag=False)
self.t.grid(row=0, column=0, sticky = 'nsew')
featframe.grid_rowconfigure(0, weight=1)
featframe.grid_columnconfigure(0, weight=1)
sb=Scrollbar(featframe)
sb.grid(row=0, column=1, sticky='ns')
self.t.configure(yscrollcommand=sb.set)
sb.configure(command=self.t.yview)
sb=Scrollbar(featframe, orient=HORIZONTAL)
sb.grid(row=1, column=0, sticky='ew')
self.t.configure(xscrollcommand=sb.set)
sb.configure(command=self.t.xview)
# Expand the whole tree out
self.expand_tree(self.t.root)
# Returns the nodes rooted at the node passed and adds them to the tree
def get_treedata(self,node):
lbl = str(node.get_label())
children = self.tree[lbl]
for x in children:
if self.tree[x]:
expands=1
else:
expands=0
self.t.add_node(name=x,flag=expands)
# Expand the tree rooted at node recursively
def expand_tree(self, node):
node.expand()
for child in node.children():
if child.expandable():
self.expand_tree(child)
def expand_all(self):
self.expand_tree(self.t.root)
def contract_all(self):
self.t.root.collapse()
def edit_tree(self, parent):
editFrame = Frame(parent, bd=1, background='white')
self.text = Text(editFrame, padx=5, wrap=None, undo = YES, background='white')
vbar = Scrollbar(editFrame)
hbar = Scrollbar(editFrame, orient='horizontal')
self.text.config(yscrollcommand=vbar.set) # call vbar.set on text move
self.text.config(xscrollcommand=hbar.set)
vbar.config(command=self.text.yview) # call text.yview on scroll move
hbar.config(command=self.text.xview) # or hbar['command']=text.xview
# Change the text on the button, and also pass the rest
# of the arguments so that the grid for the statements can be reset
self.edit.config(text='Done', command= lambda:self.save_tree(parent))
# Changing the mode of the cfile object here,
# so that once the user clicks done,
# the whole object is recompiled and redisplayed
self.cfile.mode= 'Edit'
vbar.pack(side=RIGHT, fill=Y)
hbar.pack(side=BOTTOM, fill=X)
self.text.pack(fill= BOTH, expand= YES)
# Set a mark at the beginning of the text
self.text.mark_set("START", INSERT)
self.text.mark_gravity("START", LEFT)
# Push in the rest of the file's contents
fileData = self.cfile.getAllText()
self.text.insert(INSERT, fileData)
# Move the insert position to the first occurence of the family name
# FIXME: this is poor implementation
# The positioning of the insert cursor should be happening by parsing the
# CFG production rules, using CSFamily.prod.lineno and endlineno
self.text.config(takefocus=True)
idx= self.text.search('feature', "START")
if idx:
self.text.mark_set(CURRENT, idx)
self.text.see(CURRENT)
else:
showwarning('Warning','Features not located in text')
editFrame.pack(expand=YES, fill=BOTH)
def save_tree(self, parent):
# We force the text contents of the cfile object to copy over
# all that is presently in the current text-box
self.cfile.setAllText(self.text.get(1.0,END))
self.edit.config(text='Edit', command= lambda:self.edit_tree(parent))
# Recompile whatever was edited and redisplay
# Note: changes are not saved hereby!!
self.cfile.compile_if_needed()
self.cfile.onFeatures()
class CTestbed(CTab):
def __init__(self, parent, cfile):
CTab.__init__(self, parent, cfile, 'Testbed')
self.child = None
self.edit = None
self.text = None
self.editFrame = None
self.cnv = None
self.mainFrame = None
self.newInsert = None
def makelab(self, text, row, col, **props):
lab = Label(self.child, text=text, background='white', **props)
# Make the label grow to fill all space allocated for the column
lab.grid(row=row, column=col, sticky='NSEW')
# Called when we switch to this mode using the toolbar at top.
def reinit(self):
if self.child:
self.child.pack_forget()
if self.mainFrame:
self.mainFrame.pack_forget()
self.mainFrame = Frame(self, bd=1, bg='white')
self.mainFrame.pack(expand=YES, fill=BOTH)
self.mainFrame.grid_rowconfigure(0, weight=1)
self.mainFrame.grid_columnconfigure(0, weight=1)
xscrollbar = Scrollbar(self.mainFrame, orient=HORIZONTAL)
xscrollbar.grid(row=1, column=0, sticky=E+W)
yscrollbar = Scrollbar(self.mainFrame)
yscrollbar.grid(row=0, column=1, sticky=N+S)
self.cnv= Canvas(self.mainFrame, bd=2, xscrollcommand=xscrollbar.set,
yscrollcommand=yscrollbar.set, width = 847, height=369)
xscrollbar.config(command=self.cnv.xview)
yscrollbar.config(command=self.cnv.yview)
self.child = Frame(self.cnv, bd=2, relief=SUNKEN, background='white')
self.child.rowconfigure(1, weight=1)
self.child.columnconfigure(1, weight=1)
self.child.pack(expand=YES, fill=BOTH)
butnFrame = Frame(self.child, relief=SUNKEN, bd=2)
butnFrame.grid(row=0, sticky='NSEW', columnspan=2)
self.edit = Button(butnFrame, text='Edit', command= self.edit_testbed)
self.edit.pack(side=RIGHT)
self.newInsert = Button(butnFrame, text='New Sentence', command= self.new_sentence)
self.newInsert.pack(side=RIGHT)
self.cfile.compile_if_needed()
self.makelab("Num Parses", 1, 0, bd=1, relief=SUNKEN, fg="#77AA77", font = ("Helvetica", FontScale +12))
self.makelab("Sentence", 1, 1, bd=1, relief=SUNKEN, fg="#77AA77", font = ("Helvetica", FontScale +12))
# Make the column containing the sentences grow to include all
# extra space
self.child.columnconfigure(1, weight=1)
for i in xrange(len(self.cfile.curparse.testbed_statements)):
x = self.cfile.curparse.testbed_statements[i]
assert x[0] == 'item'
x = x[1]
# Left-justify the text
numparse = ccg2xml.getprop('numOfParses', x)
string = ccg2xml.getprop('string', x)
# How many parses of the sentence are produced?
self.makelab('%s' % numparse, i+2, 0)
# Print the sentence itself
self.makelab('%s%s' % (numparse == 0 and '*' or '', string),
i+2, 1, anchor=W)
self.cnv.create_window(0, 0, anchor='nw', window=self.child)
self.child.update_idletasks()
#self.child.grid(row=0, column=0, sticky=NSEW)
self.cnv.config(scrollregion=self.cnv.bbox("all"))
self.cnv.grid(row=0, column=0, sticky='NSEW')
# Edit the testbed
def edit_testbed(self):
self.editFrame = Frame(self.mainFrame, bd=1, background='white')
#self.editFrame.grid(row=len(self.cfile.curparse.testbed_statements)+3, columnspan=2, sticky='NSEW')
self.editFrame.grid(row=2, columnspan=2, sticky='NSEW')
self.text = Text(self.editFrame, padx=5, wrap=None, undo = YES, background='white')
vbar = Scrollbar(self.editFrame)
hbar = Scrollbar(self.editFrame, orient='horizontal')
self.text.config(yscrollcommand=vbar.set) # call vbar.set on text move
self.text.config(xscrollcommand=hbar.set)
vbar.config(command=self.text.yview) # call text.yview on scroll move
hbar.config(command=self.text.xview) # or hbar['command']=text.xview
# Change the text on the button, and also pass the rest
# of the arguments so that the grid for the statements can be reset
self.edit.config(text='Done', command= self.save_testbed)
# Changing the mode of the cfile object here,
# so that once the user clicks done,
# the whole object is recompiled and redisplayed
self.cfile.mode= 'Edit'
vbar.pack(side=RIGHT, fill=Y)
hbar.pack(side=BOTTOM, fill=X)
self.text.pack(fill= BOTH, expand= YES)
# Set a mark at the beginning of the text
self.text.mark_set("START", INSERT)
self.text.mark_gravity("START", LEFT)
# Push in the rest of the file's contents
fileData = self.cfile.getAllText()
self.text.insert(INSERT, fileData)
# Move the insert position to the first occurence of the family name
# FIXME: this is poor implementation
# The positioning of the insert cursor should be happening by parsing the
# CFG production rules, using CSFamily.prod.lineno and endlineno
self.text.config(takefocus=True)
idx= self.text.search('testbed', "START")
if idx:
self.text.mark_set(CURRENT, idx)
self.text.see(CURRENT)
else:
showwarning(title= 'VisCCG: Warning', message='No initial testbed found')
#self.editFrame.pack(expand=YES, fill=BOTH)
self.child.update_idletasks()
self.cnv.config(scrollregion=self.cnv.bbox("all"))
# Save the edited text
def save_testbed(self):
# We force the text contents of the cfile object to copy over
# all that is presently in the current text-box
self.cfile.setAllText(self.text.get(1.0,END))
self.edit.config(text='Edit', command= self.edit_testbed)
self.editFrame.pack_forget()
# Recompile whatever was edited and redisplay
# Note: changes are not saved hereby!!
self.cfile.compile_if_needed()
self.cfile.onTestbed()
# Enter a new sentence
def new_sentence(self):
master = Tk()
master.title('VisCCG: New Sentence for the testbed')
sent = Entry(master, bg='#FFFFFF', width = 100)
nParses = Entry(master, bg='#FFFFFF', width = 2)
sLabel = Label (master, text = 'Sentence:')
nLabel = Label (master, text = 'Number of parses:')
sent.focus_set()
b = Button(master, text="Add sentence", width=10, command= lambda:self.editNew(master, sent, nParses))
c = Button(master, text="Cancel", command= master.destroy)
sent.grid (row=1, column=0, sticky = W)
nParses.grid (row=1, column=1, sticky= W)
sLabel.grid (row=0, column=0, sticky=W)
nLabel.grid (row=0, column=1, sticky = W)
b.grid (row=2, column = 0)
c.grid (row=2, column = 1)
# Print from the new sentence
def editNew(self, master, sent, nParses):
# Prepare the file's contents for editing
fileData = self.cfile.getAllText()
self.text = Text(master)
self.text.mark_set("START", INSERT)
self.text.mark_gravity("START", LEFT)
self.text.insert(INSERT, fileData)
testSent = sent.get()
npSent = nParses.get()
self.text.config(takefocus=True)
idx= self.text.search('testbed', "START")
if idx:
self.text.mark_set("START", idx)
idx = self.text.search('{', "START", forwards = True)
self.text.mark_set("START", idx)
idx = self.text.search('\n', "START", forwards = True)
# FIXME: really poor search for locating the right position
# to insert text here. Needs correction!
self.text.mark_set(INSERT, idx)
self.text.mark_gravity(INSERT, RIGHT)
self.text.insert (INSERT, '\n\t'+ testSent+ ':\t'+ npSent+ ';')
else:
showwarning(title= 'VisCCG: Warning', message='No initial testbed found, creating new')
self.text.mark_set(INSERT, END)
self.text.mark_gravity(INSERT, RIGHT)
self.text.insert (INSERT, ' testbed {\n')
self.text.insert (INSERT, '\n\t'+ testSent+ ':\t'+ npSent+ ';')
self.text.insert (INSERT, '}\n')
# Set the original file's data to be this
fileData= self.text.get(1.0, END)
self.cfile.setAllText(fileData)
# Destroy the entry window
master.destroy()
# Update the display
self.cfile.mode= 'Edit'
self.cfile.compile_if_needed()
self.cfile.onTestbed()
# Creates the top-level window and populates the widgets below it.
class CFile(object):
#### NOTE NOTE NOTE! Variables declared like this, in the class itself,
#### are class variables (not instance variables) until they are
#### assigned to. If you want pure instance variables, you need to
#### initialize them inside of __init__().
# Hash table describing modes and the associated class
modelist = {'Edit':CEdit, 'Lexicon':CLexicon, 'Features':CFeatures,
'Words':CWords, 'Testbed':CTestbed, 'Rules':CRules}
startfiledir = '.'
ftypes = [('All files', '*'), # for file open dialog
('Text files', '.txt'), # customize in subclass
('Python files', '.py')] # or set in each instance
colors = [{'fg':'black', 'bg':'white'}, # color pick list
{'fg':'yellow', 'bg':'black'}, # first item is default
{'fg':'white', 'bg':'blue'}, # tailor me as desired
{'fg':'black', 'bg':'beige'}, # or do PickBg/Fg chooser
{'fg':'yellow', 'bg':'purple'},
{'fg':'black', 'bg':'brown'},
{'fg':'lightgreen', 'bg':'darkgreen'},
{'fg':'darkblue', 'bg':'orange'},
{'fg':'orange', 'bg':'darkblue'}]
fonts = [('courier', 9+FontScale, 'normal'), # platform-neutral fonts
('courier', 12+FontScale, 'normal'), # (family, size, style)
('courier', 10+FontScale, 'bold'), # or popup a listbox
('courier', 10+FontScale, 'italic'), # make bigger on linux
('times', 10+FontScale, 'normal'),
('helvetica', 10+FontScale, 'normal'),
('ariel', 10+FontScale, 'normal'),
('system', 10+FontScale, 'normal'),
('courier', 20+FontScale, 'normal')]
def __init__(self, file=None):
self.file = file
self.openDialog = None
self.saveDialog = None
self.lastfind = None
self.current_parse = None
self.mode = None
self.last_save_signature = None
self.last_compile_signature = None
# First top-level window is Tk(); rest are Toplevel()
global root
if not root:
root = Tk()
self.top = root
else:
self.top = Toplevel(root)
ccg2xml.late_init_graphics()
openfiles[self] = True
self.top.protocol('WM_DELETE_WINDOW', self.onClose)
# We create an outer frame to hold the toolbar and the main widget.
# Create all the different kinds of main widget.
# FIXME: Maybe outer isn't necessary?
self.outer = Frame(self.top)
self.outer.pack(expand=YES, fill=BOTH) # make frame stretchable
self.modes = {}
for mode in self.modelist:
self.modes[mode] = self.modelist[mode](self.outer, self)
self.main = None
self.toolbar_widget = None
self.checkbar_widget = None
#self.switch_to('Edit')
self.setFileName(None)
if file:
self.onFirstOpen(file)
else:
# When the user has just opened a new file
# Need to load template from the src folder
openccg_home = os.environ['OPENCCG_HOME']
template = open(openccg_home + '/src/ccg2xml/grammar_template.ccg', 'r').read()
self.setAllText(template)
# Save the MD5 signature for future comparison
self.last_save_signature = self.getSignature(self.getAllText())
self.switch_to('Edit')
def switch_to(self, mode):
# Switch to a different mode (display, edit, test). Remove the
# existing main and toolbar widgets, if existing. Redo the menubar
# and toolbar widgets according to the new mode and then display
# the new widgets.
#
# FIXME: We should probably create the menubar and toolbar widgets
# only once, and remember them.
if self.mode != mode:
if self.main:
self.main.pack_forget()
if self.toolbar_widget:
self.toolbar_widget.pack_forget()
if self.checkbar_widget:
self.checkbar_widget.pack_forget()
self.mode = mode
self.main = self.modes[mode]
self.makeMenubar()
self.makeToolbar(mode)
self.makeCheckbar()
#print "Reinit being called now... "
self.main.reinit()
# Pack the main widget after the toolbar, so it goes below it.
self.main.pack(side=TOP, expand=YES, fill=BOTH)
# Create the menubar; assumes that self.menubar has been set to the
# appropriate menubar description. Note that the menubar has to be a
# child of the top-level window itself rather than any child of it, so
# that it can be correctly displayed at the top of the window -- or
# possibly in its decoration (Windows) or at top of screen (Mac).
#
# From PP2E guimaker.py.
def makeMenubar(self):
menubar = Menu(self.top)
self.top.config(menu=menubar)
for (name, key, items) in self.main.menubar:
pulldown = Menu(menubar)
self.addMenuItems(pulldown, items)
menubar.add_cascade(label=name, underline=key, menu=pulldown)
if sys.platform[:3] == 'win':
menubar.add_command(label='Help', command=self.help)
else:
pulldown = Menu(menubar) # linux needs real pulldown
pulldown.add_command(label='About', command=self.help)
menubar.add_cascade(label='Help', menu=pulldown)
# Add items to a menu or submenu. From PP2E guimaker.py.
def addMenuItems(self, menu, items):
for item in items: # scan nested items list
if item == 'separator': # string: add separator
menu.add_separator({})
elif type(item) is list: # list: disabled item list
for num in item:
menu.entryconfig(num, state=DISABLED)
elif type(item[2]) is not list:
menu.add_command(label = item[0], # command:
underline = item[1], # add command
command = item[2]) # cmd=callable
else:
pullover = Menu(menu)
self.addMenuItems(pullover, item[2]) # sublist:
menu.add_cascade(label = item[0], # make submenu
underline = item[1], # add cascade
menu = pullover)
def makeToolbar(self, selected):
"""
make toolbar (of buttons) at top, if any
expand=no, fill=x so same width on resize
"""
if self.main.toolbar:
self.toolbar_widget = Frame(self.outer, cursor='hand2',
relief=SUNKEN, bd=2)
self.toolbar_widget.pack(side=TOP, fill=X)
for (name, action, where) in self.main.toolbar:
but = Button(self.toolbar_widget, text=name,
command=action)
if name == selected:
but.config(relief=SUNKEN)
but.pack(where)
def makeCheckbar(self):
"""
make check-button bar at top, if any
expand=no, fill=x so same width on resize
"""
if self.main.checkbar:
self.checkbar_widget = Frame(self.outer, cursor='hand2',
relief=SUNKEN, bd=2)
self.checkbar_widget.pack(side=TOP, fill=X)
for (name, var) in self.main.checkbar:
Checkbutton(self.checkbar_widget, text=name,
variable=var,
command=self.main.redraw).pack(side=LEFT)
def getAllText(self):
return self.modes['Edit'].getAllText()
def setAllText(self, text):
self.modes['Edit'].setAllText(text)
#self.modes['Display'].setAllText(text)
def _getints(self, string):
"""Internal function."""
if string:
if type(string) is str:
textwid = self.modes['Edit'].text
return tuple(map(getint, textwid.tk.splitlist(string)))
else:
return string
def edit(self, *args):
"""Internal method
This method controls the undo mechanism and
the modified flag. The exact behavior of the
command depends on the option argument that
follows the edit argument. The following forms
of the command are currently supported:
edit_modified, edit_redo, edit_reset, edit_separator
and edit_undo
"""
textwid = self.modes['Edit'].text
return self._getints(
textwid.tk.call((textwid._w, 'edit') + args)) or ()
def edit_modified(self, arg=None):
"""Get or Set the modified flag
If arg is not specified, returns the modified
flag of the widget. The insert, delete, edit undo and
edit redo commands or the user can set or clear the
modified flag. If boolean is specified, sets the
modified flag of the widget to arg.
"""
# Added to use md5 functionality to watch for changed data
if arg is None:
alltext = self.getAllText()
if (self.last_save_signature != self.getSignature(alltext)):
return YES
return self.edit("modified", arg)
def onInfo(self):
text = self.getAllText() # added on 5/3/00 in 15 mins
bytes = len(text) # words uses a simple guess:
lines = len(string.split(text, '\n')) # any separated by whitespace
words = len(string.split(text))
index = self.main.text.index(INSERT)
where = tuple(string.split(index, '.'))
showinfo('CCG Editor Information',
'Current location:\n\n' +
'line:\t%s\ncolumn:\t%s\n\n' % where +
'File text statistics:\n\n' +
'Modified: %s\n\n' % self.edit_modified()+
'bytes:\t%d\nlines:\t%d\nwords:\t%d\n' %
(bytes, lines, words))
#######################
# Search menu commands
#######################
def onGoto(self, line=None):
if not line:
line = askinteger('CCG Editor', 'Enter line number')
self.main.text.update()
self.main.text.focus()
if line is not None:
maxindex = self.main.text.index(END+'-1c')
maxline = atoi(split(maxindex, '.')[0])
if line > 0 and line <= maxline:
self.main.text.mark_set(INSERT, '%d.0' % line) # goto line
self.main.text.tag_remove(SEL, '1.0', END) # delete selects
self.main.text.tag_add(SEL, INSERT, 'insert + 1l') # select line
self.main.text.see(INSERT) # scroll to line
else:
showerror('CCG Editor', 'Bad line number')
def onFind(self, lastkey=None):
key = lastkey or askstring('CCG Editor', 'Enter search string')
self.main.text.update()
self.main.text.focus()
self.lastfind = key
if key:
where = self.main.text.search(key, INSERT, END) # don't wrap
if not where:
showerror('CCG Editor', 'String not found')
else:
pastkey = where + '+%dc' % len(key) # index past key
self.main.text.tag_remove(SEL, '1.0', END) # remove any sel
self.main.text.tag_add(SEL, where, pastkey) # select key
self.main.text.mark_set(INSERT, pastkey) # for next find
self.main.text.see(where) # scroll display
def onRefind(self):
self.onFind(self.lastfind)
######################
# Tools menu commands
######################
def onFontList(self):
self.fonts.append(self.fonts[0]) # pick next font in list
del self.fonts[0] # resizes the text area
self.modes['Edit'].text.config(font=self.fonts[0])
self.modes['Display'].text.config(font=self.fonts[0])
def onColorList(self):
self.colors.append(self.colors[0]) # pick next color in list
del self.colors[0] # move current to end
self.modes['Edit'].text.config(fg=self.colors[0]['fg'], bg=self.colors[0]['bg'])
self.modes['Display'].text.config(fg=self.colors[0]['fg'], bg=self.colors[0]['bg'])
def onPickFg(self):
self.pickColor('fg')
def onPickBg(self):
self.pickColor('bg')
def pickColor(self, part):
(triple, hexstr) = askcolor()
if hexstr:
apply(self.modes['Edit'].text.config, (), {part: hexstr})
apply(self.modes['Display'].text.config, (), {part: hexstr})
# def onRunCode(self, parallelmode=1):
# """
# run Python code being edited--not an ide, but handy;
# tries to run in file's dir, not cwd (may be pp2e root);
# inputs and adds command-line arguments for script files;
# code's stdin/out/err = editor's start window, if any;
# but parallelmode uses start to open a dos box for i/o;
# """
# from PP2E.launchmodes import System, Start, Fork
# filemode = 0
# thefile = str(self.getFileName())
# cmdargs = askstring('CCG Editor', 'Commandline arguments?') or ''
# if os.path.exists(thefile):
# filemode = askyesno('CCG Editor', 'Run from file?')
# if not filemode: # run text string
# namespace = {'__name__': '__main__'} # run as top-level
# sys.argv = [thefile] + string.split(cmdargs) # could use threads
# exec self.getAllText() + '\n' in namespace # exceptions ignored
# elif askyesno('CCG Editor', 'Text saved in file?'):
# mycwd = os.getcwd() # cwd may be root
# os.chdir(os.path.dirname(thefile) or mycwd) # cd for filenames
# thecmd = thefile + ' ' + cmdargs
# if not parallelmode: # run as file
# System(thecmd, thecmd)() # block editor
# else:
# if sys.platform[:3] == 'win': # spawn in parallel
# Start(thecmd, thecmd)() # or use os.spawnv
# else:
# Fork(thecmd, thecmd)() # spawn in parallel
# os.chdir(mycwd)
#####################
# File menu commands
#####################
def getSignature(self, contents):
return md5.md5(contents).digest()
def my_askopenfilename(self): # objects remember last result dir/file
if not self.openDialog:
self.openDialog = Open(initialdir=self.startfiledir,
filetypes=self.ftypes)
return self.openDialog.show()
def my_asksaveasfilename(self): # objects remember last result dir/file
if not self.saveDialog:
self.saveDialog = SaveAs(initialdir=self.startfiledir,
filetypes=self.ftypes)
self.last_save_signature = self.getSignature(self.getAllText())
return self.saveDialog.show()
def onOpen(self):
file = self.my_askopenfilename()
# FIXME! Only create new window if file exists and is readable
if file:
CFile(file)
def onFirstOpen(self, file):
try:
text = open(file, 'r').read()
except:
showerror('CCG Editor', 'Could not open file ' + file)
else:
self.setAllText(text)
self.setFileName(file)
def compile_if_needed(self):
# Compare the last compiled MD5 signature and present one
# and compile if needed.
# To force compilation, set this signature to None
text = self.getAllText()
textSign = self.getSignature(text)
if textSign != self.last_compile_signature:
# Now compile
ccg2xml.init_global_state(errors_to_string=True)
ccg2xml.options.quiet = True
self.curparse = ccg2xml.parse_string(text)
self.last_compiled_signature = textSign
def onDisplay(self):
self.switch_to('Display')
def onEdit(self):
self.switch_to('Edit')
def onLexicon(self):
self.switch_to('Lexicon')
def onTestbed(self):
self.switch_to('Testbed')
def onRules(self):
self.switch_to('Rules')
def onWords(self):
self.switch_to('Words')
def onFeatures(self):
self.switch_to('Features')
def onNew(self):
CFile()
def getFileName(self):
return self.currfile
def setFileName(self, name):
self.currfile = name # for save
if name:
title = 'VisCCG Editor: %s' % name
else:
title = 'VisCCG Editor'
self.top.title(title)
self.top.iconname(title)
def help(self):
showinfo('Help', 'Sorry, no help for ' + self.__class__.__name__)
# Close this window; if this is the last window, quit
def onClose(self):
assert self in openfiles
if len(openfiles) == 1 or self.top == root:
self.onQuit()
# If we got this far, the user refused to quit, so do nothing
else:
ccg2xml.debug("fooooo\n")
del openfiles[self]
self.top.destroy()
def onQuit(self):
modfiles = False
for f in openfiles:
if f.edit_modified() == YES:
modfiles = True
break
if not modfiles or askyesno('CCG Editor', 'Files are modified, Really quit?'):
self.top.quit()
def main():
ccg2xml.parse_arguments(sys.argv[1:])
ccg2xml.init_global_state_once()
if ccg2xml.global_args and len(ccg2xml.global_args) > 0:
# file name:
fname = ccg2xml.global_args[0]
else:
fname = None
CFile(fname)
mainloop()
if __name__ == '__main__': # when run as a script
main()<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2015 Alex Brandt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import inspect
import logging
import typing # noqa (use mypy typing)
import unittest
import uuid
from torment import fixtures
from torment import contexts
logger = logging.getLogger(__name__)
class FixturesCreateUnitTest(unittest.TestCase):
def test_fixture_create_without_context(self) -> None:
'''torment.fixtures.Fixture() → TypeError'''
self.assertRaises(TypeError, fixtures.Fixture)
def test_fixture_create_with_context(self) -> None:
'''torment.fixtures.Fixture(context).context == context'''
c = unittest.TestCase()
f = fixtures.Fixture(c)
self.assertEqual(f.context, c)
class FixturesPropertyUnitTest(unittest.TestCase):
def setUp(self) -> None:
self.c = unittest.TestCase()
self.f = fixtures.Fixture(self.c)
def test_fixture_category(self) -> None:
'''torment.fixtures.Fixture(context).category == 'fixtures' '''
self.f.__module__ = unittest.mock.MagicMock(__name__ = 'test_torment.test_unit.test_fixtures.fixture_a44bc6dda6654b1395a8c2cbd55d964d')
self.assertEqual(self.f.category, 'fixtures')
def test_fixture_description(self) -> None:
'''torment.fixtures.Fixture(context).description == '94d7c58f6ee44683936c21cb84d1e458—torment.fixtures' '''
self.f.context.module = 'fixtures'
self.f.uuid = uuid.UUID('94d7c58f6ee44683936c21cb84d1e458')
self.assertEqual(self.f.description, '94d7c58f6ee44683936c21cb84d1e458—fixtures')
def test_fixture_name(self) -> None:
'''torment.fixtures.Fixture(context).name == 'test_94d7c58f6ee44683936c21cb84d1e458' '''
self.f.__class__.__name__ = '94d7c58f6ee44683936c21cb84d1e458'
self.assertEqual(self.f.name, 'test_94d7c58f6ee44683936c21cb84d1e458')
class ErrorFixturesPropertyUnitTest(unittest.TestCase):
def test_error_fixture_description(self) -> None:
'''torment.fixtures.ErrorFixture(context).description == 'expected → failure' '''
class fixture(fixtures.Fixture):
@property
def description(self) -> str:
return 'expected'
class error_fixture(fixtures.ErrorFixture, fixture):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.error = RuntimeError('failure')
c = unittest.TestCase()
e = error_fixture(c)
self.assertEqual(e.description, 'expected → failure')
class ErrorFixturesRunTest(unittest.TestCase):
def test_error_fixture_run(self) -> None:
'''torment.fixtures.ErrorFixture(context).run()'''
class fixture(fixtures.Fixture):
def run(self):
raise RuntimeError('failure')
class error_fixture(fixtures.ErrorFixture, fixture):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.error = RuntimeError('failure')
c = unittest.TestCase()
e = error_fixture(c)
e.run()
self.assertIsInstance(e.exception, RuntimeError)
self.assertEqual(e.exception.args, ( 'failure', ))
class OfUnitTest(unittest.TestCase):
def test_of_zero(self) -> None:
'''torment.fixtures.of(()) == []'''
self.assertEqual(len(fixtures.of(())), 0)
def test_of_many_without_subclasses(self) -> None:
'''torment.fixtures.of(( FixtureA, )) == []'''
class FixtureA(object):
def __init__(self, context) -> None:
pass
self.assertEqual(len(fixtures.of(( FixtureA, ))), 0)
def test_of_many_with_subclasses(self) -> None:
'''torment.fixtures.of(( FixtureA, )) == [ fixture_a, ]'''
class FixtureA(object):
def __init__(self, context) -> None:
pass
class FixtureB(FixtureA):
pass
result = fixtures.of(( FixtureA, ))
self.assertEqual(len(result), 1)
self.assertIsInstance(result[0], FixtureB)
class RegisterUnitTest(unittest.TestCase):
def setUp(self) -> None:
_ = unittest.mock.patch('torment.fixtures.inspect')
mocked_inspect = _.start()
self.addCleanup(_.stop)
mocked_inspect.configure_mock(**{ 'isclass': inspect.isclass, 'isfunction': inspect.isfunction, })
mocked_inspect.stack.return_value = ( None, ( None, 'test_unit/test_d43830e2e9624dd19c438b15250c5818.py', ), )
class ContextStub(object):
pass
self.context = ContextStub()
self.context.module = mocked_inspect.getmodule.return_value = 'stack'
self.ns = {} # type: Dict[str, Any]
self.class_name = 'f_d43830e2e9624dd19c438b15250c5818'
def test_zero_properties(self) -> None:
'''torment.fixtures.register({}, (), {})'''
fixtures.register(self.ns, ( fixtures.Fixture, ), {})
_ = self.ns[self.class_name](self.context)
self.assertEqual(_.uuid, uuid.UUID('d43830e2e9624dd19c438b15250c5818'))
def test_one_literal_properties(self) -> None:
'''torment.fixtures.register({}, (), { 'a': 'a', })'''
fixtures.register(self.ns, ( fixtures.Fixture, ), { 'a': 'a', })
_ = self.ns[self.class_name](self.context)
self.assertEqual(_.a, 'a')
def test_one_class_properties(self) -> None:
'''torment.fixtures.register({}, (), { 'a': class, })'''
class A(object):
pass
fixtures.register(self.ns, ( fixtures.Fixture, ), { 'a': A, })
_ = self.ns[self.class_name](self.context)
self.assertIsInstance(_.a, A)
def test_one_fixture_class_properties(self) -> None:
'''torment.fixtures.register({}, (), { 'a': fixture_class, })'''
class A(fixtures.Fixture):
pass
fixtures.register(self.ns, ( fixtures.Fixture, ), { 'a': A, })
_ = self.ns[self.class_name](self.context)
self.assertIsInstance(_.a, A)
self.assertEqual(_.a.context, self.context)
def test_one_function_properties(self) -> None:
'''torment.fixtures.register({}, (), { 'a': self → None, })'''
def a(self) -> None:
pass
fixtures.register(self.ns, ( fixtures.Fixture, ), { 'a': a, })
_ = self.ns[self.class_name](self.context)
self.assertIsNone(_.a)
def test_description_property(self) -> None:
'''torment.fixtures.register({}, (), { 'description': 'needle', })'''
fixtures.register(self.ns, ( fixtures.Fixture, ), { 'description': 'needle', })
_ = self.ns[self.class_name](self.context)
self.assertEqual(_.description, 'd43830e2e9624dd19c438b15250c5818—stack—needle')
def test_error_property(self) -> None:
'''torment.fixtures.register({}, (), { 'error': …, })'''
fixtures.register(self.ns, ( fixtures.Fixture, ), { 'error': { 'class': RuntimeError, }, })
_ = self.ns[self.class_name](self.context)
self.assertIsInstance(_.error, RuntimeError)
def test_mocks_mock_property(self) -> None:
'''torment.fixtures.register({}, (), { 'mocks': { 'symbol': …, }, }).setup()'''
_ = unittest.mock.patch('torment.fixtures._find_mocker')
mocked_fixtures_find_mocker = _.start()
self.addCleanup(_.stop)
mocked_fixtures_find_mocker.return_value = lambda: True
_ = unittest.mock.patch('torment.fixtures._prepare_mock')
mocked_fixtures_prepare_mock = _.start()
self.addCleanup(_.stop)
fixtures.register(self.ns, ( fixtures.Fixture, ), { 'mocks': { 'symbol': {}, }, })
_ = self.ns[self.class_name](self.context)
_.setup()
mocked_fixtures_find_mocker.assert_called_once_with('symbol', self.context)
mocked_fixtures_prepare_mock.assert_called_once_with(self.context, 'symbol')
class PrepareMockUnitTest(unittest.TestCase):
def setUp(self) -> None:
class ContextStub(contexts.TestContext):
mocked_symbol = unittest.mock.MagicMock(name = 'ContextStub.mocked_symbol')
self.context = ContextStub()
def test_prepare_mock_empty_call_list(self) -> None:
'''torment.fixtures._prepare_mock(ContextStub, 'symbol'): mock has empty call list'''
fixtures._prepare_mock(self.context, 'symbol')
self.assertEqual(len(self.context.mocked_symbol.mock_calls), 0)
def test_prepare_mock_side_effect_zero_dots(self) -> None:
'''tormnet.fixtures._prepare_mock(ContextStub, 'symbol', side_effect = range(2))'''
fixtures._prepare_mock(self.context, 'symbol', side_effect = range(2))
self.assertEqual(self.context.mocked_symbol(), 0)
self.assertEqual(self.context.mocked_symbol(), 1)
self.assertRaises(StopIteration, self.context.mocked_symbol)
def test_prepare_mock_return_value_zero_dots(self) -> None:
'''tormnet.fixtures._prepare_mock(ContextStub, 'symbol', return_value = 'a')'''
fixtures._prepare_mock(self.context, 'symbol', return_value = 'a')
self.assertEqual(self.context.mocked_symbol(), 'a')
def test_prepare_mock_return_value_one_dots(self) -> None:
'''tormnet.fixtures._prepare_mock(ContextStub, 'symbol.Sub', return_value = 'a')'''
fixtures._prepare_mock(self.context, 'symbol.Sub', return_value = 'a')
self.assertEqual(self.context.mocked_symbol.Sub(), 'a')
def test_prepare_mock_return_value_many_dots(self) -> None:
'''tormnet.fixtures._prepare_mock(ContextStub, 'symbol.sub.a.b.c', return_value = 'a')'''
fixtures._prepare_mock(self.context, 'symbol.sub.a.b.c', return_value = 'a')
self.assertEqual(self.context.mocked_symbol.sub.a.b.c(), 'a')
def test_prepare_mock_return_value_many_dots_second_level(self) -> None:
'''tormnet.fixtures._prepare_mock(ContextStub, 'symbol.sub.a.b.c', return_value = 'a')'''
class ContextStub(contexts.TestContext):
mocked_symbol_sub = unittest.mock.MagicMock(name = 'ContextStub.mocked_symbol_sub')
c = ContextStub()
fixtures._prepare_mock(c, 'symbol.sub.a.b.c', return_value = 'a')
self.assertEqual(c.mocked_symbol_sub.a.b.c(), 'a')
def test_prepare_mock_return_value_many_dots_all_levels(self) -> None:
'''tormnet.fixtures._prepare_mock(ContextStub, 'symbol.Sub.a.b.c', return_value = 'a')'''
class ContextStub(contexts.TestContext):
mocked_symbol_sub_a_b_c = unittest.mock.MagicMock(name = 'ContextStub.mocked_symbol_sub_a_b_c')
c = ContextStub()
fixtures._prepare_mock(c, 'symbol.Sub.a.b.c', return_value = 'a')
self.assertEqual(c.mocked_symbol_sub_a_b_c(), 'a')<|fim▁hole|>
class FindMockerUnitTest(unittest.TestCase):
def test_find_mocker_found_zero_levels(self) -> None:
'''tormnet.fixtures._find_mocker('symbol', ContextStub) == mock_symbol'''
class ContextStub(contexts.TestContext):
def mock_symbol(self):
pass
c = ContextStub()
method = fixtures._find_mocker('symbol', c)
self.assertEqual(method, c.mock_symbol)
def test_find_mocker_found_second_level(self) -> None:
'''tormnet.fixtures._find_mocker('symbol.Sub', ContextStub) == mock_symbol_Sub'''
class ContextStub(contexts.TestContext):
def mock_symbol_sub(self):
pass
c = ContextStub()
method = fixtures._find_mocker('symbol.Sub', c)
self.assertEqual(method, c.mock_symbol_sub)
def test_find_mocker_found_many_levels(self) -> None:
'''tormnet.fixtures._find_mocker('symbol.sub.a.b', ContextStub) == mock_symbol_sub_a_b'''
class ContextStub(contexts.TestContext):
def mock_symbol_sub_a_b(self):
pass
c = ContextStub()
method = fixtures._find_mocker('symbol.sub.a.b', c)
self.assertEqual(method, c.mock_symbol_sub_a_b)
def test_find_mocker_not_found(self) -> None:
'''tormnet.fixtures._find_mocker('fakesymbol', ContextStub) == lambda: False'''
class ContextStub(contexts.TestContext):
pass
c = ContextStub()
method = fixtures._find_mocker('fakesymbol', c)
self.assertFalse(method())
self.assertEqual(method.__name__, 'noop')
class ResolveFunctionsUnitTest(unittest.TestCase):
def setUp(self) -> None:
class StubFixture(object):
pass
self.f = StubFixture()
self.f.name = 'testing_fixture_stub'
self.o = copy.deepcopy(self.f)
def test_zero_functions(self) -> None:
'''torment.fixtures._resolve_functions({}, fixture)'''
fixtures._resolve_functions({}, self.f)
self.assertEqual(dir(self.o), dir(self.f))
def test_one_functions_without_parameters(self) -> None:
'''torment.fixtures._resolve_functions({ 'a': ø → None, }, fixture)'''
def a() -> None:
pass
fixtures._resolve_functions({ 'a': a, }, self.f)
self.assertEqual(id(self.f.a), id(a))
def test_one_functions_with_self_parameter(self) -> None:
'''torment.fixtures._resolve_functions({ 'a': self → None, }, fixture)'''
def a(self) -> None:
pass
fixtures._resolve_functions({ 'a': a, }, self.f)
self.assertIsNone(self.f.a)
def test_one_functions_with_self_parameter_raises_attributeerror(self) -> None:
'''torment.fixtures._resolve_functions({ 'a': self → self.b, }, fixture)'''
def a(self):
return self.b
fixtures._resolve_functions({ 'a': a, }, self.f)
self.assertEqual(id(self.f.a), id(a))
def test_many_functions(self) -> None:
'''torment.fixtures._resolve_functions({ 'a': self → self.b, 'b': self → None, }, fixture)'''
def a(self) -> None:
return self.b
def b(self) -> None:
pass
fixtures._resolve_functions({ 'a': a, 'b': b, }, self.f)
self.assertIsNone(self.f.a)
self.assertIsNone(self.f.b)
class UniqueClassNameUnitTest(unittest.TestCase):
def setUp(self) -> None:
self.uuid = uuid.uuid4()
def test_empty_namespace(self) -> None:
'''torment.fixtures._unique_class_name({}, uuid) == 'f_{uuid}' '''
n = fixtures._unique_class_name({}, self.uuid)
self.assertEqual(n, 'f_' + self.uuid.hex)
def test_one_namespace(self) -> None:
'''torment.fixtures._unique_class_name({ 'f_{uuid}': None, }, uuid) == 'f_{uuid}_1' '''
n = fixtures._unique_class_name({ 'f_' + self.uuid.hex: None, }, self.uuid)
self.assertEqual(n, 'f_' + self.uuid.hex + '_1')
def test_two_namespace(self) -> None:
'''torment.fixtures._unique_class_name({ 'f_{uuid}': None, 'f_{uuid}_1': None, }, uuid) == 'f_{uuid}_2' '''
n = fixtures._unique_class_name({ 'f_' + self.uuid.hex: None, 'f_' + self.uuid.hex + '_1': None, }, self.uuid)
self.assertEqual(n, 'f_' + self.uuid.hex + '_2')<|fim▁end|> | |
<|file_name|>register.py<|end_file_name|><|fim▁begin|># Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pants.backend.jvm.artifact import Artifact
from pants.backend.jvm.ossrh_publication_metadata import (
Developer,
License,
OSSRHPublicationMetadata,
Scm,
)
from pants.backend.jvm.repository import Repository as repo
from pants.backend.jvm.scala_artifact import ScalaArtifact
from pants.backend.jvm.subsystems.jar_dependency_management import JarDependencyManagementSetup
from pants.backend.jvm.subsystems.scala_platform import ScalaPlatform
from pants.backend.jvm.subsystems.scoverage_platform import ScoveragePlatform
from pants.backend.jvm.subsystems.shader import Shading
from pants.backend.jvm.targets.annotation_processor import AnnotationProcessor
from pants.backend.jvm.targets.benchmark import Benchmark
from pants.backend.jvm.targets.credentials import LiteralCredentials, NetrcCredentials
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.java_agent import JavaAgent
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.targets.javac_plugin import JavacPlugin
from pants.backend.jvm.targets.junit_tests import JUnitTests
from pants.backend.jvm.targets.jvm_app import JvmApp
from pants.backend.jvm.targets.jvm_binary import Duplicate, JarRules, JvmBinary, Skip
from pants.backend.jvm.targets.jvm_prep_command import JvmPrepCommand
from pants.backend.jvm.targets.managed_jar_dependencies import (
ManagedJarDependencies,
ManagedJarLibraries,
)
from pants.backend.jvm.targets.scala_exclude import ScalaExclude
from pants.backend.jvm.targets.scala_jar_dependency import ScalaJarDependency
from pants.backend.jvm.targets.scala_library import ScalaLibrary
from pants.backend.jvm.targets.scalac_plugin import ScalacPlugin
from pants.backend.jvm.targets.unpacked_jars import UnpackedJars
from pants.backend.jvm.tasks.analysis_extraction import AnalysisExtraction
from pants.backend.jvm.tasks.benchmark_run import BenchmarkRun
from pants.backend.jvm.tasks.binary_create import BinaryCreate
from pants.backend.jvm.tasks.bootstrap_jvm_tools import BootstrapJvmTools
from pants.backend.jvm.tasks.bundle_create import BundleCreate
from pants.backend.jvm.tasks.check_published_deps import CheckPublishedDeps
from pants.backend.jvm.tasks.checkstyle import Checkstyle
from pants.backend.jvm.tasks.classmap import ClassmapTask
from pants.backend.jvm.tasks.consolidate_classpath import ConsolidateClasspath
from pants.backend.jvm.tasks.coursier_resolve import CoursierResolve
from pants.backend.jvm.tasks.detect_duplicates import DuplicateDetector
from pants.backend.jvm.tasks.ivy_imports import IvyImports
from pants.backend.jvm.tasks.ivy_outdated import IvyOutdated
from pants.backend.jvm.tasks.jar_create import JarCreate
from pants.backend.jvm.tasks.jar_publish import JarPublish
from pants.backend.jvm.tasks.javadoc_gen import JavadocGen
from pants.backend.jvm.tasks.junit_run import JUnitRun
from pants.backend.jvm.tasks.jvm_compile.javac.javac_compile import JavacCompile
from pants.backend.jvm.tasks.jvm_compile.jvm_classpath_publisher import RuntimeClasspathPublisher
from pants.backend.jvm.tasks.jvm_compile.rsc.rsc_compile import RscCompile
from pants.backend.jvm.tasks.jvm_dependency_check import JvmDependencyCheck
from pants.backend.jvm.tasks.jvm_dependency_usage import JvmDependencyUsage
from pants.backend.jvm.tasks.jvm_platform_analysis import JvmPlatformExplain, JvmPlatformValidate
from pants.backend.jvm.tasks.jvm_run import JvmRun
from pants.backend.jvm.tasks.nailgun_task import NailgunKillall
from pants.backend.jvm.tasks.prepare_resources import PrepareResources
from pants.backend.jvm.tasks.prepare_services import PrepareServices
from pants.backend.jvm.tasks.provide_tools_jar import ProvideToolsJar
from pants.backend.jvm.tasks.run_jvm_prep_command import (
RunBinaryJvmPrepCommand,
RunCompileJvmPrepCommand,
RunTestJvmPrepCommand,
)
from pants.backend.jvm.tasks.scala_repl import ScalaRepl
from pants.backend.jvm.tasks.scaladoc_gen import ScaladocGen
from pants.backend.jvm.tasks.scalafix_task import ScalaFixCheck, ScalaFixFix
from pants.backend.jvm.tasks.scalafmt_task import ScalaFmtCheckFormat, ScalaFmtFormat
from pants.backend.jvm.tasks.scalastyle_task import ScalastyleTask
from pants.backend.jvm.tasks.unpack_jars import UnpackJars
from pants.backend.project_info.tasks.export_dep_as_jar import ExportDepAsJar
from pants.build_graph.app_base import Bundle, DirectoryReMapper
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.goal.goal import Goal
from pants.goal.task_registrar import TaskRegistrar as task
from pants.java.jar.exclude import Exclude
from pants.java.jar.jar_dependency import JarDependencyParseContextWrapper
def build_file_aliases():
return BuildFileAliases(
targets={
"annotation_processor": AnnotationProcessor,
"benchmark": Benchmark,
"credentials": LiteralCredentials,
"jar_library": JarLibrary,
"java_agent": JavaAgent,
"java_library": JavaLibrary,
"javac_plugin": JavacPlugin,
"junit_tests": JUnitTests,
"jvm_app": JvmApp,
"jvm_binary": JvmBinary,
"jvm_prep_command": JvmPrepCommand,
"managed_jar_dependencies": ManagedJarDependencies,
"netrc_credentials": NetrcCredentials,
"scala_library": ScalaLibrary,
"scalac_plugin": ScalacPlugin,
"unpacked_jars": UnpackedJars,
},
objects={
"artifact": Artifact,
"scala_artifact": ScalaArtifact,
"ossrh": OSSRHPublicationMetadata,
"license": License,
"scm": Scm,
"developer": Developer,
"github": Scm.github,
"DirectoryReMapper": DirectoryReMapper,
"Duplicate": Duplicate,
"exclude": Exclude,
"scala_jar": ScalaJarDependency,
"scala_exclude": ScalaExclude,
"jar_rules": JarRules,
"repository": repo,
"Skip": Skip,
"shading_relocate": Shading.create_relocate,
"shading_exclude": Shading.create_exclude,
"shading_keep": Shading.create_keep,
"shading_zap": Shading.create_zap,
"shading_relocate_package": Shading.create_relocate_package,
"shading_exclude_package": Shading.create_exclude_package,
"shading_keep_package": Shading.create_keep_package,
"shading_zap_package": Shading.create_zap_package,
},
context_aware_object_factories={
"bundle": Bundle,
"jar": JarDependencyParseContextWrapper,
"managed_jar_libraries": ManagedJarLibraries,
},
)
def global_subsystems():
return (
ScalaPlatform,
ScoveragePlatform,
)
# TODO https://github.com/pantsbuild/pants/issues/604 register_goals
def register_goals():
ng_killall = task(name="ng-killall", action=NailgunKillall)
ng_killall.install()
Goal.by_name("invalidate").install(ng_killall, first=True)
Goal.by_name("clean-all").install(ng_killall, first=True)
task(name="jar-dependency-management", action=JarDependencyManagementSetup).install("bootstrap")
task(name="jvm-platform-explain", action=JvmPlatformExplain).install("jvm-platform-explain")
task(name="jvm-platform-validate", action=JvmPlatformValidate).install("jvm-platform-validate")
task(name="bootstrap-jvm-tools", action=BootstrapJvmTools).install("bootstrap")
task(name="provide-tools-jar", action=ProvideToolsJar).install("bootstrap")
# Compile
task(name="rsc", action=RscCompile).install("compile")
task(name="javac", action=JavacCompile).install("compile")
# Analysis extraction.
task(name="zinc", action=AnalysisExtraction).install("analysis")
# Dependency resolution.
task(name="coursier", action=CoursierResolve).install("resolve")
task(name="ivy-imports", action=IvyImports).install("imports")
task(name="unpack-jars", action=UnpackJars).install()
task(name="ivy", action=IvyOutdated).install("outdated")
# Resource preparation.
task(name="prepare", action=PrepareResources).install("resources")
task(name="services", action=PrepareServices).install("resources")
task(name="export-classpath", action=RuntimeClasspathPublisher).install()
# This goal affects the contents of the runtime_classpath, and should not be
# combined with any other goals on the command line.
task(name="export-dep-as-jar", action=ExportDepAsJar).install()
task(name="jvm", action=JvmDependencyUsage).install("dep-usage")
task(name="classmap", action=ClassmapTask).install("classmap")
# Generate documentation.
task(name="javadoc", action=JavadocGen).install("doc")
task(name="scaladoc", action=ScaladocGen).install("doc")
# Bundling.
task(name="create", action=JarCreate).install("jar")
detect_duplicates = task(name="dup", action=DuplicateDetector)
task(name="jvm", action=BinaryCreate).install("binary")
detect_duplicates.install("binary")
task(name="consolidate-classpath", action=ConsolidateClasspath).install("bundle")
task(name="jvm", action=BundleCreate).install("bundle")
detect_duplicates.install("bundle")
task(name="detect-duplicates", action=DuplicateDetector).install()
# Publishing.
task(name="check-published-deps", action=CheckPublishedDeps).install("check-published-deps")
task(name="jar", action=JarPublish).install("publish")
# Testing.
task(name="junit", action=JUnitRun).install("test")
task(name="bench", action=BenchmarkRun).install("bench")
# Linting.<|fim▁hole|> task(name="scalafmt", action=ScalaFmtCheckFormat, serialize=False).install("lint")
task(name="scalastyle", action=ScalastyleTask, serialize=False).install("lint")
task(name="checkstyle", action=Checkstyle, serialize=False).install("lint")
task(name="jvm-dep-check", action=JvmDependencyCheck, serialize=False).install("lint")
# Formatting.
# Scalafix has to go before scalafmt in order not to
# further change Scala files after scalafmt.
task(name="scalafix", action=ScalaFixFix).install("fmt")
task(name="scalafmt", action=ScalaFmtFormat, serialize=False).install("fmt")
# Running.
task(name="jvm", action=JvmRun, serialize=False).install("run")
task(name="jvm-dirty", action=JvmRun, serialize=False).install("run-dirty")
task(name="scala", action=ScalaRepl, serialize=False).install("repl")
task(name="scala-dirty", action=ScalaRepl, serialize=False).install("repl-dirty")
task(name="test-jvm-prep-command", action=RunTestJvmPrepCommand).install("test", first=True)
task(name="binary-jvm-prep-command", action=RunBinaryJvmPrepCommand).install(
"binary", first=True
)
task(name="compile-jvm-prep-command", action=RunCompileJvmPrepCommand).install(
"compile", first=True
)<|fim▁end|> | task(name="scalafix", action=ScalaFixCheck).install("lint") |
<|file_name|>huge-largest-array.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::mem::size_of;
#[cfg(target_pointer_width = "32")]
pub fn main() {
assert_eq!(size_of::<[u8; (1 << 31) - 1]>(), (1 << 31) - 1);
}
#[cfg(target_pointer_width = "64")]
pub fn main() {
assert_eq!(size_of::<[u8; (1 << 47) - 1]>(), (1 << 47) - 1);
}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>const Card = require('./src/main');
Card.install = function(Vue) {
Vue.component(Card.name, Card);
};
<|fim▁hole|>module.exports = Card;<|fim▁end|> | |
<|file_name|>package.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#<|fim▁hole|>#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import sys
from spack import *
class Slepc(Package):
"""Scalable Library for Eigenvalue Problem Computations."""
homepage = "http://www.grycap.upv.es/slepc"
url = "http://slepc.upv.es/download/distrib/slepc-3.6.2.tar.gz"
git = "https://bitbucket.org/slepc/slepc.git"
version('develop', branch='master')
version('3.9.1', 'e174ea7c127d9161eef976b0288f0c56d443a58d6ab2dc8af1e8bd66f156ce17')
version('3.9.0', '1f3930db56b4065aaf214ea758ddff1a70bf19d45544cbdfd19d2787db4bfe0b')
version('3.8.2', '1e7d20d20eb26da307d36017461fe4a55f40e947e232739179dbe6412e22ed13')
version('3.8.0', 'c58ccc4e852d1da01112466c48efa41f0839649f3a265925788237d76cd3d963')
version('3.7.4', '2fb782844e3bc265a8d181c3c3e2632a4ca073111c874c654f1365d33ca2eb8a')
version('3.7.3', '3ef9bcc645a10c1779d56b3500472ceb66df692e389d635087d30e7c46424df9')
version('3.7.1', '670216f263e3074b21e0623c01bc0f562fdc0bffcd7bd42dd5d8edbe73a532c2')
version('3.6.3', '384939d009546db37bc05ed81260c8b5ba451093bf891391d32eb7109ccff876')
version('3.6.2', '2ab4311bed26ccf7771818665991b2ea3a9b15f97e29fd13911ab1293e8e65df')
variant('arpack', default=True, description='Enables Arpack wrappers')
variant('blopex', default=False, description='Enables BLOPEX wrappers')
# NOTE: make sure PETSc and SLEPc use the same python.
depends_on('python@2.6:2.8', type='build')
# Cannot mix release and development versions of SLEPc and PETSc:
depends_on('petsc@develop', when='@develop')
depends_on('petsc@3.9:3.9.99', when='@3.9:3.9.99')
depends_on('petsc@3.8:3.8.99', when='@3.8:3.8.99')
depends_on('petsc@3.7:3.7.7', when='@3.7.1:3.7.4')
depends_on('petsc@3.6.3:3.6.4', when='@3.6.2:3.6.3')
depends_on('arpack-ng~mpi', when='+arpack^petsc~mpi~int64')
depends_on('arpack-ng+mpi', when='+arpack^petsc+mpi~int64')
patch('install_name_371.patch', when='@3.7.1')
# Arpack can not be used with 64bit integers.
conflicts('+arpack', when='^petsc+int64')
resource(name='blopex',
url='http://slepc.upv.es/download/external/blopex-1.1.2.tar.gz',
sha256='0081ee4c4242e635a8113b32f655910ada057c59043f29af4b613508a762f3ac',
destination=join_path('installed-arch-' + sys.platform + '-c-opt',
'externalpackages'),
when='+blopex')
def install(self, spec, prefix):
# set SLEPC_DIR for installation
# Note that one should set the current (temporary) directory instead
# its symlink in spack/stage/ !
os.environ['SLEPC_DIR'] = os.getcwd()
options = []
if '+arpack' in spec:
options.extend([
'--with-arpack-dir=%s' % spec['arpack-ng'].prefix.lib,
])
if 'arpack-ng~mpi' in spec:
options.extend([
'--with-arpack-flags=-larpack'
])
else:
options.extend([
'--with-arpack-flags=-lparpack,-larpack'
])
# It isn't possible to install BLOPEX separately and link to it;
# BLOPEX has to be downloaded with SLEPc at configure time
if '+blopex' in spec:
options.append('--download-blopex')
configure('--prefix=%s' % prefix, *options)
make('MAKE_NP=%s' % make_jobs, parallel=False)
if self.run_tests:
make('test', parallel=False)
make('install', parallel=False)
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
# set up SLEPC_DIR for everyone using SLEPc package
spack_env.set('SLEPC_DIR', self.prefix)<|fim▁end|> | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999. |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub(crate) mod file;
pub(crate) mod dir;
pub(crate) mod vfat;
pub(crate) mod ebpb;
pub(crate) mod error;<|fim▁hole|>pub(crate) mod entry;
pub(crate) mod metadata;
pub(crate) mod cache;
pub(crate) mod shared;
pub use self::ebpb::BiosParameterBlock;
pub use self::file::File;
pub use self::dir::Dir;
pub use self::error::Error;
pub use self::vfat::VFat;
pub use self::entry::Entry;
pub use self::metadata::{Metadata, Attributes, Date, Time, Timestamp};
pub use self::shared::Shared;
pub(crate) use self::cache::{CachedDevice, Partition};
pub(crate) use self::fat::{Status, FatEntry};
pub(crate) use self::cluster::Cluster;<|fim▁end|> | pub(crate) mod cluster;
pub(crate) mod fat; |
<|file_name|>generate.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import json
import time<|fim▁hole|>content = u"""\n
type cmdConf struct {
name string
argDesc string
group string
readonly bool
}
"""
def json_to_js(json_path, js_path):
"""Convert `commands.json` to `commands.js`"""
keys = []
with open(json_path) as fp:
_json = json.load(fp)
for k in _json.keys():
keys.append(k.encode('utf-8'))
with open(js_path, "w") as fp:
generate_time(fp)
fp.write("module.exports = [\n")
for k in sorted(keys):
fp.write('\t"%s",\n' % k.lower())
fp.write("]")
def json_to_go_array(json_path, go_path):
g_fp = open(go_path, "w")
with open(json_path) as fp:
_json = json.load(fp)
generate_time(g_fp)
g_fp.write("package main\n\nvar helpCommands = [][]string{\n")
_json_sorted = dict(sorted(_json.items(), key=lambda x: x[0]))
for k, v in _json_sorted.iteritems():
g_fp.write('\t{"%s", "%s", "%s"},\n' % (k, v["arguments"], v["group"]))
g_fp.write("}\n")
g_fp.close()
def json_to_command_cnf(json_path, go_path):
g_fp = open(go_path, "w")
with open(json_path) as fp:
_json = json.load(fp)
generate_time(g_fp)
g_fp.write("package server")
print >> g_fp, content
g_fp.write("var cnfCmds = []cmdConf{\n")
for k, v in _json.iteritems():
g_fp.write('\t{\n\t\t"%s",\n\t\t"%s",\n\t\t"%s", \n\t\t%s,\n\t},\n' %
(k, v["arguments"], v["group"], "true" if v["readonly"] else "false" ))
g_fp.write("}\n")
g_fp.close()
def generate_time(fp):
fp.write("//This file was generated by ./generate.py on %s \n" %
time.strftime('%a %b %d %Y %H:%M:%S %z'))
if __name__ == "__main__":
usage = """
Usage: python %s src_path dst_path"
1. for Node.js client:
python generate.py /path/to/commands.json /path/to/commands.js
2. for cmd/ledis_cli/const.go
python generate.py /path/to/commands.json /path/to/const.go
3. for server/command_cnf.go
python generate.py /path/to/commands.json /path/to/command_cnf.go
"""
if len(sys.argv) != 3:
sys.exit(usage % os.path.basename(sys.argv[0]))
src_path, dst_path = sys.argv[1:]
dst_path_base = os.path.basename(dst_path)
if dst_path_base.endswith(".js"):
json_to_js(src_path, dst_path)
elif dst_path_base.startswith("const.go"):
json_to_go_array(src_path, dst_path)
elif dst_path_base.startswith("command"):
json_to_command_cnf(src_path, dst_path)
else:
print "Not support arguments"<|fim▁end|> | import sys
import os
from collections import OrderedDict as dict
|
<|file_name|>redhat_subscription.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# James Laska (jlaska@redhat.com)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: redhat_subscription
short_description: Manage registration and subscriptions to RHSM using the C(subscription-manager) command
description:
- Manage registration and subscription to the Red Hat Subscription Management entitlement platform using the C(subscription-manager) command
version_added: "1.2"
author: "Barnaby Court (@barnabycourt)"
notes:
- In order to register a system, subscription-manager requires either a username and password, or an activationkey and an Organization ID.
- Since 2.5 values for I(server_hostname), I(server_insecure), I(rhsm_baseurl),
I(server_proxy_hostname), I(server_proxy_port), I(server_proxy_user) and
I(server_proxy_password) are no longer taken from the C(/etc/rhsm/rhsm.conf)
config file and default to None.
requirements:
- subscription-manager
options:
state:
description:
- whether to register and subscribe (C(present)), or unregister (C(absent)) a system
choices: [ "present", "absent" ]
default: "present"
username:
description:
- access.redhat.com or Sat6 username
password:
description:
- access.redhat.com or Sat6 password
server_hostname:
description:
- Specify an alternative Red Hat Subscription Management or Sat6 server
server_insecure:
description:
- Enable or disable https server certificate verification when connecting to C(server_hostname)
rhsm_baseurl:
description:
- Specify CDN baseurl
server_proxy_hostname:
description:
- Specify a HTTP proxy hostname
version_added: "2.4"
server_proxy_port:
description:
- Specify a HTTP proxy port
version_added: "2.4"
server_proxy_user:
description:
- Specify a user for HTTP proxy with basic authentication
version_added: "2.4"
server_proxy_password:
description:
- Specify a password for HTTP proxy with basic authentication
version_added: "2.4"
auto_attach:
description:
- Upon successful registration, auto-consume available subscriptions
- Added in favor of depracated autosubscribe in 2.5.
type: bool
default: 'no'
version_added: "2.5"
aliases: [autosubscribe]
activationkey:
description:
- supply an activation key for use with registration
org_id:
description:
- Organization ID to use in conjunction with activationkey
version_added: "2.0"
environment:
description:
- Register with a specific environment in the destination org. Used with Red Hat Satellite 6.x or Katello
version_added: "2.2"
pool:
description:
- |
Specify a subscription pool name to consume. Regular expressions accepted. Use I(pool_ids) instead if
possible, as it is much faster. Mutually exclusive with I(pool_ids).
default: '^$'
pool_ids:
description:
- |
Specify subscription pool IDs to consume. Prefer over I(pool) when possible as it is much faster.
A pool ID may be specified as a C(string) - just the pool ID (ex. C(0123456789abcdef0123456789abcdef)),
or as a C(dict) with the pool ID as the key, and a quantity as the value (ex.
C(0123456789abcdef0123456789abcdef: 2). If the quantity is provided, it is used to consume multiple
entitlements from a pool (the pool must support this). Mutually exclusive with I(pool).
default: []
version_added: "2.4"
consumer_type:
description:
- The type of unit to register, defaults to system
version_added: "2.1"
consumer_name:
description:
- Name of the system to register, defaults to the hostname
version_added: "2.1"
consumer_id:
description:
- |
References an existing consumer ID to resume using a previous registration
for this system. If the system's identity certificate is lost or corrupted,
this option allows it to resume using its previous identity and subscriptions.
The default is to not specify a consumer ID so a new ID is created.
version_added: "2.1"
force_register:
description:
- Register the system even if it is already registered
type: bool
default: 'no'
version_added: "2.2"
'''
EXAMPLES = '''
- name: Register as user (joe_user) with password (somepass) and auto-subscribe to available content.
redhat_subscription:
state: present
username: joe_user
password: somepass
auto_attach: true
- name: Same as above but subscribe to a specific pool by ID.
redhat_subscription:
state: present
username: joe_user
password: somepass
pool_ids: 0123456789abcdef0123456789abcdef
- name: Register and subscribe to multiple pools.
redhat_subscription:
state: present
username: joe_user
password: somepass
pool_ids:
- 0123456789abcdef0123456789abcdef
- 1123456789abcdef0123456789abcdef
- name: Same as above but consume multiple entitlements.
redhat_subscription:
state: present
username: joe_user
password: somepass
pool_ids:
- 0123456789abcdef0123456789abcdef: 2
- 1123456789abcdef0123456789abcdef: 4
- name: Register and pull existing system data.
redhat_subscription:
state: present
username: joe_user
password: somepass
consumer_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
- name: Register with activationkey and consume subscriptions matching Red Hat Enterprise Server or Red Hat Virtualization
redhat_subscription:
state: present
activationkey: 1-222333444
org_id: 222333444
pool: '^(Red Hat Enterprise Server|Red Hat Virtualization)$'
- name: Update the consumed subscriptions from the previous example (remove Red Hat Virtualization subscription)
redhat_subscription:
state: present
activationkey: 1-222333444
org_id: 222333444
pool: '^Red Hat Enterprise Server$'
- name: Register as user credentials into given environment (against Red Hat Satellite 6.x), and auto-subscribe.
redhat_subscription:
state: present
username: joe_user
password: somepass
environment: Library
auto_attach: true
'''
RETURN = '''
subscribed_pool_ids:
description: List of pool IDs to which system is now subscribed
returned: success
type: complex
contains: {
"8a85f9815ab905d3015ab928c7005de4": "1"
}
'''
import os
import re
import shutil
import tempfile
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.six.moves import configparser
SUBMAN_CMD = None
class RegistrationBase(object):
def __init__(self, module, username=None, password=None):
self.module = module
self.username = username
self.password = password
def configure(self):
raise NotImplementedError("Must be implemented by a sub-class")
def enable(self):
# Remove any existing redhat.repo
redhat_repo = '/etc/yum.repos.d/redhat.repo'
if os.path.isfile(redhat_repo):
os.unlink(redhat_repo)
def register(self):
raise NotImplementedError("Must be implemented by a sub-class")
def unregister(self):
raise NotImplementedError("Must be implemented by a sub-class")
def unsubscribe(self):
raise NotImplementedError("Must be implemented by a sub-class")
def update_plugin_conf(self, plugin, enabled=True):
plugin_conf = '/etc/yum/pluginconf.d/%s.conf' % plugin
if os.path.isfile(plugin_conf):
tmpfd, tmpfile = tempfile.mkstemp()
shutil.copy2(plugin_conf, tmpfile)
cfg = configparser.ConfigParser()
cfg.read([tmpfile])
if enabled:
cfg.set('main', 'enabled', 1)
else:
cfg.set('main', 'enabled', 0)
fd = open(tmpfile, 'w+')
cfg.write(fd)
fd.close()
self.module.atomic_move(tmpfile, plugin_conf)
def subscribe(self, **kwargs):
raise NotImplementedError("Must be implemented by a sub-class")
class Rhsm(RegistrationBase):
def __init__(self, module, username=None, password=None):
RegistrationBase.__init__(self, module, username, password)
self.module = module
def enable(self):
'''
Enable the system to receive updates from subscription-manager.
This involves updating affected yum plugins and removing any
conflicting yum repositories.
'''
RegistrationBase.enable(self)
self.update_plugin_conf('rhnplugin', False)
self.update_plugin_conf('subscription-manager', True)
def configure(self, **kwargs):
'''
Configure the system as directed for registration with RHSM
Raises:
* Exception - if error occurs while running command
'''
args = [SUBMAN_CMD, 'config']
# Pass supplied **kwargs as parameters to subscription-manager. Ignore
# non-configuration parameters and replace '_' with '.'. For example,
# 'server_hostname' becomes '--server.hostname'.
for k, v in kwargs.items():
if re.search(r'^(server|rhsm)_', k) and v is not None:
args.append('--%s=%s' % (k.replace('_', '.', 1), v))
self.module.run_command(args, check_rc=True)
@property
def is_registered(self):
'''
Determine whether the current system
Returns:
* Boolean - whether the current system is currently registered to
RHSM.
'''
args = [SUBMAN_CMD, 'identity']
rc, stdout, stderr = self.module.run_command(args, check_rc=False)
if rc == 0:
return True
else:
return False
def register(self, username, password, auto_attach, activationkey, org_id,
consumer_type, consumer_name, consumer_id, force_register, environment,
rhsm_baseurl, server_insecure, server_hostname, server_proxy_hostname,
server_proxy_port, server_proxy_user, server_proxy_password):
'''
Register the current system to the provided RHSM or Sat6 server
Raises:
* Exception - if error occurs while running command
'''
args = [SUBMAN_CMD, 'register']
# Generate command arguments
if force_register:
args.extend(['--force'])
if rhsm_baseurl:
args.extend(['--baseurl', rhsm_baseurl])
if server_insecure:
args.extend(['--insecure'])
if server_hostname:
args.extend(['--serverurl', server_hostname])
if org_id:
args.extend(['--org', org_id])
if activationkey:
args.extend(['--activationkey', activationkey])
else:
if auto_attach:
args.append('--auto-attach')
if username:
args.extend(['--username', username])
if password:
args.extend(['--password', password])
if consumer_type:
args.extend(['--type', consumer_type])
if consumer_name:
args.extend(['--name', consumer_name])
if consumer_id:
args.extend(['--consumerid', consumer_id])
if environment:
args.extend(['--environment', environment])
if server_proxy_hostname and server_proxy_port:
args.extend(['--proxy', server_proxy_hostname + ':' + server_proxy_port])
if server_proxy_user:
args.extend(['--proxyuser', server_proxy_user])
if server_proxy_password:
args.extend(['--proxypassword', server_proxy_password])
rc, stderr, stdout = self.module.run_command(args, check_rc=True)
def unsubscribe(self, serials=None):
'''
Unsubscribe a system from subscribed channels
Args:
serials(list or None): list of serials to unsubscribe. If
serials is none or an empty list, then
all subscribed channels will be removed.
Raises:
* Exception - if error occurs while running command
'''
items = []
if serials is not None and serials:
items = ["--serial=%s" % s for s in serials]
if serials is None:
items = ["--all"]
if items:
args = [SUBMAN_CMD, 'unsubscribe'] + items
rc, stderr, stdout = self.module.run_command(args, check_rc=True)
return serials
def unregister(self):
'''
Unregister a currently registered system
Raises:
* Exception - if error occurs while running command
'''
args = [SUBMAN_CMD, 'unregister']
rc, stderr, stdout = self.module.run_command(args, check_rc=True)
self.update_plugin_conf('rhnplugin', False)
self.update_plugin_conf('subscription-manager', False)
def subscribe(self, regexp):
'''
Subscribe current system to available pools matching the specified
regular expression. It matches regexp against available pool ids first.
If any pool ids match, subscribe to those pools and return.
If no pool ids match, then match regexp against available pool product
names. Note this can still easily match many many pools. Then subscribe
to those pools.
Since a pool id is a more specific match, we only fallback to matching
against names if we didn't match pool ids.
Raises:
* Exception - if error occurs while running command
'''
# See https://github.com/ansible/ansible/issues/19466
# subscribe to pools whose pool id matches regexp (and only the pool id)
subscribed_pool_ids = self.subscribe_pool(regexp)
# If we found any matches, we are done
# Don't attempt to match pools by product name
if subscribed_pool_ids:
return subscribed_pool_ids
# We didn't match any pool ids.
# Now try subscribing to pools based on product name match
# Note: This can match lots of product names.
subscribed_by_product_pool_ids = self.subscribe_product(regexp)
if subscribed_by_product_pool_ids:
return subscribed_by_product_pool_ids
# no matches
return []
def subscribe_by_pool_ids(self, pool_ids):
for pool_id, quantity in pool_ids.items():
args = [SUBMAN_CMD, 'attach', '--pool', pool_id, '--quantity', quantity]
rc, stderr, stdout = self.module.run_command(args, check_rc=True)
return pool_ids
def subscribe_pool(self, regexp):
'''
Subscribe current system to available pools matching the specified
regular expression
Raises:
* Exception - if error occurs while running command
'''
# Available pools ready for subscription
available_pools = RhsmPools(self.module)
subscribed_pool_ids = []
for pool in available_pools.filter_pools(regexp):
pool.subscribe()
subscribed_pool_ids.append(pool.get_pool_id())
return subscribed_pool_ids
def subscribe_product(self, regexp):
'''
Subscribe current system to available pools matching the specified
regular expression
Raises:
* Exception - if error occurs while running command
'''
# Available pools ready for subscription
available_pools = RhsmPools(self.module)
subscribed_pool_ids = []
for pool in available_pools.filter_products(regexp):
pool.subscribe()
subscribed_pool_ids.append(pool.get_pool_id())
return subscribed_pool_ids
def update_subscriptions(self, regexp):
changed = False
consumed_pools = RhsmPools(self.module, consumed=True)
pool_ids_to_keep = [p.get_pool_id() for p in consumed_pools.filter_pools(regexp)]
pool_ids_to_keep.extend([p.get_pool_id() for p in consumed_pools.filter_products(regexp)])
serials_to_remove = [p.Serial for p in consumed_pools if p.get_pool_id() not in pool_ids_to_keep]
serials = self.unsubscribe(serials=serials_to_remove)
subscribed_pool_ids = self.subscribe(regexp)
if subscribed_pool_ids or serials:
changed = True
return {'changed': changed, 'subscribed_pool_ids': subscribed_pool_ids,
'unsubscribed_serials': serials}
def update_subscriptions_by_pool_ids(self, pool_ids):
changed = False
consumed_pools = RhsmPools(self.module, consumed=True)
existing_pools = {}
for p in consumed_pools:
existing_pools[p.get_pool_id()] = p.QuantityUsed
serials_to_remove = [p.Serial for p in consumed_pools if pool_ids.get(p.get_pool_id(), 0) != p.QuantityUsed]
serials = self.unsubscribe(serials=serials_to_remove)
missing_pools = {}
for pool_id, quantity in pool_ids.items():
if existing_pools.get(pool_id, 0) != quantity:
missing_pools[pool_id] = quantity
self.subscribe_by_pool_ids(missing_pools)
if missing_pools or serials:
changed = True
return {'changed': changed, 'subscribed_pool_ids': missing_pools.keys(),
'unsubscribed_serials': serials}
class RhsmPool(object):
'''
Convenience class for housing subscription information
'''
def __init__(self, module, **kwargs):
self.module = module
for k, v in kwargs.items():
setattr(self, k, v)
def __str__(self):
return str(self.__getattribute__('_name'))
def get_pool_id(self):
return getattr(self, 'PoolId', getattr(self, 'PoolID'))
def subscribe(self):
args = "subscription-manager subscribe --pool %s" % self.get_pool_id()
rc, stdout, stderr = self.module.run_command(args, check_rc=True)
if rc == 0:
return True
else:
return False
class RhsmPools(object):
"""
This class is used for manipulating pools subscriptions with RHSM
"""
def __init__(self, module, consumed=False):
self.module = module
self.products = self._load_product_list(consumed)
def __iter__(self):
return self.products.__iter__()
def _load_product_list(self, consumed=False):
"""
Loads list of all available or consumed pools for system in data structure
Args:
consumed(bool): if True list consumed pools, else list available pools (default False)
"""
args = "subscription-manager list"
if consumed:
args += " --consumed"
else:
args += " --available"
rc, stdout, stderr = self.module.run_command(args, check_rc=True)
products = []
for line in stdout.split('\n'):
# Remove leading+trailing whitespace
line = line.strip()
# An empty line implies the end of a output group
if len(line) == 0:
continue
# If a colon ':' is found, parse
elif ':' in line:
(key, value) = line.split(':', 1)
key = key.strip().replace(" ", "") # To unify
value = value.strip()
if key in ['ProductName', 'SubscriptionName']:
# Remember the name for later processing
products.append(RhsmPool(self.module, _name=value, key=value))
elif products:
# Associate value with most recently recorded product
products[-1].__setattr__(key, value)<|fim▁hole|> # FIXME - log some warning?
# else:
# warnings.warn("Unhandled subscription key/value: %s/%s" % (key,value))
return products
def filter_pools(self, regexp='^$'):
'''
Return a list of RhsmPools whose pool id matches the provided regular expression
'''
r = re.compile(regexp)
for product in self.products:
if r.search(product.get_pool_id()):
yield product
def filter_products(self, regexp='^$'):
'''
Return a list of RhsmPools whose product name matches the provided regular expression
'''
r = re.compile(regexp)
for product in self.products:
if r.search(product._name):
yield product
def main():
# Load RHSM configuration from file
rhsm = Rhsm(None)
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present',
choices=['present', 'absent']),
username=dict(default=None,
required=False),
password=dict(default=None,
required=False,
no_log=True),
server_hostname=dict(default=None,
required=False),
server_insecure=dict(default=None,
required=False),
rhsm_baseurl=dict(default=None,
required=False),
auto_attach=dict(aliases=['autosubscribe'], default=False, type='bool'),
activationkey=dict(default=None,
required=False,
no_log=True),
org_id=dict(default=None,
required=False),
environment=dict(default=None,
required=False, type='str'),
pool=dict(default='^$',
required=False,
type='str'),
pool_ids=dict(default=[],
required=False,
type='list'),
consumer_type=dict(default=None,
required=False),
consumer_name=dict(default=None,
required=False),
consumer_id=dict(default=None,
required=False),
force_register=dict(default=False,
type='bool'),
server_proxy_hostname=dict(default=None,
required=False),
server_proxy_port=dict(default=None,
required=False),
server_proxy_user=dict(default=None,
required=False),
server_proxy_password=dict(default=None,
required=False,
no_log=True),
),
required_together=[['username', 'password'],
['server_proxy_hostname', 'server_proxy_port'],
['server_proxy_user', 'server_proxy_password']],
mutually_exclusive=[['activationkey', 'username'],
['activationkey', 'consumer_id'],
['activationkey', 'environment'],
['activationkey', 'autosubscribe'],
['force', 'consumer_id'],
['pool', 'pool_ids']],
required_if=[['state', 'present', ['username', 'activationkey'], True]],
)
rhsm.module = module
state = module.params['state']
username = module.params['username']
password = module.params['password']
server_hostname = module.params['server_hostname']
server_insecure = module.params['server_insecure']
rhsm_baseurl = module.params['rhsm_baseurl']
auto_attach = module.params['auto_attach']
activationkey = module.params['activationkey']
org_id = module.params['org_id']
if activationkey and not org_id:
module.fail_json(msg='org_id is required when using activationkey')
environment = module.params['environment']
pool = module.params['pool']
pool_ids = {}
for value in module.params['pool_ids']:
if isinstance(value, dict):
if len(value) != 1:
module.fail_json(msg='Unable to parse pool_ids option.')
pool_id, quantity = value.items()[0]
else:
pool_id, quantity = value, 1
pool_ids[pool_id] = str(quantity)
consumer_type = module.params["consumer_type"]
consumer_name = module.params["consumer_name"]
consumer_id = module.params["consumer_id"]
force_register = module.params["force_register"]
server_proxy_hostname = module.params['server_proxy_hostname']
server_proxy_port = module.params['server_proxy_port']
server_proxy_user = module.params['server_proxy_user']
server_proxy_password = module.params['server_proxy_password']
global SUBMAN_CMD
SUBMAN_CMD = module.get_bin_path('subscription-manager', True)
# Ensure system is registered
if state == 'present':
# Register system
if rhsm.is_registered and not force_register:
if pool != '^$' or pool_ids:
try:
if pool_ids:
result = rhsm.update_subscriptions_by_pool_ids(pool_ids)
else:
result = rhsm.update_subscriptions(pool)
except Exception as e:
module.fail_json(msg="Failed to update subscriptions for '%s': %s" % (server_hostname, to_native(e)))
else:
module.exit_json(**result)
else:
module.exit_json(changed=False, msg="System already registered.")
else:
try:
rhsm.enable()
rhsm.configure(**module.params)
rhsm.register(username, password, auto_attach, activationkey, org_id,
consumer_type, consumer_name, consumer_id, force_register,
environment, rhsm_baseurl, server_insecure, server_hostname,
server_proxy_hostname, server_proxy_port, server_proxy_user, server_proxy_password)
if pool_ids:
subscribed_pool_ids = rhsm.subscribe_by_pool_ids(pool_ids)
else:
subscribed_pool_ids = rhsm.subscribe(pool)
except Exception as e:
module.fail_json(msg="Failed to register with '%s': %s" % (server_hostname, to_native(e)))
else:
module.exit_json(changed=True,
msg="System successfully registered to '%s'." % server_hostname,
subscribed_pool_ids=subscribed_pool_ids)
# Ensure system is *not* registered
if state == 'absent':
if not rhsm.is_registered:
module.exit_json(changed=False, msg="System already unregistered.")
else:
try:
rhsm.unsubscribe()
rhsm.unregister()
except Exception as e:
module.fail_json(msg="Failed to unregister: %s" % to_native(e))
else:
module.exit_json(changed=True, msg="System successfully unregistered from %s." % server_hostname)
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>PublisherManager.java<|end_file_name|><|fim▁begin|>/**
* vertigo - simple java starter
*
* Copyright (C) 2013-2017, KleeGroup, direction.technique@kleegroup.com (http://www.kleegroup.com)
* KleeGroup, Centre d'affaire la Boursidiere - BP 159 - 92357 Le Plessis Robinson Cedex - France
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.vertigo.quarto.publisher;
import java.net.URL;
import io.vertigo.dynamo.file.model.VFile;
import io.vertigo.lang.Manager;
import io.vertigo.quarto.publisher.model.PublisherData;
/**
* Gestionnaire centralisé des éditions.
* Le choix du type d'édition est fait par l'appelant qui fournit les paramètres adaptés à son besoin.
*<|fim▁hole|>public interface PublisherManager extends Manager {
/**
* Création d'une nouvelle édition.
* @param fileName Nom du document à générer (! pas son emplacement de stockage !)
* @param modelFileURL Chemin vers le fichier model
* @param data Données à fusionner avec le model
* @return Tache permettant la production d'un document au format passé en paramètre
*/
VFile publish(String fileName, URL modelFileURL, PublisherData data);
}<|fim▁end|> | * @author pchretien, npiedeloup
*/ |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""
Data models used for Blockstore API Client
"""
from datetime import datetime
from uuid import UUID
import attr
import six
def _convert_to_uuid(value):
if not isinstance(value, UUID):
return UUID(value)
return value
@attr.s(frozen=True)
class Collection(object):
"""
Metadata about a blockstore collection
"""
uuid = attr.ib(type=UUID, converter=_convert_to_uuid)
title = attr.ib(type=six.text_type)
@attr.s(frozen=True)
class Bundle(object):
"""
Metadata about a blockstore bundle
"""
uuid = attr.ib(type=UUID, converter=_convert_to_uuid)
title = attr.ib(type=six.text_type)
description = attr.ib(type=six.text_type)
slug = attr.ib(type=six.text_type)
drafts = attr.ib(type=dict) # Dict of drafts, where keys are the draft names and values are draft UUIDs
# Note that if latest_version is 0, it means that no versions yet exist
latest_version = attr.ib(type=int, validator=attr.validators.instance_of(int))
@attr.s(frozen=True)
class Draft(object):
"""
Metadata about a blockstore draft
"""
uuid = attr.ib(type=UUID, converter=_convert_to_uuid)
bundle_uuid = attr.ib(type=UUID, converter=_convert_to_uuid)
name = attr.ib(type=six.text_type)
updated_at = attr.ib(type=datetime, validator=attr.validators.instance_of(datetime))
files = attr.ib(type=dict)
links = attr.ib(type=dict)
@attr.s(frozen=True)
class BundleFile(object):
"""
Metadata about a file in a blockstore bundle or draft.
"""
path = attr.ib(type=six.text_type)
size = attr.ib(type=int)
url = attr.ib(type=six.text_type)
hash_digest = attr.ib(type=six.text_type)
@attr.s(frozen=True)
class DraftFile(BundleFile):
"""
Metadata about a file in a blockstore draft.
"""
modified = attr.ib(type=bool) # Was this file modified in the draft?
@attr.s(frozen=True)
class LinkReference(object):
"""
A pointer to a specific BundleVersion
"""
bundle_uuid = attr.ib(type=UUID, converter=_convert_to_uuid)
version = attr.ib(type=int)
snapshot_digest = attr.ib(type=six.text_type)
@attr.s(frozen=True)
class LinkDetails(object):
"""
Details about a specific link in a BundleVersion or Draft
"""
name = attr.ib(type=str)
direct = attr.ib(type=LinkReference)
indirect = attr.ib(type=list) # List of LinkReference objects
<|fim▁hole|> """
Details about a specific link in a Draft
"""
modified = attr.ib(type=bool)<|fim▁end|> | @attr.s(frozen=True)
class DraftLinkDetails(LinkDetails): |
<|file_name|>app-evented.js<|end_file_name|><|fim▁begin|>var Counter = require('./counterEvented.js');
var c = new Counter(1);
c.increment();
<|fim▁hole|>});<|fim▁end|> | c.on('even', function(){
console.log('even! ' + c.count);
return; |
<|file_name|>pipeline.py<|end_file_name|><|fim▁begin|>from pymuse.pipelinestages.pipeline_stage import PipelineStage
from pymuse.utils.stoppablequeue import StoppableQueue
from pymuse.signal import Signal
from pymuse.constants import PIPELINE_QUEUE_SIZE
class PipelineFork():
"""
This class is used to fork a Pipeline. Ex.: PipelineFork([stage1, stage2], [stage3]) fork the pipeline
in two paths and has two outputs (stage2 and stage3). It is used during the construction of Pipeline.
"""
def __init__(self, *branches):
self.forked_branches: list = list(branches)
class Pipeline():
"""
This class create a multithreaded pipeline. It automatically links together every contiguous stages.
E.g.: Pipeline(Signal(), PipelineStage(), PipelineFork([PipelineStage(), PipelineStage()], [PipelineStage()] ))
"""
def __init__(self, input_signal: Signal, *stages):
self._output_queues = []
self._stages: list = list(stages)
self._link_stages(self._stages)
self._stages[0]._queue_in = input_signal.signal_queue
def get_output_queue(self, queue_index=0) -> StoppableQueue:
"""Return a ref to the queue given by queue_index"""
return self._output_queues[queue_index]
def read_output_queue(self, queue_index=0):
"""Wait to read a data in a queue given by queue_index"""
return self._output_queues[queue_index].get()
def start(self):
"""Start all pipelines stages."""
self._start(self._stages)
def shutdown(self):<|fim▁hole|> def join(self):
"""Ensure every thread (PipelineStage) of the pipeline are done"""
for stage in self._stages:
stage.join()
def _link_pipeline_fork(self, stages: list, index: int):
for fork in stages[index].forked_branches:
stages[index - 1].add_queue_out(fork[0].queue_in)
self._link_stages(fork)
def _link_stages(self, stages: list):
for i in range(1, len(stages)):
if type(stages[i]) == PipelineFork:
self._link_pipeline_fork(stages, i)
else:
stages[i - 1].add_queue_out(stages[i].queue_in)
if issubclass(type(stages[-1]), PipelineStage):
output_queue = StoppableQueue(PIPELINE_QUEUE_SIZE)
stages[-1].add_queue_out(output_queue)
self._output_queues.append(output_queue)
def _start(self, stages: list):
for stage in stages:
if type(stage) == PipelineFork:
for forked_branch in stage.forked_branches:
self._start(forked_branch)
else:
stage.start()
def _shutdown(self, stages: list):
for stage in stages:
if type(stage) == PipelineFork:
for forked_branch in stage.forked_branches:
self._shutdown(forked_branch)
else:
stage.shutdown()<|fim▁end|> | """ shutdowns every child thread (PipelineStage)"""
self._shutdown(self._stages)
|
<|file_name|>RenderTextureSystem.ts<|end_file_name|><|fim▁begin|>import { System } from '../System';
import { Rectangle } from '@pixi/math';
import { BUFFER_BITS } from '@pixi/constants';
import type { Renderer } from '../Renderer';
import type { RenderTexture } from './RenderTexture';
import type { BaseRenderTexture } from './BaseRenderTexture';
import type { MaskData } from '../mask/MaskData';
// Temporary rectangle for assigned sourceFrame or destinationFrame
const tempRect = new Rectangle();
// Temporary rectangle for renderTexture destinationFrame
const tempRect2 = new Rectangle();
// Temporary rectangle for passing the framebuffer viewport
const viewportFrame = new Rectangle();
/**
* System plugin to the renderer to manage render textures.
*
* Should be added after FramebufferSystem
*
* @class
* @extends PIXI.System
* @memberof PIXI.systems
*/
export class RenderTextureSystem extends System
{
public clearColor: number[];
public defaultMaskStack: Array<MaskData>;
public current: RenderTexture;
public readonly sourceFrame: Rectangle;
public readonly destinationFrame: Rectangle;
/**
* @param {PIXI.Renderer} renderer - The renderer this System works for.
*/
constructor(renderer: Renderer)
{
super(renderer);
/**
* The clear background color as rgba
* @member {number[]}
*/
this.clearColor = renderer._backgroundColorRgba;
// TODO move this property somewhere else!
/**
* List of masks for the StencilSystem
* @member {PIXI.Graphics[]}
* @readonly
*/
this.defaultMaskStack = [];
// empty render texture?
/**
* Render texture
* @member {PIXI.RenderTexture}
* @readonly
*/
this.current = null;
/**
* Source frame
* @member {PIXI.Rectangle}
* @readonly
*/
this.sourceFrame = new Rectangle();
/**
* Destination frame
* @member {PIXI.Rectangle}
* @readonly
*/
this.destinationFrame = new Rectangle();
}
/**
* Bind the current render texture
*
* @param {PIXI.RenderTexture} [renderTexture] - RenderTexture to bind, by default its `null`, the screen
* @param {PIXI.Rectangle} [sourceFrame] - part of screen that is mapped to the renderTexture
* @param {PIXI.Rectangle} [destinationFrame] - part of renderTexture, by default it has the same size as sourceFrame
*/
bind(renderTexture: RenderTexture = null, sourceFrame?: Rectangle, destinationFrame?: Rectangle): void
{
const renderer = this.renderer;
this.current = renderTexture;
<|fim▁hole|> let framebuffer;
let resolution;
if (renderTexture)
{
baseTexture = renderTexture.baseTexture as BaseRenderTexture;
resolution = baseTexture.resolution;
if (!sourceFrame)
{
tempRect.width = renderTexture.frame.width;
tempRect.height = renderTexture.frame.height;
sourceFrame = tempRect;
}
if (!destinationFrame)
{
tempRect2.x = renderTexture.frame.x;
tempRect2.y = renderTexture.frame.y;
tempRect2.width = sourceFrame.width;
tempRect2.height = sourceFrame.height;
destinationFrame = tempRect2;
}
framebuffer = baseTexture.framebuffer;
}
else
{
resolution = renderer.resolution;
if (!sourceFrame)
{
tempRect.width = renderer.screen.width;
tempRect.height = renderer.screen.height;
sourceFrame = tempRect;
}
if (!destinationFrame)
{
destinationFrame = tempRect;
destinationFrame.width = sourceFrame.width;
destinationFrame.height = sourceFrame.height;
}
}
viewportFrame.x = destinationFrame.x * resolution;
viewportFrame.y = destinationFrame.y * resolution;
viewportFrame.width = destinationFrame.width * resolution;
viewportFrame.height = destinationFrame.height * resolution;
this.renderer.framebuffer.bind(framebuffer, viewportFrame);
this.renderer.projection.update(destinationFrame, sourceFrame, resolution, !framebuffer);
if (renderTexture)
{
this.renderer.mask.setMaskStack(baseTexture.maskStack);
}
else
{
this.renderer.mask.setMaskStack(this.defaultMaskStack);
}
this.sourceFrame.copyFrom(sourceFrame);
this.destinationFrame.copyFrom(destinationFrame);
}
/**
* Erases the render texture and fills the drawing area with a colour
*
* @param {number[]} [clearColor] - The color as rgba, default to use the renderer backgroundColor
* @param {PIXI.BUFFER_BITS} [mask=BUFFER_BITS.COLOR | BUFFER_BITS.DEPTH] - Bitwise OR of masks
* that indicate the buffers to be cleared, by default COLOR and DEPTH buffers.
* @return {PIXI.Renderer} Returns itself.
*/
clear(clearColor?: number[], mask?: BUFFER_BITS): void
{
if (this.current)
{
clearColor = clearColor || (this.current.baseTexture as BaseRenderTexture).clearColor;
}
else
{
clearColor = clearColor || this.clearColor;
}
this.renderer.framebuffer.clear(clearColor[0], clearColor[1], clearColor[2], clearColor[3], mask);
}
resize(): void // screenWidth, screenHeight)
{
// resize the root only!
this.bind(null);
}
/**
* Resets renderTexture state
*/
reset(): void
{
this.bind(null);
}
}<|fim▁end|> | let baseTexture: BaseRenderTexture; |
<|file_name|>0003_votetoken.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-28 17:08
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):<|fim▁hole|> ('voting', '0002_auto_20150813_2010'),
]
operations = [
migrations.CreateModel(
name='VoteToken',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ticket_code', models.CharField(max_length=255)),
('token_sent', models.DateTimeField(blank=True, null=True)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]<|fim▁end|> |
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL), |
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export { Clock } from './Clock'; |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>export { IMediumSelectedEvent } from './IMediumSelectedEvent';
export { IMediumAddedEvent } from './IMediumAddedEvent';
export { IReaderSelectedEvent } from './IReaderSelectedEvent';
export { IServerMessage } from './IServerMessage';<|fim▁hole|>export { IStats } from './IStats';
export { IGoogleBook } from './google';
export { ISBNDbBook } from './isbndb';<|fim▁end|> | export { IAcl } from './IAcl';
export { IComponentData } from './IComponentData';
export { IWorldCatEntry, IMediaEntry } from './IWorldCat_extern'; |
<|file_name|>model.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
"""
TR-55 Model Implementation
A mapping between variable/parameter names found in the TR-55 document
and variables used in this program are as follows:
* `precip` is referred to as P in the report
* `runoff` is Q
* `evaptrans` maps to ET, the evapotranspiration
* `inf` is the amount of water that infiltrates into the soil (in inches)
* `init_abs` is Ia, the initial abstraction, another form of infiltration
"""
import copy
import numpy as np
from tr55.tablelookup import lookup_cn, lookup_bmp_storage, \
lookup_ki, is_bmp, is_built_type, make_precolumbian, \
get_pollutants, get_bmps, lookup_pitt_runoff, lookup_bmp_drainage_ratio
from tr55.water_quality import get_volume_of_runoff, get_pollutant_load
from tr55.operations import dict_plus
def runoff_pitt(precip, evaptrans, soil_type, land_use):
"""
The Pitt Small Storm Hydrology method. The output is a runoff
value in inches.
This uses numpy to make a linear interpolation between tabular values to
calculate the exact runoff for a given value
`precip` is the amount of precipitation in inches.
"""
runoff_ratios = lookup_pitt_runoff(soil_type, land_use)
runoff_ratio = np.interp(precip, runoff_ratios['precip'], runoff_ratios['Rv'])
runoff = precip*runoff_ratio
return min(runoff, precip - evaptrans)
def nrcs_cutoff(precip, curve_number):<|fim▁hole|> """
if precip <= -1 * (2 * (curve_number - 100.0) / curve_number):
return True
else:
return False
def runoff_nrcs(precip, evaptrans, soil_type, land_use):
"""
The runoff equation from the TR-55 document. The output is a
runoff value in inches.
`precip` is the amount of precipitation in inches.
"""
curve_number = lookup_cn(soil_type, land_use)
if nrcs_cutoff(precip, curve_number):
return 0.0
potential_retention = (1000.0 / curve_number) - 10
initial_abs = 0.2 * potential_retention
precip_minus_initial_abs = precip - initial_abs
numerator = pow(precip_minus_initial_abs, 2)
denominator = (precip_minus_initial_abs + potential_retention)
runoff = numerator / denominator
return min(runoff, precip - evaptrans)
def simulate_cell_day(precip, evaptrans, cell, cell_count):
"""
Simulate a bunch of cells of the same type during a one-day event.
`precip` is the amount of precipitation in inches.
`evaptrans` is evapotranspiration in inches per day - this is the
ET for the cell after taking the crop/landscape factor into account
this is NOT the ETmax.
`cell` is a string which contains a soil type and land use
separated by a colon.
`cell_count` is the number of cells to simulate.
The return value is a dictionary of runoff, evapotranspiration, and
infiltration as a volume (inches * #cells).
"""
def clamp(runoff, et, inf, precip):
"""
This function ensures that runoff + et + inf <= precip.
NOTE: Infiltration is normally independent of the
precipitation level, but this function introduces a slight
dependency (that is, at very low levels of precipitation, this
function can cause infiltration to be smaller than it
ordinarily would be.
"""
total = runoff + et + inf
if (total > precip):
scale = precip / total
runoff *= scale
et *= scale
inf *= scale
return (runoff, et, inf)
precip = max(0.0, precip)
soil_type, land_use, bmp = cell.lower().split(':')
# If there is no precipitation, then there is no runoff or
# infiltration; however, there is evapotranspiration. (It is
# understood that over a period of time, this can lead to the sum
# of the three values exceeding the total precipitation.)
if precip == 0.0:
return {
'runoff-vol': 0.0,
'et-vol': 0.0,
'inf-vol': 0.0,
}
# If the BMP is cluster_housing or no_till, then make it the
# land-use. This is done because those two types of BMPs behave
# more like land-uses than they do BMPs.
if bmp and not is_bmp(bmp):
land_use = bmp or land_use
# When the land-use is a built-type use the Pitt Small Storm Hydrology
# Model until the runoff predicted by the NRCS model is greater than that
# predicted by the NRCS model.
if is_built_type(land_use):
pitt_runoff = runoff_pitt(precip, evaptrans, soil_type, land_use)
nrcs_runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
runoff = max(pitt_runoff, nrcs_runoff)
else:
runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
inf = max(0.0, precip - (evaptrans + runoff))
# (runoff, evaptrans, inf) = clamp(runoff, evaptrans, inf, precip)
return {
'runoff-vol': cell_count * runoff,
'et-vol': cell_count * evaptrans,
'inf-vol': cell_count * inf,
}
def create_unmodified_census(census):
"""
This creates a cell census, ignoring any modifications. The
output is suitable for use as input to `simulate_water_quality`.
"""
unmod = copy.deepcopy(census)
unmod.pop('modifications', None)
return unmod
def create_modified_census(census):
"""
This creates a cell census, with modifications, that is suitable
for use as input to `simulate_water_quality`.
For every type of cell that undergoes modification, the
modifications are indicated with a sub-distribution under that
cell type.
"""
mod = copy.deepcopy(census)
mod.pop('modifications', None)
for (cell, subcensus) in mod['distribution'].items():
n = subcensus['cell_count']
changes = {
'distribution': {
cell: {
'distribution': {
cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
for modification in (census.get('modifications') or []):
for (orig_cell, subcensus) in modification['distribution'].items():
n = subcensus['cell_count']
soil1, land1 = orig_cell.split(':')
soil2, land2, bmp = modification['change'].split(':')
changed_cell = '%s:%s:%s' % (soil2 or soil1, land2 or land1, bmp)
changes = {
'distribution': {
orig_cell: {
'distribution': {
orig_cell: {'cell_count': -n},
changed_cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
return mod
def simulate_water_quality(tree, cell_res, fn,
pct=1.0, current_cell=None, precolumbian=False):
"""
Perform a water quality simulation by doing simulations on each of
the cell types (leaves), then adding them together by summing the
values of a node's subtrees and storing them at that node.
`tree` is the (sub)tree of cell distributions that is currently
under consideration.
`pct` is the percentage of calculated water volume to retain.
`cell_res` is the size of each cell/pixel in meters squared
(used for turning inches of water into volumes of water).
`fn` is a function that takes a cell type and a number of cells
and returns a dictionary containing runoff, et, and inf as
volumes.
`current_cell` is the cell type for the present node.
"""
# Internal node.
if 'cell_count' in tree and 'distribution' in tree:
n = tree['cell_count']
# simulate subtrees
if n != 0:
tally = {}
for cell, subtree in tree['distribution'].items():
simulate_water_quality(subtree, cell_res, fn,
pct, cell, precolumbian)
subtree_ex_dist = subtree.copy()
subtree_ex_dist.pop('distribution', None)
tally = dict_plus(tally, subtree_ex_dist)
tree.update(tally) # update this node
# effectively a leaf
elif n == 0:
for pol in get_pollutants():
tree[pol] = 0.0
# Leaf node.
elif 'cell_count' in tree and 'distribution' not in tree:
# the number of cells covered by this leaf
n = tree['cell_count']
# canonicalize the current_cell string
split = current_cell.split(':')
if (len(split) == 2):
split.append('')
if precolumbian:
split[1] = make_precolumbian(split[1])
current_cell = '%s:%s:%s' % tuple(split)
# run the runoff model on this leaf
result = fn(current_cell, n) # runoff, et, inf
runoff_adjustment = result['runoff-vol'] - (result['runoff-vol'] * pct)
result['runoff-vol'] -= runoff_adjustment
result['inf-vol'] += runoff_adjustment
tree.update(result)
# perform water quality calculation
if n != 0:
soil_type, land_use, bmp = split
runoff_per_cell = result['runoff-vol'] / n
liters = get_volume_of_runoff(runoff_per_cell, n, cell_res)
for pol in get_pollutants():
tree[pol] = get_pollutant_load(land_use, pol, liters)
def postpass(tree):
"""
Remove volume units and replace them with inches.
"""
if 'cell_count' in tree:
if tree['cell_count'] > 0:
n = tree['cell_count']
tree['runoff'] = tree['runoff-vol'] / n
tree['et'] = tree['et-vol'] / n
tree['inf'] = tree['inf-vol'] / n
else:
tree['runoff'] = 0
tree['et'] = 0
tree['inf'] = 0
tree.pop('runoff-vol', None)
tree.pop('et-vol', None)
tree.pop('inf-vol', None)
if 'distribution' in tree:
for subtree in tree['distribution'].values():
postpass(subtree)
def compute_bmp_effect(census, m2_per_pixel, precip):
"""
Compute the overall amount of water retained by infiltration/retention
type BMP's.
Result is a percent of runoff remaining after water is trapped in
infiltration/retention BMP's
"""
meters_per_inch = 0.0254
cubic_meters = census['runoff-vol'] * meters_per_inch * m2_per_pixel
# 'runoff-vol' in census is in inches*#cells
bmp_dict = census.get('BMPs', {})
bmp_keys = set(bmp_dict.keys())
reduction = 0.0
for bmp in set.intersection(set(get_bmps()), bmp_keys):
bmp_area = bmp_dict[bmp]
storage_space = (lookup_bmp_storage(bmp) * bmp_area)
max_reduction = lookup_bmp_drainage_ratio(bmp) * bmp_area * precip * meters_per_inch
bmp_reduction = min(max_reduction, storage_space)
reduction += bmp_reduction
return 0 if not cubic_meters else \
max(0.0, cubic_meters - reduction) / cubic_meters
def simulate_modifications(census, fn, cell_res, precip, pc=False):
"""
Simulate effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`fn` is as described in `simulate_water_quality`.
`cell_res` is as described in `simulate_water_quality`.
"""
mod = create_modified_census(census)
simulate_water_quality(mod, cell_res, fn, precolumbian=pc)
pct = compute_bmp_effect(mod, cell_res, precip)
simulate_water_quality(mod, cell_res, fn, pct=pct, precolumbian=pc)
postpass(mod)
unmod = create_unmodified_census(census)
simulate_water_quality(unmod, cell_res, fn, precolumbian=pc)
postpass(unmod)
return {
'unmodified': unmod,
'modified': mod
}
def simulate_day(census, precip, cell_res=10, precolumbian=False):
"""
Simulate a day, including water quality effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`cell_res` is as described in `simulate_water_quality`.
`precolumbian` indicates that artificial types should be turned
into forest.
"""
et_max = 0.207
# From the EPA WaterSense data finder for the Philadelphia airport (19153)
# Converted to daily number in inches per day.
# http://www3.epa.gov/watersense/new_homes/wb_data_finder.html
# TODO: include Potential Max ET as a data layer from CGIAR
# http://csi.cgiar.org/aridity/Global_Aridity_PET_Methodolgy.asp
if 'modifications' in census:
verify_census(census)
def fn(cell, cell_count):
# Compute et for cell type
split = cell.split(':')
if (len(split) == 2):
(land_use, bmp) = split
else:
(_, land_use, bmp) = split
et = et_max * lookup_ki(bmp or land_use)
# Simulate the cell for one day
return simulate_cell_day(precip, et, cell, cell_count)
return simulate_modifications(census, fn, cell_res, precip, precolumbian)
def verify_census(census):
"""
Assures that there is no soil type/land cover pair
in a modification census that isn't in the AoI census.
"""
for modification in census['modifications']:
for land_cover in modification['distribution']:
if land_cover not in census['distribution']:
raise ValueError("Invalid modification census")<|fim▁end|> | """
A function to find the cutoff between precipitation/curve number
pairs that have zero runoff by definition, and those that do not. |
<|file_name|>function_name_property_function_expressions.js<|end_file_name|><|fim▁begin|>function test() {
return (function foo(){}).name === 'foo' &&<|fim▁hole|>if (!test())
throw new Error("Test failed");<|fim▁end|> | (function(){}).name === '';
}
|
<|file_name|>mkeventd.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
import ast
import re
import socket
import time
import os
from pathlib2 import Path
import config
import livestatus
import sites
from gui_exceptions import MKGeneralException
import cmk.paths
import cmk.ec.settings
import cmk.ec.export
import cmk.store
import cmk.utils
if cmk.is_managed_edition():
import managed
else:
managed = None
# ASN1 MIB source directory candidates. Non existing dirs are ok.
# Please sync these paths with htdocs/mkeventd.py
mib_dirs = [ ('/usr/share/snmp/mibs', _('System MIBs')) ]
socket_path = cmk.paths.omd_root + "/tmp/run/mkeventd/status"
compiled_mibs_dir = cmk.paths.omd_root + "/local/share/check_mk/compiled_mibs"
# Please sync these paths with htdocs/mkeventd.py
mib_upload_dir = cmk.paths.omd_root + "/local/share/snmp/mibs"
mib_dirs.insert(0, (cmk.paths.omd_root + "/share/snmp/mibs", _('MIBs shipped with Check_MK')))
mib_dirs.insert(0, (mib_upload_dir, _('Custom MIBs')))
syslog_priorities = [
(0, "emerg" ),
(1, "alert" ),
(2, "crit" ),
(3, "err" ),
(4, "warning" ),
(5, "notice" ),
(6, "info" ),
(7, "debug" ),
]
syslog_facilities = [
(0, "kern"),
(1, "user"),
(2, "mail"),
(3, "daemon"),
(4, "auth"),
(5, "syslog"),
(6, "lpr"),
(7, "news"),
(8, "uucp"),
(9, "cron"),
(10, "authpriv"),
(11, "ftp"),
(12, "(12: unused)"),
(13, "(13: unused)"),
(14, "(14: unused)"),
(15, "(15: unused)"),
(16, "local0"),
(17, "local1"),
(18, "local2"),
(19, "local3"),
(20, "local4"),
(21, "local5"),
(22, "local6"),
(23, "local7"),
(31, "snmptrap"),
]
phase_names = {
'counting' : _("counting"),
'delayed' : _("delayed"),
'open' : _("open"),
'ack' : _("acknowledged"),
'closed' : _("closed"),
}
action_whats = {
"ORPHANED" : _("Event deleted in counting state because rule was deleted."),
"NOCOUNT" : _("Event deleted in counting state because rule does not count anymore"),
"DELAYOVER" : _("Event opened because the delay time has elapsed before cancelling event arrived."),
"EXPIRED" : _("Event deleted because its livetime expired"),
"COUNTREACHED" : _("Event deleted because required count had been reached"),
"COUNTFAILED" : _("Event created by required count was not reached in time"),
"UPDATE" : _("Event information updated by user"),
"NEW" : _("New event created"),
"DELETE" : _("Event deleted manually by user"),
"EMAIL" : _("Email sent"),
"SCRIPT" : _("Script executed"),
"CANCELLED" : _("The event was cancelled because the corresponding OK message was received"),
"ARCHIVED" : _("Event was archived because no rule matched and archiving is activated in global settings."),
"AUTODELETE" : _("Event was deleted automatically"),
"CHANGESTATE" : _("State of event changed by user"),
}
def service_levels():
try:
return config.mkeventd_service_levels
except:
return [(0, "(no service level)")]
def action_choices(omit_hidden = False):
# The possible actions are configured in mkeventd.mk,
# not in multisite.mk (like the service levels). That
# way we have not direct access to them but need
# to load them from the configuration.
return [ ( "@NOTIFY", _("Send monitoring notification")) ] + \
[ (a["id"], a["title"])
for a in eventd_configuration().get("actions", [])
if not omit_hidden or not a.get("hidden") ]
cached_config = None
def eventd_configuration():
global cached_config
if cached_config and cached_config[0] is html:
return cached_config[1]
settings = cmk.ec.settings.settings('',
Path(cmk.paths.omd_root),
Path(cmk.paths.default_config_dir),
[''])
config = cmk.ec.export.load_config(settings)
cached_config = (html, config)
return config
def daemon_running():
return os.path.exists(socket_path)
# Note: in order to be able to simulate an original IP address
# we put hostname|ipaddress into the host name field. The EC
# recognizes this and unpacks the data correctly.
def send_event(event):
# "<%PRI%>@%TIMESTAMP%;%SL% %HOSTNAME% %syslogtag% %msg%\n"
prio = (event["facility"] << 3) + event["priority"]
rfc = [
"<%d>@%d" % (prio, int(time.time())),
"%d %s|%s %s: %s\n" % (event["sl"], event["host"],
event["ipaddress"], event["application"], event["text"]),
]
execute_command("CREATE", map(cmk.utils.make_utf8, rfc), site=event["site"])
return ";".join(rfc)
def get_local_ec_status():
response = livestatus.LocalConnection().query("GET eventconsolestatus")
return dict(zip(response[0], response[1]))
def replication_mode():
try:
status = get_local_ec_status()
return status["status_replication_slavemode"]
except livestatus.MKLivestatusSocketError:
return "stopped"
# Only use this for master/slave replication. For status queries use livestatus
def query_ec_directly(query):
try:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
timeout = config.mkeventd_connect_timeout
except:
timeout = 10
sock.settimeout(timeout)
sock.connect(socket_path)
sock.sendall(query)
sock.shutdown(socket.SHUT_WR)
response_text = ""
while True:
chunk = sock.recv(8192)
response_text += chunk
if not chunk:
break
return ast.literal_eval(response_text)
except SyntaxError, e:
raise MKGeneralException(_("Invalid response from event daemon: "
"<pre>%s</pre>") % response_text)
except Exception, e:
raise MKGeneralException(_("Cannot connect to event daemon via %s: %s") %
(socket_path, e))
def execute_command(name, args=None, site=None):
if args:
formated_args = ";" + ";".join(args)
else:
formated_args = ""
query = "[%d] EC_%s%s" % (int(time.time()), name, formated_args)
sites.live().command(query, site)
def get_total_stats(only_sites):
stats_keys = [
"status_average_message_rate",
"status_average_rule_trie_rate",
"status_average_rule_hit_rate",
"status_average_event_rate",
"status_average_connect_rate",
"status_average_overflow_rate",
"status_average_rule_trie_rate",
"status_average_rule_hit_rate",
"status_average_processing_time",
"status_average_request_time",
"status_average_sync_time",
]
stats_per_site = list(get_stats_per_site(only_sites, stats_keys))
# First simply add rates. Times must then be averaged
# weighted by message rate or connect rate
total_stats = {}
for row in stats_per_site:
for key, value in row.items():
if key.endswith("rate"):
total_stats.setdefault(key, 0.0)
total_stats[key] += value<|fim▁hole|> if not total_stats:
if only_sites is None:
raise MKGeneralException(_("Got no data from any site"))
else:
raise MKGeneralException(_("Got no data from this site"))
for row in stats_per_site:
for time_key, in_relation_to in [
( "status_average_processing_time", "status_average_message_rate" ),
( "status_average_request_time", "status_average_connect_rate" ),
]:
total_stats.setdefault(time_key, 0.0)
if total_stats[in_relation_to]: # avoid division by zero
my_weight = row[in_relation_to] / total_stats[in_relation_to]
total_stats[time_key] += my_weight * row[time_key]
total_sync_time = 0.0
count = 0
for row in stats_per_site:
if row["status_average_sync_time"] > 0.0:
count += 1
total_sync_time += row["status_average_sync_time"]
if count > 0:
total_stats["status_average_sync_time"] = total_sync_time / count
return total_stats
def get_stats_per_site(only_sites, stats_keys):
try:
sites.live().set_only_sites(only_sites)
for list_row in sites.live().query("GET eventconsolestatus\nColumns: %s" % " ".join(stats_keys)):
yield dict(zip(stats_keys, list_row))
finally:
sites.live().set_only_sites(None)
# Rule matching for simulation. Yes - there is some hateful code duplication
# here. But it does not make sense to query the live eventd here since it
# does not know anything about the currently configured but not yet activated
# rules. And also we do not want to have shared code.
def event_rule_matches(rule_pack, rule, event):
result = event_rule_matches_non_inverted(rule_pack, rule, event)
if rule.get("invert_matching"):
if type(result) == tuple:
return _("The rule would match, but matching is inverted.")
else:
return False, ()
else:
return result
def event_rule_matches_non_inverted(rule_pack, rule, event):
if False == match_ipv4_network(rule.get("match_ipaddress", "0.0.0.0/0"), event["ipaddress"]):
return _("The source IP address does not match.")
if False == match(rule.get("match_host"), event["host"], complete=True):
return _("The host name does not match.")
if False == match(rule.get("match_application"), event["application"], complete=False):
return _("The application (syslog tag) does not match")
if "match_facility" in rule and event["facility"] != rule["match_facility"]:
return _("The syslog facility does not match")
# First try cancelling rules
if "match_ok" in rule or "cancel_priority" in rule:
if "cancel_priority" in rule:
up, lo = rule["cancel_priority"]
cp = event["priority"] >= lo and event["priority"] <= up
else:
cp = True
match_groups = match(rule.get("match_ok", ""), event["text"], complete = False)
if match_groups != False and cp:
if match_groups == True:
match_groups = ()
return True, match_groups
try:
match_groups = match(rule.get("match"), event["text"], complete = False)
except Exception, e:
return _("Invalid regular expression: %s") % e
if match_groups == False:
return _("The message text does not match the required pattern.")
if "match_priority" in rule:
prio_from, prio_to = rule["match_priority"]
if prio_from > prio_to:
prio_to, prio_from = prio_from, prio_to
p = event["priority"]
if p < prio_from or p > prio_to:
return _("The syslog priority is not in the required range.")
if "match_sl" in rule:
sl_from, sl_to = rule["match_sl"]
if sl_from > sl_to:
sl_to, sl_from = sl_from, sl_to
p = event.get("sl")
if p == None:
return _("No service level is set in event")
if p < sl_from or p > sl_to:
return _("Wrong service level %d (need %d..%d)") % (p, sl_from, sl_to)
if "match_timeperiod" in rule:
reason = check_timeperiod(rule["match_timeperiod"])
if reason:
return reason
if cmk.is_managed_edition():
import managed
if "customer" in rule_pack:
rule_customer_id = rule_pack["customer"]
else:
rule_customer_id = rule.get("customer", managed.SCOPE_GLOBAL)
site_customer_id = managed.get_customer_id(config.sites[event["site"]])
if rule_customer_id != managed.SCOPE_GLOBAL and site_customer_id != rule_customer_id:
return _("Wrong customer")
if match_groups == True:
match_groups = () # no matching groups
return False, match_groups
def check_timeperiod(tpname):
try:
livesock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
livesock.connect(cmk.paths.livestatus_unix_socket)
livesock.send("GET timeperiods\nFilter: name = %s\nColumns: in\n" % tpname)
livesock.shutdown(socket.SHUT_WR)
answer = livesock.recv(100).strip()
if answer == "":
return _("The timeperiod %s is not known to the local monitoring core") % tpname
elif int(answer) == 0:
return _("The timeperiod %s is currently not active") % tpname
except Exception, e:
if config.debug:
raise
return _("Cannot update timeperiod information for %s: %s") % (tpname, e)
def match(pattern, text, complete = True):
if pattern == None:
return True
else:
if complete:
if not pattern.endswith("$"):
pattern += '$'
m = re.compile(pattern, re.IGNORECASE).match(text)
else:
m = re.compile(pattern, re.IGNORECASE).search(text)
if m:
return m.groups()
else:
return False
def match_ipv4_network(pattern, ipaddress_text):
network, network_bits = parse_ipv4_network(pattern) # is validated by valuespec
if network_bits == 0:
return True # event if ipaddress is empty
try:
ipaddress = parse_ipv4_address(ipaddress_text)
except:
return False # invalid address never matches
# first network_bits of network and ipaddress must be
# identical. Create a bitmask.
bitmask = 0
for n in range(32):
bitmask = bitmask << 1
if n < network_bits:
bit = 1
else:
bit = 0
bitmask += bit
return (network & bitmask) == (ipaddress & bitmask)
def parse_ipv4_address(text):
parts = map(int, text.split("."))
return (parts[0] << 24) + (parts[1] << 16) + (parts[2] << 8) + parts[3]
def parse_ipv4_network(text):
if "/" not in text:
return parse_ipv4_address(text), 32
network_text, bits_text = text.split("/")
return parse_ipv4_address(network_text), int(bits_text)<|fim▁end|> | |
<|file_name|>embedErrorStyles.ts<|end_file_name|><|fim▁begin|>/**
* @copyright 2009-2019 Vanilla Forums Inc.
* @license GPL-2.0-only
*/
import { styleFactory } from "@library/styles/styleUtils";
import { useThemeCache } from "@library/styles/themeCache";
import { flexHelper } from "@library/styles/styleHelpers";
import { px } from "csx";
export const embedErrorClasses = useThemeCache(() => {
const style = styleFactory("embedError");
const renderErrorRoot = style("renderErrorRoot", {
display: "block",
textAlign: "left",
});
const renderErrorIconLink = style("renderErrorIconLink", {
paddingLeft: px(4),
verticalAlign: "middle",<|fim▁hole|> return { renderErrorRoot, renderErrorIconLink };
});<|fim▁end|> | });
|
<|file_name|>object_detector_dataloader.py<|end_file_name|><|fim▁begin|># Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dataloader for object detection."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
from typing import Collection, Dict, List, Optional, Tuple, TypeVar, Union
import tensorflow as tf
from tensorflow_examples.lite.model_maker.core.api.api_util import mm_export
from tensorflow_examples.lite.model_maker.core.data_util import dataloader
from tensorflow_examples.lite.model_maker.core.data_util import object_detector_dataloader_util as util
import yaml
from tensorflow_examples.lite.model_maker.third_party.efficientdet import dataloader as det_dataloader
from tensorflow_examples.lite.model_maker.third_party.efficientdet.keras import label_util
DetectorDataLoader = TypeVar('DetectorDataLoader', bound='DataLoader')
# Csv lines with the label map.
CsvLines = Tuple[List[List[List[str]]], Dict[int, str]]
def _get_label_map(label_map):
"""Gets the label map dict."""
if isinstance(label_map, list):
label_map_dict = {}
for i, label in enumerate(label_map):
# 0 is resevered for background.
label_map_dict[i + 1] = label
label_map = label_map_dict
label_map = label_util.get_label_map(label_map)
if 0 in label_map and label_map[0] != 'background':
raise ValueError('0 must be resevered for background.')
label_map.pop(0, None)
name_set = set()
for idx, name in label_map.items():
if not isinstance(idx, int):
raise ValueError('The key (label id) in label_map must be integer.')
if not isinstance(name, str):
raise ValueError('The value (label name) in label_map must be string.')
if name in name_set:
raise ValueError('The value: %s (label name) can\'t be duplicated.' %
name)
name_set.add(name)
return label_map
def _group_csv_lines(csv_file: str,
set_prefixes: List[str],
delimiter: str = ',',
quotechar: str = '"') -> CsvLines:
"""Groups csv_lines for different set_names and label_map.
Args:
csv_file: filename of the csv file.
set_prefixes: Set prefix names for training, validation and test data. e.g.
['TRAIN', 'VAL', 'TEST'].
delimiter: Character used to separate fields.
quotechar: Character used to quote fields containing special characters.
Returns:
[training csv lines, validation csv lines, test csv lines], label_map
"""
# Dict that maps integer label ids to string label names.
label_map = {}
with tf.io.gfile.GFile(csv_file, 'r') as f:
reader = csv.reader(f, delimiter=delimiter, quotechar=quotechar)
# `lines_list` = [training csv lines, validation csv lines, test csv lines]
# Each csv line is a list of strings separated by delimiter. e.g.
# row 'one,two,three' in the csv file will be ['one', two', 'three'].
lines_list = [[], [], []]
for line in reader:
# Groups lines by the set_name.
set_name = line[0].strip()
for i, set_prefix in enumerate(set_prefixes):
if set_name.startswith(set_prefix):
lines_list[i].append(line)
label = line[2].strip()
# Updates label_map if it's a new label.
if label not in label_map.values():
label_map[len(label_map) + 1] = label
return lines_list, label_map
@mm_export('object_detector.DataLoader')
class DataLoader(dataloader.DataLoader):
"""DataLoader for object detector."""
def __init__(self,
tfrecord_file_patten,
size,
label_map,
annotations_json_file=None):
"""Initialize DataLoader for object detector.
Args:
tfrecord_file_patten: Glob for tfrecord files. e.g. "/tmp/coco*.tfrecord".
size: The size of the dataset.
label_map: Variable shows mapping label integers ids to string label
names. 0 is the reserved key for `background` and doesn't need to be
included in label_map. Label names can't be duplicated. Supported
formats are:
1. Dict, map label integers ids to string label names, such as {1:
'person', 2: 'notperson'}. 2. List, a list of label names such as
['person', 'notperson'] which is
the same as setting label_map={1: 'person', 2: 'notperson'}.
3. String, name for certain dataset. Accepted values are: 'coco', 'voc'
and 'waymo'. 4. String, yaml filename that stores label_map.
annotations_json_file: JSON with COCO data format containing golden
bounding boxes. Used for validation. If None, use the ground truth from
the dataloader. Refer to
https://towardsdatascience.com/coco-data-format-for-object-detection-a4c5eaf518c5
for the description of COCO data format.
"""
super(DataLoader, self).__init__(dataset=None, size=size)
self.tfrecord_file_patten = tfrecord_file_patten
self.label_map = _get_label_map(label_map)
self.annotations_json_file = annotations_json_file
@classmethod
def from_pascal_voc(
cls,
images_dir: str,
annotations_dir: str,
label_map: Union[List[str], Dict[int, str], str],
annotation_filenames: Optional[Collection[str]] = None,
ignore_difficult_instances: bool = False,
num_shards: int = 100,
max_num_images: Optional[int] = None,
cache_dir: Optional[str] = None,
cache_prefix_filename: Optional[str] = None) -> DetectorDataLoader:
"""Loads from dataset with PASCAL VOC format.
Refer to
https://towardsdatascience.com/coco-data-format-for-object-detection-a4c5eaf518c5
for the description of PASCAL VOC data format.
LabelImg Tool (https://github.com/tzutalin/labelImg) can annotate the image
and save annotations as XML files in PASCAL VOC data format.
Annotations are in the folder: `annotations_dir`.
Raw images are in the foloder: `images_dir`.
Args:
images_dir: Path to directory that store raw images.
annotations_dir: Path to the annotations directory.
label_map: Variable shows mapping label integers ids to string label
names. 0 is the reserved key for `background`. Label names can't be
duplicated. Supported format: 1. Dict, map label integers ids to string
label names, e.g.
{1: 'person', 2: 'notperson'}. 2. List, a list of label names. e.g.
['person', 'notperson'] which is
the same as setting label_map={1: 'person', 2: 'notperson'}.
3. String, name for certain dataset. Accepted values are: 'coco', 'voc'
and 'waymo'. 4. String, yaml filename that stores label_map.
annotation_filenames: Collection of annotation filenames (strings) to be
loaded. For instance, if there're 3 annotation files [0.xml, 1.xml,
2.xml] in `annotations_dir`, setting annotation_filenames=['0', '1']
makes this method only load [0.xml, 1.xml].
ignore_difficult_instances: Whether to ignore difficult instances.
`difficult` can be set inside `object` item in the annotation xml file.
num_shards: Number of shards for output file.
max_num_images: Max number of imags to process.
cache_dir: The cache directory to save TFRecord, metadata and json file.
When cache_dir is not set, a temporary folder will be created and will
not be removed automatically after training which makes it can be used
later.
cache_prefix_filename: The cache prefix filename. If not set, will
automatically generate it based on `image_dir`, `annotations_dir` and
`annotation_filenames`.
Returns:
ObjectDetectorDataLoader object.
"""
label_map = _get_label_map(label_map)
# If `cache_prefix_filename` is None, automatically generates a hash value.
if cache_prefix_filename is None:
cache_prefix_filename = util.get_cache_prefix_filename_from_pascal(
images_dir=images_dir,
annotations_dir=annotations_dir,
annotation_filenames=annotation_filenames,
num_shards=num_shards)
cache_files = util.get_cache_files(
cache_dir=cache_dir,
cache_prefix_filename=cache_prefix_filename,
num_shards=num_shards)
# If not cached, writes data into tfrecord_file_paths and
# annotations_json_file_path.
# If `num_shards` differs, it's still not cached.
if not util.is_cached(cache_files):
cache_writer = util.PascalVocCacheFilesWriter(
label_map=label_map,
images_dir=images_dir,
num_shards=num_shards,
max_num_images=max_num_images,
ignore_difficult_instances=ignore_difficult_instances)
cache_writer.write_files(
cache_files=cache_files,
annotations_dir=annotations_dir,
annotation_filenames=annotation_filenames)
return cls.from_cache(cache_files.cache_prefix)
@classmethod
def from_csv(
cls,
filename: str,
images_dir: Optional[str] = None,
delimiter: str = ',',
quotechar: str = '"',
num_shards: int = 10,
max_num_images: Optional[int] = None,
cache_dir: Optional[str] = None,
cache_prefix_filename: Optional[str] = None
) -> List[Optional[DetectorDataLoader]]:
"""Loads the data from the csv file.
The csv format is shown in
https://cloud.google.com/vision/automl/object-detection/docs/csv-format. We
supports bounding box with 2 vertices for now. We support the files in the
local machine as well.<|fim▁hole|> Args:
filename: Name of the csv file.
images_dir: Path to directory that store raw images. If None, the image
path in the csv file is the path to Google Cloud Storage or the absolute
path in the local machine.
delimiter: Character used to separate fields.
quotechar: Character used to quote fields containing special characters.
num_shards: Number of shards for output file.
max_num_images: Max number of imags to process.
cache_dir: The cache directory to save TFRecord, metadata and json file.
When cache_dir is None, a temporary folder will be created and will not
be removed automatically after training which makes it can be used
later.
cache_prefix_filename: The cache prefix filename. If None, will
automatically generate it based on `filename`.
Returns:
train_data, validation_data, test_data which are ObjectDetectorDataLoader
objects. Can be None if without such data.
"""
# If `cache_prefix_filename` is None, automatically generates a hash value.
if cache_prefix_filename is None:
cache_prefix_filename = util.get_cache_prefix_filename_from_csv(
csv_file=filename, num_shards=num_shards)
# Gets a list of cache files mapping `set_prefixes`.
set_prefixes = ['TRAIN', 'VAL', 'TEST']
cache_files_list = util.get_cache_files_sequence(
cache_dir=cache_dir,
cache_prefix_filename=cache_prefix_filename,
set_prefixes=set_prefixes,
num_shards=num_shards)
# If not cached, writes data into tfrecord_file_paths and
# annotations_json_file_path.
# If `num_shards` differs, it's still not cached.
if not util.is_all_cached(cache_files_list):
lines_list, label_map = _group_csv_lines(
csv_file=filename,
set_prefixes=set_prefixes,
delimiter=delimiter,
quotechar=quotechar)
cache_writer = util.CsvCacheFilesWriter(
label_map=label_map,
images_dir=images_dir,
num_shards=num_shards,
max_num_images=max_num_images)
for cache_files, csv_lines in zip(cache_files_list, lines_list):
if csv_lines:
cache_writer.write_files(cache_files, csv_lines=csv_lines)
# Loads training & validation & test data from cache.
data = []
for cache_files in cache_files_list:
cache_prefix = cache_files.cache_prefix
try:
data.append(cls.from_cache(cache_prefix))
except ValueError:
# No training / validation / test data in the csv file.
# For instance, there're only training and test data in the csv file,
# this will make this function return `train_data, None, test_data`
data.append(None)
return data
@classmethod
def from_cache(cls, cache_prefix):
"""Loads the data from cache.
Args:
cache_prefix: The cache prefix including the cache directory and the cache
prefix filename, e.g: '/tmp/cache/train'.
Returns:
ObjectDetectorDataLoader object.
"""
# Gets TFRecord files.
tfrecord_file_patten = cache_prefix + '*.tfrecord'
if not tf.io.gfile.glob(tfrecord_file_patten):
raise ValueError('TFRecord files are empty.')
# Loads meta_data.
meta_data_file = cache_prefix + util.META_DATA_FILE_SUFFIX
if not tf.io.gfile.exists(meta_data_file):
raise ValueError('Metadata file %s doesn\'t exist.' % meta_data_file)
with tf.io.gfile.GFile(meta_data_file, 'r') as f:
meta_data = yaml.load(f, Loader=yaml.FullLoader)
# Gets annotation json file.
ann_json_file = cache_prefix + util.ANN_JSON_FILE_SUFFIX
if not tf.io.gfile.exists(ann_json_file):
ann_json_file = None
return DataLoader(tfrecord_file_patten, meta_data['size'],
meta_data['label_map'], ann_json_file)
def gen_dataset(self,
model_spec,
batch_size=None,
is_training=False,
use_fake_data=False):
"""Generate a batched tf.data.Dataset for training/evaluation.
Args:
model_spec: Specification for the model.
batch_size: A integer, the returned dataset will be batched by this size.
is_training: A boolean, when True, the returned dataset will be optionally
shuffled and repeated as an endless dataset.
use_fake_data: Use fake input.
Returns:
A TF dataset ready to be consumed by Keras model.
"""
reader = det_dataloader.InputReader(
self.tfrecord_file_patten,
is_training=is_training,
use_fake_data=use_fake_data,
max_instances_per_image=model_spec.config.max_instances_per_image,
debug=model_spec.config.debug)
self._dataset = reader(model_spec.config.as_dict(), batch_size=batch_size)
return self._dataset
def split(self, fraction):
"""This function isn't implemented for the object detection task."""
raise NotImplementedError(
'split function is not supported in the object detection task.')<|fim▁end|> | |
<|file_name|>bp_user_metadata_game_system.py<|end_file_name|><|fim▁begin|>from common import common_global
from common import common_pagination_bootstrap
from sanic import Blueprint
blueprint_user_metadata_game_system = Blueprint('name_blueprint_user_metadata_game_system',
url_prefix='/user')
@blueprint_user_metadata_game_system.route('/user_meta_game_system', methods=['GET', 'POST'])
@common_global.jinja_template.template('bss_user/metadata/bss_user_metadata_game_system.html')
@common_global.auth.login_required
async def url_bp_user_metadata_game_system(request):
"""
Display list of game system metadata
"""
page, offset = common_pagination_bootstrap.com_pagination_page_calc(request)<|fim▁hole|> item_count=await request.app.db_functions.db_meta_game_system_list_count(
db_connection=db_connection),
client_items_per_page=
int(request.ctx.session[
'per_page']),
format_number=True)
media_data = await request.app.db_functions.db_meta_game_system_list(offset,
int(request.ctx.session[
'per_page']),
request.ctx.session[
'search_text'],
db_connection=db_connection)
await request.app.db_pool.release(db_connection)
return {
'media': media_data,
'pagination_links': pagination,
}
@blueprint_user_metadata_game_system.route('/user_meta_game_system_detail/<guid>')
@common_global.jinja_template.template(
'bss_user/metadata/bss_user_metadata_game_system_detail.html')
@common_global.auth.login_required
async def url_bp_user_metadata_game_system_detail(request, guid):
"""
Display metadata game detail
"""
db_connection = await request.app.db_pool.acquire()
media_data = await request.app.db_functions.db_meta_game_system_by_guid(guid,
db_connection=db_connection)
await request.app.db_pool.release(db_connection)
return {
'guid': guid,
'data': media_data,
}<|fim▁end|> | request.ctx.session['search_page'] = 'meta_game_system'
db_connection = await request.app.db_pool.acquire()
pagination = common_pagination_bootstrap.com_pagination_boot_html(page,
url='/user/user_meta_game', |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
"""
Variables that are shared between modules
"""
import sys
import codecs
version = ""
scriptPath = ""
scriptName = ""
mediaInfoExe = ""
mets_ns = ""
mods_ns = ""
premis_ns = ""
ebucore_ns = ""
xlink_ns = ""
xsi_ns = ""
isolyzer_ns = ""
cdInfo_ns = ""
dfxml_ns = ""
metsSchema = ""
modsSchema = ""
premisSchema = ""
ebucoreSchema = ""
NSMAP = {}
failedPPNs = []
errors = 0<|fim▁hole|>createSIPs = False
pruneBatch = False
skipChecksumFlag = False
batchErr = ""
dirOut = ""
dirsInMetaCarriers = []
carrierTypeAllowedValues = []
# Set encoding of the terminal to UTF-8
if sys.version.startswith("2"):
out = codecs.getwriter("UTF-8")(sys.stdout)
err = codecs.getwriter("UTF-8")(sys.stderr)
elif sys.version.startswith("3"):
out = codecs.getwriter("UTF-8")(sys.stdout.buffer)
err = codecs.getwriter("UTF-8")(sys.stderr.buffer)<|fim▁end|> | warnings = 0 |
<|file_name|>ip.py<|end_file_name|><|fim▁begin|>from charmhelpers.core.hookenv import (
config,
unit_get,
)
from charmhelpers.contrib.network.ip import (
get_address_in_network,
is_address_in_network,
is_ipv6,
get_ipv6_addr,
)
from charmhelpers.contrib.hahelpers.cluster import is_clustered
PUBLIC = 'public'
INTERNAL = 'int'
ADMIN = 'admin'
_address_map = {
PUBLIC: {
'config': 'os-public-network',
'fallback': 'public-address'
},
INTERNAL: {
'config': 'os-internal-network',<|fim▁hole|> 'fallback': 'private-address'
}
}
def canonical_url(configs, endpoint_type=PUBLIC):
'''
Returns the correct HTTP URL to this host given the state of HTTPS
configuration, hacluster and charm configuration.
:configs OSTemplateRenderer: A config tempating object to inspect for
a complete https context.
:endpoint_type str: The endpoint type to resolve.
:returns str: Base URL for services on the current service unit.
'''
scheme = 'http'
if 'https' in configs.complete_contexts():
scheme = 'https'
address = resolve_address(endpoint_type)
if is_ipv6(address):
address = "[{}]".format(address)
return '%s://%s' % (scheme, address)
def resolve_address(endpoint_type=PUBLIC):
resolved_address = None
if is_clustered():
if config(_address_map[endpoint_type]['config']) is None:
# Assume vip is simple and pass back directly
resolved_address = config('vip')
else:
for vip in config('vip').split():
if is_address_in_network(
config(_address_map[endpoint_type]['config']),
vip):
resolved_address = vip
else:
if config('prefer-ipv6'):
fallback_addr = get_ipv6_addr()
else:
fallback_addr = unit_get(_address_map[endpoint_type]['fallback'])
resolved_address = get_address_in_network(
config(_address_map[endpoint_type]['config']), fallback_addr)
if resolved_address is None:
raise ValueError('Unable to resolve a suitable IP address'
' based on charm state and configuration')
else:
return resolved_address<|fim▁end|> | 'fallback': 'private-address'
},
ADMIN: {
'config': 'os-admin-network', |
<|file_name|>multimedia.py<|end_file_name|><|fim▁begin|>from django.contrib import messages
from django.http import Http404, HttpResponse
from django.shortcuts import render
from corehq.apps.app_manager.dbaccessors import get_app
from corehq.apps.app_manager.decorators import require_deploy_apps, \
require_can_edit_apps
from corehq.apps.app_manager.xform import XForm
from corehq.util.view_utils import set_file_download
from dimagi.utils.logging import notify_exception
from dimagi.utils.subprocess_timeout import ProcessTimedOut
@require_can_edit_apps
def multimedia_list_download(request, domain, app_id):
app = get_app(domain, app_id)
include_audio = request.GET.get("audio", True)
include_images = request.GET.get("images", True)
strip_jr = request.GET.get("strip_jr", True)
filelist = []
for m in app.get_modules():
for f in m.get_forms():
parsed = XForm(f.source)
parsed.validate()
if include_images:
filelist.extend(parsed.image_references)
if include_audio:
filelist.extend(parsed.audio_references)
if strip_jr:
filelist = [s.replace("jr://file/", "") for s in filelist if s]
response = HttpResponse()
set_file_download(response, 'list.txt')
response.write("\n".join(sorted(set(filelist))))
return response
@require_deploy_apps
def multimedia_ajax(request, domain, app_id, template='app_manager/v1/partials/multimedia_ajax.html'):
app = get_app(domain, app_id)
if app.get_doc_type() == 'Application':
try:
multimedia_state = app.check_media_state()<|fim▁hole|> except ProcessTimedOut:
notify_exception(request)
messages.warning(request, (
"We were unable to check if your forms had errors. "
"Refresh the page and we will try again."
))
multimedia_state = {
'has_media': False,
'has_form_errors': True,
'has_missing_refs': False,
}
context = {
'multimedia_state': multimedia_state,
'domain': domain,
'app': app,
}
return render(request, template, context)
else:
raise Http404()<|fim▁end|> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.